diff --git a/.gitignore b/.gitignore index c9f841b..f7d387a 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,6 @@ backend.vars # caches lambdas/.zip-cache/* + +# Junk files +.DS_Store diff --git a/README.md b/README.md index 3bd2e38..5dbfb81 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,7 @@ for dxw's Dalmatian hosting platform. | [aws_cloudtrail.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail) | resource | | [aws_cloudwatch_log_group.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource | | [aws_cloudwatch_log_group.cloudwatch_slack_alerts_lambda_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource | +| [aws_cloudwatch_log_group.datadog_forwarder_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource | | [aws_cloudwatch_log_group.delete_default_resources_lambda_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource | | [aws_codestarconnections_connection.connections](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codestarconnections_connection) | resource | | [aws_glue_catalog_database.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/glue_catalog_database) | resource | @@ -50,6 +51,13 @@ for dxw's Dalmatian hosting platform. | [aws_iam_policy.custom](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [aws_iam_policy.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [aws_iam_policy.datadog_aws_integration_resource_collection](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_kms_encrypt](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_kms_encrypt_wildcard](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_s3_object_read](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_s3_object_rw](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_secret](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | +| [aws_iam_policy.datadog_forwarder_tags](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [aws_iam_policy.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [aws_iam_policy.delete_default_resources_vpc_delete_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | | [aws_iam_policy.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource | @@ -57,6 +65,7 @@ for dxw's Dalmatian hosting platform. 
| [aws_iam_role.cloudwatch_slack_alerts_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role.custom](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | +| [aws_iam_role.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource | | [aws_iam_role_policy_attachment.cloudtrail_cloudwatch_logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | @@ -65,6 +74,13 @@ for dxw's Dalmatian hosting platform. | [aws_iam_role_policy_attachment.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [aws_iam_role_policy_attachment.datadog_aws_integration_resource_collection](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [aws_iam_role_policy_attachment.datadog_aws_integration_security_audit](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_kms_encrypt](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_kms_encrypt_wildcard](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_s3_object_read](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_s3_object_rw](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_secret](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | +| [aws_iam_role_policy_attachment.datadog_forwarder_tags](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [aws_iam_role_policy_attachment.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [aws_iam_role_policy_attachment.delete_default_resources_vpc_delete_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | | [aws_iam_role_policy_attachment.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource | @@ -73,37 +89,54 @@ for dxw's Dalmatian hosting platform. 
| [aws_kms_alias.cloudwatch_opsgenie_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | | [aws_kms_alias.cloudwatch_opsgenie_alerts_sns_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | | [aws_kms_alias.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | +| [aws_kms_alias.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | +| [aws_kms_alias.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | | [aws_kms_alias.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource | | [aws_kms_key.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_kms_key.cloudtrail_cloudwatch_logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_kms_key.cloudwatch_opsgenie_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_kms_key.cloudwatch_opsgenie_alerts_sns_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_kms_key.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | +| [aws_kms_key.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | +| [aws_kms_key.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_kms_key.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource | | [aws_lambda_function.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource | +| [aws_lambda_function.datadog_service_log_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource | | [aws_lambda_function.delete_default_resources](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource | | [aws_lambda_permission.cloudwatch_slack_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource | +| [aws_lambda_permission.datadog_forwarder_allow_cloudwatch](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource | +| [aws_lambda_permission.datadog_forwarder_allow_s3](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource | +| [aws_lambda_permission.datadog_forwarder_allow_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource | | [aws_route53_zone.root](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/route53_zone) | resource | | [aws_s3_bucket.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | | [aws_s3_bucket.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | +| 
[aws_s3_bucket.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | | [aws_s3_bucket.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource | | [aws_s3_bucket_lifecycle_configuration.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource | | [aws_s3_bucket_lifecycle_configuration.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource | +| [aws_s3_bucket_lifecycle_configuration.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource | | [aws_s3_bucket_lifecycle_configuration.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource | | [aws_s3_bucket_logging.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource | | [aws_s3_bucket_logging.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource | +| [aws_s3_bucket_logging.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource | | [aws_s3_bucket_policy.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | | [aws_s3_bucket_policy.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | +| [aws_s3_bucket_policy.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | | [aws_s3_bucket_policy.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource | | [aws_s3_bucket_public_access_block.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | | [aws_s3_bucket_public_access_block.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | +| [aws_s3_bucket_public_access_block.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | | [aws_s3_bucket_public_access_block.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource | | [aws_s3_bucket_server_side_encryption_configuration.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource | | [aws_s3_bucket_server_side_encryption_configuration.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource | +| [aws_s3_bucket_server_side_encryption_configuration.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource | | [aws_s3_bucket_server_side_encryption_configuration.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource | | 
[aws_s3_bucket_versioning.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource | | [aws_s3_bucket_versioning.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource | +| [aws_s3_bucket_versioning.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource | | [aws_s3_bucket_versioning.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource | +| [aws_secretsmanager_secret.datadog_api_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret) | resource | +| [aws_secretsmanager_secret_version.datadog_api_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret_version) | resource | | [aws_sns_topic.cloudwatch_opsgenie_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource | | [aws_sns_topic.cloudwatch_opsgenie_alerts_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource | | [aws_sns_topic.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource | @@ -115,7 +148,10 @@ for dxw's Dalmatian hosting platform. | [aws_sns_topic_subscription.cloudwatch_slack_alerts_lambda_subscription](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic_subscription) | resource | | [aws_ssm_service_setting.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/ssm_service_setting) | resource | | [datadog_integration_aws.aws](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws) | resource | +| [datadog_integration_aws_lambda_arn.datadog_forwarder_arn](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws_lambda_arn) | resource | +| [datadog_integration_aws_log_collection.datadog_forwarder](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws_log_collection) | resource | | [archive_file.cloudwatch_slack_alerts_lambda](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source | +| [archive_file.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source | | [archive_file.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source | | [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source | | [aws_regions.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/regions) | data source | @@ -142,6 +178,9 @@ for dxw's Dalmatian hosting platform. | [custom\_iam\_roles](#input\_custom\_iam\_roles) | Configure custom IAM roles/policies |
map(object({
  description = string
  policies = map(object({
    description = string
    Version = string
    Statement = list(object({
      Action = list(string)
      Effect = string
      Resource = string
    }))
  }))
  assume_role_policy = object({
    Version = string
    Statement = list(object({
      Action = list(string)
      Effect = string
      Principal = map(string)
    }))
  })
}))
| n/a | yes | | [datadog\_api\_key](#input\_datadog\_api\_key) | Datadog API key | `string` | n/a | yes | | [datadog\_app\_key](#input\_datadog\_app\_key) | Datadog App key | `string` | n/a | yes | +| [datadog\_forwarder\_enhanced\_metrics](#input\_datadog\_forwarder\_enhanced\_metrics) | Set the environment variable DD\_ENHANCED\_METRICS on the Forwarder. Set to false to stop the Forwarder from generating enhanced metrics itself, but it will still forward custom metrics from other lambdas. | `bool` | n/a | yes | +| [datadog\_forwarder\_log\_retention](#input\_datadog\_forwarder\_log\_retention) | Datadog Forwarder S3 bucket retention in days. Set to 0 to keep all logs. | `number` | n/a | yes | +| [datadog\_forwarder\_store\_failed\_events](#input\_datadog\_forwarder\_store\_failed\_events) | Set environment variable DD\_STORE\_FAILED\_EVENTS on the Forwarder. Set to true to enable the forwarder to also store event data in the S3 bucket | `bool` | n/a | yes | | [datadog\_region](#input\_datadog\_region) | Datadog region | `string` | n/a | yes | | [delete\_default\_resources\_lambda\_kms\_encryption](#input\_delete\_default\_resources\_lambda\_kms\_encryption) | Conditionally encrypt the Delete Default Resources Lambda logs with KMS | `bool` | n/a | yes | | [delete\_default\_resources\_log\_retention](#input\_delete\_default\_resources\_log\_retention) | Log retention for the Delete Default Resources Lambda | `number` | n/a | yes | @@ -149,6 +188,7 @@ for dxw's Dalmatian hosting platform. | [enable\_cloudwatch\_opsgenie\_alerts](#input\_enable\_cloudwatch\_opsgenie\_alerts) | Enable CloudWatch Opsgenie alerts. This creates an SNS topic to which alerts and pipelines can send messages, which are then sent to the Opsgenie SNS endpoint. | `bool` | n/a | yes | | [enable\_cloudwatch\_slack\_alerts](#input\_enable\_cloudwatch\_slack\_alerts) | Enable CloudWatch Slack alerts. This creates an SNS topic to which alerts and pipelines can send messages, which are then picked up by a Lambda function that forwards them to a Slack webhook. | `bool` | n/a | yes | | [enable\_datadog\_aws\_integration](#input\_enable\_datadog\_aws\_integration) | Conditionally create the datadog AWS integration role (https://docs.datadoghq.com/integrations/guide/aws-terraform-setup/) and configure the datadog integration | `bool` | n/a | yes | +| [enable\_datadog\_forwarder](#input\_enable\_datadog\_forwarder) | Conditionally launch Datadog AWS service log forwarder lambda | `bool` | n/a | yes | | [enable\_delete\_default\_resources](#input\_enable\_delete\_default\_resources) | Creates a Lambda function which deletes all default VPCs and resources within them. This only needs to be ran once, either through the AWS console or via the AWS CLI | `bool` | n/a | yes | | [enable\_route53\_root\_hosted\_zone](#input\_enable\_route53\_root\_hosted\_zone) | Conditionally create Route53 hosted zone, which will contain the DNS records for resources launched within the account. | `bool` | n/a | yes | | [enable\_s3\_tfvars](#input\_enable\_s3\_tfvars) | enable\_s3\_tfvars | `bool` | n/a | yes | diff --git a/datadog-forwarder-lambda.tf b/datadog-forwarder-lambda.tf new file mode 100644 index 0000000..109fbc1 --- /dev/null +++ b/datadog-forwarder-lambda.tf @@ -0,0 +1,283 @@ +resource "aws_kms_key" "datadog_forwarder" { + count = local.enable_datadog_forwarder ? 
1 : 0 + + description = "This key is used to encrypt the DataDog Forwarder Lambda logs (${local.project_name})" + deletion_window_in_days = 10 + enable_key_rotation = true + policy = templatefile( + "${path.root}/policies/kms-key-policy.json.tpl", + { + statement = <=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: wrapt (<2,>=1.10) +Provides-Extra: dev +Requires-Dist: tox ; extra == 'dev' +Requires-Dist: PyTest ; extra == 'dev' +Requires-Dist: PyTest-Cov ; extra == 'dev' +Requires-Dist: bump2version (<1) ; extra == 'dev' +Requires-Dist: sphinx (<2) ; extra == 'dev' + + +Deprecated Library +------------------ + +Deprecated is Easy to Use +````````````````````````` + +If you need to mark a function or a method as deprecated, +you can use the ``@deprecated`` decorator: + +Save in a hello.py: + +.. code:: python + + from deprecated import deprecated + + + @deprecated(version='1.2.1', reason="You should use another function") + def some_old_function(x, y): + return x + y + + + class SomeClass(object): + @deprecated(version='1.3.0', reason="This method is deprecated") + def some_old_method(self, x, y): + return x + y + + + some_old_function(12, 34) + obj = SomeClass() + obj.some_old_method(5, 8) + + +And Easy to Setup +````````````````` + +And run it: + +.. code:: bash + + $ pip install Deprecated + $ python hello.py + hello.py:15: DeprecationWarning: Call to deprecated function (or staticmethod) some_old_function. + (You should use another function) -- Deprecated since version 1.2.0. + some_old_function(12, 34) + hello.py:17: DeprecationWarning: Call to deprecated method some_old_method. + (This method is deprecated) -- Deprecated since version 1.3.0. + obj.some_old_method(5, 8) + + +You can document your code +`````````````````````````` + +Have you ever wonder how to document that some functions, classes, methods, etc. are deprecated? +This is now possible with the integrated Sphinx directives: + +For instance, in hello_sphinx.py: + +.. code:: python + + from deprecated.sphinx import deprecated + from deprecated.sphinx import versionadded + from deprecated.sphinx import versionchanged + + + @versionadded(version='1.0', reason="This function is new") + def function_one(): + '''This is the function one''' + + + @versionchanged(version='1.0', reason="This function is modified") + def function_two(): + '''This is the function two''' + + + @deprecated(version='1.0', reason="This function will be removed soon") + def function_three(): + '''This is the function three''' + + + function_one() + function_two() + function_three() # warns + + help(function_one) + help(function_two) + help(function_three) + + +The result it immediate +``````````````````````` + +Run it: + +.. code:: bash + + $ python hello_sphinx.py + + hello_sphinx.py:23: DeprecationWarning: Call to deprecated function (or staticmethod) function_three. + (This function will be removed soon) -- Deprecated since version 1.0. + function_three() # warns + + Help on function function_one in module __main__: + + function_one() + This is the function one + + .. versionadded:: 1.0 + This function is new + + Help on function function_two in module __main__: + + function_two() + This is the function two + + .. versionchanged:: 1.0 + This function is modified + + Help on function function_three in module __main__: + + function_three() + This is the function three + + .. 
deprecated:: 1.0 + This function will be removed soon + + +Links +````` + +* `Python package index (PyPi) `_ +* `GitHub website `_ +* `Read The Docs `_ +* `EBook on Lulu.com `_ +* `StackOverFlow Q&A `_ +* `Development version + `_ + diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD new file mode 100644 index 0000000..d8f67cb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD @@ -0,0 +1,13 @@ +Deprecated-1.2.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Deprecated-1.2.14.dist-info/LICENSE.rst,sha256=HoPt0VvkGbXVveNy4yXlJ_9PmRX1SOfHUxS0H2aZ6Dw,1081 +Deprecated-1.2.14.dist-info/METADATA,sha256=xQYvk5nwOfnkxxRD-VHkpE-sMu0IBHRZ8ayspypfkTs,5354 +Deprecated-1.2.14.dist-info/RECORD,, +Deprecated-1.2.14.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Deprecated-1.2.14.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110 +Deprecated-1.2.14.dist-info/top_level.txt,sha256=nHbOYawKPQQE5lQl-toUB1JBRJjUyn_m_Mb8RVJ0RjA,11 +deprecated/__init__.py,sha256=ZphiULqDVrESSB0mLV2WA88JyhQxZSK44zuDGbV5k-g,349 +deprecated/__pycache__/__init__.cpython-311.pyc,, +deprecated/__pycache__/classic.cpython-311.pyc,, +deprecated/__pycache__/sphinx.cpython-311.pyc,, +deprecated/classic.py,sha256=QugmUi7IhBvp2nDvMtyWqFDPRR43-9nfSZG1ZJSDpFM,9880 +deprecated/sphinx.py,sha256=NqQ0oKGcVn6yUe23iGbCieCgvWbEDQSPt9QelbXJnDU,10258 diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL new file mode 100644 index 0000000..f771c29 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt new file mode 100644 index 0000000..9f8d550 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt @@ -0,0 +1 @@ +deprecated diff --git a/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF b/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF new file mode 100644 index 0000000..b55229a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF @@ -0,0 +1,70 @@ +-----BEGIN PKCS7----- +MIAGCSqGSIb3DQEHAqCAMIACAQExDTALBglghkgBZQMEAgIwCwYJKoZIhvcNAQcB +oIAwggJ7MIICAqADAgECAhEAgseesYiwj2v65uNdIxGWjDAKBggqhkjOPQQDAzBp +MQswCQYDVQQGEwJVUzEMMAoGA1UECgwDQVdTMRUwEwYDVQQLDAxDcnlwdG9ncmFw +aHkxCzAJBgNVBAgMAldBMSgwJgYDVQQDDB9TaWduZXIgdXMtZWFzdC0xIFNVQk9S +RElOQVRFIENBMB4XDTI0MTAxMTAzNTUwNloXDTI0MTAxNDA0NTUwNVowYjELMAkG +A1UEBhMCVVMxCzAJBgNVBAgMAldBMRAwDgYDVQQHDAdTZWF0dGxlMQwwCgYDVQQK +DANBV1MxFTATBgNVBAsMDENyeXB0b2dyYXBoeTEPMA0GA1UEAwwGc2lnbmVyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAENEJBApwU+6Uew83wGVUmaoQVqhH8OWJ8uhNK +pYeGe2aO+ltRW56QVdcbTV5bndUcW9nslQ946H5Zn1Un/j/WPxkBJjUfNZvKvwDL +kD2kbr6fpJnpTrylWuJC/JyRXP3Uo3UwczAJBgNVHRMEAjAAMB8GA1UdIwQYMBaA +FBW0Hd/bDRgmoXefjl93qtSKwJXVMB0GA1UdDgQWBBRcF773L+DbVZJg2nkGpbHH 
+SGoCJDAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwMwCgYI +KoZIzj0EAwMDZwAwZAIwEVaZHYn31KWcjTkpUDxyYtN2OZdFmDx05ZwyOjz74MbS +ZI1gWjpUz8fG5ZeZ2rzfAjAiJzHe0f6PnC29XXlT+XbaLCgX+93ma2BO0ssYugs6 +FMXwBljceZQwC+X4Dpp58agwggJzMIIB+qADAgECAhEAzX5q6bZNJ1qnMgY/JSNY +BDAKBggqhkjOPQQDAzBpMQswCQYDVQQGEwJVUzEMMAoGA1UECgwDQVdTMRUwEwYD +VQQLDAxDcnlwdG9ncmFwaHkxCzAJBgNVBAgMAldBMSgwJgYDVQQDDB9TaWduZXIg +dXMtd2VzdC0yIFNVQk9SRElOQVRFIENBMB4XDTI0MDgwMTE0MjIxNloXDTI1MDUw +MTE1MjIxNlowaTELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UECwwM +Q3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEoMCYGA1UEAwwfU2lnbmVyIHVzLWVh +c3QtMSBTVUJPUkRJTkFURSBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABB6NdKgv +U6rWCAUVIeCLunsKQNOQeSGS0+Vu1NMa3pyx8kJTdhkB/Alc2BiC9q8Vg7JS8I9y +sLS73z4eTu915XFBRPNEszWLLcjuxyms5V1261wOnVompfb6sWohfQDQ9aNmMGQw +EgYDVR0TAQH/BAgwBgEB/wIBADAfBgNVHSMEGDAWgBQiEqKJBM6ppy9zVIGE+K0F +Mi1HazAdBgNVHQ4EFgQUFbQd39sNGCahd5+OX3eq1IrAldUwDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBXdKjpiudEhq7Fb/vVcb9DVuelVi2O8SENH +kwH5aBiMe1S1VYTkP8eRtaiM6Qzh8wIwEz/oPBUFEDawCkVpg7i726Db75ZdISZ+ +IzTrUnDdv0xinWVl7/2oKs7UQx1birspMIICbTCCAfOgAwIBAgIRANBJlLz1Fa3M +o9YlS8oV3AUwCgYIKoZIzj0EAwMwYjELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FX +UzEVMBMGA1UECwwMQ3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEhMB8GA1UEAwwY +U2lnbmVyIHVzLXdlc3QtMiBST09UIENBMB4XDTIxMDcxMjE5NTAzN1oXDTI2MDcx +MjIwNTAzN1owaTELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UECwwM +Q3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEoMCYGA1UEAwwfU2lnbmVyIHVzLXdl +c3QtMiBTVUJPUkRJTkFURSBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABNzE7TkM +6mrYlE9trVdemsxNbCWXUwjM1x8mOqtZ04mL6xLPnubDeGX0C+Zx4QjH3/kspxcZ +hAyvV2wvs8SA/HMVv1gMVwrmqtMgsNzBF7DjROPZ2aVRaNdb4DZYpVKTk6NmMGQw +EgYDVR0TAQH/BAgwBgEB/wIBATAfBgNVHSMEGDAWgBR57Gmd9LD9Ijf7LzNGP0Gx +CPahXzAdBgNVHQ4EFgQUIhKiiQTOqacvc1SBhPitBTItR2swDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2gAMGUCMDK5gsIf52Gh5TFT2WQDWwgLfoHlaUqGq2yv +/TPFvJQ6PeU52DxsFkUjBK9y3D/CdwIxANHVSZ9E625jNBrq7211RBbA9FG39N8z +dDyrvpQPuCid4fruMkuAPOLnoWOk5YpV+DCCAkQwggHKoAMCAQICEQCwD+lKFGkZ +G4M9/Aaa0RubMAoGCCqGSM49BAMDMGIxCzAJBgNVBAYTAlVTMQwwCgYDVQQKDANB +V1MxFTATBgNVBAsMDENyeXB0b2dyYXBoeTELMAkGA1UECAwCV0ExITAfBgNVBAMM +GFNpZ25lciB1cy13ZXN0LTIgUk9PVCBDQTAgFw0yMDA3MTYxODIxNDdaGA8yMTIw +MDcxNjE5MjE0N1owYjELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UE +CwwMQ3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEhMB8GA1UEAwwYU2lnbmVyIHVz +LXdlc3QtMiBST09UIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEZVHVljB0VcdR +HM0iy/fmrq8iLSA4W1myRPlG7EDEXD5jwZ05J3oWceNJ9RQjHhSRBUEWu1UEhGJ8 +GSQcE0CoT2qp5qKFjBrPyRD9L3K9w/ZIumQvYsuv30zlJDPyo4Xuo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR57Gmd9LD9Ijf7LzNGP0GxCPahXzAOBgNV +HQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIxAOZ2pyA0jXTej7akG1tz3/PQ +dubi6A+9ZhzMx4kIWvdd/AflwCy33hvVPDoWbVG8vAIwHrwSAF/cyvDpmSnbJmll +5gHk0spcT17Y5BEEkXSENlsajdDLje9JjGgvaUdVLqMcAAAxggKrMIICpwIBATB+ +MGkxCzAJBgNVBAYTAlVTMQwwCgYDVQQKDANBV1MxFTATBgNVBAsMDENyeXB0b2dy +YXBoeTELMAkGA1UECAwCV0ExKDAmBgNVBAMMH1NpZ25lciB1cy1lYXN0LTEgU1VC +T1JESU5BVEUgQ0ECEQCCx56xiLCPa/rm410jEZaMMAsGCWCGSAFlAwQCAqCCAZ8w +GAYJKoZIhvcNAQkDMQsGCSqGSIb3DQEHATAgBgcrgTuBCQEDMRUYEzIwMzYwMTEx +MTU1MDUzLjcwMVowIgYJKoZIhvcNAQkFMRUYEzIwMjQxMDExMTU1MDUzLjcwMVow +KAYJKoZIhvcNAQk0MRswGTALBglghkgBZQMEAgKhCgYIKoZIzj0EAwMwPwYJKoZI +hvcNAQkEMTIEMLjbYXVmR5oZlI5YUD9xABhD9seqVJ1fKmT3UkDiWBZK3RlBus8+ +xB7MlG/8YxEn/DBlBgcrgTuBCQECMVoMWGFybjphd3M6c2lnbmVyOnVzLWVhc3Qt +MTo0NjQ2MjI1MzIwMTI6L3NpZ25pbmctam9icy9jODlkOTE4NC1mY2FhLTRjYzUt +ODFmMC0zMjQyZjdmYjllMTUwawYHK4E7gQkBBDFgDF5hcm46YXdzOnNpZ25lcjp1 +cy1lYXN0LTE6NDY0NjIyNTMyMDEyOi9zaWduaW5nLXByb2ZpbGVzL0RhdGFkb2dM 
+YW1iZGFTaWduaW5nUHJvZmlsZS85dk1JOVpBR0xjMAoGCCqGSM49BAMDBGYwZAIw +M+knoFUHpSY1U+qmX1EUQCenrg4n+wc1fK5pv8K+LddOf9KHqrY28GkutH3sLF31 +AjBX0SUoW/3rEedtYJ/N9uGDlNn69Iw2ooboeBNjK9xb4QMTHsCPBR8PBFve33rq +ADYAAAAAAAA= +-----END PKCS7----- diff --git a/lambdas/aws-dd-forwarder-3.127.0/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py new file mode 100644 index 0000000..51b1c25 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: MIT + +""" +Classes Without Boilerplate +""" + +from functools import partial +from typing import Callable + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._compat import Protocol +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Converter, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance(Protocol): + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Converter", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + + +def _make_getattr(mod_name: str) -> Callable: + """ + Create a metadata proxy for packaging information that uses *mod_name* in + its warnings and errors. + """ + + def __getattr__(name: str) -> str: + if name not in ("__version__", "__version_info__"): + msg = f"module {mod_name} has no attribute {name}" + raise AttributeError(msg) + + try: + from importlib.metadata import metadata + except ImportError: + from importlib_metadata import metadata + + meta = metadata("attrs") + + if name == "__version_info__": + return VersionInfo._from_version_string(meta["version"]) + + return meta["version"] + + return __getattr__ + + +__getattr__ = _make_getattr(__name__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi new file mode 100644 index 0000000..6ae0a83 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi @@ -0,0 +1,388 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Generic, + Mapping, + Protocol, + Sequence, + TypeVar, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo +from attrs import ( + define as define, + field as field, + mutable as mutable, + frozen as frozen, + _EqOrderType, + _ValidatorType, + _ConverterType, + _ReprArgType, + _OnSetAttrType, + _OnSetAttrArgType, + _FieldTransformer, + _ValidatorArgType, +) + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_FilterType = Callable[["Attribute[_T]", _T], bool] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +_A = TypeVar("_A", bound=type[AttrsInstance]) + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +In = TypeVar("In") +Out = TypeVar("Out") + +class Converter(Generic[In, Out]): + @overload + def __init__(self, converter: Callable[[In], Out]) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance, Attribute], Out], + *, + takes_self: Literal[True], + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, Attribute], Out], + *, + takes_field: Literal[True], + ) -> None: ... + @overload + def __init__( + self, + converter: Callable[[In, AttrsInstance], Out], + *, + takes_self: Literal[True], + ) -> None: ... + +class Attribute(Generic[_T]): + name: str + default: _T | None + validator: _ValidatorType[_T] | None + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: bool | None + init: bool + converter: _ConverterType | Converter[Any, _T] | None + metadata: dict[Any, Any] + type: type[_T] | None + kw_only: bool + on_setattr: _OnSetAttrType + alias: str | None + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. 
attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: type[_T] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: type[_T] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + type: object = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., +) -> Any: ... 
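+# Editor's note (illustrative sketch, not part of upstream attrs): the comment
+# above explains that `attrib`/`attr.ib` deliberately "lies" about its return
+# type; in practice that is what lets assignments like these type-check:
+#
+#   import attr
+#
+#   @attr.s
+#   class Point:
+#       x: int = attr.ib(8)   # explicit-default overload -> int
+#       y = attr.ib()         # no default, no validator  -> Any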
+@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: _C, + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> _C: ... +@overload +@dataclass_transform(order_default=True, field_specifiers=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., + unsafe_hash: bool | None = ..., +) -> Callable[[_C], _C]: ... +def fields(cls: type[AttrsInstance]) -> Any: ... +def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _A, + globalns: dict[str, Any] | None = ..., + localns: dict[str, Any] | None = ..., + attribs: list[Attribute[Any]] | None = ..., + include_extras: bool = ..., +) -> _A: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: list[str] | tuple[str, ...] | dict[str, Any], + bases: tuple[type, ...] = ..., + class_body: dict[str, Any] | None = ..., + repr_ns: str | None = ..., + repr: bool = ..., + cmp: _EqOrderType | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + collect_by_mro: bool = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + dict_factory: type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ..., + tuple_keys: bool | None = ..., +) -> dict[str, Any]: ... 
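+# Editor's note (illustrative sketch, not part of upstream attrs): typical
+# usage of the serialisation helpers stubbed here and just below.
+#
+#   import attr
+#
+#   @attr.s(auto_attribs=True)
+#   class User:
+#       name: str
+#       age: int
+#
+#   attr.asdict(User("Ada", 36))   # {'name': 'Ada', 'age': 36}
+#   attr.astuple(User("Ada", 36))  # ('Ada', 36)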
+ +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: _FilterType[Any] | None = ..., + tuple_factory: type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py new file mode 100644 index 0000000..f367bb3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py @@ -0,0 +1,160 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attrs.field`'s ``eq``, ``order``, + and ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if at least + one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + Args: + eq (typing.Callable | None): + Callable used to evaluate equality of two objects. + + lt (typing.Callable | None): + Callable used to evaluate whether one object is less than another + object. + + le (typing.Callable | None): + Callable used to evaluate whether one object is less than or equal + to another object. + + gt (typing.Callable | None): + Callable used to evaluate whether one object is greater than + another object. + + ge (typing.Callable | None): + Callable used to evaluate whether one object is greater than or + equal to another object. + + require_same_type (bool): + When `True`, equality and ordering methods will return + `NotImplemented` if objects are not of the same type. + + class_name (str | None): Name of class. Defaults to "Comparable". + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. 
+ msg = "eq must be define is order to complete ordering from lt, le, gt, ge." + raise ValueError(msg) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = f"__{name}__" + method.__doc__ = ( + f"Return a {_operation_names[name]} b. Computed by attrs." + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + return all(func(self, other) for func in self._requirements) + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi new file mode 100644 index 0000000..cc7893b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: _CompareWithType | None = ..., + lt: _CompareWithType | None = ..., + le: _CompareWithType | None = ..., + gt: _CompareWithType | None = ..., + ge: _CompareWithType | None = ..., + require_same_type: bool = ..., + class_name: str = ..., +) -> type: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py new file mode 100644 index 0000000..104eeb0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py @@ -0,0 +1,103 @@ +# SPDX-License-Identifier: MIT + +import inspect +import platform +import sys +import threading + +from collections.abc import Mapping, Sequence # noqa: F401 +from typing import _GenericAlias + + +PYPY = platform.python_implementation() == "PyPy" +PY_3_8_PLUS = sys.version_info[:2] >= (3, 8) +PY_3_9_PLUS = sys.version_info[:2] >= (3, 9) +PY_3_10_PLUS = sys.version_info[:2] >= (3, 10) +PY_3_11_PLUS = sys.version_info[:2] >= (3, 11) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) +PY_3_13_PLUS = sys.version_info[:2] >= (3, 13) +PY_3_14_PLUS = sys.version_info[:2] >= (3, 14) + + +if sys.version_info < (3, 8): + try: + from typing_extensions import Protocol + except ImportError: # pragma: no cover + Protocol = object +else: + from typing import Protocol # noqa: F401 + +if PY_3_14_PLUS: # pragma: no cover + import annotationlib + + _get_annotations = annotationlib.get_annotations + +else: + + def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + return cls.__dict__.get("__annotations__", {}) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. 
+ """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() + + +def get_generic_base(cl): + """If this is a generic class (A[str]), return the generic base for it.""" + if cl.__class__ is _GenericAlias: + return cl.__origin__ + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py new file mode 100644 index 0000000..9c245b1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + msg = "'run' must be bool." + raise TypeError(msg) + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py new file mode 100644 index 0000000..355cef4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py @@ -0,0 +1,522 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._compat import PY_3_9_PLUS, get_generic_base +from ._make import _OBJ_SETATTR, NOTHING, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the *attrs* attribute values of *inst* as a dict. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): Recurse into classes that are also *attrs*-decorated. + + filter (~typing.Callable): + A callable whose return code determines whether an attribute or + element is included (`True`) or dropped (`False`). Is called with + the `attrs.Attribute` as the first argument and the value as the + second argument. + + dict_factory (~typing.Callable): + A callable to produce dictionaries from. 
For example, to produce + ordered dictionaries instead of normal Python dictionaries, pass in + ``collections.OrderedDict``. + + retain_collection_types (bool): + Do not convert to `list` when encountering an attribute whose type + is `tuple` or `set`. Only meaningful if *recurse* is `True`. + + value_serializer (typing.Callable | None): + A hook that is called for every attribute or dict key/value. It + receives the current instance, field and value and must return the + (updated) value. The hook is run *after* the optional *filter* has + been applied. + + Returns: + Return type of *dict_factory*. + + Raises: + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 + If a dict has a collection for a key, it is serialized as a tuple. + """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + items = [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + try: + rv[a.name] = cf(items) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv[a.name] = cf(*items) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. 
+ rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the *attrs* attribute values of *inst* as a tuple. + + Optionally recurse into other *attrs*-decorated classes. + + Args: + inst: Instance of an *attrs*-decorated class. + + recurse (bool): + Recurse into classes that are also *attrs*-decorated. + + filter (~typing.Callable): + A callable whose return code determines whether an attribute or + element is included (`True`) or dropped (`False`). Is called with + the `attrs.Attribute` as the first argument and the value as the + second argument. + + tuple_factory (~typing.Callable): + A callable to produce tuples from. For example, to produce lists + instead of tuples. + + retain_collection_types (bool): + Do not convert to `list` or `dict` when encountering an attribute + which type is `tuple`, `dict` or `set`. Only meaningful if + *recurse* is `True`. + + Returns: + Return type of *tuple_factory* + + Raises: + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. 
:/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + items = [ + ( + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + ) + for j in v + ] + try: + rv.append(cf(items)) + except TypeError: + if not issubclass(cf, tuple): + raise + # Workaround for TypeError: cf.__new__() missing 1 required + # positional argument (which appears, for a namedturle) + rv.append(cf(*items)) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk + ), + ( + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv + ), + ) + for kk, vv in v.items() + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with *attrs* attributes. + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + Returns: + bool: + """ + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is not None: + return True + + # No attrs, maybe it's a specialized generic (A[str])? + generic_base = get_generic_base(cls) + if generic_base is not None: + generic_attrs = getattr(generic_base, "__attrs_attrs__", None) + if generic_attrs is not None: + # Stick it on here for speed next time. + cls.__attrs_attrs__ = generic_attrs + return generic_attrs is not None + return False + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + This is different from `evolve` that applies the changes to the arguments + that create the new instance. + + `evolve`'s behavior is preferable, but there are `edge cases`_ where it + doesn't work. Therefore `assoc` is deprecated, but will not be removed. + + .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251 + + Args: + inst: Instance of a class with *attrs* attributes. + + changes: Keyword changes in the new copy. + + Returns: + A copy of inst with *changes* incorporated. + + Raises: + attrs.exceptions.AttrsAttributeNotFoundError: + If *attr_name* couldn't be found on *cls*. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. This function will not be + removed du to the slightly different approach compared to + `attrs.evolve`, though. + """ + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in changes.items(): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + msg = f"{k} is not an attrs attribute on {new.__class__}." + raise AttrsAttributeNotFoundError(msg) + _OBJ_SETATTR(new, k, v) + return new + + +def evolve(*args, **changes): + """ + Create a new instance, based on the first positional argument with + *changes* applied. + + Args: + + inst: + Instance of a class with *attrs* attributes. *inst* must be passed + as a positional argument. + + changes: + Keyword changes in the new copy. + + Returns: + A copy of inst with *changes* incorporated. 
+ + Raises: + TypeError: + If *attr_name* couldn't be found in the class ``__init__``. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + .. versionadded:: 17.1.0 + .. deprecated:: 23.1.0 + It is now deprecated to pass the instance using the keyword argument + *inst*. It will raise a warning until at least April 2024, after which + it will become an error. Always pass the instance as a positional + argument. + .. versionchanged:: 24.1.0 + *inst* can't be passed as a keyword argument anymore. + """ + try: + (inst,) = args + except ValueError: + msg = ( + f"evolve() takes 1 positional argument, but {len(args)} were given" + ) + raise TypeError(msg) from None + + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types( + cls, globalns=None, localns=None, attribs=None, include_extras=True +): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in :class:`Attribute`'s + *type* field. In other words, you don't need to resolve your types if you + only use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, for example, if the name only + exists inside a method, you may pass *globalns* or *localns* to specify + other dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + Args: + cls (type): Class to resolve. + + globalns (dict | None): Dictionary containing global variables. + + localns (dict | None): Dictionary containing local variables. + + attribs (list | None): + List of attribs for the given class. This is necessary when calling + from inside a ``field_transformer`` since *cls* is not an *attrs* + class yet. + + include_extras (bool): + Resolve more accurately, if possible. Pass ``include_extras`` to + ``typing.get_hints``, if supported by the typing module. On + supported Python versions (3.9+), this resolves the types more + accurately. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class and you didn't pass any attribs. + + NameError: If types cannot be resolved because of missing variables. + + Returns: + *cls* so you can use this function also as a class decorator. Please + note that you have to apply it **after** `attrs.define`. That means the + decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + .. versionadded:: 23.1.0 *include_extras* + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + kwargs = {"globalns": globalns, "localns": localns} + + if PY_3_9_PLUS: + kwargs["include_extras"] = include_extras + + hints = typing.get_type_hints(cls, **kwargs) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _OBJ_SETATTR(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. 
+ cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py new file mode 100644 index 0000000..bf00c5f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py @@ -0,0 +1,2960 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import abc +import contextlib +import copy +import enum +import functools +import inspect +import itertools +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + PY_3_8_PLUS, + PY_3_10_PLUS, + PY_3_11_PLUS, + _AnnotationExtractor, + _get_annotations, + get_generic_base, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_OBJ_SETATTR = object.__setattr__ +_INIT_FACTORY_PAT = "__attr_factory_%s" +_CLASSVAR_PREFIXES = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_HASH_CACHE_FIELD = "_attrs_cached_hash" + +_EMPTY_METADATA_SINGLETON = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_SENTINEL = object() + +_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when `None` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when `None` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since `None` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008 + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new field / attribute on a class. + + Identical to `attrs.field`, except it's not keyword-only. + + Consider using `attrs.field` in new code (``attr.ib`` will *never* go away, + though). + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attr.s` (or similar)! + + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. 
versionchanged:: 17.1.0 + *hash* is `None` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 + *converter* as a replacement for the deprecated *convert* to achieve + consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if factory is not None: + if default is not NOTHING: + msg = ( + "The `default` and `factory` arguments are mutually exclusive." + ) + raise ValueError(msg) + if not callable(factory): + msg = "The `factory` argument must be a callable." + raise ValueError(msg) + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + Evaluate the script with the given global (globs) and local (locs) + variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs, locals=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} if locals is None else locals + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_CLASSVAR_PREFIXES) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + return attrib_name in cls.__dict__ + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) # noqa: PLW2901 + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. 
+ + If *collect_by_mro* is True, collect them in the correct MRO order, + otherwise use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = list(these.items()) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + a = attrib() if a is NOTHING else attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}" + raise ValueError(msg) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +def _make_cached_property_getattr(cached_properties, original_getattr, cls): + lines = [ + # Wrapped to get `__class__` into closure cell for super() + # (It will be replaced with the newly constructed class after construction). 
+ "def wrapper(_cls):", + " __class__ = _cls", + " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):", + " func = cached_properties.get(item)", + " if func is not None:", + " result = func(self)", + " _setter = _cached_setattr_get(self)", + " _setter(item, result)", + " return result", + ] + if original_getattr is not None: + lines.append( + " return original_getattr(self, item)", + ) + else: + lines.extend( + [ + " try:", + " return super().__getattribute__(item)", + " except AttributeError:", + " if not hasattr(super(), '__getattr__'):", + " raise", + " return super().__getattr__(item)", + " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"", + " raise AttributeError(original_error)", + ] + ) + + lines.extend( + [ + " return __getattr__", + "__getattr__ = wrapper(_cls)", + ] + ) + + unique_filename = _generate_unique_filename(cls, "getattr") + + glob = { + "cached_properties": cached_properties, + "_cached_setattr_get": _OBJ_SETATTR.__get__, + "original_getattr": original_getattr, + } + + return _make_method( + "__getattr__", + "\n".join(lines), + unique_filename, + glob, + locals={ + "_cls": cls, + }, + ) + + +def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + "__traceback__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_pre_init_has_args", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._pre_init_has_args = False + if self._has_pre_init: + # Check if the pre init method has more arguments than just `self` + # We want to pass arguments if pre init expects arguments + pre_init_func = cls.__attrs_pre_init__ + pre_init_signature = inspect.signature(pre_init_func) + self._pre_init_has_args = len(pre_init_signature.parameters) > 1 + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + 
self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _DEFAULT_ON_SETATTR, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _DEFAULT_ON_SETATTR + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + cls = self._create_slots_class() + else: + cls = self._patch_original_class() + if PY_3_10_PLUS: + cls = abc.update_abstractmethods(cls) + + # The method gets only called if it's not inherited from a base class. + # _has_own_attribute does NOT work properly for classmethods. + if ( + getattr(cls, "__attrs_init_subclass__", None) + and "__attrs_init_subclass__" not in cls.__dict__ + ): + cls.__attrs_init_subclass__() + + return cls + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _SENTINEL) is not _SENTINEL + ): + # An AttributeError can happen if a base class defines a + # class variable and we want to set an attribute with the + # same name by using only a type annotation. + with contextlib.suppress(AttributeError): + delattr(cls, name) + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _OBJ_SETATTR + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in (*tuple(self._attr_names), "__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. 
+ if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _OBJ_SETATTR + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = {} + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + if PY_3_8_PLUS: + cached_properties = { + name: cached_property.func + for name, cached_property in cd.items() + if isinstance(cached_property, functools.cached_property) + } + else: + # `functools.cached_property` was introduced in 3.8. + # So can't be used before this. + cached_properties = {} + + # Collect methods with a `__class__` reference that are shadowed in the new class. + # To know to update them. + additional_closure_functions_to_update = [] + if cached_properties: + class_annotations = _get_annotations(self._cls) + for name, func in cached_properties.items(): + # Add cached properties to names for slotting. + names += (name,) + # Clear out function from class to avoid clashing. + del cd[name] + additional_closure_functions_to_update.append(func) + annotation = inspect.signature(func).return_annotation + if annotation is not inspect.Parameter.empty: + class_annotations[name] = annotation + + original_getattr = cd.get("__getattr__") + if original_getattr is not None: + additional_closure_functions_to_update.append(original_getattr) + + cd["__getattr__"] = _make_cached_property_getattr( + cached_properties, original_getattr, self._cls + ) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_HASH_CACHE_FIELD) + + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in itertools.chain( + cls.__dict__.values(), additional_closure_functions_to_update + ): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. 
+ closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # noqa: PERF203 + # ValueError: Cell is empty + pass + else: + if match: + cell.cell_contents = cls + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + msg = "__str__ can only be generated if a __repr__ exists." + raise ValueError(msg) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _OBJ_SETATTR.__get__(self) + if isinstance(state, tuple): + # Backward compatibility with attrs instances pickled with + # attrs versions before v22.2.0 which stored tuples. + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + else: + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_HASH_CACHE_FIELD, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._pre_init_has_args, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + msg = "Can't combine custom __setattr__ with on_setattr hooks." + raise ValueError(msg) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _OBJ_SETATTR(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + with contextlib.suppress(AttributeError): + method.__module__ = self._cls.__module__ + + with contextlib.suppress(AttributeError): + method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}" + + with contextlib.suppress(AttributeError): + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. 
+ if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + msg = "Don't mix `cmp` with `eq' and `order`." + raise ValueError(msg) + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + msg = "`order` can only be True if `eq` is True too." + raise ValueError(msg) + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will + *never* go away, though). + + Args: + repr_ns (str): + When using nested classes, there was no way in Python 2 to + automatically detect that. This argument allows to set a custom + name for a more meaningful ``repr`` output. This argument is + pointless in Python 3 and is therefore deprecated. + + .. caution:: + Refer to `attrs.define` for the rest of the parameters, but note that they + can have different defaults. + + Notably, leaving *on_setattr* as `None` will **not** add any hooks. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports `None` as value which is also the default now. + .. 
versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + .. deprecated:: 24.1.0 *repr_ns* + .. versionchanged:: 24.1.0 + Instances are not compared as tuples of attributes anymore, but using a + big ``and`` condition. This is faster and has more correct behavior for + uncomparable values like `math.nan`. + .. versionadded:: 24.1.0 + If a class has an *inherited* classmethod called + ``__attrs_init_subclass__``, it is executed after the class is created. + .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. + """ + if repr_ns is not None: + import warnings + + warnings.warn( + DeprecationWarning( + "The `repr_ns` argument is deprecated and will be removed in or after August 2025." + ), + stacklevel=2, + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + msg = "Can't freeze a class with a custom __setattr__." 
+ raise ValueError(msg) + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + msg = "Invalid value for hash. Must be True, False, or None." + raise TypeError(msg) + + if hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled." + raise TypeError(msg) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + msg = "Invalid value for cache_hash. To use hash caching, init must be True." + raise TypeError(msg) + + if ( + PY_3_10_PLUS + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. 
+ """ + return ( + f"" + ) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):" + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_HASH_CACHE_FIELD} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})" + ) + else: + lines.append(f" self.{a.name} == other.{a.name}") + if a is not attrs[-1]: + lines[-1] = f"{lines[-1]} and" + lines.append(" )") + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. 
+ """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of *attrs* attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. 
+ + Returns: + tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + .. versionchanged:: 23.1.0 Add support for generic classes. + """ + generic_base = get_generic_base(cls) + + if generic_base is None and not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + + attrs = getattr(cls, "__attrs_attrs__", None) + + if attrs is None: + if generic_base is not None: + attrs = getattr(generic_base, "__attrs_attrs__", None) + if attrs is not None: + # Even though this is global state, stick it on here to speed + # it up. We rely on `cls` being cached for this to be + # efficient. + cls.__attrs_attrs__ = attrs + return attrs + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of *attrs* attributes for a class, whose keys + are the attribute names. + + Args: + cls (type): Class to introspect. + + Raises: + TypeError: If *cls* is not a class. + + attrs.exceptions.NotAnAttrsClassError: + If *cls* is not an *attrs* class. + + Returns: + dict[str, attrs.Attribute]: Dict of attribute name to definition + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + msg = "Passed object must be a class." + raise TypeError(msg) + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + msg = f"{cls!r} is not an attrs-decorated class." + raise NotAnAttrsClassError(msg) + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + Args: + inst: Instance of a class with *attrs* attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + cls = base_attr_map.get(a_name) + return cls and "__slots__" in cls.__dict__ + + +def _make_init( + cls, + attrs, + pre_init, + pre_init_has_args, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + msg = "Frozen classes can't use on_setattr." + raise ValueError(msg) + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + pre_init_has_args, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + "__attrs_init__" if attrs_init else "__init__", + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. 
+ globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})" + + +def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter( + attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter +) -> str: + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True, converter) + + return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}" + + +def _determine_setters( + frozen: bool, slots: bool, base_attr_map: dict[str, type] +): + """ + Determine the correct setter functions based on whether a class is frozen + and/or slotted. + """ + if frozen is True: + if slots is True: + return (), _setattr, _setattr_with_converter + + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + + def fmt_setter( + attr_name: str, value_var: str, has_on_setattr: bool + ) -> str: + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name: str, + value_var: str, + has_on_setattr: bool, + converter: Converter, + ) -> str: + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr, converter + ) + + return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}" + + return ( + ("_inst_dict = self.__dict__",), + fmt_setter, + fmt_setter_with_converter, + ) + + # Not frozen -- we can just assign directly. + return (), _assign, _assign_with_converter + + +def _attrs_to_init_script( + attrs: list[Attribute], + is_frozen: bool, + is_slotted: bool, + call_pre_init: bool, + pre_init_has_args: bool, + call_post_init: bool, + does_cache_hash: bool, + base_attr_map: dict[str, type], + is_exc: bool, + needs_cached_setattr: bool, + has_cls_on_setattr: bool, + method_name: str, +) -> tuple[str, dict, dict]: + """ + Return a script of an initializer for *attrs*, a dict of globals, and + annotations for the initializer. + + The globals are required by the generated script. 
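The setter selection above is what makes frozen classes reject assignment after `__init__`. A short sketch of the behavior from the caller's side (the `Point` class is illustrative):

import attr

@attr.s(frozen=True, slots=True)
class Point:
    x = attr.ib()
    y = attr.ib()

p = Point(1, 2)
try:
    p.x = 5                      # blocked by the generated __setattr__
except attr.exceptions.FrozenInstanceError:
    pass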
+ """ + lines = ["self.__attrs_pre_init__()"] if call_pre_init else [] + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. Note _setattr will be used again below if + # does_cache_hash is True. + "_setattr = _cached_setattr_get(self)" + ) + + extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters( + is_frozen, is_slotted, base_attr_map + ) + lines.extend(extra_lines) + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + maybe_self = "self" if has_factory and a.default.takes_self else "" + + if a.converter and not isinstance(a.converter, Converter): + converter = Converter(a.converter) + else: + converter = a.converter + + if a.init is False: + if has_factory: + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + elif converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _INIT_FACTORY_PAT % (a.name,) + if converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + converter, + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = 
a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr, converter + ) + ) + names_for_globals[converter._get_global_name(a.name)] = ( + converter.converter + ) + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and converter is None: + annotations[arg_name] = a.type + elif converter is not None and converter._first_param_type: + # Use the type from the converter if present. + annotations[arg_name] = converter._first_param_type + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if call_post_init: + lines.append("self.__attrs_post_init__()") + + # Because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to field + # values during post-init combined with post-init accessing the hash code + # would result in silent bugs. + if does_cache_hash: + if is_frozen: + if is_slotted: + init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)" + else: + init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None" + else: + init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None" + lines.append(init_hash_cache) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + pre_init_args = args + if kw_only_args: + # leading comma & kw_only args + args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}" + pre_init_kw_only_args = ", ".join( + [ + f"{kw_arg_name}={kw_arg_name}" + # We need to remove the defaults from the kw_only_args. + for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args) + ] + ) + pre_init_args += ", " if pre_init_args else "" + pre_init_args += pre_init_kw_only_args + + if call_pre_init and pre_init_has_args: + # If pre init method has arguments, pass same arguments as `__init__`. + lines[0] = f"self.__attrs_pre_init__({pre_init_args})" + + # Python 3.7 doesn't allow backslashes in f strings. + NL = "\n " + return ( + f"""def {method_name}(self, {args}): + {NL.join(lines) if lines else 'pass'} +""", + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + .. warning:: + + You should never instantiate this class yourself. + + The class has *all* arguments of `attr.ib` (except for ``factory`` which is + only syntactic sugar for ``default=Factory(...)`` plus the following: + + - ``name`` (`str`): The name of the attribute. 
+ - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The + callables that are used for comparing and ordering objects by this + attribute, respectively. These are set by passing a callable to + `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also + :ref:`comparison customization `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _OBJ_SETATTR.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _EMPTY_METADATA_SINGLETON + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. 
deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + msg = "Type annotation and type argument cannot both be present" + raise ValueError(msg) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attrs.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attrs.evolve` but that function does not work + with {class}`Attribute`. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _OBJ_SETATTR.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + ( + types.MappingProxyType(dict(value)) + if value + else _EMPTY_METADATA_SINGLETON + ), + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. 
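`Attribute.evolve` is mainly used from a field transformer, as the docstring above notes. A hedged sketch of that hook, assuming a made-up `Secret` class and transformer name:

import attr

def keep_raw_names(cls, fields):
    # A field_transformer receives the class and its Attribute instances
    # and may return evolved copies (Attribute.evolve), for example to
    # override the generated __init__ alias.
    return [f.evolve(alias=f.name) for f in fields]

@attr.s(field_transformer=keep_raw_names)
class Secret:
    _token = attr.ib(default=None)

Secret(_token="abc")   # the alias now keeps the leading underscore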
+ """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = ( + *tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ), + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + Raises: + DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + Args: + factory (typing.Callable): + A callable that takes either none or exactly one mandatory + positional argument depending on *takes_self*. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +class Converter: + """ + Stores a converter callable. 
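The `validator` and `default` decorators on `_CountingAttr`, together with `Factory`, are typically used like this (a minimal sketch; the `Config` class and its fields are invented for illustration):

import attr

@attr.s
class Config:
    path = attr.ib()
    retries = attr.ib()
    tags = attr.ib(default=attr.Factory(list))

    @path.validator
    def _check_path(self, attribute, value):
        # Validators receive the instance, the Attribute, and the new value.
        if not value:
            raise ValueError("path must not be empty")

    @retries.default
    def _default_retries(self):
        # Behaves like default=Factory(..., takes_self=True).
        return 3

Config("/tmp")   # retries == 3, tags == []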
+ + Allows for the wrapped converter to take additional arguments. The + arguments are passed in the order they are documented. + + Args: + converter (Callable): A callable that converts the passed value. + + takes_self (bool): + Pass the partially initialized instance that is being initialized + as a positional argument. (default: `False`) + + takes_field (bool): + Pass the field definition (an :class:`Attribute`) into the + converter as a positional argument. (default: `False`) + + .. versionadded:: 24.1.0 + """ + + __slots__ = ( + "converter", + "takes_self", + "takes_field", + "_first_param_type", + "_global_name", + "__call__", + ) + + def __init__(self, converter, *, takes_self=False, takes_field=False): + self.converter = converter + self.takes_self = takes_self + self.takes_field = takes_field + + ex = _AnnotationExtractor(converter) + self._first_param_type = ex.get_first_param_type() + + if not (self.takes_self or self.takes_field): + self.__call__ = lambda value, _, __: self.converter(value) + elif self.takes_self and not self.takes_field: + self.__call__ = lambda value, instance, __: self.converter( + value, instance + ) + elif not self.takes_self and self.takes_field: + self.__call__ = lambda value, __, field: self.converter( + value, field + ) + else: + self.__call__ = lambda value, instance, field: self.converter( + value, instance, field + ) + + rt = ex.get_return_type() + if rt is not None: + self.__call__.__annotations__["return"] = rt + + @staticmethod + def _get_global_name(attr_name: str) -> str: + """ + Return the name that a converter for an attribute name *attr_name* + would have. + """ + return f"__attr_converter_{attr_name}" + + def _fmt_converter_call(self, attr_name: str, value_var: str) -> str: + """ + Return a string that calls the converter for an attribute name + *attr_name* and the value in variable named *value_var* according to + `self.takes_self` and `self.takes_field`. + """ + if not (self.takes_self or self.takes_field): + return f"{self._get_global_name(attr_name)}({value_var})" + + if self.takes_self and self.takes_field: + return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])" + + if self.takes_self: + return f"{self._get_global_name(attr_name)}({value_var}, self)" + + return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])" + + def __getstate__(self): + """ + Return a dict containing only converter and takes_self -- the rest gets + computed when loading. + """ + return { + "converter": self.converter, + "takes_self": self.takes_self, + "takes_field": self.takes_field, + } + + def __setstate__(self, state): + """ + Load instance from state. + """ + self.__init__(**state) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in ("converter", "takes_self", "takes_field") +] + +Converter = _add_hash( + _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f +) + + +def make_class( + name, attrs, bases=(object,), class_body=None, **attributes_arguments +): + r""" + A quick way to create a new class called *name* with *attrs*. + + Args: + name (str): The name for the new class. + + attrs( list | dict): + A list of names or a dictionary of mappings of names to `attr.ib`\ + s / `attrs.field`\ s. + + The order is deduced from the order of the names or attributes + inside *attrs*. Otherwise the order of the definition of the + attributes is used. 
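Assuming this version exports the `Converter` wrapper publicly (as its 24.1.0 changelog entry above suggests), a converter that wants the instance and the field definition might look like the following sketch; `Gauge` and `_clamp` are made-up names:

from attr import Converter, define, field

def _clamp(value, self_, field_):
    # With takes_self/takes_field the converter is called as
    # (value, instance, Attribute).
    return max(0, int(value))

@define
class Gauge:
    level: int = field(
        converter=Converter(_clamp, takes_self=True, takes_field=True)
    )

Gauge("7").level   # 7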
+ + bases (tuple[type, ...]): Classes that the new class will subclass. + + class_body (dict): + An optional dictionary of class attributes for the new class. + + attributes_arguments: Passed unmodified to `attr.s`. + + Returns: + type: A new class with *attrs*. + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + .. versionchanged:: 23.2.0 *class_body* + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + msg = "attrs argument must be a dict or a list." + raise TypeError(msg) + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if class_body is not None: + body.update(class_body) + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + with contextlib.suppress(AttributeError, ValueError): + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + cls = _attrs(these=cls_dict, **attributes_arguments)(type_) + # Only add type annotations now or "_attrs()" will complain: + cls.__annotations__ = { + k: v.type for k, v in cls_dict.items() if v.type is not None + } + return cls + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, unsafe_hash=True) +class _AndValidator: + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if they + have any. + + converters (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of converters. + + .. 
versionadded:: 20.1.0 + """ + + def pipe_converter(val, inst, field): + for c in converters: + val = c(val, inst, field) if isinstance(c, Converter) else c(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__.update({"val": A, "return": A}) + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + last = converters[-1] + if not PY_3_11_PLUS and isinstance(last, Converter): + last = last.__call__ + + # Get return type from last converter. + rt = _AnnotationExtractor(last).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return Converter(pipe_converter, takes_self=True, takes_field=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py new file mode 100644 index 0000000..dbb65cc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py @@ -0,0 +1,631 @@ +# SPDX-License-Identifier: MIT + +""" +These are keyword-only APIs that call `attr.s` and `attr.ib` with different +default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + _DEFAULT_ON_SETATTR, + NOTHING, + _frozen_setattrs, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + unsafe_hash=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds :term:`dunder methods` according to + :term:`fields ` specified using :doc:`type annotations `, + `field()` calls, or the *these* argument. + + Since *attrs* patches or replaces an existing class, you cannot use + `object.__init_subclass__` with *attrs* classes, because it runs too early. + As a replacement, you can define ``__attrs_init_subclass__`` on your class. + It will be called by *attrs* classes that subclass it after they're + created. See also :ref:`init-subclass`. + + Args: + slots (bool): + Create a :term:`slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the + default dict classes, but have some gotchas you should know about, + so we encourage you to read the :term:`glossary entry `. + + auto_detect (bool): + Instead of setting the *init*, *repr*, *eq*, and *hash* arguments + explicitly, assume they are set to True **unless any** of the + involved methods for one of the arguments is implemented in the + *current* class (meaning, it is *not* inherited from some base + class). + + So, for example by implementing ``__eq__`` on a class yourself, + *attrs* will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a + sensible ``__ne__`` by default, so it *should* be enough to only + implement ``__eq__`` in most cases). + + Passing True or False` to *init*, *repr*, *eq*, *cmp*, or *hash* + overrides whatever *auto_detect* would determine. 
+ + auto_exc (bool): + If the class subclasses `BaseException` (which implicitly includes + any subclass of any exception), the following happens to behave + like a well-behaved Python exception class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids [#]_ , + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the + ``args`` attribute, + - the value of *str* is ignored leaving ``__str__`` to base + classes. + + .. [#] + Note that *attrs* will *not* remove existing implementations of + ``__hash__`` or the equality methods. It just won't add own + ones. + + on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]): + A callable that is run whenever the user attempts to set an + attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same + arguments as validators: the instance, the attribute that is being + modified, and the new value. + + If no exception is raised, the attribute is set to the return value + of the callable. + + If a list of callables is passed, they're automatically wrapped in + an `attrs.setters.pipe`. + + If left None, the default behavior is to run converters and + validators whenever an attribute is set. + + init (bool): + Create a ``__init__`` method that initializes the *attrs* + attributes. Leading underscores are stripped for the argument name, + unless an alias is set on the attribute. + + .. seealso:: + `init` shows advanced ways to customize the generated + ``__init__`` method, including executing code before and after. + + repr(bool): + Create a ``__repr__`` method with a human readable representation + of *attrs* attributes. + + str (bool): + Create a ``__str__`` method that is identical to ``__repr__``. This + is usually not necessary except for `Exception`\ s. + + eq (bool | None): + If True or None (default), add ``__eq__`` and ``__ne__`` methods + that check two instances for equality. + + .. seealso:: + `comparison` describes how to customize the comparison behavior + going as far comparing NumPy arrays. + + order (bool | None): + If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` + methods that behave like *eq* above and allow instances to be + ordered. + + They compare the instances as if they were tuples of their *attrs* + attributes if and only if the types of both classes are + *identical*. + + If `None` mirror value of *eq*. + + .. seealso:: `comparison` + + cmp (bool | None): + Setting *cmp* is equivalent to setting *eq* and *order* to the same + value. Must not be mixed with *eq* or *order*. + + unsafe_hash (bool | None): + If None (default), the ``__hash__`` method is generated according + how *eq* and *frozen* are set. + + 1. If *both* are True, *attrs* will generate a ``__hash__`` for + you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set + to None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning + the ``__hash__`` method of the base class will be used. If the + base class is `object`, this means it will fall back to id-based + hashing. + + Although not recommended, you can decide for yourself and force + *attrs* to create one (for example, if the class is immutable even + though you didn't freeze it programmatically) by passing True or + not. 
Both of these cases are rather special and should be used + carefully. + + .. seealso:: + + - Our documentation on `hashing`, + - Python's documentation on `object.__hash__`, + - and the `GitHub issue that led to the default \ behavior + `_ for more + details. + + hash (bool | None): + Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence. + + cache_hash (bool): + Ensure that the object's hash code is computed only once and stored + on the object. If this is set to True, hashing must be either + explicitly or implicitly enabled for this class. If the hash code + is cached, avoid any reassignments of fields involved in hash code + computation or mutations of the objects those fields point to after + object creation. If such changes occur, the behavior of the + object's hash code is undefined. + + frozen (bool): + Make instances immutable after initialization. If someone attempts + to modify a frozen instance, `attrs.exceptions.FrozenInstanceError` + is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` + method on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other + words: ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You + can circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + kw_only (bool): + Make all attributes keyword-only in the generated ``__init__`` (if + *init* is False, this parameter is ignored). + + weakref_slot (bool): + Make instances weak-referenceable. This has no effect unless + *slots* is True. + + field_transformer (~typing.Callable | None): + A function that is called with the original class object and all + fields right before *attrs* finalizes the class. You can use this, + for example, to automatically add converters or validators to + fields based on their types. + + .. seealso:: `transform-fields` + + match_args (bool): + If True (default), set ``__match_args__`` on the class to support + :pep:`634` (*Structural Pattern Matching*). It is a tuple of all + non-keyword-only ``__init__`` parameter names on Python 3.10 and + later. Ignored on older Python versions. + + collect_by_mro (bool): + If True, *attrs* collects attributes from base classes correctly + according to the `method resolution order + `_. If False, *attrs* + will mimic the (wrong) behavior of `dataclasses` and :pep:`681`. + + See also `issue #428 + `_. + + getstate_setstate (bool | None): + .. note:: + + This is usually only interesting for slotted classes and you + should probably just set *auto_detect* to True. + + If True, ``__getstate__`` and ``__setstate__`` are generated and + attached to the class. This is necessary for slotted classes to be + pickleable. If left None, it's True by default for slotted classes + and False for dict classes. + + If *auto_detect* is True, and *getstate_setstate* is left None, and + **either** ``__getstate__`` or ``__setstate__`` is detected + directly on the class (meaning: not inherited), it is set to False + (this is usually what you want). + + auto_attribs (bool | None): + If True, look at type annotations to determine which attributes to + use, like `dataclasses`. 
If False, it will only look for explicit + :func:`field` class attributes, like classic *attrs*. + + If left None, it will guess: + + 1. If any attributes are annotated and no unannotated + `attrs.field`\ s are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.field`\ s. + + If *attrs* decides to look at type annotations, **all** fields + **must** be annotated. If *attrs* encounters a field that is set to + a :func:`field` / `attr.ib` but lacks a type annotation, an + `attrs.exceptions.UnannotatedAttributeError` is raised. Use + ``field_name: typing.Any = field(...)`` if you don't want to set a + type. + + .. warning:: + + For features that use the attribute name to create decorators + (for example, :ref:`validators `), you still *must* + assign :func:`field` / `attr.ib` to them. Otherwise Python will + either not find the name or try to use the default value to + call, for example, ``validator`` on it. + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `field()` are **ignored**. + + these (dict[str, object]): + A dictionary of name to the (private) return value of `field()` + mappings. This is useful to avoid the definition of your attributes + within the class body because you can't (for example, if you want + to add ``__repr__`` methods to Django models) or don't want to. + + If *these* is not `None`, *attrs* will *not* search the class body + for attributes and will *not* remove any attributes from it. + + The order is deduced from the order of the attributes inside + *these*. + + Arguably, this is a rather obscure feature. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + .. versionchanged:: 24.1.0 + Instances are not compared as tuples of attributes anymore, but using a + big ``and`` condition. This is faster and has more correct behavior for + uncomparable values like `math.nan`. + .. versionadded:: 24.1.0 + If a class has an *inherited* classmethod called + ``__attrs_init_subclass__``, it is executed after the class is created. + .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*. + + .. note:: + + The main differences to the classic `attr.s` are: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). + - Converters and validators run when attributes are set by default -- + if *frozen* is `False`. + - *slots=True* + + Usually, this has only upsides and few visible effects in everyday + programming. But it *can* lead to some surprising behaviors, so + please make sure to read :term:`slotted classes`. + + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - Some options that were only relevant on Python 2 or were kept around + for backwards-compatibility have been removed. 
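A short sketch of the resulting `define` defaults (slotted class, annotation-based fields, converters and validators also running on assignment); the `User` class is illustrative only:

from attr import define, field

@define
class User:
    email: str
    active: bool = field(default=True, converter=bool)

u = User("a@example.com")
u.active = 0            # the converter runs on assignment as well
assert u.active is False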
+ + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _DEFAULT_ON_SETATTR + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)." + raise ValueError(msg) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but `None` if used as `@attrs()`. + if maybe_cls is None: + return wrap + + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new :term:`field` / :term:`attribute` on a class. + + .. warning:: + + Does **nothing** unless the class is also decorated with + `attrs.define` (or similar)! + + Args: + default: + A value that is used if an *attrs*-generated ``__init__`` is used + and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will + be used to construct a new value (useful for mutable data types + like lists or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a + value *must* be supplied when instantiating; otherwise a + `TypeError` will be raised. + + .. seealso:: `defaults` + + factory (~typing.Callable): + Syntactic sugar for ``default=attr.Factory(factory)``. + + validator (~typing.Callable | list[~typing.Callable]): + Callable that is called by *attrs*-generated ``__init__`` methods + after the instance has been initialized. They receive the + initialized instance, the :func:`~attrs.Attribute`, and the passed + value. + + The return value is *not* inspected so the validator has to throw + an exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `attrs.validators.get_disabled` / `attrs.validators.set_disabled`. + + The validator can also be set using decorator notation as shown + below. + + .. seealso:: :ref:`validators` + + repr (bool | ~typing.Callable): + Include this attribute in the generated ``__repr__`` method. 
If + True, include the attribute; if False, omit it. By default, the + built-in ``repr()`` function is used. To override how the attribute + value is formatted, pass a ``callable`` that takes a single value + and returns a string. Note that the resulting string is used as-is, + which means it will be used directly *instead* of calling + ``repr()`` (the default). + + eq (bool | ~typing.Callable): + If True (default), include this attribute in the generated + ``__eq__`` and ``__ne__`` methods that check two instances for + equality. To override how the attribute value is compared, pass a + callable that takes a single value and returns the value to be + compared. + + .. seealso:: `comparison` + + order (bool | ~typing.Callable): + If True (default), include this attributes in the generated + ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To + override how the attribute value is ordered, pass a callable that + takes a single value and returns the value to be ordered. + + .. seealso:: `comparison` + + cmp(bool | ~typing.Callable): + Setting *cmp* is equivalent to setting *eq* and *order* to the same + value. Must not be mixed with *eq* or *order*. + + .. seealso:: `comparison` + + hash (bool | None): + Include this attribute in the generated ``__hash__`` method. If + None (default), mirror *eq*'s value. This is the correct behavior + according the Python spec. Setting this value to anything else + than None is *discouraged*. + + .. seealso:: `hashing` + + init (bool): + Include this attribute in the generated ``__init__`` method. + + It is possible to set this to False and set a default value. In + that case this attributed is unconditionally initialized with the + specified default value or factory. + + .. seealso:: `init` + + converter (typing.Callable | Converter): + A callable that is called by *attrs*-generated ``__init__`` methods + to convert attribute's value to the desired format. + + If a vanilla callable is passed, it is given the passed-in value as + the only positional argument. It is possible to receive additional + arguments by wrapping the callable in a `Converter`. + + Either way, the returned value will be used as the new value of the + attribute. The value is converted before being passed to the + validator, if any. + + .. seealso:: :ref:`converters` + + metadata (dict | None): + An arbitrary mapping, to be used by third-party code. + + .. seealso:: `extending-metadata`. + + type (type): + The type of the attribute. Nowadays, the preferred method to + specify the type is using a variable annotation (see :pep:`526`). + This argument is provided for backwards-compatibility and for usage + with `make_class`. Regardless of the approach used, the type will + be stored on ``Attribute.type``. + + Please note that *attrs* doesn't do anything with this metadata by + itself. You can use it as part of your own code or for `static type + checking `. + + kw_only (bool): + Make this attribute keyword-only in the generated ``__init__`` (if + ``init`` is False, this parameter is ignored). + + on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]): + Allows to overwrite the *on_setattr* setting from `attr.s`. If left + None, the *on_setattr* value from `attr.s` is used. Set to + `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `define()`. + + alias (str | None): + Override this attribute's parameter name in the generated + ``__init__`` method. 
If left None, default to ``name`` stripped + of leading underscores. See `private-attributes`. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionadded:: 22.2.0 *alias* + .. versionadded:: 23.1.0 + The *type* parameter has been re-added; mostly for `attrs.make_class`. + Please note that type checkers ignore this metadata. + + .. seealso:: + + `attr.ib` + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + type=type, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + alias=alias, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi new file mode 100644 index 0000000..ca7b71e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi @@ -0,0 +1,15 @@ +from typing import Any, ClassVar, Protocol + +# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. +MYPY = False + +if MYPY: + # A protocol to be able to statically accept an attrs class. + class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py new file mode 100644 index 0000000..51a1312 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. 
+ + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py new file mode 100644 index 0000000..9238311 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py @@ -0,0 +1,151 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to `None`. + + Type annotations will be inferred from the wrapped converter's, if it has + any. + + Args: + converter (typing.Callable): + the converter that is used for non-`None` values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace `None` values by *default* or the result + of *factory*. + + Args: + default: + Value to be used if `None` is passed. Passing an instance of + `attrs.Factory` is supported, however the ``takes_self`` option is + *not*. + + factory (typing.Callable): + A callable that takes no parameters whose result is used if `None` + is passed. + + Raises: + TypeError: If **neither** *default* or *factory* is passed. + + TypeError: If **both** *default* and *factory* are passed. + + ValueError: + If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + msg = "Must pass either `default` or `factory`." + raise TypeError(msg) + + if default is not NOTHING and factory is not None: + msg = "Must pass either `default` or `factory` but not both." 
+ raise TypeError(msg) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + msg = "`takes_self` is not supported by default_if_none." + raise ValueError(msg) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (for example, from environment variables) to real + booleans. + + Values mapping to `True`: + + - ``True`` + - ``"true"`` / ``"t"`` + - ``"yes"`` / ``"y"`` + - ``"on"`` + - ``"1"`` + - ``1`` + + Values mapping to `False`: + + - ``False`` + - ``"false"`` / ``"f"`` + - ``"no"`` / ``"n"`` + - ``"off"`` + - ``"0"`` + - ``0`` + + Raises: + ValueError: For any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + + if val in (True, "true", "t", "yes", "y", "on", "1", 1): + return True + if val in (False, "false", "f", "no", "n", "off", "0", 0): + return False + + msg = f"Cannot convert value to bool: {val!r}" + raise ValueError(msg) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi new file mode 100644 index 0000000..9ef478f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, TypeVar, overload + +from attrs import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py new file mode 100644 index 0000000..3b7abb8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py @@ -0,0 +1,95 @@ +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +from typing import ClassVar + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args: ClassVar[tuple[str]] = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An *attrs* function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-*attrs* class has been passed into an *attrs* function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set when defining the field and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has a field without a type annotation. + + .. 
versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an *attrs* feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A field requiring a callable has been set with a value that is not + callable. + + .. versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi new file mode 100644 index 0000000..f268011 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py new file mode 100644 index 0000000..689b170 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attrs.asdict` and `attrs.astuple`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, str)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Create a filter that only allows *what*. + + Args: + what (list[type, str, attrs.Attribute]): + What to include. Can be a type, a name, or an attribute. + + Returns: + Callable: + A callable that can be passed to `attrs.asdict`'s and + `attrs.astuple`'s *filter* argument. + + .. versionchanged:: 23.1.0 Accept strings with field names. + """ + cls, names, attrs = _split_what(what) + + def include_(attribute, value): + return ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return include_ + + +def exclude(*what): + """ + Create a filter that does **not** allow *what*. + + Args: + what (list[type, str, attrs.Attribute]): + What to exclude. Can be a type, a name, or an attribute. + + Returns: + Callable: + A callable that can be passed to `attrs.asdict`'s and + `attrs.astuple`'s *filter* argument. + + .. versionchanged:: 23.3.0 Accept field name string as input argument + """ + cls, names, attrs = _split_what(what) + + def exclude_(attribute, value): + return not ( + value.__class__ in cls + or attribute.name in names + or attribute in attrs + ) + + return exclude_ diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi new file mode 100644 index 0000000..974abdc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any + +from . import Attribute, _FilterType + +def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ... 
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/py.typed b/lambdas/aws-dd-forwarder-3.127.0/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py new file mode 100644 index 0000000..a9ce016 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + # This can be removed once we drop 3.8 and use attrs.Converter instead. + from ._make import Converter + + if not isinstance(c, Converter): + return c(new_value) + + return c(new_value, instance, attrib) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# Sphinx's autodata stopped working, so the docstring is inlined in the API +# docs. +NO_OP = object() diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi new file mode 100644 index 0000000..73abf36 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi @@ -0,0 +1,20 @@ +from typing import Any, NewType, NoReturn, TypeVar + +from . import Attribute +from attrs import _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py new file mode 100644 index 0000000..8a56717 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py @@ -0,0 +1,711 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. 
+""" + + +import operator +import re + +from contextlib import contextmanager +from re import Pattern + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "or_", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + Args: + disabled (bool): If `True`, disable running all validators. + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + Returns: + bool:`True` if validators are currently disabled. + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})." + raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"" + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called with a + wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + Args: + type (type | tuple[type]): The type to check for. + + Raises: + TypeError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected type, and the value it got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)" + raise ValueError( + msg, + attr, + self.pattern, + value, + ) + + def __repr__(self): + return f"" + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called with a + string that doesn't match *regex*. + + Args: + regex (str, re.Pattern): + A regex string or precompiled pattern to match against + + flags (int): + Flags that will be passed to the underlying re function (default 0) + + func (typing.Callable): + Which underlying `re` function to call. Valid options are + `re.fullmatch`, `re.search`, and `re.match`; the default `None` + means `re.fullmatch`. For performance reasons, the pattern is + always precompiled using `re.compile`. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
+ """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + msg = "'func' must be one of {}.".format( + ", ".join( + sorted(e and e.__name__ or "None" for e in set(valid_funcs)) + ) + ) + raise ValueError(msg) + + if isinstance(regex, Pattern): + if flags: + msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead" + raise TypeError(msg) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return f"" + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to `None` in addition to satisfying the requirements of + the sub-validator. + + Args: + validator + (typing.Callable | tuple[typing.Callable] | list[typing.Callable]): + A validator (or validators) that is used for non-`None` values. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators. + """ + if isinstance(validator, (list, tuple)): + return _OptionalValidator(_AndValidator(validator)) + + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _InValidator: + options = attrib() + _original_options = attrib(hash=False) + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})" + raise ValueError( + msg, + attr, + self._original_options, + value, + ) + + def __repr__(self): + return f"" + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called with a + value that does not belong in the *options* provided. + + The check is performed using ``value in options``, so *options* has to + support that operation. + + To keep the validator hashable, dicts, lists, and sets are transparently + transformed into a `tuple`. + + Args: + options: Allowed options. + + Raises: + ValueError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected options, and the value it got. + + .. versionadded:: 17.1.0 + .. versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. + .. versionchanged:: 24.1.0 + *options* that are a list, dict, or a set are now transformed into a + tuple to keep the validator hashable. + """ + repr_options = options + if isinstance(options, (list, dict, set)): + options = tuple(options) + + return _InValidator(options, repr_options) + + +@attrs(repr=False, slots=False, unsafe_hash=True) +class _IsCallableValidator: + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." 
+ ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attrs.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute that is + not callable. + + .. versionadded:: 19.1.0 + + Raises: + attrs.exceptions.NotCallableError: + With a human readable error message containing the attribute + (`attrs.Attribute`) name, and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _DeepIterable: + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else f" {self.iterable_validator!r}" + ) + return ( + f"" + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + Args: + member_validator: Validator to apply to iterable members. + + iterable_validator: + Validator to apply to iterable itself (optional). + + Raises + TypeError: if any sub-validators fail + + .. versionadded:: 19.1.0 + """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _DeepMapping: + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return f"" + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + Args: + key_validator: Validator to apply to dictionary keys. + + value_validator: Validator to apply to dictionary values. + + mapping_validator: + Validator to apply to top-level mapping attribute (optional). + + .. versionadded:: 19.1.0 + + Raises: + TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number larger or equal to *val*. + + The validator uses `operator.lt` to compare the values. 
+ + Args: + val: Exclusive upper bound for values. + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number greater than *val*. + + The validator uses `operator.le` to compare the values. + + Args: + val: Inclusive upper bound for values. + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number smaller than *val*. + + The validator uses `operator.ge` to compare the values. + + Args: + val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called with a + number smaller or equal to *val*. + + The validator uses `operator.ge` to compare the values. + + Args: + val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + Args: + length (int): Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + Args: + length (int): Minimum length of the string or iterable + + .. versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _SubclassOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not issubclass(value, self.type): + msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})." + raise TypeError( + msg, + attr, + self.type, + value, + ) + + def __repr__(self): + return f"" + + +def _subclass_of(type): + """ + A validator that raises a `TypeError` if the initializer is called with a + wrong type for this particular attribute (checks are performed using + `issubclass` therefore it's also valid to pass a tuple of types). + + Args: + type (type | tuple[type, ...]): The type(s) to check for. + + Raises: + TypeError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the expected type, and the value it got. 
+ """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return f"" + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + Args: + validator: A validator to be logically inverted. + + msg (str): + Message to raise if validator fails. Formatted with keys + ``exc_types`` and ``validator``. + + exc_types (tuple[type, ...]): + Exception type(s) to capture. Other types raised by child + validators will not be intercepted and pass through. + + Raises: + ValueError: + With a human readable error message, the attribute (of type + `attrs.Attribute`), the validator that failed to raise an + exception, the value it got, and the expected exception types. + + .. versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) + + +@attrs(repr=False, slots=True, unsafe_hash=True) +class _OrValidator: + validators = attrib() + + def __call__(self, inst, attr, value): + for v in self.validators: + try: + v(inst, attr, value) + except Exception: # noqa: BLE001, PERF203, S112 + continue + else: + return + + msg = f"None of {self.validators!r} satisfied for value {value!r}" + raise ValueError(msg) + + def __repr__(self): + return f"" + + +def or_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators until one of them is + satisfied. + + Args: + validators (~collections.abc.Iterable[typing.Callable]): + Arbitrary number of validators. + + Raises: + ValueError: + If no validator is satisfied. Raised with a human-readable error + message listing all the wrapped validators and the value that + failed all of them. + + .. 
versionadded:: 24.1.0 + """ + vals = [] + for v in validators: + vals.extend(v.validators if isinstance(v, _OrValidator) else [v]) + + return _OrValidator(tuple(vals)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi new file mode 100644 index 0000000..a314110 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi @@ -0,0 +1,83 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + Mapping, + Match, + Pattern, + TypeVar, + overload, +) + +from attrs import _ValidatorType +from attrs import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2]] +) -> _ValidatorType[_T1 | _T2]: ... +@overload +def instance_of( + type: tuple[type[_T1], type[_T2], type[_T3]] +) -> _ValidatorType[_T1 | _T2 | _T3]: ... +@overload +def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ... +def optional( + validator: ( + _ValidatorType[_T] + | list[_ValidatorType[_T]] + | tuple[_ValidatorType[_T]] + ), +) -> _ValidatorType[_T | None]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Pattern[AnyStr] | AnyStr, + flags: int = ..., + func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: _ValidatorType[_I] | None = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: _ValidatorType[_M] | None = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: str | None = None, + exc_types: type[Exception] | Iterable[type[Exception]] = ..., +) -> _ValidatorType[_T]: ... +def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... 
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA new file mode 100644 index 0000000..a85b378 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA @@ -0,0 +1,242 @@ +Metadata-Version: 2.3 +Name: attrs +Version: 24.2.0 +Summary: Classes Without Boilerplate +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: GitHub, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Author-email: Hynek Schlawack +License-Expression: MIT +License-File: LICENSE +Keywords: attribute,boilerplate,class +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Requires-Dist: importlib-metadata; python_version < '3.8' +Provides-Extra: benchmark +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'benchmark' +Requires-Dist: hypothesis; extra == 'benchmark' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'benchmark' +Requires-Dist: pympler; extra == 'benchmark' +Requires-Dist: pytest-codspeed; extra == 'benchmark' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'benchmark' +Requires-Dist: pytest-xdist[psutil]; extra == 'benchmark' +Requires-Dist: pytest>=4.3.0; extra == 'benchmark' +Provides-Extra: cov +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'cov' +Requires-Dist: coverage[toml]>=5.3; extra == 'cov' +Requires-Dist: hypothesis; extra == 'cov' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'cov' +Requires-Dist: pympler; extra == 'cov' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'cov' +Requires-Dist: pytest-xdist[psutil]; extra == 'cov' +Requires-Dist: pytest>=4.3.0; extra == 'cov' +Provides-Extra: dev +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'dev' +Requires-Dist: hypothesis; extra == 'dev' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'dev' +Requires-Dist: pre-commit; extra == 'dev' +Requires-Dist: pympler; extra == 'dev' 
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'dev' +Requires-Dist: pytest-xdist[psutil]; extra == 'dev' +Requires-Dist: pytest>=4.3.0; extra == 'dev' +Provides-Extra: docs +Requires-Dist: cogapp; extra == 'docs' +Requires-Dist: furo; extra == 'docs' +Requires-Dist: myst-parser; extra == 'docs' +Requires-Dist: sphinx; extra == 'docs' +Requires-Dist: sphinx-notfound-page; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier; extra == 'docs' +Requires-Dist: towncrier<24.7; extra == 'docs' +Provides-Extra: tests +Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests' +Requires-Dist: hypothesis; extra == 'tests' +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests' +Requires-Dist: pympler; extra == 'tests' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests' +Requires-Dist: pytest-xdist[psutil]; extra == 'tests' +Requires-Dist: pytest>=4.3.0; extra == 'tests' +Provides-Extra: tests-mypy +Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests-mypy' +Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests-mypy' +Description-Content-Type: text/markdown + +

+ [attrs logo]

+ + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + + + +

+ [sponsor logos]
+ Please consider joining them to help make attrs’s maintenance more sustainable!

+ + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation! + + +### Hate Type Annotations!? + +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types: + +```python +from attrs import define, field + +@define +class SomeClass: + a_number = field(default=42) + list_of_numbers = field(factory=list) +``` + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger. + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice. + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs) + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. 
+ +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +[Learn more](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek). + +## Release Information + +### Deprecations + +- Given the amount of warnings raised in the broader ecosystem, we've decided to only soft-deprecate the *hash* argument to `@define` / `@attr.s`. + Please don't use it in new code, but we don't intend to remove it anymore. + [#1330](https://github.com/python-attrs/attrs/issues/1330) + + +### Changes + +- `attrs.converters.pipe()` (and its syntactic sugar of passing a list for `attrs.field()`'s / `attr.ib()`'s *converter* argument) works again when passing `attrs.setters.convert` to *on_setattr* (which is default for `attrs.define`). + [#1328](https://github.com/python-attrs/attrs/issues/1328) +- Restored support for PEP [649](https://peps.python.org/pep-0649/) / [749](https://peps.python.org/pep-0749/)-implementing Pythons -- currently 3.14-dev. + [#1329](https://github.com/python-attrs/attrs/issues/1329) + + + +--- + +[Full changelog →](https://www.attrs.org/en/stable/changelog.html) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD new file mode 100644 index 0000000..2a91231 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD @@ -0,0 +1,56 @@ +attr/__init__.py,sha256=l8Ewh5KZE7CCY0i1iDfSCnFiUTIkBVoqsXjX9EZnIVA,2087 +attr/__init__.pyi,sha256=aTVHBPX6krCGvbQvOl_UKqEzmi2HFsaIVm2WKmAiqVs,11434 +attr/__pycache__/__init__.cpython-311.pyc,, +attr/__pycache__/_cmp.cpython-311.pyc,, +attr/__pycache__/_compat.cpython-311.pyc,, +attr/__pycache__/_config.cpython-311.pyc,, +attr/__pycache__/_funcs.cpython-311.pyc,, +attr/__pycache__/_make.cpython-311.pyc,, +attr/__pycache__/_next_gen.cpython-311.pyc,, +attr/__pycache__/_version_info.cpython-311.pyc,, +attr/__pycache__/converters.cpython-311.pyc,, +attr/__pycache__/exceptions.cpython-311.pyc,, +attr/__pycache__/filters.cpython-311.pyc,, +attr/__pycache__/setters.cpython-311.pyc,, +attr/__pycache__/validators.cpython-311.pyc,, +attr/_cmp.py,sha256=3umHiBtgsEYtvNP_8XrQwTCdFoZIX4DEur76N-2a3X8,4123 +attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368 +attr/_compat.py,sha256=n2Uk3c-ywv0PkFfGlvqR7SzDXp4NOhWmNV_ZK6YfWoM,2958 +attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843 +attr/_funcs.py,sha256=SGDmNlED1TM3tgO9Ap2mfRfVI24XEAcrNQs7o2eBXHQ,17386 +attr/_make.py,sha256=BjENJz5eJoojJVbCoupWjXLLEZJ7VID89lisLbQUlmQ,91479 +attr/_next_gen.py,sha256=dhGb96VFg4kXBkS9Zdz1A2uxVJ99q_RT1hw3kLA9-uI,24630 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=vNa58pZi9V6uxBzl4t1QrHbQfkT4iRFAodyXe7lcgg0,3506 +attr/converters.pyi,sha256=mpDoVFO3Cpx8xYSSV0iZFl7IAHuoNBglxKfxHvLj_sY,410 +attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795 
+attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=faMQeiBo_nbXYnPaQ1pq8PXeA7Zr-uNsVsPMiKCmxhc,1619 +attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584 +attr/validators.py,sha256=985eTP6RHyon61YEauMJgyNy1rEOhJWiSXMJgRxPtrQ,20045 +attr/validators.pyi,sha256=LjKf7AoXZfvGSfT3LRs61Qfln94konYyMUPoJJjOxK4,2502 +attrs-24.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-24.2.0.dist-info/METADATA,sha256=3Jgk4lr9Y1SAqAcwOLPN_mpW0wc6VOGm-yHt1LsPIHw,11524 +attrs-24.2.0.dist-info/RECORD,, +attrs-24.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs-24.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +attrs-24.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs/__init__.py,sha256=5FHo-EMFOX-g4ialSK4fwOjuoHzLISJDZCwoOl02Ty8,1071 +attrs/__init__.pyi,sha256=o3l92VsD9kHz8sldEtb_tllBTs3TeL-vIBMTxo2Zc_4,7703 +attrs/__pycache__/__init__.cpython-311.pyc,, +attrs/__pycache__/converters.cpython-311.pyc,, +attrs/__pycache__/exceptions.cpython-311.pyc,, +attrs/__pycache__/filters.cpython-311.pyc,, +attrs/__pycache__/setters.cpython-311.pyc,, +attrs/__pycache__/validators.cpython-311.pyc,, +attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76 +attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76 +attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73 +attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL new file mode 100644 index 0000000..cdd68a4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..2bd6453 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py new file mode 100644 index 0000000..963b197 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py @@ -0,0 +1,67 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Converter, + Factory, + _make_getattr, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "Converter", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] + +__getattr__ = _make_getattr(__name__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi new file mode 100644 index 0000000..b2670de --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi @@ -0,0 +1,252 @@ +import sys + +from typing import ( + Any, + Callable, + Mapping, + Sequence, + overload, + TypeVar, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import Converter as Converter +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import has as has +from attr import make_class as make_class +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators +from attr import attrib, asdict as asdict, astuple as astuple + +if sys.version_info >= (3, 11): + from typing import dataclass_transform +else: + from typing_extensions import dataclass_transform + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = bool | Callable[[Any], Any] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_ReprType = Callable[[Any], str] +_ReprArgType = bool | _ReprType +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType +_FieldTransformer = Callable[ + [type, list["Attribute[Any]"]], list["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]] + +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form catches an explicit default argument. 
+@overload +def field( + *, + default: _T, + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: _T | None = ..., + validator: _ValidatorArgType[_T] | None = ..., + repr: _ReprArgType = ..., + hash: bool | None = ..., + init: bool = ..., + metadata: Mapping[Any, Any] | None = ..., + converter: _ConverterType | Converter[Any, _T] | None = ..., + factory: Callable[[], _T] | None = ..., + kw_only: bool = ..., + eq: _EqOrderType | None = ..., + order: _EqOrderType | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + alias: str | None = ..., + type: type | None = ..., +) -> Any: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@dataclass_transform(field_specifiers=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define + +@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: _C, + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field)) +def frozen( + maybe_cls: None = ..., + *, + these: dict[str, Any] | None = ..., + repr: bool = ..., + unsafe_hash: bool | None = ..., + hash: bool | None = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: bool | None = ..., + order: bool | None = ..., + auto_detect: bool = ..., + getstate_setstate: bool | None = ..., + on_setattr: _OnSetAttrArgType | None = ..., + field_transformer: _FieldTransformer | None = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py new file mode 100644 index 0000000..7821f6c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa: F403 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py new file mode 100644 index 0000000..3323f9d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa: F403 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py new file mode 100644 index 0000000..3080f48 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa: F403 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/py.typed b/lambdas/aws-dd-forwarder-3.127.0/attrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py new file mode 100644 index 0000000..f3d73bb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa: F403 diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py new file mode 100644 index 0000000..037e124 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa: F403 diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run b/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run new file mode 100755 index 0000000..0cc0787 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from ddtrace.commands.ddtrace_run import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dog b/lambdas/aws-dd-forwarder-3.127.0/bin/dog new file mode 100755 index 0000000..7111893 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dog @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from datadog.dogshell import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell 
b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell new file mode 100755 index 0000000..7111893 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from datadog.dogshell import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap new file mode 100755 index 0000000..5be900e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from datadog.dogshell.wrap import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap b/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap new file mode 100755 index 0000000..5be900e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from datadog.dogshell.wrap import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer b/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer new file mode 100755 index 0000000..aae4757 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer @@ -0,0 +1,8 @@ +#!/usr/local/bin/python3.11 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING new file mode 100644 index 0000000..ba5a523 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING @@ -0,0 +1,21 @@ +The MIT License (MIT) +Copyright Contributors to the bytecode project. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA new file mode 100644 index 0000000..19faf45 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.1 +Name: bytecode +Version: 0.15.1 +Summary: Python module to generate and modify bytecode +Author-email: Victor Stinner +Maintainer-email: "Matthieu C. Dartiailh" +License: The MIT License (MIT) + Copyright Contributors to the bytecode project. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Project-URL: homepage, https://github.com/MatthieuDartiailh/bytecode +Project-URL: documentation, https://bytecode.readthedocs.io/en/latest/ +Project-URL: repository, https://github.com/MatthieuDartiailh/bytecode +Project-URL: changelog, https://github.com/MatthieuDartiailh/bytecode/blob/main/doc/changelog.rst +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: COPYING +Requires-Dist: typing-extensions ; python_version < "3.10" + +******** +bytecode +******** + +.. image:: https://img.shields.io/pypi/v/bytecode.svg + :alt: Latest release on the Python Cheeseshop (PyPI) + :target: https://pypi.python.org/pypi/bytecode + +.. image:: https://github.com/MatthieuDartiailh/bytecode/workflows/Continuous%20Integration/badge.svg + :target: https://github.com/MatthieuDartiailh/bytecode/actions + :alt: Continuous integration + +.. 
image:: https://github.com/MatthieuDartiailh/bytecode/workflows/Documentation%20building/badge.svg + :target: https://github.com/MatthieuDartiailh/bytecode/actions + :alt: Documentation building + +.. image:: https://img.shields.io/codecov/c/github/MatthieuDartiailh/bytecode/master.svg + :alt: Code coverage of bytecode on codecov.io + :target: https://codecov.io/github/MatthieuDartiailh/bytecode + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :alt: Code formatted using Black + :target: https://github.com/psf/black + +``bytecode`` is a Python module to generate and modify bytecode. + +* `bytecode project homepage at GitHub + `_ (code, bugs) +* `bytecode documentation + `_ +* `Download latest bytecode release at the Python Cheeseshop (PyPI) + `_ + +Install bytecode: ``python3 -m pip install bytecode``. It requires Python 3.8 +or newer. The latest release that supports Python 3.7 and 3.6 is 0.13.0. +The latest release that supports Python 3.5 is 0.12.0. For Python 2.7 support, +have a look at `dead-bytecode `_ +instead. + +Example executing ``print('Hello World!')``: + +.. code:: python + + from bytecode import Instr, Bytecode + + bytecode = Bytecode([Instr("LOAD_NAME", 'print'), + Instr("LOAD_CONST", 'Hello World!'), + Instr("CALL_FUNCTION", 1), + Instr("POP_TOP"), + Instr("LOAD_CONST", None), + Instr("RETURN_VALUE")]) + code = bytecode.to_code() + exec(code) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD new file mode 100644 index 0000000..49b55cf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD @@ -0,0 +1,22 @@ +bytecode-0.15.1.dist-info/COPYING,sha256=15CDvwHVcioF_s6S_mWdkWdw96tvB21WZKc8jvc8N5M,1094 +bytecode-0.15.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bytecode-0.15.1.dist-info/METADATA,sha256=btrMOPa27_H0V6neBiLPJiunLrto9ukEE-PWoTtFGvM,4627 +bytecode-0.15.1.dist-info/RECORD,, +bytecode-0.15.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bytecode-0.15.1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 +bytecode-0.15.1.dist-info/top_level.txt,sha256=9BhdB7HqYZ-PvHNoWX6ilwLYWQqcgEOLwdb3aXm5Gys,9 +bytecode/__init__.py,sha256=lsE6qqd_1wYjGq6s3q1Rhz1AyAjf98F4iJSrfg01F3k,6919 +bytecode/__pycache__/__init__.cpython-311.pyc,, +bytecode/__pycache__/bytecode.cpython-311.pyc,, +bytecode/__pycache__/cfg.cpython-311.pyc,, +bytecode/__pycache__/concrete.cpython-311.pyc,, +bytecode/__pycache__/flags.cpython-311.pyc,, +bytecode/__pycache__/instr.cpython-311.pyc,, +bytecode/__pycache__/version.cpython-311.pyc,, +bytecode/bytecode.py,sha256=6oveflTRGnrzTQEP9Z0tp6ySwmXQ_DXIibdAGOZt5lY,11126 +bytecode/cfg.py,sha256=J0FOZD1n-LbPLGmPRggmj_1SxWZvcQQbuXeUDskRDv8,41785 +bytecode/concrete.py,sha256=NVsAef1Ya5MvhZfx0xKclP4eearg7vAixY2RpHtQFhk,52168 +bytecode/flags.py,sha256=eY4nrTIDkOBYswI-wXQ-p3mKfriH7pUNYaDien4OI6g,6189 +bytecode/instr.py,sha256=2fynmuZq46eXDyzIMS1e3wzGpXnm7BuY7rHGSsFkh7U,26777 +bytecode/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bytecode/version.py,sha256=kz4YxQj6evqzVm2eaPEN9t8SwhJI1_YkLx-G2dMjhoI,519 diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL 
b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL new file mode 100644 index 0000000..7e68873 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt new file mode 100644 index 0000000..b37707e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt @@ -0,0 +1 @@ +bytecode diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py new file mode 100644 index 0000000..11eb7d6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py @@ -0,0 +1,218 @@ +__all__ = [ + "Label", + "Instr", + "SetLineno", + "Bytecode", + "ConcreteInstr", + "ConcreteBytecode", + "ControlFlowGraph", + "CompilerFlags", + "Compare", + "BinaryOp", + "__version__", +] + +from io import StringIO +from typing import List, Union + +# import needed to use it in bytecode.py +from bytecode.bytecode import ( # noqa + BaseBytecode, + Bytecode, + _BaseBytecodeList, + _InstrList, +) + +# import needed to use it in bytecode.py +from bytecode.cfg import BasicBlock, ControlFlowGraph # noqa + +# import needed to use it in bytecode.py +from bytecode.concrete import _ConvertBytecodeToConcrete # noqa +from bytecode.concrete import ConcreteBytecode, ConcreteInstr +from bytecode.flags import CompilerFlags + +# import needed to use it in bytecode.py +from bytecode.instr import ( # noqa + UNSET, + BinaryOp, + CellVar, + Compare, + FreeVar, + Instr, + Intrinsic1Op, + Intrinsic2Op, + Label, + SetLineno, + TryBegin, + TryEnd, +) +from bytecode.version import __version__ + + +def format_bytecode( + bytecode: Union[Bytecode, ConcreteBytecode, ControlFlowGraph], + *, + lineno: bool = False, +) -> str: + try_begins: List[TryBegin] = [] + + def format_line(index, line): + nonlocal cur_lineno, prev_lineno + if lineno: + if cur_lineno != prev_lineno: + line = "L.% 3s % 3s: %s" % (cur_lineno, index, line) + prev_lineno = cur_lineno + else: + line = " % 3s: %s" % (index, line) + else: + line = line + return line + + def format_instr(instr, labels=None): + text = instr.name + arg = instr._arg + if arg is not UNSET: + if isinstance(arg, Label): + try: + arg = "<%s>" % labels[arg] + except KeyError: + arg = "" + elif isinstance(arg, BasicBlock): + try: + arg = "<%s>" % labels[id(arg)] + except KeyError: + arg = "" + else: + arg = repr(arg) + text = "%s %s" % (text, arg) + return text + + def format_try_begin(instr: TryBegin, labels: dict) -> str: + if isinstance(instr.target, Label): + try: + arg = "<%s>" % labels[instr.target] + except KeyError: + arg = "" + else: + try: + arg = "<%s>" % labels[id(instr.target)] + except KeyError: + arg = "" + line = "TryBegin %s -> %s [%s]" % ( + len(try_begins), + arg, + instr.stack_depth, + ) + (" last_i" if instr.push_lasti else "") + + # Track the seen try begin + try_begins.append(instr) + + return line + + def format_try_end(instr: TryEnd) -> str: + i = try_begins.index(instr.entry) if instr.entry in try_begins else "" + return "TryEnd (%s)" % i + + buffer = StringIO() + + indent = " " * 4 + + cur_lineno = bytecode.first_lineno + prev_lineno = None + + if isinstance(bytecode, ConcreteBytecode): + offset = 0 + for c_instr in bytecode: + fields = [] + if c_instr.lineno is not None: + 
cur_lineno = c_instr.lineno + if lineno: + fields.append(format_instr(c_instr)) + line = "".join(fields) + line = format_line(offset, line) + else: + fields.append("% 3s %s" % (offset, format_instr(c_instr))) + line = "".join(fields) + buffer.write(line + "\n") + + if isinstance(c_instr, ConcreteInstr): + offset += c_instr.size + + if bytecode.exception_table: + buffer.write("\n") + buffer.write("Exception table:\n") + for entry in bytecode.exception_table: + buffer.write( + f"{entry.start_offset} to {entry.stop_offset} -> " + f"{entry.target} [{entry.stack_depth}]" + + (" lasti" if entry.push_lasti else "") + + "\n" + ) + + elif isinstance(bytecode, Bytecode): + labels: dict[Label, str] = {} + for index, instr in enumerate(bytecode): + if isinstance(instr, Label): + labels[instr] = "label_instr%s" % index + + for index, instr in enumerate(bytecode): + if isinstance(instr, Label): + label = labels[instr] + line = "%s:" % label + if index != 0: + buffer.write("\n") + elif isinstance(instr, TryBegin): + line = indent + format_line(index, format_try_begin(instr, labels)) + indent += " " + elif isinstance(instr, TryEnd): + indent = indent[:-2] + line = indent + format_line(index, format_try_end(instr)) + else: + if instr.lineno is not None: + cur_lineno = instr.lineno + line = format_instr(instr, labels) + line = indent + format_line(index, line) + buffer.write(line + "\n") + buffer.write("\n") + + elif isinstance(bytecode, ControlFlowGraph): + cfg_labels = {} + for block_index, block in enumerate(bytecode, 1): + cfg_labels[id(block)] = "block%s" % block_index + + for block_index, block in enumerate(bytecode, 1): + buffer.write("%s:\n" % cfg_labels[id(block)]) + seen_instr = False + for index, instr in enumerate(block): + if isinstance(instr, TryBegin): + line = indent + format_line( + index, format_try_begin(instr, cfg_labels) + ) + indent += " " + elif isinstance(instr, TryEnd): + if seen_instr: + indent = indent[:-2] + line = indent + format_line(index, format_try_end(instr)) + else: + if isinstance(instr, Instr): + seen_instr = True + if instr.lineno is not None: + cur_lineno = instr.lineno + line = format_instr(instr, cfg_labels) + line = indent + format_line(index, line) + buffer.write(line + "\n") + if block.next_block is not None: + buffer.write(indent + "-> %s\n" % cfg_labels[id(block.next_block)]) + buffer.write("\n") + else: + raise TypeError("unknown bytecode class") + + return buffer.getvalue()[:-1] + + +def dump_bytecode( + bytecode: Union[Bytecode, ConcreteBytecode, ControlFlowGraph], + *, + lineno: bool = False, +) -> None: + print(format_bytecode(bytecode, lineno=lineno)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py new file mode 100644 index 0000000..149bb37 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py @@ -0,0 +1,330 @@ +# alias to keep the 'bytecode' variable free +import sys +import types +from abc import abstractmethod +from typing import ( + Any, + Dict, + Generic, + Iterator, + List, + Optional, + Sequence, + SupportsIndex, + TypeVar, + Union, + overload, +) + +import bytecode as _bytecode +from bytecode.flags import CompilerFlags, infer_flags +from bytecode.instr import ( + _UNSET, + UNSET, + BaseInstr, + Instr, + Label, + SetLineno, + TryBegin, + TryEnd, +) + + +class BaseBytecode: + def __init__(self) -> None: + self.argcount = 0 + self.posonlyargcount = 0 + self.kwonlyargcount = 0 + self.first_lineno = 1 + self.name = "" + self.qualname = self.name + 
self.filename = "" + self.docstring: Union[str, None, _UNSET] = UNSET + # We cannot recreate cellvars/freevars from instructions because of super() + # special-case, which involves an implicit __class__ cell/free variable + # We could try to detect it. + # CPython itself breaks if one aliases super so we could maybe make it work + # but it will require careful design and will be done later in the future. + self.cellvars: List[str] = [] + self.freevars: List[str] = [] + self._flags: CompilerFlags = CompilerFlags(0) + + def _copy_attr_from(self, bytecode: "BaseBytecode") -> None: + self.argcount = bytecode.argcount + self.posonlyargcount = bytecode.posonlyargcount + self.kwonlyargcount = bytecode.kwonlyargcount + self.flags = bytecode.flags + self.first_lineno = bytecode.first_lineno + self.name = bytecode.name + self.qualname = bytecode.qualname + self.filename = bytecode.filename + self.docstring = bytecode.docstring + self.cellvars = list(bytecode.cellvars) + self.freevars = list(bytecode.freevars) + + def __eq__(self, other: Any) -> bool: + if type(self) is not type(other): + return False + + if self.argcount != other.argcount: + return False + if self.posonlyargcount != other.posonlyargcount: + return False + if self.kwonlyargcount != other.kwonlyargcount: + return False + if self.flags != other.flags: + return False + if self.first_lineno != other.first_lineno: + return False + if self.filename != other.filename: + return False + if self.name != other.name: + return False + if self.qualname != other.qualname: + return False + if self.docstring != other.docstring: + return False + if self.cellvars != other.cellvars: + return False + if self.freevars != other.freevars: + return False + if self.compute_stacksize() != other.compute_stacksize(): + return False + + return True + + @property + def flags(self) -> CompilerFlags: + return self._flags + + @flags.setter + def flags(self, value: CompilerFlags) -> None: + if not isinstance(value, CompilerFlags): + value = CompilerFlags(value) + self._flags = value + + def update_flags(self, *, is_async: Optional[bool] = None) -> None: + # infer_flags reasonably only accept concrete subclasses + self.flags = infer_flags(self, is_async) # type: ignore + + @abstractmethod + def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int: + raise NotImplementedError + + +T = TypeVar("T", bound="_BaseBytecodeList") +U = TypeVar("U") + + +class _BaseBytecodeList(BaseBytecode, list, Generic[U]): + """List subclass providing type stable slicing and copying.""" + + @overload + def __getitem__(self, index: SupportsIndex) -> U: + ... + + @overload + def __getitem__(self: T, index: slice) -> T: + ... 
+ + def __getitem__(self, index): + value = super().__getitem__(index) + if isinstance(index, slice): + value = type(self)(value) + value._copy_attr_from(self) + + return value + + def copy(self: T) -> T: + # This is a list subclass and works + new = type(self)(super().copy()) # type: ignore + new._copy_attr_from(self) + return new + + def legalize(self) -> None: + """Check that all the element of the list are valid and remove SetLineno.""" + lineno_pos = [] + set_lineno = None + current_lineno = self.first_lineno + + for pos, instr in enumerate(self): + if isinstance(instr, SetLineno): + set_lineno = instr.lineno + lineno_pos.append(pos) + continue + # Filter out other pseudo instructions + if not isinstance(instr, BaseInstr): + continue + if set_lineno is not None: + instr.lineno = set_lineno + elif instr.lineno is UNSET: + instr.lineno = current_lineno + elif instr.lineno is not None: + current_lineno = instr.lineno + + for i in reversed(lineno_pos): + del self[i] + + def __iter__(self) -> Iterator[U]: + instructions = super().__iter__() + for instr in instructions: + self._check_instr(instr) + yield instr + + def _check_instr(self, instr): + raise NotImplementedError() + + +V = TypeVar("V") + + +class _InstrList(List[V]): + # Providing a stricter typing for this helper whose use is limited to the __eq__ + # implementation is more effort than it is worth. + def _flat(self) -> List: + instructions: List = [] + labels = {} + jumps = [] + try_begins: Dict[TryBegin, int] = {} + try_jumps = [] + + offset = 0 + instr: Any + for index, instr in enumerate(self): + if isinstance(instr, Label): + instructions.append("label_instr%s" % index) + labels[instr] = offset + elif isinstance(instr, TryBegin): + try_begins.setdefault(instr, len(try_begins)) + assert isinstance(instr.target, Label) + try_jumps.append((instr.target, len(instructions))) + instructions.append(instr) + elif isinstance(instr, TryEnd): + instructions.append(("TryEnd", try_begins[instr.entry])) + else: + if isinstance(instr, Instr) and isinstance(instr.arg, Label): + target_label = instr.arg + instr = _bytecode.ConcreteInstr( + instr.name, 0, location=instr.location + ) + jumps.append((target_label, instr)) + instructions.append(instr) + offset += 1 + + for target_label, instr in jumps: + instr.arg = labels[target_label] + + for target_label, index in try_jumps: + instr = instructions[index] + assert isinstance(instr, TryBegin) + instructions[index] = ( + "TryBegin", + try_begins[instr], + labels[target_label], + instr.push_lasti, + ) + + return instructions + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, _InstrList): + other = _InstrList(other) + + return self._flat() == other._flat() + + +class Bytecode( + _InstrList[Union[Instr, Label, TryBegin, TryEnd, SetLineno]], + _BaseBytecodeList[Union[Instr, Label, TryBegin, TryEnd, SetLineno]], +): + def __init__( + self, + instructions: Sequence[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = (), + ) -> None: + BaseBytecode.__init__(self) + self.argnames: List[str] = [] + for instr in instructions: + self._check_instr(instr) + self.extend(instructions) + + def __iter__(self) -> Iterator[Union[Instr, Label, TryBegin, TryEnd, SetLineno]]: + instructions = super().__iter__() + seen_try_begin = False + for instr in instructions: + self._check_instr(instr) + if isinstance(instr, TryBegin): + if seen_try_begin: + raise RuntimeError("TryBegin pseudo instructions cannot be nested.") + seen_try_begin = True + elif isinstance(instr, TryEnd): + seen_try_begin = False + 
yield instr + + def _check_instr(self, instr: Any) -> None: + if not isinstance(instr, (Label, SetLineno, Instr, TryBegin, TryEnd)): + raise ValueError( + "Bytecode must only contain Label, " + "SetLineno, and Instr objects, " + "but %s was found" % type(instr).__name__ + ) + + def _copy_attr_from(self, bytecode: BaseBytecode) -> None: + super()._copy_attr_from(bytecode) + if isinstance(bytecode, Bytecode): + self.argnames = bytecode.argnames + + @staticmethod + def from_code( + code: types.CodeType, + prune_caches: bool = True, + conserve_exception_block_stackdepth: bool = False, + ) -> "Bytecode": + concrete = _bytecode.ConcreteBytecode.from_code(code) + return concrete.to_bytecode( + prune_caches=prune_caches, + conserve_exception_block_stackdepth=conserve_exception_block_stackdepth, + ) + + def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int: + cfg = _bytecode.ControlFlowGraph.from_bytecode(self) + return cfg.compute_stacksize(check_pre_and_post=check_pre_and_post) + + def to_code( + self, + compute_jumps_passes: Optional[int] = None, + stacksize: Optional[int] = None, + *, + check_pre_and_post: bool = True, + compute_exception_stack_depths: bool = True, + ) -> types.CodeType: + # Prevent reconverting the concrete bytecode to bytecode and cfg to do the + # calculation if we need to do it. + if stacksize is None or ( + sys.version_info >= (3, 11) and compute_exception_stack_depths + ): + cfg = _bytecode.ControlFlowGraph.from_bytecode(self) + stacksize = cfg.compute_stacksize( + check_pre_and_post=check_pre_and_post, + compute_exception_stack_depths=compute_exception_stack_depths, + ) + self = cfg.to_bytecode() + compute_exception_stack_depths = False # avoid redoing everything + bc = self.to_concrete_bytecode( + compute_jumps_passes=compute_jumps_passes, + compute_exception_stack_depths=compute_exception_stack_depths, + ) + return bc.to_code( + stacksize=stacksize, + compute_exception_stack_depths=compute_exception_stack_depths, + ) + + def to_concrete_bytecode( + self, + compute_jumps_passes: Optional[int] = None, + compute_exception_stack_depths: bool = True, + ) -> "_bytecode.ConcreteBytecode": + converter = _bytecode._ConvertBytecodeToConcrete(self) + return converter.to_concrete_bytecode( + compute_jumps_passes=compute_jumps_passes, + compute_exception_stack_depths=compute_exception_stack_depths, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py new file mode 100644 index 0000000..7f554fa --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py @@ -0,0 +1,1061 @@ +import sys +import types +from collections import defaultdict +from dataclasses import dataclass +from typing import ( + Any, + Dict, + Generator, + Iterable, + Iterator, + List, + Optional, + Set, + SupportsIndex, + Tuple, + TypeVar, + Union, + overload, +) + +# alias to keep the 'bytecode' variable free +import bytecode as _bytecode +from bytecode.concrete import ConcreteInstr +from bytecode.flags import CompilerFlags +from bytecode.instr import UNSET, Instr, Label, SetLineno, TryBegin, TryEnd + +T = TypeVar("T", bound="BasicBlock") +U = TypeVar("U", bound="ControlFlowGraph") + + +class BasicBlock(_bytecode._InstrList[Union[Instr, SetLineno, TryBegin, TryEnd]]): + def __init__( + self, + instructions: Optional[ + Iterable[Union[Instr, SetLineno, TryBegin, TryEnd]] + ] = None, + ) -> None: + # a BasicBlock object, or None + self.next_block: Optional["BasicBlock"] = None + if instructions: + super().__init__(instructions) + + 
def __iter__(self) -> Iterator[Union[Instr, SetLineno, TryBegin, TryEnd]]: + index = 0 + while index < len(self): + instr = self[index] + index += 1 + + if not isinstance(instr, (SetLineno, Instr, TryBegin, TryEnd)): + raise ValueError( + "BasicBlock must only contain SetLineno and Instr objects, " + "but %s was found" % instr.__class__.__name__ + ) + + if isinstance(instr, Instr) and instr.has_jump(): + if index < len(self) and any( + isinstance(self[i], Instr) for i in range(index, len(self)) + ): + raise ValueError( + "Only the last instruction of a basic " "block can be a jump" + ) + + if not isinstance(instr.arg, BasicBlock): + raise ValueError( + "Jump target must a BasicBlock, got %s", + type(instr.arg).__name__, + ) + + if isinstance(instr, TryBegin): + if not isinstance(instr.target, BasicBlock): + raise ValueError( + "TryBegin target must a BasicBlock, got %s", + type(instr.target).__name__, + ) + + yield instr + + @overload + def __getitem__( + self, index: SupportsIndex + ) -> Union[Instr, SetLineno, TryBegin, TryEnd]: + ... + + @overload + def __getitem__(self: T, index: slice) -> T: + ... + + def __getitem__(self, index): + value = super().__getitem__(index) + if isinstance(index, slice): + value = type(self)(value) + value.next_block = self.next_block + + return value + + def get_last_non_artificial_instruction(self) -> Optional[Instr]: + for instr in reversed(self): + if isinstance(instr, Instr): + return instr + + return None + + def copy(self: T) -> T: + new = type(self)(super().copy()) + new.next_block = self.next_block + return new + + def legalize(self, first_lineno: int) -> int: + """Check that all the element of the list are valid and remove SetLineno.""" + lineno_pos = [] + set_lineno = None + current_lineno = first_lineno + + for pos, instr in enumerate(self): + if isinstance(instr, SetLineno): + set_lineno = current_lineno = instr.lineno + lineno_pos.append(pos) + continue + if isinstance(instr, (TryBegin, TryEnd)): + continue + + if set_lineno is not None: + instr.lineno = set_lineno + elif instr.lineno is UNSET: + instr.lineno = current_lineno + elif instr.lineno is not None: + current_lineno = instr.lineno + + for i in reversed(lineno_pos): + del self[i] + + return current_lineno + + def get_jump(self) -> Optional["BasicBlock"]: + if not self: + return None + + last_instr = self.get_last_non_artificial_instruction() + if last_instr is None or not last_instr.has_jump(): + return None + + target_block = last_instr.arg + assert isinstance(target_block, BasicBlock) + return target_block + + def get_trailing_try_end(self, index: int): + while index + 1 < len(self): + if isinstance(b := self[index + 1], TryEnd): + return b + index += 1 + + return None + + +def _update_size(pre_delta, post_delta, size, maxsize, minsize): + size += pre_delta + if size < 0: + msg = "Failed to compute stacksize, got negative size" + raise RuntimeError(msg) + size += post_delta + maxsize = max(maxsize, size) + minsize = min(minsize, size) + return size, maxsize, minsize + + +# We can never have nested TryBegin, so we can simply update the min stack size +# when we encounter one and use the number we have when we encounter the TryEnd + + +@dataclass +class _StackSizeComputationStorage: + """Common storage shared by the computers involved in computing CFG stack usage.""" + + #: Should we check that all stack operation are "safe" i.e. occurs while there + #: is a sufficient number of items on the stack. 
+ check_pre_and_post: bool + + #: Id the blocks for which an analysis is under progress to avoid getting stuck + #: in recursions. + seen_blocks: Set[int] + + #: Sizes and exception handling status with which the analysis of the block + #: has been performed. Used to avoid running multiple times equivalent analysis. + blocks_startsizes: Dict[int, Set[Tuple[int, Optional[bool]]]] + + #: Track the encountered TryBegin pseudo-instruction to update their target + #: depth at the end of the calculation. + try_begins: List[TryBegin] + + #: Stacksize that should be used for exception blocks. This is the smallest size + #: with which this block was reached which is the only size that can be safely + #: restored. + exception_block_startsize: Dict[int, int] + + #: Largest stack size used in an exception block. We record the size corresponding + #: to the smallest start size for the block since the interpreter enforces that + #: we start with this size. + exception_block_maxsize: Dict[int, int] + + +class _StackSizeComputer: + """Helper computing the stack usage for a single block.""" + + #: Common storage shared by all helpers involved in the stack size computation + common: _StackSizeComputationStorage + + #: Block this helper is running the computation for. + block: BasicBlock + + #: Current stack usage. + size: int + + #: Maximal stack usage. + maxsize: int + + #: Minimal stack usage. This value is only relevant in between a TryBegin/TryEnd + #: pair and determine the startsize for the exception handling block associated + #: with the try begin. + minsize: int + + #: Flag indicating if the block analyzed is an exception handler (i.e. a target + #: of a TryBegin). + exception_handler: Optional[bool] + + #: TryBegin that was encountered before jumping to this block and for which + #: no try end was met yet. + pending_try_begin: Optional[TryBegin] + + def __init__( + self, + common: _StackSizeComputationStorage, + block: BasicBlock, + size: int, + maxsize: int, + minsize: int, + exception_handler: Optional[bool], + pending_try_begin: Optional[TryBegin], + ) -> None: + self.common = common + self.block = block + self.size = size + self.maxsize = maxsize + self.minsize = minsize + self.exception_handler = exception_handler + self.pending_try_begin = pending_try_begin + self._current_try_begin = pending_try_begin + + def run(self) -> Generator[Union["_StackSizeComputer", int], int, None]: + """Iterate over the block instructions to compute stack usage.""" + # Blocks are not hashable but in this particular context we know we won't be + # modifying blocks in place so we can safely use their id as hash rather than + # making them generally hashable which would be weird since they are list + # subclasses + block_id = id(self.block) + + # If the block is currently being visited (seen = True) or + # it was visited previously with parameters that makes the computation + # irrelevant return the maxsize. + fingerprint = (self.size, self.exception_handler) + if id(self.block) in self.common.seen_blocks or ( + not self._is_stacksize_computation_relevant(block_id, fingerprint) + ): + yield self.maxsize + + # Prevent recursive visit of block if two blocks are nested (jump from one + # to the other). + self.common.seen_blocks.add(block_id) + + # Track which size has been used to run an analysis to avoid re-running multiple + # times the same calculation. 
+ self.common.blocks_startsizes[block_id].add(fingerprint) + + # If this block is an exception handler reached through the exception table + # we will push some extra objects on the stack before processing start. + if self.exception_handler is not None: + self._update_size(0, 1 + self.exception_handler) + # True is used to indicated that push_lasti is True, leading to pushing + # an extra object on the stack. + + for i, instr in enumerate(self.block): + # Ignore SetLineno + if isinstance(instr, (SetLineno)): + continue + + # When we encounter a TryBegin, we: + # - store it as the current TryBegin (since TryBegin cannot be nested) + # - record its existence to remember to update its stack size when + # the computation ends + # - update the minsize to the current size value since we need to + # know the minimal stack usage between the TryBegin/TryEnd pair to + # set the startsize of the exception handling block + # + # This approach does not require any special handling for with statements. + if isinstance(instr, TryBegin): + assert self._current_try_begin is None + self.common.try_begins.append(instr) + self._current_try_begin = instr + self.minsize = self.size + + continue + + elif isinstance(instr, TryEnd): + # When we encounter a TryEnd we can start the computation for the + # exception block using the minimum stack size encountered since + # the TryBegin matching this TryEnd. + + # TryBegin cannot be nested so a TryEnd should always match the + # current try begin. However inside the CFG some blocks may + # start with a TryEnd relevant only when reaching this block + # through a particular jump. So we are lenient here. + if instr.entry is not self._current_try_begin: + continue + + # Compute the stack usage of the exception handler + assert isinstance(instr.entry.target, BasicBlock) + yield from self._compute_exception_handler_stack_usage( + instr.entry.target, + instr.entry.push_lasti, + ) + self._current_try_begin = None + continue + + # For instructions with a jump first compute the stacksize required when the + # jump is taken. + if instr.has_jump(): + effect = ( + instr.pre_and_post_stack_effect(jump=True) + if self.common.check_pre_and_post + else (instr.stack_effect(jump=True), 0) + ) + taken_size, maxsize, minsize = _update_size( + *effect, self.size, self.maxsize, self.minsize + ) + + # Yield the parameters required to compute the stacksize required + # by the block to which the jump points to and resume when we now + # the maxsize. + assert isinstance(instr.arg, BasicBlock) + maxsize = yield _StackSizeComputer( + self.common, + instr.arg, + taken_size, + maxsize, + minsize, + None, + # Do not propagate the TryBegin if a final instruction is followed + # by a TryEnd. + None + if instr.is_final() and self.block.get_trailing_try_end(i) + else self._current_try_begin, + ) + + # Update the maximum used size by the usage implied by the following + # the jump + self.maxsize = max(self.maxsize, maxsize) + + # For unconditional jumps abort early since the other instruction will + # never be seen. 
+ if instr.is_uncond_jump(): + # Check for TryEnd after the final instruction which is possible + # TryEnd being only pseudo instructions + if te := self.block.get_trailing_try_end(i): + # TryBegin cannot be nested + assert te.entry is self._current_try_begin + + assert isinstance(te.entry.target, BasicBlock) + yield from self._compute_exception_handler_stack_usage( + te.entry.target, + te.entry.push_lasti, + ) + + self.common.seen_blocks.remove(id(self.block)) + yield self.maxsize + + # jump=False: non-taken path of jumps, or any non-jump + effect = ( + instr.pre_and_post_stack_effect(jump=False) + if self.common.check_pre_and_post + else (instr.stack_effect(jump=False), 0) + ) + self._update_size(*effect) + + # Instruction is final (return, raise, ...) so any following instruction + # in the block is dead code. + if instr.is_final(): + # Check for TryEnd after the final instruction which is possible + # TryEnd being only pseudo instructions. + if te := self.block.get_trailing_try_end(i): + assert isinstance(te.entry.target, BasicBlock) + yield from self._compute_exception_handler_stack_usage( + te.entry.target, + te.entry.push_lasti, + ) + + self.common.seen_blocks.remove(id(self.block)) + + yield self.maxsize + + if self.block.next_block: + self.maxsize = yield _StackSizeComputer( + self.common, + self.block.next_block, + self.size, + self.maxsize, + self.minsize, + None, + self._current_try_begin, + ) + + self.common.seen_blocks.remove(id(self.block)) + + yield self.maxsize + + # --- Private API + + _current_try_begin: Optional[TryBegin] + + def _update_size(self, pre_delta: int, post_delta: int) -> None: + size, maxsize, minsize = _update_size( + pre_delta, post_delta, self.size, self.maxsize, self.minsize + ) + self.size = size + self.minsize = minsize + self.maxsize = maxsize + + def _compute_exception_handler_stack_usage( + self, block: BasicBlock, push_lasti: bool + ) -> Generator[Union["_StackSizeComputer", int], int, None]: + b_id = id(block) + if self.minsize < self.common.exception_block_startsize[b_id]: + block_size = yield _StackSizeComputer( + self.common, + block, + self.minsize, + self.maxsize, + self.minsize, + push_lasti, + None, + ) + # The entry cannot be smaller than abs(stc.minimal_entry_size) as otherwise + # we an underflow would have occured. 
+ self.common.exception_block_startsize[b_id] = self.minsize + self.common.exception_block_maxsize[b_id] = block_size + + def _is_stacksize_computation_relevant( + self, block_id: int, fingerprint: Tuple[int, Optional[bool]] + ) -> bool: + if sys.version_info >= (3, 11): + # The computation is relevant if the block was not visited previously + # with the same starting size and exception handler status than the + # one in use + return fingerprint not in self.common.blocks_startsizes[block_id] + else: + # The computation is relevant if the block was only visited with smaller + # starting sizes than the one in use + if sizes := self.common.blocks_startsizes[block_id]: + return fingerprint[0] > max(f[0] for f in sizes) + else: + return True + + +class ControlFlowGraph(_bytecode.BaseBytecode): + def __init__(self) -> None: + super().__init__() + self._blocks: List[BasicBlock] = [] + self._block_index: Dict[int, int] = {} + self.argnames: List[str] = [] + + self.add_block() + + def legalize(self) -> None: + """Legalize all blocks.""" + current_lineno = self.first_lineno + for block in self._blocks: + current_lineno = block.legalize(current_lineno) + + def get_block_index(self, block: BasicBlock) -> int: + try: + return self._block_index[id(block)] + except KeyError: + raise ValueError("the block is not part of this bytecode") + + def _add_block(self, block: BasicBlock) -> None: + block_index = len(self._blocks) + self._blocks.append(block) + self._block_index[id(block)] = block_index + + def add_block( + self, instructions: Optional[Iterable[Union[Instr, SetLineno]]] = None + ) -> BasicBlock: + block = BasicBlock(instructions) + self._add_block(block) + return block + + def compute_stacksize( + self, + *, + check_pre_and_post: bool = True, + compute_exception_stack_depths: bool = True, + ) -> int: + """Compute the stack size by iterating through the blocks + + The implementation make use of a generator function to avoid issue with + deeply nested recursions. + + """ + # In the absence of any block return 0 + if not self: + return 0 + + # Create the common storage for the calculation + common = _StackSizeComputationStorage( + check_pre_and_post, + seen_blocks=set(), + blocks_startsizes={id(b): set() for b in self}, + exception_block_startsize=dict.fromkeys([id(b) for b in self], 32768), + exception_block_maxsize=dict.fromkeys([id(b) for b in self], -32768), + try_begins=[], + ) + + # Starting with Python 3.10, generator and coroutines start with one object + # on the stack (None, anything is an error). + initial_stack_size = 0 + if sys.version_info >= (3, 10) and self.flags & ( + CompilerFlags.GENERATOR + | CompilerFlags.COROUTINE + | CompilerFlags.ASYNC_GENERATOR + ): + initial_stack_size = 1 + + # Create a generator/coroutine responsible of dealing with the first block + coro = _StackSizeComputer( + common, self[0], initial_stack_size, 0, 0, None, None + ).run() + + # Create a list of generator that have not yet been exhausted + coroutines: List[Generator[Union[_StackSizeComputer, int], int, None]] = [] + + push_coroutine = coroutines.append + pop_coroutine = coroutines.pop + args = None + + try: + while True: + # Mypy does not seem to honor the fact that one must send None + # to a brand new generator irrespective of its send type. + args = coro.send(None) # type: ignore + + # Consume the stored generators as long as they return a simple + # integer that is to be used to resume the last stored generator. 
+ while isinstance(args, int): + coro = pop_coroutine() + args = coro.send(args) + + # Otherwise we enter a new block and we store the generator under + # use and create a new one to process the new block + push_coroutine(coro) + coro = args.run() + + except IndexError: + # The exception occurs when all the generators have been exhausted + # in which case the last yielded value is the stacksize. + assert args is not None and isinstance(args, int) + + # Exception handling block size is reported separately since we need + # to report only the stack usage for the smallest start size for the + # block + args = max(args, *common.exception_block_maxsize.values()) + + # Check if there is dead code that may contain TryBegin/TryEnd pairs. + # For any such pair we set a huge size (the exception table format does not + # mandate a maximum value). We do so so that if the pair is fused with + # another it does not alter the computed size. + for block in self: + if not common.blocks_startsizes[id(block)]: + for i in block: + if isinstance(i, TryBegin) and i.stack_depth is UNSET: + i.stack_depth = 32768 + + # If requested update the TryBegin stack size + if compute_exception_stack_depths: + for tb in common.try_begins: + size = common.exception_block_startsize[id(tb.target)] + assert size >= 0 + tb.stack_depth = size + + return args + + def __repr__(self) -> str: + return "" % len(self._blocks) + + # Helper to obtain a flat list of instr, which does not refer to block at + # anymore. Used for comparison of different CFG. + def _get_instructions( + self, + ) -> List: + instructions: List = [] + try_begins: Dict[TryBegin, int] = {} + + for block in self: + for index, instr in enumerate(block): + if isinstance(instr, TryBegin): + assert isinstance(instr.target, BasicBlock) + try_begins.setdefault(instr, len(try_begins)) + instructions.append( + ( + "TryBegin", + try_begins[instr], + self.get_block_index(instr.target), + instr.push_lasti, + ) + ) + elif isinstance(instr, TryEnd): + instructions.append(("TryEnd", try_begins[instr.entry])) + elif isinstance(instr, Instr) and ( + instr.has_jump() or instr.is_final() + ): + if instr.has_jump(): + target_block = instr.arg + assert isinstance(target_block, BasicBlock) + # We use a concrete instr here to be able to use an integer as + # argument rather than a Label. This is fine for comparison + # purposes which is our sole goal here. + c_instr = ConcreteInstr( + instr.name, + self.get_block_index(target_block), + location=instr.location, + ) + instructions.append(c_instr) + else: + instructions.append(instr) + + if te := block.get_trailing_try_end(index): + instructions.append(("TryEnd", try_begins[te.entry])) + break + else: + instructions.append(instr) + + return instructions + + def __eq__(self, other: Any) -> bool: + if type(self) is not type(other): + return False + + if self.argnames != other.argnames: + return False + + instrs1 = self._get_instructions() + instrs2 = other._get_instructions() + if instrs1 != instrs2: + return False + # FIXME: compare block.next_block + + return super().__eq__(other) + + def __len__(self) -> int: + return len(self._blocks) + + def __iter__(self) -> Iterator[BasicBlock]: + return iter(self._blocks) + + @overload + def __getitem__(self, index: Union[int, BasicBlock]) -> BasicBlock: + ... + + @overload + def __getitem__(self: U, index: slice) -> U: + ... 
+ + def __getitem__(self, index): + if isinstance(index, BasicBlock): + index = self.get_block_index(index) + return self._blocks[index] + + def __delitem__(self, index: Union[int, BasicBlock]) -> None: + if isinstance(index, BasicBlock): + index = self.get_block_index(index) + block = self._blocks[index] + del self._blocks[index] + del self._block_index[id(block)] + for index in range(index, len(self)): + block = self._blocks[index] + self._block_index[id(block)] -= 1 + + def split_block(self, block: BasicBlock, index: int) -> BasicBlock: + if not isinstance(block, BasicBlock): + raise TypeError("expected block") + block_index = self.get_block_index(block) + + if index < 0: + raise ValueError("index must be positive") + + block = self._blocks[block_index] + if index == 0: + return block + + if index > len(block): + raise ValueError("index out of the block") + + instructions = block[index:] + if not instructions: + if block_index + 1 < len(self): + return self[block_index + 1] + + del block[index:] + + block2 = BasicBlock(instructions) + block.next_block = block2 + + for block in self[block_index + 1 :]: + self._block_index[id(block)] += 1 + + self._blocks.insert(block_index + 1, block2) + self._block_index[id(block2)] = block_index + 1 + + return block2 + + def get_dead_blocks(self) -> List[BasicBlock]: + if not self: + return [] + + seen_block_ids = set() + stack = [self[0]] + while stack: + block = stack.pop() + if id(block) in seen_block_ids: + continue + seen_block_ids.add(id(block)) + for i in block: + if isinstance(i, Instr) and isinstance(i.arg, BasicBlock): + stack.append(i.arg) + elif isinstance(i, TryBegin): + assert isinstance(i.target, BasicBlock) + stack.append(i.target) + + return [b for b in self if id(b) not in seen_block_ids] + + @staticmethod + def from_bytecode(bytecode: _bytecode.Bytecode) -> "ControlFlowGraph": + # label => instruction index + label_to_block_index = {} + jumps = [] + try_end_locations = {} + for index, instr in enumerate(bytecode): + if isinstance(instr, Label): + label_to_block_index[instr] = index + elif isinstance(instr, Instr) and isinstance(instr.arg, Label): + jumps.append((index, instr.arg)) + elif isinstance(instr, TryBegin): + assert isinstance(instr.target, Label) + jumps.append((index, instr.target)) + elif isinstance(instr, TryEnd): + try_end_locations[instr.entry] = index + + # Figure out on which index block targeted by a label start + block_starts = {} + for target_index, target_label in jumps: + target_index = label_to_block_index[target_label] + block_starts[target_index] = target_label + + bytecode_blocks = ControlFlowGraph() + bytecode_blocks._copy_attr_from(bytecode) + bytecode_blocks.argnames = list(bytecode.argnames) + + # copy instructions, convert labels to block labels + block = bytecode_blocks[0] + labels = {} + jumping_instrs: List[Instr] = [] + # Map input TryBegin to CFG TryBegins (split across blocks may yield multiple + # TryBegin from a single in the bytecode). + try_begins: Dict[TryBegin, list[TryBegin]] = {} + # Storage for TryEnds that need to be inserted at the beginning of a block. 
+ # We use a list because the same block can be reached through several paths + # with different active TryBegins + add_try_end: Dict[Label, List[TryEnd]] = defaultdict(list) + + # Track the currently active try begin + active_try_begin: Optional[TryBegin] = None + try_begin_inserted_in_block = False + last_instr: Optional[Instr] = None + for index, instr in enumerate(bytecode): + # Reference to the current block if we create a new one in the following. + old_block: BasicBlock | None = None + + # First we determine if we need to create a new block: + # - by checking the current instruction index + if index in block_starts: + old_label = block_starts[index] + # Create a new block if the last created one is not empty + # (of real instructions) + if index != 0 and (li := block.get_last_non_artificial_instruction()): + old_block = block + new_block = bytecode_blocks.add_block() + # If the last non artificial instruction is not final connect + # this block to the next. + if not li.is_final(): + block.next_block = new_block + block = new_block + if old_label is not None: + labels[old_label] = block + + # - by inspecting the last instr + elif block.get_last_non_artificial_instruction() and last_instr is not None: + # The last instruction is final but we did not create a block + # -> sounds like a block of dead code but we preserve it + if last_instr.is_final(): + old_block = block + block = bytecode_blocks.add_block() + + # We are dealing with a conditional jump + elif last_instr.has_jump(): + assert isinstance(last_instr.arg, Label) + old_block = block + new_block = bytecode_blocks.add_block() + block.next_block = new_block + block = new_block + + # If we created a new block, we check: + # - if the current instruction is a TryEnd and if the last instruction + # is final in which case we insert the TryEnd in the old block. + # - if we have a currently active TryBegin for which we may need to + # create a TryEnd in the previous block and a new TryBegin in the + # new one because the blocks are not connected. + if old_block is not None: + temp = try_begin_inserted_in_block + try_begin_inserted_in_block = False + + if old_block is not None and last_instr is not None: + # The last instruction is final, if the current instruction is a + # TryEnd insert it in the same block and move to the next instruction + if last_instr.is_final() and isinstance(instr, TryEnd): + assert active_try_begin + nte = instr.copy() + nte.entry = try_begins[active_try_begin][-1] + old_block.append(nte) + active_try_begin = None + continue + + # If we have an active TryBegin and last_instr is: + elif active_try_begin is not None: + # - a jump whose target is beyond the TryEnd of the active + # TryBegin: we remember TryEnd should be prepended to the + # target block. + if ( + last_instr.has_jump() + and active_try_begin in try_end_locations + and ( + # last_instr is a jump so arg is a Label + label_to_block_index[last_instr.arg] # type: ignore + >= try_end_locations[active_try_begin] + ) + ): + assert isinstance(last_instr.arg, Label) + add_try_end[last_instr.arg].append( + TryEnd(try_begins[active_try_begin][-1]) + ) + + # - final and the try begin originate from the current block: + # we insert a TryEnd in the old block and a new TryBegin in + # the new one since the blocks are disconnected. 
+ if last_instr.is_final() and temp: + old_block.append(TryEnd(try_begins[active_try_begin][-1])) + new_tb = TryBegin( + active_try_begin.target, active_try_begin.push_lasti + ) + block.append(new_tb) + # Add this new TryBegin to the map to properly update + # the target. + try_begins[active_try_begin].append(new_tb) + try_begin_inserted_in_block = True + + last_instr = None + + if isinstance(instr, Label): + continue + + # don't copy SetLineno objects + if isinstance(instr, (Instr, TryBegin, TryEnd)): + new = instr.copy() + if isinstance(instr, TryBegin): + assert active_try_begin is None + active_try_begin = instr + try_begin_inserted_in_block = True + assert isinstance(new, TryBegin) + try_begins[instr] = [new] + elif isinstance(instr, TryEnd): + assert isinstance(new, TryEnd) + new.entry = try_begins[instr.entry][-1] + active_try_begin = None + try_begin_inserted_in_block = False + else: + last_instr = instr + if isinstance(instr.arg, Label): + assert isinstance(new, Instr) + jumping_instrs.append(new) + + instr = new + + block.append(instr) + + # Insert the necessary TryEnds at the beginning of block that were marked + # (if we did not already insert an equivalent TryEnd earlier). + for lab, tes in add_try_end.items(): + block = labels[lab] + existing_te_entries = set() + index = 0 + # We use a while loop since the block cannot yet be iterated on since + # jumps still use labels instead of blocks + while index < len(block): + i = block[index] + index += 1 + if isinstance(i, TryEnd): + existing_te_entries.add(i.entry) + else: + break + for te in tes: + if te.entry not in existing_te_entries: + labels[lab].insert(0, te) + existing_te_entries.add(te.entry) + + # Replace labels by block in jumping instructions + for instr in jumping_instrs: + label = instr.arg + assert isinstance(label, Label) + instr.arg = labels[label] + + # Replace labels by block in TryBegin + for b_tb, c_tbs in try_begins.items(): + label = b_tb.target + assert isinstance(label, Label) + for c_tb in c_tbs: + c_tb.target = labels[label] + + return bytecode_blocks + + def to_bytecode(self) -> _bytecode.Bytecode: + """Convert to Bytecode.""" + + used_blocks = set() + for block in self: + target_block = block.get_jump() + if target_block is not None: + used_blocks.add(id(target_block)) + + for tb in (i for i in block if isinstance(i, TryBegin)): + used_blocks.add(id(tb.target)) + + labels = {} + jumps = [] + try_begins = {} + seen_try_end: Set[TryBegin] = set() + instructions: List[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = [] + + # Track the last seen TryBegin and TryEnd to be able to fuse adjacent + # TryEnd/TryBegin pair which share the same target. + # In each case, we store the value found in the CFG and the value + # inserted in the bytecode. + last_try_begin: tuple[TryBegin, TryBegin] | None = None + last_try_end: tuple[TryEnd, TryEnd] | None = None + + for block in self: + if id(block) in used_blocks: + new_label = Label() + labels[id(block)] = new_label + instructions.append(new_label) + + for instr in block: + # don't copy SetLineno objects + if isinstance(instr, (Instr, TryBegin, TryEnd)): + new = instr.copy() + if isinstance(instr, TryBegin): + # If due to jumps and split TryBegin, we encounter a TryBegin + # while we still have a TryBegin ensure they can be fused. 
+ if last_try_begin is not None: + cfg_tb, byt_tb = last_try_begin + assert instr.target is cfg_tb.target + assert instr.push_lasti == cfg_tb.push_lasti + byt_tb.stack_depth = min( + byt_tb.stack_depth, instr.stack_depth + ) + + # If the TryBegin share the target and push_lasti of the + # entry of an adjacent TryEnd, omit the new TryBegin that + # was inserted to allow analysis of the CFG and remove + # the already inserted TryEnd. + if last_try_end is not None: + cfg_te, byt_te = last_try_end + entry = cfg_te.entry + if ( + entry.target is instr.target + and entry.push_lasti == instr.push_lasti + ): + # If we did not yet compute the required stack depth + # keep the value as UNSET + if entry.stack_depth is UNSET: + assert instr.stack_depth is UNSET + byt_te.entry.stack_depth = UNSET + else: + byt_te.entry.stack_depth = min( + entry.stack_depth, instr.stack_depth + ) + try_begins[instr] = byt_te.entry + instructions.remove(byt_te) + continue + assert isinstance(new, TryBegin) + try_begins[instr] = new + last_try_begin = (instr, new) + last_try_end = None + elif isinstance(instr, TryEnd): + # Only keep the first seen TryEnd matching a TryBegin + assert isinstance(new, TryEnd) + if instr.entry in seen_try_end: + continue + seen_try_end.add(instr.entry) + new.entry = try_begins[instr.entry] + last_try_begin = None + last_try_end = (instr, new) + elif isinstance(instr.arg, BasicBlock): + assert isinstance(new, Instr) + jumps.append(new) + last_try_end = None + else: + last_try_end = None + + instr = new + + instructions.append(instr) + + # Map to new labels + for instr in jumps: + instr.arg = labels[id(instr.arg)] + + for tb in set(try_begins.values()): + tb.target = labels[id(tb.target)] + + bytecode = _bytecode.Bytecode() + bytecode._copy_attr_from(self) + bytecode.argnames = list(self.argnames) + bytecode[:] = instructions + + return bytecode + + def to_code( + self, + stacksize: Optional[int] = None, + *, + check_pre_and_post: bool = True, + compute_exception_stack_depths: bool = True, + ) -> types.CodeType: + """Convert to code.""" + if stacksize is None: + stacksize = self.compute_stacksize( + check_pre_and_post=check_pre_and_post, + compute_exception_stack_depths=compute_exception_stack_depths, + ) + bc = self.to_bytecode() + return bc.to_code( + stacksize=stacksize, + check_pre_and_post=False, + compute_exception_stack_depths=False, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py new file mode 100644 index 0000000..4908e1c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py @@ -0,0 +1,1419 @@ +import dis +import inspect +import opcode as _opcode +import struct +import sys +import types +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + MutableSequence, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +# alias to keep the 'bytecode' variable free +import bytecode as _bytecode +from bytecode.flags import CompilerFlags +from bytecode.instr import ( + _UNSET, + BITFLAG2_INSTRUCTIONS, + BITFLAG_INSTRUCTIONS, + INTRINSIC, + INTRINSIC_1OP, + INTRINSIC_2OP, + PLACEHOLDER_LABEL, + UNSET, + BaseInstr, + CellVar, + Compare, + FreeVar, + Instr, + InstrArg, + InstrLocation, + Intrinsic1Op, + Intrinsic2Op, + Label, + SetLineno, + TryBegin, + TryEnd, + _check_arg_int, + const_key, + opcode_has_argument, +) + +# - jumps use instruction +# - lineno use bytes (dis.findlinestarts(code)) +# - dis displays bytes +OFFSET_AS_INSTRUCTION = sys.version_info >= (3, 10) 
+ + +def _set_docstring(code: _bytecode.BaseBytecode, consts: Sequence) -> None: + if not consts: + return + first_const = consts[0] + if isinstance(first_const, str) or first_const is None: + code.docstring = first_const + + +T = TypeVar("T", bound="ConcreteInstr") + + +class ConcreteInstr(BaseInstr[int]): + """Concrete instruction. + + arg must be an integer in the range 0..2147483647. + + It has a read-only size attribute. + + """ + + # For ConcreteInstr the argument is always an integer + _arg: int + + __slots__ = ("_size", "_extended_args") + + def __init__( + self, + name: str, + arg: int = UNSET, + *, + lineno: Union[int, None, _UNSET] = UNSET, + location: Optional[InstrLocation] = None, + extended_args: Optional[int] = None, + ): + # Allow to remember a potentially meaningless EXTENDED_ARG emitted by + # Python to properly compute the size and avoid messing up the jump + # targets + self._extended_args = extended_args + super().__init__(name, arg, lineno=lineno, location=location) + + def _check_arg(self, name: str, opcode: int, arg: int) -> None: + if opcode_has_argument(opcode): + if arg is UNSET: + raise ValueError("operation %s requires an argument" % name) + + _check_arg_int(arg, name) + # opcode == 0 corresponds to CACHE instruction in 3.11+ and was unused before + elif opcode == 0: + arg = arg if arg is not UNSET else 0 + _check_arg_int(arg, name) + else: + if arg is not UNSET: + raise ValueError("operation %s has no argument" % name) + + def _set( + self, + name: str, + arg: int, + ) -> None: + super()._set(name, arg) + size = 2 + if arg is not UNSET: + while arg > 0xFF: + size += 2 + arg >>= 8 + if self._extended_args is not None: + size = 2 + 2 * self._extended_args + self._size = size + + @property + def size(self) -> int: + return self._size + + def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, int]: + return (self._location, self._name, self._arg) + + def get_jump_target(self, instr_offset: int) -> Optional[int]: + # When a jump arg is zero the jump always points to the first non-CACHE + # opcode following the jump. The passed in offset is the offset at + # which the jump opcode starts. So to compute the target, we add to it + # the instruction size (accounting for extended args) and the + # number of caches expected to follow the jump instruction. 
+ s = ( + (self._size // 2) if OFFSET_AS_INSTRUCTION else self._size + ) + self.use_cache_opcodes() + if self.is_forward_rel_jump(): + return instr_offset + s + self._arg + if self.is_backward_rel_jump(): + return instr_offset + s - self._arg + if self.is_abs_jump(): + return self._arg + return None + + def assemble(self) -> bytes: + if self._arg is UNSET: + return bytes((self._opcode, 0)) + + arg = self._arg + b = [self._opcode, arg & 0xFF] + while arg > 0xFF: + arg >>= 8 + b[:0] = [_opcode.EXTENDED_ARG, arg & 0xFF] + + if self._extended_args: + while len(b) < self._size: + b[:0] = [_opcode.EXTENDED_ARG, 0x00] + + return bytes(b) + + @classmethod + def disassemble(cls: Type[T], lineno: Optional[int], code: bytes, offset: int) -> T: + index = 2 * offset if OFFSET_AS_INSTRUCTION else offset + op = code[index] + if opcode_has_argument(op): + arg = code[index + 1] + else: + arg = UNSET + name = _opcode.opname[op] + return cls(name, arg, lineno=lineno) + + def use_cache_opcodes(self) -> int: + return ( + # Not supposed to be used but we need it + dis._inline_cache_entries[self._opcode] # type: ignore + if sys.version_info >= (3, 11) + else 0 + ) + + +class ExceptionTableEntry: + """Entry for a given line in the exception table. + + All offset are expressed in instructions not in bytes. + + """ + + #: Offset in instruction between the beginning of the bytecode and the beginning + #: of this entry. + start_offset: int + + #: Offset in instruction between the beginning of the bytecode and the end + #: of this entry. This offset is inclusive meaning that the instruction it points + #: to is included in the try/except handling. + stop_offset: int + + #: Offset in instruction to the first instruction of the exception handling block. + target: int + + #: Minimal stack depth in the block delineated by start and stop + #: offset of the exception table entry. Used to restore the stack (by + #: popping items) when entering the exception handling block. + stack_depth: int + + #: Should the offset, at which an exception was raised, be pushed on the stack + #: before the exception itself (which is pushed as a single value)). + push_lasti: bool + + __slots__ = ("start_offset", "stop_offset", "target", "stack_depth", "push_lasti") + + def __init__( + self, + start_offset: int, + stop_offset: int, + target: int, + stack_depth: int, + push_lasti: bool, + ) -> None: + self.start_offset = start_offset + self.stop_offset = stop_offset + self.target = target + self.stack_depth = stack_depth + self.push_lasti = push_lasti + + def __repr__(self) -> str: + return ( + "ExceptionTableEntry(" + f"start_offset={self.start_offset}, " + f"stop_offset={self.stop_offset}, " + f"target={self.target}, " + f"stack_depth={self.stack_depth}, " + f"push_lasti={self.push_lasti}" + ) + + +class ConcreteBytecode(_bytecode._BaseBytecodeList[Union[ConcreteInstr, SetLineno]]): + #: List of "constant" objects for the bytecode + consts: List + + #: List of names used by local variables. + names: List[str] + + #: List of names used by input variables. + varnames: List[str] + + #: Table describing portion of the bytecode in which exceptions are caught and + #: where there are handled. + #: Used only in Python 3.11+ + exception_table: List[ExceptionTableEntry] + + def __init__( + self, + instructions=(), + *, + consts: tuple = (), + names: Tuple[str, ...] 
= (), + varnames: Iterable[str] = (), + exception_table: Optional[List[ExceptionTableEntry]] = None, + ): + super().__init__() + self.consts = list(consts) + self.names = list(names) + self.varnames = list(varnames) + self.exception_table = exception_table or [] + for instr in instructions: + self._check_instr(instr) + self.extend(instructions) + + def __iter__(self) -> Iterator[Union[ConcreteInstr, SetLineno]]: + instructions = super().__iter__() + for instr in instructions: + self._check_instr(instr) + yield instr + + def _check_instr(self, instr: Any) -> None: + if not isinstance(instr, (ConcreteInstr, SetLineno)): + raise ValueError( + "ConcreteBytecode must only contain " + "ConcreteInstr and SetLineno objects, " + "but %s was found" % type(instr).__name__ + ) + + def _copy_attr_from(self, bytecode): + super()._copy_attr_from(bytecode) + if isinstance(bytecode, ConcreteBytecode): + self.consts = bytecode.consts + self.names = bytecode.names + self.varnames = bytecode.varnames + + def __repr__(self) -> str: + return "" % len(self) + + def __eq__(self, other: Any) -> bool: + if type(self) is not type(other): + return False + + const_keys1 = list(map(const_key, self.consts)) + const_keys2 = list(map(const_key, other.consts)) + if const_keys1 != const_keys2: + return False + + if self.names != other.names: + return False + if self.varnames != other.varnames: + return False + + return super().__eq__(other) + + @staticmethod + def from_code( + code: types.CodeType, *, extended_arg: bool = False + ) -> "ConcreteBytecode": + instructions: MutableSequence[Union[SetLineno, ConcreteInstr]] + # For Python 3.11+ we use dis to extract the detailed location information at + # reduced maintenance cost. + if sys.version_info >= (3, 11): + instructions = [ + # dis.get_instructions automatically handle extended arg which + # we do not want, so we fold back arguments to be between 0 and 255 + ConcreteInstr( + i.opname, + i.arg % 256 if i.arg is not None else UNSET, + location=InstrLocation.from_positions(i.positions) + if i.positions + else None, + ) + for i in dis.get_instructions(code, show_caches=True) + ] + else: + if sys.version_info >= (3, 10): + line_starts = dict( + (offset, lineno) for offset, _, lineno in code.co_lines() + ) + else: + line_starts = dict(dis.findlinestarts(code)) + + # find block starts + instructions = [] + offset = 0 + lineno: Optional[int] = code.co_firstlineno + while offset < (len(code.co_code) // (2 if OFFSET_AS_INSTRUCTION else 1)): + lineno_off = (2 * offset) if OFFSET_AS_INSTRUCTION else offset + if lineno_off in line_starts: + lineno = line_starts[lineno_off] + + instr = ConcreteInstr.disassemble(lineno, code.co_code, offset) + + instructions.append(instr) + offset += (instr.size // 2) if OFFSET_AS_INSTRUCTION else instr.size + + bytecode = ConcreteBytecode() + + # HINT : in some cases Python generate useless EXTENDED_ARG opcode + # with a value of zero. Such opcodes do not increases the size of the + # following opcode the way a normal EXTENDED_ARG does. As a + # consequence, they need to be tracked manually as otherwise the + # offsets in jump targets can end up being wrong. 
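The HINT above (and _remove_extended_args later in this file) deals with arguments wider than one byte: CPython emits one EXTENDED_ARG prefix per extra byte, each carrying 8 high bits of the argument. A small sketch of that byte split, independent of the vendored classes (the helper name is mine):

import opcode
from typing import List

def split_arg(arg: int) -> List[int]:
    # Return the byte values CPython would emit: EXTENDED_ARG payloads
    # (most significant first) followed by the low byte for the real opcode.
    parts = [arg & 0xFF]
    while arg > 0xFF:
        arg >>= 8
        parts.insert(0, arg & 0xFF)
    return parts

assert split_arg(0x010203) == [0x01, 0x02, 0x03]  # needs two EXTENDED_ARG prefixes
assert split_arg(7) == [7]                        # small args need no prefix
assert opcode.EXTENDED_ARG                        # the prefix opcode exists on all supported versions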
+ if not extended_arg: + # The list is modified in place + bytecode._remove_extended_args(instructions) + + bytecode.name = code.co_name + bytecode.filename = code.co_filename + bytecode.flags = CompilerFlags(code.co_flags) + bytecode.argcount = code.co_argcount + bytecode.posonlyargcount = code.co_posonlyargcount + bytecode.kwonlyargcount = code.co_kwonlyargcount + bytecode.first_lineno = code.co_firstlineno + bytecode.names = list(code.co_names) + bytecode.consts = list(code.co_consts) + bytecode.varnames = list(code.co_varnames) + bytecode.freevars = list(code.co_freevars) + bytecode.cellvars = list(code.co_cellvars) + _set_docstring(bytecode, code.co_consts) + if sys.version_info >= (3, 11): + bytecode.exception_table = bytecode._parse_exception_table( + code.co_exceptiontable + ) + bytecode.qualname = code.co_qualname + else: + bytecode.qualname = bytecode.qualname + + bytecode[:] = instructions + return bytecode + + @staticmethod + def _normalize_lineno( + instructions: Sequence[Union[ConcreteInstr, SetLineno]], first_lineno: int + ) -> Iterator[Tuple[int, ConcreteInstr]]: + lineno = first_lineno + # For each instruction compute an "inherited" lineno used: + # - on 3.8 and 3.9 for which a lineno is mandatory + # - to infer a lineno on 3.10+ if no lineno was provided + for instr in instructions: + i_lineno = instr.lineno + # if instr.lineno is not set, it's inherited from the previous + # instruction, or from self.first_lineno + if i_lineno is not None and i_lineno is not UNSET: + lineno = i_lineno + + if isinstance(instr, ConcreteInstr): + yield (lineno, instr) + + def _assemble_code( + self, + ) -> Tuple[bytes, List[Tuple[int, int, int, Optional[InstrLocation]]]]: + offset = 0 + code_str = [] + linenos = [] + for lineno, instr in self._normalize_lineno(self, self.first_lineno): + code_str.append(instr.assemble()) + i_size = instr.size + linenos.append( + ( + (offset * 2) if OFFSET_AS_INSTRUCTION else offset, + i_size, + lineno, + instr.location, + ) + ) + offset += (i_size // 2) if OFFSET_AS_INSTRUCTION else i_size + + return (b"".join(code_str), linenos) + + # Used on 3.8 and 3.9 + @staticmethod + def _assemble_lnotab( + first_lineno: int, linenos: List[Tuple[int, int, int, Optional[InstrLocation]]] + ) -> bytes: + lnotab = [] + old_offset = 0 + old_lineno = first_lineno + for offset, _, lineno, _ in linenos: + dlineno = lineno - old_lineno + if dlineno == 0: + continue + old_lineno = lineno + + doff = offset - old_offset + old_offset = offset + + while doff > 255: + lnotab.append(b"\xff\x00") + doff -= 255 + + while dlineno < -128: + lnotab.append(struct.pack("Bb", doff, -128)) + doff = 0 + dlineno -= -128 + + while dlineno > 127: + lnotab.append(struct.pack("Bb", doff, 127)) + doff = 0 + dlineno -= 127 + + assert 0 <= doff <= 255 + assert -128 <= dlineno <= 127 + + lnotab.append(struct.pack("Bb", doff, dlineno)) + + return b"".join(lnotab) + + @staticmethod + def _pack_linetable( + linetable: List[bytes], doff: int, dlineno: Optional[int] + ) -> None: + if dlineno is not None: + # Ensure linenos are between -126 and +126, by using 127 lines jumps with + # a 0 byte offset + while dlineno < -127: + linetable.append(struct.pack("Bb", 0, -127)) + dlineno -= -127 + + while dlineno > 127: + linetable.append(struct.pack("Bb", 0, 127)) + dlineno -= 127 + + assert -127 <= dlineno <= 127 + else: + dlineno = -128 + + # Ensure offsets are less than 255. 
+ # If an offset is larger, we first mark the line change with an offset of 254 + # then use as many 254 offset with no line change to reduce the offset to + # less than 254. + if doff > 254: + linetable.append(struct.pack("Bb", 254, dlineno)) + doff -= 254 + + while doff > 254: + linetable.append(b"\xfe\x00") + doff -= 254 + linetable.append(struct.pack("Bb", doff, 0)) + + else: + linetable.append(struct.pack("Bb", doff, dlineno)) + + assert 0 <= doff <= 254 + + # Used on 3.10 + def _assemble_linestable( + self, + first_lineno: int, + linenos: Iterable[Tuple[int, int, int, Optional[InstrLocation]]], + ) -> bytes: + if not linenos: + return b"" + + linetable: List[bytes] = [] + old_offset = 0 + + iter_in = iter(linenos) + + offset, i_size, old_lineno, old_location = next(iter_in) + if old_location is not None: + old_dlineno = ( + old_location.lineno - first_lineno + if old_location.lineno is not None + else None + ) + else: + old_dlineno = old_lineno - first_lineno + + for offset, i_size, lineno, location in iter_in: + if location is not None: + dlineno = ( + location.lineno - old_lineno + if location.lineno is not None + else None + ) + else: + dlineno = lineno - old_lineno + + if dlineno == 0 or (old_dlineno is None and dlineno is None): + continue + old_lineno = lineno + + doff = offset - old_offset + old_offset = offset + + self._pack_linetable(linetable, doff, old_dlineno) + old_dlineno = dlineno + + # Pack the line of the last instruction. + doff = offset + i_size - old_offset + self._pack_linetable(linetable, doff, old_dlineno) + + return b"".join(linetable) + + # The formats are describes in CPython/Objects/locations.md + @staticmethod + def _encode_location_varint(varint: int) -> bytearray: + encoded = bytearray() + # We encode on 6 bits + while True: + encoded.append(varint & 0x3F) + varint >>= 6 + if varint: + encoded[-1] |= 0x40 # bit 6 is set except on the last entry + else: + break + return encoded + + def _encode_location_svarint(self, svarint: int) -> bytearray: + if svarint < 0: + return self._encode_location_varint(((-svarint) << 1) | 1) + else: + return self._encode_location_varint(svarint << 1) + + # Python 3.11+ location format encoding + @staticmethod + def _pack_location_header(code: int, size: int) -> int: + return (1 << 7) + (code << 3) + (size - 1 if size <= 8 else 7) + + def _pack_location( + self, size: int, lineno: int, location: Optional[InstrLocation] + ) -> bytearray: + packed = bytearray() + + l_lineno: Optional[int] + # The location was not set so we infer a line. 
+ if location is None: + l_lineno, end_lineno, col_offset, end_col_offset = ( + lineno, + None, + None, + None, + ) + else: + l_lineno, end_lineno, col_offset, end_col_offset = ( + location.lineno, + location.end_lineno, + location.col_offset, + location.end_col_offset, + ) + + # We have no location information so the code is 15 + if l_lineno is None: + packed.append(self._pack_location_header(15, size)) + + # No column info, code 13 + elif col_offset is None: + if end_lineno is not None and end_lineno != l_lineno: + raise ValueError( + "An instruction cannot have no column offset and span " + f"multiple lines (lineno: {l_lineno}, end lineno: {end_lineno}" + ) + packed.extend( + ( + self._pack_location_header(13, size), + *self._encode_location_svarint(l_lineno - lineno), + ) + ) + + # We enforce the end_lineno to be defined + else: + assert end_lineno is not None + assert end_col_offset is not None + + # Short forms + if ( + end_lineno == l_lineno + and l_lineno - lineno == 0 + and col_offset < 72 + and (end_col_offset - col_offset) <= 15 + ): + packed.extend( + ( + self._pack_location_header(col_offset // 8, size), + ((col_offset % 8) << 4) + (end_col_offset - col_offset), + ) + ) + + # One line form + elif ( + end_lineno == l_lineno + and l_lineno - lineno in (1, 2) + and col_offset < 256 + and end_col_offset < 256 + ): + packed.extend( + ( + self._pack_location_header(10 + l_lineno - lineno, size), + col_offset, + end_col_offset, + ) + ) + + # Long form + else: + packed.extend( + ( + self._pack_location_header(14, size), + *self._encode_location_svarint(l_lineno - lineno), + *self._encode_location_varint(end_lineno - l_lineno), + # When decoding in codeobject.c::advance_with_locations + # we remove 1 from the offset ... + *self._encode_location_varint(col_offset + 1), + *self._encode_location_varint(end_col_offset + 1), + ) + ) + + return packed + + def _push_locations( + self, + locations: List[bytearray], + size: int, + lineno: int, + location: InstrLocation, + ) -> int: + # We need the size in instruction not in bytes + size //= 2 + + # Repeatedly add element since we cannot cover more than 8 code + # elements. We recompute each time since in practice we will + # rarely loop. + while True: + locations.append(self._pack_location(size, lineno, location)) + # Update the lineno since if we need more than one entry the + # reference for the delta of the lineno change + lineno = location.lineno if location.lineno is not None else lineno + size -= 8 + if size < 1: + break + + return lineno + + def _assemble_locations( + self, + first_lineno: int, + linenos: Iterable[Tuple[int, int, int, Optional[InstrLocation]]], + ) -> bytes: + if not linenos: + return b"" + + locations: List[bytearray] = [] + + iter_in = iter(linenos) + + _, size, lineno, old_location = next(iter_in) + # Infer the line if location is None + old_location = old_location or InstrLocation(lineno, None, None, None) + lineno = first_lineno + + # We track the last set lineno to be able to compute deltas + for _, i_size, new_lineno, location in iter_in: + # Infer the line if location is None + location = location or InstrLocation(new_lineno, None, None, None) + + # Group together instruction with equivalent locations + if old_location.lineno and old_location == location: + size += i_size + continue + + lineno = self._push_locations(locations, size, lineno, old_location) + + size = i_size + old_location = location + + # Pack the line of the last instruction. 
+ self._push_locations(locations, size, lineno, old_location) + + return b"".join(locations) + + @staticmethod + def _remove_extended_args( + instructions: MutableSequence[Union[SetLineno, ConcreteInstr]] + ) -> None: + # replace jump targets with blocks + # HINT : in some cases Python generate useless EXTENDED_ARG opcode + # with a value of zero. Such opcodes do not increases the size of the + # following opcode the way a normal EXTENDED_ARG does. As a + # consequence, they need to be tracked manually as otherwise the + # offsets in jump targets can end up being wrong. + nb_extended_args = 0 + extended_arg = None + index = 0 + while index < len(instructions): + instr = instructions[index] + + # Skip SetLineno meta instruction + if isinstance(instr, SetLineno): + index += 1 + continue + + if instr.name == "EXTENDED_ARG": + nb_extended_args += 1 + if extended_arg is not None: + extended_arg = (extended_arg << 8) + instr.arg + else: + extended_arg = instr.arg + + del instructions[index] + continue + + if extended_arg is not None: + arg = UNSET if instr.name == "NOP" else (extended_arg << 8) + instr.arg + extended_arg = None + + instr = ConcreteInstr( + instr.name, + arg, + location=instr.location, + extended_args=nb_extended_args, + ) + instructions[index] = instr + nb_extended_args = 0 + + index += 1 + + if extended_arg is not None: + raise ValueError("EXTENDED_ARG at the end of the code") + + # Taken and adapted from exception_handling_notes.txt in cpython/Objects + @staticmethod + def _parse_varint(except_table_iterator: Iterator[int]) -> int: + b = next(except_table_iterator) + val = b & 63 + while b & 64: + val <<= 6 + b = next(except_table_iterator) + val |= b & 63 + return val + + def _parse_exception_table( + self, exception_table: bytes + ) -> List[ExceptionTableEntry]: + assert sys.version_info >= (3, 11) + table = [] + iterator = iter(exception_table) + try: + while True: + start = self._parse_varint(iterator) + length = self._parse_varint(iterator) + end = start + length - 1 # Present as inclusive + target = self._parse_varint(iterator) + dl = self._parse_varint(iterator) + depth = dl >> 1 + lasti = bool(dl & 1) + table.append(ExceptionTableEntry(start, end, target, depth, lasti)) + except StopIteration: + return table + + @staticmethod + def _encode_varint(value: int, set_begin_marker: bool = False) -> Iterator[int]: + # Encode value as a varint on 7 bits (MSB should come first) and set + # the begin marker if requested. 
+ temp: List[int] = [] + assert value >= 0 + while value: + temp.append(value & 63 | (64 if temp else 0)) + value >>= 6 + temp = temp or [0] + if set_begin_marker: + temp[-1] |= 128 + return reversed(temp) + + def _assemble_exception_table(self) -> bytes: + table = bytearray() + for entry in self.exception_table or []: + size = entry.stop_offset - entry.start_offset + 1 + depth = (entry.stack_depth << 1) + entry.push_lasti + table.extend(self._encode_varint(entry.start_offset, True)) + table.extend(self._encode_varint(size)) + table.extend(self._encode_varint(entry.target)) + table.extend(self._encode_varint(depth)) + + return bytes(table) + + def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int: + bytecode = self.to_bytecode() + cfg = _bytecode.ControlFlowGraph.from_bytecode(bytecode) + return cfg.compute_stacksize(check_pre_and_post=check_pre_and_post) + + def to_code( + self, + stacksize: Optional[int] = None, + *, + check_pre_and_post: bool = True, + compute_exception_stack_depths: bool = True, + ) -> types.CodeType: + # Prevent reconverting the concrete bytecode to bytecode and cfg to do the + # calculation if we need to do it. + if stacksize is None or ( + sys.version_info >= (3, 11) and compute_exception_stack_depths + ): + cfg = _bytecode.ControlFlowGraph.from_bytecode(self.to_bytecode()) + stacksize = cfg.compute_stacksize( + check_pre_and_post=check_pre_and_post, + compute_exception_stack_depths=compute_exception_stack_depths, + ) + self = cfg.to_bytecode().to_concrete_bytecode( + compute_exception_stack_depths=False + ) + + # Assemble the code string after round tripping to CFG if necessary. + code_str, linenos = self._assemble_code() + + lnotab = ( + self._assemble_locations(self.first_lineno, linenos) + if sys.version_info >= (3, 11) + else ( + self._assemble_linestable(self.first_lineno, linenos) + if sys.version_info >= (3, 10) + else self._assemble_lnotab(self.first_lineno, linenos) + ) + ) + nlocals = len(self.varnames) + + if sys.version_info >= (3, 11): + return types.CodeType( + self.argcount, + self.posonlyargcount, + self.kwonlyargcount, + nlocals, + stacksize, + int(self.flags), + code_str, + tuple(self.consts), + tuple(self.names), + tuple(self.varnames), + self.filename, + self.name, + self.qualname, + self.first_lineno, + lnotab, + self._assemble_exception_table(), + tuple(self.freevars), + tuple(self.cellvars), + ) + else: + return types.CodeType( + self.argcount, + self.posonlyargcount, + self.kwonlyargcount, + nlocals, + stacksize, + int(self.flags), + code_str, + tuple(self.consts), + tuple(self.names), + tuple(self.varnames), + self.filename, + self.name, + self.first_lineno, + lnotab, + tuple(self.freevars), + tuple(self.cellvars), + ) + + def to_bytecode( + self, + prune_caches: bool = True, + conserve_exception_block_stackdepth: bool = False, + ) -> _bytecode.Bytecode: + # On 3.11 we generate pseudo-instruction from the exception table + + # Copy instruction and remove extended args if any (in-place) + c_instructions = self[:] + self._remove_extended_args(c_instructions) + + # Find jump targets + jump_targets: Set[int] = set() + offset = 0 + for c_instr in c_instructions: + if isinstance(c_instr, SetLineno): + continue + target = c_instr.get_jump_target(offset) + if target is not None: + jump_targets.add(target) + offset += (c_instr.size // 2) if OFFSET_AS_INSTRUCTION else c_instr.size + + # On 3.11+ we need to also look at the exception table for jump targets + for ex_entry in self.exception_table: + jump_targets.add(ex_entry.target) + + 
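_parse_varint and _encode_varint above follow CPython's exception-table scheme: 6 data bits per byte, bit 6 (0x40) as a continuation flag, and bit 7 (0x80) marking the first byte of a table entry. A standalone round-trip sketch mirroring that scheme (helper names are mine, not calls into the vendored module):

from typing import Iterator, List

def encode_varint(value: int, begin: bool = False) -> List[int]:
    # Most significant 6-bit chunk first; every byte except the last sets
    # the continuation bit, and the first byte optionally sets the marker.
    out: List[int] = [value & 0x3F]
    value >>= 6
    while value:
        out.insert(0, (value & 0x3F) | 0x40)
        value >>= 6
    if begin:
        out[0] |= 0x80
    return out

def decode_varint(it: Iterator[int]) -> int:
    b = next(it)
    val = b & 0x3F
    while b & 0x40:
        b = next(it)
        val = (val << 6) | (b & 0x3F)
    return val

data = encode_varint(300, begin=True)
assert decode_varint(iter(data)) == 300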
# Create look up dict to find entries based on either exception handling + # block exit or entry offsets. Several blocks can end on the same instruction + # so we store a list of entry per offset. + ex_start: Dict[int, ExceptionTableEntry] = {} + ex_end: Dict[int, List[ExceptionTableEntry]] = {} + for entry in self.exception_table: + # Ensure we do not have more than one entry with identical starting + # offsets + assert entry.start_offset not in ex_start + ex_start[entry.start_offset] = entry + ex_end.setdefault(entry.stop_offset, []).append(entry) + + # Create labels and instructions + jumps: List[Tuple[int, int]] = [] + instructions: List[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = [] + labels = {} + tb_instrs: Dict[ExceptionTableEntry, TryBegin] = {} + offset = 0 + # In Python 3.11+ cell and varnames can be shared and are indexed in a single + # array. + # As a consequence, the instruction argument can be either: + # - < len(varnames): the name is shared an we can directly use + # the index to access the name in cellvars + # - > len(varnames): the name is not shared and is offset by the + # number unshared varname. + # Free vars are never shared and correspond to index larger than the + # largest cell var. + # See PyCode_NewWithPosOnlyArgs + if sys.version_info >= (3, 11): + cells_lookup = self.varnames + [ + n for n in self.cellvars if n not in self.varnames + ] + ncells = len(cells_lookup) + else: + ncells = len(self.cellvars) + cells_lookup = self.cellvars + + for lineno, c_instr in self._normalize_lineno( + c_instructions, self.first_lineno + ): + if offset in jump_targets: + label = Label() + labels[offset] = label + instructions.append(label) + + # Handle TryBegin pseudo instructions + if offset in ex_start: + entry = ex_start[offset] + tb_instr = TryBegin( + Label(), + entry.push_lasti, + entry.stack_depth if conserve_exception_block_stackdepth else UNSET, + ) + # Per entry store the pseudo instruction associated + tb_instrs[entry] = tb_instr + instructions.append(tb_instr) + + jump_target = c_instr.get_jump_target(offset) + size = c_instr.size + # If an instruction uses extended args, those appear before the instruction + # causing the instruction to appear at offset that accounts for extended + # args. So we first update the offset to account for extended args, then + # record the instruction offset and then add the instruction itself to the + # offset. + offset += (size // 2 - 1) if OFFSET_AS_INSTRUCTION else (size - 2) + current_instr_offset = offset + offset += 1 if OFFSET_AS_INSTRUCTION else 2 + + # on Python 3.11+ remove CACHE opcodes if we are requested to do so. + # We are careful to first advance the offset and check that the CACHE + # is not a jump target. It should never be the case but we double check. + if prune_caches and c_instr.name == "CACHE": + assert jump_target is None + + # We may need to insert a TryEnd after a CACHE so we need to run the + # through the last block. 
+ else: + arg: InstrArg + c_arg = c_instr.arg + # FIXME: better error reporting + if c_instr.opcode in _opcode.hasconst: + arg = self.consts[c_arg] + elif c_instr.opcode in _opcode.haslocal: + arg = self.varnames[c_arg] + elif c_instr.opcode in _opcode.hasname: + if c_instr.name in BITFLAG_INSTRUCTIONS: + arg = (bool(c_arg & 1), self.names[c_arg >> 1]) + elif c_instr.name in BITFLAG2_INSTRUCTIONS: + arg = (bool(c_arg & 1), bool(c_arg & 2), self.names[c_arg >> 2]) + else: + arg = self.names[c_arg] + elif c_instr.opcode in _opcode.hasfree: + if c_arg < ncells: + name = cells_lookup[c_arg] + arg = CellVar(name) + else: + name = self.freevars[c_arg - ncells] + arg = FreeVar(name) + elif c_instr.opcode in _opcode.hascompare: + arg = Compare( + (c_arg >> 4) if sys.version_info >= (3, 12) else c_arg + ) + elif c_instr.opcode in INTRINSIC_1OP: + arg = Intrinsic1Op(c_arg) + elif c_instr.opcode in INTRINSIC_2OP: + arg = Intrinsic2Op(c_arg) + else: + arg = c_arg + + location = c_instr.location or InstrLocation(lineno, None, None, None) + + if jump_target is not None: + arg = PLACEHOLDER_LABEL + instr_index = len(instructions) + jumps.append((instr_index, jump_target)) + + instructions.append(Instr(c_instr.name, arg, location=location)) + + # We now insert the TryEnd entries + if current_instr_offset in ex_end: + entries = ex_end[current_instr_offset] + for entry in reversed(entries): + instructions.append(TryEnd(tb_instrs[entry])) + + # Replace jump targets with labels + for index, jump_target in jumps: + instr = instructions[index] + assert isinstance(instr, Instr) and instr.arg is PLACEHOLDER_LABEL + # FIXME: better error reporting on missing label + instr.arg = labels[jump_target] + + # Set the label for TryBegin + for entry, tb in tb_instrs.items(): + tb.target = labels[entry.target] + + bytecode = _bytecode.Bytecode() + bytecode._copy_attr_from(self) + + nargs = bytecode.argcount + bytecode.kwonlyargcount + nargs += bytecode.posonlyargcount + if bytecode.flags & inspect.CO_VARARGS: + nargs += 1 + if bytecode.flags & inspect.CO_VARKEYWORDS: + nargs += 1 + bytecode.argnames = self.varnames[:nargs] + _set_docstring(bytecode, self.consts) + + bytecode.extend(instructions) + return bytecode + + +class _ConvertBytecodeToConcrete: + # XXX document attributes + + #: Default number of passes of compute_jumps() before giving up. Refer to + #: assemble_jump_offsets() in compile.c for background. 
+ _compute_jumps_passes = 10 + + def __init__(self, code: _bytecode.Bytecode) -> None: + assert isinstance(code, _bytecode.Bytecode) + self.bytecode = code + + # temporary variables + self.instructions: List[ConcreteInstr] = [] + self.jumps: List[Tuple[int, Label, ConcreteInstr]] = [] + self.labels: Dict[Label, int] = {} + self.exception_handling_blocks: Dict[TryBegin, ExceptionTableEntry] = {} + self.required_caches = 0 + self.seen_manual_cache = False + + # used to build ConcreteBytecode() object + self.consts_indices: Dict[Union[bytes, Tuple[type, int]], int] = {} + self.consts_list: List[Any] = [] + self.names: List[str] = [] + self.varnames: List[str] = [] + + def add_const(self, value: Any) -> int: + key = const_key(value) + if key in self.consts_indices: + return self.consts_indices[key] + index = len(self.consts_indices) + self.consts_indices[key] = index + self.consts_list.append(value) + return index + + @staticmethod + def add(names: List[str], name: str) -> int: + try: + index = names.index(name) + except ValueError: + index = len(names) + names.append(name) + return index + + def concrete_instructions(self) -> None: + lineno = self.bytecode.first_lineno + # Track instruction (index) using cell vars and free vars to be able to update + # the index used once all the names are known. + cell_instrs: List[int] = [] + free_instrs: List[int] = [] + + for instr in self.bytecode: + # Enforce proper use of CACHE opcode on Python 3.11+ by checking we get the + # number we expect or directly generate the needed ones. + if isinstance(instr, Instr) and instr.name == "CACHE": + if not self.required_caches: + raise RuntimeError("Found a CACHE opcode when none was expected.") + self.seen_manual_cache = True + self.required_caches -= 1 + + elif self.required_caches: + if not self.seen_manual_cache: + # We preserve the location of the instruction requiring the + # presence of cache instructions + self.instructions.extend( + [ + ConcreteInstr( + "CACHE", 0, location=self.instructions[-1].location + ) + for i in range(self.required_caches) + ] + ) + self.required_caches = 0 + self.seen_manual_cache = False + else: + raise RuntimeError( + "Found some manual opcode but less than expected. " + f"Missing {self.required_caches} CACHE opcodes." + ) + + if isinstance(instr, Label): + self.labels[instr] = len(self.instructions) + continue + + if isinstance(instr, SetLineno): + lineno = instr.lineno + continue + + if isinstance(instr, TryBegin): + # We expect the stack depth to have be provided or computed earlier + assert instr.stack_depth is not UNSET + # NOTE here we store the index of the instruction at which the + # exception table entry starts. This is not the final value we want, + # we want the offset in the bytecode but that requires to compute + # the jumps first to resolve any possible extended arg needed in a + # jump. + self.exception_handling_blocks[instr] = ExceptionTableEntry( + len(self.instructions), 0, 0, instr.stack_depth, instr.push_lasti + ) + continue + + # Do not handle TryEnd before we insert possible CACHE opcode + if isinstance(instr, TryEnd): + entry = self.exception_handling_blocks[instr.entry] + # The TryEnd is located after the last opcode in the exception entry + # so we move the offset by one. 
We choose one so that the end does + # encompass a possible EXTENDED_ARG + entry.stop_offset = len(self.instructions) - 1 + continue + + assert isinstance(instr, Instr) + + if instr.lineno is not UNSET and instr.lineno is not None: + lineno = instr.lineno + elif instr.lineno is UNSET: + instr.lineno = lineno + + arg = instr.arg + is_jump = False + if isinstance(arg, Label): + label = arg + # fake value, real value is set in compute_jumps() + arg = 0 + is_jump = True + elif instr.opcode in _opcode.hasconst: + arg = self.add_const(arg) + elif instr.opcode in _opcode.haslocal: + assert isinstance(arg, str) + arg = self.add(self.varnames, arg) + elif instr.opcode in _opcode.hasname: + if instr.name in BITFLAG_INSTRUCTIONS: + assert ( + isinstance(arg, tuple) + and len(arg) == 2 + and isinstance(arg[0], bool) + and isinstance(arg[1], str) + ), arg + index = self.add(self.names, arg[1]) + arg = int(arg[0]) + (index << 1) + elif instr.name in BITFLAG2_INSTRUCTIONS: + assert ( + isinstance(arg, tuple) + and len(arg) == 3 + and isinstance(arg[0], bool) + and isinstance(arg[1], bool) + and isinstance(arg[2], str) + ), arg + index = self.add(self.names, arg[2]) + arg = int(arg[0]) + 2 * int(arg[1]) + (index << 2) + else: + assert isinstance(arg, str), f"Got {arg}, expected a str" + arg = self.add(self.names, arg) + elif instr.opcode in _opcode.hasfree: + if isinstance(arg, CellVar): + cell_instrs.append(len(self.instructions)) + arg = self.bytecode.cellvars.index(arg.name) + else: + assert isinstance(arg, FreeVar) + free_instrs.append(len(self.instructions)) + arg = self.bytecode.freevars.index(arg.name) + elif instr.opcode in _opcode.hascompare: + if isinstance(arg, Compare): + # In Python 3.12 the 4 lowest bits are used for caching + # See compare_masks in compile.c + if sys.version_info >= (3, 12): + arg = arg._get_mask() + (arg.value << 4) + else: + arg = arg.value + elif instr.opcode in INTRINSIC: + if isinstance(arg, (Intrinsic1Op, Intrinsic2Op)): + arg = arg.value + + # The above should have performed all the necessary conversion + assert isinstance(arg, int) + c_instr = ConcreteInstr(instr.name, arg, location=instr.location) + if is_jump: + self.jumps.append((len(self.instructions), label, c_instr)) + + # If the instruction expect some cache + if sys.version_info >= (3, 11): + self.required_caches = c_instr.use_cache_opcodes() + self.seen_manual_cache = False + + self.instructions.append(c_instr) + + # On Python 3.11 varnames and cells can share some names. Wind the shared + # names and update the arg argument of instructions using cell vars. 
+ # We also track by how much to offset free vars which are stored in a + # contiguous array after the cell vars + if sys.version_info >= (3, 11): + # Map naive cell index to shared index + shared_name_indexes: Dict[int, int] = {} + n_shared = 0 + n_unshared = 0 + for i, name in enumerate(self.bytecode.cellvars): + if name in self.varnames: + shared_name_indexes[i] = self.varnames.index(name) + n_shared += 1 + else: + shared_name_indexes[i] = len(self.varnames) + n_unshared + n_unshared += 1 + + for index in cell_instrs: + c_instr = self.instructions[index] + c_instr.arg = shared_name_indexes[c_instr.arg] + + free_offset = len(self.varnames) + len(self.bytecode.cellvars) - n_shared + else: + free_offset = len(self.bytecode.cellvars) + + for index in free_instrs: + c_instr = self.instructions[index] + c_instr.arg += free_offset + + def compute_jumps(self) -> bool: + # For labels we need the offset before the instruction at a given index but for + # exception table entries we need the offset of the instruction which can differ + # in the presence of extended args... + label_offsets = [] + instruction_offsets = [] + offset = 0 + for index, instr in enumerate(self.instructions): + label_offsets.append(offset) + # If an instruction uses extended args, those appear before the instruction + # causing the instruction to appear at offset that accounts for extended + # args. + offset += ( + (instr.size // 2 - 1) if OFFSET_AS_INSTRUCTION else (instr.size - 2) + ) + instruction_offsets.append(offset) + offset += 1 if OFFSET_AS_INSTRUCTION else 2 + # needed if a label is at the end + label_offsets.append(offset) + + # FIXME may need some extra check to validate jump forward vs jump backward + # fix argument of jump instructions: resolve labels + modified = False + for index, label, instr in self.jumps: + target_index = self.labels[label] + target_offset = label_offsets[target_index] + + # FIXME use opcode + # Under 3.12+, FOR_ITER, SEND jump is increased by 1 implicitely + # to skip over END_FOR, END_SEND see Python/instrumentation.c + if sys.version_info >= (3, 12) and instr.name in ("FOR_ITER", "SEND"): + target_offset -= 1 + + if instr.is_forward_rel_jump(): + instr_offset = label_offsets[index] + target_offset -= instr_offset + ( + instr.size // 2 if OFFSET_AS_INSTRUCTION else instr.size + ) + elif instr.is_backward_rel_jump(): + instr_offset = label_offsets[index] + target_offset = ( + instr_offset + + (instr.size // 2 if OFFSET_AS_INSTRUCTION else instr.size) + - target_offset + ) + + old_size = instr.size + # FIXME: better error report if target_offset is negative + instr.arg = target_offset + if instr.size != old_size: + modified = True + + # If a jump required an extended arg hence invalidating the calculation + # we return early before filling the exception table entries + if modified: + return modified + + # Resolve labels for exception handling entries + for tb, entry in self.exception_handling_blocks.items(): + # Set the offset for the start and end offset from the instruction + # index stored when assembling the concrete instructions. 
+ entry.start_offset = instruction_offsets[entry.start_offset] + entry.stop_offset = instruction_offsets[entry.stop_offset] + + # Set the offset to the target instruction + lb = tb.target + assert isinstance(lb, Label) + target_index = self.labels[lb] + target_offset = label_offsets[target_index] + entry.target = target_offset + + return False + + def to_concrete_bytecode( + self, + compute_jumps_passes: Optional[int] = None, + compute_exception_stack_depths: bool = True, + ) -> ConcreteBytecode: + if sys.version_info >= (3, 11) and compute_exception_stack_depths: + cfg = _bytecode.ControlFlowGraph.from_bytecode(self.bytecode) + cfg.compute_stacksize(compute_exception_stack_depths=True) + self.bytecode = cfg.to_bytecode() + + if compute_jumps_passes is None: + compute_jumps_passes = self._compute_jumps_passes + + first_const = self.bytecode.docstring + if first_const is not UNSET: + self.add_const(first_const) + + self.varnames.extend(self.bytecode.argnames) + + self.concrete_instructions() + for pas in range(0, compute_jumps_passes): + modified = self.compute_jumps() + if not modified: + break + else: + raise RuntimeError( + "compute_jumps() failed to converge after" " %d passes" % (pas + 1) + ) + + concrete = ConcreteBytecode( + self.instructions, + consts=tuple(self.consts_list), + names=tuple(self.names), + varnames=self.varnames, + exception_table=list(self.exception_handling_blocks.values()), + ) + concrete._copy_attr_from(self.bytecode) + return concrete diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py new file mode 100644 index 0000000..039150f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py @@ -0,0 +1,187 @@ +import opcode +import sys +from enum import IntFlag +from typing import Optional, Union + +# alias to keep the 'bytecode' variable free +import bytecode as _bytecode + + +class CompilerFlags(IntFlag): + """Possible values of the co_flags attribute of Code object. + + Note: We do not rely on inspect values here as some of them are missing and + furthermore would be version dependent. + + """ + + OPTIMIZED = 0x00001 # noqa + NEWLOCALS = 0x00002 # noqa + VARARGS = 0x00004 # noqa + VARKEYWORDS = 0x00008 # noqa + NESTED = 0x00010 # noqa + GENERATOR = 0x00020 # noqa + NOFREE = 0x00040 # noqa + # New in Python 3.5 + # Used for coroutines defined using async def ie native coroutine + COROUTINE = 0x00080 # noqa + # Used for coroutines defined as a generator and then decorated using + # types.coroutine + ITERABLE_COROUTINE = 0x00100 # noqa + # New in Python 3.6 + # Generator defined in an async def function + ASYNC_GENERATOR = 0x00200 # noqa + + # __future__ flags + # future flags changed in Python 3.9 + if sys.version_info < (3, 9): + FUTURE_GENERATOR_STOP = 0x80000 # noqa + FUTURE_ANNOTATIONS = 0x100000 + else: + FUTURE_GENERATOR_STOP = 0x800000 # noqa + FUTURE_ANNOTATIONS = 0x1000000 + + +def infer_flags( + bytecode: Union[ + "_bytecode.Bytecode", "_bytecode.ConcreteBytecode", "_bytecode.ControlFlowGraph" + ], + is_async: Optional[bool] = None, +): + """Infer the proper flags for a bytecode based on the instructions. + + Because the bytecode does not have enough context to guess if a function + is asynchronous the algorithm tries to be conservative and will never turn + a previously async code into a sync one. 
+ + Parameters + ---------- + bytecode : Bytecode | ConcreteBytecode | ControlFlowGraph + Bytecode for which to infer the proper flags + is_async : bool | None, optional + Force the code to be marked as asynchronous if True, prevent it from + being marked as asynchronous if False and simply infer the best + solution based on the opcode and the existing flag if None. + + """ + flags = CompilerFlags(0) + if not isinstance( + bytecode, + (_bytecode.Bytecode, _bytecode.ConcreteBytecode, _bytecode.ControlFlowGraph), + ): + msg = ( + "Expected a Bytecode, ConcreteBytecode or ControlFlowGraph " + "instance not %s" + ) + raise ValueError(msg % bytecode) + + instructions = ( + bytecode._get_instructions() + if isinstance(bytecode, _bytecode.ControlFlowGraph) + else bytecode + ) + instr_names = { + i.name + for i in instructions + if not isinstance( + i, + ( + _bytecode.SetLineno, + _bytecode.Label, + _bytecode.TryBegin, + _bytecode.TryEnd, + ), + ) + } + + # Identify optimized code + if not (instr_names & {"STORE_NAME", "LOAD_NAME", "DELETE_NAME"}): + flags |= CompilerFlags.OPTIMIZED + + # Check for free variables + if not (instr_names & {opcode.opname[i] for i in opcode.hasfree}): + flags |= CompilerFlags.NOFREE + + # Copy flags for which we cannot infer the right value + flags |= bytecode.flags & ( + CompilerFlags.NEWLOCALS + | CompilerFlags.VARARGS + | CompilerFlags.VARKEYWORDS + | CompilerFlags.NESTED + ) + + sure_generator = instr_names & {"YIELD_VALUE"} + maybe_generator = instr_names & {"YIELD_VALUE", "YIELD_FROM"} + + sure_async = instr_names & { + "GET_AWAITABLE", + "GET_AITER", + "GET_ANEXT", + "BEFORE_ASYNC_WITH", + "SETUP_ASYNC_WITH", + "END_ASYNC_FOR", + "ASYNC_GEN_WRAP", # New in 3.11 + } + + # If performing inference or forcing an async behavior, first inspect + # the flags since this is the only way to identify iterable coroutines + if is_async in (None, True): + if bytecode.flags & CompilerFlags.COROUTINE: + if sure_generator: + flags |= CompilerFlags.ASYNC_GENERATOR + else: + flags |= CompilerFlags.COROUTINE + elif bytecode.flags & CompilerFlags.ITERABLE_COROUTINE: + if sure_async: + msg = ( + "The ITERABLE_COROUTINE flag is set but bytecode that" + "can only be used in async functions have been " + "detected. Please unset that flag before performing " + "inference." + ) + raise ValueError(msg) + flags |= CompilerFlags.ITERABLE_COROUTINE + elif bytecode.flags & CompilerFlags.ASYNC_GENERATOR: + if not sure_generator: + flags |= CompilerFlags.COROUTINE + else: + flags |= CompilerFlags.ASYNC_GENERATOR + + # If the code was not asynchronous before determine if it should now be + # asynchronous based on the opcode and the is_async argument. + else: + if sure_async: + # YIELD_FROM is not allowed in async generator + if sure_generator: + flags |= CompilerFlags.ASYNC_GENERATOR + else: + flags |= CompilerFlags.COROUTINE + + elif maybe_generator: + if is_async: + if sure_generator: + flags |= CompilerFlags.ASYNC_GENERATOR + else: + flags |= CompilerFlags.COROUTINE + else: + flags |= CompilerFlags.GENERATOR + + elif is_async: + flags |= CompilerFlags.COROUTINE + + # If the code should not be asynchronous, check first it is possible and + # next set the GENERATOR flag if relevant + else: + if sure_async: + raise ValueError( + "The is_async argument is False but bytecodes " + "that can only be used in async functions have " + "been detected." 
+ ) + + if maybe_generator: + flags |= CompilerFlags.GENERATOR + + flags |= bytecode.flags & CompilerFlags.FUTURE_GENERATOR_STOP + + return flags diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py new file mode 100644 index 0000000..e927cdf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py @@ -0,0 +1,878 @@ +import dis +import enum +import opcode as _opcode +import sys +from abc import abstractmethod +from dataclasses import dataclass +from marshal import dumps as _dumps +from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union + +try: + from typing import TypeGuard +except ImportError: + from typing_extensions import TypeGuard # type: ignore + +import bytecode as _bytecode + +# --- Instruction argument tools and + +MIN_INSTRUMENTED_OPCODE = getattr(_opcode, "MIN_INSTRUMENTED_OPCODE", 256) + +# Instructions relying on a bit to modify its behavior. +# The lowest bit is used to encode custom behavior. +BITFLAG_INSTRUCTIONS = ( + ("LOAD_GLOBAL", "LOAD_ATTR") + if sys.version_info >= (3, 12) + else ("LOAD_GLOBAL",) + if sys.version_info >= (3, 11) + else () +) + +BITFLAG2_INSTRUCTIONS = ("LOAD_SUPER_ATTR",) if sys.version_info >= (3, 12) else () + +# Intrinsic related opcodes +INTRINSIC_1OP = ( + (_opcode.opmap["CALL_INTRINSIC_1"],) if sys.version_info >= (3, 12) else () +) +INTRINSIC_2OP = ( + (_opcode.opmap["CALL_INTRINSIC_2"],) if sys.version_info >= (3, 12) else () +) +INTRINSIC = INTRINSIC_1OP + INTRINSIC_2OP + + +# Used for COMPARE_OP opcode argument +@enum.unique +class Compare(enum.IntEnum): + LT = 0 + LE = 1 + EQ = 2 + NE = 3 + GT = 4 + GE = 5 + if sys.version_info < (3, 9): + IN = 6 + NOT_IN = 7 + IS = 8 + IS_NOT = 9 + EXC_MATCH = 10 + + if sys.version_info >= (3, 12): + + def _get_mask(self): + if self == Compare.EQ: + return 8 + elif self == Compare.NE: + return 1 + 2 + 4 + elif self == Compare.LT: + return 2 + elif self == Compare.LE: + return 2 + 8 + elif self == Compare.GT: + return 4 + elif self == Compare.GE: + return 4 + 8 + + +# Used for BINARY_OP under Python 3.11+ +@enum.unique +class BinaryOp(enum.IntEnum): + ADD = 0 + AND = 1 + FLOOR_DIVIDE = 2 + LSHIFT = 3 + MATRIX_MULTIPLY = 4 + MULTIPLY = 5 + REMAINDER = 6 + OR = 7 + POWER = 8 + RSHIFT = 9 + SUBTRACT = 10 + TRUE_DIVIDE = 11 + XOR = 12 + INPLACE_ADD = 13 + INPLACE_AND = 14 + INPLACE_FLOOR_DIVIDE = 15 + INPLACE_LSHIFT = 16 + INPLACE_MATRIX_MULTIPLY = 17 + INPLACE_MULTIPLY = 18 + INPLACE_REMAINDER = 19 + INPLACE_OR = 20 + INPLACE_POWER = 21 + INPLACE_RSHIFT = 22 + INPLACE_SUBTRACT = 23 + INPLACE_TRUE_DIVIDE = 24 + INPLACE_XOR = 25 + + +@enum.unique +class Intrinsic1Op(enum.IntEnum): + INTRINSIC_1_INVALID = 0 + INTRINSIC_PRINT = 1 + INTRINSIC_IMPORT_STAR = 2 + INTRINSIC_STOPITERATION_ERROR = 3 + INTRINSIC_ASYNC_GEN_WRAP = 4 + INTRINSIC_UNARY_POSITIVE = 5 + INTRINSIC_LIST_TO_TUPLE = 6 + INTRINSIC_TYPEVAR = 7 + INTRINSIC_PARAMSPEC = 8 + INTRINSIC_TYPEVARTUPLE = 9 + INTRINSIC_SUBSCRIPT_GENERIC = 10 + INTRINSIC_TYPEALIAS = 11 + + +@enum.unique +class Intrinsic2Op(enum.IntEnum): + INTRINSIC_2_INVALID = 0 + INTRINSIC_PREP_RERAISE_STAR = 1 + INTRINSIC_TYPEVAR_WITH_BOUND = 2 + INTRINSIC_TYPEVAR_WITH_CONSTRAINTS = 3 + INTRINSIC_SET_FUNCTION_TYPE_PARAMS = 4 + + +# This make type checking happy but means it won't catch attempt to manipulate an unset +# statically. 
We would need guard on object attribute narrowed down through methods +class _UNSET(int): + instance = None + + def __new__(cls): + if cls.instance is None: + cls.instance = super().__new__(cls) + return cls.instance + + def __eq__(self, other) -> bool: + return self is other + + +for op in [ + "__abs__", + "__add__", + "__and__", + "__bool__", + "__ceil__", + "__divmod__", + "__float__", + "__floor__", + "__floordiv__", + "__ge__", + "__gt__", + "__hash__", + "__index__", + "__int__", + "__invert__", + "__le__", + "__lshift__", + "__lt__", + "__mod__", + "__mul__", + "__ne__", + "__neg__", + "__or__", + "__pos__", + "__pow__", + "__radd__", + "__rand__", + "__rdivmod__", + "__rfloordiv__", + "__rlshift__", + "__rmod__", + "__rmul__", + "__ror__", + "__round__", + "__rpow__", + "__rrshift__", + "__rshift__", + "__rsub__", + "__rtruediv__", + "__rxor__", + "__sub__", + "__truediv__", + "__trunc__", + "__xor__", +]: + setattr(_UNSET, op, lambda *args: NotImplemented) + + +UNSET = _UNSET() + + +def const_key(obj: Any) -> Union[bytes, Tuple[type, int]]: + try: + return _dumps(obj) + except ValueError: + # For other types, we use the object identifier as an unique identifier + # to ensure that they are seen as unequal. + return (type(obj), id(obj)) + + +class Label: + __slots__ = () + + +#: Placeholder label temporarily used when performing some conversions +#: concrete -> bytecode +PLACEHOLDER_LABEL = Label() + + +class _Variable: + __slots__ = ("name",) + + def __init__(self, name: str) -> None: + self.name: str = name + + def __eq__(self, other: Any) -> bool: + if type(self) is not type(other): + return False + return self.name == other.name + + def __str__(self) -> str: + return self.name + + def __repr__(self) -> str: + return "<%s %r>" % (self.__class__.__name__, self.name) + + +class CellVar(_Variable): + __slots__ = () + + +class FreeVar(_Variable): + __slots__ = () + + +def _check_arg_int(arg: Any, name: str) -> TypeGuard[int]: + if not isinstance(arg, int): + raise TypeError( + "operation %s argument must be an int, " + "got %s" % (name, type(arg).__name__) + ) + + if not (0 <= arg <= 2147483647): + raise ValueError( + "operation %s argument must be in " "the range 0..2,147,483,647" % name + ) + + return True + + +if sys.version_info >= (3, 12): + + def opcode_has_argument(opcode: int) -> bool: + return opcode in dis.hasarg + +else: + + def opcode_has_argument(opcode: int) -> bool: + return opcode >= dis.HAVE_ARGUMENT + + +# --- Instruction stack effect impact + +# We split the stack effect between the manipulations done on the stack before +# executing the instruction (fetching the elements that are going to be used) +# and what is pushed back on the stack after the execution is complete. 
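The tables that follow store (pre, post) pairs: pre is the (non-positive) number of stack items an instruction consumes before it runs, post is the number it pushes back afterwards, and their sum matches the net effect reported by dis.stack_effect. A small illustrative cross-check for COMPARE_OP (the assertion holds on the CPython versions this module targets, but treat it as a sketch rather than part of the library):

import dis
import opcode as _op

# COMPARE_OP pops two operands before running and pushes one result,
# so pre + post equals the net effect dis reports (-1).
pre, post = -2, 1
net = dis.stack_effect(_op.opmap["COMPARE_OP"], 0)
assert pre + post == net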
+ +# Stack effects that do not depend on the argument of the instruction +STATIC_STACK_EFFECTS: Dict[str, Tuple[int, int]] = { + "ROT_TWO": (-2, 2), + "ROT_THREE": (-3, 3), + "ROT_FOUR": (-4, 4), + "DUP_TOP": (-1, 2), + "DUP_TOP_TWO": (-2, 4), + "GET_LEN": (-1, 2), + "GET_ITER": (-1, 1), + "GET_YIELD_FROM_ITER": (-1, 1), + "GET_AWAITABLE": (-1, 1), + "GET_AITER": (-1, 1), + "GET_ANEXT": (-1, 2), + "LIST_TO_TUPLE": (-1, 1), + "LIST_EXTEND": (-2, 1), + "SET_UPDATE": (-2, 1), + "DICT_UPDATE": (-2, 1), + "DICT_MERGE": (-2, 1), + "COMPARE_OP": (-2, 1), + "IS_OP": (-2, 1), + "CONTAINS_OP": (-2, 1), + "IMPORT_NAME": (-2, 1), + "ASYNC_GEN_WRAP": (-1, 1), + "PUSH_EXC_INFO": (-1, 2), + # Pop TOS and push TOS.__aexit__ and result of TOS.__aenter__() + "BEFORE_ASYNC_WITH": (-1, 2), + # Replace TOS based on TOS and TOS1 + "IMPORT_FROM": (-1, 2), + "COPY_DICT_WITHOUT_KEYS": (-2, 2), + # Call a function at position 7 (4 3.11+) on the stack and push the return value + "WITH_EXCEPT_START": (-4, 5) if sys.version_info >= (3, 11) else (-7, 8), + # Starting with Python 3.11 MATCH_CLASS does not push a boolean anymore + "MATCH_CLASS": (-3, 1 if sys.version_info >= (3, 11) else 2), + "MATCH_MAPPING": (-1, 2), + "MATCH_SEQUENCE": (-1, 2), + "MATCH_KEYS": (-2, 3 if sys.version_info >= (3, 11) else 4), + "CHECK_EXC_MATCH": (-2, 2), # (TOS1, TOS) -> (TOS1, bool) + "CHECK_EG_MATCH": (-2, 2), # (TOS, TOS1) -> non-matched, matched or TOS1, None) + "PREP_RERAISE_STAR": (-2, 1), # (TOS1, TOS) -> new exception group) + **{k: (-1, 1) for k in (o for o in _opcode.opmap if (o.startswith("UNARY_")))}, + **{ + k: (-2, 1) + for k in ( + o + for o in _opcode.opmap + if (o.startswith("BINARY_") or o.startswith("INPLACE_")) + ) + }, + # Python 3.12 changes not covered by dis.stack_effect + "BINARY_SLICE": (-3, 1), + # "STORE_SLICE" handled by dis.stack_effect + "LOAD_FROM_DICT_OR_GLOBALS": (-1, 1), + "LOAD_FROM_DICT_OR_DEREF": (-1, 1), + "LOAD_INTRISIC_1": (-1, 1), + "LOAD_INTRISIC_2": (-2, 1), +} + + +DYNAMIC_STACK_EFFECTS: Dict[ + str, Callable[[int, Any, Optional[bool]], Tuple[int, int]] +] = { + # PRECALL pops all arguments (as per its stack effect) and leaves + # the callable and either self or NULL + # CALL pops the 2 above items and push the return + # (when PRECALL does not exist it pops more as encoded by the effect) + "CALL": lambda effect, arg, jump: ( + -2 - arg if sys.version_info >= (3, 12) else -2, + 1, + ), + # 3.12 changed the behavior of LOAD_ATTR + "LOAD_ATTR": lambda effect, arg, jump: (-1, 1 + effect), + "LOAD_SUPER_ATTR": lambda effect, arg, jump: (-3, 3 + effect), + "SWAP": lambda effect, arg, jump: (-arg, arg), + "COPY": lambda effect, arg, jump: (-arg, arg + effect), + "ROT_N": lambda effect, arg, jump: (-arg, arg), + "SET_ADD": lambda effect, arg, jump: (-arg, arg - 1), + "LIST_APPEND": lambda effect, arg, jump: (-arg, arg - 1), + "MAP_ADD": lambda effect, arg, jump: (-arg, arg - 2), + "FORMAT_VALUE": lambda effect, arg, jump: (effect - 1, 1), + # FOR_ITER needs TOS to be an iterator, hence a prerequisite of 1 on the stack + "FOR_ITER": lambda effect, arg, jump: (effect, 0) if jump else (-1, 2), + **{ + # Instr(UNPACK_* , n) pops 1 and pushes n + k: lambda effect, arg, jump: (-1, effect + 1) + for k in ( + "UNPACK_SEQUENCE", + "UNPACK_EX", + ) + }, + **{ + k: lambda effect, arg, jump: (effect - 1, 1) + for k in ( + "MAKE_FUNCTION", + "CALL_FUNCTION", + "CALL_FUNCTION_EX", + "CALL_FUNCTION_KW", + "CALL_METHOD", + *(o for o in _opcode.opmap if o.startswith("BUILD_")), + ) + }, +} + + +# --- Instruction 
location + + +def _check_location( + location: Optional[int], location_name: str, min_value: int +) -> None: + if location is None: + return + if not isinstance(location, int): + raise TypeError(f"{location_name} must be an int, got {type(location)}") + if location < min_value: + raise ValueError( + f"invalid {location_name}, expected >= {min_value}, got {location}" + ) + + +@dataclass(frozen=True) +class InstrLocation: + """Location information for an instruction.""" + + #: Lineno at which the instruction corresponds. + #: Optional so that a location of None in an instruction encode an unset value. + lineno: Optional[int] + + #: End lineno at which the instruction corresponds (Python 3.11+ only) + end_lineno: Optional[int] + + #: Column offset at which the instruction corresponds (Python 3.11+ only) + col_offset: Optional[int] + + #: End column offset at which the instruction corresponds (Python 3.11+ only) + end_col_offset: Optional[int] + + __slots__ = ["lineno", "end_lineno", "col_offset", "end_col_offset"] + + def __init__( + self, + lineno: Optional[int], + end_lineno: Optional[int], + col_offset: Optional[int], + end_col_offset: Optional[int], + ) -> None: + # Needed because we want the class to be frozen + object.__setattr__(self, "lineno", lineno) + object.__setattr__(self, "end_lineno", end_lineno) + object.__setattr__(self, "col_offset", col_offset) + object.__setattr__(self, "end_col_offset", end_col_offset) + # In Python 3.11 0 is a valid lineno for some instructions (RESUME for example) + _check_location(lineno, "lineno", 0 if sys.version_info >= (3, 11) else 1) + _check_location(end_lineno, "end_lineno", 1) + _check_location(col_offset, "col_offset", 0) + _check_location(end_col_offset, "end_col_offset", 0) + if end_lineno: + if lineno is None: + raise ValueError("End lineno specified with no lineno.") + elif lineno > end_lineno: + raise ValueError( + f"End lineno {end_lineno} cannot be smaller than lineno {lineno}." + ) + + if col_offset is not None or end_col_offset is not None: + if lineno is None or end_lineno is None: + raise ValueError( + "Column offsets were specified but lineno information are " + f"incomplete. Lineno: {lineno}, end lineno: {end_lineno}." + ) + if end_col_offset is not None: + if col_offset is None: + raise ValueError( + "End column offset specified with no column offset." + ) + # Column offset must be increasing inside a signle line but + # have no relations between different lines. + elif lineno == end_lineno and col_offset > end_col_offset: + raise ValueError( + f"End column offset {end_col_offset} cannot be smaller than " + f"column offset: {col_offset}." + ) + else: + raise ValueError( + "No end column offset was specified but a column offset was given." 
+ ) + + @classmethod + def from_positions(cls, position: "dis.Positions") -> "InstrLocation": # type: ignore + return InstrLocation( + position.lineno, + position.end_lineno, + position.col_offset, + position.end_col_offset, + ) + + +class SetLineno: + __slots__ = ("_lineno",) + + def __init__(self, lineno: int) -> None: + # In Python 3.11 0 is a valid lineno for some instructions (RESUME for example) + _check_location(lineno, "lineno", 0 if sys.version_info >= (3, 11) else 1) + self._lineno: int = lineno + + @property + def lineno(self) -> int: + return self._lineno + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, SetLineno): + return False + return self._lineno == other._lineno + + +# --- Pseudo instructions used to represent exception handling (3.11+) + + +class TryBegin: + __slots__ = ("target", "push_lasti", "stack_depth") + + def __init__( + self, + target: Union[Label, "_bytecode.BasicBlock"], + push_lasti: bool, + stack_depth: Union[int, _UNSET] = UNSET, + ) -> None: + self.target: Union[Label, "_bytecode.BasicBlock"] = target + self.push_lasti: bool = push_lasti + self.stack_depth: Union[int, _UNSET] = stack_depth + + def copy(self) -> "TryBegin": + return TryBegin(self.target, self.push_lasti, self.stack_depth) + + +class TryEnd: + __slots__ = "entry" + + def __init__(self, entry: TryBegin) -> None: + self.entry: TryBegin = entry + + def copy(self) -> "TryEnd": + return TryEnd(self.entry) + + +T = TypeVar("T", bound="BaseInstr") +A = TypeVar("A", bound=object) + + +class BaseInstr(Generic[A]): + """Abstract instruction.""" + + __slots__ = ("_name", "_opcode", "_arg", "_location") + + # Work around an issue with the default value of arg + def __init__( + self, + name: str, + arg: A = UNSET, # type: ignore + *, + lineno: Union[int, None, _UNSET] = UNSET, + location: Optional[InstrLocation] = None, + ) -> None: + self._set(name, arg) + if location: + self._location = location + elif lineno is UNSET: + self._location = None + else: + self._location = InstrLocation(lineno, None, None, None) + + # Work around an issue with the default value of arg + def set(self, name: str, arg: A = UNSET) -> None: # type: ignore + """Modify the instruction in-place. + + Replace name and arg attributes. Don't modify lineno. + + """ + self._set(name, arg) + + def require_arg(self) -> bool: + """Does the instruction require an argument?""" + return opcode_has_argument(self._opcode) + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, name: str) -> None: + self._set(name, self._arg) + + @property + def opcode(self) -> int: + return self._opcode + + @opcode.setter + def opcode(self, op: int) -> None: + if not isinstance(op, int): + raise TypeError("operator code must be an int") + if 0 <= op <= 255: + name = _opcode.opname[op] + valid = name != "<%r>" % op + else: + valid = False + if not valid: + raise ValueError("invalid operator code") + + self._set(name, self._arg) + + @property + def arg(self) -> A: + return self._arg + + @arg.setter + def arg(self, arg: A): + self._set(self._name, arg) + + @property + def lineno(self) -> Union[int, _UNSET, None]: + return self._location.lineno if self._location is not None else UNSET + + @lineno.setter + def lineno(self, lineno: Union[int, _UNSET, None]) -> None: + loc = self._location + if loc and ( + loc.end_lineno is not None + or loc.col_offset is not None + or loc.end_col_offset is not None + ): + raise RuntimeError( + "The lineno of an instruction with detailed location information " + "cannot be set." 
+ ) + + if lineno is UNSET: + self._location = None + else: + self._location = InstrLocation(lineno, None, None, None) + + @property + def location(self) -> Optional[InstrLocation]: + return self._location + + @location.setter + def location(self, location: Optional[InstrLocation]) -> None: + if location and not isinstance(location, InstrLocation): + raise TypeError( + "The instr location must be an instance of InstrLocation or None." + ) + self._location = location + + def stack_effect(self, jump: Optional[bool] = None) -> int: + if not self.require_arg(): + arg = None + # 3.11 where LOAD_GLOBAL arg encode whether or we push a null + # 3.12 does the same for LOAD_ATTR + elif self.name in BITFLAG_INSTRUCTIONS and isinstance(self._arg, tuple): + assert len(self._arg) == 2 + arg = self._arg[0] + # 3.12 does a similar trick for LOAD_SUPER_ATTR + elif self.name in BITFLAG2_INSTRUCTIONS and isinstance(self._arg, tuple): + assert len(self._arg) == 3 + arg = self._arg[0] + elif not isinstance(self._arg, int) or self._opcode in _opcode.hasconst: + # Argument is either a non-integer or an integer constant, + # not oparg. + arg = 0 + else: + arg = self._arg + + return dis.stack_effect(self._opcode, arg, jump=jump) + + def pre_and_post_stack_effect(self, jump: Optional[bool] = None) -> Tuple[int, int]: + # Allow to check that execution will not cause a stack underflow + _effect = self.stack_effect(jump=jump) + + n = self.name + if n in STATIC_STACK_EFFECTS: + return STATIC_STACK_EFFECTS[n] + elif n in DYNAMIC_STACK_EFFECTS: + return DYNAMIC_STACK_EFFECTS[n](_effect, self.arg, jump) + else: + # For instruction with no special value we simply consider the effect apply + # before execution + return (_effect, 0) + + def copy(self: T) -> T: + return self.__class__(self._name, self._arg, location=self._location) + + def has_jump(self) -> bool: + return self._has_jump(self._opcode) + + def is_cond_jump(self) -> bool: + """Is a conditional jump?""" + # Ex: POP_JUMP_IF_TRUE, JUMP_IF_FALSE_OR_POP + # IN 3.11+ the JUMP and the IF are no necessary adjacent in the name. 
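+ # e.g. on 3.11, POP_JUMP_FORWARD_IF_TRUE and POP_JUMP_BACKWARD_IF_NONE satisfy this check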
+ name = self._name + return "JUMP_" in name and "IF_" in name + + def is_uncond_jump(self) -> bool: + """Is an unconditional jump?""" + # JUMP_BACKWARD has been introduced in 3.11+ + # JUMP_ABSOLUTE was removed in 3.11+ + return self.name in { + "JUMP_FORWARD", + "JUMP_ABSOLUTE", + "JUMP_BACKWARD", + "JUMP_BACKWARD_NO_INTERRUPT", + } + + def is_abs_jump(self) -> bool: + """Is an absolute jump.""" + return self._opcode in _opcode.hasjabs + + def is_forward_rel_jump(self) -> bool: + """Is a forward relative jump.""" + return self._opcode in _opcode.hasjrel and "BACKWARD" not in self._name + + def is_backward_rel_jump(self) -> bool: + """Is a backward relative jump.""" + return self._opcode in _opcode.hasjrel and "BACKWARD" in self._name + + def is_final(self) -> bool: + if self._name in { + "RETURN_VALUE", + "RETURN_CONST", + "RAISE_VARARGS", + "RERAISE", + "BREAK_LOOP", + "CONTINUE_LOOP", + }: + return True + if self.is_uncond_jump(): + return True + return False + + def __repr__(self) -> str: + if self._arg is not UNSET: + return "<%s arg=%r location=%s>" % (self._name, self._arg, self._location) + else: + return "<%s location=%s>" % (self._name, self._location) + + def __eq__(self, other: Any) -> bool: + if type(self) is not type(other): + return False + return self._cmp_key() == other._cmp_key() + + # --- Private API + + _name: str + + _location: Optional[InstrLocation] + + _opcode: int + + _arg: A + + def _set(self, name: str, arg: A) -> None: + if not isinstance(name, str): + raise TypeError("operation name must be a str") + try: + opcode = _opcode.opmap[name] + except KeyError: + raise ValueError(f"invalid operation name: {name}") + + if opcode >= MIN_INSTRUMENTED_OPCODE: + raise ValueError( + f"operation {name} is an instrumented or pseudo opcode. 
" + "Only base opcodes are supported" + ) + + self._check_arg(name, opcode, arg) + + self._name = name + self._opcode = opcode + self._arg = arg + + @staticmethod + def _has_jump(opcode) -> bool: + return opcode in _opcode.hasjrel or opcode in _opcode.hasjabs + + @abstractmethod + def _check_arg(self, name: str, opcode: int, arg: A) -> None: + pass + + @abstractmethod + def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, Any]: + pass + + +InstrArg = Union[ + int, + str, + Label, + CellVar, + FreeVar, + "_bytecode.BasicBlock", + Compare, + Tuple[bool, str], + Tuple[bool, bool, str], +] + + +class Instr(BaseInstr[InstrArg]): + __slots__ = () + + def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, Any]: + arg: Any = self._arg + if self._opcode in _opcode.hasconst: + arg = const_key(arg) + return (self._location, self._name, arg) + + def _check_arg(self, name: str, opcode: int, arg: InstrArg) -> None: + if name == "EXTENDED_ARG": + raise ValueError( + "only concrete instruction can contain EXTENDED_ARG, " + "highlevel instruction can represent arbitrary argument without it" + ) + + if opcode_has_argument(opcode): + if arg is UNSET: + raise ValueError("operation %s requires an argument" % name) + else: + if arg is not UNSET: + raise ValueError("operation %s has no argument" % name) + + if self._has_jump(opcode): + if not isinstance(arg, (Label, _bytecode.BasicBlock)): + raise TypeError( + "operation %s argument type must be " + "Label or BasicBlock, got %s" % (name, type(arg).__name__) + ) + + elif opcode in _opcode.hasfree: + if not isinstance(arg, (CellVar, FreeVar)): + raise TypeError( + "operation %s argument must be CellVar " + "or FreeVar, got %s" % (name, type(arg).__name__) + ) + + elif opcode in _opcode.haslocal or opcode in _opcode.hasname: + if name in BITFLAG_INSTRUCTIONS: + if not ( + isinstance(arg, tuple) + and len(arg) == 2 + and isinstance(arg[0], bool) + and isinstance(arg[1], str) + ): + raise TypeError( + "operation %s argument must be a tuple[bool, str], " + "got %s (value=%s)" % (name, type(arg).__name__, str(arg)) + ) + + elif name in BITFLAG2_INSTRUCTIONS: + if not ( + isinstance(arg, tuple) + and len(arg) == 3 + and isinstance(arg[0], bool) + and isinstance(arg[1], bool) + and isinstance(arg[2], str) + ): + raise TypeError( + "operation %s argument must be a tuple[bool, bool, str], " + "got %s (value=%s)" % (name, type(arg).__name__, str(arg)) + ) + + elif not isinstance(arg, str): + raise TypeError( + "operation %s argument must be a str, " + "got %s" % (name, type(arg).__name__) + ) + + elif opcode in _opcode.hasconst: + if isinstance(arg, Label): + raise ValueError( + "label argument cannot be used " "in %s operation" % name + ) + if isinstance(arg, _bytecode.BasicBlock): + raise ValueError( + "block argument cannot be used " "in %s operation" % name + ) + + elif opcode in _opcode.hascompare: + if not isinstance(arg, Compare): + raise TypeError( + "operation %s argument type must be " + "Compare, got %s" % (name, type(arg).__name__) + ) + + elif opcode in INTRINSIC_1OP: + if not isinstance(arg, Intrinsic1Op): + raise TypeError( + "operation %s argument type must be " + "Intrinsic1Op, got %s" % (name, type(arg).__name__) + ) + + elif opcode in INTRINSIC_2OP: + if not isinstance(arg, Intrinsic2Op): + raise TypeError( + "operation %s argument type must be " + "Intrinsic2Op, got %s" % (name, type(arg).__name__) + ) + + elif opcode_has_argument(opcode): + _check_arg_int(arg, name) diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/py.typed 
b/lambdas/aws-dd-forwarder-3.127.0/bytecode/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py new file mode 100644 index 0000000..2d91554 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py @@ -0,0 +1,19 @@ +# This file is auto-generated by setuptools-scm do NOT edit it. + +from collections import namedtuple + +#: A namedtuple of the version info for the current release. +_version_info = namedtuple("_version_info", "major minor micro status") + +parts = "0.15.1".split(".", 3) +version_info = _version_info( + int(parts[0]), + int(parts[1]), + int(parts[2]), + parts[3] if len(parts) == 4 else "", +) + +# Remove everything but the 'version_info' from this module. +del namedtuple, _version_info, parts + +__version__ = "0.15.1" diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py new file mode 100644 index 0000000..c38aa00 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py @@ -0,0 +1,172 @@ +import json +import logging +import os +from random import randint +from time import time + +import boto3 +from botocore.exceptions import ClientError + +from caching.common import get_last_modified_time +from settings import ( + DD_S3_BUCKET_NAME, + DD_S3_CACHE_DIRNAME, + DD_S3_CACHE_LOCK_TTL_SECONDS, + DD_TAGS_CACHE_TTL_SECONDS, +) +from telemetry import send_forwarder_internal_metrics + +JITTER_MIN = 1 +JITTER_MAX = 100 +DD_TAGS_CACHE_TTL_SECONDS = DD_TAGS_CACHE_TTL_SECONDS + randint(JITTER_MIN, JITTER_MAX) + + +class BaseTagsCache(object): + def __init__( + self, + prefix, + cache_filename, + cache_lock_filename, + tags_ttl_seconds=DD_TAGS_CACHE_TTL_SECONDS, + ): + self.cache_dirname = DD_S3_CACHE_DIRNAME + self.tags_ttl_seconds = tags_ttl_seconds + self.tags_by_id = {} + self.last_tags_fetch_time = 0 + self.cache_prefix = prefix + self.cache_filename = cache_filename + self.cache_lock_filename = cache_lock_filename + self.logger = logging.getLogger() + self.logger.setLevel( + logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()) + ) + self.resource_tagging_client = boto3.client("resourcegroupstaggingapi") + self.s3_client = boto3.resource("s3") + + def get_resources_paginator(self): + return self.resource_tagging_client.get_paginator("get_resources") + + def get_cache_name_with_prefix(self): + return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_filename}" + + def get_cache_lock_with_prefix(self): + return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_lock_filename}" + + def write_cache_to_s3(self, data): + """Writes tags cache to s3""" + try: + self.logger.debug("Trying to write data to s3: {}".format(data)) + s3_object = self.s3_client.Object( + DD_S3_BUCKET_NAME, self.get_cache_name_with_prefix() + ) + s3_object.put(Body=(bytes(json.dumps(data).encode("UTF-8")))) + except ClientError: + send_forwarder_internal_metrics("s3_cache_write_failure") + self.logger.debug("Unable to write new cache to S3", exc_info=True) + + def acquire_s3_cache_lock(self): + """Acquire cache lock""" + cache_lock_object = self.s3_client.Object( + DD_S3_BUCKET_NAME, self.get_cache_lock_with_prefix() + ) + try: + file_content = cache_lock_object.get() + + # check lock file expiration + last_modified_unix_time = get_last_modified_time(file_content) + if last_modified_unix_time + DD_S3_CACHE_LOCK_TTL_SECONDS >= time(): + return False + except 
Exception: + self.logger.debug("Unable to get cache lock file") + + # lock file doesn't exist, create file to acquire lock + try: + cache_lock_object.put(Body=(bytes("lock".encode("UTF-8")))) + send_forwarder_internal_metrics("s3_cache_lock_acquired") + self.logger.debug("S3 cache lock acquired") + except ClientError: + self.logger.debug("Unable to write S3 cache lock file", exc_info=True) + return False + + return True + + def release_s3_cache_lock(self): + """Release cache lock""" + try: + cache_lock_object = self.s3_client.Object( + DD_S3_BUCKET_NAME, self.get_cache_lock_with_prefix() + ) + cache_lock_object.delete() + send_forwarder_internal_metrics("s3_cache_lock_released") + self.logger.debug("S3 cache lock released") + except ClientError: + send_forwarder_internal_metrics("s3_cache_lock_release_failure") + self.logger.debug("Unable to release S3 cache lock", exc_info=True) + + def get_cache_from_s3(self): + """Retrieves tags cache from s3 and returns the body along with + the last modified datetime for the cache""" + cache_object = self.s3_client.Object( + DD_S3_BUCKET_NAME, self.get_cache_name_with_prefix() + ) + try: + file_content = cache_object.get() + tags_cache = json.loads(file_content["Body"].read().decode("utf-8")) + last_modified_unix_time = get_last_modified_time(file_content) + except: + send_forwarder_internal_metrics("s3_cache_fetch_failure") + self.logger.debug("Unable to fetch cache from S3", exc_info=True) + return {}, -1 + + return tags_cache, last_modified_unix_time + + def _refresh(self): + """Populate the tags in the local cache by getting cache from s3 + If cache not in s3, then cache is built using build_tags_cache + """ + self.last_tags_fetch_time = time() + + # If the custom tag fetch env var is not set to true do not fetch + if not self.should_fetch_tags(): + self.logger.debug( + "Not fetching custom tags because the env variable for the cache {} is not set to true".format( + self.cache_filename + ) + ) + return + + tags_fetched, last_modified = self.get_cache_from_s3() + + if self._is_expired(last_modified): + send_forwarder_internal_metrics("s3_cache_expired") + self.logger.debug("S3 cache expired, rebuilding cache") + lock_acquired = self.acquire_s3_cache_lock() + if lock_acquired: + success, new_tags_fetched = self.build_tags_cache() + if success: + self.tags_by_id = new_tags_fetched + self.write_cache_to_s3(self.tags_by_id) + elif tags_fetched != {}: + self.tags_by_id = tags_fetched + + self.release_s3_cache_lock() + # s3 cache fetch succeeded and isn't expired + elif last_modified > -1: + self.tags_by_id = tags_fetched + + def _is_expired(self, last_modified=None): + """Returns bool for whether the fetch TTL has expired""" + if not last_modified: + last_modified = self.last_tags_fetch_time + + earliest_time_to_refetch_tags = last_modified + self.tags_ttl_seconds + return time() > earliest_time_to_refetch_tags + + def should_fetch_tags(self): + raise Exception("SHOULD FETCH TAGS MUST BE DEFINED FOR TAGS CACHES") + + def get(self, key): + raise Exception("GET TAGS MUST BE DEFINED FOR TAGS CACHES") + + def build_tags_cache(self): + raise Exception("BUILD TAGS MUST BE DEFINED FOR TAGS CACHES") diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py b/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py new file mode 100644 index 0000000..eef6a53 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py @@ -0,0 +1,24 @@ +from caching.cloudwatch_log_group_cache import CloudwatchLogGroupTagsCache +from 
caching.step_functions_cache import StepFunctionsTagsCache +from caching.s3_tags_cache import S3TagsCache +from caching.lambda_cache import LambdaTagsCache + + +class CacheLayer: + def __init__(self, prefix): + self._cloudwatch_log_group_cache = CloudwatchLogGroupTagsCache(prefix) + self._s3_tags_cache = S3TagsCache(prefix) + self._step_functions_cache = StepFunctionsTagsCache(prefix) + self._lambda_cache = LambdaTagsCache(prefix) + + def get_cloudwatch_log_group_tags_cache(self): + return self._cloudwatch_log_group_cache + + def get_s3_tags_cache(self): + return self._s3_tags_cache + + def get_step_functions_tags_cache(self): + return self._step_functions_cache + + def get_lambda_tags_cache(self): + return self._lambda_cache diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py new file mode 100644 index 0000000..f20d9a5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py @@ -0,0 +1,194 @@ +import json +import logging +import os +from random import randint +from time import time + +import boto3 +from botocore.config import Config + +from caching.common import sanitize_aws_tag_string +from settings import ( + DD_S3_BUCKET_NAME, + DD_S3_CACHE_DIRNAME, + DD_S3_LOG_GROUP_CACHE_DIRNAME, + DD_TAGS_CACHE_TTL_SECONDS, +) +from telemetry import send_forwarder_internal_metrics + + +class CloudwatchLogGroupTagsCache: + def __init__( + self, + prefix, + ): + self.cache_dirname = f"{DD_S3_CACHE_DIRNAME}/{DD_S3_LOG_GROUP_CACHE_DIRNAME}" + self.cache_ttl_seconds = DD_TAGS_CACHE_TTL_SECONDS + self.bucket_name = DD_S3_BUCKET_NAME + self.cache_prefix = prefix + self.tags_by_log_group = {} + # We need to use the standard retry mode for the Cloudwatch Logs client that defaults to 3 retries + self.cloudwatch_logs_client = boto3.client( + "logs", config=Config(retries={"mode": "standard"}) + ) + self.s3_client = boto3.client("s3") + + self.logger = logging.getLogger() + self.logger.setLevel( + logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()) + ) + + # Initialize the cache + if self._should_fetch_tags(): + self._build_tags_cache() + + def get(self, log_group_arn): + """Get the tags for the Cloudwatch Log Group from the cache + + Will refetch the tags if they are out of date, or a log group is encountered + which isn't in the tag list + + Args: + key (str): the key we're getting tags from the cache for + + Returns: + log_group_tags (str[]): the list of "key:value" Datadog tag strings + """ + # If the custom tag fetch env var is not set to true do not fetch tags + if not self._should_fetch_tags(): + self.logger.debug( + "Not fetching custom tags because the env variable DD_FETCH_LOG_GROUP_TAGS is " + "not set to true" + ) + return [] + + return self._fetch_log_group_tags(log_group_arn) + + def _should_fetch_tags(self): + return os.environ.get("DD_FETCH_LOG_GROUP_TAGS", "false").lower() == "true" + + def _build_tags_cache(self): + try: + prefix = self._get_cache_file_prefix() + response = self.s3_client.list_objects_v2( + Bucket=DD_S3_BUCKET_NAME, Prefix=prefix + ) + cache_files = [content["Key"] for content in response.get("Contents", [])] + for cache_file in cache_files: + log_group_tags, last_modified = self._get_log_group_tags_from_cache( + cache_file + ) + if log_group_tags and not self._is_expired(last_modified): + log_group = cache_file.split("/")[-1].split(".")[0] + self.tags_by_log_group[log_group] = { + "tags": log_group_tags, + "last_modified": 
last_modified, + } + self.logger.debug( + f"loggroup_tags_cache initialized successfully {self.tags_by_log_group}" + ) + except Exception: + self.logger.exception("failed to build log group tags cache", exc_info=True) + + def _fetch_log_group_tags(self, log_group_arn): + # first, check in-memory cache + log_group_tags_struct = self.tags_by_log_group.get(log_group_arn, None) + if log_group_tags_struct and not self._is_expired( + log_group_tags_struct.get("last_modified", None) + ): + send_forwarder_internal_metrics("loggroup_local_cache_hit") + return log_group_tags_struct.get("tags", []) + + # then, check cache file, update and return + cache_file_name = self._get_cache_file_name(log_group_arn) + log_group_tags, last_modified = self._get_log_group_tags_from_cache( + cache_file_name + ) + if log_group_tags and not self._is_expired(last_modified): + self.tags_by_log_group[log_group_arn] = { + "tags": log_group_tags, + "last_modified": time(), + } + send_forwarder_internal_metrics("loggroup_s3_cache_hit") + return log_group_tags + + # finally, make an api call, update and return + log_group_tags = self._get_log_group_tags(log_group_arn) or [] + self._update_log_group_tags_cache(log_group_arn, log_group_tags) + self.tags_by_log_group[log_group_arn] = { + "tags": log_group_tags, + "last_modified": time(), + } + + return log_group_tags + + def _get_log_group_tags_from_cache(self, cache_file_name): + try: + response = self.s3_client.get_object( + Bucket=self.bucket_name, Key=cache_file_name + ) + tags_cache = json.loads(response.get("Body").read().decode("utf-8")) + last_modified_unix_time = int(response.get("LastModified").timestamp()) + except Exception: + send_forwarder_internal_metrics("loggroup_cache_fetch_failure") + self.logger.exception( + "Failed to get log group tags from cache", exc_info=True + ) + return None, -1 + + return tags_cache, last_modified_unix_time + + def _update_log_group_tags_cache(self, log_group, tags): + cache_file_name = self._get_cache_file_name(log_group) + try: + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=cache_file_name, + Body=(bytes(json.dumps(tags).encode("UTF-8"))), + ) + except Exception: + send_forwarder_internal_metrics("loggroup_cache_write_failure") + self.logger.exception( + "Failed to update log group tags cache", exc_info=True + ) + + def _is_expired(self, last_modified): + if not last_modified: + return True + + # add a random number of seconds to avoid having all tags refetched at the same time + earliest_time_to_refetch_tags = ( + last_modified + self.cache_ttl_seconds + randint(1, 100) + ) + return time() > earliest_time_to_refetch_tags + + def _get_cache_file_name(self, log_group_arn): + log_group_name = log_group_arn.replace("/", "_").replace(":", "_") + return f"{self._get_cache_file_prefix()}/{log_group_name}.json" + + def _get_cache_file_prefix(self): + return f"{self.cache_dirname}/{self.cache_prefix}" + + def _get_log_group_tags(self, log_group_arn): + response = None + try: + send_forwarder_internal_metrics("list_tags_log_group_api_call") + response = self.cloudwatch_logs_client.list_tags_for_resource( + resourceArn=log_group_arn + ) + except Exception: + self.logger.exception("Failed to get log group tags", exc_info=True) + formatted_tags = None + if response is not None: + formatted_tags = [ + ( + "{key}:{value}".format( + key=sanitize_aws_tag_string(k, remove_colons=True), + value=sanitize_aws_tag_string(v, remove_leading_digits=False), + ) + if v + else sanitize_aws_tag_string(k, remove_colons=True) + ) + for k, v in 
response["tags"].items() + ] + return formatted_tags diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/common.py b/lambdas/aws-dd-forwarder-3.127.0/caching/common.py new file mode 100644 index 0000000..7d7db88 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/common.py @@ -0,0 +1,103 @@ +import os +import datetime +import logging +import re +from collections import defaultdict + +logger = logging.getLogger() +logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper())) + + +_other_chars = r"\w:\-\.\/" +Sanitize = re.compile(r"[^%s]" % _other_chars, re.UNICODE).sub +Dedupe = re.compile(r"_+", re.UNICODE).sub +FixInit = re.compile(r"^[_\d]*", re.UNICODE).sub + + +def get_last_modified_time(s3_file): + last_modified_str = s3_file["ResponseMetadata"]["HTTPHeaders"]["last-modified"] + last_modified_date = datetime.datetime.strptime( + last_modified_str, "%a, %d %b %Y %H:%M:%S %Z" + ) + last_modified_unix_time = int(last_modified_date.strftime("%s")) + return last_modified_unix_time + + +def parse_get_resources_response_for_tags_by_arn(get_resources_page): + """Parses a page of GetResources response for the mapping from ARN to tags + + Args: + get_resources_page (dict[]>): one page of the GetResources response. + Partial example: + {"ResourceTagMappingList": [{ + 'ResourceARN': 'arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda', + 'Tags': [{'Key': 'stage', 'Value': 'dev'}, {'Key': 'team', 'Value': 'serverless'}] + }]} + + Returns: + tags_by_arn (dict): Lambda tag lists keyed by ARN + """ + tags_by_arn = defaultdict(list) + + aws_resouce_tag_mappings = get_resources_page["ResourceTagMappingList"] + for aws_resource_tag_mapping in aws_resouce_tag_mappings: + function_arn = aws_resource_tag_mapping["ResourceARN"] + lowercase_function_arn = function_arn.lower() + + raw_aws_tags = aws_resource_tag_mapping["Tags"] + tags = map(get_dd_tag_string_from_aws_dict, raw_aws_tags) + + tags_by_arn[lowercase_function_arn] += tags + + return tags_by_arn + + +def get_dd_tag_string_from_aws_dict(aws_key_value_tag_dict): + """Converts the AWS dict tag format to the dd key:value string format and truncates to 200 characters + + Args: + aws_key_value_tag_dict (dict): the dict the GetResources endpoint returns for a tag + ex: { "Key": "creator", "Value": "swf"} + + Returns: + key:value colon-separated string built from the dict + ex: "creator:swf" + """ + key = sanitize_aws_tag_string(aws_key_value_tag_dict["Key"], remove_colons=True) + value = sanitize_aws_tag_string( + aws_key_value_tag_dict.get("Value"), remove_leading_digits=False + ) + # Value is optional in DD and AWS + if not value: + return key + return f"{key}:{value}"[0:200] + + +def sanitize_aws_tag_string(tag, remove_colons=False, remove_leading_digits=True): + """Convert characters banned from DD but allowed in AWS tags to underscores""" + global Sanitize, Dedupe, FixInit + + # 1. Replace colons with _ + # 2. Convert to all lowercase unicode string + # 3. Convert bad characters to underscores + # 4. Dedupe contiguous underscores + # 5. Remove initial underscores/digits such that the string + # starts with an alpha char + # FIXME: tag normalization incorrectly supports tags starting + # with a ':', but this behavior should be phased out in future + # as it results in unqueryable data. See dogweb/#11193 + # 6. 
Strip trailing underscores + + if len(tag) == 0: + # if tag is empty, nothing to do + return tag + + if remove_colons: + tag = tag.replace(":", "_") + tag = Dedupe("_", Sanitize("_", tag.lower())) + if remove_leading_digits: + first_char = tag[0] + if first_char == "_" or "0" <= first_char <= "9": + tag = FixInit("", tag) + tag = tag.rstrip("_") + return tag diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py new file mode 100644 index 0000000..e1d28e0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py @@ -0,0 +1,90 @@ +import os + +from botocore.exceptions import ClientError + +from caching.base_tags_cache import BaseTagsCache +from caching.common import parse_get_resources_response_for_tags_by_arn +from settings import ( + DD_S3_LAMBDA_CACHE_FILENAME, + DD_S3_LAMBDA_CACHE_LOCK_FILENAME, + GET_RESOURCES_LAMBDA_FILTER, +) +from telemetry import send_forwarder_internal_metrics + + +class LambdaTagsCache(BaseTagsCache): + def __init__(self, prefix): + super().__init__( + prefix, DD_S3_LAMBDA_CACHE_FILENAME, DD_S3_LAMBDA_CACHE_LOCK_FILENAME + ) + + def should_fetch_tags(self): + return os.environ.get("DD_FETCH_LAMBDA_TAGS", "false").lower() == "true" + + def build_tags_cache(self): + """Makes API calls to GetResources to get the live tags of the account's Lambda functions + + Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not set to true + + Returns: + tags_by_arn_cache (dict): each Lambda's tags in a dict keyed by ARN + """ + tags_fetch_success = False + tags_by_arn_cache = {} + resource_paginator = self.get_resources_paginator() + + try: + for page in resource_paginator.paginate( + ResourceTypeFilters=[GET_RESOURCES_LAMBDA_FILTER], ResourcesPerPage=100 + ): + send_forwarder_internal_metrics("get_resources_api_calls") + page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page) + tags_by_arn_cache.update(page_tags_by_arn) + tags_fetch_success = True + + except ClientError as e: + self.logger.exception( + "Encountered a ClientError when trying to fetch tags. 
You may need to give " + "this Lambda's role the 'tag:GetResources' permission" + ) + additional_tags = [ + f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}" + ] + send_forwarder_internal_metrics( + "client_error", additional_tags=additional_tags + ) + tags_fetch_success = False + + self.logger.debug( + "Built this tags cache from GetResources API calls: %s", tags_by_arn_cache + ) + + return tags_fetch_success, tags_by_arn_cache + + def get(self, key): + """Get the tags for the Lambda function from the cache + + Will refetch the tags if they are out of date, or a lambda arn is encountered + which isn't in the tag list + + Note: the ARNs in the cache have been lowercased, so resource_arn must be lowercased + + Args: + key (str): the key we're getting tags from the cache for + + Returns: + lambda_tags (str[]): the list of "key:value" Datadog tag strings + """ + if not self.should_fetch_tags(): + self.logger.debug( + "Not fetching lambda function tags because the env variable DD_FETCH_LAMBDA_TAGS is " + "not set to true" + ) + return [] + + if self._is_expired(): + send_forwarder_internal_metrics("local_lambda_cache_expired") + self.logger.debug("Local cache expired, fetching cache from S3") + self._refresh() + + return self.tags_by_id.get(key, []) diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py new file mode 100644 index 0000000..b60c873 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py @@ -0,0 +1,64 @@ +from botocore.exceptions import ClientError +from caching.base_tags_cache import BaseTagsCache +from caching.common import parse_get_resources_response_for_tags_by_arn +from telemetry import send_forwarder_internal_metrics +from settings import ( + DD_S3_TAGS_CACHE_FILENAME, + DD_S3_TAGS_CACHE_LOCK_FILENAME, + GET_RESOURCES_S3_FILTER, +) + + +class S3TagsCache(BaseTagsCache): + def __init__(self, prefix): + super().__init__( + prefix, DD_S3_TAGS_CACHE_FILENAME, DD_S3_TAGS_CACHE_LOCK_FILENAME + ) + + def should_fetch_tags(self): + return True + + def build_tags_cache(self): + """Makes API calls to GetResources to get the live tags of the account's S3 buckets + Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not set to true + Returns: + tags_by_arn_cache (dict): each S3 bucket's tags in a dict keyed by ARN + """ + tags_fetch_success = False + tags_by_arn_cache = {} + resource_paginator = self.get_resources_paginator() + + try: + for page in resource_paginator.paginate( + ResourceTypeFilters=[GET_RESOURCES_S3_FILTER], ResourcesPerPage=100 + ): + send_forwarder_internal_metrics("get_s3_resources_api_calls") + page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page) + tags_by_arn_cache.update(page_tags_by_arn) + tags_fetch_success = True + except ClientError as e: + self.logger.exception( + "Encountered a ClientError when trying to fetch tags. 
You may need to give " + "this Lambda's role the 'tag:GetResources' permission" + ) + additional_tags = [ + f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}" + ] + send_forwarder_internal_metrics( + "client_error", additional_tags=additional_tags + ) + tags_fetch_success = False + + self.logger.debug( + "Built this tags cache from GetResources API calls: %s", tags_by_arn_cache + ) + + return tags_fetch_success, tags_by_arn_cache + + def get(self, bucket_arn): + if self._is_expired(): + send_forwarder_internal_metrics("local_s3_tags_cache_expired") + self.logger.debug("Local cache expired, fetching cache from S3") + self._refresh() + + return self.tags_by_id.get(bucket_arn, []) diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py new file mode 100644 index 0000000..4b2c497 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py @@ -0,0 +1,144 @@ +import os +from botocore.exceptions import ClientError +from caching.base_tags_cache import BaseTagsCache +from caching.common import ( + sanitize_aws_tag_string, + parse_get_resources_response_for_tags_by_arn, +) +from telemetry import send_forwarder_internal_metrics +from settings import ( + DD_S3_STEP_FUNCTIONS_CACHE_FILENAME, + DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME, + GET_RESOURCES_STEP_FUNCTIONS_FILTER, +) + + +class StepFunctionsTagsCache(BaseTagsCache): + def __init__(self, prefix): + super().__init__( + prefix, + DD_S3_STEP_FUNCTIONS_CACHE_FILENAME, + DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME, + ) + + def should_fetch_tags(self): + return os.environ.get("DD_FETCH_STEP_FUNCTIONS_TAGS", "false").lower() == "true" + + def build_tags_cache(self): + """Makes API calls to GetResources to get the live tags of the account's Step Functions + Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not + set to true. + Returns: + tags_by_arn_cache (dict): each Lambda's tags in a dict keyed by ARN + """ + tags_fetch_success = False + tags_by_arn_cache = {} + get_resources_paginator = self.get_resources_paginator() + + try: + for page in get_resources_paginator.paginate( + ResourceTypeFilters=[GET_RESOURCES_STEP_FUNCTIONS_FILTER], + ResourcesPerPage=100, + ): + send_forwarder_internal_metrics( + "step_functions_get_resources_api_calls" + ) + page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page) + tags_by_arn_cache.update(page_tags_by_arn) + tags_fetch_success = True + + except ClientError as e: + self.logger.exception( + "Encountered a ClientError when trying to fetch tags. 
You may need to give " + "this Lambda's role the 'tag:GetResources' permission" + ) + additional_tags = [ + f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}" + ] + send_forwarder_internal_metrics( + "client_error", additional_tags=additional_tags + ) + + self.logger.debug( + "All Step Functions tags refreshed: {}".format(tags_by_arn_cache) + ) + + return tags_fetch_success, tags_by_arn_cache + + def get(self, state_machine_arn): + """Get the tags for the Step Functions from the cache + + Will re-fetch the tags if they are out of date, or a log group is encountered + which isn't in the tag list + + Args: + state_machine_arn (str): the key we're getting tags from the cache for + + Returns: + state_machine_tags (List[str]): the list of "key:value" Datadog tag strings + """ + if self._is_expired(): + send_forwarder_internal_metrics("local_step_functions_tags_cache_expired") + self.logger.debug( # noqa: F821 + "Local cache expired for Step Functions tags. Fetching cache from S3" + ) + self._refresh() + + state_machine_tags = self.tags_by_id.get(state_machine_arn, None) + if state_machine_tags is None: + # If the custom tag fetch env var is not set to true do not fetch + if not self.should_fetch_tags(): + self.logger.debug( + "Not fetching custom tags because the env variable DD_FETCH_STEP_FUNCTIONS_TAGS" + " is not set to true" + ) + return [] + state_machine_tags = self._get_state_machine_tags(state_machine_arn) or [] + self.tags_by_id[state_machine_arn] = state_machine_tags + + return state_machine_tags + + def _get_state_machine_tags(self, state_machine_arn: str): + """Return a list of tags of a state machine in dd format (max 200 chars) + + Example response from get source api: + { + "ResourceTagMappingList": [ + { + "ResourceARN": "arn:aws:states:us-east-1:1234567890:stateMachine:example-machine", + "Tags": [ + { + "Key": "ENV", + "Value": "staging" + } + ] + } + ] + } + + Args: + state_machine_arn (str): the key we're getting tags from the cache for + Returns: + state_machine_arn (List[str]): e.g. 
["k1:v1", "k2:v2"] + """ + response = None + formatted_tags = [] + + try: + send_forwarder_internal_metrics("get_state_machine_tags") + response = self.resource_tagging_client.get_resources( + ResourceARNList=[state_machine_arn] + ) + except Exception as e: + self.logger.exception(f"Failed to get Step Functions tags due to {e}") + + if response and len(response.get("ResourceTagMappingList", {})) > 0: + resource_dict = response.get("ResourceTagMappingList")[0] + for a_tag in resource_dict.get("Tags", []): + key = sanitize_aws_tag_string(a_tag["Key"], remove_colons=True) + value = sanitize_aws_tag_string( + a_tag.get("Value"), remove_leading_digits=False + ) + formatted_tags.append(f"{key}:{value}"[:200]) # same logic as lambda + + return formatted_tags diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store new file mode 100644 index 0000000..d86ee78 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py new file mode 100644 index 0000000..6c262fe --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py @@ -0,0 +1,25 @@ +from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy +from .gen import override + +__all__ = ( + "global_converter", + "unstructure", + "structure", + "structure_attrs_fromtuple", + "structure_attrs_fromdict", + "UnstructureStrategy", + "BaseConverter", + "Converter", + "GenConverter", + "override", +) +from cattrs import global_converter + +unstructure = global_converter.unstructure +structure = global_converter.structure +structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple +structure_attrs_fromdict = global_converter.structure_attrs_fromdict +register_structure_hook = global_converter.register_structure_hook +register_structure_hook_func = global_converter.register_structure_hook_func +register_unstructure_hook = global_converter.register_unstructure_hook +register_unstructure_hook_func = global_converter.register_unstructure_hook_func diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py new file mode 100644 index 0000000..4434fe5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py @@ -0,0 +1,8 @@ +from cattrs.converters import ( + BaseConverter, + Converter, + GenConverter, + UnstructureStrategy, +) + +__all__ = ["BaseConverter", "Converter", "GenConverter", "UnstructureStrategy"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py new file mode 100644 index 0000000..f10797a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py @@ -0,0 +1,3 @@ +from cattrs.disambiguators import create_uniq_field_dis_func + +__all__ = ["create_uniq_field_dis_func"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py new file mode 100644 index 0000000..2474247 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py @@ -0,0 +1,3 @@ +from cattrs.dispatch import FunctionDispatch, MultiStrategyDispatch + +__all__ = ["FunctionDispatch", "MultiStrategyDispatch"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py new file mode 100644 index 0000000..af092e9 --- /dev/null +++ 
b/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py @@ -0,0 +1,15 @@ +from cattrs.errors import ( + BaseValidationError, + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + StructureHandlerNotFoundError, +) + +__all__ = [ + "BaseValidationError", + "ClassValidationError", + "ForbiddenExtraKeysError", + "IterableValidationError", + "StructureHandlerNotFoundError", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py new file mode 100644 index 0000000..b1f63b5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py @@ -0,0 +1,21 @@ +from cattrs.cols import iterable_unstructure_factory as make_iterable_unstructure_fn +from cattrs.gen import ( + make_dict_structure_fn, + make_dict_unstructure_fn, + make_hetero_tuple_unstructure_fn, + make_mapping_structure_fn, + make_mapping_unstructure_fn, + override, +) +from cattrs.gen._consts import AttributeOverride + +__all__ = [ + "AttributeOverride", + "make_dict_structure_fn", + "make_dict_unstructure_fn", + "make_hetero_tuple_unstructure_fn", + "make_iterable_unstructure_fn", + "make_mapping_structure_fn", + "make_mapping_unstructure_fn", + "override", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py new file mode 100644 index 0000000..fa6ad35 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py @@ -0,0 +1,3 @@ +from cattrs.preconf import validate_datetime + +__all__ = ["validate_datetime"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py new file mode 100644 index 0000000..4ac9743 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for bson.""" + +from cattrs.preconf.bson import BsonConverter, configure_converter, make_converter + +__all__ = ["BsonConverter", "configure_converter", "make_converter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py new file mode 100644 index 0000000..d590bd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for the stdlib json.""" + +from cattrs.preconf.json import JsonConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "JsonConverter", "make_converter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py new file mode 100644 index 0000000..1a579d6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for msgpack.""" + +from cattrs.preconf.msgpack import MsgpackConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "make_converter", "MsgpackConverter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py new file mode 100644 index 0000000..4450990 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for orjson.""" + +from cattrs.preconf.orjson import OrjsonConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "make_converter", "OrjsonConverter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py 
b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py new file mode 100644 index 0000000..63d39f1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for pyyaml.""" + +from cattrs.preconf.pyyaml import PyyamlConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "make_converter", "PyyamlConverter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py new file mode 100644 index 0000000..6add731 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for tomlkit.""" + +from cattrs.preconf.tomlkit import TomlkitConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "make_converter", "TomlkitConverter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py new file mode 100644 index 0000000..ef85c47 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py @@ -0,0 +1,5 @@ +"""Preconfigured converters for ujson.""" + +from cattrs.preconf.ujson import UjsonConverter, configure_converter, make_converter + +__all__ = ["configure_converter", "make_converter", "UjsonConverter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/py.typed b/lambdas/aws-dd-forwarder-3.127.0/cattr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA new file mode 100644 index 0000000..0c6a750 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA @@ -0,0 +1,161 @@ +Metadata-Version: 2.3 +Name: cattrs +Version: 24.1.2 +Summary: Composable complex class support for attrs and dataclasses. 
+Project-URL: Homepage, https://catt.rs +Project-URL: Changelog, https://catt.rs/en/latest/history.html +Project-URL: Bug Tracker, https://github.com/python-attrs/cattrs/issues +Project-URL: Repository, https://github.com/python-attrs/cattrs +Project-URL: Documentation, https://catt.rs/en/stable/ +Author-email: Tin Tvrtkovic +License: MIT +License-File: LICENSE +Keywords: attrs,dataclasses,serialization +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: attrs>=23.1.0 +Requires-Dist: exceptiongroup>=1.1.1; python_version < '3.11' +Requires-Dist: typing-extensions!=4.6.3,>=4.1.0; python_version < '3.11' +Provides-Extra: bson +Requires-Dist: pymongo>=4.4.0; extra == 'bson' +Provides-Extra: cbor2 +Requires-Dist: cbor2>=5.4.6; extra == 'cbor2' +Provides-Extra: msgpack +Requires-Dist: msgpack>=1.0.5; extra == 'msgpack' +Provides-Extra: msgspec +Requires-Dist: msgspec>=0.18.5; (implementation_name == 'cpython') and extra == 'msgspec' +Provides-Extra: orjson +Requires-Dist: orjson>=3.9.2; (implementation_name == 'cpython') and extra == 'orjson' +Provides-Extra: pyyaml +Requires-Dist: pyyaml>=6.0; extra == 'pyyaml' +Provides-Extra: tomlkit +Requires-Dist: tomlkit>=0.11.8; extra == 'tomlkit' +Provides-Extra: ujson +Requires-Dist: ujson>=5.7.0; extra == 'ujson' +Description-Content-Type: text/markdown + +# *cattrs*: Flexible Object Serialization and Validation + +*Because validation belongs to the edges.* + +[![Documentation](https://img.shields.io/badge/Docs-Read%20The%20Docs-black)](https://catt.rs/) +[![License: MIT](https://img.shields.io/badge/license-MIT-C06524)](https://github.com/hynek/stamina/blob/main/LICENSE) +[![PyPI](https://img.shields.io/pypi/v/cattrs.svg)](https://pypi.python.org/pypi/cattrs) +[![Supported Python Versions](https://img.shields.io/pypi/pyversions/cattrs.svg)](https://github.com/python-attrs/cattrs) +[![Downloads](https://static.pepy.tech/badge/cattrs/month)](https://pepy.tech/project/cattrs) +[![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/22405310d6a663164d894a2beab4d44d/raw/covbadge.json)](https://github.com/python-attrs/cattrs/actions/workflows/main.yml) + +--- + + + +**cattrs** is a Swiss Army knife for (un)structuring and validating data in Python. +In practice, that means it converts **unstructured dictionaries** into **proper classes** and back, while **validating** their contents. + + + + +## Example + + + +_cattrs_ works best with [_attrs_](https://www.attrs.org/) classes, and [dataclasses](https://docs.python.org/3/library/dataclasses.html) where simple (un-)structuring works out of the box, even for nested data, without polluting your data model with serialization details: + +```python +>>> from attrs import define +>>> from cattrs import structure, unstructure +>>> @define +... class C: +... a: int +... 
b: list[str] +>>> instance = structure({'a': 1, 'b': ['x', 'y']}, C) +>>> instance +C(a=1, b=['x', 'y']) +>>> unstructure(instance) +{'a': 1, 'b': ['x', 'y']} +``` + + + + +Have a look at [*Why *cattrs*?*](https://catt.rs/en/latest/why.html) for more examples! + + + +## Features + +### Recursive Unstructuring + +- _attrs_ classes and dataclasses are converted into dictionaries in a way similar to `attrs.asdict()`, or into tuples in a way similar to `attrs.astuple()`. +- Enumeration instances are converted to their values. +- Other types are let through without conversion. This includes types such as integers, dictionaries, lists and instances of non-_attrs_ classes. +- Custom converters for any type can be registered using `register_unstructure_hook`. + + +### Recursive Structuring + +Converts unstructured data into structured data, recursively, according to your specification given as a type. +The following types are supported: + +- `typing.Optional[T]` and its 3.10+ form, `T | None`. +- `list[T]`, `typing.List[T]`, `typing.MutableSequence[T]`, `typing.Sequence[T]` convert to a lists. +- `tuple` and `typing.Tuple` (both variants, `tuple[T, ...]` and `tuple[X, Y, Z]`). +- `set[T]`, `typing.MutableSet[T]`, and `typing.Set[T]` convert to a sets. +- `frozenset[T]`, and `typing.FrozenSet[T]` convert to a frozensets. +- `dict[K, V]`, `typing.Dict[K, V]`, `typing.MutableMapping[K, V]`, and `typing.Mapping[K, V]` convert to a dictionaries. +- [`typing.TypedDict`](https://docs.python.org/3/library/typing.html#typing.TypedDict), ordinary and generic. +- [`typing.NewType`](https://docs.python.org/3/library/typing.html#newtype) +- [PEP 695 type aliases](https://docs.python.org/3/library/typing.html#type-aliases) on 3.12+ +- _attrs_ classes with simple attributes and the usual `__init__`[^simple]. +- All _attrs_ classes and dataclasses with the usual `__init__`, if their complex attributes have type metadata. +- Unions of supported _attrs_ classes, given that all of the classes have a unique field. +- Unions of anything, if you provide a disambiguation function for it. +- Custom converters for any type can be registered using `register_structure_hook`. + +[^simple]: Simple attributes are attributes that can be assigned unstructured data, like numbers, strings, and collections of unstructured data. + + +### Batteries Included + +_cattrs_ comes with pre-configured converters for a number of serialization libraries, including JSON (standard library, [_orjson_](https://pypi.org/project/orjson/), [UltraJSON](https://pypi.org/project/ujson/)), [_msgpack_](https://pypi.org/project/msgpack/), [_cbor2_](https://pypi.org/project/cbor2/), [_bson_](https://pypi.org/project/bson/), [PyYAML](https://pypi.org/project/PyYAML/), [_tomlkit_](https://pypi.org/project/tomlkit/) and [_msgspec_](https://pypi.org/project/msgspec/) (supports only JSON at this time). + +For details, see the [cattrs.preconf package](https://catt.rs/en/stable/preconf.html). + + +## Design Decisions + +_cattrs_ is based on a few fundamental design decisions: + +- Un/structuring rules are separate from the models. + This allows models to have a one-to-many relationship with un/structuring rules, and to create un/structuring rules for models which you do not own and you cannot change. + (_cattrs_ can be configured to use un/structuring rules from models using the [`use_class_methods` strategy](https://catt.rs/en/latest/strategies.html#using-class-specific-structure-and-unstructure-methods).) 
+- Invent as little as possible; reuse existing ordinary Python instead. + For example, _cattrs_ did not have a custom exception type to group exceptions until the sanctioned Python [`exceptiongroups`](https://docs.python.org/3/library/exceptions.html#ExceptionGroup). + A side-effect of this design decision is that, in a lot of cases, when you're solving _cattrs_ problems you're actually learning Python instead of learning _cattrs_. +- Resist the temptation to guess. + If there are two ways of solving a problem, _cattrs_ should refuse to guess and let the user configure it themselves. + +A foolish consistency is the hobgoblin of little minds, so these decisions can and are sometimes broken, but they have proven to be a good foundation. + + + + +## Credits + +Major credits to Hynek Schlawack for creating [attrs](https://attrs.org) and its predecessor, [characteristic](https://github.com/hynek/characteristic). + +_cattrs_ is tested with [Hypothesis](http://hypothesis.readthedocs.io/en/latest/), by David R. MacIver. + +_cattrs_ is benchmarked using [perf](https://github.com/haypo/perf) and [pytest-benchmark](https://pytest-benchmark.readthedocs.io/en/latest/index.html). + +This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [`audreyr/cookiecutter-pypackage`](https://github.com/audreyr/cookiecutter-pypackage) project template. diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD new file mode 100644 index 0000000..f7b3ae3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD @@ -0,0 +1,96 @@ +cattr/__init__.py,sha256=pODFKaZ7MisyHe_XPc9X6KKG73mqduHUvQO142XwijY,906 +cattr/__pycache__/__init__.cpython-311.pyc,, +cattr/__pycache__/converters.cpython-311.pyc,, +cattr/__pycache__/disambiguators.cpython-311.pyc,, +cattr/__pycache__/dispatch.cpython-311.pyc,, +cattr/__pycache__/errors.cpython-311.pyc,, +cattr/__pycache__/gen.cpython-311.pyc,, +cattr/converters.py,sha256=rQhY4J8r7QTZh5WICuFe4GWO1v0DS3DgQ9r569zd6jg,192 +cattr/disambiguators.py,sha256=ugD1fq1Z5x1pGu5P1lMzcT-IEi1q7IfQJIHEdmg62vM,103 +cattr/dispatch.py,sha256=uVEOgHWR9Hn5tm-wIw-bDccqrxJByVi8yRKaYyvL67k,125 +cattr/errors.py,sha256=V4RhoCObwGrlaM3oyn1H_FYxGR8iAB9dG5NxFDYM548,343 +cattr/gen.py,sha256=hWyKoZ_d2D36Jz_npspyGw8s9pWtUA69sXf0R3uOvgM,597 +cattr/preconf/__init__.py,sha256=NqPE7uhVfcP-PggkUpsbfAutMo8oHjcoB1cvjgLft-s,78 +cattr/preconf/__pycache__/__init__.cpython-311.pyc,, +cattr/preconf/__pycache__/bson.cpython-311.pyc,, +cattr/preconf/__pycache__/json.cpython-311.pyc,, +cattr/preconf/__pycache__/msgpack.cpython-311.pyc,, +cattr/preconf/__pycache__/orjson.cpython-311.pyc,, +cattr/preconf/__pycache__/pyyaml.cpython-311.pyc,, +cattr/preconf/__pycache__/tomlkit.cpython-311.pyc,, +cattr/preconf/__pycache__/ujson.cpython-311.pyc,, +cattr/preconf/bson.py,sha256=Bn4hJxac7OthGg_CR4LCPeBp_fz4kx3QniBVOZhguGs,195 +cattr/preconf/json.py,sha256=HBxWOTqKI7HOlmt-GnN6_wjQz1VphRi70sAOEbx0A2Y,206 +cattr/preconf/msgpack.py,sha256=VXqynPel11_lX8uTg84-u27LQhCqL1OoiF-lTqnoAkQ,207 +cattr/preconf/orjson.py,sha256=fs8qDPDYSBba9D8ib9Df1WVZ8iZaRPQq7kDigAxp14E,203 +cattr/preconf/pyyaml.py,sha256=lhuKwHrcvr16WOtdW4Q0mgIRzB90v1hwZkFXtPKOvAw,203 +cattr/preconf/tomlkit.py,sha256=rk393txIBHeWR66LfnATPh9Im1EFAHPJvSEGGSP2c-8,207 +cattr/preconf/ujson.py,sha256=r6ufraKDqmKdetNZUKxLYVSGmuJ-ckc-UjGYvCamr9k,199 +cattr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+cattrs-24.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cattrs-24.1.2.dist-info/METADATA,sha256=Dw1BXPd1jf0ooO8yiPhPNKrkXvGklnIuiYPdELv-Ohk,8420 +cattrs-24.1.2.dist-info/RECORD,, +cattrs-24.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cattrs-24.1.2.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +cattrs-24.1.2.dist-info/licenses/LICENSE,sha256=9fudHt43qIykf0IMSZ3KD0oFvJk-Esd9I1IKrSkcAb8,1074 +cattrs/__init__.py,sha256=peO0_Q9AEguPCMjXlRH-Nj0CahcCw5CJmpnpKxsWKSQ,1835 +cattrs/__pycache__/__init__.cpython-311.pyc,, +cattrs/__pycache__/_compat.cpython-311.pyc,, +cattrs/__pycache__/_generics.cpython-311.pyc,, +cattrs/__pycache__/cols.cpython-311.pyc,, +cattrs/__pycache__/converters.cpython-311.pyc,, +cattrs/__pycache__/disambiguators.cpython-311.pyc,, +cattrs/__pycache__/dispatch.cpython-311.pyc,, +cattrs/__pycache__/errors.cpython-311.pyc,, +cattrs/__pycache__/fns.cpython-311.pyc,, +cattrs/__pycache__/v.cpython-311.pyc,, +cattrs/_compat.py,sha256=DmHUZNi_MnI2UKvNPxwr77zuMs5tl3zDM4rdJK7kJiI,17620 +cattrs/_generics.py,sha256=ymyDdLjXoYi_XPBA_f_-xJC7Bc8RGqoUcdlwTbB7xl8,718 +cattrs/cols.py,sha256=sB9NTOp8pGLMUxVicSHWpcX_4czrD1g5MdCJO0Ko5s0,8433 +cattrs/converters.py,sha256=nMxuapDj3Q75oW4sVXnYdIeHhodwzLNUcDcaIfKMLQM,53916 +cattrs/disambiguators.py,sha256=ljl73QtSB3MAGcl7-phAUR66b4yx_1ORYLb5fUgW8bY,6825 +cattrs/dispatch.py,sha256=fEE100tCqcqC_wl5y2FCdVEocLOuDlys0sduJrTfmB4,6810 +cattrs/errors.py,sha256=rHps9Qp7SoRafb2VuAkMbhsQf4pq87gX1SzM-jluMsE,4070 +cattrs/fns.py,sha256=xQceStzW4qLiMTJgGM-pVUudGwHm0Hin8oCYe1feS5c,633 +cattrs/gen/__init__.py,sha256=yBOs4V1SQ6RAPFSGyIkwi4ZEU7fqA_nQrH6ujgT88eI,38527 +cattrs/gen/__pycache__/__init__.cpython-311.pyc,, +cattrs/gen/__pycache__/_consts.cpython-311.pyc,, +cattrs/gen/__pycache__/_generics.cpython-311.pyc,, +cattrs/gen/__pycache__/_lc.cpython-311.pyc,, +cattrs/gen/__pycache__/_shared.cpython-311.pyc,, +cattrs/gen/__pycache__/typeddicts.cpython-311.pyc,, +cattrs/gen/_consts.py,sha256=ZwT_m2J3S7p-UjltpbA1WtfQZLNj9KhmFYCAv6Zl-g0,511 +cattrs/gen/_generics.py,sha256=_DyXCGql2QIxGhAv3_B1hsi80uPK8PhK2hhZa95YOlo,3011 +cattrs/gen/_lc.py,sha256=ktP5F9oOUo4HpZ4-hlLliLPzr8XjFi31EXMl8YMMs-g,906 +cattrs/gen/_shared.py,sha256=4yX9-TD5yyVzDWlSjkECrQV5B82xHUeBt9n2N5UgOAE,2064 +cattrs/gen/typeddicts.py,sha256=C3Bp8tNM-MI7L7KO0X3sfwSkG5d0ua3j7qDtvcCEBQk,22004 +cattrs/preconf/__init__.py,sha256=dfkUXoU47ZJfmoKX9FsnARKqLlgJeBjMxORMzxrbKbs,604 +cattrs/preconf/__pycache__/__init__.cpython-311.pyc,, +cattrs/preconf/__pycache__/bson.cpython-311.pyc,, +cattrs/preconf/__pycache__/cbor2.cpython-311.pyc,, +cattrs/preconf/__pycache__/json.cpython-311.pyc,, +cattrs/preconf/__pycache__/msgpack.cpython-311.pyc,, +cattrs/preconf/__pycache__/msgspec.cpython-311.pyc,, +cattrs/preconf/__pycache__/orjson.cpython-311.pyc,, +cattrs/preconf/__pycache__/pyyaml.cpython-311.pyc,, +cattrs/preconf/__pycache__/tomlkit.cpython-311.pyc,, +cattrs/preconf/__pycache__/ujson.cpython-311.pyc,, +cattrs/preconf/bson.py,sha256=uBRpTVfwGZ-qfuDYGwsl8eXokVAmcVBedKQPGUmamhc,3656 +cattrs/preconf/cbor2.py,sha256=ANfQUXgs7pyU5-4_2hYmcqUxzQZhWhFzrk_0y6b1yYw,1635 +cattrs/preconf/json.py,sha256=CSU5RosdYyg6cIOpaohgZVfdMtOtKjZlSg837fW4fTw,2035 +cattrs/preconf/msgpack.py,sha256=cgwX_ARi_swQjG6hwa9j-n7FUynLNWIMVLouz_VoTuw,1753 +cattrs/preconf/msgspec.py,sha256=f8J04RXv8UErKAwwzVs1cMbvoM-9erMmmF49zKBbCDo,6343 +cattrs/preconf/orjson.py,sha256=RZ8DI-4K7Xi0QdpIihT9I3Cm-O8Aq8_MTt2R3a4fgEk,3241 
+cattrs/preconf/pyyaml.py,sha256=Ga96zLypn2DglTgbrb9h3jcuH-caur_UQI1ADo-ynUA,2298 +cattrs/preconf/tomlkit.py,sha256=2k-BN0ZW3faWmHcMQ1bCvsKCClhdgSjTe056O1xEc4o,3060 +cattrs/preconf/ujson.py,sha256=JBh5dWluwMwKhAJPINJhpse_aQ1p9hzrGo8BuvmG6S0,1863 +cattrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cattrs/strategies/__init__.py,sha256=nkZWCzSRYcS-75FMfk52mioZSuWykaN8hB39Vig5Xkg,339 +cattrs/strategies/__pycache__/__init__.cpython-311.pyc,, +cattrs/strategies/__pycache__/_class_methods.cpython-311.pyc,, +cattrs/strategies/__pycache__/_subclasses.cpython-311.pyc,, +cattrs/strategies/__pycache__/_unions.cpython-311.pyc,, +cattrs/strategies/_class_methods.py,sha256=vfiE3wKm04oc-3T9hchsIyhVzpMpJRdgTbujKsWyVpQ,2597 +cattrs/strategies/_subclasses.py,sha256=zzhLl7fSZlmlBuBY-rPX7L1d_C5tiDFDBmUTeRpG2uI,9204 +cattrs/strategies/_unions.py,sha256=l8CjVVFAwftkBa47g3m2KgtQ_b42Wnv-KwYY_LHReCA,9166 +cattrs/v.py,sha256=cTYt0EW8yr-gzKynw4_XjFv3RLpAF8IebvOb612l9QE,4399 diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL new file mode 100644 index 0000000..cdd68a4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE new file mode 100644 index 0000000..340022c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE @@ -0,0 +1,11 @@ + +MIT License + +Copyright (c) 2016, Tin Tvrtković + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store new file mode 100644 index 0000000..5aad9a2 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py new file mode 100644 index 0000000..db49636 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py @@ -0,0 +1,55 @@ +from typing import Final + +from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy +from .errors import ( + AttributeValidationNote, + BaseValidationError, + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + IterableValidationNote, + StructureHandlerNotFoundError, +) +from .gen import override +from .v import transform_error + +__all__ = [ + "structure", + "unstructure", + "get_structure_hook", + "get_unstructure_hook", + "register_structure_hook_func", + "register_structure_hook", + "register_unstructure_hook_func", + "register_unstructure_hook", + "structure_attrs_fromdict", + "structure_attrs_fromtuple", + "global_converter", + "BaseConverter", + "Converter", + "AttributeValidationNote", + "BaseValidationError", + "ClassValidationError", + "ForbiddenExtraKeysError", + "GenConverter", + "IterableValidationError", + "IterableValidationNote", + "override", + "StructureHandlerNotFoundError", + "transform_error", + "UnstructureStrategy", +] + +#: The global converter. Prefer creating your own if customizations are required. +global_converter: Final = Converter() + +unstructure = global_converter.unstructure +structure = global_converter.structure +structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple +structure_attrs_fromdict = global_converter.structure_attrs_fromdict +register_structure_hook = global_converter.register_structure_hook +register_structure_hook_func = global_converter.register_structure_hook_func +register_unstructure_hook = global_converter.register_unstructure_hook +register_unstructure_hook_func = global_converter.register_unstructure_hook_func +get_structure_hook: Final = global_converter.get_structure_hook +get_unstructure_hook: Final = global_converter.get_unstructure_hook diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py new file mode 100644 index 0000000..027ef47 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py @@ -0,0 +1,578 @@ +import sys +from collections import deque +from collections.abc import Mapping as AbcMapping +from collections.abc import MutableMapping as AbcMutableMapping +from collections.abc import MutableSet as AbcMutableSet +from collections.abc import Set as AbcSet +from dataclasses import MISSING, Field, is_dataclass +from dataclasses import fields as dataclass_fields +from functools import partial +from inspect import signature as _signature +from typing import AbstractSet as TypingAbstractSet +from typing import ( + Any, + Deque, + Dict, + Final, + FrozenSet, + List, + Literal, + NewType, + Optional, + Protocol, + Tuple, + Type, + Union, + get_args, + get_origin, + get_type_hints, +) +from typing import Mapping as TypingMapping +from typing import MutableMapping as TypingMutableMapping +from typing import MutableSequence as TypingMutableSequence +from typing import MutableSet as TypingMutableSet +from typing import Sequence as TypingSequence +from typing import Set as TypingSet + +from attrs 
import NOTHING, Attribute, Factory, resolve_types +from attrs import fields as attrs_fields +from attrs import fields_dict as attrs_fields_dict + +__all__ = [ + "ANIES", + "adapted_fields", + "fields_dict", + "ExceptionGroup", + "ExtensionsTypedDict", + "get_type_alias_base", + "has", + "is_type_alias", + "is_typeddict", + "TypeAlias", + "TypedDict", +] + +try: + from typing_extensions import TypedDict as ExtensionsTypedDict +except ImportError: # pragma: no cover + ExtensionsTypedDict = None + +if sys.version_info >= (3, 11): + from builtins import ExceptionGroup +else: + from exceptiongroup import ExceptionGroup + +try: + from typing_extensions import is_typeddict as _is_typeddict +except ImportError: # pragma: no cover + assert sys.version_info >= (3, 10) + from typing import is_typeddict as _is_typeddict + +try: + from typing_extensions import TypeAlias +except ImportError: # pragma: no cover + assert sys.version_info >= (3, 11) + from typing import TypeAlias + +LITERALS = {Literal} +try: + from typing_extensions import Literal as teLiteral + + LITERALS.add(teLiteral) +except ImportError: # pragma: no cover + pass + +# On some Python versions, `typing_extensions.Any` is different than +# `typing.Any`. +try: + from typing_extensions import Any as teAny + + ANIES = frozenset([Any, teAny]) +except ImportError: # pragma: no cover + ANIES = frozenset([Any]) + +NoneType = type(None) + + +def is_optional(typ: Type) -> bool: + return is_union_type(typ) and NoneType in typ.__args__ and len(typ.__args__) == 2 + + +def is_typeddict(cls): + """Thin wrapper around typing(_extensions).is_typeddict""" + return _is_typeddict(getattr(cls, "__origin__", cls)) + + +def is_type_alias(type: Any) -> bool: + """Is this a PEP 695 type alias?""" + return False + + +def get_type_alias_base(type: Any) -> Any: + """ + What is this a type alias of? + + Works only on 3.12+. + """ + return type.__value__ + + +def has(cls): + return hasattr(cls, "__attrs_attrs__") or hasattr(cls, "__dataclass_fields__") + + +def has_with_generic(cls): + """Test whether the class if a normal or generic attrs or dataclass.""" + return has(cls) or has(get_origin(cls)) + + +def fields(type): + try: + return type.__attrs_attrs__ + except AttributeError: + return dataclass_fields(type) + + +def fields_dict(type) -> Dict[str, Union[Attribute, Field]]: + """Return the fields_dict for attrs and dataclasses.""" + if is_dataclass(type): + return {f.name: f for f in dataclass_fields(type)} + return attrs_fields_dict(type) + + +def adapted_fields(cl) -> List[Attribute]: + """Return the attrs format of `fields()` for attrs and dataclasses.""" + if is_dataclass(cl): + attrs = dataclass_fields(cl) + if any(isinstance(a.type, str) for a in attrs): + # Do this conditionally in case `get_type_hints` fails, so + # users can resolve on their own first. + type_hints = get_type_hints(cl) + else: + type_hints = {} + return [ + Attribute( + attr.name, + ( + attr.default + if attr.default is not MISSING + else ( + Factory(attr.default_factory) + if attr.default_factory is not MISSING + else NOTHING + ) + ), + None, + True, + None, + True, + attr.init, + True, + type=type_hints.get(attr.name, attr.type), + alias=attr.name, + ) + for attr in attrs + ] + attribs = attrs_fields(cl) + if any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. 
+ resolve_types(cl) + attribs = attrs_fields(cl) + return attribs + + +def is_subclass(obj: type, bases) -> bool: + """A safe version of issubclass (won't raise).""" + try: + return issubclass(obj, bases) + except TypeError: + return False + + +def is_hetero_tuple(type: Any) -> bool: + origin = getattr(type, "__origin__", None) + return origin is tuple and ... not in type.__args__ + + +def is_protocol(type: Any) -> bool: + return is_subclass(type, Protocol) and getattr(type, "_is_protocol", False) + + +def is_bare_final(type) -> bool: + return type is Final + + +def get_final_base(type) -> Optional[type]: + """Return the base of the Final annotation, if it is Final.""" + if type is Final: + return Any + if type.__class__ is _GenericAlias and type.__origin__ is Final: + return type.__args__[0] + return None + + +OriginAbstractSet = AbcSet +OriginMutableSet = AbcMutableSet + +signature = _signature + +if sys.version_info >= (3, 10): + signature = partial(_signature, eval_str=True) + +if sys.version_info >= (3, 9): + from collections import Counter + from collections.abc import MutableSequence as AbcMutableSequence + from collections.abc import MutableSet as AbcMutableSet + from collections.abc import Sequence as AbcSequence + from collections.abc import Set as AbcSet + from types import GenericAlias + from typing import ( + Annotated, + Generic, + TypedDict, + Union, + _AnnotatedAlias, + _GenericAlias, + _SpecialGenericAlias, + _UnionGenericAlias, + ) + from typing import Counter as TypingCounter + + try: + # Not present on 3.9.0, so we try carefully. + from typing import _LiteralGenericAlias + + def is_literal(type) -> bool: + return type in LITERALS or ( + isinstance( + type, (_GenericAlias, _LiteralGenericAlias, _SpecialGenericAlias) + ) + and type.__origin__ in LITERALS + ) + + except ImportError: # pragma: no cover + + def is_literal(_) -> bool: + return False + + Set = AbcSet + AbstractSet = AbcSet + MutableSet = AbcMutableSet + Sequence = AbcSequence + MutableSequence = AbcMutableSequence + MutableMapping = AbcMutableMapping + Mapping = AbcMapping + FrozenSetSubscriptable = frozenset + TupleSubscriptable = tuple + + def is_annotated(type) -> bool: + return getattr(type, "__class__", None) is _AnnotatedAlias + + def is_tuple(type): + return ( + type in (Tuple, tuple) + or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, Tuple)) + or (getattr(type, "__origin__", None) is tuple) + ) + + if sys.version_info >= (3, 12): + from typing import TypeAliasType + + def is_type_alias(type: Any) -> bool: + """Is this a PEP 695 type alias?""" + return isinstance(type, TypeAliasType) + + if sys.version_info >= (3, 10): + + def is_union_type(obj): + from types import UnionType + + return ( + obj is Union + or (isinstance(obj, _UnionGenericAlias) and obj.__origin__ is Union) + or isinstance(obj, UnionType) + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + if typ is NewType or isinstance(typ, NewType): + return typ.__supertype__ + return None + + if sys.version_info >= (3, 11): + from typing import NotRequired, Required + else: + from typing_extensions import NotRequired, Required + + else: + from typing_extensions import NotRequired, Required + + def is_union_type(obj): + return ( + obj is Union + or isinstance(obj, _UnionGenericAlias) + and obj.__origin__ is Union + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + supertype = getattr(typ, "__supertype__", None) + if ( + supertype is not None + and getattr(typ, "__qualname__", "") == "NewType..new_type" + and 
typ.__module__ in ("typing", "typing_extensions") + ): + return supertype + return None + + def get_notrequired_base(type) -> "Union[Any, Literal[NOTHING]]": + if is_annotated(type): + # Handle `Annotated[NotRequired[int]]` + type = get_args(type)[0] + if get_origin(type) in (NotRequired, Required): + return get_args(type)[0] + return NOTHING + + def is_sequence(type: Any) -> bool: + """A predicate function for sequences. + + Matches lists, sequences, mutable sequences, deques and homogenous + tuples. + """ + origin = getattr(type, "__origin__", None) + return ( + type + in ( + List, + list, + TypingSequence, + TypingMutableSequence, + AbcMutableSequence, + tuple, + Tuple, + deque, + Deque, + ) + or ( + type.__class__ is _GenericAlias + and ( + (origin is not tuple) + and is_subclass(origin, TypingSequence) + or origin is tuple + and type.__args__[1] is ... + ) + ) + or (origin in (list, deque, AbcMutableSequence, AbcSequence)) + or (origin is tuple and type.__args__[1] is ...) + ) + + def is_deque(type): + return ( + type in (deque, Deque) + or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, deque)) + or (getattr(type, "__origin__", None) is deque) + ) + + def is_mutable_set(type: Any) -> bool: + """A predicate function for (mutable) sets. + + Matches built-in sets and sets from the typing module. + """ + return ( + type in (TypingSet, TypingMutableSet, set) + or ( + type.__class__ is _GenericAlias + and is_subclass(type.__origin__, TypingMutableSet) + ) + or (getattr(type, "__origin__", None) in (set, AbcMutableSet, AbcSet)) + ) + + def is_frozenset(type: Any) -> bool: + """A predicate function for frozensets. + + Matches built-in frozensets and frozensets from the typing module. + """ + return ( + type in (FrozenSet, frozenset) + or ( + type.__class__ is _GenericAlias + and is_subclass(type.__origin__, FrozenSet) + ) + or (getattr(type, "__origin__", None) is frozenset) + ) + + def is_bare(type): + return isinstance(type, _SpecialGenericAlias) or ( + not hasattr(type, "__origin__") and not hasattr(type, "__args__") + ) + + def is_mapping(type: Any) -> bool: + """A predicate function for mappings.""" + return ( + type in (dict, Dict, TypingMapping, TypingMutableMapping, AbcMutableMapping) + or ( + type.__class__ is _GenericAlias + and is_subclass(type.__origin__, TypingMapping) + ) + or is_subclass( + getattr(type, "__origin__", type), (dict, AbcMutableMapping, AbcMapping) + ) + ) + + def is_counter(type): + return ( + type in (Counter, TypingCounter) + or getattr(type, "__origin__", None) is Counter + ) + + def is_generic(type) -> bool: + """Whether `type` is a generic type.""" + # Inheriting from protocol will inject `Generic` into the MRO + # without `__orig_bases__`. + return isinstance(type, (_GenericAlias, GenericAlias)) or ( + is_subclass(type, Generic) and hasattr(type, "__orig_bases__") + ) + + def copy_with(type, args): + """Replace a generic type's arguments.""" + if is_annotated(type): + # typing.Annotated requires a special case. + return Annotated[args] + if isinstance(args, tuple) and len(args) == 1: + # Some annotations can't handle 1-tuples. 
+ args = args[0] + return type.__origin__[args] + + def get_full_type_hints(obj, globalns=None, localns=None): + return get_type_hints(obj, globalns, localns, include_extras=True) + +else: + # 3.8 + Set = TypingSet + AbstractSet = TypingAbstractSet + MutableSet = TypingMutableSet + + Sequence = TypingSequence + MutableSequence = TypingMutableSequence + MutableMapping = TypingMutableMapping + Mapping = TypingMapping + FrozenSetSubscriptable = FrozenSet + TupleSubscriptable = Tuple + + from collections import Counter as ColCounter + from typing import Counter, Generic, TypedDict, Union, _GenericAlias + + from typing_extensions import Annotated, NotRequired, Required + from typing_extensions import get_origin as te_get_origin + + def is_annotated(type) -> bool: + return te_get_origin(type) is Annotated + + def is_tuple(type): + return type in (Tuple, tuple) or ( + type.__class__ is _GenericAlias and is_subclass(type.__origin__, Tuple) + ) + + def is_union_type(obj): + return ( + obj is Union or isinstance(obj, _GenericAlias) and obj.__origin__ is Union + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + supertype = getattr(typ, "__supertype__", None) + if ( + supertype is not None + and getattr(typ, "__qualname__", "") == "NewType..new_type" + and typ.__module__ in ("typing", "typing_extensions") + ): + return supertype + return None + + def is_sequence(type: Any) -> bool: + return type in (List, list, Tuple, tuple) or ( + type.__class__ is _GenericAlias + and ( + type.__origin__ not in (Union, Tuple, tuple) + and is_subclass(type.__origin__, TypingSequence) + ) + or (type.__origin__ in (Tuple, tuple) and type.__args__[1] is ...) + ) + + def is_deque(type: Any) -> bool: + return ( + type in (deque, Deque) + or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, deque)) + or type.__origin__ is deque + ) + + def is_mutable_set(type) -> bool: + return type in (set, TypingAbstractSet) or ( + type.__class__ is _GenericAlias + and is_subclass(type.__origin__, (MutableSet, TypingAbstractSet)) + ) + + def is_frozenset(type): + return type is frozenset or ( + type.__class__ is _GenericAlias and is_subclass(type.__origin__, FrozenSet) + ) + + def is_mapping(type: Any) -> bool: + """A predicate function for mappings.""" + return ( + type in (TypingMapping, dict) + or ( + type.__class__ is _GenericAlias + and is_subclass(type.__origin__, TypingMapping) + ) + or is_subclass( + getattr(type, "__origin__", type), (dict, AbcMutableMapping, AbcMapping) + ) + ) + + bare_generic_args = { + List.__args__, + TypingSequence.__args__, + TypingMapping.__args__, + Dict.__args__, + TypingMutableSequence.__args__, + Tuple.__args__, + None, # non-parametrized containers do not have `__args__ attribute in py3.7-8 + } + + def is_bare(type): + return getattr(type, "__args__", None) in bare_generic_args + + def is_counter(type): + return ( + type in (Counter, ColCounter) + or getattr(type, "__origin__", None) is ColCounter + ) + + def is_literal(type) -> bool: + return type in LITERALS or ( + isinstance(type, _GenericAlias) and type.__origin__ in LITERALS + ) + + def is_generic(obj): + return isinstance(obj, _GenericAlias) or ( + is_subclass(obj, Generic) and hasattr(obj, "__orig_bases__") + ) + + def copy_with(type, args): + """Replace a generic type's arguments.""" + return type.copy_with(args) + + def get_notrequired_base(type) -> "Union[Any, Literal[NOTHING]]": + if is_annotated(type): + # Handle `Annotated[NotRequired[int]]` + type = get_origin(type) + + if get_origin(type) in (NotRequired, 
Required): + return get_args(type)[0] + return NOTHING + + def get_full_type_hints(obj, globalns=None, localns=None): + return get_type_hints(obj, globalns, localns) + + +def is_generic_attrs(type) -> bool: + """Return True for both specialized (A[int]) and unspecialized (A) generics.""" + return is_generic(type) and has(type.__origin__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py new file mode 100644 index 0000000..c473f43 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py @@ -0,0 +1,24 @@ +from typing import Any, Mapping + +from ._compat import copy_with, get_args, is_annotated, is_generic + + +def deep_copy_with(t, mapping: Mapping[str, Any]): + args = get_args(t) + rest = () + if is_annotated(t) and args: + # If we're dealing with `Annotated`, we only map the first type parameter + rest = tuple(args[1:]) + args = (args[0],) + new_args = ( + tuple( + ( + mapping[a.__name__] + if hasattr(a, "__name__") and a.__name__ in mapping + else (deep_copy_with(a, mapping) if is_generic(a) else a) + ) + for a in args + ) + + rest + ) + return copy_with(t, new_args) if new_args != args else t diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py new file mode 100644 index 0000000..8ff5c0f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py @@ -0,0 +1,289 @@ +"""Utility functions for collections.""" + +from __future__ import annotations + +from sys import version_info +from typing import ( + TYPE_CHECKING, + Any, + Iterable, + Literal, + NamedTuple, + Tuple, + TypeVar, + get_type_hints, +) + +from attrs import NOTHING, Attribute + +from ._compat import ANIES, is_bare, is_frozenset, is_mapping, is_sequence, is_subclass +from ._compat import is_mutable_set as is_set +from .dispatch import StructureHook, UnstructureHook +from .errors import IterableValidationError, IterableValidationNote +from .fns import identity +from .gen import ( + AttributeOverride, + already_generating, + make_dict_structure_fn_from_attrs, + make_dict_unstructure_fn_from_attrs, + make_hetero_tuple_unstructure_fn, + mapping_structure_factory, +) +from .gen import make_iterable_unstructure_fn as iterable_unstructure_factory + +if TYPE_CHECKING: + from .converters import BaseConverter + +__all__ = [ + "is_any_set", + "is_frozenset", + "is_namedtuple", + "is_mapping", + "is_set", + "is_sequence", + "iterable_unstructure_factory", + "list_structure_factory", + "namedtuple_structure_factory", + "namedtuple_unstructure_factory", + "namedtuple_dict_structure_factory", + "namedtuple_dict_unstructure_factory", + "mapping_structure_factory", +] + + +def is_any_set(type) -> bool: + """A predicate function for both mutable and frozensets.""" + return is_set(type) or is_frozenset(type) + + +if version_info[:2] >= (3, 9): + + def is_namedtuple(type: Any) -> bool: + """A predicate function for named tuples.""" + + if is_subclass(type, tuple): + for cl in type.mro(): + orig_bases = cl.__dict__.get("__orig_bases__", ()) + if NamedTuple in orig_bases: + return True + return False + +else: + + def is_namedtuple(type: Any) -> bool: + """A predicate function for named tuples.""" + # This is tricky. It may not be possible for this function to be 100% + # accurate, since it doesn't seem like we can distinguish between tuple + # subclasses and named tuples reliably. + + if is_subclass(type, tuple): + for cl in type.mro(): + if cl is tuple: + # No point going further. 
+ break + if "_fields" in cl.__dict__: + return True + return False + + +def _is_passthrough(type: type[tuple], converter: BaseConverter) -> bool: + """If all fields would be passed through, this class should not be processed + either. + """ + return all( + converter.get_unstructure_hook(t) == identity + for t in type.__annotations__.values() + ) + + +T = TypeVar("T") + + +def list_structure_factory(type: type, converter: BaseConverter) -> StructureHook: + """A hook factory for structuring lists. + + Converts any given iterable into a list. + """ + + if is_bare(type) or type.__args__[0] in ANIES: + + def structure_list(obj: Iterable[T], _: type = type) -> list[T]: + return list(obj) + + return structure_list + + elem_type = type.__args__[0] + + try: + handler = converter.get_structure_hook(elem_type) + except RecursionError: + # Break the cycle by using late binding. + handler = converter.structure + + if converter.detailed_validation: + + def structure_list( + obj: Iterable[T], _: type = type, _handler=handler, _elem_type=elem_type + ) -> list[T]: + errors = [] + res = [] + ix = 0 # Avoid `enumerate` for performance. + for e in obj: + try: + res.append(handler(e, _elem_type)) + except Exception as e: + msg = IterableValidationNote( + f"Structuring {type} @ index {ix}", ix, elem_type + ) + e.__notes__ = [*getattr(e, "__notes__", []), msg] + errors.append(e) + finally: + ix += 1 + if errors: + raise IterableValidationError( + f"While structuring {type!r}", errors, type + ) + + return res + + else: + + def structure_list( + obj: Iterable[T], _: type = type, _handler=handler, _elem_type=elem_type + ) -> list[T]: + return [_handler(e, _elem_type) for e in obj] + + return structure_list + + +def namedtuple_unstructure_factory( + cl: type[tuple], converter: BaseConverter, unstructure_to: Any = None +) -> UnstructureHook: + """A hook factory for unstructuring namedtuples. + + :param unstructure_to: Force unstructuring to this type, if provided. + """ + + if unstructure_to is None and _is_passthrough(cl, converter): + return identity + + return make_hetero_tuple_unstructure_fn( + cl, + converter, + unstructure_to=tuple if unstructure_to is None else unstructure_to, + type_args=tuple(cl.__annotations__.values()), + ) + + +def namedtuple_structure_factory( + cl: type[tuple], converter: BaseConverter +) -> StructureHook: + """A hook factory for structuring namedtuples from iterables.""" + # We delegate to the existing infrastructure for heterogenous tuples. + hetero_tuple_type = Tuple[tuple(cl.__annotations__.values())] + base_hook = converter.get_structure_hook(hetero_tuple_type) + return lambda v, _: cl(*base_hook(v, hetero_tuple_type)) + + +def _namedtuple_to_attrs(cl: type[tuple]) -> list[Attribute]: + """Generate pseudo attributes for a namedtuple.""" + return [ + Attribute( + name, + cl._field_defaults.get(name, NOTHING), + None, + False, + False, + False, + True, + False, + type=a, + alias=name, + ) + for name, a in get_type_hints(cl).items() + ] + + +def namedtuple_dict_structure_factory( + cl: type[tuple], + converter: BaseConverter, + detailed_validation: bool | Literal["from_converter"] = "from_converter", + forbid_extra_keys: bool = False, + use_linecache: bool = True, + /, + **kwargs: AttributeOverride, +) -> StructureHook: + """A hook factory for hooks structuring namedtuples from dictionaries. + + :param forbid_extra_keys: Whether the hook should raise a `ForbiddenExtraKeysError` + if unknown keys are encountered. 
+ :param use_linecache: Whether to store the source code in the Python linecache. + + .. versionadded:: 24.1.0 + """ + try: + working_set = already_generating.working_set + except AttributeError: + working_set = set() + already_generating.working_set = working_set + else: + if cl in working_set: + raise RecursionError() + + working_set.add(cl) + + try: + return make_dict_structure_fn_from_attrs( + _namedtuple_to_attrs(cl), + cl, + converter, + _cattrs_forbid_extra_keys=forbid_extra_keys, + _cattrs_use_detailed_validation=detailed_validation, + _cattrs_use_linecache=use_linecache, + **kwargs, + ) + finally: + working_set.remove(cl) + if not working_set: + del already_generating.working_set + + +def namedtuple_dict_unstructure_factory( + cl: type[tuple], + converter: BaseConverter, + omit_if_default: bool = False, + use_linecache: bool = True, + /, + **kwargs: AttributeOverride, +) -> UnstructureHook: + """A hook factory for hooks unstructuring namedtuples to dictionaries. + + :param omit_if_default: When true, attributes equal to their default values + will be omitted in the result dictionary. + :param use_linecache: Whether to store the source code in the Python linecache. + + .. versionadded:: 24.1.0 + """ + try: + working_set = already_generating.working_set + except AttributeError: + working_set = set() + already_generating.working_set = working_set + if cl in working_set: + raise RecursionError() + + working_set.add(cl) + + try: + return make_dict_unstructure_fn_from_attrs( + _namedtuple_to_attrs(cl), + cl, + converter, + _cattrs_omit_if_default=omit_if_default, + _cattrs_use_linecache=use_linecache, + **kwargs, + ) + finally: + working_set.remove(cl) + if not working_set: + del already_generating.working_set diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py new file mode 100644 index 0000000..1490ec2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py @@ -0,0 +1,1419 @@ +from __future__ import annotations + +from collections import Counter, deque +from collections.abc import Mapping as AbcMapping +from collections.abc import MutableMapping as AbcMutableMapping +from collections.abc import MutableSet as AbcMutableSet +from dataclasses import Field +from enum import Enum +from inspect import Signature +from inspect import signature as inspect_signature +from pathlib import Path +from typing import Any, Callable, Iterable, Optional, Tuple, TypeVar, overload + +from attrs import Attribute, resolve_types +from attrs import has as attrs_has + +from ._compat import ( + ANIES, + FrozenSetSubscriptable, + Mapping, + MutableMapping, + MutableSequence, + NoneType, + OriginAbstractSet, + OriginMutableSet, + Sequence, + Set, + TypeAlias, + fields, + get_final_base, + get_newtype_base, + get_origin, + get_type_alias_base, + has, + has_with_generic, + is_annotated, + is_bare, + is_counter, + is_deque, + is_frozenset, + is_generic, + is_generic_attrs, + is_hetero_tuple, + is_literal, + is_mapping, + is_mutable_set, + is_optional, + is_protocol, + is_sequence, + is_tuple, + is_type_alias, + is_typeddict, + is_union_type, + signature, +) +from .cols import ( + is_namedtuple, + iterable_unstructure_factory, + list_structure_factory, + namedtuple_structure_factory, + namedtuple_unstructure_factory, +) +from .disambiguators import create_default_dis_func, is_supported_union +from .dispatch import ( + HookFactory, + MultiStrategyDispatch, + StructuredValue, + StructureHook, + TargetType, + UnstructuredValue, + 
UnstructureHook, +) +from .errors import ( + IterableValidationError, + IterableValidationNote, + StructureHandlerNotFoundError, +) +from .fns import Predicate, identity, raise_error +from .gen import ( + AttributeOverride, + DictStructureFn, + HeteroTupleUnstructureFn, + IterableUnstructureFn, + MappingStructureFn, + MappingUnstructureFn, + make_dict_structure_fn, + make_dict_unstructure_fn, + make_hetero_tuple_unstructure_fn, + make_mapping_structure_fn, + make_mapping_unstructure_fn, +) +from .gen.typeddicts import make_dict_structure_fn as make_typeddict_dict_struct_fn +from .gen.typeddicts import make_dict_unstructure_fn as make_typeddict_dict_unstruct_fn + +__all__ = ["UnstructureStrategy", "BaseConverter", "Converter", "GenConverter"] + +T = TypeVar("T") +V = TypeVar("V") + +UnstructureHookFactory = TypeVar( + "UnstructureHookFactory", bound=HookFactory[UnstructureHook] +) + +# The Extended factory also takes a converter. +ExtendedUnstructureHookFactory: TypeAlias = Callable[[TargetType, T], UnstructureHook] + +# This typevar for the BaseConverter. +AnyUnstructureHookFactoryBase = TypeVar( + "AnyUnstructureHookFactoryBase", + bound="HookFactory[UnstructureHook] | ExtendedUnstructureHookFactory[BaseConverter]", +) + +# This typevar for the Converter. +AnyUnstructureHookFactory = TypeVar( + "AnyUnstructureHookFactory", + bound="HookFactory[UnstructureHook] | ExtendedUnstructureHookFactory[Converter]", +) + +StructureHookFactory = TypeVar("StructureHookFactory", bound=HookFactory[StructureHook]) + +# The Extended factory also takes a converter. +ExtendedStructureHookFactory: TypeAlias = Callable[[TargetType, T], StructureHook] + +# This typevar for the BaseConverter. +AnyStructureHookFactoryBase = TypeVar( + "AnyStructureHookFactoryBase", + bound="HookFactory[StructureHook] | ExtendedStructureHookFactory[BaseConverter]", +) + +# This typevar for the Converter. +AnyStructureHookFactory = TypeVar( + "AnyStructureHookFactory", + bound="HookFactory[StructureHook] | ExtendedStructureHookFactory[Converter]", +) + +UnstructureHookT = TypeVar("UnstructureHookT", bound=UnstructureHook) +StructureHookT = TypeVar("StructureHookT", bound=StructureHook) + + +class UnstructureStrategy(Enum): + """`attrs` classes unstructuring strategies.""" + + AS_DICT = "asdict" + AS_TUPLE = "astuple" + + +def is_literal_containing_enums(typ: type) -> bool: + return is_literal(typ) and any(isinstance(val, Enum) for val in typ.__args__) + + +def _is_extended_factory(factory: Callable) -> bool: + """Does this factory also accept a converter arg?""" + # We use the original `inspect.signature` to not evaluate string + # annotations. 
+ sig = inspect_signature(factory) + return ( + len(sig.parameters) >= 2 + and (list(sig.parameters.values())[1]).default is Signature.empty + ) + + +class BaseConverter: + """Converts between structured and unstructured data.""" + + __slots__ = ( + "_unstructure_func", + "_unstructure_attrs", + "_structure_attrs", + "_dict_factory", + "_union_struct_registry", + "_structure_func", + "_prefer_attrib_converters", + "detailed_validation", + "_struct_copy_skip", + "_unstruct_copy_skip", + ) + + def __init__( + self, + dict_factory: Callable[[], Any] = dict, + unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT, + prefer_attrib_converters: bool = False, + detailed_validation: bool = True, + unstructure_fallback_factory: HookFactory[UnstructureHook] = lambda _: identity, + structure_fallback_factory: HookFactory[StructureHook] = lambda _: raise_error, + ) -> None: + """ + :param detailed_validation: Whether to use a slightly slower mode for detailed + validation errors. + :param unstructure_fallback_factory: A hook factory to be called when no + registered unstructuring hooks match. + :param structure_fallback_factory: A hook factory to be called when no + registered structuring hooks match. + + .. versionadded:: 23.2.0 *unstructure_fallback_factory* + .. versionadded:: 23.2.0 *structure_fallback_factory* + """ + unstruct_strat = UnstructureStrategy(unstruct_strat) + self._prefer_attrib_converters = prefer_attrib_converters + + self.detailed_validation = detailed_validation + self._union_struct_registry: dict[Any, Callable[[Any, type[T]], T]] = {} + + # Create a per-instance cache. + if unstruct_strat is UnstructureStrategy.AS_DICT: + self._unstructure_attrs = self.unstructure_attrs_asdict + self._structure_attrs = self.structure_attrs_fromdict + else: + self._unstructure_attrs = self.unstructure_attrs_astuple + self._structure_attrs = self.structure_attrs_fromtuple + + self._unstructure_func = MultiStrategyDispatch( + unstructure_fallback_factory, self + ) + self._unstructure_func.register_cls_list( + [(bytes, identity), (str, identity), (Path, str)] + ) + self._unstructure_func.register_func_list( + [ + ( + is_protocol, + lambda o: self.unstructure(o, unstructure_as=o.__class__), + ), + ( + lambda t: get_final_base(t) is not None, + lambda t: self.get_unstructure_hook(get_final_base(t)), + True, + ), + ( + is_type_alias, + lambda t: self.get_unstructure_hook(get_type_alias_base(t)), + True, + ), + (is_mapping, self._unstructure_mapping), + (is_sequence, self._unstructure_seq), + (is_mutable_set, self._unstructure_seq), + (is_frozenset, self._unstructure_seq), + (lambda t: issubclass(t, Enum), self._unstructure_enum), + (has, self._unstructure_attrs), + (is_union_type, self._unstructure_union), + (lambda t: t in ANIES, self.unstructure), + ] + ) + + # Per-instance register of to-attrs converters. + # Singledispatch dispatches based on the first argument, so we + # store the function and switch the arguments in self.loads. 
+ self._structure_func = MultiStrategyDispatch(structure_fallback_factory, self) + self._structure_func.register_func_list( + [ + ( + lambda cl: cl in ANIES or cl is Optional or cl is None, + lambda v, _: v, + ), + (is_generic_attrs, self._gen_structure_generic, True), + (lambda t: get_newtype_base(t) is not None, self._structure_newtype), + (is_type_alias, self._find_type_alias_structure_hook, True), + ( + lambda t: get_final_base(t) is not None, + self._structure_final_factory, + True, + ), + (is_literal, self._structure_simple_literal), + (is_literal_containing_enums, self._structure_enum_literal), + (is_sequence, list_structure_factory, "extended"), + (is_deque, self._structure_deque), + (is_mutable_set, self._structure_set), + (is_frozenset, self._structure_frozenset), + (is_tuple, self._structure_tuple), + (is_namedtuple, namedtuple_structure_factory, "extended"), + (is_mapping, self._structure_dict), + (is_supported_union, self._gen_attrs_union_structure, True), + (is_optional, self._structure_optional), + ( + lambda t: is_union_type(t) and t in self._union_struct_registry, + self._union_struct_registry.__getitem__, + True, + ), + (has, self._structure_attrs), + ] + ) + # Strings are sequences. + self._structure_func.register_cls_list( + [ + (str, self._structure_call), + (bytes, self._structure_call), + (int, self._structure_call), + (float, self._structure_call), + (Enum, self._structure_call), + (Path, self._structure_call), + ] + ) + + self._dict_factory = dict_factory + + self._unstruct_copy_skip = self._unstructure_func.get_num_fns() + self._struct_copy_skip = self._structure_func.get_num_fns() + + def unstructure(self, obj: Any, unstructure_as: Any = None) -> Any: + return self._unstructure_func.dispatch( + obj.__class__ if unstructure_as is None else unstructure_as + )(obj) + + @property + def unstruct_strat(self) -> UnstructureStrategy: + """The default way of unstructuring ``attrs`` classes.""" + return ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ) + + @overload + def register_unstructure_hook(self, cls: UnstructureHookT) -> UnstructureHookT: ... + + @overload + def register_unstructure_hook(self, cls: Any, func: UnstructureHook) -> None: ... + + def register_unstructure_hook( + self, cls: Any = None, func: UnstructureHook | None = None + ) -> Callable[[UnstructureHook]] | None: + """Register a class-to-primitive converter function for a class. + + The converter function should take an instance of the class and return + its Python equivalent. + + May also be used as a decorator. When used as a decorator, the first + argument annotation from the decorated function will be used as the + type to register the hook for. + + .. versionchanged:: 24.1.0 + This method may now be used as a decorator. + """ + if func is None: + # Autodetecting decorator. + func = cls + sig = signature(func) + cls = next(iter(sig.parameters.values())).annotation + self.register_unstructure_hook(cls, func) + + return func + + if attrs_has(cls): + resolve_types(cls) + if is_union_type(cls): + self._unstructure_func.register_func_list([(lambda t: t == cls, func)]) + elif get_newtype_base(cls) is not None: + # This is a newtype, so we handle it specially. 
+ self._unstructure_func.register_func_list([(lambda t: t is cls, func)]) + else: + self._unstructure_func.register_cls_list([(cls, func)]) + return None + + def register_unstructure_hook_func( + self, check_func: Predicate, func: UnstructureHook + ) -> None: + """Register a class-to-primitive converter function for a class, using + a function to check if it's a match. + """ + self._unstructure_func.register_func_list([(check_func, func)]) + + @overload + def register_unstructure_hook_factory( + self, predicate: Predicate + ) -> Callable[[AnyUnstructureHookFactoryBase], AnyUnstructureHookFactoryBase]: ... + + @overload + def register_unstructure_hook_factory( + self, predicate: Predicate, factory: UnstructureHookFactory + ) -> UnstructureHookFactory: ... + + @overload + def register_unstructure_hook_factory( + self, + predicate: Predicate, + factory: ExtendedUnstructureHookFactory[BaseConverter], + ) -> ExtendedUnstructureHookFactory[BaseConverter]: ... + + def register_unstructure_hook_factory(self, predicate, factory=None): + """ + Register a hook factory for a given predicate. + + The hook factory may expose an additional required parameter. In this case, + the current converter will be provided to the hook factory as that + parameter. + + May also be used as a decorator. + + :param predicate: A function that, given a type, returns whether the factory + can produce a hook for that type. + :param factory: A callable that, given a type, produces an unstructuring + hook for that type. This unstructuring hook will be cached. + + .. versionchanged:: 24.1.0 + This method may now be used as a decorator. + The factory may also receive the converter as a second, required argument. + """ + if factory is None: + + def decorator(factory): + # Is this an extended factory (takes a converter too)? + if _is_extended_factory(factory): + self._unstructure_func.register_func_list( + [(predicate, factory, "extended")] + ) + else: + self._unstructure_func.register_func_list( + [(predicate, factory, True)] + ) + + return decorator + + self._unstructure_func.register_func_list( + [ + ( + predicate, + factory, + "extended" if _is_extended_factory(factory) else True, + ) + ] + ) + return factory + + def get_unstructure_hook( + self, type: Any, cache_result: bool = True + ) -> UnstructureHook: + """Get the unstructure hook for the given type. + + This hook can be manually called, or composed with other functions + and re-registered. + + If no hook is registered, the converter unstructure fallback factory + will be used to produce one. + + :param cache: Whether to cache the returned hook. + + .. versionadded:: 24.1.0 + """ + return ( + self._unstructure_func.dispatch(type) + if cache_result + else self._unstructure_func.dispatch_without_caching(type) + ) + + @overload + def register_structure_hook(self, cl: StructureHookT) -> StructureHookT: ... + + @overload + def register_structure_hook(self, cl: Any, func: StructureHook) -> None: ... + + def register_structure_hook( + self, cl: Any, func: StructureHook | None = None + ) -> None: + """Register a primitive-to-class converter function for a type. + + The converter function should take two arguments: + * a Python object to be converted, + * the type to convert to + + and return the instance of the class. The type may seem redundant, but + is sometimes needed (for example, when dealing with generic classes). + + This method may be used as a decorator. 
In this case, the decorated + hook must have a return type annotation, and this annotation will be used + as the type for the hook. + + .. versionchanged:: 24.1.0 + This method may now be used as a decorator. + """ + if func is None: + # The autodetecting decorator. + func = cl + sig = signature(func) + self.register_structure_hook(sig.return_annotation, func) + return func + + if attrs_has(cl): + resolve_types(cl) + if is_union_type(cl): + self._union_struct_registry[cl] = func + self._structure_func.clear_cache() + elif get_newtype_base(cl) is not None: + # This is a newtype, so we handle it specially. + self._structure_func.register_func_list([(lambda t: t is cl, func)]) + else: + self._structure_func.register_cls_list([(cl, func)]) + return None + + def register_structure_hook_func( + self, check_func: Predicate, func: StructureHook + ) -> None: + """Register a class-to-primitive converter function for a class, using + a function to check if it's a match. + """ + self._structure_func.register_func_list([(check_func, func)]) + + @overload + def register_structure_hook_factory( + self, predicate: Predicate + ) -> Callable[[AnyStructureHookFactoryBase], AnyStructureHookFactoryBase]: ... + + @overload + def register_structure_hook_factory( + self, predicate: Predicate, factory: StructureHookFactory + ) -> StructureHookFactory: ... + + @overload + def register_structure_hook_factory( + self, predicate: Predicate, factory: ExtendedStructureHookFactory[BaseConverter] + ) -> ExtendedStructureHookFactory[BaseConverter]: ... + + def register_structure_hook_factory(self, predicate, factory=None): + """ + Register a hook factory for a given predicate. + + The hook factory may expose an additional required parameter. In this case, + the current converter will be provided to the hook factory as that + parameter. + + May also be used as a decorator. + + :param predicate: A function that, given a type, returns whether the factory + can produce a hook for that type. + :param factory: A callable that, given a type, produces a structuring + hook for that type. This structuring hook will be cached. + + .. versionchanged:: 24.1.0 + This method may now be used as a decorator. + The factory may also receive the converter as a second, required argument. + """ + if factory is None: + # Decorator use. + def decorator(factory): + # Is this an extended factory (takes a converter too)? + if _is_extended_factory(factory): + self._structure_func.register_func_list( + [(predicate, factory, "extended")] + ) + else: + self._structure_func.register_func_list( + [(predicate, factory, True)] + ) + + return decorator + self._structure_func.register_func_list( + [ + ( + predicate, + factory, + "extended" if _is_extended_factory(factory) else True, + ) + ] + ) + return factory + + def structure(self, obj: UnstructuredValue, cl: type[T]) -> T: + """Convert unstructured Python data structures to structured data.""" + return self._structure_func.dispatch(cl)(obj, cl) + + def get_structure_hook(self, type: Any, cache_result: bool = True) -> StructureHook: + """Get the structure hook for the given type. + + This hook can be manually called, or composed with other functions + and re-registered. + + If no hook is registered, the converter structure fallback factory + will be used to produce one. + + :param cache: Whether to cache the returned hook. + + .. 
versionadded:: 24.1.0 + """ + return ( + self._structure_func.dispatch(type) + if cache_result + else self._structure_func.dispatch_without_caching(type) + ) + + # Classes to Python primitives. + def unstructure_attrs_asdict(self, obj: Any) -> dict[str, Any]: + """Our version of `attrs.asdict`, so we can call back to us.""" + attrs = fields(obj.__class__) + dispatch = self._unstructure_func.dispatch + rv = self._dict_factory() + for a in attrs: + name = a.name + v = getattr(obj, name) + rv[name] = dispatch(a.type or v.__class__)(v) + return rv + + def unstructure_attrs_astuple(self, obj: Any) -> tuple[Any, ...]: + """Our version of `attrs.astuple`, so we can call back to us.""" + attrs = fields(obj.__class__) + dispatch = self._unstructure_func.dispatch + res = [] + for a in attrs: + name = a.name + v = getattr(obj, name) + res.append(dispatch(a.type or v.__class__)(v)) + return tuple(res) + + def _unstructure_enum(self, obj: Enum) -> Any: + """Convert an enum to its value.""" + return obj.value + + def _unstructure_seq(self, seq: Sequence[T]) -> Sequence[T]: + """Convert a sequence to primitive equivalents.""" + # We can reuse the sequence class, so tuples stay tuples. + dispatch = self._unstructure_func.dispatch + return seq.__class__(dispatch(e.__class__)(e) for e in seq) + + def _unstructure_mapping(self, mapping: Mapping[T, V]) -> Mapping[T, V]: + """Convert a mapping of attr classes to primitive equivalents.""" + + # We can reuse the mapping class, so dicts stay dicts and OrderedDicts + # stay OrderedDicts. + dispatch = self._unstructure_func.dispatch + return mapping.__class__( + (dispatch(k.__class__)(k), dispatch(v.__class__)(v)) + for k, v in mapping.items() + ) + + # note: Use UnionType when 3.11 is released as + # the behaviour of @final is changed. This would + # affect how we can support UnionType in ._compat.py + def _unstructure_union(self, obj: Any) -> Any: + """ + Unstructure an object as a union. + + By default, just unstructures the instance. + """ + return self._unstructure_func.dispatch(obj.__class__)(obj) + + # Python primitives to classes. + + def _gen_structure_generic(self, cl: type[T]) -> DictStructureFn[T]: + """Create and return a hook for structuring generics.""" + return make_dict_structure_fn( + cl, self, _cattrs_prefer_attrib_converters=self._prefer_attrib_converters + ) + + def _gen_attrs_union_structure( + self, cl: Any, use_literals: bool = True + ) -> Callable[[Any, type[T]], type[T] | None]: + """ + Generate a structuring function for a union of attrs classes (and maybe None). + + :param use_literals: Whether to consider literal fields. + """ + dis_fn = self._get_dis_func(cl, use_literals=use_literals) + has_none = NoneType in cl.__args__ + + if has_none: + + def structure_attrs_union(obj, _) -> cl: + if obj is None: + return None + return self.structure(obj, dis_fn(obj)) + + else: + + def structure_attrs_union(obj, _): + return self.structure(obj, dis_fn(obj)) + + return structure_attrs_union + + @staticmethod + def _structure_call(obj: Any, cl: type[T]) -> Any: + """Just call ``cl`` with the given ``obj``. + + This is just an optimization on the ``_structure_default`` case, when + we know we can skip the ``if`` s. Use for ``str``, ``bytes``, ``enum``, + etc. 
+ """ + return cl(obj) + + @staticmethod + def _structure_simple_literal(val, type): + if val not in type.__args__: + raise Exception(f"{val} not in literal {type}") + return val + + @staticmethod + def _structure_enum_literal(val, type): + vals = {(x.value if isinstance(x, Enum) else x): x for x in type.__args__} + try: + return vals[val] + except KeyError: + raise Exception(f"{val} not in literal {type}") from None + + def _structure_newtype(self, val: UnstructuredValue, type) -> StructuredValue: + base = get_newtype_base(type) + return self.get_structure_hook(base)(val, base) + + def _find_type_alias_structure_hook(self, type: Any) -> StructureHook: + base = get_type_alias_base(type) + res = self.get_structure_hook(base) + if res == self._structure_call: + # we need to replace the type arg of `structure_call` + return lambda v, _, __base=base: __base(v) + return lambda v, _, __base=base: res(v, __base) + + def _structure_final_factory(self, type): + base = get_final_base(type) + res = self.get_structure_hook(base) + return lambda v, _, __base=base: res(v, __base) + + # Attrs classes. + + def structure_attrs_fromtuple(self, obj: tuple[Any, ...], cl: type[T]) -> T: + """Load an attrs class from a sequence (tuple).""" + conv_obj = [] # A list of converter parameters. + for a, value in zip(fields(cl), obj): + # We detect the type by the metadata. + converted = self._structure_attribute(a, value) + conv_obj.append(converted) + + return cl(*conv_obj) + + def _structure_attribute(self, a: Attribute | Field, value: Any) -> Any: + """Handle an individual attrs attribute.""" + type_ = a.type + attrib_converter = getattr(a, "converter", None) + if self._prefer_attrib_converters and attrib_converter: + # A attrib converter is defined on this attribute, and + # prefer_attrib_converters is set to give these priority over registered + # structure hooks. So, pass through the raw value, which attrs will flow + # into the converter + return value + if type_ is None: + # No type metadata. + return value + + try: + return self._structure_func.dispatch(type_)(value, type_) + except StructureHandlerNotFoundError: + if attrib_converter: + # Return the original value and fallback to using an attrib converter. + return value + raise + + def structure_attrs_fromdict(self, obj: Mapping[str, Any], cl: type[T]) -> T: + """Instantiate an attrs class from a mapping (dict).""" + # For public use. + + conv_obj = {} # Start with a fresh dict, to ignore extra keys. + for a in fields(cl): + try: + val = obj[a.name] + except KeyError: + continue + + # try .alias and .name because this code also supports dataclasses! + conv_obj[getattr(a, "alias", a.name)] = self._structure_attribute(a, val) + + return cl(**conv_obj) + + def _structure_deque(self, obj: Iterable[T], cl: Any) -> deque[T]: + """Convert an iterable to a potentially generic deque.""" + if is_bare(cl) or cl.__args__[0] in ANIES: + res = deque(obj) + else: + elem_type = cl.__args__[0] + handler = self._structure_func.dispatch(elem_type) + if self.detailed_validation: + errors = [] + res = deque() + ix = 0 # Avoid `enumerate` for performance. 
+ for e in obj: + try: + res.append(handler(e, elem_type)) + except Exception as e: + msg = IterableValidationNote( + f"Structuring {cl} @ index {ix}", ix, elem_type + ) + e.__notes__ = [*getattr(e, "__notes__", []), msg] + errors.append(e) + finally: + ix += 1 + if errors: + raise IterableValidationError( + f"While structuring {cl!r}", errors, cl + ) + else: + res = deque(handler(e, elem_type) for e in obj) + return res + + def _structure_set( + self, obj: Iterable[T], cl: Any, structure_to: type = set + ) -> Set[T]: + """Convert an iterable into a potentially generic set.""" + if is_bare(cl) or cl.__args__[0] in ANIES: + return structure_to(obj) + elem_type = cl.__args__[0] + handler = self._structure_func.dispatch(elem_type) + if self.detailed_validation: + errors = [] + res = set() + ix = 0 + for e in obj: + try: + res.add(handler(e, elem_type)) + except Exception as exc: + msg = IterableValidationNote( + f"Structuring {structure_to.__name__} @ element {e!r}", + ix, + elem_type, + ) + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + finally: + ix += 1 + if errors: + raise IterableValidationError(f"While structuring {cl!r}", errors, cl) + return res if structure_to is set else structure_to(res) + if structure_to is set: + return {handler(e, elem_type) for e in obj} + return structure_to([handler(e, elem_type) for e in obj]) + + def _structure_frozenset( + self, obj: Iterable[T], cl: Any + ) -> FrozenSetSubscriptable[T]: + """Convert an iterable into a potentially generic frozenset.""" + return self._structure_set(obj, cl, structure_to=frozenset) + + def _structure_dict(self, obj: Mapping[T, V], cl: Any) -> dict[T, V]: + """Convert a mapping into a potentially generic dict.""" + if is_bare(cl) or cl.__args__ == (Any, Any): + return dict(obj) + key_type, val_type = cl.__args__ + + if self.detailed_validation: + key_handler = self._structure_func.dispatch(key_type) + val_handler = self._structure_func.dispatch(val_type) + errors = [] + res = {} + + for k, v in obj.items(): + try: + value = val_handler(v, val_type) + except Exception as exc: + msg = IterableValidationNote( + f"Structuring mapping value @ key {k!r}", k, val_type + ) + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + continue + + try: + key = key_handler(k, key_type) + res[key] = value + except Exception as exc: + msg = IterableValidationNote( + f"Structuring mapping key @ key {k!r}", k, key_type + ) + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + + if errors: + raise IterableValidationError(f"While structuring {cl!r}", errors, cl) + return res + + if key_type in ANIES: + val_conv = self._structure_func.dispatch(val_type) + return {k: val_conv(v, val_type) for k, v in obj.items()} + if val_type in ANIES: + key_conv = self._structure_func.dispatch(key_type) + return {key_conv(k, key_type): v for k, v in obj.items()} + key_conv = self._structure_func.dispatch(key_type) + val_conv = self._structure_func.dispatch(val_type) + return {key_conv(k, key_type): val_conv(v, val_type) for k, v in obj.items()} + + def _structure_optional(self, obj, union): + if obj is None: + return None + union_params = union.__args__ + other = union_params[0] if union_params[1] is NoneType else union_params[1] + # We can't actually have a Union of a Union, so this is safe. 
+ return self._structure_func.dispatch(other)(obj, other) + + def _structure_tuple(self, obj: Any, tup: type[T]) -> T: + """Deal with structuring into a tuple.""" + tup_params = None if tup in (Tuple, tuple) else tup.__args__ + has_ellipsis = tup_params and tup_params[-1] is Ellipsis + if tup_params is None or (has_ellipsis and tup_params[0] in ANIES): + # Just a Tuple. (No generic information.) + return tuple(obj) + if has_ellipsis: + # We're dealing with a homogenous tuple, Tuple[int, ...] + tup_type = tup_params[0] + conv = self._structure_func.dispatch(tup_type) + if self.detailed_validation: + errors = [] + res = [] + ix = 0 + for e in obj: + try: + res.append(conv(e, tup_type)) + except Exception as exc: + msg = IterableValidationNote( + f"Structuring {tup} @ index {ix}", ix, tup_type + ) + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + finally: + ix += 1 + if errors: + raise IterableValidationError( + f"While structuring {tup!r}", errors, tup + ) + return tuple(res) + return tuple(conv(e, tup_type) for e in obj) + + # We're dealing with a heterogenous tuple. + exp_len = len(tup_params) + try: + len_obj = len(obj) + except TypeError: + pass # most likely an unsized iterator, eg generator + else: + if len_obj > exp_len: + exp_len = len_obj + if self.detailed_validation: + errors = [] + res = [] + for ix, (t, e) in enumerate(zip(tup_params, obj)): + try: + conv = self._structure_func.dispatch(t) + res.append(conv(e, t)) + except Exception as exc: + msg = IterableValidationNote( + f"Structuring {tup} @ index {ix}", ix, t + ) + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + if len(res) < exp_len: + problem = "Not enough" if len(res) < len(tup_params) else "Too many" + exc = ValueError(f"{problem} values in {obj!r} to structure as {tup!r}") + msg = f"Structuring {tup}" + exc.__notes__ = [*getattr(exc, "__notes__", []), msg] + errors.append(exc) + if errors: + raise IterableValidationError(f"While structuring {tup!r}", errors, tup) + return tuple(res) + + res = tuple( + [self._structure_func.dispatch(t)(e, t) for t, e in zip(tup_params, obj)] + ) + if len(res) < exp_len: + problem = "Not enough" if len(res) < len(tup_params) else "Too many" + raise ValueError(f"{problem} values in {obj!r} to structure as {tup!r}") + return res + + def _get_dis_func( + self, + union: Any, + use_literals: bool = True, + overrides: dict[str, AttributeOverride] | None = None, + ) -> Callable[[Any], type]: + """Fetch or try creating a disambiguation function for a union.""" + union_types = union.__args__ + if NoneType in union_types: + # We support unions of attrs classes and NoneType higher in the + # logic. + union_types = tuple(e for e in union_types if e is not NoneType) + + # TODO: technically both disambiguators could support TypedDicts and + # dataclasses... + if not all(has(get_origin(e) or e) for e in union_types): + raise StructureHandlerNotFoundError( + "Only unions of attrs classes supported " + "currently. 
Register a structure hook manually.", + type_=union, + ) + + return create_default_dis_func( + self, + *union_types, + use_literals=use_literals, + overrides=overrides if overrides is not None else "from_converter", + ) + + def __deepcopy__(self, _) -> BaseConverter: + return self.copy() + + def copy( + self, + dict_factory: Callable[[], Any] | None = None, + unstruct_strat: UnstructureStrategy | None = None, + prefer_attrib_converters: bool | None = None, + detailed_validation: bool | None = None, + ) -> BaseConverter: + """Create a copy of the converter, keeping all existing custom hooks. + + :param detailed_validation: Whether to use a slightly slower mode for detailed + validation errors. + """ + res = self.__class__( + dict_factory if dict_factory is not None else self._dict_factory, + ( + unstruct_strat + if unstruct_strat is not None + else ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ) + ), + ( + prefer_attrib_converters + if prefer_attrib_converters is not None + else self._prefer_attrib_converters + ), + ( + detailed_validation + if detailed_validation is not None + else self.detailed_validation + ), + ) + + self._unstructure_func.copy_to(res._unstructure_func, self._unstruct_copy_skip) + self._structure_func.copy_to(res._structure_func, self._struct_copy_skip) + + return res + + +class Converter(BaseConverter): + """A converter which generates specialized un/structuring functions.""" + + __slots__ = ( + "omit_if_default", + "forbid_extra_keys", + "type_overrides", + "_unstruct_collection_overrides", + ) + + def __init__( + self, + dict_factory: Callable[[], Any] = dict, + unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT, + omit_if_default: bool = False, + forbid_extra_keys: bool = False, + type_overrides: Mapping[type, AttributeOverride] = {}, + unstruct_collection_overrides: Mapping[type, Callable] = {}, + prefer_attrib_converters: bool = False, + detailed_validation: bool = True, + unstructure_fallback_factory: HookFactory[UnstructureHook] = lambda _: identity, + structure_fallback_factory: HookFactory[StructureHook] = lambda _: raise_error, + ): + """ + :param detailed_validation: Whether to use a slightly slower mode for detailed + validation errors. + :param unstructure_fallback_factory: A hook factory to be called when no + registered unstructuring hooks match. + :param structure_fallback_factory: A hook factory to be called when no + registered structuring hooks match. + + .. versionadded:: 23.2.0 *unstructure_fallback_factory* + .. versionadded:: 23.2.0 *structure_fallback_factory* + """ + super().__init__( + dict_factory=dict_factory, + unstruct_strat=unstruct_strat, + prefer_attrib_converters=prefer_attrib_converters, + detailed_validation=detailed_validation, + unstructure_fallback_factory=unstructure_fallback_factory, + structure_fallback_factory=structure_fallback_factory, + ) + self.omit_if_default = omit_if_default + self.forbid_extra_keys = forbid_extra_keys + self.type_overrides = dict(type_overrides) + + unstruct_collection_overrides = { + get_origin(k) or k: v for k, v in unstruct_collection_overrides.items() + } + + self._unstruct_collection_overrides = unstruct_collection_overrides + + # Do a little post-processing magic to make things easier for users. 
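A hedged illustration (not taken from this patch) of how the `omit_if_default` and `forbid_extra_keys` options accepted above typically behave for an attrs class:

from attrs import define

from cattrs import Converter
from cattrs.errors import ClassValidationError

@define
class Point:
    x: int
    y: int = 0

conv = Converter(omit_if_default=True, forbid_extra_keys=True)

assert conv.unstructure(Point(1)) == {"x": 1}                  # default y dropped
assert conv.structure({"x": 1, "y": 2}, Point) == Point(1, 2)

try:
    conv.structure({"x": 1, "z": 3}, Point)                    # unknown key "z"
except ClassValidationError:
    pass  # wraps a ForbiddenExtraKeysError under detailed validation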
+ co = unstruct_collection_overrides + + # abc.Set overrides, if defined, apply to abc.MutableSets and sets + if OriginAbstractSet in co: + if OriginMutableSet not in co: + co[OriginMutableSet] = co[OriginAbstractSet] + co[AbcMutableSet] = co[OriginAbstractSet] # For 3.8 compatibility. + if FrozenSetSubscriptable not in co: + co[FrozenSetSubscriptable] = co[OriginAbstractSet] + + # abc.MutableSet overrrides, if defined, apply to sets + if OriginMutableSet in co and set not in co: + co[set] = co[OriginMutableSet] + + if FrozenSetSubscriptable in co: + co[frozenset] = co[FrozenSetSubscriptable] # For 3.8 compatibility. + + # abc.Sequence overrides, if defined, can apply to MutableSequences, lists and + # tuples + if Sequence in co: + if MutableSequence not in co: + co[MutableSequence] = co[Sequence] + if tuple not in co: + co[tuple] = co[Sequence] + + # abc.MutableSequence overrides, if defined, can apply to lists + if MutableSequence in co: + if list not in co: + co[list] = co[MutableSequence] + if deque not in co: + co[deque] = co[MutableSequence] + + # abc.Mapping overrides, if defined, can apply to MutableMappings + if Mapping in co and MutableMapping not in co: + co[MutableMapping] = co[Mapping] + + # abc.MutableMapping overrides, if defined, can apply to dicts + if MutableMapping in co and dict not in co: + co[dict] = co[MutableMapping] + + # builtins.dict overrides, if defined, can apply to counters + if dict in co and Counter not in co: + co[Counter] = co[dict] + + if unstruct_strat is UnstructureStrategy.AS_DICT: + # Override the attrs handler. + self.register_unstructure_hook_factory( + has_with_generic, self.gen_unstructure_attrs_fromdict + ) + self.register_structure_hook_factory( + has_with_generic, self.gen_structure_attrs_fromdict + ) + self.register_unstructure_hook_factory( + is_annotated, self.gen_unstructure_annotated + ) + self.register_unstructure_hook_factory( + is_hetero_tuple, self.gen_unstructure_hetero_tuple + ) + self.register_unstructure_hook_factory(is_namedtuple)( + namedtuple_unstructure_factory + ) + self.register_unstructure_hook_factory( + is_sequence, self.gen_unstructure_iterable + ) + self.register_unstructure_hook_factory(is_mapping, self.gen_unstructure_mapping) + self.register_unstructure_hook_factory( + is_mutable_set, + lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=set), + ) + self.register_unstructure_hook_factory( + is_frozenset, + lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=frozenset), + ) + self.register_unstructure_hook_factory( + is_optional, self.gen_unstructure_optional + ) + self.register_unstructure_hook_factory( + is_typeddict, self.gen_unstructure_typeddict + ) + self.register_unstructure_hook_factory( + lambda t: get_newtype_base(t) is not None, + lambda t: self.get_unstructure_hook(get_newtype_base(t)), + ) + + self.register_structure_hook_factory(is_annotated, self.gen_structure_annotated) + self.register_structure_hook_factory(is_mapping, self.gen_structure_mapping) + self.register_structure_hook_factory(is_counter, self.gen_structure_counter) + self.register_structure_hook_factory(is_typeddict, self.gen_structure_typeddict) + self.register_structure_hook_factory( + lambda t: get_newtype_base(t) is not None, self.get_structure_newtype + ) + + # We keep these so we can more correctly copy the hooks. 
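The cascade above is meant to let one abstract override cover its concrete subtypes; a small sketch of that effect, assuming the documented `unstruct_collection_overrides` behaviour rather than anything specific to this patch:

from collections.abc import Set as AbcSet

from cattrs import Converter

conv = Converter(unstruct_collection_overrides={AbcSet: sorted})

# The abc.Set override cascades to concrete sets and frozensets,
# so both unstructure into sorted lists.
assert conv.unstructure({3, 1, 2}) == [1, 2, 3]
assert conv.unstructure(frozenset({3, 1, 2})) == [1, 2, 3]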
+ self._struct_copy_skip = self._structure_func.get_num_fns() + self._unstruct_copy_skip = self._unstructure_func.get_num_fns() + + @overload + def register_unstructure_hook_factory( + self, predicate: Predicate + ) -> Callable[[AnyUnstructureHookFactory], AnyUnstructureHookFactory]: ... + + @overload + def register_unstructure_hook_factory( + self, predicate: Predicate, factory: UnstructureHookFactory + ) -> UnstructureHookFactory: ... + + @overload + def register_unstructure_hook_factory( + self, predicate: Predicate, factory: ExtendedUnstructureHookFactory[Converter] + ) -> ExtendedUnstructureHookFactory[Converter]: ... + + def register_unstructure_hook_factory(self, predicate, factory=None): + # This dummy wrapper is required due to how `@overload` works. + return super().register_unstructure_hook_factory(predicate, factory) + + @overload + def register_structure_hook_factory( + self, predicate: Predicate + ) -> Callable[[AnyStructureHookFactory], AnyStructureHookFactory]: ... + + @overload + def register_structure_hook_factory( + self, predicate: Predicate, factory: StructureHookFactory + ) -> StructureHookFactory: ... + + @overload + def register_structure_hook_factory( + self, predicate: Predicate, factory: ExtendedStructureHookFactory[Converter] + ) -> ExtendedStructureHookFactory[Converter]: ... + + def register_structure_hook_factory(self, predicate, factory=None): + # This dummy wrapper is required due to how `@overload` works. + return super().register_structure_hook_factory(predicate, factory) + + def get_structure_newtype(self, type: type[T]) -> Callable[[Any, Any], T]: + base = get_newtype_base(type) + handler = self.get_structure_hook(base) + return lambda v, _: handler(v, base) + + def gen_unstructure_annotated(self, type): + origin = type.__origin__ + return self.get_unstructure_hook(origin) + + def gen_structure_annotated(self, type) -> Callable: + """A hook factory for annotated types.""" + origin = type.__origin__ + hook = self.get_structure_hook(origin) + return lambda v, _: hook(v, origin) + + def gen_unstructure_typeddict(self, cl: Any) -> Callable[[dict], dict]: + """Generate a TypedDict unstructure function. + + Also apply converter-scored modifications. + """ + return make_typeddict_dict_unstruct_fn(cl, self) + + def gen_unstructure_attrs_fromdict( + self, cl: type[T] + ) -> Callable[[T], dict[str, Any]]: + origin = get_origin(cl) + attribs = fields(origin or cl) + if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + attrib_overrides = { + a.name: self.type_overrides[a.type] + for a in attribs + if a.type in self.type_overrides + } + + return make_dict_unstructure_fn( + cl, self, _cattrs_omit_if_default=self.omit_if_default, **attrib_overrides + ) + + def gen_unstructure_optional(self, cl: type[T]) -> Callable[[T], Any]: + """Generate an unstructuring hook for optional types.""" + union_params = cl.__args__ + other = union_params[0] if union_params[1] is NoneType else union_params[1] + + if isinstance(other, TypeVar): + handler = self.unstructure + else: + handler = self.get_unstructure_hook(other) + + def unstructure_optional(val, _handler=handler): + return None if val is None else _handler(val) + + return unstructure_optional + + def gen_structure_typeddict(self, cl: Any) -> Callable[[dict, Any], dict]: + """Generate a TypedDict structure function. + + Also apply converter-scored modifications. 
+ """ + return make_typeddict_dict_struct_fn( + cl, self, _cattrs_detailed_validation=self.detailed_validation + ) + + def gen_structure_attrs_fromdict( + self, cl: type[T] + ) -> Callable[[Mapping[str, Any], Any], T]: + attribs = fields(get_origin(cl) or cl if is_generic(cl) else cl) + if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + attrib_overrides = { + a.name: self.type_overrides[a.type] + for a in attribs + if a.type in self.type_overrides + } + return make_dict_structure_fn( + cl, + self, + _cattrs_forbid_extra_keys=self.forbid_extra_keys, + _cattrs_prefer_attrib_converters=self._prefer_attrib_converters, + _cattrs_detailed_validation=self.detailed_validation, + **attrib_overrides, + ) + + def gen_unstructure_iterable( + self, cl: Any, unstructure_to: Any = None + ) -> IterableUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or list + ) + h = iterable_unstructure_factory(cl, self, unstructure_to=unstructure_to) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_unstructure_hetero_tuple( + self, cl: Any, unstructure_to: Any = None + ) -> HeteroTupleUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or tuple + ) + h = make_hetero_tuple_unstructure_fn(cl, self, unstructure_to=unstructure_to) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_unstructure_mapping( + self, + cl: Any, + unstructure_to: Any = None, + key_handler: Callable[[Any, Any | None], Any] | None = None, + ) -> MappingUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or dict + ) + h = make_mapping_unstructure_fn( + cl, self, unstructure_to=unstructure_to, key_handler=key_handler + ) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_structure_counter(self, cl: Any) -> MappingStructureFn[T]: + h = make_mapping_structure_fn( + cl, + self, + structure_to=Counter, + val_type=int, + detailed_validation=self.detailed_validation, + ) + self._structure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_structure_mapping(self, cl: Any) -> MappingStructureFn[T]: + structure_to = get_origin(cl) or cl + if structure_to in ( + MutableMapping, + AbcMutableMapping, + Mapping, + AbcMapping, + ): # These default to dicts + structure_to = dict + h = make_mapping_structure_fn( + cl, self, structure_to, detailed_validation=self.detailed_validation + ) + self._structure_func.register_cls_list([(cl, h)], direct=True) + return h + + def copy( + self, + dict_factory: Callable[[], Any] | None = None, + unstruct_strat: UnstructureStrategy | None = None, + omit_if_default: bool | None = None, + forbid_extra_keys: bool | None = None, + type_overrides: Mapping[type, AttributeOverride] | None = None, + unstruct_collection_overrides: Mapping[type, Callable] | None = None, + prefer_attrib_converters: bool | None = None, + detailed_validation: bool | None = None, + ) -> Converter: + """Create a copy of the converter, keeping all existing custom hooks. + + :param detailed_validation: Whether to use a slightly slower mode for detailed + validation errors. 
+ """ + res = self.__class__( + dict_factory if dict_factory is not None else self._dict_factory, + ( + unstruct_strat + if unstruct_strat is not None + else ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ) + ), + omit_if_default if omit_if_default is not None else self.omit_if_default, + ( + forbid_extra_keys + if forbid_extra_keys is not None + else self.forbid_extra_keys + ), + type_overrides if type_overrides is not None else self.type_overrides, + ( + unstruct_collection_overrides + if unstruct_collection_overrides is not None + else self._unstruct_collection_overrides + ), + ( + prefer_attrib_converters + if prefer_attrib_converters is not None + else self._prefer_attrib_converters + ), + ( + detailed_validation + if detailed_validation is not None + else self.detailed_validation + ), + ) + + self._unstructure_func.copy_to( + res._unstructure_func, skip=self._unstruct_copy_skip + ) + self._structure_func.copy_to(res._structure_func, skip=self._struct_copy_skip) + + return res + + +GenConverter: TypeAlias = Converter diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py new file mode 100644 index 0000000..ad36ae3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py @@ -0,0 +1,205 @@ +"""Utilities for union (sum type) disambiguation.""" + +from __future__ import annotations + +from collections import defaultdict +from dataclasses import MISSING +from functools import reduce +from operator import or_ +from typing import TYPE_CHECKING, Any, Callable, Literal, Mapping, Union + +from attrs import NOTHING, Attribute, AttrsInstance + +from ._compat import ( + NoneType, + adapted_fields, + fields_dict, + get_args, + get_origin, + has, + is_literal, + is_union_type, +) +from .gen import AttributeOverride + +if TYPE_CHECKING: + from .converters import BaseConverter + +__all__ = ["is_supported_union", "create_default_dis_func"] + + +def is_supported_union(typ: Any) -> bool: + """Whether the type is a union of attrs classes.""" + return is_union_type(typ) and all( + e is NoneType or has(get_origin(e) or e) for e in typ.__args__ + ) + + +def create_default_dis_func( + converter: BaseConverter, + *classes: type[AttrsInstance], + use_literals: bool = True, + overrides: ( + dict[str, AttributeOverride] | Literal["from_converter"] + ) = "from_converter", +) -> Callable[[Mapping[Any, Any]], type[Any] | None]: + """Given attrs classes or dataclasses, generate a disambiguation function. + + The function is based on unique fields without defaults or unique values. + + :param use_literals: Whether to try using fields annotated as literals for + disambiguation. + :param overrides: Attribute overrides to apply. + + .. versionchanged:: 24.1.0 + Dataclasses are now supported. + """ + if len(classes) < 2: + raise ValueError("At least two classes required.") + + if overrides == "from_converter": + overrides = [ + getattr(converter.get_structure_hook(c), "overrides", {}) for c in classes + ] + else: + overrides = [overrides for _ in classes] + + # first, attempt for unique values + if use_literals: + # requirements for a discriminator field: + # (... 
TODO: a single fallback is OK) + # - it must always be enumerated + cls_candidates = [ + { + at.name + for at in adapted_fields(get_origin(cl) or cl) + if is_literal(at.type) + } + for cl in classes + ] + + # literal field names common to all members + discriminators: set[str] = cls_candidates[0] + for possible_discriminators in cls_candidates: + discriminators &= possible_discriminators + + best_result = None + best_discriminator = None + for discriminator in discriminators: + # maps Literal values (strings, ints...) to classes + mapping = defaultdict(list) + + for cl in classes: + for key in get_args( + fields_dict(get_origin(cl) or cl)[discriminator].type + ): + mapping[key].append(cl) + + if best_result is None or max(len(v) for v in mapping.values()) <= max( + len(v) for v in best_result.values() + ): + best_result = mapping + best_discriminator = discriminator + + if ( + best_result + and best_discriminator + and max(len(v) for v in best_result.values()) != len(classes) + ): + final_mapping = { + k: v[0] if len(v) == 1 else Union[tuple(v)] + for k, v in best_result.items() + } + + def dis_func(data: Mapping[Any, Any]) -> type | None: + if not isinstance(data, Mapping): + raise ValueError("Only input mappings are supported.") + return final_mapping[data[best_discriminator]] + + return dis_func + + # next, attempt for unique keys + + # NOTE: This could just as well work with just field availability and not + # uniqueness, returning Unions ... it doesn't do that right now. + cls_and_attrs = [ + (cl, *_usable_attribute_names(cl, override)) + for cl, override in zip(classes, overrides) + ] + # For each class, attempt to generate a single unique required field. + uniq_attrs_dict: dict[str, type] = {} + + # We start from classes with the largest number of unique fields + # so we can do easy picks first, making later picks easier. + cls_and_attrs.sort(key=lambda c_a: len(c_a[1]), reverse=True) + + fallback = None # If none match, try this. + + for cl, cl_reqs, back_map in cls_and_attrs: + # We do not have to consider classes we've already processed, since + # they will have been eliminated by the match dictionary already. + other_classes = [ + c_and_a + for c_and_a in cls_and_attrs + if c_and_a[0] is not cl and c_and_a[0] not in uniq_attrs_dict.values() + ] + other_reqs = reduce(or_, (c_a[1] for c_a in other_classes), set()) + uniq = cl_reqs - other_reqs + + # We want a unique attribute with no default. 
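A hedged example of the literal-discriminator strategy described above: a shared `Literal` field lets the generated disambiguator pick the member class without any manually registered hook.

from typing import Literal, Union

from attrs import define

from cattrs import Converter

@define
class Cat:
    kind: Literal["cat"]
    lives: int

@define
class Dog:
    kind: Literal["dog"]
    name: str

conv = Converter()

# The default union handling builds a disambiguator from the "kind" literals.
pet = conv.structure({"kind": "dog", "name": "Rex"}, Union[Cat, Dog])
assert pet == Dog(kind="dog", name="Rex")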
+ cl_fields = fields_dict(get_origin(cl) or cl) + for maybe_renamed_attr_name in uniq: + orig_name = back_map[maybe_renamed_attr_name] + if cl_fields[orig_name].default in (NOTHING, MISSING): + break + else: + if fallback is None: + fallback = cl + continue + raise TypeError(f"{cl} has no usable non-default attributes") + uniq_attrs_dict[maybe_renamed_attr_name] = cl + + if fallback is None: + + def dis_func(data: Mapping[Any, Any]) -> type[AttrsInstance] | None: + if not isinstance(data, Mapping): + raise ValueError("Only input mappings are supported") + for k, v in uniq_attrs_dict.items(): + if k in data: + return v + raise ValueError("Couldn't disambiguate") + + else: + + def dis_func(data: Mapping[Any, Any]) -> type[AttrsInstance] | None: + if not isinstance(data, Mapping): + raise ValueError("Only input mappings are supported") + for k, v in uniq_attrs_dict.items(): + if k in data: + return v + return fallback + + return dis_func + + +create_uniq_field_dis_func = create_default_dis_func + + +def _overriden_name(at: Attribute, override: AttributeOverride | None) -> str: + if override is None or override.rename is None: + return at.name + return override.rename + + +def _usable_attribute_names( + cl: type[Any], overrides: dict[str, AttributeOverride] +) -> tuple[set[str], dict[str, str]]: + """Return renamed fields and a mapping to original field names.""" + res = set() + mapping = {} + + for at in adapted_fields(get_origin(cl) or cl): + res.add(n := _overriden_name(at, overrides.get(at.name))) + mapping[n] = at.name + + return res, mapping diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py new file mode 100644 index 0000000..3d746db --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py @@ -0,0 +1,194 @@ +from __future__ import annotations + +from functools import lru_cache, singledispatch +from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, TypeVar + +from attrs import Factory, define + +from ._compat import TypeAlias +from .fns import Predicate + +if TYPE_CHECKING: + from .converters import BaseConverter + +TargetType: TypeAlias = Any +UnstructuredValue: TypeAlias = Any +StructuredValue: TypeAlias = Any + +StructureHook: TypeAlias = Callable[[UnstructuredValue, TargetType], StructuredValue] +UnstructureHook: TypeAlias = Callable[[StructuredValue], UnstructuredValue] + +Hook = TypeVar("Hook", StructureHook, UnstructureHook) +HookFactory: TypeAlias = Callable[[TargetType], Hook] + + +@define +class _DispatchNotFound: + """A dummy object to help signify a dispatch not found.""" + + +@define +class FunctionDispatch: + """ + FunctionDispatch is similar to functools.singledispatch, but + instead dispatches based on functions that take the type of the + first argument in the method, and return True or False. + + objects that help determine dispatch should be instantiated objects. + + :param converter: A converter to be used for factories that require converters. + + .. versionchanged:: 24.1.0 + Support for factories that require converters, hence this requires a + converter when creating. 
+ """ + + _converter: BaseConverter + _handler_pairs: list[tuple[Predicate, Callable[[Any, Any], Any], bool, bool]] = ( + Factory(list) + ) + + def register( + self, + predicate: Predicate, + func: Callable[..., Any], + is_generator=False, + takes_converter=False, + ) -> None: + self._handler_pairs.insert(0, (predicate, func, is_generator, takes_converter)) + + def dispatch(self, typ: Any) -> Callable[..., Any] | None: + """ + Return the appropriate handler for the object passed. + """ + for can_handle, handler, is_generator, takes_converter in self._handler_pairs: + # can handle could raise an exception here + # such as issubclass being called on an instance. + # it's easier to just ignore that case. + try: + ch = can_handle(typ) + except Exception: # noqa: S112 + continue + if ch: + if is_generator: + if takes_converter: + return handler(typ, self._converter) + return handler(typ) + + return handler + return None + + def get_num_fns(self) -> int: + return len(self._handler_pairs) + + def copy_to(self, other: FunctionDispatch, skip: int = 0) -> None: + other._handler_pairs = self._handler_pairs[:-skip] + other._handler_pairs + + +@define(init=False) +class MultiStrategyDispatch(Generic[Hook]): + """ + MultiStrategyDispatch uses a combination of exact-match dispatch, + singledispatch, and FunctionDispatch. + + :param converter: A converter to be used for factories that require converters. + :param fallback_factory: A hook factory to be called when a hook cannot be + produced. + + .. versionchanged:: 23.2.0 + Fallbacks are now factories. + .. versionchanged:: 24.1.0 + Support for factories that require converters, hence this requires a + converter when creating. + """ + + _fallback_factory: HookFactory[Hook] + _converter: BaseConverter + _direct_dispatch: dict[TargetType, Hook] + _function_dispatch: FunctionDispatch + _single_dispatch: Any + dispatch: Callable[[TargetType, BaseConverter], Hook] + + def __init__( + self, fallback_factory: HookFactory[Hook], converter: BaseConverter + ) -> None: + self._fallback_factory = fallback_factory + self._direct_dispatch = {} + self._function_dispatch = FunctionDispatch(converter) + self._single_dispatch = singledispatch(_DispatchNotFound) + self.dispatch = lru_cache(maxsize=None)(self.dispatch_without_caching) + + def dispatch_without_caching(self, typ: TargetType) -> Hook: + """Dispatch on the type but without caching the result.""" + try: + dispatch = self._single_dispatch.dispatch(typ) + if dispatch is not _DispatchNotFound: + return dispatch + except Exception: # noqa: S110 + pass + + direct_dispatch = self._direct_dispatch.get(typ) + if direct_dispatch is not None: + return direct_dispatch + + res = self._function_dispatch.dispatch(typ) + return res if res is not None else self._fallback_factory(typ) + + def register_cls_list(self, cls_and_handler, direct: bool = False) -> None: + """Register a class to direct or singledispatch.""" + for cls, handler in cls_and_handler: + if direct: + self._direct_dispatch[cls] = handler + else: + self._single_dispatch.register(cls, handler) + self.clear_direct() + self.dispatch.cache_clear() + + def register_func_list( + self, + pred_and_handler: list[ + tuple[Predicate, Any] + | tuple[Predicate, Any, bool] + | tuple[Predicate, Callable[[Any, BaseConverter], Any], Literal["extended"]] + ], + ): + """ + Register a predicate function to determine if the handler + should be used for the type. + + :param pred_and_handler: The list of predicates and their associated + handlers. 
If a handler is registered in `extended` mode, it's a + factory that requires a converter. + """ + for tup in pred_and_handler: + if len(tup) == 2: + func, handler = tup + self._function_dispatch.register(func, handler) + else: + func, handler, is_gen = tup + if is_gen == "extended": + self._function_dispatch.register( + func, handler, is_generator=is_gen, takes_converter=True + ) + else: + self._function_dispatch.register(func, handler, is_generator=is_gen) + self.clear_direct() + self.dispatch.cache_clear() + + def clear_direct(self) -> None: + """Clear the direct dispatch.""" + self._direct_dispatch.clear() + + def clear_cache(self) -> None: + """Clear all caches.""" + self._direct_dispatch.clear() + self.dispatch.cache_clear() + + def get_num_fns(self) -> int: + return self._function_dispatch.get_num_fns() + + def copy_to(self, other: MultiStrategyDispatch, skip: int = 0) -> None: + self._function_dispatch.copy_to(other._function_dispatch, skip=skip) + for cls, fn in self._single_dispatch.registry.items(): + other._single_dispatch.register(cls, fn) + other.clear_cache() diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py new file mode 100644 index 0000000..9148bf1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py @@ -0,0 +1,129 @@ +from typing import Any, List, Optional, Set, Tuple, Type, Union + +from cattrs._compat import ExceptionGroup + + +class StructureHandlerNotFoundError(Exception): + """ + Error raised when structuring cannot find a handler for converting inputs into + :attr:`type_`. + """ + + def __init__(self, message: str, type_: Type) -> None: + super().__init__(message) + self.type_ = type_ + + +class BaseValidationError(ExceptionGroup): + cl: Type + + def __new__(cls, message, excs, cl: Type): + obj = super().__new__(cls, message, excs) + obj.cl = cl + return obj + + def derive(self, excs): + return ClassValidationError(self.message, excs, self.cl) + + +class IterableValidationNote(str): + """Attached as a note to an exception when an iterable element fails structuring.""" + + index: Union[int, str] # Ints for list indices, strs for dict keys + type: Any + + def __new__( + cls, string: str, index: Union[int, str], type: Any + ) -> "IterableValidationNote": + instance = str.__new__(cls, string) + instance.index = index + instance.type = type + return instance + + def __getnewargs__(self) -> Tuple[str, Union[int, str], Any]: + return (str(self), self.index, self.type) + + +class IterableValidationError(BaseValidationError): + """Raised when structuring an iterable.""" + + def group_exceptions( + self, + ) -> Tuple[List[Tuple[Exception, IterableValidationNote]], List[Exception]]: + """Split the exceptions into two groups: with and without validation notes.""" + excs_with_notes = [] + other_excs = [] + for subexc in self.exceptions: + if hasattr(subexc, "__notes__"): + for note in subexc.__notes__: + if note.__class__ is IterableValidationNote: + excs_with_notes.append((subexc, note)) + break + else: + other_excs.append(subexc) + else: + other_excs.append(subexc) + + return excs_with_notes, other_excs + + +class AttributeValidationNote(str): + """Attached as a note to an exception when an attribute fails structuring.""" + + name: str + type: Any + + def __new__(cls, string: str, name: str, type: Any) -> "AttributeValidationNote": + instance = str.__new__(cls, string) + instance.name = name + instance.type = type + return instance + + def __getnewargs__(self) -> Tuple[str, str, Any]: + return 
(str(self), self.name, self.type) + + +class ClassValidationError(BaseValidationError): + """Raised when validating a class if any attributes are invalid.""" + + def group_exceptions( + self, + ) -> Tuple[List[Tuple[Exception, AttributeValidationNote]], List[Exception]]: + """Split the exceptions into two groups: with and without validation notes.""" + excs_with_notes = [] + other_excs = [] + for subexc in self.exceptions: + if hasattr(subexc, "__notes__"): + for note in subexc.__notes__: + if note.__class__ is AttributeValidationNote: + excs_with_notes.append((subexc, note)) + break + else: + other_excs.append(subexc) + else: + other_excs.append(subexc) + + return excs_with_notes, other_excs + + +class ForbiddenExtraKeysError(Exception): + """ + Raised when `forbid_extra_keys` is activated and such extra keys are detected + during structuring. + + The attribute `extra_fields` is a sequence of those extra keys, which were the + cause of this error, and `cl` is the class which was structured with those extra + keys. + """ + + def __init__( + self, message: Optional[str], cl: Type, extra_fields: Set[str] + ) -> None: + self.cl = cl + self.extra_fields = extra_fields + cln = cl.__name__ + + super().__init__( + message + or f"Extra fields in constructor for {cln}: {', '.join(extra_fields)}" + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py new file mode 100644 index 0000000..748cfb3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py @@ -0,0 +1,22 @@ +"""Useful internal functions.""" + +from typing import Any, Callable, NoReturn, Type, TypeVar + +from ._compat import TypeAlias +from .errors import StructureHandlerNotFoundError + +T = TypeVar("T") + +Predicate: TypeAlias = Callable[[Any], bool] +"""A predicate function determines if a type can be handled.""" + + +def identity(obj: T) -> T: + """The identity function.""" + return obj + + +def raise_error(_, cl: Type) -> NoReturn: + """At the bottom of the condition stack, we explode if we can't handle it.""" + msg = f"Unsupported type: {cl!r}. Register a structure hook for it." 
+ raise StructureHandlerNotFoundError(msg, type_=cl) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py new file mode 100644 index 0000000..97d2876 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py @@ -0,0 +1,1053 @@ +from __future__ import annotations + +import re +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Final, + Iterable, + Literal, + Mapping, + Tuple, + TypeVar, +) + +from attrs import NOTHING, Attribute, Factory, resolve_types + +from .._compat import ( + ANIES, + TypeAlias, + adapted_fields, + get_args, + get_origin, + is_annotated, + is_bare, + is_bare_final, + is_generic, +) +from .._generics import deep_copy_with +from ..dispatch import UnstructureHook +from ..errors import ( + AttributeValidationNote, + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + IterableValidationNote, + StructureHandlerNotFoundError, +) +from ..fns import identity +from ._consts import AttributeOverride, already_generating, neutral +from ._generics import generate_mapping +from ._lc import generate_unique_filename +from ._shared import find_structure_handler + +if TYPE_CHECKING: + from ..converters import BaseConverter + +__all__ = [ + "make_dict_unstructure_fn", + "make_dict_structure_fn", + "make_iterable_unstructure_fn", + "make_hetero_tuple_unstructure_fn", + "make_mapping_unstructure_fn", + "make_mapping_structure_fn", + "make_dict_unstructure_fn_from_attrs", + "make_dict_structure_fn_from_attrs", +] + + +def override( + omit_if_default: bool | None = None, + rename: str | None = None, + omit: bool | None = None, + struct_hook: Callable[[Any, Any], Any] | None = None, + unstruct_hook: Callable[[Any], Any] | None = None, +) -> AttributeOverride: + """Override how a particular field is handled. + + :param omit: Whether to skip the field or not. `None` means apply default handling. + """ + return AttributeOverride(omit_if_default, rename, omit, struct_hook, unstruct_hook) + + +T = TypeVar("T") + + +def make_dict_unstructure_fn_from_attrs( + attrs: list[Attribute], + cl: type, + converter: BaseConverter, + typevar_map: dict[str, Any] = {}, + _cattrs_omit_if_default: bool = False, + _cattrs_use_linecache: bool = True, + _cattrs_use_alias: bool = False, + _cattrs_include_init_false: bool = False, + **kwargs: AttributeOverride, +) -> Callable[[T], dict[str, Any]]: + """ + Generate a specialized dict unstructuring function for a list of attributes. + + Usually used as a building block by more specialized hook factories. + + Any provided overrides are attached to the generated function under the + `overrides` attribute. + + :param cl: The class for which the function is generated; used mostly for its name, + module name and qualname. + :param _cattrs_omit_if_default: if true, attributes equal to their default values + will be omitted in the result dictionary. + :param _cattrs_use_alias: If true, the attribute alias will be used as the + dictionary key by default. + :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False` + will be included. + + .. 
versionadded:: 24.1.0 + """ + + fn_name = "unstructure_" + cl.__name__ + globs = {} + lines = [] + invocation_lines = [] + internal_arg_parts = {} + + for a in attrs: + attr_name = a.name + override = kwargs.get(attr_name, neutral) + if override.omit: + continue + if override.omit is None and not a.init and not _cattrs_include_init_false: + continue + if override.rename is None: + kn = attr_name if not _cattrs_use_alias else a.alias + else: + kn = override.rename + d = a.default + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + handler = None + if override.unstruct_hook is not None: + handler = override.unstruct_hook + else: + if a.type is not None: + t = a.type + if isinstance(t, TypeVar): + if t.__name__ in typevar_map: + t = typevar_map[t.__name__] + else: + handler = converter.unstructure + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, typevar_map) + + if handler is None: + if ( + is_bare_final(t) + and a.default is not NOTHING + and not isinstance(a.default, Factory) + ): + # This is a special case where we can use the + # type of the default to dispatch on. + t = a.default.__class__ + try: + handler = converter.get_unstructure_hook(t, cache_result=False) + except RecursionError: + # There's a circular reference somewhere down the line + handler = converter.unstructure + else: + handler = converter.unstructure + + is_identity = handler == identity + + if not is_identity: + unstruct_handler_name = f"__c_unstr_{attr_name}" + globs[unstruct_handler_name] = handler + internal_arg_parts[unstruct_handler_name] = handler + invoke = f"{unstruct_handler_name}(instance.{attr_name})" + else: + invoke = f"instance.{attr_name}" + + if d is not NOTHING and ( + (_cattrs_omit_if_default and override.omit_if_default is not False) + or override.omit_if_default + ): + def_name = f"__c_def_{attr_name}" + + if isinstance(d, Factory): + globs[def_name] = d.factory + internal_arg_parts[def_name] = d.factory + if d.takes_self: + lines.append(f" if instance.{attr_name} != {def_name}(instance):") + else: + lines.append(f" if instance.{attr_name} != {def_name}():") + lines.append(f" res['{kn}'] = {invoke}") + else: + globs[def_name] = d + internal_arg_parts[def_name] = d + lines.append(f" if instance.{attr_name} != {def_name}:") + lines.append(f" res['{kn}'] = {invoke}") + + else: + # No default or no override. 
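For orientation, a sketch (not part of the vendored code) of how the generated unstructure function is usually obtained through the public `cattrs.gen` API; the per-attribute `override`s feed the `kwargs` handling above.

from attrs import define

from cattrs import Converter
from cattrs.gen import make_dict_unstructure_fn, override

@define
class User:
    id: int
    nickname: str = ""

conv = Converter()
hook = make_dict_unstructure_fn(
    User,
    conv,
    nickname=override(rename="nick", omit_if_default=True),
)
conv.register_unstructure_hook(User, hook)

assert conv.unstructure(User(1)) == {"id": 1}                      # default omitted
assert conv.unstructure(User(1, "bo")) == {"id": 1, "nick": "bo"}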
+ invocation_lines.append(f"'{kn}': {invoke},") + + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + if internal_arg_line: + internal_arg_line = f", {internal_arg_line}" + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = ( + [f"def {fn_name}(instance{internal_arg_line}):"] + + [" res = {"] + + [f" {line}" for line in invocation_lines] + + [" }"] + + lines + + [" return res"] + ) + script = "\n".join(total_lines) + fname = generate_unique_filename( + cl, "unstructure", lines=total_lines if _cattrs_use_linecache else [] + ) + + eval(compile(script, fname, "exec"), globs) + + res = globs[fn_name] + res.overrides = kwargs + + return res + + +def make_dict_unstructure_fn( + cl: type[T], + converter: BaseConverter, + _cattrs_omit_if_default: bool = False, + _cattrs_use_linecache: bool = True, + _cattrs_use_alias: bool = False, + _cattrs_include_init_false: bool = False, + **kwargs: AttributeOverride, +) -> Callable[[T], dict[str, Any]]: + """ + Generate a specialized dict unstructuring function for an attrs class or a + dataclass. + + Any provided overrides are attached to the generated function under the + `overrides` attribute. + + :param _cattrs_omit_if_default: if true, attributes equal to their default values + will be omitted in the result dictionary. + :param _cattrs_use_alias: If true, the attribute alias will be used as the + dictionary key by default. + :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False` + will be included. + + .. versionadded:: 23.2.0 *_cattrs_use_alias* + .. versionadded:: 23.2.0 *_cattrs_include_init_false* + """ + origin = get_origin(cl) + attrs = adapted_fields(origin or cl) # type: ignore + + if any(isinstance(a.type, str) for a in attrs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + + mapping = {} + if is_generic(cl): + mapping = generate_mapping(cl, mapping) + + for base in getattr(origin, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = generate_mapping(base, mapping) + break + if origin is not None: + cl = origin + + # We keep track of what we're generating to help with recursive + # class graphs. 
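An illustrative sketch of the generic-class support the typevar mapping above exists for (assumed standard cattrs behaviour, not taken from this hunk):

from typing import Generic, TypeVar

from attrs import define

from cattrs import Converter

T = TypeVar("T")

@define
class Box(Generic[T]):
    item: T

conv = Converter()

# Structuring against Box[int] resolves T to int via the typevar mapping.
assert conv.structure({"item": "5"}, Box[int]) == Box(5)
# Unstructuring an unparametrized instance falls back to converter.unstructure.
assert conv.unstructure(Box(5)) == {"item": 5}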
+ try: + working_set = already_generating.working_set + except AttributeError: + working_set = set() + already_generating.working_set = working_set + if cl in working_set: + raise RecursionError() + + working_set.add(cl) + + try: + return make_dict_unstructure_fn_from_attrs( + attrs, + cl, + converter, + mapping, + _cattrs_omit_if_default=_cattrs_omit_if_default, + _cattrs_use_linecache=_cattrs_use_linecache, + _cattrs_use_alias=_cattrs_use_alias, + _cattrs_include_init_false=_cattrs_include_init_false, + **kwargs, + ) + finally: + working_set.remove(cl) + if not working_set: + del already_generating.working_set + + +DictStructureFn = Callable[[Mapping[str, Any], Any], T] + + +def make_dict_structure_fn_from_attrs( + attrs: list[Attribute], + cl: type, + converter: BaseConverter, + typevar_map: dict[str, Any] = {}, + _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter", + _cattrs_use_linecache: bool = True, + _cattrs_prefer_attrib_converters: ( + bool | Literal["from_converter"] + ) = "from_converter", + _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter", + _cattrs_use_alias: bool = False, + _cattrs_include_init_false: bool = False, + **kwargs: AttributeOverride, +) -> DictStructureFn[T]: + """ + Generate a specialized dict structuring function for a list of attributes. + + Usually used as a building block by more specialized hook factories. + + Any provided overrides are attached to the generated function under the + `overrides` attribute. + + :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a + `ForbiddenExtraKeysError` if unknown keys are encountered. + :param _cattrs_use_linecache: Whether to store the source code in the Python + linecache. + :param _cattrs_prefer_attrib_converters: If an _attrs_ converter is present on a + field, use it instead of processing the field normally. + :param _cattrs_detailed_validation: Whether to use a slower mode that produces + more detailed errors. + :param _cattrs_use_alias: If true, the attribute alias will be used as the + dictionary key by default. + :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False` + will be included. + + .. versionadded:: 24.1.0 + """ + + cl_name = cl.__name__ + fn_name = "structure_" + cl_name + + # We have generic parameters and need to generate a unique name for the function + for p in getattr(cl, "__parameters__", ()): + # This is nasty, I am not sure how best to handle `typing.List[str]` or + # `TClass[int, int]` as a parameter type here + try: + name_base = typevar_map[p.__name__] + except KeyError: + pn = p.__name__ + raise StructureHandlerNotFoundError( + f"Missing type for generic argument {pn}, specify it when structuring.", + p, + ) from None + name = getattr(name_base, "__name__", None) or str(name_base) + # `<>` can be present in lambdas + # `|` can be present in unions + name = re.sub(r"[\[\.\] ,<>]", "_", name) + name = re.sub(r"\|", "u", name) + fn_name += f"_{name}" + + internal_arg_parts = {"__cl": cl} + globs = {} + lines = [] + post_lines = [] + pi_lines = [] # post instantiation lines + invocation_lines = [] + + allowed_fields = set() + if _cattrs_forbid_extra_keys == "from_converter": + # BaseConverter doesn't have it so we're careful. 
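When detailed validation is on, the generated function collects one exception per failing attribute; a brief sketch of what callers see, using the error API defined earlier in this patch:

from attrs import define

from cattrs import Converter
from cattrs.errors import ClassValidationError

@define
class Job:
    id: int
    retries: int

conv = Converter(detailed_validation=True)

try:
    conv.structure({"id": "abc", "retries": "xyz"}, Job)
except ClassValidationError as exc:
    with_notes, _ = exc.group_exceptions()
    # One entry per failing attribute; the note records the attribute name.
    assert {note.name for _, note in with_notes} == {"id", "retries"}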
+ _cattrs_forbid_extra_keys = getattr(converter, "forbid_extra_keys", False) + if _cattrs_detailed_validation == "from_converter": + _cattrs_detailed_validation = converter.detailed_validation + if _cattrs_prefer_attrib_converters == "from_converter": + _cattrs_prefer_attrib_converters = converter._prefer_attrib_converters + + if _cattrs_forbid_extra_keys: + globs["__c_a"] = allowed_fields + globs["__c_feke"] = ForbiddenExtraKeysError + + if _cattrs_detailed_validation: + lines.append(" res = {}") + lines.append(" errors = []") + invocation_lines.append("**res,") + internal_arg_parts["__c_cve"] = ClassValidationError + internal_arg_parts["__c_avn"] = AttributeValidationNote + for a in attrs: + an = a.name + override = kwargs.get(an, neutral) + if override.omit: + continue + if override.omit is None and not a.init and not _cattrs_include_init_false: + continue + t = a.type + if isinstance(t, TypeVar): + t = typevar_map.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, typevar_map) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + if override.struct_hook is not None: + # If the user has requested an override, just use that. + handler = override.struct_hook + else: + handler = find_structure_handler( + a, t, converter, _cattrs_prefer_attrib_converters + ) + + struct_handler_name = f"__c_structure_{an}" + if handler is not None: + internal_arg_parts[struct_handler_name] = handler + + ian = a.alias + if override.rename is None: + kn = an if not _cattrs_use_alias else a.alias + else: + kn = override.rename + + allowed_fields.add(kn) + i = " " + + if not a.init: + if a.default is not NOTHING: + pi_lines.append(f"{i}if '{kn}' in o:") + i = f"{i} " + pi_lines.append(f"{i}try:") + i = f"{i} " + type_name = f"__c_type_{an}" + internal_arg_parts[type_name] = t + if handler is not None: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + pi_lines.append( + f"{i}instance.{an} = {struct_handler_name}(o['{kn}'])" + ) + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + pi_lines.append( + f"{i}instance.{an} = {struct_handler_name}(o['{kn}'], {tn})" + ) + else: + pi_lines.append(f"{i}instance.{an} = o['{kn}']") + i = i[:-2] + pi_lines.append(f"{i}except Exception as e:") + i = f"{i} " + pi_lines.append( + f'{i}e.__notes__ = getattr(e, \'__notes__\', []) + [__c_avn("Structuring class {cl.__qualname__} @ attribute {an}", "{an}", __c_type_{an})]' + ) + pi_lines.append(f"{i}errors.append(e)") + + else: + if a.default is not NOTHING: + lines.append(f"{i}if '{kn}' in o:") + i = f"{i} " + lines.append(f"{i}try:") + i = f"{i} " + type_name = f"__c_type_{an}" + internal_arg_parts[type_name] = t + if handler: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + lines.append( + f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'])" + ) + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + lines.append( + f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'], {tn})" + ) + else: + lines.append(f"{i}res['{ian}'] = o['{kn}']") + i = i[:-2] + lines.append(f"{i}except Exception as e:") + i = f"{i} " + lines.append( + f'{i}e.__notes__ = getattr(e, \'__notes__\', []) + [__c_avn("Structuring class {cl.__qualname__} @ attribute {an}", "{an}", __c_type_{an})]' + ) + lines.append(f"{i}errors.append(e)") + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = 
set(o.keys()) - __c_a", + " if unknown_fields:", + " errors.append(__c_feke('', __cl, unknown_fields))", + ] + + post_lines.append( + f" if errors: raise __c_cve('While structuring ' + {cl_name!r}, errors, __cl)" + ) + if not pi_lines: + instantiation_lines = ( + [" try:"] + + [" return __cl("] + + [f" {line}" for line in invocation_lines] + + [" )"] + + [ + f" except Exception as exc: raise __c_cve('While structuring ' + {cl_name!r}, [exc], __cl)" + ] + ) + else: + instantiation_lines = ( + [" try:"] + + [" instance = __cl("] + + [f" {line}" for line in invocation_lines] + + [" )"] + + [ + f" except Exception as exc: raise __c_cve('While structuring ' + {cl_name!r}, [exc], __cl)" + ] + ) + pi_lines.append(" return instance") + else: + non_required = [] + # The first loop deals with required args. + for a in attrs: + an = a.name + override = kwargs.get(an, neutral) + if override.omit: + continue + if override.omit is None and not a.init and not _cattrs_include_init_false: + continue + if a.default is not NOTHING: + non_required.append(a) + continue + t = a.type + if isinstance(t, TypeVar): + t = typevar_map.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, typevar_map) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + if override.struct_hook is not None: + # If the user has requested an override, just use that. + handler = override.struct_hook + else: + handler = find_structure_handler( + a, t, converter, _cattrs_prefer_attrib_converters + ) + + if override.rename is None: + kn = an if not _cattrs_use_alias else a.alias + else: + kn = override.rename + allowed_fields.add(kn) + + if not a.init: + if handler is not None: + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + pi_line = f" instance.{an} = {struct_handler_name}(o['{kn}'])" + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + pi_line = ( + f" instance.{an} = {struct_handler_name}(o['{kn}'], {tn})" + ) + else: + pi_line = f" instance.{an} = o['{kn}']" + + pi_lines.append(pi_line) + else: + if handler: + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + invocation_line = f"{struct_handler_name}(o['{kn}'])," + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + invocation_line = f"{struct_handler_name}(o['{kn}'], {tn})," + else: + invocation_line = f"o['{kn}']," + + if a.kw_only: + invocation_line = f"{a.alias}={invocation_line}" + invocation_lines.append(invocation_line) + + # The second loop is for optional args. + if non_required: + invocation_lines.append("**res,") + lines.append(" res = {}") + + for a in non_required: + an = a.name + override = kwargs.get(an, neutral) + t = a.type + if isinstance(t, TypeVar): + t = typevar_map.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, typevar_map) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + if override.struct_hook is not None: + # If the user has requested an override, just use that. 
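A per-attribute `struct_hook` override, as handled above, is the usual way to special-case a single field; a hedged sketch using an ISO date string (the hook and class names are illustrative):

from datetime import date

from attrs import define

from cattrs import Converter
from cattrs.gen import make_dict_structure_fn, override

@define
class Event:
    name: str
    when: date

conv = Converter()
hook = make_dict_structure_fn(
    Event,
    conv,
    # The override hook receives (value, type); the type is unused here.
    when=override(struct_hook=lambda v, _: date.fromisoformat(v)),
)
conv.register_structure_hook(Event, hook)

assert conv.structure({"name": "launch", "when": "2024-05-01"}, Event) == Event(
    "launch", date(2024, 5, 1)
)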
+ handler = override.struct_hook + else: + handler = find_structure_handler( + a, t, converter, _cattrs_prefer_attrib_converters + ) + + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + + if override.rename is None: + kn = an if not _cattrs_use_alias else a.alias + else: + kn = override.rename + allowed_fields.add(kn) + if not a.init: + pi_lines.append(f" if '{kn}' in o:") + if handler: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + pi_lines.append( + f" instance.{an} = {struct_handler_name}(o['{kn}'])" + ) + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + pi_lines.append( + f" instance.{an} = {struct_handler_name}(o['{kn}'], {tn})" + ) + else: + pi_lines.append(f" instance.{an} = o['{kn}']") + else: + post_lines.append(f" if '{kn}' in o:") + if handler: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + post_lines.append( + f" res['{a.alias}'] = {struct_handler_name}(o['{kn}'])" + ) + else: + tn = f"__c_type_{an}" + internal_arg_parts[tn] = t + post_lines.append( + f" res['{a.alias}'] = {struct_handler_name}(o['{kn}'], {tn})" + ) + else: + post_lines.append(f" res['{a.alias}'] = o['{kn}']") + if not pi_lines: + instantiation_lines = ( + [" return __cl("] + + [f" {line}" for line in invocation_lines] + + [" )"] + ) + else: + instantiation_lines = ( + [" instance = __cl("] + + [f" {line}" for line in invocation_lines] + + [" )"] + ) + pi_lines.append(" return instance") + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = set(o.keys()) - __c_a", + " if unknown_fields:", + " raise __c_feke('', __cl, unknown_fields)", + ] + + # At the end, we create the function header. + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = [ + f"def {fn_name}(o, _, {internal_arg_line}):", + *lines, + *post_lines, + *instantiation_lines, + *pi_lines, + ] + + script = "\n".join(total_lines) + fname = generate_unique_filename( + cl, "structure", lines=total_lines if _cattrs_use_linecache else [] + ) + + eval(compile(script, fname, "exec"), globs) + + res = globs[fn_name] + res.overrides = kwargs + + return res + + +def make_dict_structure_fn( + cl: type[T], + converter: BaseConverter, + _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter", + _cattrs_use_linecache: bool = True, + _cattrs_prefer_attrib_converters: ( + bool | Literal["from_converter"] + ) = "from_converter", + _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter", + _cattrs_use_alias: bool = False, + _cattrs_include_init_false: bool = False, + **kwargs: AttributeOverride, +) -> DictStructureFn[T]: + """ + Generate a specialized dict structuring function for an attrs class or + dataclass. + + Any provided overrides are attached to the generated function under the + `overrides` attribute. + + :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a + `ForbiddenExtraKeysError` if unknown keys are encountered. + :param _cattrs_use_linecache: Whether to store the source code in the Python + linecache. + :param _cattrs_prefer_attrib_converters: If an _attrs_ converter is present on a + field, use it instead of processing the field normally. + :param _cattrs_detailed_validation: Whether to use a slower mode that produces + more detailed errors. 
+ :param _cattrs_use_alias: If true, the attribute alias will be used as the + dictionary key by default. + :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False` + will be included. + + .. versionadded:: 23.2.0 *_cattrs_use_alias* + .. versionadded:: 23.2.0 *_cattrs_include_init_false* + .. versionchanged:: 23.2.0 + The `_cattrs_forbid_extra_keys` and `_cattrs_detailed_validation` parameters + take their values from the given converter by default. + .. versionchanged:: 24.1.0 + The `_cattrs_prefer_attrib_converters` parameter takes its value from the given + converter by default. + """ + + mapping = {} + if is_generic(cl): + base = get_origin(cl) + mapping = generate_mapping(cl, mapping) + if base is not None: + cl = base + + for base in getattr(cl, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = generate_mapping(base, mapping) + break + + attrs = adapted_fields(cl) + + if any(isinstance(a.type, str) for a in attrs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + + # We keep track of what we're generating to help with recursive + # class graphs. + try: + working_set = already_generating.working_set + except AttributeError: + working_set = set() + already_generating.working_set = working_set + else: + if cl in working_set: + raise RecursionError() + + working_set.add(cl) + + try: + return make_dict_structure_fn_from_attrs( + attrs, + cl, + converter, + mapping, + _cattrs_forbid_extra_keys=_cattrs_forbid_extra_keys, + _cattrs_use_linecache=_cattrs_use_linecache, + _cattrs_prefer_attrib_converters=_cattrs_prefer_attrib_converters, + _cattrs_detailed_validation=_cattrs_detailed_validation, + _cattrs_use_alias=_cattrs_use_alias, + _cattrs_include_init_false=_cattrs_include_init_false, + **kwargs, + ) + finally: + working_set.remove(cl) + if not working_set: + del already_generating.working_set + + +IterableUnstructureFn = Callable[[Iterable[Any]], Any] + + +#: A type alias for heterogeneous tuple unstructure hooks. +HeteroTupleUnstructureFn: TypeAlias = Callable[[Tuple[Any, ...]], Any] + + +def make_hetero_tuple_unstructure_fn( + cl: Any, + converter: BaseConverter, + unstructure_to: Any = None, + type_args: tuple | None = None, +) -> HeteroTupleUnstructureFn: + """Generate a specialized unstructure function for a heterogenous tuple. + + :param type_args: If provided, override the type arguments. + """ + fn_name = "unstructure_tuple" + + type_args = get_args(cl) if type_args is None else type_args + + # We can do the dispatch here and now. 
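For context, the typed-tuple handling these factories back looks roughly like this from the caller's side (a sketch, not taken from the patch):

from typing import Tuple

from cattrs import Converter

conv = Converter()

# Heterogeneous tuples structure each position with its own declared type.
assert conv.structure(["1", 2, "3.5"], Tuple[int, str, float]) == (1, "2", 3.5)

# A trailing Ellipsis marks a homogeneous tuple of arbitrary length.
assert conv.structure(["1", "2", "3"], Tuple[int, ...]) == (1, 2, 3)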
+ handlers = [converter.get_unstructure_hook(type_arg) for type_arg in type_args] + + globs = {f"__cattr_u_{i}": h for i, h in enumerate(handlers)} + if unstructure_to is not tuple: + globs["__cattr_seq_cl"] = unstructure_to or cl + lines = [] + + lines.append(f"def {fn_name}(tup):") + if unstructure_to is not tuple: + lines.append(" res = __cattr_seq_cl((") + else: + lines.append(" res = (") + for i in range(len(handlers)): + if handlers[i] == identity: + lines.append(f" tup[{i}],") + else: + lines.append(f" __cattr_u_{i}(tup[{i}]),") + + if unstructure_to is not tuple: + lines.append(" ))") + else: + lines.append(" )") + + total_lines = [*lines, " return res"] + + eval(compile("\n".join(total_lines), "", "exec"), globs) + + return globs[fn_name] + + +MappingUnstructureFn = Callable[[Mapping[Any, Any]], Any] + + +def make_mapping_unstructure_fn( + cl: Any, + converter: BaseConverter, + unstructure_to: Any = None, + key_handler: Callable[[Any, Any | None], Any] | None = None, +) -> MappingUnstructureFn: + """Generate a specialized unstructure function for a mapping.""" + kh = key_handler or converter.unstructure + val_handler = converter.unstructure + + fn_name = "unstructure_mapping" + + # Let's try fishing out the type args. + if getattr(cl, "__args__", None) is not None: + args = get_args(cl) + if len(args) == 2: + key_arg, val_arg = args + else: + # Probably a Counter + key_arg, val_arg = args, Any + # We can do the dispatch here and now. + kh = key_handler or converter.get_unstructure_hook(key_arg, cache_result=False) + if kh == identity: + kh = None + + val_handler = converter.get_unstructure_hook(val_arg, cache_result=False) + if val_handler == identity: + val_handler = None + + globs = { + "__cattr_mapping_cl": unstructure_to or cl, + "__cattr_k_u": kh, + "__cattr_v_u": val_handler, + } + + k_u = "__cattr_k_u(k)" if kh is not None else "k" + v_u = "__cattr_v_u(v)" if val_handler is not None else "v" + + lines = [] + + lines.append(f"def {fn_name}(mapping):") + lines.append( + f" res = __cattr_mapping_cl(({k_u}, {v_u}) for k, v in mapping.items())" + ) + + total_lines = [*lines, " return res"] + + eval(compile("\n".join(total_lines), "", "exec"), globs) + + return globs[fn_name] + + +MappingStructureFn = Callable[[Mapping[Any, Any], Any], T] + + +# This factory is here for backwards compatibility and circular imports. +def mapping_structure_factory( + cl: type[T], + converter: BaseConverter, + structure_to: type = dict, + key_type=NOTHING, + val_type=NOTHING, + detailed_validation: bool = True, +) -> MappingStructureFn[T]: + """Generate a specialized structure function for a mapping.""" + fn_name = "structure_mapping" + + globs: dict[str, type] = {"__cattr_mapping_cl": structure_to} + + lines = [] + internal_arg_parts = {} + + # Let's try fishing out the type args. + if not is_bare(cl): + args = get_args(cl) + if len(args) == 2: + key_arg_cand, val_arg_cand = args + if key_type is NOTHING: + key_type = key_arg_cand + if val_type is NOTHING: + val_type = val_arg_cand + else: + if key_type is not NOTHING and val_type is NOTHING: + (val_type,) = args + elif key_type is NOTHING and val_type is not NOTHING: + (key_type,) = args + else: + # Probably a Counter + (key_type,) = args + val_type = Any + + is_bare_dict = val_type in ANIES and key_type in ANIES + if not is_bare_dict: + # We can do the dispatch here and now. 
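A short, hedged illustration of the mapping and Counter structuring these factories generate:

import collections
import typing

from cattrs import Converter

conv = Converter()

# Typed mappings structure both keys and values.
assert conv.structure({"1": "2"}, typing.Dict[int, float]) == {1: 2.0}

# Counters are treated as mappings with int values.
assert conv.structure({"a": "3"}, typing.Counter[str]) == collections.Counter({"a": 3})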
+ key_handler = converter.get_structure_hook(key_type, cache_result=False) + if key_handler == converter._structure_call: + key_handler = key_type + + val_handler = converter.get_structure_hook(val_type, cache_result=False) + if val_handler == converter._structure_call: + val_handler = val_type + + globs["__cattr_k_t"] = key_type + globs["__cattr_v_t"] = val_type + globs["__cattr_k_s"] = key_handler + globs["__cattr_v_s"] = val_handler + k_s = ( + "__cattr_k_s(k, __cattr_k_t)" + if key_handler != key_type + else "__cattr_k_s(k)" + ) + v_s = ( + "__cattr_v_s(v, __cattr_v_t)" + if val_handler != val_type + else "__cattr_v_s(v)" + ) + else: + is_bare_dict = True + + if is_bare_dict: + # No args, it's a bare dict. + lines.append(" res = dict(mapping)") + else: + if detailed_validation: + internal_arg_parts["IterableValidationError"] = IterableValidationError + internal_arg_parts["IterableValidationNote"] = IterableValidationNote + internal_arg_parts["val_type"] = ( + val_type if val_type is not NOTHING else Any + ) + internal_arg_parts["key_type"] = ( + key_type if key_type is not NOTHING else Any + ) + globs["enumerate"] = enumerate + + lines.append(" res = {}; errors = []") + lines.append(" for k, v in mapping.items():") + lines.append(" try:") + lines.append(f" value = {v_s}") + lines.append(" except Exception as e:") + lines.append( + " e.__notes__ = getattr(e, '__notes__', []) + [IterableValidationNote(f'Structuring mapping value @ key {k!r}', k, val_type)]" + ) + lines.append(" errors.append(e)") + lines.append(" continue") + lines.append(" try:") + lines.append(f" key = {k_s}") + lines.append(" res[key] = value") + lines.append(" except Exception as e:") + lines.append( + " e.__notes__ = getattr(e, '__notes__', []) + [IterableValidationNote(f'Structuring mapping key @ key {k!r}', k, key_type)]" + ) + lines.append(" errors.append(e)") + lines.append(" if errors:") + lines.append( + f" raise IterableValidationError('While structuring ' + {repr(cl)!r}, errors, __cattr_mapping_cl)" + ) + else: + lines.append(f" res = {{{k_s}: {v_s} for k, v in mapping.items()}}") + if structure_to is not dict: + lines.append(" res = __cattr_mapping_cl(res)") + + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + if internal_arg_line: + internal_arg_line = f", {internal_arg_line}" + for k, v in internal_arg_parts.items(): + globs[k] = v + + def_line = f"def {fn_name}(mapping, _{internal_arg_line}):" + total_lines = [def_line, *lines, " return res"] + script = "\n".join(total_lines) + + eval(compile(script, "", "exec"), globs) + + return globs[fn_name] + + +make_mapping_structure_fn: Final = mapping_structure_factory + + +# This factory is here for backwards compatibility and circular imports. +def iterable_unstructure_factory( + cl: Any, converter: BaseConverter, unstructure_to: Any = None +) -> UnstructureHook: + """A hook factory for unstructuring iterables. + + :param unstructure_to: Force unstructuring to this type, if provided. + """ + handler = converter.unstructure + + # Let's try fishing out the type args + # Unspecified tuples have `__args__` as empty tuples, so guard + # against IndexError. + if getattr(cl, "__args__", None) not in (None, ()): + type_arg = cl.__args__[0] + if isinstance(type_arg, TypeVar): + type_arg = getattr(type_arg, "__default__", Any) + handler = converter.get_unstructure_hook(type_arg, cache_result=False) + if handler == identity: + # Save ourselves the trouble of iterating over it all. 
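+        # With an identity element hook, the collection class itself (or the
+        # `unstructure_to` override) is returned as the hook, so unstructuring
+        # is just one constructor call with no per-element work.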
+ return unstructure_to or cl + + def unstructure_iterable(iterable, _seq_cl=unstructure_to or cl, _hook=handler): + return _seq_cl(_hook(i) for i in iterable) + + return unstructure_iterable + + +make_iterable_unstructure_fn: Final = iterable_unstructure_factory diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py new file mode 100644 index 0000000..a6dcd03 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py @@ -0,0 +1,19 @@ +from __future__ import annotations + +from threading import local +from typing import Any, Callable + +from attrs import frozen + + +@frozen +class AttributeOverride: + omit_if_default: bool | None = None + rename: str | None = None + omit: bool | None = None # Omit the field completely. + struct_hook: Callable[[Any, Any], Any] | None = None # Structure hook to use. + unstruct_hook: Callable[[Any], Any] | None = None # Structure hook to use. + + +neutral = AttributeOverride() +already_generating = local() diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py new file mode 100644 index 0000000..069c48c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from typing import TypeVar + +from .._compat import get_args, get_origin, is_generic + + +def _tvar_has_default(tvar) -> bool: + """Does `tvar` have a default? + + In CPython 3.13+ and typing_extensions>=4.12.0: + - TypeVars have a `no_default()` method for detecting + if a TypeVar has a default + - TypeVars with `default=None` have `__default__` set to `None` + - TypeVars with no `default` parameter passed + have `__default__` set to `typing(_extensions).NoDefault + + On typing_exensions<4.12.0: + - TypeVars do not have a `no_default()` method for detecting + if a TypeVar has a default + - TypeVars with `default=None` have `__default__` set to `NoneType` + - TypeVars with no `default` parameter passed + have `__default__` set to `typing(_extensions).NoDefault + """ + try: + return tvar.has_default() + except AttributeError: + # compatibility for typing_extensions<4.12.0 + return getattr(tvar, "__default__", None) is not None + + +def generate_mapping(cl: type, old_mapping: dict[str, type] = {}) -> dict[str, type]: + """Generate a mapping of typevars to actual types for a generic class.""" + mapping = dict(old_mapping) + + origin = get_origin(cl) + + if origin is not None: + # To handle the cases where classes in the typing module are using + # the GenericAlias structure but aren't a Generic and hence + # end up in this function but do not have an `__parameters__` + # attribute. These classes are interface types, for example + # `typing.Hashable`. + parameters = getattr(get_origin(cl), "__parameters__", None) + if parameters is None: + return dict(old_mapping) + + for p, t in zip(parameters, get_args(cl)): + if isinstance(t, TypeVar): + continue + mapping[p.__name__] = t + + elif is_generic(cl): + # Origin is None, so this may be a subclass of a generic class. + orig_bases = cl.__orig_bases__ + for base in orig_bases: + if not hasattr(base, "__args__"): + continue + base_args = base.__args__ + if hasattr(base.__origin__, "__parameters__"): + base_params = base.__origin__.__parameters__ + elif any(_tvar_has_default(base_arg) for base_arg in base_args): + # TypeVar with a default e.g. 
PEP 696
+            # https://www.python.org/dev/peps/pep-0696/
+            # Extract the defaults for the TypeVars and insert
+            # them into the mapping
+            mapping_params = [
+                (base_arg, base_arg.__default__)
+                for base_arg in base_args
+                if _tvar_has_default(base_arg)
+            ]
+            base_params, base_args = zip(*mapping_params)
+        else:
+            continue
+
+        for param, arg in zip(base_params, base_args):
+            mapping[param.__name__] = arg
+
+    return mapping
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py
new file mode 100644
index 0000000..04843cd
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py
@@ -0,0 +1,29 @@
+"""Line-cache functionality."""
+
+import linecache
+from typing import List
+
+
+def generate_unique_filename(cls: type, func_name: str, lines: List[str] = []) -> str:
+    """
+    Create a "filename" suitable for a function being generated.
+
+    If *lines* are provided, insert them in the first free spot or stop
+    if a duplicate is found.
+    """
+    extra = ""
+    count = 1
+
+    while True:
+        unique_filename = "<cattrs generated {} {}.{}{}>".format(
+            func_name, cls.__module__, getattr(cls, "__qualname__", cls.__name__), extra
+        )
+        if not lines:
+            return unique_filename
+        cache_line = (len("\n".join(lines)), None, lines, unique_filename)
+        if linecache.cache.setdefault(unique_filename, cache_line) == cache_line:
+            return unique_filename
+
+        # Looks like this spot is taken. Try again.
+        count += 1
+        extra = f"-{count}"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py
new file mode 100644
index 0000000..4e63143
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from attrs import NOTHING, Attribute, Factory
+
+from .._compat import is_bare_final
+from ..dispatch import StructureHook
+from ..fns import raise_error
+
+if TYPE_CHECKING:
+    from ..converters import BaseConverter
+
+
+def find_structure_handler(
+    a: Attribute, type: Any, c: BaseConverter, prefer_attrs_converters: bool = False
+) -> StructureHook | None:
+    """Find the appropriate structure handler to use.
+
+    Return `None` if no handler should be used.
+    """
+    try:
+        if a.converter is not None and prefer_attrs_converters:
+            # If the user has requested to use attrib converters, use nothing
+            # so it falls back to that.
+            handler = None
+        elif (
+            a.converter is not None and not prefer_attrs_converters and type is not None
+        ):
+            handler = c.get_structure_hook(type, cache_result=False)
+            if handler == raise_error:
+                handler = None
+        elif type is not None:
+            if (
+                is_bare_final(type)
+                and a.default is not NOTHING
+                and not isinstance(a.default, Factory)
+            ):
+                # This is a special case where we can use the
+                # type of the default to dispatch on.
+                type = a.default.__class__
+                handler = c.get_structure_hook(type, cache_result=False)
+                if handler == c._structure_call:
+                    # Finals can't really be used with _structure_call, so
+                    # we wrap it so the rest of the toolchain doesn't get
+                    # confused.
+
+                    def handler(v, _, _h=handler):
+                        return _h(v, type)
+
+            else:
+                handler = c.get_structure_hook(type, cache_result=False)
+        else:
+            handler = c.structure
+        return handler
+    except RecursionError:
+        # This means we're dealing with a reference cycle, so use late binding.
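+        # `c.structure` re-dispatches on the type at call time; by then the
+        # hook for the class that caused the cycle should already exist.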
+ return c.structure diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py new file mode 100644 index 0000000..5614d6f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py @@ -0,0 +1,611 @@ +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar + +from attrs import NOTHING, Attribute + +try: + from inspect import get_annotations + + def get_annots(cl) -> dict[str, Any]: + return get_annotations(cl, eval_str=True) + +except ImportError: + # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older + def get_annots(cl) -> dict[str, Any]: + if isinstance(cl, type): + ann = cl.__dict__.get("__annotations__", {}) + else: + ann = getattr(cl, "__annotations__", {}) + return ann + + +try: + from typing_extensions import _TypedDictMeta +except ImportError: + _TypedDictMeta = None + +from .._compat import ( + TypedDict, + get_full_type_hints, + get_notrequired_base, + get_origin, + is_annotated, + is_bare, + is_generic, +) +from .._generics import deep_copy_with +from ..errors import ( + AttributeValidationNote, + ClassValidationError, + ForbiddenExtraKeysError, + StructureHandlerNotFoundError, +) +from ..fns import identity +from . import AttributeOverride +from ._consts import already_generating, neutral +from ._generics import generate_mapping +from ._lc import generate_unique_filename +from ._shared import find_structure_handler + +if TYPE_CHECKING: + from ..converters import BaseConverter + +__all__ = ["make_dict_unstructure_fn", "make_dict_structure_fn"] + +T = TypeVar("T", bound=TypedDict) + + +def make_dict_unstructure_fn( + cl: type[T], + converter: BaseConverter, + _cattrs_use_linecache: bool = True, + **kwargs: AttributeOverride, +) -> Callable[[T], dict[str, Any]]: + """ + Generate a specialized dict unstructuring function for a TypedDict. + + :param cl: A `TypedDict` class. + :param converter: A Converter instance to use for unstructuring nested fields. + :param kwargs: A mapping of field names to an `AttributeOverride`, for + customization. + :param _cattrs_detailed_validation: Whether to store the generated code in the + _linecache_, for easier debugging and better stack traces. + """ + origin = get_origin(cl) + attrs = _adapted_fields(origin or cl) # type: ignore + req_keys = _required_keys(origin or cl) + + mapping = {} + if is_generic(cl): + mapping = generate_mapping(cl, mapping) + + for base in getattr(origin, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = generate_mapping(base, mapping) + break + + # It's possible for origin to be None if this is a subclass + # of a generic class. + if origin is not None: + cl = origin + + cl_name = cl.__name__ + fn_name = "unstructure_typeddict_" + cl_name + globs = {} + lines = [] + internal_arg_parts = {} + + # We keep track of what we're generating to help with recursive + # class graphs. + try: + working_set = already_generating.working_set + except AttributeError: + working_set = set() + already_generating.working_set = working_set + if cl in working_set: + raise RecursionError() + working_set.add(cl) + + try: + # We want to short-circuit in certain cases and return the identity + # function. 
+ # We short-circuit if all of these are true: + # * no attributes have been overridden + # * all attributes resolve to `converter._unstructure_identity` + for a in attrs: + attr_name = a.name + override = kwargs.get(attr_name, neutral) + if override != neutral: + break + handler = None + t = a.type + + if isinstance(t, TypeVar): + if t.__name__ in mapping: + t = mapping[t.__name__] + else: + # Unbound typevars use late binding. + handler = converter.unstructure + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + if handler is None: + nrb = get_notrequired_base(t) + if nrb is not NOTHING: + t = nrb + try: + handler = converter.get_unstructure_hook(t) + except RecursionError: + # There's a circular reference somewhere down the line + handler = converter.unstructure + is_identity = handler == identity + if not is_identity: + break + else: + # We've not broken the loop. + return identity + + for ix, a in enumerate(attrs): + attr_name = a.name + override = kwargs.get(attr_name, neutral) + if override.omit: + lines.append(f" res.pop('{attr_name}', None)") + continue + if override.rename is not None: + # We also need to pop when renaming, since we're copying + # the original. + lines.append(f" res.pop('{attr_name}', None)") + kn = attr_name if override.rename is None else override.rename + attr_required = attr_name in req_keys + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + handler = None + if override.unstruct_hook is not None: + handler = override.unstruct_hook + else: + t = a.type + + if isinstance(t, TypeVar): + if t.__name__ in mapping: + t = mapping[t.__name__] + else: + handler = converter.unstructure + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + if handler is None: + nrb = get_notrequired_base(t) + if nrb is not NOTHING: + t = nrb + try: + handler = converter.get_unstructure_hook(t) + except RecursionError: + # There's a circular reference somewhere down the line + handler = converter.unstructure + + is_identity = handler == identity + + if not is_identity: + unstruct_handler_name = f"__c_unstr_{ix}" + globs[unstruct_handler_name] = handler + internal_arg_parts[unstruct_handler_name] = handler + invoke = f"{unstruct_handler_name}(instance['{attr_name}'])" + elif override.rename is None: + # We're not doing anything to this attribute, so + # it'll already be present in the input dict. + continue + else: + # Probably renamed, we just fetch it. + invoke = f"instance['{attr_name}']" + + if attr_required: + # No default or no override. 
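+                # Required keys are guaranteed to be present on the instance,
+                # so they are copied unconditionally; non-required keys (the
+                # else branch below) are guarded by an `in` check first.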
+ lines.append(f" res['{kn}'] = {invoke}") + else: + lines.append(f" if '{attr_name}' in instance: res['{kn}'] = {invoke}") + + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + if internal_arg_line: + internal_arg_line = f", {internal_arg_line}" + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = [ + f"def {fn_name}(instance{internal_arg_line}):", + " res = instance.copy()", + *lines, + " return res", + ] + script = "\n".join(total_lines) + + fname = generate_unique_filename( + cl, "unstructure", lines=total_lines if _cattrs_use_linecache else [] + ) + + eval(compile(script, fname, "exec"), globs) + finally: + working_set.remove(cl) + if not working_set: + del already_generating.working_set + + return globs[fn_name] + + +def make_dict_structure_fn( + cl: Any, + converter: BaseConverter, + _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter", + _cattrs_use_linecache: bool = True, + _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter", + **kwargs: AttributeOverride, +) -> Callable[[dict, Any], Any]: + """Generate a specialized dict structuring function for typed dicts. + + :param cl: A `TypedDict` class. + :param converter: A Converter instance to use for structuring nested fields. + :param kwargs: A mapping of field names to an `AttributeOverride`, for + customization. + :param _cattrs_detailed_validation: Whether to use a slower mode that produces + more detailed errors. + :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a + `ForbiddenExtraKeysError` if unknown keys are encountered. + :param _cattrs_detailed_validation: Whether to store the generated code in the + _linecache_, for easier debugging and better stack traces. + + .. versionchanged:: 23.2.0 + The `_cattrs_forbid_extra_keys` and `_cattrs_detailed_validation` parameters + take their values from the given converter by default. + """ + + mapping = {} + if is_generic(cl): + base = get_origin(cl) + mapping = generate_mapping(cl, mapping) + if base is not None: + # It's possible for this to be a subclass of a generic, + # so no origin. + cl = base + + for base in getattr(cl, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = generate_mapping(base, mapping) + break + + cl_name = cl.__name__ + fn_name = "structure_" + cl_name + + # We have generic parameters and need to generate a unique name for the function + for p in getattr(cl, "__parameters__", ()): + try: + name_base = mapping[p.__name__] + except KeyError: + pn = p.__name__ + raise StructureHandlerNotFoundError( + f"Missing type for generic argument {pn}, specify it when structuring.", + p, + ) from None + name = getattr(name_base, "__name__", None) or str(name_base) + # `<>` can be present in lambdas + # `|` can be present in unions + name = re.sub(r"[\[\.\] ,<>]", "_", name) + name = re.sub(r"\|", "u", name) + fn_name += f"_{name}" + + internal_arg_parts = {"__cl": cl} + globs = {} + lines = [] + post_lines = [] + + attrs = _adapted_fields(cl) + req_keys = _required_keys(cl) + + allowed_fields = set() + if _cattrs_forbid_extra_keys == "from_converter": + # BaseConverter doesn't have it so we're careful. 
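+        # Only the full `Converter` exposes `forbid_extra_keys`; defaulting to
+        # False keeps a plain BaseConverter working here.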
+ _cattrs_forbid_extra_keys = getattr(converter, "forbid_extra_keys", False) + if _cattrs_detailed_validation == "from_converter": + _cattrs_detailed_validation = converter.detailed_validation + + if _cattrs_forbid_extra_keys: + globs["__c_a"] = allowed_fields + globs["__c_feke"] = ForbiddenExtraKeysError + + lines.append(" res = o.copy()") + + if _cattrs_detailed_validation: + lines.append(" errors = []") + internal_arg_parts["__c_cve"] = ClassValidationError + internal_arg_parts["__c_avn"] = AttributeValidationNote + for ix, a in enumerate(attrs): + an = a.name + attr_required = an in req_keys + override = kwargs.get(an, neutral) + if override.omit: + continue + t = a.type + + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + nrb = get_notrequired_base(t) + if nrb is not NOTHING: + t = nrb + + if is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + if override.struct_hook is not None: + # If the user has requested an override, just use that. + handler = override.struct_hook + else: + handler = find_structure_handler(a, t, converter) + + struct_handler_name = f"__c_structure_{ix}" + internal_arg_parts[struct_handler_name] = handler + + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + i = " " + if not attr_required: + lines.append(f"{i}if '{kn}' in o:") + i = f"{i} " + lines.append(f"{i}try:") + i = f"{i} " + + tn = f"__c_type_{ix}" + internal_arg_parts[tn] = t + + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + lines.append(f"{i}res['{an}'] = {struct_handler_name}(o['{kn}'])") + else: + lines.append(f"{i}res['{an}'] = {struct_handler_name}(o['{kn}'], {tn})") + if override.rename is not None: + lines.append(f"{i}del res['{kn}']") + i = i[:-2] + lines.append(f"{i}except Exception as e:") + i = f"{i} " + lines.append( + f'{i}e.__notes__ = [*getattr(e, \'__notes__\', []), __c_avn("Structuring typeddict {cl.__qualname__} @ attribute {an}", "{an}", {tn})]' + ) + lines.append(f"{i}errors.append(e)") + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = o.keys() - __c_a", + " if unknown_fields:", + " errors.append(__c_feke('', __cl, unknown_fields))", + ] + + post_lines.append( + f" if errors: raise __c_cve('While structuring ' + {cl.__name__!r}, errors, __cl)" + ) + else: + non_required = [] + + # The first loop deals with required args. + for ix, a in enumerate(attrs): + an = a.name + attr_required = an in req_keys + override = kwargs.get(an, neutral) + if override.omit: + continue + if not attr_required: + non_required.append((ix, a)) + continue + + t = a.type + + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + nrb = get_notrequired_base(t) + if nrb is not NOTHING: + t = nrb + + if override.struct_hook is not None: + handler = override.struct_hook + else: + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. 
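+                # If the resolved hook turns out to be `_structure_call`, the
+                # generated line below calls the type directly and omits the
+                # type argument (roughly `res['x'] = int(o['x'])`, as an
+                # assumed illustration).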
+ handler = converter.get_structure_hook(t) + + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + + struct_handler_name = f"__c_structure_{ix}" + internal_arg_parts[struct_handler_name] = handler + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + invocation_line = f" res['{an}'] = {struct_handler_name}(o['{kn}'])" + else: + tn = f"__c_type_{ix}" + internal_arg_parts[tn] = t + invocation_line = ( + f" res['{an}'] = {struct_handler_name}(o['{kn}'], {tn})" + ) + + lines.append(invocation_line) + if override.rename is not None: + lines.append(f" del res['{override.rename}']") + + # The second loop is for optional args. + if non_required: + for ix, a in non_required: + an = a.name + override = kwargs.get(an, neutral) + t = a.type + + nrb = get_notrequired_base(t) + if nrb is not NOTHING: + t = nrb + + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + if override.struct_hook is not None: + handler = override.struct_hook + else: + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + handler = converter.get_structure_hook(t) + + struct_handler_name = f"__c_structure_{ix}" + internal_arg_parts[struct_handler_name] = handler + + ian = an + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + post_lines.append(f" if '{kn}' in o:") + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + post_lines.append( + f" res['{ian}'] = {struct_handler_name}(o['{kn}'])" + ) + else: + tn = f"__c_type_{ix}" + internal_arg_parts[tn] = t + post_lines.append( + f" res['{ian}'] = {struct_handler_name}(o['{kn}'], {tn})" + ) + if override.rename is not None: + lines.append(f" res.pop('{override.rename}', None)") + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = o.keys() - __c_a", + " if unknown_fields:", + " raise __c_feke('', __cl, unknown_fields)", + ] + + # At the end, we create the function header. + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = [ + f"def {fn_name}(o, _, {internal_arg_line}):", + *lines, + *post_lines, + " return res", + ] + + script = "\n".join(total_lines) + fname = generate_unique_filename( + cl, "structure", lines=total_lines if _cattrs_use_linecache else [] + ) + + eval(compile(script, fname, "exec"), globs) + return globs[fn_name] + + +def _adapted_fields(cls: Any) -> list[Attribute]: + annotations = get_annots(cls) + hints = get_full_type_hints(cls) + return [ + Attribute( + n, + NOTHING, + None, + False, + False, + False, + False, + False, + type=hints[n] if n in hints else annotations[n], + ) + for n, a in annotations.items() + ] + + +def _is_extensions_typeddict(cls) -> bool: + if _TypedDictMeta is None: + return False + return cls.__class__ is _TypedDictMeta or ( + is_generic(cls) and (cls.__origin__.__class__ is _TypedDictMeta) + ) + + +if sys.version_info >= (3, 11): + + def _required_keys(cls: type) -> set[str]: + return cls.__required_keys__ + +elif sys.version_info >= (3, 9): + from typing_extensions import Annotated, NotRequired, Required, get_args + + # Note that there is no `typing.Required` on 3.9 and 3.10, only in + # `typing_extensions`. 
Therefore, `typing.TypedDict` will not honor this + # annotation, only `typing_extensions.TypedDict`. + + def _required_keys(cls: type) -> set[str]: + """Our own processor for required keys.""" + if _is_extensions_typeddict(cls): + return cls.__required_keys__ + + # We vendor a part of the typing_extensions logic for + # gathering required keys. *sigh* + own_annotations = cls.__dict__.get("__annotations__", {}) + required_keys = set() + # On 3.8 - 3.10, typing.TypedDict doesn't put typeddict superclasses + # in the MRO, therefore we cannot handle non-required keys properly + # in some situations. Oh well. + for key in getattr(cls, "__required_keys__", []): + annotation_type = own_annotations[key] + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + annotation_origin = get_origin(annotation_type) + + if annotation_origin is NotRequired: + pass + elif cls.__total__: + required_keys.add(key) + return required_keys + +else: + from typing_extensions import Annotated, NotRequired, Required, get_args + + # On 3.8, typing.TypedDicts do not have __required_keys__. + + def _required_keys(cls: type) -> set[str]: + """Our own processor for required keys.""" + if _is_extensions_typeddict(cls): + return cls.__required_keys__ + + own_annotations = cls.__dict__.get("__annotations__", {}) + required_keys = set() + for key in own_annotations: + annotation_type = own_annotations[key] + + if is_annotated(annotation_type): + # If this is `Annotated`, we need to get the origin twice. + annotation_type = get_origin(annotation_type) + + annotation_origin = get_origin(annotation_type) + + if annotation_origin is Required: + required_keys.add(key) + elif annotation_origin is NotRequired: + pass + elif cls.__total__: + required_keys.add(key) + return required_keys diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py new file mode 100644 index 0000000..876576d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py @@ -0,0 +1,27 @@ +import sys +from datetime import datetime +from typing import Any, Callable, TypeVar + +if sys.version_info[:2] < (3, 10): + from typing_extensions import ParamSpec +else: + from typing import ParamSpec + + +def validate_datetime(v, _): + if not isinstance(v, datetime): + raise Exception(f"Expected datetime, got {v}") + return v + + +T = TypeVar("T") +P = ParamSpec("P") + + +def wrap(_: Callable[P, Any]) -> Callable[[Callable[..., T]], Callable[P, T]]: + """Wrap a `Converter` `__init__` in a type-safe way.""" + + def impl(x: Callable[..., T]) -> Callable[P, T]: + return x + + return impl diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py new file mode 100644 index 0000000..e73d131 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py @@ -0,0 +1,106 @@ +"""Preconfigured converters for bson.""" + +from base64 import b85decode, b85encode +from datetime import date, datetime +from typing import Any, Type, TypeVar, Union + +from bson import DEFAULT_CODEC_OPTIONS, CodecOptions, Int64, ObjectId, decode, encode + +from cattrs._compat import AbstractSet, is_mapping +from cattrs.gen import make_mapping_structure_fn + +from ..converters import BaseConverter, Converter +from ..dispatch import StructureHook +from ..strategies import configure_union_passthrough 
+from . import validate_datetime, wrap + +T = TypeVar("T") + + +class Base85Bytes(bytes): + """A subclass to help with binary key encoding/decoding.""" + + +class BsonConverter(Converter): + def dumps( + self, + obj: Any, + unstructure_as: Any = None, + check_keys: bool = False, + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + ) -> bytes: + return encode( + self.unstructure(obj, unstructure_as=unstructure_as), + check_keys=check_keys, + codec_options=codec_options, + ) + + def loads( + self, + data: bytes, + cl: Type[T], + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + ) -> T: + return self.structure(decode(data, codec_options=codec_options), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the bson library. + + * sets are serialized as lists + * byte mapping keys are base85-encoded into strings when unstructuring, and reverse + * non-string, non-byte mapping keys are coerced into strings when unstructuring + * a deserialization hook is registered for bson.ObjectId by default + """ + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + if issubclass(args[0], str): + key_handler = None + elif issubclass(args[0], bytes): + + def key_handler(k): + return b85encode(k).decode("utf8") + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + def gen_structure_mapping(cl: Any) -> StructureHook: + args = getattr(cl, "__args__", None) + if args and issubclass(args[0], bytes): + h = make_mapping_structure_fn(cl, converter, key_type=Base85Bytes) + else: + h = make_mapping_structure_fn(cl, converter) + return h + + converter.register_structure_hook(Base85Bytes, lambda v, _: b85decode(v)) + converter.register_unstructure_hook_factory(is_mapping, gen_unstructure_mapping) + converter.register_structure_hook_factory(is_mapping, gen_structure_mapping) + + converter.register_structure_hook(ObjectId, lambda v, _: ObjectId(v)) + configure_union_passthrough( + Union[str, bool, int, float, None, bytes, datetime, ObjectId, Int64], converter + ) + + # datetime inherits from date, so identity unstructure hook used + # here to prevent the date unstructure hook running. + converter.register_unstructure_hook(datetime, lambda v: v) + converter.register_structure_hook(datetime, validate_datetime) + converter.register_unstructure_hook(date, lambda v: v.isoformat()) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + + +@wrap(BsonConverter) +def make_converter(*args: Any, **kwargs: Any) -> BsonConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = BsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py new file mode 100644 index 0000000..73a9a97 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py @@ -0,0 +1,50 @@ +"""Preconfigured converters for cbor2.""" + +from datetime import date, datetime, timezone +from typing import Any, Type, TypeVar, Union + +from cbor2 import dumps, loads + +from cattrs._compat import AbstractSet + +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . 
import wrap + +T = TypeVar("T") + + +class Cbor2Converter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: bytes, cl: Type[T], **kwargs: Any) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the cbor2 library. + + * datetimes are serialized as timestamp floats + * sets are serialized as lists + """ + converter.register_unstructure_hook(datetime, lambda v: v.timestamp()) + converter.register_structure_hook( + datetime, lambda v, _: datetime.fromtimestamp(v, timezone.utc) + ) + converter.register_unstructure_hook(date, lambda v: v.isoformat()) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + configure_union_passthrough(Union[str, bool, int, float, None, bytes], converter) + + +@wrap(Cbor2Converter) +def make_converter(*args: Any, **kwargs: Any) -> Cbor2Converter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = Cbor2Converter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py new file mode 100644 index 0000000..acc82ae --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py @@ -0,0 +1,56 @@ +"""Preconfigured converters for the stdlib json.""" + +from base64 import b85decode, b85encode +from datetime import date, datetime +from json import dumps, loads +from typing import Any, Type, TypeVar, Union + +from .._compat import AbstractSet, Counter +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . import wrap + +T = TypeVar("T") + + +class JsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: Union[bytes, str], cl: Type[T], **kwargs: Any) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the stdlib json module. 
+ + * bytes are serialized as base85 strings + * datetimes are serialized as ISO 8601 + * counters are serialized as dicts + * sets are serialized as lists + * union passthrough is configured for unions of strings, bools, ints, + floats and None + """ + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + converter.register_unstructure_hook(datetime, lambda v: v.isoformat()) + converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v)) + converter.register_unstructure_hook(date, lambda v: v.isoformat()) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + configure_union_passthrough(Union[str, bool, int, float, None], converter) + + +@wrap(JsonConverter) +def make_converter(*args: Any, **kwargs: Any) -> JsonConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + Counter: dict, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = JsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py new file mode 100644 index 0000000..dd7c369 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py @@ -0,0 +1,54 @@ +"""Preconfigured converters for msgpack.""" + +from datetime import date, datetime, time, timezone +from typing import Any, Type, TypeVar, Union + +from msgpack import dumps, loads + +from cattrs._compat import AbstractSet + +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . import wrap + +T = TypeVar("T") + + +class MsgpackConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: bytes, cl: Type[T], **kwargs: Any) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the msgpack library. 
+ + * datetimes are serialized as timestamp floats + * sets are serialized as lists + """ + converter.register_unstructure_hook(datetime, lambda v: v.timestamp()) + converter.register_structure_hook( + datetime, lambda v, _: datetime.fromtimestamp(v, timezone.utc) + ) + converter.register_unstructure_hook( + date, lambda v: datetime.combine(v, time(tzinfo=timezone.utc)).timestamp() + ) + converter.register_structure_hook( + date, lambda v, _: datetime.fromtimestamp(v, timezone.utc).date() + ) + configure_union_passthrough(Union[str, bool, int, float, None, bytes], converter) + + +@wrap(MsgpackConverter) +def make_converter(*args: Any, **kwargs: Any) -> MsgpackConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = MsgpackConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py new file mode 100644 index 0000000..6ef84d7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py @@ -0,0 +1,185 @@ +"""Preconfigured converters for msgspec.""" + +from __future__ import annotations + +from base64 import b64decode +from datetime import date, datetime +from enum import Enum +from functools import partial +from typing import Any, Callable, TypeVar, Union, get_type_hints + +from attrs import has as attrs_has +from attrs import resolve_types +from msgspec import Struct, convert, to_builtins +from msgspec.json import Encoder, decode + +from .._compat import ( + fields, + get_args, + get_origin, + has, + is_bare, + is_mapping, + is_sequence, +) +from ..cols import is_namedtuple +from ..converters import BaseConverter, Converter +from ..dispatch import UnstructureHook +from ..fns import identity +from ..gen import make_hetero_tuple_unstructure_fn +from ..strategies import configure_union_passthrough +from . import wrap + +T = TypeVar("T") + +__all__ = ["MsgspecJsonConverter", "configure_converter", "make_converter"] + + +class MsgspecJsonConverter(Converter): + """A converter specialized for the _msgspec_ library.""" + + #: The msgspec encoder for dumping. + encoder: Encoder = Encoder() + + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes: + """Unstructure and encode `obj` into JSON bytes.""" + return self.encoder.encode( + self.unstructure(obj, unstructure_as=unstructure_as), **kwargs + ) + + def get_dumps_hook( + self, unstructure_as: Any, **kwargs: Any + ) -> Callable[[Any], bytes]: + """Produce a `dumps` hook for the given type.""" + unstruct_hook = self.get_unstructure_hook(unstructure_as) + if unstruct_hook in (identity, to_builtins): + return self.encoder.encode + return self.dumps + + def loads(self, data: bytes, cl: type[T], **kwargs: Any) -> T: + """Decode and structure `cl` from the provided JSON bytes.""" + return self.structure(decode(data, **kwargs), cl) + + def get_loads_hook(self, cl: type[T]) -> Callable[[bytes], T]: + """Produce a `loads` hook for the given type.""" + return partial(self.loads, cl=cl) + + +def configure_converter(converter: Converter) -> None: + """Configure the converter for the msgspec library. 
+ + * bytes are serialized as base64 strings, directly by msgspec + * datetimes and dates are passed through to be serialized as RFC 3339 directly + * enums are passed through to msgspec directly + * union passthrough configured for str, bool, int, float and None + """ + configure_passthroughs(converter) + + converter.register_unstructure_hook(Struct, to_builtins) + converter.register_unstructure_hook(Enum, to_builtins) + + converter.register_structure_hook(Struct, convert) + converter.register_structure_hook(bytes, lambda v, _: b64decode(v)) + converter.register_structure_hook(datetime, lambda v, _: convert(v, datetime)) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + configure_union_passthrough(Union[str, bool, int, float, None], converter) + + +@wrap(MsgspecJsonConverter) +def make_converter(*args: Any, **kwargs: Any) -> MsgspecJsonConverter: + res = MsgspecJsonConverter(*args, **kwargs) + configure_converter(res) + return res + + +def configure_passthroughs(converter: Converter) -> None: + """Configure optimizing passthroughs. + + A passthrough is when we let msgspec handle something automatically. + """ + converter.register_unstructure_hook(bytes, to_builtins) + converter.register_unstructure_hook_factory(is_mapping, mapping_unstructure_factory) + converter.register_unstructure_hook_factory(is_sequence, seq_unstructure_factory) + converter.register_unstructure_hook_factory(has, attrs_unstructure_factory) + converter.register_unstructure_hook_factory( + is_namedtuple, namedtuple_unstructure_factory + ) + + +def seq_unstructure_factory(type, converter: Converter) -> UnstructureHook: + """The msgspec unstructure hook factory for sequences.""" + if is_bare(type): + type_arg = Any + else: + args = get_args(type) + type_arg = args[0] + handler = converter.get_unstructure_hook(type_arg, cache_result=False) + + if handler in (identity, to_builtins): + return handler + return converter.gen_unstructure_iterable(type) + + +def mapping_unstructure_factory(type, converter: BaseConverter) -> UnstructureHook: + """The msgspec unstructure hook factory for mappings.""" + if is_bare(type): + key_arg = Any + val_arg = Any + key_handler = converter.get_unstructure_hook(key_arg, cache_result=False) + value_handler = converter.get_unstructure_hook(val_arg, cache_result=False) + else: + args = get_args(type) + if len(args) == 2: + key_arg, val_arg = args + else: + # Probably a Counter + key_arg, val_arg = args, Any + key_handler = converter.get_unstructure_hook(key_arg, cache_result=False) + value_handler = converter.get_unstructure_hook(val_arg, cache_result=False) + + if key_handler in (identity, to_builtins) and value_handler in ( + identity, + to_builtins, + ): + return to_builtins + return converter.gen_unstructure_mapping(type) + + +def attrs_unstructure_factory(type: Any, converter: Converter) -> UnstructureHook: + """Choose whether to use msgspec handling or our own.""" + origin = get_origin(type) + attribs = fields(origin or type) + if attrs_has(type) and any(isinstance(a.type, str) for a in attribs): + resolve_types(type) + attribs = fields(origin or type) + + if any( + attr.name.startswith("_") + or ( + converter.get_unstructure_hook(attr.type, cache_result=False) + not in (identity, to_builtins) + ) + for attr in attribs + ): + return converter.gen_unstructure_attrs_fromdict(type) + + return to_builtins + + +def namedtuple_unstructure_factory( + type: type[tuple], converter: BaseConverter +) -> UnstructureHook: + """A hook factory for unstructuring namedtuples, 
modified for msgspec.""" + + if all( + converter.get_unstructure_hook(t) in (identity, to_builtins) + for t in get_type_hints(type).values() + ): + return identity + + return make_hetero_tuple_unstructure_fn( + type, + converter, + unstructure_to=tuple, + type_args=tuple(get_type_hints(type).values()), + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py new file mode 100644 index 0000000..4b595bc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py @@ -0,0 +1,95 @@ +"""Preconfigured converters for orjson.""" + +from base64 import b85decode, b85encode +from datetime import date, datetime +from enum import Enum +from functools import partial +from typing import Any, Type, TypeVar, Union + +from orjson import dumps, loads + +from .._compat import AbstractSet, is_mapping +from ..cols import is_namedtuple, namedtuple_unstructure_factory +from ..converters import BaseConverter, Converter +from ..fns import identity +from ..strategies import configure_union_passthrough +from . import wrap + +T = TypeVar("T") + + +class OrjsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: Union[bytes, bytearray, memoryview, str], cl: Type[T]) -> T: + return self.structure(loads(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the orjson library. + + * bytes are serialized as base85 strings + * datetimes and dates are passed through to be serialized as RFC 3339 by orjson + * typed namedtuples are serialized as lists + * sets are serialized as lists + * string enum mapping keys have special handling + * mapping keys are coerced into strings when unstructuring + + .. versionchanged: 24.1.0 + Add support for typed namedtuples. + """ + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + + converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v)) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + if issubclass(args[0], str) and issubclass(args[0], Enum): + + def key_handler(v): + return v.value + + else: + # It's possible the handler for the key type has been overridden. + # (For example base85 encoding for bytes.) + # In that case, we want to use the override. 
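+                # bytes keys, for example, have the base85 hook registered
+                # earlier in this configure_converter, so that hook (rather
+                # than plain str()) ends up applied to every key.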
+ + kh = converter.get_unstructure_hook(args[0]) + if kh != identity: + key_handler = kh + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + converter._unstructure_func.register_func_list( + [ + (is_mapping, gen_unstructure_mapping, True), + ( + is_namedtuple, + partial(namedtuple_unstructure_factory, unstructure_to=tuple), + "extended", + ), + ] + ) + configure_union_passthrough(Union[str, bool, int, float, None], converter) + + +@wrap(OrjsonConverter) +def make_converter(*args: Any, **kwargs: Any) -> OrjsonConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = OrjsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py new file mode 100644 index 0000000..7374625 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py @@ -0,0 +1,72 @@ +"""Preconfigured converters for pyyaml.""" + +from datetime import date, datetime +from functools import partial +from typing import Any, Type, TypeVar, Union + +from yaml import safe_dump, safe_load + +from .._compat import FrozenSetSubscriptable +from ..cols import is_namedtuple, namedtuple_unstructure_factory +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . import validate_datetime, wrap + +T = TypeVar("T") + + +def validate_date(v, _): + if not isinstance(v, date): + raise ValueError(f"Expected date, got {v}") + return v + + +class PyyamlConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str: + return safe_dump(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: str, cl: Type[T]) -> T: + return self.structure(safe_load(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the pyyaml library. + + * frozensets are serialized as lists + * string enums are converted into strings explicitly + * datetimes and dates are validated + * typed namedtuples are serialized as lists + + .. versionchanged: 24.1.0 + Add support for typed namedtuples. + """ + converter.register_unstructure_hook( + str, lambda v: v if v.__class__ is str else v.value + ) + + # datetime inherits from date, so identity unstructure hook used + # here to prevent the date unstructure hook running. 
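+    # pyyaml can represent datetime objects natively, so they are passed
+    # through unchanged; structuring only validates the parsed value via
+    # validate_datetime below.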
+ converter.register_unstructure_hook(datetime, lambda v: v) + converter.register_structure_hook(datetime, validate_datetime) + converter.register_structure_hook(date, validate_date) + + converter.register_unstructure_hook_factory(is_namedtuple)( + partial(namedtuple_unstructure_factory, unstructure_to=tuple) + ) + + configure_union_passthrough( + Union[str, bool, int, float, None, bytes, datetime, date], converter + ) + + +@wrap(PyyamlConverter) +def make_converter(*args: Any, **kwargs: Any) -> PyyamlConverter: + kwargs["unstruct_collection_overrides"] = { + FrozenSetSubscriptable: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = PyyamlConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py new file mode 100644 index 0000000..0d0180b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py @@ -0,0 +1,87 @@ +"""Preconfigured converters for tomlkit.""" + +from base64 import b85decode, b85encode +from datetime import date, datetime +from enum import Enum +from operator import attrgetter +from typing import Any, Type, TypeVar, Union + +from tomlkit import dumps, loads +from tomlkit.items import Float, Integer, String + +from cattrs._compat import AbstractSet, is_mapping + +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . import validate_datetime, wrap + +T = TypeVar("T") +_enum_value_getter = attrgetter("_value_") + + +class TomlkitConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: str, cl: Type[T]) -> T: + return self.structure(loads(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the tomlkit library. + + * bytes are serialized as base85 strings + * sets are serialized as lists + * tuples are serializas as lists + * mapping keys are coerced into strings when unstructuring + """ + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + # Currently, tomlkit has inconsistent behavior on 3.11 + # so we paper over it here. + # https://github.com/sdispater/tomlkit/issues/237 + if issubclass(args[0], str): + key_handler = _enum_value_getter if issubclass(args[0], Enum) else None + elif issubclass(args[0], bytes): + + def key_handler(k: bytes): + return b85encode(k).decode("utf8") + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + converter._unstructure_func.register_func_list( + [(is_mapping, gen_unstructure_mapping, True)] + ) + + # datetime inherits from date, so identity unstructure hook used + # here to prevent the date unstructure hook running. 
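+    # tomlkit serializes datetime values natively; only `date` is explicitly
+    # converted to and from its ISO 8601 string form just below.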
+ converter.register_unstructure_hook(datetime, lambda v: v) + converter.register_structure_hook(datetime, validate_datetime) + converter.register_unstructure_hook(date, lambda v: v.isoformat()) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + configure_union_passthrough( + Union[str, String, bool, int, Integer, float, Float], converter + ) + + +@wrap(TomlkitConverter) +def make_converter(*args: Any, **kwargs: Any) -> TomlkitConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + tuple: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = TomlkitConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py new file mode 100644 index 0000000..7256d52 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py @@ -0,0 +1,55 @@ +"""Preconfigured converters for ujson.""" + +from base64 import b85decode, b85encode +from datetime import date, datetime +from typing import Any, AnyStr, Type, TypeVar, Union + +from ujson import dumps, loads + +from cattrs._compat import AbstractSet + +from ..converters import BaseConverter, Converter +from ..strategies import configure_union_passthrough +from . import wrap + +T = TypeVar("T") + + +class UjsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: AnyStr, cl: Type[T], **kwargs: Any) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the ujson library. 
+ + * bytes are serialized as base64 strings + * datetimes are serialized as ISO 8601 + * sets are serialized as lists + """ + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + + converter.register_unstructure_hook(datetime, lambda v: v.isoformat()) + converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v)) + converter.register_unstructure_hook(date, lambda v: v.isoformat()) + converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v)) + configure_union_passthrough(Union[str, bool, int, float, None], converter) + + +@wrap(UjsonConverter) +def make_converter(*args: Any, **kwargs: Any) -> UjsonConverter: + kwargs["unstruct_collection_overrides"] = { + AbstractSet: list, + **kwargs.get("unstruct_collection_overrides", {}), + } + res = UjsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/py.typed b/lambdas/aws-dd-forwarder-3.127.0/cattrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py new file mode 100644 index 0000000..9caf073 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py @@ -0,0 +1,12 @@ +"""High level strategies for converters.""" + +from ._class_methods import use_class_methods +from ._subclasses import include_subclasses +from ._unions import configure_tagged_union, configure_union_passthrough + +__all__ = [ + "configure_tagged_union", + "configure_union_passthrough", + "include_subclasses", + "use_class_methods", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py new file mode 100644 index 0000000..c2b6325 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py @@ -0,0 +1,64 @@ +"""Strategy for using class-specific (un)structuring methods.""" + +from inspect import signature +from typing import Any, Callable, Optional, Type, TypeVar + +from .. import BaseConverter + +T = TypeVar("T") + + +def use_class_methods( + converter: BaseConverter, + structure_method_name: Optional[str] = None, + unstructure_method_name: Optional[str] = None, +) -> None: + """ + Configure the converter such that dedicated methods are used for (un)structuring + the instance of a class if such methods are available. The default (un)structuring + will be applied if such an (un)structuring methods cannot be found. + + :param converter: The `Converter` on which this strategy is applied. You can use + :class:`cattrs.BaseConverter` or any other derived class. + :param structure_method_name: Optional string with the name of the class method + which should be used for structuring. If not provided, no class method will be + used for structuring. + :param unstructure_method_name: Optional string with the name of the class method + which should be used for unstructuring. If not provided, no class method will + be used for unstructuring. + + If you want to (un)structured nested objects, just append a converter parameter + to your (un)structuring methods and you will receive the converter there. + + .. 
versionadded:: 23.2.0 + """ + + if structure_method_name: + + def make_class_method_structure(cl: Type[T]) -> Callable[[Any, Type[T]], T]: + fn = getattr(cl, structure_method_name) + n_parameters = len(signature(fn).parameters) + if n_parameters == 1: + return lambda v, _: fn(v) + if n_parameters == 2: + return lambda v, _: fn(v, converter) + raise TypeError("Provide a class method with one or two arguments.") + + converter.register_structure_hook_factory( + lambda t: hasattr(t, structure_method_name), make_class_method_structure + ) + + if unstructure_method_name: + + def make_class_method_unstructure(cl: Type[T]) -> Callable[[T], T]: + fn = getattr(cl, unstructure_method_name) + n_parameters = len(signature(fn).parameters) + if n_parameters == 1: + return fn + if n_parameters == 2: + return lambda self_: fn(self_, converter) + raise TypeError("Provide a method with no or one argument.") + + converter.register_unstructure_hook_factory( + lambda t: hasattr(t, unstructure_method_name), make_class_method_unstructure + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py new file mode 100644 index 0000000..06a92af --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py @@ -0,0 +1,238 @@ +"""Strategies for customizing subclass behaviors.""" + +from __future__ import annotations + +from gc import collect +from typing import Any, Callable, TypeVar, Union + +from ..converters import BaseConverter +from ..gen import AttributeOverride, make_dict_structure_fn, make_dict_unstructure_fn +from ..gen._consts import already_generating + + +def _make_subclasses_tree(cl: type) -> list[type]: + return [cl] + [ + sscl for scl in cl.__subclasses__() for sscl in _make_subclasses_tree(scl) + ] + + +def _has_subclasses(cl: type, given_subclasses: tuple[type, ...]) -> bool: + """Whether the given class has subclasses from `given_subclasses`.""" + actual = set(cl.__subclasses__()) + given = set(given_subclasses) + return bool(actual & given) + + +def _get_union_type(cl: type, given_subclasses_tree: tuple[type]) -> type | None: + actual_subclass_tree = tuple(_make_subclasses_tree(cl)) + class_tree = tuple(set(actual_subclass_tree) & set(given_subclasses_tree)) + return Union[class_tree] if len(class_tree) >= 2 else None + + +C = TypeVar("C", bound=BaseConverter) + + +def include_subclasses( + cl: type, + converter: C, + subclasses: tuple[type, ...] | None = None, + union_strategy: Callable[[Any, C], Any] | None = None, + overrides: dict[str, AttributeOverride] | None = None, +) -> None: + """ + Configure the converter so that the attrs/dataclass `cl` is un/structured as if it + was a union of itself and all its subclasses that are defined at the time when this + strategy is applied. + + :param cl: A base `attrs` or `dataclass` class. + :param converter: The `Converter` on which this strategy is applied. Do note that + the strategy does not work for a :class:`cattrs.BaseConverter`. + :param subclasses: A tuple of sublcasses whose ancestor is `cl`. If left as `None`, + subclasses are detected using recursively the `__subclasses__` method of `cl` + and its descendents. + :param union_strategy: A callable of two arguments passed by position + (`subclass_union`, `converter`) that defines the union strategy to use to + disambiguate the subclasses union. 
If `None` (the default), the automatic unique + field disambiguation is used which means that every single subclass + participating in the union must have an attribute name that does not exist in + any other sibling class. + :param overrides: a mapping of `cl` attribute names to overrides (instantiated with + :func:`cattrs.gen.override`) to customize un/structuring. + + .. versionadded:: 23.1.0 + .. versionchanged:: 24.1.0 + When overrides are not provided, hooks for individual classes are retrieved from + the converter instead of generated with no overrides, using converter defaults. + """ + # Due to https://github.com/python-attrs/attrs/issues/1047 + collect() + if subclasses is not None: + parent_subclass_tree = (cl, *subclasses) + else: + parent_subclass_tree = tuple(_make_subclasses_tree(cl)) + + if union_strategy is None: + _include_subclasses_without_union_strategy( + cl, converter, parent_subclass_tree, overrides + ) + else: + _include_subclasses_with_union_strategy( + converter, parent_subclass_tree, union_strategy, overrides + ) + + +def _include_subclasses_without_union_strategy( + cl, + converter: BaseConverter, + parent_subclass_tree: tuple[type], + overrides: dict[str, AttributeOverride] | None, +): + # The iteration approach is required if subclasses are more than one level deep: + for cl in parent_subclass_tree: + # We re-create a reduced union type to handle the following case: + # + # converter.structure(d, as=Child) + # + # In the above, the `as=Child` argument will be transformed to a union type of + # itself and its subtypes, that way we guarantee that the returned object will + # not be the parent. + subclass_union = _get_union_type(cl, parent_subclass_tree) + + def cls_is_cl(cls, _cl=cl): + return cls is _cl + + if overrides is not None: + base_struct_hook = make_dict_structure_fn(cl, converter, **overrides) + base_unstruct_hook = make_dict_unstructure_fn(cl, converter, **overrides) + else: + base_struct_hook = converter.get_structure_hook(cl) + base_unstruct_hook = converter.get_unstructure_hook(cl) + + if subclass_union is None: + + def struct_hook(val: dict, _, _cl=cl, _base_hook=base_struct_hook) -> cl: + return _base_hook(val, _cl) + + else: + dis_fn = converter._get_dis_func(subclass_union, overrides=overrides) + + def struct_hook( + val: dict, + _, + _c=converter, + _cl=cl, + _base_hook=base_struct_hook, + _dis_fn=dis_fn, + ) -> cl: + """ + If val is disambiguated to the class `cl`, use its base hook. + + If val is disambiguated to a subclass, dispatch on its exact runtime + type. + """ + dis_cl = _dis_fn(val) + if dis_cl is _cl: + return _base_hook(val, _cl) + return _c.structure(val, dis_cl) + + def unstruct_hook( + val: parent_subclass_tree[0], + _c=converter, + _cl=cl, + _base_hook=base_unstruct_hook, + ) -> dict: + """ + If val is an instance of the class `cl`, use the hook. + + If val is an instance of a subclass, dispatch on its exact runtime type. + """ + if val.__class__ is _cl: + return _base_hook(val) + return _c.unstructure(val, unstructure_as=val.__class__) + + # This needs to use function dispatch, using singledispatch will again + # match A and all subclasses, which is not what we want. 
+ converter.register_structure_hook_func(cls_is_cl, struct_hook) + converter.register_unstructure_hook_func(cls_is_cl, unstruct_hook) + + +def _include_subclasses_with_union_strategy( + converter: C, + union_classes: tuple[type, ...], + union_strategy: Callable[[Any, C], Any], + overrides: dict[str, AttributeOverride] | None, +): + """ + This function is tricky because we're dealing with what is essentially a circular + reference. + + We need to generate a structure hook for a class that is both: + * specific for that particular class and its own fields + * but should handle specific functions for all its descendants too + + Hence the dance with registering below. + """ + + parent_classes = [cl for cl in union_classes if _has_subclasses(cl, union_classes)] + if not parent_classes: + return + + original_unstruct_hooks = {} + original_struct_hooks = {} + for cl in union_classes: + # In the first pass, every class gets its own unstructure function according to + # the overrides. + # We just generate the hooks, and do not register them. This allows us to + # manipulate the _already_generating set to force runtime dispatch. + already_generating.working_set = set(union_classes) - {cl} + try: + if overrides is not None: + unstruct_hook = make_dict_unstructure_fn(cl, converter, **overrides) + struct_hook = make_dict_structure_fn(cl, converter, **overrides) + else: + unstruct_hook = converter.get_unstructure_hook(cl, cache_result=False) + struct_hook = converter.get_structure_hook(cl, cache_result=False) + finally: + already_generating.working_set = set() + original_unstruct_hooks[cl] = unstruct_hook + original_struct_hooks[cl] = struct_hook + + # Now that's done, we can register all the hooks and generate the + # union handler. The union handler needs them. + final_union = Union[union_classes] # type: ignore + + for cl, hook in original_unstruct_hooks.items(): + + def cls_is_cl(cls, _cl=cl): + return cls is _cl + + converter.register_unstructure_hook_func(cls_is_cl, hook) + + for cl, hook in original_struct_hooks.items(): + + def cls_is_cl(cls, _cl=cl): + return cls is _cl + + converter.register_structure_hook_func(cls_is_cl, hook) + + union_strategy(final_union, converter) + unstruct_hook = converter.get_unstructure_hook(final_union) + struct_hook = converter.get_structure_hook(final_union) + + for cl in union_classes: + # In the second pass, we overwrite the hooks with the union hook. 
+ + def cls_is_cl(cls, _cl=cl): + return cls is _cl + + converter.register_unstructure_hook_func(cls_is_cl, unstruct_hook) + subclasses = tuple([c for c in union_classes if issubclass(c, cl)]) + if len(subclasses) > 1: + u = Union[subclasses] # type: ignore + union_strategy(u, converter) + struct_hook = converter.get_structure_hook(u) + + def sh(payload: dict, _, _u=u, _s=struct_hook) -> cl: + return _s(payload, _u) + + converter.register_structure_hook_func(cls_is_cl, sh) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py new file mode 100644 index 0000000..f0d270d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py @@ -0,0 +1,258 @@ +from collections import defaultdict +from typing import Any, Callable, Dict, Literal, Type, Union + +from attrs import NOTHING + +from cattrs import BaseConverter +from cattrs._compat import get_newtype_base, is_literal, is_subclass, is_union_type + +__all__ = [ + "default_tag_generator", + "configure_tagged_union", + "configure_union_passthrough", +] + + +def default_tag_generator(typ: Type) -> str: + """Return the class name.""" + return typ.__name__ + + +def configure_tagged_union( + union: Any, + converter: BaseConverter, + tag_generator: Callable[[Type], str] = default_tag_generator, + tag_name: str = "_type", + default: Union[Type, Literal[NOTHING]] = NOTHING, +) -> None: + """ + Configure the converter so that `union` (which should be a union) is + un/structured with the help of an additional piece of data in the + unstructured payload, the tag. + + :param converter: The converter to apply the strategy to. + :param tag_generator: A `tag_generator` function is used to map each + member of the union to a tag, which is then included in the + unstructured payload. The default tag generator returns the name of + the class. + :param tag_name: The key under which the tag will be set in the + unstructured payload. By default, `'_type'`. + :param default: An optional class to be used if the tag information + is not present when structuring. + + The tagged union strategy currently only works with the dict + un/structuring base strategy. + + .. 
versionadded:: 23.1.0 + """ + args = union.__args__ + tag_to_hook = {} + exact_cl_unstruct_hooks = {} + for cl in args: + tag = tag_generator(cl) + struct_handler = converter.get_structure_hook(cl) + unstruct_handler = converter.get_unstructure_hook(cl) + + def structure_union_member(val: dict, _cl=cl, _h=struct_handler) -> cl: + return _h(val, _cl) + + def unstructure_union_member(val: union, _h=unstruct_handler) -> dict: + return _h(val) + + tag_to_hook[tag] = structure_union_member + exact_cl_unstruct_hooks[cl] = unstructure_union_member + + cl_to_tag = {cl: tag_generator(cl) for cl in args} + + if default is not NOTHING: + default_handler = converter.get_structure_hook(default) + + def structure_default(val: dict, _cl=default, _h=default_handler): + return _h(val, _cl) + + tag_to_hook = defaultdict(lambda: structure_default, tag_to_hook) + cl_to_tag = defaultdict(lambda: default, cl_to_tag) + + def unstructure_tagged_union( + val: union, + _exact_cl_unstruct_hooks=exact_cl_unstruct_hooks, + _cl_to_tag=cl_to_tag, + _tag_name=tag_name, + ) -> Dict: + res = _exact_cl_unstruct_hooks[val.__class__](val) + res[_tag_name] = _cl_to_tag[val.__class__] + return res + + if default is NOTHING: + if getattr(converter, "forbid_extra_keys", False): + + def structure_tagged_union( + val: dict, _, _tag_to_cl=tag_to_hook, _tag_name=tag_name + ) -> union: + val = val.copy() + return _tag_to_cl[val.pop(_tag_name)](val) + + else: + + def structure_tagged_union( + val: dict, _, _tag_to_cl=tag_to_hook, _tag_name=tag_name + ) -> union: + return _tag_to_cl[val[_tag_name]](val) + + else: + if getattr(converter, "forbid_extra_keys", False): + + def structure_tagged_union( + val: dict, + _, + _tag_to_hook=tag_to_hook, + _tag_name=tag_name, + _dh=default_handler, + _default=default, + ) -> union: + if _tag_name in val: + val = val.copy() + return _tag_to_hook[val.pop(_tag_name)](val) + return _dh(val, _default) + + else: + + def structure_tagged_union( + val: dict, + _, + _tag_to_hook=tag_to_hook, + _tag_name=tag_name, + _dh=default_handler, + _default=default, + ) -> union: + if _tag_name in val: + return _tag_to_hook[val[_tag_name]](val) + return _dh(val, _default) + + converter.register_unstructure_hook(union, unstructure_tagged_union) + converter.register_structure_hook(union, structure_tagged_union) + + +def configure_union_passthrough(union: Any, converter: BaseConverter) -> None: + """ + Configure the converter to support validating and passing through unions of the + provided types and their subsets. + + For example, all mature JSON libraries natively support producing unions of ints, + floats, Nones, and strings. Using this strategy, a converter can be configured + to efficiently validate and pass through unions containing these types. + + The most important point is that another library (in this example the JSON + library) handles producing the union, and the converter is configured to just + validate it. + + Literals of provided types are also supported, and are checked by value. + + NewTypes of provided types are also supported. + + The strategy is designed to be O(1) in execution time, and independent of the + ordering of types in the union. + + If the union contains a class and one or more of its subclasses, the subclasses + will also be included when validating the superclass. + + .. versionadded:: 23.2.0 + """ + args = set(union.__args__) + + def make_structure_native_union(exact_type: Any) -> Callable: + # `exact_type` is likely to be a subset of the entire configured union (`args`). 
+ literal_values = { + v for t in exact_type.__args__ if is_literal(t) for v in t.__args__ + } + + # We have no idea what the actual type of `val` will be, so we can't + # use it blindly with an `in` check since it might not be hashable. + # So we do an additional check when handling literals. + # Note: do no use `literal_values` here, since {0, False} gets reduced to {0} + literal_classes = { + v.__class__ + for t in exact_type.__args__ + if is_literal(t) + for v in t.__args__ + } + + non_literal_classes = { + get_newtype_base(t) or t + for t in exact_type.__args__ + if not is_literal(t) and ((get_newtype_base(t) or t) in args) + } + + # We augment the set of allowed classes with any configured subclasses of + # the exact subclasses. + non_literal_classes |= { + a for a in args if any(is_subclass(a, c) for c in non_literal_classes) + } + + # We check for spillover - union types not handled by the strategy. + # If spillover exists and we fail to validate our types, we call + # further into the converter with the rest. + spillover = { + a + for a in exact_type.__args__ + if (get_newtype_base(a) or a) not in non_literal_classes + and not is_literal(a) + } + + if spillover: + spillover_type = ( + Union[tuple(spillover)] if len(spillover) > 1 else next(iter(spillover)) + ) + + def structure_native_union( + val: Any, + _: Any, + classes=non_literal_classes, + vals=literal_values, + converter=converter, + spillover=spillover_type, + ) -> exact_type: + if val.__class__ in literal_classes and val in vals: + return val + if val.__class__ in classes: + return val + return converter.structure(val, spillover) + + else: + + def structure_native_union( + val: Any, _: Any, classes=non_literal_classes, vals=literal_values + ) -> exact_type: + if val.__class__ in literal_classes and val in vals: + return val + if val.__class__ in classes: + return val + raise TypeError(f"{val} ({val.__class__}) not part of {_}") + + return structure_native_union + + def contains_native_union(exact_type: Any) -> bool: + """Can we handle this type?""" + if is_union_type(exact_type): + type_args = set(exact_type.__args__) + # We special case optionals, since they are very common + # and are handled a little more efficiently by default. + if len(type_args) == 2 and type(None) in type_args: + return False + + literal_classes = { + lit_arg.__class__ + for t in type_args + if is_literal(t) + for lit_arg in t.__args__ + } + non_literal_types = { + get_newtype_base(t) or t for t in type_args if not is_literal(t) + } + + return (literal_classes | non_literal_types) & args + return False + + converter.register_structure_hook_factory( + contains_native_union, make_structure_native_union + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py new file mode 100644 index 0000000..c3ab18c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py @@ -0,0 +1,112 @@ +"""Cattrs validation.""" + +from typing import Callable, List, Union + +from .errors import ( + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, +) + +__all__ = ["format_exception", "transform_error"] + + +def format_exception(exc: BaseException, type: Union[type, None]) -> str: + """The default exception formatter, handling the most common exceptions. 
+ + The following exceptions are handled specially: + + * `KeyErrors` (`required field missing`) + * `ValueErrors` (`invalid value for type, expected ` or just `invalid value`) + * `TypeErrors` (`invalid value for type, expected ` and a couple special + cases for iterables) + * `cattrs.ForbiddenExtraKeysError` + * some `AttributeErrors` (special cased for structing mappings) + """ + if isinstance(exc, KeyError): + res = "required field missing" + elif isinstance(exc, ValueError): + if type is not None: + tn = type.__name__ if hasattr(type, "__name__") else repr(type) + res = f"invalid value for type, expected {tn}" + else: + res = "invalid value" + elif isinstance(exc, TypeError): + if type is None: + if exc.args[0].endswith("object is not iterable"): + res = "invalid value for type, expected an iterable" + else: + res = f"invalid type ({exc})" + else: + tn = type.__name__ if hasattr(type, "__name__") else repr(type) + res = f"invalid value for type, expected {tn}" + elif isinstance(exc, ForbiddenExtraKeysError): + res = f"extra fields found ({', '.join(exc.extra_fields)})" + elif isinstance(exc, AttributeError) and exc.args[0].endswith( + "object has no attribute 'items'" + ): + # This was supposed to be a mapping (and have .items()) but it something else. + res = "expected a mapping" + elif isinstance(exc, AttributeError) and exc.args[0].endswith( + "object has no attribute 'copy'" + ): + # This was supposed to be a mapping (and have .copy()) but it something else. + # Used for TypedDicts. + res = "expected a mapping" + else: + res = f"unknown error ({exc})" + + return res + + +def transform_error( + exc: Union[ClassValidationError, IterableValidationError, BaseException], + path: str = "$", + format_exception: Callable[ + [BaseException, Union[type, None]], str + ] = format_exception, +) -> List[str]: + """Transform an exception into a list of error messages. + + To get detailed error messages, the exception should be produced by a converter + with `detailed_validation` set. + + By default, the error messages are in the form of `{description} @ {path}`. + + While traversing the exception and subexceptions, the path is formed: + + * by appending `.{field_name}` for fields in classes + * by appending `[{int}]` for indices in iterables, like lists + * by appending `[{str}]` for keys in mappings, like dictionaries + + :param exc: The exception to transform into error messages. + :param path: The root path to use. + :param format_exception: A callable to use to transform `Exceptions` into + string descriptions of errors. + + .. 
versionadded:: 23.1.0 + """ + errors = [] + if isinstance(exc, IterableValidationError): + with_notes, without = exc.group_exceptions() + for exc, note in with_notes: + p = f"{path}[{note.index!r}]" + if isinstance(exc, (ClassValidationError, IterableValidationError)): + errors.extend(transform_error(exc, p, format_exception)) + else: + errors.append(f"{format_exception(exc, note.type)} @ {p}") + for exc in without: + errors.append(f"{format_exception(exc, None)} @ {path}") + elif isinstance(exc, ClassValidationError): + with_notes, without = exc.group_exceptions() + for exc, note in with_notes: + p = f"{path}.{note.name}" + if isinstance(exc, (ClassValidationError, IterableValidationError)): + errors.extend(transform_error(exc, p, format_exception)) + else: + errors.append(f"{format_exception(exc, note.type)} @ {p}") + for exc in without: + errors.append(f"{format_exception(exc, None)} @ {path}") + else: + errors.append(f"{format_exception(exc, None)} @ {path}") + return errors diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE new file mode 100644 index 0000000..62b076c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA new file mode 100644 index 0000000..0a3a772 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA @@ -0,0 +1,67 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.8.30 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
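
Note (not part of the vendored files above): the METADATA just added explains that `certifi.where()` returns the path to the bundled `cacert.pem`. As a minimal sketch of how a Python client can point standard-library TLS verification at that bundle — the URL and the use of `urllib` here are illustrative assumptions, not taken from the forwarder code::

    import ssl
    import urllib.request

    import certifi

    # Build an SSL context whose trust store is the Mozilla root bundle
    # shipped in certifi's cacert.pem (the file added in this diff).
    ctx = ssl.create_default_context(cafile=certifi.where())

    # Illustrative endpoint only; any HTTPS host is verified against the bundle.
    with urllib.request.urlopen("https://example.com", context=ctx) as resp:
        print(resp.status)

Libraries such as `requests` do the equivalent internally, which is why certifi ships alongside the forwarder's other dependencies.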
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD new file mode 100644 index 0000000..7393811 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD @@ -0,0 +1,15 @@ +certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222 +certifi-2024.8.30.dist-info/RECORD,, +certifi-2024.8.30.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91 +certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-311.pyc,, +certifi/__pycache__/__main__.cpython-311.pyc,, +certifi/__pycache__/core.cpython-311.pyc,, +certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL new file mode 100644 index 0000000..57e56b7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (74.0.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt new file mode 100644 index 0000000..963eac5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py new file mode 100644 index 0000000..f61d77f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2024.08.30" diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py new file mode 100644 index 0000000..8945b5d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem b/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem new file mode 100644 index 0000000..3c165a1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem @@ -0,0 +1,4929 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa 
OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 
31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- +MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- + +# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA +# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" +# Serial: 65916896770016886708751106294915943533 +# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 +# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 +# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a +-----BEGIN CERTIFICATE----- +MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw +CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE +YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB +IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf +e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C +cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O +BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO +PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG +XSaQpYXFuXqUPoeovQA= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA CYBER Root CA" +# Serial: 85076849864375384482682434040119489222 +# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 +# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 +# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ +MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 +IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 +WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO +LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg +Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P +40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF +avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ +34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i +JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu +j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf +Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP +2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA +S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA +oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC +kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW +5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd +BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB +AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t +tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn +68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn +TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t +RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx +f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI +Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz +8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 +NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX +xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 +t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
+# Label: "SecureSign Root CA12" +# Serial: 587887345431707215246142177076162061960426065942 +# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 +# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 +# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw +NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF +KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt +p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd +J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur +FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J +hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K +h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF +AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld +mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ +mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA +8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV +55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ +yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA14" +# Serial: 575790784512929437950770173562378038616896959179 +# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 +# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f +# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM +BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u +LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw +NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD +eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS +b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ +FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg +vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy +6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo +/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J +kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ +0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib +y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac +18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs +0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB +SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL +ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk +86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E +rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib +ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS +DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 +2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo +FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy +K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 +dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl +Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB +365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c +JRNItX+S +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. +# Label: "SecureSign Root CA15" +# Serial: 126083514594751269499665114766174399806381178503 +# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 +# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d +# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a +-----BEGIN CERTIFICATE----- +MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw +UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM +dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy +NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl +cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 +IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 +wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR +ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT +9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp +4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 +bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= +-----END CERTIFICATE----- diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py new file mode 100644 index 0000000..91f538b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py @@ -0,0 +1,114 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. + def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/py.typed b/lambdas/aws-dd-forwarder-3.127.0/certifi/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE new file mode 100644 index 0000000..ad82355 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA new file mode 100644 index 0000000..b19096b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA @@ -0,0 +1,695 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 3.4.0 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. +Home-page: https://github.com/Ousret/charset_normalizer +Author: Ahmed TAHRI +Author-email: tahri.ahmed@proton.me +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Typing :: Typed +Requires-Python: >=3.7.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport + +

Charset Detection, for Everyone 👋

+ +

+ The Real First Universal Charset Detector

+ +> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`, +> I'm trying to resolve the issue by taking a new approach. +> All IANA character set names for which the Python core library provides codecs are supported. + +

+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< +

+ +This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**. + +| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) | +|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:| +| `Fast` | ❌ | ✅ | ✅ | +| `Universal**` | ❌ | ✅ | ❌ | +| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ | +| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ | +| `License` | LGPL-2.1
_restrictive_ | MIT | MPL-1.1
_restrictive_ | +| `Native Python` | ✅ | ✅ | ❌ | +| `Detect spoken language` | ❌ | ✅ | N/A | +| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ | +| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB | +| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 | + +


+ +*\*\* : They are clearly using specific code for a specific encoding even if covering most of used one*
+Did you got there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html) + +## ⚡ Performance + +This package offer better performance than its counterpart Chardet. Here are some numbers. + +| Package | Accuracy | Mean per file (ms) | File per sec (est) | +|-----------------------------------------------|:--------:|:------------------:|:------------------:| +| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec | +| charset-normalizer | **98 %** | **10 ms** | 100 file/sec | + +| Package | 99th percentile | 95th percentile | 50th percentile | +|-----------------------------------------------|:---------------:|:---------------:|:---------------:| +| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms | +| charset-normalizer | 100 ms | 50 ms | 5 ms | + +Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload. + +> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows. +> And yes, these results might change at any time. The dataset can be updated to include more files. +> The actual delays heavily depends on your CPU capabilities. The factors should remain the same. +> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability +> (eg. Supported Encoding) Challenge-them if you want. + +## ✨ Installation + +Using pip: + +```sh +pip install charset-normalizer -U +``` + +## 🚀 Basic Usage + +### CLI +This package comes with a CLI. + +``` +usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] + file [file ...] + +The Real First Universal Charset Detector. Discover originating encoding used +on text file. Normalize text to unicode. + +positional arguments: + files File(s) to be analysed + +optional arguments: + -h, --help show this help message and exit + -v, --verbose Display complementary information about file if any. + Stdout will contain logs about the detection process. + -a, --with-alternative + Output complementary possibilities if any. Top-level + JSON WILL be a list. + -n, --normalize Permit to normalize input file. If not set, program + does not write anything. + -m, --minimal Only output the charset detected to STDOUT. Disabling + JSON output. + -r, --replace Replace file when trying to normalize it instead of + creating a new one. + -f, --force Replace file without asking if you are sure, use this + flag with caution. + -t THRESHOLD, --threshold THRESHOLD + Define a custom maximum amount of chaos allowed in + decoded content. 0. <= chaos <= 1. + --version Show version information and exit. +``` + +```bash +normalizer ./data/sample.1.fr.srt +``` + +or + +```bash +python -m charset_normalizer ./data/sample.1.fr.srt +``` + +🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
+ +```json +{ + "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", + "encoding": "cp1252", + "encoding_aliases": [ + "1252", + "windows_1252" + ], + "alternative_encodings": [ + "cp1254", + "cp1256", + "cp1258", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + "mbcs" + ], + "language": "French", + "alphabets": [ + "Basic Latin", + "Latin-1 Supplement" + ], + "has_sig_or_bom": false, + "chaos": 0.149, + "coherence": 97.152, + "unicode_path": null, + "is_preferred": true +} +``` + +### Python +*Just print out normalized text* +```python +from charset_normalizer import from_path + +results = from_path('./my_subtitle.srt') + +print(str(results.best())) +``` + +*Upgrade your code without effort* +```python +from charset_normalizer import detect +``` + +The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. + +See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) + +## 😇 Why + +When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a +reliable alternative using a completely different method. Also! I never back down on a good challenge! + +I **don't care** about the **originating charset** encoding, because **two different tables** can +produce **two identical rendered string.** +What I want is to get readable text, the best I can. + +In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 + +Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. + +## 🍰 How + + - Discard all charset encoding table that could not fit the binary content. + - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding. + - Extract matches with the lowest mess detected. + - Additionally, we measure coherence / probe for a language. + +**Wait a minute**, what is noise/mess and coherence according to **YOU ?** + +*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then +**I established** some ground rules about **what is obvious** when **it seems like** a mess. + I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to + improve or rewrite it. + +*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought +that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. + +## ⚡ Known limitations + + - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) + - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. + +## ⚠️ About Python EOLs + +**If you are running:** + +- Python >=2.7,<3.5: Unsupported +- Python 3.5: charset-normalizer < 2.1 +- Python 3.6: charset-normalizer < 3.1 +- Python 3.7: charset-normalizer < 4.0 + +Upgrade your Python interpreter as soon as possible. + +## 👤 Contributing + +Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + +## 💼 For Enterprise + +Professional support for charset-normalizer is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. + +[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme + +# Changelog +All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) + +### Added +- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. +- Support for Python 3.13 (#512) + +### Fixed +- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. +- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) +- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381) + +## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) + +### Fixed +- Unintentional memory usage regression when using large payload that match several encoding (#376) +- Regression on some detection case showcased in the documentation (#371) + +### Added +- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) + +## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) + +### Changed +- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 +- Improved the general detection reliability based on reports from the community + +## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) + +### Added +- Allow to execute the CLI (e.g. 
normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` +- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) + +### Removed +- (internal) Redundant utils.is_ascii function and unused function is_private_use_only +- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant + +### Changed +- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection +- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 + +### Fixed +- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) + +## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) + +### Changed +- Typehint for function `from_path` no longer enforce `PathLike` as its first argument +- Minor improvement over the global detection reliability + +### Added +- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries +- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) +- Explicit support for Python 3.12 + +### Fixed +- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) + +## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) + +### Added +- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) + +### Removed +- Support for Python 3.6 (PR #260) + +### Changed +- Optional speedup provided by mypy/c 1.0.1 + +## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) + +### Fixed +- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) + +### Changed +- Speedup provided by mypy/c 0.990 on Python >= 3.7 + +## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it +- Sphinx warnings when generating the documentation + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- 
Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) + +### Added +- Extend the capability of explain=True: when cp_isolation contains at most two entries (min. one), details of the mess-detector results are logged +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter + +### Fixed +- CLI with opt --normalize failed when using a full path for files +- TooManyAccentuatedPlugin induced false positives in the mess detection when too few alpha characters had been fed to it + +### Removed +- Coherence detector no longer returns 'Simple English'; it returns 'English' instead +- Coherence detector no longer returns 'Classical Chinese'; it returns 'Chinese' instead + +## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) + +### Added +- `normalizer --version` now specifies whether the current version provides the extra speedup (meaning a mypyc-compiled wheel) + +### Removed +- Breaking: Methods `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (it is unreliable/conflicts with ASCII) + +### Fixed +- Sphinx warnings when generating the documentation + +## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) + +### Changed +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup, up to 4x faster than v2.1 + +### Removed +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) + +### Deprecated +- Function `normalize` scheduled for removal in 3.0 + +### Changed +- Removed useless call to decode in fn is_unprintable (#206) + +### Fixed +- Third-party library (i18n xgettext) crashing because utf_8 (PEP 263) written with an underscore was not recognized, from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) + +## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) + +### Added +- Output the Unicode table version when running the CLI with `--version` (PR #194) + +### Changed +- Re-use decoded buffer for single-byte character sets from [@nijel](https://github.com/nijel) (PR #175) +- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) + +### Fixed +- Work around a potential bug in CPython where Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 is not acknowledged as a space (PR #175) +- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) + +### Removed +- Support for Python 3.5 (PR #192) + +### Deprecated +- Use of the backport unicodedata from `unicodedata2`, as Python is quickly catching up; scheduled for removal in 3.0 (PR #194) + +##
[2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) + +### Fixed +- ASCII misdetection in rare cases (PR #170) + +## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) + +### Added +- Explicit support for Python 3.11 (PR #164) + +### Changed +- The logging behavior has been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) + +## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) + +### Fixed +- Fallback match entries might lead to UnicodeDecodeError for large byte sequences (PR #154) + +### Changed +- Skipping the language-detection (CD) on ASCII (PR #155) + +## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) + +### Changed +- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) + +### Fixed +- Wrong logging level applied when setting kwarg `explain` to True (PR #146) + +## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) +### Changed +- Improvement over Vietnamese detection (PR #126) +- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) +- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) +- Call sum() without an intermediary list, following PEP 289 recommendations, from [@adbar](https://github.com/adbar) (PR #129) +- Code style as refactored by Sourcery-AI (PR #131) +- Minor adjustment on the MD around European words (PR #133) +- Remove and replace SRTs from assets / tests (PR #139) +- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Setting kwarg `explain` to True will provisionally add (bounded to the function lifespan) a specific stream handler (PR #135) + +### Fixed +- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) +- Avoid using too-insignificant chunks (PR #137) + +### Added +- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Add `CHANGELOG.md` entries; the format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) + +## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) +### Added +- Add support for Kazakh (Cyrillic) language detection (PR #109) + +### Changed +- Further improve inferring the language from a given single-byte code page (PR #112) +- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) +- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) +- Various detection improvements (MD+CD) (PR #117) + +### Removed +- Remove redundant logging entry about detected language(s) (PR #115) + +### Fixed +- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) + +## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) +### Fixed +- Unforeseen regression causing a loss of backward compatibility with some older minor versions of Python 3.5.x (PR #100) +- Fix CLI crash when using --minimal output in certain cases (PR #103) + +### Changed +- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) + +## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) +### Changed +- The project now complies with: flake8, mypy,
isort and black to ensure a better overall quality (PR #81) +- The BC support with v1.x was improved; the old staticmethods are restored (PR #82) +- The Unicode detection is slightly improved (PR #93) +- Add syntactic sugar \_\_bool\_\_ for the CharsetMatches list-container (PR #91) + +### Removed +- The project no longer raises a warning on tiny content given for detection; it is simply logged as a warning instead (PR #92) + +### Fixed +- In some rare cases, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) +- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) +- The MANIFEST.in was not exhaustive (PR #78) + +## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) +### Fixed +- The CLI no longer raises an unexpected exception when no encoding has been found (PR #70) +- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) +- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) +- Submatch factoring could be wrong in rare edge cases (PR #72) +- Multiple files given to the CLI were ignored when publishing results to STDOUT (after the first path) (PR #72) +- Fix line endings from CRLF to LF for certain project files (PR #67) + +### Changed +- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) +- Allow fallback on specified encoding if any (PR #71) + +## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) +### Changed +- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) +- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) + +## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) +### Fixed +- Empty/too-small JSON payload misdetection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) + +### Changed +- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) + +## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) +### Fixed +- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) +- Using explain=False permanently disabled the verbose output in the current runtime (PR #47) +- One log entry (language target preemptive) was not shown in logs when using explain=True (PR #47) +- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) + +### Changed +- Public function normalize's default argument values were not aligned with from_bytes (PR #53) + +### Added +- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) + +## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) +### Changed +- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. +- Emphasis has been placed on UTF-8 detection, which should perform nearly instantaneously. +- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved; Turkish content is now detected correctly (most of the time) +- The program has been rewritten to ease readability and maintainability (now using static typing) +- utf_7 detection has been reinstated. + +### Removed +- This package no longer requires anything when used with Python 3.5 (dropped cached_property) +- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbo-Croatian. +- The exception hook on UnicodeDecodeError has been removed. + +### Deprecated +- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 + +### Fixed +- The CLI output used the relative path of the file(s); it should be absolute. + +## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) +### Fixed +- Logger configuration/usage no longer conflicts with others (PR #44) + +## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) +### Removed +- Using standard logging instead of using the package loguru. +- Dropping nose test framework in favor of the maintained pytest. +- Chose not to use the dragonmapper package to help with gibberish Chinese/CJK text. +- Require cached_property only for Python 3.5 due to a constraint; dropped for every other interpreter version. +- Stop support for UTF-7 that does not contain a SIG. +- Dropping PrettyTable, replaced with pure JSON output in CLI. + +### Fixed +- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present, due to the sub-match factoring process. +- Not searching properly for the BOM when trying utf32/16 parent codec. + +### Changed +- Improving the package's final size by compressing frequencies.json. +- Huge improvement on the largest payloads. + +### Added +- CLI now produces JSON-consumable output. +- Return ASCII if the given sequence fits, given reasonable confidence. + +## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) + +### Fixed +- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) + +## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) + +### Fixed +- An empty payload given for detection may cause an exception when trying to access the `alphabets` property. (PR #39) + +## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) + +### Fixed +- The legacy detect function should return UTF-8-SIG if a sig is present in the payload. (PR #38) + +## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) + +### Changed +- Amend the previous release to allow prettytable 2.0 (PR #35) + +## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) + +### Fixed +- Fix error while using the package with a Python pre-release interpreter (PR #33) + +### Changed +- Dependencies refactoring, constraints revised. + +### Added +- Add Python 3.9 and 3.10 to the supported interpreters + +MIT License + +Copyright (c) 2019 TAHRI Ahmed R.
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD new file mode 100644 index 0000000..9b31b27 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD @@ -0,0 +1,36 @@ +../../bin/normalizer,sha256=d64Y2GlBYzj4fRL5WK1WS-VHgezegWBH89IcZpevMig,242 +charset_normalizer-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-3.4.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-3.4.0.dist-info/METADATA,sha256=WGbEW9ehh2spNJxo1M6sEGGZWmsQ-oj2DsMjV29zoms,34159 +charset_normalizer-3.4.0.dist-info/RECORD,, +charset_normalizer-3.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer-3.4.0.dist-info/WHEEL,sha256=XihS4yPLFu_eB7R4sl7jUHiEAA7zQ3q0-_CuIzkpFkk,151 +charset_normalizer-3.4.0.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 +charset_normalizer-3.4.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 +charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 +charset_normalizer/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/__pycache__/api.cpython-311.pyc,, +charset_normalizer/__pycache__/cd.cpython-311.pyc,, +charset_normalizer/__pycache__/constant.cpython-311.pyc,, +charset_normalizer/__pycache__/legacy.cpython-311.pyc,, +charset_normalizer/__pycache__/md.cpython-311.pyc,, +charset_normalizer/__pycache__/models.cpython-311.pyc,, +charset_normalizer/__pycache__/utils.cpython-311.pyc,, +charset_normalizer/__pycache__/version.cpython-311.pyc,, +charset_normalizer/api.py,sha256=kMyNUqrfBZU22PP0pYKrSldtYUGA24wsGlXGLAKra7c,22559 +charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 +charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 +charset_normalizer/cli/__main__.py,sha256=zX9sV_ApU1d96Wb0cS04vulstdB4F0Eh7kLn-gevfw4,10411 +charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/constant.py,sha256=uwoW87NicWZDTLviX7le0wdoYBbhBQDA4n1JtJo77ts,40499 
+charset_normalizer/legacy.py,sha256=XJjkT0hejMH8qfAKz1ts8OUiBT18t2FJP3tJgLwUWwc,2327 +charset_normalizer/md.cpython-311-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064 +charset_normalizer/md.py,sha256=SIIZcENrslI7h3v4GigbFN61fRyE_wiCN1z9Ii3fBRo,20138 +charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so,sha256=xDjCrj9MzdH8kW7d-HbtvIaOcrX6SFiV7SrBv4QgGEI,272696 +charset_normalizer/models.py,sha256=oAMAcBSEY7CngbUXJp34Wc4Rl9NKJJjGmUwW3EPtk6g,12425 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894 +charset_normalizer/version.py,sha256=AX66S4ytQFdd6F5jbVU2OPMqYwFS5M3BkMvyX-3BKF8,79 diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL new file mode 100644 index 0000000..d9c3682 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp311-cp311-manylinux_2_17_x86_64 +Tag: cp311-cp311-manylinux2014_x86_64 + diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt new file mode 100644 index 0000000..65619e7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +normalizer = charset_normalizer.cli:cli_detect diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..66958f0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/.DS_Store new file mode 100644 index 0000000..7e52a6c Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py new file mode 100644 index 0000000..55991fc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . 
+:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. +""" +import logging + +from .api import from_bytes, from_fp, from_path, is_binary +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "is_binary", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py new file mode 100644 index 0000000..beae2ef --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py @@ -0,0 +1,4 @@ +from .cli import cli_detect + +if __name__ == "__main__": + cli_detect() diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py new file mode 100644 index 0000000..e3f2283 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py @@ -0,0 +1,668 @@ +import logging +from os import PathLike +from typing import BinaryIO, List, Optional, Set, Union + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: Union[bytes, bytearray], + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
+ By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + early_stop_results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + ( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)] + ), + encoding=encoding_iana, + ) + else: + decoded_payload = str( + ( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :] + ), + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + enable_fallback + and encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, + encoding_iana, + threshold, + False, + [], + decoded_payload, + preemptive_declaration=specified_encoding, + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. 
Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + current_match = CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + ( + decoded_payload + if ( + is_too_large_sequence is False + or encoding_iana in [specified_encoding, "ascii", "utf_8"] + ) + else None + ), + preemptive_declaration=specified_encoding, + ) + + results.append(current_match) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + # If md says nothing to worry about, then... stop immediately! + if mean_mess_ratio == 0.0: + logger.debug( + "Encoding detection: %s is most likely the one.", + current_match.encoding, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([current_match]) + + early_stop_results.append(current_match) + + if ( + len(early_stop_results) + and (specified_encoding is None or specified_encoding in tested) + and "ascii" in tested + and "utf_8" in tested + ): + probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] + logger.debug( + "Encoding detection: %s is most likely the one.", + probable_result.encoding, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return CharsetMatches([probable_result]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. 
With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. + """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def from_path( + path: Union[str, bytes, PathLike], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. + """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def is_binary( + fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = False, +) -> bool: + """ + Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. + Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match + are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
+ """ + if isinstance(fp_or_path_or_payload, (str, PathLike)): + guesses = from_path( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + elif isinstance( + fp_or_path_or_payload, + ( + bytes, + bytearray, + ), + ): + guesses = from_bytes( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + else: + guesses = from_fp( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + + return not guesses diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py new file mode 100644 index 0000000..4ea6760 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py @@ -0,0 +1,395 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter, Dict, List, Optional, Tuple + +from .constant import ( + FREQUENCIES, + KO_NAMES, + LANGUAGE_SUPPORTED_COUNT, + TOO_SMALL_SEQUENCE, + ZH_NAMES, +) +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module( + "encodings.{}".format(iana_name) + ).IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. 
+ """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. + """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
+ """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + ordered_characters_count: int = len(ordered_characters) + target_language_characters_count: int = len(FREQUENCIES[language]) + + large_alphabet: bool = target_language_characters_count > 26 + + for character, character_rank in zip( + ordered_characters, range(0, ordered_characters_count) + ): + if character not in FREQUENCIES_language_set: + continue + + character_rank_in_language: int = FREQUENCIES[language].index(character) + expected_projection_ratio: float = ( + target_language_characters_count / ordered_characters_count + ) + character_rank_projection: int = int(character_rank * expected_projection_ratio) + + if ( + large_alphabet is False + and abs(character_rank_projection - character_rank_in_language) > 4 + ): + continue + + if ( + large_alphabet is True + and abs(character_rank_projection - character_rank_in_language) + < target_language_characters_count / 3 + ): + character_approved_count += 1 + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0:character_rank_in_language + ] + characters_after_source: List[str] = FREQUENCIES[language][ + character_rank_in_language: + ] + characters_before: List[str] = ordered_characters[0:character_rank] + characters_after: List[str] = ordered_characters[character_rank:] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. + """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. 
+ """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: Dict[str, List[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. + """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py new file mode 100644 index 0000000..d95fedf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py @@ -0,0 +1,6 @@ +from .__main__ import cli_detect, query_yes_no + +__all__ = ( + "cli_detect", + "query_yes_no", +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py new file mode 100644 index 0000000..e7edd0f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py @@ -0,0 +1,320 @@ +import argparse +import sys +from json import dumps +from os.path import abspath, basename, 
dirname, join, realpath +from platform import python_version +from typing import List, Optional +from unicodedata import unidata_version + +import charset_normalizer.md as md_module +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: Optional[List[str]] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-i", + "--no-preemptive", + action="store_true", + default=False, + dest="no_preemptive", + help="Disable looking at a charset declaration to hint the detector.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. 
<= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( + __version__, + python_version(), + unidata_version, + "OFF" if md_module.__file__.lower().endswith(".py") else "ON", + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + if args.files: + for my_file in args.files: + my_file.close() + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + if args.files: + for my_file in args.files: + my_file.close() + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + if args.files: + for my_file in args.files: + my_file.close() + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + matches = from_fp( + my_file, + threshold=args.threshold, + explain=args.verbose, + preemptive_behaviour=args.no_preemptive is False, + ) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + ( + "Maybe try increasing maximum amount of chaos." + if args.threshold < 1.0 + else "" + ), + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + dir_path = dirname(realpath(my_file.name)) + file_name = basename(realpath(my_file.name)) + + o_: List[str] = file_name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = join(dir_path, ".".join(o_)) + + with open(x_[0].unicode_path, "wb") as fp: + fp.write(best_guess.output()) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in 
args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py new file mode 100644 index 0000000..f8f2a81 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py @@ -0,0 +1,1997 @@ +# -*- coding: utf-8 -*- +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 + +# Up-to-date Unicode ucd/15.0.0 +UNICODE_RANGES_COMBINED: Dict[str, range] = { + "Control character": range(32), + "Basic Latin": range(32, 128), + "Latin-1 Supplement": range(128, 256), + "Latin Extended-A": range(256, 384), + "Latin Extended-B": range(384, 592), + "IPA Extensions": range(592, 688), + "Spacing Modifier Letters": range(688, 768), + "Combining Diacritical Marks": range(768, 880), + "Greek and Coptic": range(880, 1024), + "Cyrillic": range(1024, 1280), + "Cyrillic Supplement": range(1280, 1328), + "Armenian": range(1328, 1424), + "Hebrew": range(1424, 1536), + "Arabic": range(1536, 1792), + "Syriac": range(1792, 1872), + "Arabic Supplement": range(1872, 1920), + "Thaana": range(1920, 1984), + "NKo": range(1984, 2048), + "Samaritan": range(2048, 2112), + "Mandaic": range(2112, 2144), + "Syriac Supplement": range(2144, 2160), + "Arabic Extended-B": range(2160, 2208), + "Arabic Extended-A": range(2208, 2304), + "Devanagari": range(2304, 2432), + "Bengali": range(2432, 2560), + "Gurmukhi": range(2560, 2688), + "Gujarati": range(2688, 2816), + "Oriya": range(2816, 2944), + "Tamil": range(2944, 3072), + "Telugu": range(3072, 3200), + "Kannada": range(3200, 3328), + "Malayalam": range(3328, 3456), + "Sinhala": range(3456, 3584), + "Thai": range(3584, 3712), + "Lao": range(3712, 3840), + "Tibetan": range(3840, 4096), + "Myanmar": range(4096, 4256), + "Georgian": range(4256, 4352), + "Hangul Jamo": range(4352, 4608), + "Ethiopic": range(4608, 4992), + "Ethiopic Supplement": range(4992, 5024), + "Cherokee": range(5024, 5120), + "Unified Canadian Aboriginal Syllabics": range(5120, 5760), + "Ogham": range(5760, 5792), + "Runic": range(5792, 5888), + "Tagalog": range(5888, 5920), + "Hanunoo": range(5920, 5952), + "Buhid": range(5952, 5984), + "Tagbanwa": range(5984, 6016), + "Khmer": range(6016, 6144), + "Mongolian": range(6144, 6320), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), + "Limbu": range(6400, 6480), + "Tai Le": range(6480, 6528), + "New Tai Lue": range(6528, 6624), + "Khmer Symbols": range(6624, 6656), + "Buginese": range(6656, 6688), + "Tai Tham": range(6688, 6832), + "Combining Diacritical Marks Extended": range(6832, 6912), + "Balinese": range(6912, 7040), + "Sundanese": range(7040, 7104), + "Batak": range(7104, 7168), + 
"Lepcha": range(7168, 7248), + "Ol Chiki": range(7248, 7296), + "Cyrillic Extended-C": range(7296, 7312), + "Georgian Extended": range(7312, 7360), + "Sundanese Supplement": range(7360, 7376), + "Vedic Extensions": range(7376, 7424), + "Phonetic Extensions": range(7424, 7552), + "Phonetic Extensions Supplement": range(7552, 7616), + "Combining Diacritical Marks Supplement": range(7616, 7680), + "Latin Extended Additional": range(7680, 7936), + "Greek Extended": range(7936, 8192), + "General Punctuation": range(8192, 8304), + "Superscripts and Subscripts": range(8304, 8352), + "Currency Symbols": range(8352, 8400), + "Combining Diacritical Marks for Symbols": range(8400, 8448), + "Letterlike Symbols": range(8448, 8528), + "Number Forms": range(8528, 8592), + "Arrows": range(8592, 8704), + "Mathematical Operators": range(8704, 8960), + "Miscellaneous Technical": range(8960, 9216), + "Control Pictures": range(9216, 9280), + "Optical Character Recognition": range(9280, 9312), + "Enclosed Alphanumerics": range(9312, 9472), + "Box Drawing": range(9472, 9600), + "Block Elements": range(9600, 9632), + "Geometric Shapes": range(9632, 9728), + "Miscellaneous Symbols": range(9728, 9984), + "Dingbats": range(9984, 10176), + "Miscellaneous Mathematical Symbols-A": range(10176, 10224), + "Supplemental Arrows-A": range(10224, 10240), + "Braille Patterns": range(10240, 10496), + "Supplemental Arrows-B": range(10496, 10624), + "Miscellaneous Mathematical Symbols-B": range(10624, 10752), + "Supplemental Mathematical Operators": range(10752, 11008), + "Miscellaneous Symbols and Arrows": range(11008, 11264), + "Glagolitic": range(11264, 11360), + "Latin Extended-C": range(11360, 11392), + "Coptic": range(11392, 11520), + "Georgian Supplement": range(11520, 11568), + "Tifinagh": range(11568, 11648), + "Ethiopic Extended": range(11648, 11744), + "Cyrillic Extended-A": range(11744, 11776), + "Supplemental Punctuation": range(11776, 11904), + "CJK Radicals Supplement": range(11904, 12032), + "Kangxi Radicals": range(12032, 12256), + "Ideographic Description Characters": range(12272, 12288), + "CJK Symbols and Punctuation": range(12288, 12352), + "Hiragana": range(12352, 12448), + "Katakana": range(12448, 12544), + "Bopomofo": range(12544, 12592), + "Hangul Compatibility Jamo": range(12592, 12688), + "Kanbun": range(12688, 12704), + "Bopomofo Extended": range(12704, 12736), + "CJK Strokes": range(12736, 12784), + "Katakana Phonetic Extensions": range(12784, 12800), + "Enclosed CJK Letters and Months": range(12800, 13056), + "CJK Compatibility": range(13056, 13312), + "CJK Unified Ideographs Extension A": range(13312, 19904), + "Yijing Hexagram Symbols": range(19904, 19968), + "CJK Unified Ideographs": range(19968, 40960), + "Yi Syllables": range(40960, 42128), + "Yi Radicals": range(42128, 42192), + "Lisu": range(42192, 42240), + "Vai": range(42240, 42560), + "Cyrillic Extended-B": range(42560, 42656), + "Bamum": range(42656, 42752), + "Modifier Tone Letters": range(42752, 42784), + "Latin Extended-D": range(42784, 43008), + "Syloti Nagri": range(43008, 43056), + "Common Indic Number Forms": range(43056, 43072), + "Phags-pa": range(43072, 43136), + "Saurashtra": range(43136, 43232), + "Devanagari Extended": range(43232, 43264), + "Kayah Li": range(43264, 43312), + "Rejang": range(43312, 43360), + "Hangul Jamo Extended-A": range(43360, 43392), + "Javanese": range(43392, 43488), + "Myanmar Extended-B": range(43488, 43520), + "Cham": range(43520, 43616), + "Myanmar Extended-A": range(43616, 43648), + "Tai Viet": 
range(43648, 43744), + "Meetei Mayek Extensions": range(43744, 43776), + "Ethiopic Extended-A": range(43776, 43824), + "Latin Extended-E": range(43824, 43888), + "Cherokee Supplement": range(43888, 43968), + "Meetei Mayek": range(43968, 44032), + "Hangul Syllables": range(44032, 55216), + "Hangul Jamo Extended-B": range(55216, 55296), + "High Surrogates": range(55296, 56192), + "High Private Use Surrogates": range(56192, 56320), + "Low Surrogates": range(56320, 57344), + "Private Use Area": range(57344, 63744), + "CJK Compatibility Ideographs": range(63744, 64256), + "Alphabetic Presentation Forms": range(64256, 64336), + "Arabic Presentation Forms-A": range(64336, 65024), + "Variation Selectors": range(65024, 65040), + "Vertical Forms": range(65040, 65056), + "Combining Half Marks": range(65056, 65072), + "CJK Compatibility Forms": range(65072, 65104), + "Small Form Variants": range(65104, 65136), + "Arabic Presentation Forms-B": range(65136, 65280), + "Halfwidth and Fullwidth Forms": range(65280, 65520), + "Specials": range(65520, 65536), + "Linear B Syllabary": range(65536, 65664), + "Linear B Ideograms": range(65664, 65792), + "Aegean Numbers": range(65792, 65856), + "Ancient Greek Numbers": range(65856, 65936), + "Ancient Symbols": range(65936, 66000), + "Phaistos Disc": range(66000, 66048), + "Lycian": range(66176, 66208), + "Carian": range(66208, 66272), + "Coptic Epact Numbers": range(66272, 66304), + "Old Italic": range(66304, 66352), + "Gothic": range(66352, 66384), + "Old Permic": range(66384, 66432), + "Ugaritic": range(66432, 66464), + "Old Persian": range(66464, 66528), + "Deseret": range(66560, 66640), + "Shavian": range(66640, 66688), + "Osmanya": range(66688, 66736), + "Osage": range(66736, 66816), + "Elbasan": range(66816, 66864), + "Caucasian Albanian": range(66864, 66928), + "Vithkuqi": range(66928, 67008), + "Linear A": range(67072, 67456), + "Latin Extended-F": range(67456, 67520), + "Cypriot Syllabary": range(67584, 67648), + "Imperial Aramaic": range(67648, 67680), + "Palmyrene": range(67680, 67712), + "Nabataean": range(67712, 67760), + "Hatran": range(67808, 67840), + "Phoenician": range(67840, 67872), + "Lydian": range(67872, 67904), + "Meroitic Hieroglyphs": range(67968, 68000), + "Meroitic Cursive": range(68000, 68096), + "Kharoshthi": range(68096, 68192), + "Old South Arabian": range(68192, 68224), + "Old North Arabian": range(68224, 68256), + "Manichaean": range(68288, 68352), + "Avestan": range(68352, 68416), + "Inscriptional Parthian": range(68416, 68448), + "Inscriptional Pahlavi": range(68448, 68480), + "Psalter Pahlavi": range(68480, 68528), + "Old Turkic": range(68608, 68688), + "Old Hungarian": range(68736, 68864), + "Hanifi Rohingya": range(68864, 68928), + "Rumi Numeral Symbols": range(69216, 69248), + "Yezidi": range(69248, 69312), + "Arabic Extended-C": range(69312, 69376), + "Old Sogdian": range(69376, 69424), + "Sogdian": range(69424, 69488), + "Old Uyghur": range(69488, 69552), + "Chorasmian": range(69552, 69600), + "Elymaic": range(69600, 69632), + "Brahmi": range(69632, 69760), + "Kaithi": range(69760, 69840), + "Sora Sompeng": range(69840, 69888), + "Chakma": range(69888, 69968), + "Mahajani": range(69968, 70016), + "Sharada": range(70016, 70112), + "Sinhala Archaic Numbers": range(70112, 70144), + "Khojki": range(70144, 70224), + "Multani": range(70272, 70320), + "Khudawadi": range(70320, 70400), + "Grantha": range(70400, 70528), + "Newa": range(70656, 70784), + "Tirhuta": range(70784, 70880), + "Siddham": range(71040, 71168), + "Modi": 
range(71168, 71264), + "Mongolian Supplement": range(71264, 71296), + "Takri": range(71296, 71376), + "Ahom": range(71424, 71504), + "Dogra": range(71680, 71760), + "Warang Citi": range(71840, 71936), + "Dives Akuru": range(71936, 72032), + "Nandinagari": range(72096, 72192), + "Zanabazar Square": range(72192, 72272), + "Soyombo": range(72272, 72368), + "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), + "Pau Cin Hau": range(72384, 72448), + "Devanagari Extended-A": range(72448, 72544), + "Bhaiksuki": range(72704, 72816), + "Marchen": range(72816, 72896), + "Masaram Gondi": range(72960, 73056), + "Gunjala Gondi": range(73056, 73136), + "Makasar": range(73440, 73472), + "Kawi": range(73472, 73568), + "Lisu Supplement": range(73648, 73664), + "Tamil Supplement": range(73664, 73728), + "Cuneiform": range(73728, 74752), + "Cuneiform Numbers and Punctuation": range(74752, 74880), + "Early Dynastic Cuneiform": range(74880, 75088), + "Cypro-Minoan": range(77712, 77824), + "Egyptian Hieroglyphs": range(77824, 78896), + "Egyptian Hieroglyph Format Controls": range(78896, 78944), + "Anatolian Hieroglyphs": range(82944, 83584), + "Bamum Supplement": range(92160, 92736), + "Mro": range(92736, 92784), + "Tangsa": range(92784, 92880), + "Bassa Vah": range(92880, 92928), + "Pahawh Hmong": range(92928, 93072), + "Medefaidrin": range(93760, 93856), + "Miao": range(93952, 94112), + "Ideographic Symbols and Punctuation": range(94176, 94208), + "Tangut": range(94208, 100352), + "Tangut Components": range(100352, 101120), + "Khitan Small Script": range(101120, 101632), + "Tangut Supplement": range(101632, 101760), + "Kana Extended-B": range(110576, 110592), + "Kana Supplement": range(110592, 110848), + "Kana Extended-A": range(110848, 110896), + "Small Kana Extension": range(110896, 110960), + "Nushu": range(110960, 111360), + "Duployan": range(113664, 113824), + "Shorthand Format Controls": range(113824, 113840), + "Znamenny Musical Notation": range(118528, 118736), + "Byzantine Musical Symbols": range(118784, 119040), + "Musical Symbols": range(119040, 119296), + "Ancient Greek Musical Notation": range(119296, 119376), + "Kaktovik Numerals": range(119488, 119520), + "Mayan Numerals": range(119520, 119552), + "Tai Xuan Jing Symbols": range(119552, 119648), + "Counting Rod Numerals": range(119648, 119680), + "Mathematical Alphanumeric Symbols": range(119808, 120832), + "Sutton SignWriting": range(120832, 121520), + "Latin Extended-G": range(122624, 122880), + "Glagolitic Supplement": range(122880, 122928), + "Cyrillic Extended-D": range(122928, 123024), + "Nyiakeng Puachue Hmong": range(123136, 123216), + "Toto": range(123536, 123584), + "Wancho": range(123584, 123648), + "Nag Mundari": range(124112, 124160), + "Ethiopic Extended-B": range(124896, 124928), + "Mende Kikakui": range(124928, 125152), + "Adlam": range(125184, 125280), + "Indic Siyaq Numbers": range(126064, 126144), + "Ottoman Siyaq Numbers": range(126208, 126288), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), + "Mahjong Tiles": range(126976, 127024), + "Domino Tiles": range(127024, 127136), + "Playing Cards": range(127136, 127232), + "Enclosed Alphanumeric Supplement": range(127232, 127488), + "Enclosed Ideographic Supplement": range(127488, 127744), + "Miscellaneous Symbols and Pictographs": range(127744, 128512), + "Emoticons range(Emoji)": range(128512, 128592), + "Ornamental Dingbats": range(128592, 128640), + "Transport and Map Symbols": range(128640, 128768), + "Alchemical Symbols": range(128768, 
128896), + "Geometric Shapes Extended": range(128896, 129024), + "Supplemental Arrows-C": range(129024, 129280), + "Supplemental Symbols and Pictographs": range(129280, 129536), + "Chess Symbols": range(129536, 129648), + "Symbols and Pictographs Extended-A": range(129648, 129792), + "Symbols for Legacy Computing": range(129792, 130048), + "CJK Unified Ideographs Extension B": range(131072, 173792), + "CJK Unified Ideographs Extension C": range(173824, 177984), + "CJK Unified Ideographs Extension D": range(177984, 178208), + "CJK Unified Ideographs Extension E": range(178208, 183984), + "CJK Unified Ideographs Extension F": range(183984, 191472), + "CJK Compatibility Ideographs Supplement": range(194560, 195104), + "CJK Unified Ideographs Extension G": range(196608, 201552), + "CJK Unified Ideographs Extension H": range(201552, 205744), + "Tags": range(917504, 917632), + "Variation Selectors Supplement": range(917760, 918000), + "Supplementary Private Use Area-A": range(983040, 1048576), + "Supplementary Private Use Area-B": range(1048576, 1114112), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_NO_ALIASES = [ + "cp720", + "cp737", + "cp856", + "cp874", + "cp875", + "cp1006", + "koi8_r", + "koi8_t", + "koi8_u", +] + +IANA_SUPPORTED: List[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())) + IANA_NO_ALIASES, + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. 
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: Dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", + "(", + ")", +} + + +KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: Set[str] = {"big5", "cp950", 
"big5hkscs", "hz"} + +# Logging LEVEL below DEBUG +TRACE: int = 5 + + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + "言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", + "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + 
"し", + "い", + "を", + "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + "起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", 
+ "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py 
b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py new file mode 100644 index 0000000..3f6d490 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Optional +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE + +# TODO: remove this check when dropping Python 3.7 support +if TYPE_CHECKING: + from typing_extensions import TypedDict + + class ResultDict(TypedDict): + encoding: Optional[str] + language: str + confidence: Optional[float] + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> ResultDict: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + """ + if len(kwargs): + warn( + f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. 
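# Editorial example of the behaviour described above (a sketch, not part of
# the vendored file; the exact best() outcome for such a tiny payload is an
# assumption):
#
#     >>> detect(b"\xef\xbb\xbfhello")["encoding"]
#     'UTF-8-SIG'
#
# the "_sig" suffix is appended just below and then mapped through
# CHARDET_CORRESPONDENCE ("utf_8_sig" -> "UTF-8-SIG") defined in constant.py.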
+ if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..3824a42 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py new file mode 100644 index 0000000..d834db0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py @@ -0,0 +1,628 @@ +from functools import lru_cache +from logging import getLogger +from typing import List, Optional + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_arabic, + is_arabic_isolated_form, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in witch the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to the initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; No restriction gt 0. 
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: Optional[str] = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) + unicode_range_b: Optional[str] = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count <= 13: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + self._buffer_glyph_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + if ( + is_cjk(character) + or is_hangul(character) + or is_katakana(character) + or is_hiragana(character) + or is_thai(character) + ): + self._buffer_glyph_count += 1 + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length >= 0.5: + self._is_current_word_bad = True + # Word/Buffer ending with an upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
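# Editorial example for the branch below (not part of the vendored file):
# a token such as "wordÉ" ends with an upper-case accentuated letter while
# not being fully upper-case, so it is treated as suspicious; an all-caps
# word like "CAFÉ" does not match this particular condition.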
+ elif ( + is_accentuated(self._buffer[-1]) + and self._buffer[-1].isupper() + and all(_.isupper() for _ in self._buffer) is False + ): + self._foreign_long_count += 1 + self._is_current_word_bad = True + elif self._buffer_glyph_count == 1: + self._is_current_word_bad = True + self._foreign_long_count += 1 + if buffer_length >= 24 and self._foreign_long_watch: + camel_case_dst = [ + i + for c, i in zip(self._buffer, range(0, buffer_length)) + if c.isupper() + ] + probable_camel_cased: bool = False + + if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): + probable_camel_cased = True + + if not probable_camel_cased: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + self._buffer_glyph_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. 
+ """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: Optional[str] = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and character.isascii() is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +class ArabicIsolatedFormPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._isolated_form_count: int = 0 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._isolated_form_count = 0 + + def eligible(self, character: str) -> bool: + return is_arabic(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_arabic_isolated_form(character): + self._isolated_form_count += 1 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + isolated_form_usage: float = self._isolated_form_count / self._character_count + + return isolated_form_usage + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode 
range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. + """ + + detectors: List[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. 
" + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: # pragma: nocover + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..38d5e70 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py new file mode 100644 index 0000000..6f6b86b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py @@ -0,0 +1,359 @@ +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + preemptive_declaration: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + self._preemptive_declaration: Optional[str] = preemptive_declaration + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + if isinstance(other, str): + return iana_name(other) == self.encoding + return False + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + return self.coherence > other.coherence + elif chaos_difference < 0.01 and coherence_difference <= 0.02: + # When having a difficult decision, use the result that decoded as many multi-byte as possible. + # preserve RAM usage! 
+ if len(self._payload) >= TOO_BIG_SEQUENCE: + return self.chaos < other.chaos + return self.multi_byte_usage > other.multi_byte_usage + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - (len(str(self)) / len(self.raw)) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. 
+ """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + decoded_string = str(self) + if ( + self._preemptive_declaration is not None + and self._preemptive_declaration.lower() + not in ["utf-8", "utf8", "utf_8"] + ): + patched_header = sub( + RE_POSSIBLE_ENCODING_INDICATION, + lambda m: m.string[m.span()[0] : m.span()[1]].replace( + m.groups()[0], iana_name(self._output_encoding) # type: ignore[arg-type] + ), + decoded_string[:8192], + 1, + ) + + decoded_string = patched_header + decoded_string[8192:] + + self._output_payload = decoded_string.encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: Optional[List[CharsetMatch]] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. 
+ """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) < TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/py.typed b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py new file mode 100644 index 0000000..e5cbbf4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py @@ -0,0 +1,421 @@ +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + 
or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + or "WITH MACRON" in description + or "WITH RING ABOVE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range and character_category != "Lo" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range or "Pictographs" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in 
character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "ARABIC" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_arabic_isolated_form(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "ARABIC" in character_name and "ISOLATED FORM" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_unprintable(character: str) -> bool: + return ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1A" # Why? Its the ASCII substitute character. + and character != "\ufeff" # bug discovered in Python, + # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. + ) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: + """ + Extract using ASCII-only decoder any specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len: int = len(sequence) + + results: List[str] = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. 
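+    Returns a tuple of (iana_encoding, mark) when the payload starts with a known BOM/SIG,
+    otherwise (None, b"").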
+ """ + + for iana_encoding in ENCODING_MARKS: + marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + encoding_alias: str + encoding_iana: str + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges: Set[str] = set() + + for character in decoded_sequence: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module( + "encodings.{}".format(iana_name_a) + ).IncrementalDecoder + decoder_b = importlib.import_module( + "encodings.{}".format(iana_name_b) + ).IncrementalDecoder + + id_a: IncrementalDecoder = decoder_a(errors="ignore") + id_b: IncrementalDecoder = decoder_b(errors="ignore") + + character_match_count: int = 0 + + for i in range(255): + to_be_decoded: bytes = bytes([i]) + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. + """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) + + +def cut_sequence_chunks( + sequences: bytes, + encoding_iana: str, + offsets: range, + chunk_size: int, + bom_or_sig_available: bool, + strip_sig_or_bom: bool, + sig_payload: bytes, + is_multi_byte_decoder: bool, + decoded_payload: Optional[str] = None, +) -> Generator[str, None, None]: + if decoded_payload and is_multi_byte_decoder is False: + for i in offsets: + chunk = decoded_payload[i : i + chunk_size] + if not chunk: + break + yield chunk + else: + for i in offsets: + chunk_end = i + chunk_size + if chunk_end > len(sequences) + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py new file mode 100644 index 0000000..699990e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "3.4.0" +VERSION = __version__.split(".") diff --git a/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py b/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py new file mode 100644 index 0000000..ad63a95 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2021 Datadog, Inc. + + +import re + +""" +Customized log group is a log group shared by multiple applications of the same type. Based on the feedback from AWS, +customers may name the log group arbitrarily. E.g they can name a lambda log group as "/aws/vendedlogs/states/**", which is typically used for Stepfunctions +In addition, potentially, not just Lambda, any other AWS services can use a customized log group. +The workaround is to parse the logstream_name to get the source of logs. 
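+For example, the logstream name "2023/11/06/test-customized-log-group1[$LATEST]13e304cba4b9446eb7ef082a00038990"
+maps back to the Lambda function "test-customized-log-group1" (the text between the last "/" and the "[").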
+""" + +# Example: "2023/11/06/test-customized-log-group1[$LATEST]13e304cba4b9446eb7ef082a00038990" +REX_LAMBDA_CUSTOMIZE_LOGSTREAM_NAME_PATTERN = re.compile( + "^[0-9]{4}\\/[01][0-9]\\/[0-3][0-9]\\/[0-9a-zA-Z_.-]{1,75}\\[(?:\\$LATEST|[0-9A-Za-z_-]{1,129})\\][0-9a-f]{32}$" +) + + +def is_lambda_customized_log_group(logstream_name): + return ( + REX_LAMBDA_CUSTOMIZE_LOGSTREAM_NAME_PATTERN.fullmatch(logstream_name) + is not None + ) + + +def get_lambda_function_name_from_logstream_name(logstream_name): + try: + # Not match the pattern for customized Lambda log group + if not is_lambda_customized_log_group(logstream_name): + return None + leftSquareBracketPos = logstream_name.index("[") + lastForwardSlashPos = logstream_name.rindex("/") + return logstream_name[lastForwardSlashPos + 1 : leftSquareBracketPos] + except: + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA new file mode 100644 index 0000000..91be477 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA @@ -0,0 +1,228 @@ +Metadata-Version: 2.1 +Name: datadog +Version: 0.48.0 +Summary: The Datadog Python library +Project-URL: Bug Tracker, https://github.com/DataDog/datadogpy/issues +Project-URL: Documentation, https://datadogpy.readthedocs.io/en/latest/ +Project-URL: Source Code, https://github.com/DataDog/datadogpy +Author-email: "Datadog, Inc." +License: BSD-3-Clause +License-File: LICENSE +License-File: LICENSE-3rdparty.csv +Keywords: datadog +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 +Requires-Dist: configparser<5; python_version < '3.0' +Requires-Dist: requests>=2.6.0 +Requires-Dist: typing; python_version < '3.5' +Description-Content-Type: text/markdown + +# The Datadog Python library + +[![Unit Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.unit?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=10&branchName=master) +[![Integration Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.integration?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=13&branchName=master) +[![Documentation Status](https://readthedocs.org/projects/datadogpy/badge/?version=latest)](https://readthedocs.org/projects/datadogpy/?badge=latest) +[![PyPI - Version](https://img.shields.io/pypi/v/datadog.svg)](https://pypi.org/project/datadog) +[![PyPI - Downloads](https://pepy.tech/badge/datadog)](https://pepy.tech/project/datadog) + +The Datadog Python Library is a collection of tools suitable for inclusion in existing Python projects or for the development of standalone scripts. 
It provides an abstraction on top of Datadog's raw HTTP interface and the Agent's DogStatsD metrics aggregation server, to interact with Datadog and efficiently report events and metrics. + +- Library Documentation: https://datadogpy.readthedocs.io/en/latest/ +- HTTP API Documentation: https://docs.datadoghq.com/api/ +- DatadogHQ: https://datadoghq.com + +See [CHANGELOG.md](https://github.com/DataDog/datadogpy/blob/master/CHANGELOG.md) for changes. + +## Installation + +To install from pip: + + pip install datadog + +To install from source: + + python setup.py install + +## Datadog API + +To support all Datadog HTTP APIs, a generated library is +available which will expose all the endpoints: +[datadog-api-client-python](https://github.com/DataDog/datadog-api-client-python). + +Find below a working example for submitting an event to your Event Stream: + +```python +from datadog import initialize, api + +options = { + "api_key": "", + "app_key": "", +} + +initialize(**options) + +title = "Something big happened!" +text = "And let me tell you all about it here!" +tags = ["version:1", "application:web"] + +api.Event.create(title=title, text=text, tags=tags) +``` + +**Consult the full list of supported Datadog API endpoints with working code examples in [the Datadog API documentation](https://docs.datadoghq.com/api/latest/?code-lang=python).** + +**Note**: The full list of available Datadog API endpoints is also available in the [Datadog Python Library documentation](https://datadogpy.readthedocs.io/en/latest/) + +#### Environment Variables + +As an alternate method to using the `initialize` function with the `options` parameters, set the environment variables `DATADOG_API_KEY` and `DATADOG_APP_KEY` within the context of your application. + +If `DATADOG_API_KEY` or `DATADOG_APP_KEY` are not set, the library attempts to fall back to Datadog's APM environment variable prefixes: `DD_API_KEY` and `DD_APP_KEY`. + +```python +from datadog import initialize, api + +# Assuming you've set `DD_API_KEY` and `DD_APP_KEY` in your env, +# initialize() will pick it up automatically +initialize() + +title = "Something big happened!" +text = "And let me tell you all about it here!" +tags = ["version:1", "application:web"] + +api.Event.create(title=title, text=text, tags=tags) +``` + +In development, you can disable any `statsd` metric collection using `DD_DOGSTATSD_DISABLE=True` (or any not-empty value). + +## DogStatsD + +In order to use DogStatsD metrics, the Agent must be [running and available](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python). + +### Instantiate the DogStatsD client with UDP + +Once the Datadog Python Library is installed, instantiate the StatsD client using UDP in your code: + +```python +from datadog import initialize, statsd + +options = { + "statsd_host": "127.0.0.1", + "statsd_port": 8125, +} + +initialize(**options) +``` + +See the full list of available [DogStatsD client instantiation parameters](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python#client-instantiation-parameters). + +#### Instantiate the DogStatsd client with UDS + +Once the Datadog Python Library is installed, instantiate the StatsD client using UDS in your code: +```python + +from datadog import initialize, statsd + +options = { + "statsd_socket_path": PATH_TO_SOCKET, +} + +initialize(**options) +``` + +#### Origin detection over UDP and UDS + +Origin detection is a method to detect which pod `DogStatsD` packets are coming from in order to add the pod's tags to the tag list. 
+The `DogStatsD` client attaches an internal tag, `entity_id`. The value of this tag is the content of the `DD_ENTITY_ID` environment variable if found, which is the pod's UID. The Datadog Agent uses this tag to add container tags to the metrics. To avoid overwriting this global tag, make sure to only `append` to the `constant_tags` list. + +To enable origin detection over UDP, add the following lines to your application manifest +```yaml +env: + - name: DD_ENTITY_ID + valueFrom: + fieldRef: + fieldPath: metadata.uid +``` + +### Usage +#### Metrics + +After the client is created, you can start sending custom metrics to Datadog. See the dedicated [Metric Submission: DogStatsD documentation](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python) to see how to submit all supported metric types to Datadog with working code examples: + +* [Submit a COUNT metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#count). +* [Submit a GAUGE metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#gauge). +* [Submit a SET metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#set) +* [Submit a HISTOGRAM metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#histogram) +* [Submit a TIMER metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#timer) +* [Submit a DISTRIBUTION metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#distribution) + +Some options are supported when submitting metrics, like [applying a Sample Rate to your metrics](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-submission-options) or [tagging your metrics with your custom tags](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-tagging). + +#### Events + +After the client is created, you can start sending events to your Datadog Event Stream. See the dedicated [Event Submission: DogStatsD documentation](https://docs.datadoghq.com/events/guides/dogstatsd/?code-lang=python) to see how to submit an event to your Datadog Event Stream. + +#### Service Checks + +After the client is created, you can start sending Service Checks to Datadog. See the dedicated [Service Check Submission: DogStatsD documentation](https://docs.datadoghq.com/developers/service_checks/dogstatsd_service_checks_submission/?code-lang=python) to see how to submit a Service Check to Datadog. + +### Monitoring this client + +This client automatically injects telemetry about itself in the DogStatsD stream. +Those metrics will not be counted as custom and will not be billed. This feature can be disabled using the `statsd.disable_telemetry()` method. + +See [Telemetry documentation](https://docs.datadoghq.com/developers/dogstatsd/high_throughput/?code-lang=python#client-side-telemetry) to learn more about it. 
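+
+As a minimal, illustrative sketch tying the pieces above together (the metric names and tags
+below are made up, and the agent is assumed to be listening on the default local UDP port):
+
+```python
+from datadog import initialize, statsd
+
+initialize(statsd_host="127.0.0.1", statsd_port=8125)
+
+# Client-side telemetry is on by default; opt out if you do not want the
+# extra telemetry packets in the DogStatsD stream.
+statsd.disable_telemetry()
+
+# Submit an example COUNT and GAUGE metric with custom tags.
+statsd.increment("example.page.views", tags=["environment:dev"])
+statsd.gauge("example.queue.depth", 42, tags=["environment:dev"])
+```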
+ +### Benchmarks + +_Note: You will need to install `psutil` package before running the benchmarks._ + +If you would like to get an approximate idea on the throughput that your DogStatsD library +can handle on your system, you can run the included local benchmark code: + +```sh-session +$ # Python 2 Example +$ python2 -m unittest -vvv tests.performance.test_statsd_throughput + +$ # Python 3 Example +$ python3 -m unittest -vvv tests.performance.test_statsd_throughput +``` + +You can also add set `BENCHMARK_*` to customize the runs: +```sh-session +$ # Example #1 +$ BENCHMARK_NUM_RUNS=10 BENCHMARK_NUM_THREADS=1 BENCHMARK_NUM_DATAPOINTS=5000 BENCHMARK_TRANSPORT="UDP" python2 -m unittest -vvv tests.performance.test_statsd_throughput + +$ # Example #2 +$ BENCHMARK_NUM_THREADS=10 BENCHMARK_TRANSPORT="UDS" python3 -m unittest -vvv tests.performance.test_statsd_throughput +``` + +## Maximum packets size in high-throughput scenarios + +In order to have the most efficient use of this library in high-throughput scenarios, +default values for the maximum packets size have already been set for both UDS (8192 bytes) +and UDP (1432 bytes) in order to have the best usage of the underlying network. +However, if you perfectly know your network and you know that a different value for the maximum packets +size should be used, you can set it with the parameter `max_buffer_len`. Example: + +```python +from datadog import initialize + +options = { + "api_key": "", + "app_key": "", + "max_buffer_len": 4096, +} + +initialize(**options) +``` + +## Thread Safety + +`DogStatsD` and `ThreadStats` are thread-safe. diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD new file mode 100644 index 0000000..dd845b1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD @@ -0,0 +1,161 @@ +../../bin/dog,sha256=USXQo9Llj4ZKaLnsoeHfNyJv8BOTxhRN2K3ybH4P368,224 +../../bin/dogshell,sha256=USXQo9Llj4ZKaLnsoeHfNyJv8BOTxhRN2K3ybH4P368,224 +../../bin/dogshellwrap,sha256=F4Dt5QTVYjdzj46YMwEj3E_oJ5ccVicfzgwWTFFWhd8,229 +../../bin/dogwrap,sha256=F4Dt5QTVYjdzj46YMwEj3E_oJ5ccVicfzgwWTFFWhd8,229 +datadog-0.48.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +datadog-0.48.0.dist-info/METADATA,sha256=xQ5n174J3GUL9awAKYujVRMfD2vnAz_D-cPSF2kXCf8,10144 +datadog-0.48.0.dist-info/RECORD,, +datadog-0.48.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +datadog-0.48.0.dist-info/WHEEL,sha256=VYAwk8D_V6zmIA2XKK-k7Fem_KAtVk3hugaRru3yjGc,105 +datadog-0.48.0.dist-info/entry_points.txt,sha256=UD-9aGJqcY-y_BqSuZvhoZEmoZcHJaAtfwmPHO3kf3g,158 +datadog-0.48.0.dist-info/licenses/LICENSE,sha256=LUpuzY69IJ0kJ3YlYDI6c8_9l9eyeW5xHv8-gRhgozM,1503 +datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv,sha256=W2B3r48ALOEkEFjyoUMmuU1lm9soN-rG9QqtufdYjts,252 +datadog/__init__.py,sha256=64k7GZlnwUUNl8R7aQvDeswZYBpveECUD1_JIdC4C0s,5551 +datadog/__pycache__/__init__.cpython-311.pyc,, +datadog/__pycache__/version.cpython-311.pyc,, +datadog/api/__init__.py,sha256=s2vtkIF6TN26-AiD2XEYUr7I9upD8m8VdN5e-b2qCb4,1900 +datadog/api/__pycache__/__init__.cpython-311.pyc,, +datadog/api/__pycache__/api_client.cpython-311.pyc,, +datadog/api/__pycache__/aws_integration.cpython-311.pyc,, +datadog/api/__pycache__/aws_log_integration.cpython-311.pyc,, +datadog/api/__pycache__/azure_integration.cpython-311.pyc,, +datadog/api/__pycache__/comments.cpython-311.pyc,, +datadog/api/__pycache__/constants.cpython-311.pyc,, 
+datadog/api/__pycache__/dashboard_list_v2.cpython-311.pyc,, +datadog/api/__pycache__/dashboard_lists.cpython-311.pyc,, +datadog/api/__pycache__/dashboards.cpython-311.pyc,, +datadog/api/__pycache__/distributions.cpython-311.pyc,, +datadog/api/__pycache__/downtimes.cpython-311.pyc,, +datadog/api/__pycache__/events.cpython-311.pyc,, +datadog/api/__pycache__/exceptions.cpython-311.pyc,, +datadog/api/__pycache__/format.cpython-311.pyc,, +datadog/api/__pycache__/gcp_integration.cpython-311.pyc,, +datadog/api/__pycache__/graphs.cpython-311.pyc,, +datadog/api/__pycache__/hosts.cpython-311.pyc,, +datadog/api/__pycache__/http_client.cpython-311.pyc,, +datadog/api/__pycache__/infrastructure.cpython-311.pyc,, +datadog/api/__pycache__/logs.cpython-311.pyc,, +datadog/api/__pycache__/metadata.cpython-311.pyc,, +datadog/api/__pycache__/metrics.cpython-311.pyc,, +datadog/api/__pycache__/monitors.cpython-311.pyc,, +datadog/api/__pycache__/permissions.cpython-311.pyc,, +datadog/api/__pycache__/resources.cpython-311.pyc,, +datadog/api/__pycache__/roles.cpython-311.pyc,, +datadog/api/__pycache__/screenboards.cpython-311.pyc,, +datadog/api/__pycache__/service_checks.cpython-311.pyc,, +datadog/api/__pycache__/service_level_objectives.cpython-311.pyc,, +datadog/api/__pycache__/synthetics.cpython-311.pyc,, +datadog/api/__pycache__/tags.cpython-311.pyc,, +datadog/api/__pycache__/timeboards.cpython-311.pyc,, +datadog/api/__pycache__/users.cpython-311.pyc,, +datadog/api/api_client.py,sha256=56xtTsbuel6TJXHCKrONq_sfscUOCAX4W4sj-TZVwzQ,10340 +datadog/api/aws_integration.py,sha256=ED6sdQCK1C93Z7VV_mO6aLw492G-B-A9mY2UtCRNaaY,10926 +datadog/api/aws_log_integration.py,sha256=vIIOkwVHD1KFkkZibRbkppIg3IgA37j-fyny6_16--M,4434 +datadog/api/azure_integration.py,sha256=wEBgwbh3PGNnPejXbePNTVjuRGDvEJfUoVpoHHkfMQU,3204 +datadog/api/comments.py,sha256=-ujRDfrSlTYt_QAPRNrzDhIZ6ohxEOXGH9L5r3vjhBw,461 +datadog/api/constants.py,sha256=8XSSk69ZSdAzl0q8_sRSMydRRiVT8kkA0Fo3ABceDWA,806 +datadog/api/dashboard_list_v2.py,sha256=kNizRqfzAKc6yq1j_OBGbUN3TjojatxoxSHUlQnn154,677 +datadog/api/dashboard_lists.py,sha256=vKAdF3xhDPwkg615_yyCTnASRJSDLIKW2ZV0oRSZvdY,1166 +datadog/api/dashboards.py,sha256=UQPvh_TtWWQk4Fww9bOTYXz0P8yIp22s6rW9A4ZJGA8,623 +datadog/api/distributions.py,sha256=MEnpxEqw-ZNdXrazMq2vBIzCgkI5uA_kPo1ZaFXppZ8,1895 +datadog/api/downtimes.py,sha256=o7JswAJXaDnK-nqlFDFW3WKPjOKYcB5d86nBK3swgVU,1072 +datadog/api/events.py,sha256=_e3ThyCsWuguRypEgfJ-zC48kaks4E0cdHRtI3tKyYU,3376 +datadog/api/exceptions.py,sha256=eVe7czONQySBZS8Std67ABXep-msgc8UYWnPqBIadnM,3171 +datadog/api/format.py,sha256=6RahpimFhlYEqUVfH9RgB9C1D8mgWYMXxCm8788rS-Y,1209 +datadog/api/gcp_integration.py,sha256=jr9rl-S3V_dV8Qlk5kMCAKSQgjTRet7wWsyfmzPzwqc,3778 +datadog/api/graphs.py,sha256=6BuHX01mxyJ7x8NWI968kmkBLAGBaAKe0JU6LrkoVvM,2695 +datadog/api/hosts.py,sha256=Qr4C6mEexWiCdUp98oIUzelSW2xb-J_c65O_Fpf9jm8,2491 +datadog/api/http_client.py,sha256=9VirvttGWQiWFH-fUW5BmUcNcwJaH9LLTqhgqSd5gcE,6465 +datadog/api/infrastructure.py,sha256=XnY0UvbEv_TKfMMYHAu884F0Jz8RmcPar55qrIil9Zc,1014 +datadog/api/logs.py,sha256=OCI0PmiNON8kIDul5bh-rJPg4Sp8rupQl76fNyMz4sk,727 +datadog/api/metadata.py,sha256=TdrrmRulgc1arbFzCp7PGCTK1vBJx25NWSH9b0KHxZc,2290 +datadog/api/metrics.py,sha256=S2sBGWeFRzcSA8pqu_eFwfBDF55sIEQ80J-ixw2ROas,5217 +datadog/api/monitors.py,sha256=8qqDGUdr_x3mGWeT6fvgJwkf8ydHJUrYzjQXA8JRnS0,4859 +datadog/api/permissions.py,sha256=iZjyktAJrnADC1pstL59H4DZsM_oplIng2rci6lrDss,720 +datadog/api/resources.py,sha256=fY3c-G7LrysmZOJCqi4vYTkRLxFnrbFiLLR_g5dHhZ4,16128 
+datadog/api/roles.py,sha256=R0D3WezC7kVFoYR_wfKdbkIRu4k7mgCNag9D_Md7uYk,2370 +datadog/api/screenboards.py,sha256=fNa6N9bSamxopesHeCyIXZo-a4_da8kz60nVkf_t8D4,1389 +datadog/api/service_checks.py,sha256=ejDB2HqzL2iDg3-MUuvs8fwYv89LAh81G71IhSpf6dg,1615 +datadog/api/service_level_objectives.py,sha256=gBs30qG3PjRU78bUkVcxzlxqGsczVGuto8IfvuYk7oE,6547 +datadog/api/synthetics.py,sha256=YM1jyOG4HOxg-xlbI7YTVJ5e-uVIcrLr_7eWJae2e_Q,6135 +datadog/api/tags.py,sha256=u7lrXrKuyn_n-xoCtsJ2N9simr79KvmygiQDnI3XLwk,1612 +datadog/api/timeboards.py,sha256=4vTKGYblmfHtT8NVQ8P_tb-0EEm2X09W_HuR_2Vvnhw,618 +datadog/api/users.py,sha256=wvcRRgeZLYESScFTw3Dont8dwOr0d-ltBZctqnnYNSM,1440 +datadog/dogshell/__init__.py,sha256=5frjaiaWH7bwTuho4RAxOQwLGV6PlWuPONtqNoxCwQ4,4306 +datadog/dogshell/__pycache__/__init__.cpython-311.pyc,, +datadog/dogshell/__pycache__/comment.cpython-311.pyc,, +datadog/dogshell/__pycache__/common.cpython-311.pyc,, +datadog/dogshell/__pycache__/dashboard.cpython-311.pyc,, +datadog/dogshell/__pycache__/dashboard_list.cpython-311.pyc,, +datadog/dogshell/__pycache__/downtime.cpython-311.pyc,, +datadog/dogshell/__pycache__/event.cpython-311.pyc,, +datadog/dogshell/__pycache__/host.cpython-311.pyc,, +datadog/dogshell/__pycache__/metric.cpython-311.pyc,, +datadog/dogshell/__pycache__/monitor.cpython-311.pyc,, +datadog/dogshell/__pycache__/screenboard.cpython-311.pyc,, +datadog/dogshell/__pycache__/search.cpython-311.pyc,, +datadog/dogshell/__pycache__/service_check.cpython-311.pyc,, +datadog/dogshell/__pycache__/service_level_objective.cpython-311.pyc,, +datadog/dogshell/__pycache__/tag.cpython-311.pyc,, +datadog/dogshell/__pycache__/timeboard.cpython-311.pyc,, +datadog/dogshell/__pycache__/wrap.cpython-311.pyc,, +datadog/dogshell/comment.py,sha256=-Sai_Bt0Lc1i1izMw-Y9swu1qTMqJersh_m734XphlM,6571 +datadog/dogshell/common.py,sha256=K9rTVyLOhhzmF_b7SloOC7YkrizT66c5_40zDe6thQs,5307 +datadog/dogshell/dashboard.py,sha256=FfEl8NCXg6Uw2af0itRVGZfCpiSPSqXhlpm2DlHDNso,6628 +datadog/dogshell/dashboard_list.py,sha256=Zk1EWwb6uVDfc_s17lW4LDbQe0ZdOhw76_4OyXuYXnA,12591 +datadog/dogshell/downtime.py,sha256=9DtKolbWCHiuhCSJWxgWUDh6TZrm431-Cf1IM-CBzDA,5324 +datadog/dogshell/event.py,sha256=7vIYeXR5hnoMH8QWL_gN5cahvERBZXBfOSYpPautvYw,7398 +datadog/dogshell/host.py,sha256=4hd3kpeyTw6HS_cOLF_zNm2rlXWXvsKakaLUZVkyzXk,2281 +datadog/dogshell/metric.py,sha256=6-D1QR_jhNiP70ltlGB-lYDTW1bDR3H0JAqVb01T1-k,2834 +datadog/dogshell/monitor.py,sha256=Bqb0U_JSGHE0aj7v1uUGHnR7YcEr5N6lz_cW-ivYY7g,16294 +datadog/dogshell/screenboard.py,sha256=q8Jq8chvurkOnUYJQgImMnD3Hl32btvd8_5OuszNLJk,11606 +datadog/dogshell/search.py,sha256=pZ9qoIpgQWh-AotSsgv1Y4GmDGvApHi4_xByk1ii0f0,1663 +datadog/dogshell/service_check.py,sha256=XgTeVqECRmNJU_ie5o14pm9BqAgt2Ka4RLdrNCJFZDY,2161 +datadog/dogshell/service_level_objective.py,sha256=LXCT2Ws4EKquYJhIZ-doY8dhRaGjnDgXdsewlBj5a7w,15044 +datadog/dogshell/tag.py,sha256=u3Dmfwx_HxBlA-5rZZQJMFX24XZ1nRgmJSzsyzEiZrM,4328 +datadog/dogshell/timeboard.py,sha256=9kwRqMtH2PTNKuDpyPn1-jdYhP2AbSgg9HF62HJvJDQ,13298 +datadog/dogshell/wrap.py,sha256=hxjQ2_USc42lsMflQ-GWTKUNi4yUQacyMsBZBd9E6MM,16934 +datadog/dogstatsd/__init__.py,sha256=4IoRSiHQXgIwlpDoC0BegFNS_G9Nq0tZOhTUKyzqCm0,294 +datadog/dogstatsd/__pycache__/__init__.cpython-311.pyc,, +datadog/dogstatsd/__pycache__/base.cpython-311.pyc,, +datadog/dogstatsd/__pycache__/container.cpython-311.pyc,, +datadog/dogstatsd/__pycache__/context.cpython-311.pyc,, +datadog/dogstatsd/__pycache__/context_async.cpython-311.pyc,, +datadog/dogstatsd/__pycache__/route.cpython-311.pyc,, 
+datadog/dogstatsd/base.py,sha256=tLuou8XH1Q9--1oS0yuFSO0B-UIE5YEolH0paqo81uI,52299 +datadog/dogstatsd/container.py,sha256=0doQtobT4ID8GKDwa-jUjUFr_NTsf0jgc2joaUT0y7o,2052 +datadog/dogstatsd/context.py,sha256=yZgl5pCTHf0GrGkiruAy0H9dVHWZDlvVxjkn6e_elcQ,2873 +datadog/dogstatsd/context_async.py,sha256=wJgbf9n_pHaN95I0I1RoxycjoK18L0ZBGUVrzcVsW4M,1543 +datadog/dogstatsd/route.py,sha256=VOoCuD5XD9PPtEydVjpbz_FldgGEOd8Yazpt2YoVD-U,1253 +datadog/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +datadog/threadstats/__init__.py,sha256=a8JfLr2QiPHpxhEC-U5gmRuh9UI5kaLjvv785o_qEsY,379 +datadog/threadstats/__pycache__/__init__.cpython-311.pyc,, +datadog/threadstats/__pycache__/aws_lambda.cpython-311.pyc,, +datadog/threadstats/__pycache__/base.cpython-311.pyc,, +datadog/threadstats/__pycache__/constants.cpython-311.pyc,, +datadog/threadstats/__pycache__/events.cpython-311.pyc,, +datadog/threadstats/__pycache__/metrics.cpython-311.pyc,, +datadog/threadstats/__pycache__/periodic_timer.cpython-311.pyc,, +datadog/threadstats/__pycache__/reporters.cpython-311.pyc,, +datadog/threadstats/aws_lambda.py,sha256=E71iKuW9p4tLW3HZN6QzCPFLmP4ICsSSHogXXefjCHs,3701 +datadog/threadstats/base.py,sha256=YfUWWYL0DptCSCAMUe0qxc35wciCLcqNHEtX8PRFZw8,19162 +datadog/threadstats/constants.py,sha256=3BDnCBKzznBZLsY2oKs8EQBT4vJnIStRcl19FlfxMtw,569 +datadog/threadstats/events.py,sha256=Sa69_TyFoe333mPhcG2vtkYPkeqm-JJTNZDDZWhP1kU,713 +datadog/threadstats/metrics.py,sha256=CAUUzmx6GL78MWLpGWBsm1eZ9RR9Jgs2yCGY24yIp80,6242 +datadog/threadstats/periodic_timer.py,sha256=8DlyzDLcfsVhpoG8sc_MpaJvm-YDx4A5JGkt9vLXVP4,1137 +datadog/threadstats/reporters.py,sha256=SJ45WtEYLModVIq8e6XdGgGAVxPFwW-Cri8d0-s_e1I,937 +datadog/util/__init__.py,sha256=nHOZxl1VhFT33JpvolN8S3QWGNPE-BptvlumBl8pCEo,233 +datadog/util/__pycache__/__init__.cpython-311.pyc,, +datadog/util/__pycache__/cli.cpython-311.pyc,, +datadog/util/__pycache__/compat.cpython-311.pyc,, +datadog/util/__pycache__/config.cpython-311.pyc,, +datadog/util/__pycache__/deprecation.cpython-311.pyc,, +datadog/util/__pycache__/format.cpython-311.pyc,, +datadog/util/__pycache__/hostname.cpython-311.pyc,, +datadog/util/cli.py,sha256=OCGeY63V_iARHFod1sXbe8Fin7zIAZrA_1zJGqvURMY,5013 +datadog/util/compat.py,sha256=0UxFczhysUXWXsC1ZLo80Rte2qEo_VVt5RdV-x3JmF8,3284 +datadog/util/config.py,sha256=4hT22Kb1jPC7_9nPkaznuwRS3AqQ-X0wFbfrJK11x7I,3922 +datadog/util/deprecation.py,sha256=Aznjj1YLEB0WDt9YO84BVSNFnnolEBdXH9Vwrq1Npx4,782 +datadog/util/format.py,sha256=9jXeqsvnHr44X6B008k25qcwPES6OqB05-s8wee9_0c,1339 +datadog/util/hostname.py,sha256=5yedKu2G59Iv7m3WmdUmhmxb3KgC4VQrHueL4Z1wyJg,10296 +datadog/version.py,sha256=bkYe4lEQZCEmFm0XRZaZkxTV1niMqR_lbp-tzKL6s6c,23 diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL new file mode 100644 index 0000000..a5543ba --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.0 +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt new file mode 100644 index 0000000..4b946cb --- /dev/null +++ 
b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +dog = datadog.dogshell:main +dogshell = datadog.dogshell:main +dogshellwrap = datadog.dogshell.wrap:main +dogwrap = datadog.dogshell.wrap:main diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000..984d5d2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE @@ -0,0 +1,26 @@ +Copyright (c) 2015-Present Datadog, Inc + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv new file mode 100644 index 0000000..3afd934 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv @@ -0,0 +1,4 @@ +Component,Origin,License,Copyright +setup.py,decorator,BSD-2-Clause,Copyright (c) 2005-2018, Michele Simionato +setup.py,requests,Apache-2.0,Copyright 2019 Kenneth Reitz +setup.py,argparse,Python-2.0,2006-2009 Steven J. Bethard diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store new file mode 100644 index 0000000..5900677 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py new file mode 100644 index 0000000..ffe4b64 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py @@ -0,0 +1,138 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Datadogpy is a collection of Datadog Python tools. +It contains: +* datadog.api: a Python client for Datadog REST API. 
+* datadog.dogstatsd: a DogStatsd Python client. +* datadog.threadstats: an alternative tool to DogStatsd client for collecting application metrics +without hindering performance. +* datadog.dogshell: a command-line tool, wrapping datadog.api, to interact with Datadog REST API. +""" +# stdlib +import logging +import os +import os.path +from typing import Any, List, Optional + +# datadog +from datadog import api +from datadog.dogstatsd import DogStatsd, statsd # noqa +from datadog.threadstats import ThreadStats, datadog_lambda_wrapper, lambda_metric # noqa +from datadog.util.compat import iteritems, NullHandler, text +from datadog.util.hostname import get_hostname +from datadog.version import __version__ # noqa + +# Loggers +logging.getLogger("datadog.api").addHandler(NullHandler()) +logging.getLogger("datadog.dogstatsd").addHandler(NullHandler()) +logging.getLogger("datadog.threadstats").addHandler(NullHandler()) + + +def initialize( + api_key=None, # type: Optional[str] + app_key=None, # type: Optional[str] + host_name=None, # type: Optional[str] + api_host=None, # type: Optional[str] + statsd_host=None, # type: Optional[str] + statsd_port=None, # type: Optional[int] + statsd_disable_buffering=True, # type: bool + statsd_use_default_route=False, # type: bool + statsd_socket_path=None, # type: Optional[str] + statsd_namespace=None, # type: Optional[str] + statsd_constant_tags=None, # type: Optional[List[str]] + return_raw_response=False, # type: bool + hostname_from_config=True, # type: bool + **kwargs # type: Any +): + # type: (...) -> None + """ + Initialize and configure Datadog.api and Datadog.statsd modules + + :param api_key: Datadog API key + :type api_key: string + + :param app_key: Datadog application key + :type app_key: string + + :param host_name: Set a specific hostname + :type host_name: string + + :param proxies: Proxy to use to connect to Datadog API; + for example, 'proxies': {'http': "http::@:/"} + :type proxies: dictionary mapping protocol to the URL of the proxy. + + :param api_host: Datadog API endpoint + :type api_host: url + + :param statsd_host: Host of DogStatsd server or statsd daemon + :type statsd_host: address + + :param statsd_port: Port of DogStatsd server or statsd daemon + :type statsd_port: port + + :param statsd_disable_buffering: Enable/disable statsd client buffering support + (default: True). + :type statsd_disable_buffering: boolean + + :param statsd_use_default_route: Dynamically set the statsd host to the default route + (Useful when running the client in a container) + :type statsd_use_default_route: boolean + + :param statsd_socket_path: path to the DogStatsd UNIX socket. Supersedes statsd_host + and stats_port if provided. + + :param statsd_constant_tags: A list of tags to be applied to all metrics ("tag", "tag:value") + :type statsd_constant_tags: list of string + + :param cacert: Path to local certificate file used to verify SSL \ + certificates. Can also be set to True (default) to use the systems \ + certificate store, or False to skip SSL verification + :type cacert: path or boolean + + :param mute: Mute any ApiError or ClientError before they escape \ + from datadog.api.HTTPClient (default: True). + :type mute: boolean + + :param return_raw_response: Whether or not to return the raw response object in addition \ + to the decoded response content (default: False) + :type return_raw_response: boolean + + :param hostname_from_config: Set the hostname from the Datadog agent config (agent 5). 
Will be deprecated + :type hostname_from_config: boolean + """ + # API configuration + api._api_key = api_key or api._api_key or os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY")) + api._application_key = ( + app_key or api._application_key or os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY")) + ) + api._hostname_from_config = hostname_from_config + api._host_name = host_name or api._host_name or get_hostname(hostname_from_config) + api._api_host = api_host or api._api_host or os.environ.get("DATADOG_HOST", "https://api.datadoghq.com") + + # Statsd configuration + # ...overrides the default `statsd` instance attributes + if statsd_socket_path: + statsd.socket_path = statsd_socket_path + statsd.host = None + statsd.port = None + else: + if statsd_host or statsd_use_default_route: + statsd.host = statsd.resolve_host(statsd_host, statsd_use_default_route) + if statsd_port: + statsd.port = int(statsd_port) + statsd.close_socket() + if statsd_namespace: + statsd.namespace = text(statsd_namespace) + if statsd_constant_tags: + statsd.constant_tags += statsd_constant_tags + + statsd.disable_buffering = statsd_disable_buffering + + api._return_raw_response = return_raw_response + + # HTTP client and API options + for key, value in iteritems(kwargs): + attribute = "_{}".format(key) + setattr(api, attribute, value) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py new file mode 100644 index 0000000..eb477c9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# flake8: noqa + +from typing import Optional + +# API settings +_api_key = None # type: Optional[str] +_application_key = None # type: Optional[str] +_api_version = "v1" +_api_host = None # type: Optional[str] +_host_name = None # type: Optional[str] +_hostname_from_config = True +_cacert = True + +# HTTP(S) settings +_proxies = None +_timeout = 60 +_max_timeouts = 3 +_max_retries = 3 +_backoff_period = 300 +_mute = True +_return_raw_response = False + +# Resources +from datadog.api.comments import Comment +from datadog.api.dashboard_lists import DashboardList +from datadog.api.distributions import Distribution +from datadog.api.downtimes import Downtime +from datadog.api.timeboards import Timeboard +from datadog.api.dashboards import Dashboard +from datadog.api.events import Event +from datadog.api.infrastructure import Infrastructure +from datadog.api.metadata import Metadata +from datadog.api.metrics import Metric +from datadog.api.monitors import Monitor +from datadog.api.screenboards import Screenboard +from datadog.api.graphs import Graph, Embed +from datadog.api.hosts import Host, Hosts +from datadog.api.service_checks import ServiceCheck +from datadog.api.tags import Tag +from datadog.api.users import User +from datadog.api.aws_integration import AwsIntegration +from datadog.api.aws_log_integration import AwsLogsIntegration +from datadog.api.azure_integration import AzureIntegration +from datadog.api.gcp_integration import GcpIntegration +from datadog.api.roles import Roles +from datadog.api.permissions import Permissions +from datadog.api.service_level_objectives import ServiceLevelObjective +from datadog.api.synthetics import Synthetics +from datadog.api.logs import Logs diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py new file mode 100644 index 0000000..db34873 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py @@ -0,0 +1,290 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import logging +import time +import zlib + +# datadog +from datadog.api import _api_version, _max_timeouts, _backoff_period +from datadog.api.exceptions import ClientError, ApiError, HttpBackoff, HttpTimeout, ApiNotInitialized +from datadog.api.http_client import resolve_http_client +from datadog.util.compat import is_p3k +from datadog.util.format import construct_url, normalize_tags + + +log = logging.getLogger("datadog.api") + + +class APIClient(object): + """ + Datadog API client: format and submit API calls to Datadog. + Embeds a HTTP client. + """ + + # HTTP transport parameters + _backoff_period = _backoff_period + _max_timeouts = _max_timeouts + _backoff_timestamp = None + _timeout_counter = 0 + _sort_keys = False + + # Plugged HTTP client + _http_client = None + + @classmethod + def _get_http_client(cls): + """ + Getter for the embedded HTTP client. 
+ """ + if not cls._http_client: + cls._http_client = resolve_http_client() + + return cls._http_client + + @classmethod + def submit( + cls, + method, + path, + api_version=None, + body=None, + attach_host_name=False, + response_formatter=None, + error_formatter=None, + suppress_response_errors_on_codes=None, + compress_payload=False, + **params + ): + """ + Make an HTTP API request + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param path: API endpoint url + :type path: url + + :param api_version: The API version used + + :param body: dictionary to be sent in the body of the request + :type body: dictionary + + :param response_formatter: function to format JSON response from HTTP API request + :type response_formatter: JSON input function + + :param error_formatter: function to format JSON error response from HTTP API request + :type error_formatter: JSON input function + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param suppress_response_errors_on_codes: suppress ApiError on `errors` key in the response for the given HTTP + status codes + :type suppress_response_errors_on_codes: None|list(int) + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :param params: dictionary to be sent in the query string of the request + :type params: dictionary + + :returns: JSON or formatted response from HTTP API request + """ + try: + # Check if it's ok to submit + if not cls._should_submit(): + _, backoff_time_left = cls._backoff_status() + raise HttpBackoff(backoff_time_left) + + # Import API, User and HTTP settings + from datadog.api import ( + _api_key, + _application_key, + _api_host, + _mute, + _host_name, + _proxies, + _max_retries, + _timeout, + _cacert, + _return_raw_response, + ) + + # Check keys and add then to params + if _api_key is None: + raise ApiNotInitialized("API key is not set." " Please run 'initialize' method first.") + + # Set api and app keys in headers + headers = {} + headers["DD-API-KEY"] = _api_key + if _application_key: + headers["DD-APPLICATION-KEY"] = _application_key + + # Check if the api_version is provided + if not api_version: + api_version = _api_version + + # Attach host name to body + if attach_host_name and body: + # Is it a 'series' list of objects ? 
+ if "series" in body: + # Adding the host name to all objects + for obj_params in body["series"]: + if obj_params.get("host", "") == "": + obj_params["host"] = _host_name + else: + if body.get("host", "") == "": + body["host"] = _host_name + + # If defined, make sure tags are defined as a comma-separated string + if "tags" in params and isinstance(params["tags"], list): + tag_list = normalize_tags(params["tags"]) + params["tags"] = ",".join(tag_list) + + # If defined, make sure monitor_ids are defined as a comma-separated string + if "monitor_ids" in params and isinstance(params["monitor_ids"], list): + params["monitor_ids"] = ",".join(str(i) for i in params["monitor_ids"]) + + # Process the body, if necessary + if isinstance(body, dict): + body = json.dumps(body, sort_keys=cls._sort_keys) + headers["Content-Type"] = "application/json" + + if compress_payload: + body = zlib.compress(body.encode("utf-8")) + headers["Content-Encoding"] = "deflate" + + # Construct the URL + url = construct_url(_api_host, api_version, path) + + # Process requesting + start_time = time.time() + + result = cls._get_http_client().request( + method=method, + url=url, + headers=headers, + params=params, + data=body, + timeout=_timeout, + max_retries=_max_retries, + proxies=_proxies, + verify=_cacert, + ) + + # Request succeeded: log it and reset the timeout counter + duration = round((time.time() - start_time) * 1000.0, 4) + log.info("%s %s %s (%sms)" % (result.status_code, method, url, duration)) + cls._timeout_counter = 0 + + # Format response content + content = result.content + + if content: + try: + if is_p3k(): + response_obj = json.loads(content.decode("utf-8")) + else: + response_obj = json.loads(content) + except ValueError: + raise ValueError("Invalid JSON response: {0}".format(content)) + + # response_obj can be a bool and not a dict + if isinstance(response_obj, dict): + if response_obj and "errors" in response_obj: + # suppress ApiError when specified and just return the response + if not ( + suppress_response_errors_on_codes + and result.status_code in suppress_response_errors_on_codes + ): + raise ApiError(response_obj) + else: + response_obj = None + + if response_formatter is not None: + response_obj = response_formatter(response_obj) + + if _return_raw_response: + return response_obj, result + else: + return response_obj + + except HttpTimeout: + cls._timeout_counter += 1 + raise + except ClientError as e: + if _mute: + log.error(str(e)) + if error_formatter is None: + return {"errors": e.args[0]} + else: + return error_formatter({"errors": e.args[0]}) + else: + raise + except ApiError as e: + if _mute: + for error in e.args[0].get("errors") or []: + log.error(error) + if error_formatter is None: + return e.args[0] + else: + return error_formatter(e.args[0]) + else: + raise + + @classmethod + def _should_submit(cls): + """ + Returns True if we're in a state where we should make a request + (backoff expired, no backoff in effect), false otherwise. 
+ """ + now = time.time() + should_submit = False + + # If we're not backing off, but the timeout counter exceeds the max + # number of timeouts, then enter the backoff state, recording the time + # we started backing off + if not cls._backoff_timestamp and cls._timeout_counter >= cls._max_timeouts: + log.info( + "Max number of datadog timeouts exceeded, backing off for %s seconds", + cls._backoff_period, + ) + cls._backoff_timestamp = now + should_submit = False + + # If we are backing off but the we've waiting sufficiently long enough + # (backoff_retry_age), exit the backoff state and reset the timeout + # counter so that we try submitting metrics again + elif cls._backoff_timestamp: + backed_off_time, backoff_time_left = cls._backoff_status() + if backoff_time_left < 0: + log.info( + "Exiting backoff state after %s seconds, will try to submit metrics again", + backed_off_time, + ) + cls._backoff_timestamp = None + cls._timeout_counter = 0 + should_submit = True + else: + log.info( + "In backoff state, won't submit metrics for another %s seconds", + backoff_time_left, + ) + should_submit = False + else: + should_submit = True + + return should_submit + + @classmethod + def _backoff_status(cls): + """ + Get a backoff report, i.e. backoff total and remaining time. + """ + now = time.time() + backed_off_time = now - cls._backoff_timestamp + backoff_time_left = cls._backoff_period - backed_off_time + return round(backed_off_time, 2), round(backoff_time_left, 2) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py new file mode 100644 index 0000000..eb4358b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py @@ -0,0 +1,248 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + UpdatableAPIResource, + UpdatableAPISubResource, + ListableAPISubResource, +) + + +class AwsIntegration( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +): + """ + A wrapper around AWS Integration API. + """ + + _resource_name = "integration" + _resource_id = "aws" + + @classmethod + def list(cls, **params): + """ + List all Datadog-AWS integrations available in your Datadog organization. + + >>> api.AwsIntegration.list() + """ + return super(AwsIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def create(cls, **params): + """ + Add a new AWS integration config. + + :param account_id: Your AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. + :type account_id: string + + :param access_key_id: If your AWS account is a GovCloud \ + or China account, enter the corresponding Access Key ID. + :type access_key_id: string + + :param role_name: Your Datadog role delegation name. \ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. + :type role_name: string + + :param filter_tags: The array of EC2 tags (in the form key:value) \ + defines a filter that Datadog uses when collecting metrics from EC2. \ + Wildcards, such as ? 
(for single characters) and * (for multiple characters) \ + can also be used. Only hosts that match one of the defined tags will be imported \ + into Datadog. The rest will be ignored. Host matching a given tag can also be \ + excluded by adding ! before the tag. e.x. \ + env:production,instance-type:c1.*,!region:us-east-1 For more information \ + on EC2 tagging, see the AWS tagging documentation. + :type filter_tags: list of strings + + :param host_tags: Array of tags (in the form key:value) to add to all hosts and \ + metrics reporting through this integration. + :type host_tags: list of strings + + :param account_specific_namespace_rules: An object (in the form \ + {"namespace1":true/false, "namespace2":true/false}) that enables \ + or disables metric collection for specific AWS namespaces for this \ + AWS account only. A list of namespaces can be found at the \ + /v1/integration/aws/available_namespace_rules endpoint. + :type account_specific_namespace_rules: dictionary + + :param excluded_regions: An array of AWS regions to exclude \ + from metrics collection. + :type excluded_regions: list of strings + + :returns: Dictionary representing the API's JSON response + + >>> account_id = "" + >>> access_key_id = "" + >>> role_name = "DatadogAwsRole" + >>> filter_tags = [":"] + >>> host_tags = [":"] + >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false} + >>> excluded_regions = ["us-east-1", "us-west-1"] + + >>> api.AwsIntegration.create(account_id=account_id, role_name=role_name, \ + filter_tags=filter_tags,host_tags=host_tags,\ + account_specific_namespace_rules=account_specific_namespace_rules \ + excluded_regions=excluded_regions) + """ + return super(AwsIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an AWS integration config. + + :param account_id: Your existing AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. + :type account_id: string + + :param new_account_id: Your new AWS Account ID without dashes. \ + Consult the Datadog AWS integration to learn more about \ + your AWS account ID. This is the account to be updated. + :type new_account_id: string + + :param role_name: Your existing Datadog role delegation name. \ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. + :type role_name: string + + :param new_role_name: Your new Datadog role delegation name. \ + For more information about you AWS account Role name, \ + see the Datadog AWS integration configuration info. \ + This is the role_name to be updated. + :type new_role_name: string + + :param access_key_id: If your AWS account is a GovCloud \ + or China account, enter the existing Access Key ID. + :type access_key_id: string + + :param new_access_key_id: If your AWS account is a GovCloud \ + or China account, enter the new Access Key ID to be set. + :type new_access_key_id: string + + :param secret_access_key: If your AWS account is a GovCloud \ + or China account, enter the existing Secret Access Key. + :type secret_access_key: string + + :param new_secret_access_key: If your AWS account is a GovCloud \ + or China account, enter the new key to be set. + :type new_secret_access_key: string + + :param filter_tags: The array of EC2 tags (in the form key:value) \ + defines a filter that Datadog uses when collecting metrics from EC2. \ + Wildcards, such as ? 
(for single characters) and * (for multiple characters) \ + can also be used. Only hosts that match one of the defined tags will be imported \ + into Datadog. The rest will be ignored. Host matching a given tag can also be \ + excluded by adding ! before the tag. e.x. \ + env:production,instance-type:c1.*,!region:us-east-1 For more information \ + on EC2 tagging, see the AWS tagging documentation. + :type filter_tags: list of strings + + :param host_tags: Array of tags (in the form key:value) to add to all hosts and \ + metrics reporting through this integration. + :type host_tags: list of strings + + :param account_specific_namespace_rules: An object (in the form \ + {"namespace1":true/false, "namespace2":true/false}) that enables \ + or disables metric collection for specific AWS namespaces for this \ + AWS account only. A list of namespaces can be found at the \ + /v1/integration/aws/available_namespace_rules endpoint. + :type account_specific_namespace_rules: dictionary + + :param excluded_regions: An array of AWS regions to exclude \ + from metrics collection. + :type excluded_regions: list of strings + + :returns: Dictionary representing the API's JSON response + + The following will depend on whether role delegation or access keys are being used. + If using role delegation, use the fields for role_name and account_id. + For access keys, use fields for access_key_id and secret_access_key. + + Both the existing fields and new fields are required no matter what. i.e. If the config is \ + account_id/role_name based, then `account_id`, `role_name`, `new_account_id`, and \ + `new_role_name` are all required. + + For access_key based accounts, `access_key_id`, `secret_access_key`, `new_access_key_id`, \ + and `new_secret_access_key` are all required. + + >>> account_id = "" + >>> role_name = "" + >>> access_key_id = "" + >>> secret_access_key = "" + >>> new_account_id = "" + >>> new_role_name = "" + >>> new_access_key_id = "" + >>> new_secret_access_key = "" + >>> filter_tags = [":"] + >>> host_tags = [":"] + >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false} + >>> excluded_regions = ["us-east-1", "us-west-1"] + + >>> api.AwsIntegration.update(account_id=account_id, role_name=role_name, \ + new_account_id=new_account_id, new_role_name=new_role_name, \ + filter_tags=filter_tags,host_tags=host_tags,\ + account_specific_namespace_rules=account_specific_namespace_rules, \ + excluded_regions=excluded_regions) + """ + params = {} + if body.get("account_id") and body.get("role_name"): + params["account_id"] = body.pop("account_id") + params["role_name"] = body.pop("role_name") + if body.get("new_account_id"): + body["account_id"] = body.pop("new_account_id") + if body.get("new_role_name"): + body["role_name"] = body.pop("new_role_name") + if body.get("access_key_id") and body.get("secret_access_key"): + params["access_key_id"] = body.pop("access_key_id") + params["secret_access_key"] = body.pop("secret_access_key") + if body.get("new_access_key_id"): + body["access_key_id"] = body.pop("new_access_key_id") + if body.get("new_secret_access_key"): + body["secret_access_key"] = body.pop("new_secret_access_key") + return super(AwsIntegration, cls).update(id=cls._resource_id, params=params, **body) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-AWS integration. 
+ + >>> account_id = "" + >>> role_name = "" + + >>> api.AwsIntegration.delete() + """ + return super(AwsIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def list_namespace_rules(cls, **params): + """ + List all namespace rules available as options. + + >>> api.AwsIntegration.list_namespace_rules() + """ + cls._sub_resource_name = "available_namespace_rules" + return super(AwsIntegration, cls).get_items(id=cls._resource_id, **params) + + @classmethod + def generate_new_external_id(cls, **params): + """ + Generate a new AWS external id for a given AWS account id and role name pair. + + >>> account_id = "" + >>> role_name = "" + + >>> api.AwsIntegration.generate_new_external_id() + """ + cls._sub_resource_name = "generate_new_external_id" + return super(AwsIntegration, cls).update_items(id=cls._resource_id, **params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py new file mode 100644 index 0000000..3528435 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py @@ -0,0 +1,111 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource + + +class AwsLogsIntegration(DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource): + """ + A wrapper around AWS Logs API. + """ + + _resource_name = "integration" + _resource_id = "aws" + + @classmethod + def list_log_services(cls, **params): + """ + List all namespace rules available as options. + + >>> api.AwsLogsIntegration.list_log_services() + """ + cls._sub_resource_name = "logs/services" + return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params) + + @classmethod + def add_log_lambda_arn(cls, **params): + """ + Attach the Lambda ARN of the Lambda created for the Datadog-AWS \ + log collection to your AWS account ID to enable log collection. + + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.add_log_lambda_arn(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def save_services(cls, **params): + """ + Enable Automatic Log collection for your AWS services. + + >>> account_id = "" + >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + + >>> api.AwsLogsIntegration.save_services() + """ + cls._sub_resource_name = "logs/services" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def delete_config(cls, **params): + """ + Delete a Datadog-AWS log collection configuration by removing the specific Lambda ARN \ + associated with a given AWS account. + + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.delete_config(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).delete_items(id=cls._resource_id, **params) + + @classmethod + def check_lambda(cls, **params): + """ + Check function to see if a lambda_arn exists within an account. \ + This sends a job on our side if it does not exist, then immediately returns \ + the status of that job. 
Subsequent requests will always repeat the above, so this endpoint \ + can be polled intermittently instead of blocking. + + Returns a status of 'created' when it's checking if the Lambda exists in the account. + Returns a status of 'waiting' while checking. + Returns a status of 'checked and ok' if the Lambda exists. + Returns a status of 'error' if the Lambda does not exist. + + >>> account_id = "" + >>> lambda_arn = "" + + >>> api.AwsLogsIntegration.check_lambda(account_id=account_id, lambda_arn=lambda_arn) + """ + cls._sub_resource_name = "logs/check_async" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def check_services(cls, **params): + """ + Test if permissions are present to add log-forwarding triggers for the \ + given services + AWS account. Input is the same as for save_services. + Done async, so can be repeatedly polled in a non-blocking fashion until \ + the async request completes + + >>> account_id = "" + >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"] + + >>> api.AwsLogsIntegration.check_services() + """ + cls._sub_resource_name = "logs/services_async" + return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def list(cls, **params): + """ + List all Datadog-AWS Logs integrations available in your Datadog organization. + + >>> api.AwsLogsIntegration.list() + """ + cls._sub_resource_name = "logs" + return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py new file mode 100644 index 0000000..2bb1cea --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py @@ -0,0 +1,91 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + DeletableAPIResource, + UpdatableAPIResource, + AddableAPISubResource, +) + + +class AzureIntegration( + GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource, AddableAPISubResource +): + """ + A wrapper around Azure integration API. + """ + + _resource_name = "integration" + _resource_id = "azure" + + @classmethod + def list(cls, **params): + """ + List all Datadog-Azure integrations available in your Datadog organization. + + >>> api.AzureIntegration.list() + """ + return super(AzureIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def create(cls, **params): + """ + Add a new Azure integration config. + + >>> tenant_name = "" + >>> client_id = "" + >>> client_secret = "" + >>> host_filters = [":"] + + >>> api.AzureIntegration.create(tenant_name=tenant_name, client_id=client_id, \ + client_secret=client_secret,host_filters=host_filters) + """ + return super(AzureIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-Azure integration. 
+ + >>> tenant_name = "" + >>> client_id = "" + + >>> api.AzureIntegration.delete(tenant_name=tenant_name, client_id=client_id) + """ + return super(AzureIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def update_host_filters(cls, **params): + """ + Update the defined list of host filters for a given Datadog-Azure integration. \ + + >>> tenant_name = "" + >>> client_id = "" + >>> host_filters = ":" + + >>> api.AzureIntegration.update_host_filters(tenant_name=tenant_name, client_id=client_id, \ + host_filters=host_filters) + """ + cls._sub_resource_name = "host_filters" + return super(AzureIntegration, cls).add_items(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an Azure account configuration. + + >>> tenant_name = "" + >>> client_id = "" + >>> new_tenant_name = "" + >>> new_client_id = "" + >>> client_secret = "" + >>> host_filters = ":" + + >>> api.AzureIntegration.update(tenant_name=tenant_name, client_id=client_id, \ + new_tenant_name=new_tenant_name, new_client_id=new_client_id,\ + client_secret=client_secret, host_filters=host_filters) + """ + params = {} + return super(AzureIntegration, cls).update(id=cls._resource_id, params=params, **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py new file mode 100644 index 0000000..7ecd506 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py @@ -0,0 +1,12 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import CreateableAPIResource, UpdatableAPIResource + + +class Comment(CreateableAPIResource, UpdatableAPIResource): + """ + A wrapper around Comment HTTP API. + """ + + _resource_name = "comments" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py new file mode 100644 index 0000000..a7e02b8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py @@ -0,0 +1,25 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + + +class CheckStatus(object): + OK = 0 + WARNING = 1 + CRITICAL = 2 + UNKNOWN = 3 + ALL = (OK, WARNING, CRITICAL, UNKNOWN) + + +class MonitorType(object): + # From https://docs.datadoghq.com/api/?lang=bash#create-a-monitor + QUERY_ALERT = "query alert" + COMPOSITE = "composite" + SERVICE_CHECK = "service check" + PROCESS_ALERT = "process alert" + LOG_ALERT = "log alert" + METRIC_ALERT = "metric alert" + RUM_ALERT = "rum alert" + EVENT_ALERT = "event alert" + SYNTHETICS_ALERT = "synthetics alert" + TRACE_ANALYTICS = "trace-analytics alert" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py new file mode 100644 index 0000000..127fca9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py @@ -0,0 +1,19 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + AddableAPISubResource, + DeletableAPISubResource, + ListableAPISubResource, + UpdatableAPISubResource, +) + + +class DashboardListV2(ListableAPISubResource, AddableAPISubResource, UpdatableAPISubResource, DeletableAPISubResource): + """ + A wrapper around Dashboard List HTTP API. + """ + + _resource_name = "dashboard/lists/manual" + _sub_resource_name = "dashboards" + _api_version = "v2" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py new file mode 100644 index 0000000..e83785f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + AddableAPISubResource, + CreateableAPIResource, + DeletableAPIResource, + DeletableAPISubResource, + GetableAPIResource, + ListableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +) + +from datadog.api.dashboard_list_v2 import DashboardListV2 + + +class DashboardList( + AddableAPISubResource, + CreateableAPIResource, + DeletableAPIResource, + DeletableAPISubResource, + GetableAPIResource, + ListableAPIResource, + ListableAPISubResource, + UpdatableAPIResource, + UpdatableAPISubResource, +): + """ + A wrapper around Dashboard List HTTP API. + """ + + _resource_name = "dashboard/lists/manual" + _sub_resource_name = "dashboards" + + # Support for new API version (api.DashboardList.v2) + # Note: This needs to be removed after complete migration of these endpoints from v1 to v2. + v2 = DashboardListV2() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py new file mode 100644 index 0000000..dab9b4d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py @@ -0,0 +1,20 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ListableAPIResource, +) + + +class Dashboard( + GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource, ListableAPIResource +): + """ + A wrapper around Dashboard HTTP API. + """ + + _resource_name = "dashboard" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py new file mode 100644 index 0000000..918f7d8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py @@ -0,0 +1,45 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# datadog +from datadog.api.format import format_points +from datadog.api.resources import SendableAPIResource + + +class Distribution(SendableAPIResource): + """A wrapper around Distribution HTTP API""" + + _resource_name = "distribution_points" + + @classmethod + def send(cls, distributions=None, attach_host_name=True, compress_payload=False, **distribution): + """ + Submit a distribution metric or a list of distribution metrics to the distribution metric + API + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + :param metric: the name of the time series + :type metric: string + :param points: a (timestamp, [list of values]) pair or + list of (timestamp, [list of values]) pairs + :type points: list + :param host: host name that produced the metric + :type host: string + :param tags: list of tags associated with the metric. + :type tags: string list + :returns: Dictionary representing the API's JSON response + """ + if distributions: + # Multiple distributions are sent + for d in distributions: + if isinstance(d, dict): + d["points"] = format_points(d["points"]) + series_dict = {"series": distributions} + else: + # One distribution is sent + distribution["points"] = format_points(distribution["points"]) + series_dict = {"series": [distribution]} + return super(Distribution, cls).send( + attach_host_name=attach_host_name, compress_payload=compress_payload, **series_dict + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py new file mode 100644 index 0000000..567ed9e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) + + +class Downtime( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): + """ + A wrapper around Monitor Downtiming HTTP API. + """ + + _resource_name = "downtime" + + @classmethod + def cancel_downtime_by_scope(cls, **body): + """ + Cancels all downtimes matching the scope. + + :param scope: scope to cancel downtimes by + :type scope: string + + :returns: Dictionary representing the API's JSON response + """ + return super(Downtime, cls)._trigger_class_action("POST", "cancel/by_scope", **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py new file mode 100644 index 0000000..55b176f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py @@ -0,0 +1,95 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.exceptions import ApiError +from datadog.api.resources import GetableAPIResource, CreateableAPIResource, SearchableAPIResource +from datadog.util.compat import iteritems + + +class Event(GetableAPIResource, CreateableAPIResource, SearchableAPIResource): + """ + A wrapper around Event HTTP API. + """ + + _resource_name = "events" + _timestamp_keys = set(["start", "end"]) + + @classmethod + def create(cls, attach_host_name=True, **params): + """ + Post an event. + + :param title: title for the new event + :type title: string + + :param text: event message + :type text: string + + :param aggregation_key: key by which to group events in event stream + :type aggregation_key: string + + :param alert_type: "error", "warning", "info" or "success". + :type alert_type: string + + :param date_happened: when the event occurred. if unset defaults to the current time. \ + (POSIX timestamp) + :type date_happened: integer + + :param handle: user to post the event as. defaults to owner of the application key used \ + to submit. + :type handle: string + + :param priority: priority to post the event as. ("normal" or "low", defaults to "normal") + :type priority: string + + :param related_event_id: post event as a child of the given event + :type related_event_id: id + + :param tags: tags to post the event with + :type tags: list of strings + + :param host: host to post the event with + :type host: string + + :param device_name: device_name to post the event with + :type device_name: list of strings + + :returns: Dictionary representing the API's JSON response + + >>> title = "Something big happened!" + >>> text = 'And let me tell you all about it here!' + >>> tags = ['version:1', 'application:web'] + + >>> api.Event.create(title=title, text=text, tags=tags) + """ + if params.get("alert_type"): + if params["alert_type"] not in ["error", "warning", "info", "success"]: + raise ApiError("Parameter alert_type must be either error, warning, info or success") + + return super(Event, cls).create(attach_host_name=attach_host_name, **params) + + @classmethod + def query(cls, **params): + """ + Get the events that occurred between the *start* and *end* POSIX timestamps, + optional filtered by *priority* ("low" or "normal"), *sources* and + *tags*. + + See the `event API documentation `_ for the + event data format. + + :returns: Dictionary representing the API's JSON response + + >>> api.Event.query(start=1313769783, end=1419436870, priority="normal", \ + tags=["application:web"]) + """ + + def timestamp_to_integer(k, v): + if k in cls._timestamp_keys: + return int(v) + else: + return v + + params = dict((k, timestamp_to_integer(k, v)) for k, v in iteritems(params)) + + return super(Event, cls)._search(**params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py new file mode 100644 index 0000000..afdfa36 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py @@ -0,0 +1,105 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +API & HTTP Clients exceptions. +""" + + +class DatadogException(Exception): + """ + Base class for Datadog API exceptions. 
Use this for patterns like the following: + + try: + # do something with the Datadog API + except datadog.api.exceptions.DatadogException: + # handle any Datadog-specific exceptions + """ + + +class ProxyError(DatadogException): + """ + HTTP connection to the configured proxy server failed. + """ + + def __init__(self, method, url, exception): + message = ( + u"Could not request {method} {url}: Unable to connect to proxy. " + u"Please check the proxy configuration and try again.".format(method=method, url=url) + ) + super(ProxyError, self).__init__(message) + + +class ClientError(DatadogException): + """ + HTTP connection to Datadog endpoint is not possible. + """ + + def __init__(self, method, url, exception): + message = ( + u"Could not request {method} {url}: {exception}. " + u"Please check the network connection or try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + method=method, url=url, exception=exception + ) + ) + super(ClientError, self).__init__(message) + + +class HttpTimeout(DatadogException): + """ + HTTP connection timeout. + """ + + def __init__(self, method, url, timeout): + message = ( + u"{method} {url} timed out after {timeout}. " + u"Please try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + method=method, url=url, timeout=timeout + ) + ) + super(HttpTimeout, self).__init__(message) + + +class HttpBackoff(DatadogException): + """ + Backing off after too many timeouts. + """ + + def __init__(self, backoff_period): + message = u"Too many timeouts. Won't try again for {backoff_period} seconds. ".format( + backoff_period=backoff_period + ) + super(HttpBackoff, self).__init__(message) + + +class HTTPError(DatadogException): + """ + Datadog returned a HTTP error. + """ + + def __init__(self, status_code=None, reason=None): + reason = u" - {reason}".format(reason=reason) if reason else u"" + message = ( + u"Datadog returned a bad HTTP response code: {status_code}{reason}. " + u"Please try again later. " + u"If the problem persists, please contact support@datadoghq.com".format( + status_code=status_code, + reason=reason, + ) + ) + + super(HTTPError, self).__init__(message) + + +class ApiError(DatadogException): + """ + Datadog returned an API error (known HTTPError). + + Matches the following status codes: 400, 401, 403, 404, 409, 429. + """ + + +class ApiNotInitialized(DatadogException): + "No API key is set" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py new file mode 100644 index 0000000..d3e5b72 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from numbers import Number +import sys +import time + +if sys.version_info[0] >= 3: + from collections.abc import Iterable +else: + from collections import Iterable + + +def format_points(points): + """ + Format `points` parameter. 
+ + Input: + a value or (timestamp, value) pair or a list of value or (timestamp, value) pairs + + Returns: + list of (timestamp, float value) pairs + + """ + now = time.time() + if not isinstance(points, list): + points = [points] + + formatted_points = [] + for point in points: + if isinstance(point, Number): + timestamp = now + value = float(point) + # Distributions contain a list of points + else: + timestamp = point[0] + if isinstance(point[1], Iterable): + value = [float(p) for p in point[1]] + else: + value = float(point[1]) + + formatted_points.append((timestamp, value)) + + return formatted_points diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py new file mode 100644 index 0000000..978e1ae --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py @@ -0,0 +1,93 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource + + +class GcpIntegration(GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource): + """ + A wrapper around GCP integration API. + """ + + _resource_name = "integration" + _resource_id = "gcp" + + @classmethod + def list(cls, **params): + """ + List all Datadog-Gcp integrations available in your Datadog organization. + + >>> api.GcpIntegration.list() + """ + return super(GcpIntegration, cls).get(id=cls._resource_id, **params) + + @classmethod + def delete(cls, **body): + """ + Delete a given Datadog-GCP integration. + + >>> project_id="" + >>> client_email="" + + >>> api.GcpIntegration.delete(project_id=project_id, client_email=client_email) + """ + return super(GcpIntegration, cls).delete(id=cls._resource_id, body=body) + + @classmethod + def create(cls, **params): + """ + Add a new GCP integration config. + + All of the following fields values are provided by the \ + JSON service account key file created in the GCP Console \ + for service accounts; Refer to the Datadog-Google Cloud \ + Platform integration installation instructions to see how \ + to generate one for your organization. For further references, \ + consult the Google Cloud service account documentation. + + >>> type="service_account" + >>> project_id="" + >>> private_key_id="" + >>> private_key="" + >>> client_email="" + >>> client_id="" + >>> auth_uri=">> token_uri="" + >>> auth_provider_x509_cert_url="" + >>> client_x509_cert_url="" + >>> host_filters=":,:" + + >>> api.GcpIntegration.create(type=type, project_id=project_id, \ + private_key_id=private_key_id,private_key=private_key, \ + client_email=client_email, client_id=client_id, \ + auth_uri=auth_uri, token_uri=token_uri, \ + auth_provider_x509_cert_url=auth_provider_x509_cert_url, \ + client_x509_cert_url=client_x509_cert_url, host_filters=host_filters) + """ + return super(GcpIntegration, cls).create(id=cls._resource_id, **params) + + @classmethod + def update(cls, **body): + """ + Update an existing service account partially (one or multiple fields), \ + by supplying a new value for the field(s) to be updated. + + `project_id` and `client_email` are required, in order to identify the \ + right service account to update. \ + The unspecified fields will keep their original values. 
+ + The only use case for updating this integration is to change \ + host filtering and automute settings. Otherwise, an entirely \ + new integration config is needed. + + >>> project_id="" + >>> client_email="" + >>> host_filters="" + >>> automute=true #boolean + + >>> api.GcpIntegration.update(project_id=project_id, \ + client_email=client_email, host_filters=host_filters, \ + automute=automute) + """ + params = {} + return super(GcpIntegration, cls).update(id=cls._resource_id, params=params, **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py new file mode 100644 index 0000000..ef29d70 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py @@ -0,0 +1,84 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.util.compat import urlparse +from datadog.api.resources import CreateableAPIResource, ActionAPIResource, GetableAPIResource, ListableAPIResource + + +class Graph(CreateableAPIResource, ActionAPIResource): + """ + A wrapper around Graph HTTP API. + """ + + _resource_name = "graph/snapshot" + + @classmethod + def create(cls, **params): + """ + Take a snapshot of a graph, returning the full url to the snapshot. + + :param metric_query: metric query + :type metric_query: string query + + :param start: query start timestamp + :type start: POSIX timestamp + + :param end: query end timestamp + :type end: POSIX timestamp + + :param event_query: a query that will add event bands to the graph + :type event_query: string query + + :returns: Dictionary representing the API's JSON response + """ + return super(Graph, cls).create(method="GET", **params) + + @classmethod + def status(cls, snapshot_url): + """ + Returns the status code of snapshot. Can be used to know when the + snapshot is ready for download. + + :param snapshot_url: snapshot URL to check + :type snapshot_url: string url + + :returns: Dictionary representing the API's JSON response + """ + snap_path = urlparse(snapshot_url).path + snap_path = snap_path.split("/snapshot/view/")[1].split(".png")[0] + + snapshot_status_url = "graph/snapshot_status/{0}".format(snap_path) + + return super(Graph, cls)._trigger_action("GET", snapshot_status_url) + + +class Embed(ListableAPIResource, GetableAPIResource, ActionAPIResource, CreateableAPIResource): + """ + A wrapper around Embed HTTP API. + """ + + _resource_name = "graph/embed" + + @classmethod + def enable(cls, embed_id): + """ + Enable a specified embed. + + :param embed_id: embed token + :type embed_id: string embed token + + :returns: Dictionary representing the API's JSON response + """ + return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="enable") + + @classmethod + def revoke(cls, embed_id): + """ + Revoke a specified embed. 
+ + :param embed_id: embed token + :type embed_id: string embed token + + :returns: Dictionary representing the API's JSON response + """ + return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="revoke") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py new file mode 100644 index 0000000..5bc2a32 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py @@ -0,0 +1,91 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ActionAPIResource, SearchableAPIResource + + +class Host(ActionAPIResource): + """ + A wrapper around Host HTTP API. + """ + + _resource_name = "host" + + @classmethod + def mute(cls, host_name, **body): + """ + Mute a host. + + :param host_name: hostname + :type host_name: string + + :param end: timestamp to end muting + :type end: POSIX timestamp + + :param override: if true and the host is already muted, will override\ + existing end on the host + :type override: bool + + :param message: message to associate with the muting of this host + :type message: string + + :returns: Dictionary representing the API's JSON response + + """ + return super(Host, cls)._trigger_class_action("POST", "mute", host_name, **body) + + @classmethod + def unmute(cls, host_name): + """ + Unmute a host. + + :param host_name: hostname + :type host_name: string + + :returns: Dictionary representing the API's JSON response + + """ + return super(Host, cls)._trigger_class_action("POST", "unmute", host_name) + + +class Hosts(ActionAPIResource, SearchableAPIResource): + """ + A wrapper around Hosts HTTP API. + """ + + _resource_name = "hosts" + + @classmethod + def search(cls, **params): + """ + Search among hosts live within the past 2 hours. Max 100 + results at a time. + + :param filter: query to filter search results + :type filter: string + + :param sort_field: "status", "apps", "cpu", "iowait", or "load" + :type sort_field: string + + :param sort_dir: "asc" or "desc" + :type sort_dir: string + + :param start: host result to start at + :type start: integer + + :param count: number of host results to return + :type count: integer + + :returns: Dictionary representing the API's JSOn response + + """ + return super(Hosts, cls)._search(**params) + + @classmethod + def totals(cls): + """ + Get total number of hosts active and up. + + :returns: Dictionary representing the API's JSON response + """ + return super(Hosts, cls)._trigger_class_action("GET", "totals") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py new file mode 100644 index 0000000..f058393 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py @@ -0,0 +1,195 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Available HTTP Client for Datadog API client. + +Priority: +1. `requests` 3p module +2. 
`urlfetch` 3p module - Google App Engine only +""" +# stdlib +import copy +import logging +import platform +import urllib +from threading import Lock + +# 3p +try: + import requests + import requests.adapters +except ImportError: + requests = None # type: ignore + +try: + from google.appengine.api import urlfetch, urlfetch_errors +except ImportError: + urlfetch, urlfetch_errors = None, None + +# datadog +from datadog.api.exceptions import ProxyError, ClientError, HTTPError, HttpTimeout + + +log = logging.getLogger("datadog.api") + + +def _get_user_agent_header(): + from datadog import version + + return "datadogpy/{version} (python {pyver}; os {os}; arch {arch})".format( + version=version.__version__, + pyver=platform.python_version(), + os=platform.system().lower(), + arch=platform.machine().lower(), + ) + + +def _remove_context(exc): + """Python3: remove context from chained exceptions to prevent leaking API keys in tracebacks.""" + exc.__cause__ = None + return exc + + +class HTTPClient(object): + """ + An abstract generic HTTP client. Subclasses must implement the `request` methods. + """ + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + """ + Main method to be implemented by HTTP clients. + + The returned data structure has the following fields: + * `content`: string containing the response from the server + * `status_code`: HTTP status code returned by the server + + Can raise the following exceptions: + * `ClientError`: server cannot be contacted + * `HttpTimeout`: connection timed out + * `HTTPError`: unexpected HTTP response code + """ + raise NotImplementedError(u"Must be implemented by HTTPClient subclasses.") + + +class RequestClient(HTTPClient): + """ + HTTP client based on 3rd party `requests` module, using a single session. + This allows us to keep the session alive to spare some execution time. + """ + + _session = None + _session_lock = Lock() + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + try: + + with cls._session_lock: + if cls._session is None: + cls._session = requests.Session() + http_adapter = requests.adapters.HTTPAdapter(max_retries=max_retries) + cls._session.mount("https://", http_adapter) + cls._session.headers.update({"User-Agent": _get_user_agent_header()}) + + result = cls._session.request( + method, url, headers=headers, params=params, data=data, timeout=timeout, proxies=proxies, verify=verify + ) + + result.raise_for_status() + + except requests.exceptions.ProxyError as e: + raise _remove_context(ProxyError(method, url, e)) + except requests.ConnectionError as e: + raise _remove_context(ClientError(method, url, e)) + except requests.exceptions.Timeout: + raise _remove_context(HttpTimeout(method, url, timeout)) + except requests.exceptions.HTTPError as e: + if e.response.status_code in (400, 401, 403, 404, 409, 429): + # This gets caught afterwards and raises an ApiError exception + pass + else: + raise _remove_context(HTTPError(e.response.status_code, result.reason)) + except TypeError: + raise TypeError( + u"Your installed version of `requests` library seems not compatible with" + u"Datadog's usage. We recommend upgrading it ('pip install -U requests')." + u"If you need help or have any question, please contact support@datadoghq.com" + ) + + return result + + +class URLFetchClient(HTTPClient): + """ + HTTP client based on Google App Engine `urlfetch` module. 
+ """ + + @classmethod + def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries): + """ + Wrapper around `urlfetch.fetch` method. + + TO IMPLEMENT: + * `max_retries` + """ + # No local certificate file can be used on Google App Engine + validate_certificate = True if verify else False + + # Encode parameters in the url + url_with_params = "{url}?{params}".format(url=url, params=urllib.urlencode(params)) + newheaders = copy.deepcopy(headers) + newheaders["User-Agent"] = _get_user_agent_header() + + try: + result = urlfetch.fetch( + url=url_with_params, + method=method, + headers=newheaders, + validate_certificate=validate_certificate, + deadline=timeout, + payload=data, + # setting follow_redirects=False may be slightly faster: + # https://cloud.google.com/appengine/docs/python/microservice-performance#use_the_shortest_route + follow_redirects=False, + ) + + cls.raise_on_status(result) + + except urlfetch.DownloadError as e: + raise ClientError(method, url, e) + except urlfetch_errors.DeadlineExceededError: + raise HttpTimeout(method, url, timeout) + + return result + + @classmethod + def raise_on_status(cls, result): + """ + Raise on HTTP status code errors. + """ + status_code = result.status_code + + if (status_code / 100) != 2: + if status_code in (400, 401, 403, 404, 409, 429): + pass + else: + raise HTTPError(status_code) + + +def resolve_http_client(): + """ + Resolve an appropriate HTTP client based the defined priority and user environment. + """ + if requests: + log.debug(u"Use `requests` based HTTP client.") + return RequestClient + + if urlfetch and urlfetch_errors: + log.debug(u"Use `urlfetch` based HTTP client.") + return URLFetchClient + + raise ImportError( + u"Datadog API client was unable to resolve a HTTP client. " u" Please install `requests` library." + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py new file mode 100644 index 0000000..806a051 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py @@ -0,0 +1,28 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import SearchableAPIResource + + +class Infrastructure(SearchableAPIResource): + """ + A wrapper around Infrastructure HTTP API. + """ + + _resource_name = "search" + + @classmethod + def search(cls, **params): + """ + Search for entities in Datadog. + + :param q: a query to search for host and metrics + :type q: string query + + :returns: Dictionary representing the API's JSON response + """ + # Deprecate the hosts search param + query = params.get("q", "").split(":") + if len(query) > 1 and query[0] == "hosts": + print("[DEPRECATION] Infrastructure.search() is deprecated for ", "hosts. Use `Hosts.search` instead.") + return super(Infrastructure, cls)._search(**params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py new file mode 100644 index 0000000..a87efa2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py @@ -0,0 +1,22 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import CreateableAPIResource +from datadog.api.api_client import APIClient + + +class Logs(CreateableAPIResource): + """ + A wrapper around Log HTTP API. + """ + + _resource_name = "logs-queries" + + @classmethod + def list(cls, data): + path = "{resource_name}/list".format( + resource_name=cls._resource_name, + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, data) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py new file mode 100644 index 0000000..6c251e5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py @@ -0,0 +1,64 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# datadog +from datadog.api.resources import GetableAPIResource, UpdatableAPIResource + + +class Metadata(GetableAPIResource, UpdatableAPIResource): + """ + A wrapper around Metric Metadata HTTP API + """ + + _resource_name = "metrics" + + @classmethod + def get(cls, metric_name): + """ + Get metadata information on an existing Datadog metric + + param metric_name: metric name (ex. system.cpu.idle) + + :returns: Dictionary representing the API's JSON response + """ + if not metric_name: + raise KeyError("'metric_name' parameter is required") + + return super(Metadata, cls).get(metric_name) + + @classmethod + def update(cls, metric_name, **params): + """ + Update metadata fields for an existing Datadog metric. + If the metadata does not exist for the metric it is created by + the update. + + :param type: type of metric (ex. "gauge", "rate", etc.) + see http://docs.datadoghq.com/metrictypes/ + :type type: string + + :param description: description of the metric + :type description: string + + :param short_name: short name of the metric + :type short_name: string + + :param unit: unit type associated with the metric (ex. "byte", "operation") + see http://docs.datadoghq.com/units/ for full list + :type unit: string + + :param per_unit: per unit type (ex. "second" as in "queries per second") + see http://docs.datadoghq.com/units/ for full list + :type per_unit: string + + :param statsd_interval: statsd flush interval for metric in seconds (if applicable) + :type statsd_interval: integer + + :returns: Dictionary representing the API's JSON response + + >>> api.Metadata.update(metric_name='api.requests.served', metric_type="counter") + """ + if not metric_name: + raise KeyError("'metric_name' parameter is required") + + return super(Metadata, cls).update(id=metric_name, **params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py new file mode 100644 index 0000000..252ea88 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py @@ -0,0 +1,147 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# datadog +from datadog.api.exceptions import ApiError +from datadog.api.format import format_points +from datadog.api.resources import SearchableAPIResource, SendableAPIResource, ListableAPIResource + + +class Metric(SearchableAPIResource, SendableAPIResource, ListableAPIResource): + """ + A wrapper around Metric HTTP API + """ + + _resource_name = None + + _METRIC_QUERY_ENDPOINT = "query" + _METRIC_SUBMIT_ENDPOINT = "series" + _METRIC_LIST_ENDPOINT = "metrics" + + @classmethod + def list(cls, from_epoch): + """ + Get a list of active metrics since a given time (Unix Epoc) + + :param from_epoch: Start time in Unix Epoc (seconds) + + :returns: Dictionary containing a list of active metrics + """ + + cls._resource_name = cls._METRIC_LIST_ENDPOINT + + try: + seconds = int(from_epoch) + params = {"from": seconds} + except ValueError: + raise ApiError("Parameter 'from_epoch' must be an integer") + + return super(Metric, cls).get_all(**params) + + @staticmethod + def _rename_metric_type(metric): + """ + FIXME DROPME in 1.0: + + API documentation was illegitimately promoting usage of `metric_type` parameter + instead of `type`. + To be consistent and avoid 'backward incompatibilities', properly rename this parameter. + """ + if "metric_type" in metric: + metric["type"] = metric.pop("metric_type") + + @classmethod + def send(cls, metrics=None, attach_host_name=True, compress_payload=False, **single_metric): + """ + Submit a metric or a list of metrics to the metric API + A metric dictionary should consist of 5 keys: metric, points, host, tags, type (some of which optional), + see below: + + :param metric: the name of the time series + :type metric: string + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :param metrics: a list of dictionaries, each item being a metric to send + :type metrics: list + + :param points: a (timestamp, value) pair or list of (timestamp, value) pairs + :type points: list + + :param host: host name that produced the metric + :type host: string + + :param tags: list of tags associated with the metric. 
+ :type tags: string list + + :param type: type of the metric + :type type: 'gauge' or 'count' or 'rate' string + + >>> api.Metric.send(metric='my.series', points=[(now, 15), (future_10s, 16)]) + + >>> metrics = [{'metric': 'my.series', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]}, + {'metric': 'my.series2', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]}] + >>> api.Metric.send(metrics=metrics) + + :returns: Dictionary representing the API's JSON response + """ + # Set the right endpoint + cls._resource_name = cls._METRIC_SUBMIT_ENDPOINT + + # Format the payload + try: + if metrics: + for metric in metrics: + if isinstance(metric, dict): + cls._rename_metric_type(metric) + metric["points"] = format_points(metric["points"]) + metrics_dict = {"series": metrics} + else: + cls._rename_metric_type(single_metric) + single_metric["points"] = format_points(single_metric["points"]) + metrics = [single_metric] + metrics_dict = {"series": metrics} + + except KeyError: + raise KeyError("'points' parameter is required") + + return super(Metric, cls).send( + attach_host_name=attach_host_name, compress_payload=compress_payload, **metrics_dict + ) + + @classmethod + def query(cls, **params): + """ + Query metrics from Datadog + + :param start: query start timestamp + :type start: POSIX timestamp + + :param end: query end timestamp + :type end: POSIX timestamp + + :param query: metric query + :type query: string query + + :returns: Dictionary representing the API's JSON response + + *start* and *end* should be less than 24 hours apart. + It is *not* meant to retrieve metric data in bulk. + + >>> api.Metric.query(start=int(time.time()) - 3600, end=int(time.time()), + query='avg:system.cpu.idle{*}') + """ + # Set the right endpoint + cls._resource_name = cls._METRIC_QUERY_ENDPOINT + + # `from` is a reserved keyword in Python, therefore + # `api.Metric.query(from=...)` is not permitted + # -> map `start` to `from` and `end` to `to` + try: + params["from"] = params.pop("start") + params["to"] = params.pop("end") + except KeyError as e: + raise ApiError("The parameter '{0}' is required".format(e.args[0])) + + return super(Metric, cls)._search(**params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py new file mode 100644 index 0000000..a2d9e74 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py @@ -0,0 +1,157 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) + + +class Monitor( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): + """ + A wrapper around Monitor HTTP API. + """ + + _resource_name = "monitor" + + @classmethod + def get(cls, id, **params): + """ + Get monitor's details. 
+ + :param id: monitor to retrieve + :type id: id + + :param group_states: string list indicating what, if any, group states to include + :type group_states: string list, strings are chosen from one or more \ + from 'all', 'alert', 'warn', or 'no data' + + :returns: Dictionary representing the API's JSON response + """ + if "group_states" in params and isinstance(params["group_states"], list): + params["group_states"] = ",".join(params["group_states"]) + + return super(Monitor, cls).get(id, **params) + + @classmethod + def get_all(cls, **params): + """ + Get all monitor details. + + :param group_states: string list indicating what, if any, group states to include + :type group_states: string list, strings are chosen from one or more \ + from 'all', 'alert', 'warn', or 'no data' + + :param name: name to filter the list of monitors by + :type name: string + + :param tags: tags to filter the list of monitors by scope + :type tags: string list + + :param monitor_tags: list indicating what service and/or custom tags, if any, \ + should be used to filter the list of monitors + :type monitor_tags: string list + + :returns: Dictionary representing the API's JSON response + """ + for p in ["group_states", "tags", "monitor_tags"]: + if p in params and isinstance(params[p], list): + params[p] = ",".join(params[p]) + + return super(Monitor, cls).get_all(**params) + + @classmethod + def mute(cls, id, **body): + """ + Mute a monitor. + + :param scope: scope to apply the mute + :type scope: string + + :param end: timestamp for when the mute should end + :type end: POSIX timestamp + + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "mute", id, **body) + + @classmethod + def unmute(cls, id, **body): + """ + Unmute a monitor. + + :param scope: scope to apply the unmute + :type scope: string + + :param all_scopes: if True, clears mute settings for all scopes + :type all_scopes: boolean + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "unmute", id, **body) + + @classmethod + def mute_all(cls): + """ + Globally mute monitors. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "mute_all") + + @classmethod + def unmute_all(cls): + """ + Cancel global monitor mute setting (does not remove mute settings for individual monitors). + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "unmute_all") + + @classmethod + def search(cls, **params): + """ + Search monitors. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "search", params=params) + + @classmethod + def search_groups(cls, **params): + """ + Search monitor groups. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "groups/search", params=params) + + @classmethod + def can_delete(cls, **params): + """ + Checks if the monitors corresponding to the monitor ids can be deleted. + + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("GET", "can_delete", params=params) + + @classmethod + def validate(cls, **body): + """ + Checks if the monitors definition is valid. 
+ + :returns: Dictionary representing the API's JSON response + """ + return super(Monitor, cls)._trigger_class_action("POST", "validate", **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py new file mode 100644 index 0000000..f12dad7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py @@ -0,0 +1,27 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + ActionAPIResource, + CreateableAPIResource, + CustomUpdatableAPIResource, + DeletableAPIResource, + GetableAPIResource, + ListableAPIResource, +) + + +class Permissions( + ActionAPIResource, + CreateableAPIResource, + CustomUpdatableAPIResource, + GetableAPIResource, + ListableAPIResource, + DeletableAPIResource, +): + """ + A wrapper around Tag HTTP API. + """ + + _resource_name = "permissions" + _api_version = "v2" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py new file mode 100644 index 0000000..67bcc39 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py @@ -0,0 +1,539 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Datadog API resources. +""" + +from datadog.api.api_client import APIClient + + +class CreateableAPIResource(object): + """ + Creatable API Resource + """ + + @classmethod + def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body): + """ + Create a new API resource object + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param id: create a new resource object as a child of the given object + :type id: id + + :param params: new resource object source + :type params: dictionary + + :param body: new resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = cls._resource_name + api_version = getattr(cls, "_api_version", None) + + if method == "GET": + return APIClient.submit("GET", path, api_version, **body) + if id is None: + return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params) + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params) + + +class SendableAPIResource(object): + """ + Fork of CreateableAPIResource class with different method names + """ + + @classmethod + def send(cls, attach_host_name=False, id=None, compress_payload=False, **body): + """ + Create an API resource object + + :param attach_host_name: link the new resource object to the host name + :type attach_host_name: bool + + :param id: create a new resource object as a child of the given object + :type id: id + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :param body: new resource object 
attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + + if id is None: + return APIClient.submit( + "POST", + cls._resource_name, + api_version, + body, + attach_host_name=attach_host_name, + compress_payload=compress_payload, + ) + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + return APIClient.submit( + "POST", path, api_version, body, attach_host_name=attach_host_name, compress_payload=compress_payload + ) + + +class UpdatableAPIResource(object): + """ + Updatable API Resource + """ + + @classmethod + def update(cls, id, params=None, **body): + """ + Update an API resource object + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class CustomUpdatableAPIResource(object): + """ + Updatable API Resource with custom HTTP Verb + """ + + @classmethod + def update(cls, method=None, id=None, params=None, **body): + """ + Update an API resource object + + :param method: HTTP method, defaults to PUT + :type params: string + + :param params: updatable resource id + :type params: string + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + + if method is None: + method = "PUT" + if params is None: + params = {} + + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit(method, path, api_version, body, **params) + + +class DeletableAPIResource(object): + """ + Deletable API Resource + """ + + @classmethod + def delete(cls, id, **params): + """ + Delete an API resource object + + :param id: resource object to delete + :type id: id + + :returns: Dictionary representing the API's JSON response + """ + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, **params) + + +class GetableAPIResource(object): + """ + Getable API Resource + """ + + @classmethod + def get(cls, id, **params): + """ + Get information about an API resource object + + :param id: resource object id to retrieve + :type id: id + + :param params: parameters to filter API resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", path, api_version, **params) + + +class ListableAPIResource(object): + """ + Listable API Resource + """ + + @classmethod + def get_all(cls, **params): + """ + List API resource objects + + :param params: parameters to filter API resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, 
"_api_version", None) + + return APIClient.submit("GET", cls._resource_name, api_version, **params) + + +class ListableAPISubResource(object): + """ + Listable API Sub-Resource + """ + + @classmethod + def get_items(cls, id, **params): + """ + List API sub-resource objects from a resource + + :param id: resource id to retrieve sub-resource objects from + :type id: id + + :param params: parameters to filter API sub-resource stream + :type params: dictionary + + :returns: Dictionary representing the API's JSON response + """ + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", path, api_version, **params) + + +class AddableAPISubResource(object): + """ + Addable API Sub-Resource + """ + + @classmethod + def add_items(cls, id, params=None, **body): + """ + Add new API sub-resource objects to a resource + + :param id: resource id to add sub-resource objects to + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: new sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, body, **params) + + +class UpdatableAPISubResource(object): + """ + Updatable API Sub-Resource + """ + + @classmethod + def update_items(cls, id, params=None, **body): + """ + Update API sub-resource objects of a resource + + :param id: resource id to update sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: updated sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class DeletableAPISubResource(object): + """ + Deletable API Sub-Resource + """ + + @classmethod + def delete_items(cls, id, params=None, **body): + """ + Delete API sub-resource objects from a resource + + :param id: resource id to delete sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: deleted sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, body, **params) + + +class SearchableAPIResource(object): + """ + Fork of ListableAPIResource class with different method names + """ + + @classmethod + def _search(cls, **params): + """ + Query an API resource stream + + :param params: parameters to filter API resource stream + :type params: 
dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("GET", cls._resource_name, api_version, **params) + + +class ActionAPIResource(object): + """ + Actionable API Resource + """ + + @classmethod + def _trigger_class_action(cls, method, action_name, id=None, params=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param action_name: action name + :type action_name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param params: action parameters + :type params: dictionary + + :param body: action body + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + api_version = getattr(cls, "_api_version", None) + + if id is None: + path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=action_name) + else: + path = "{resource_name}/{resource_id}/{action_name}".format( + resource_name=cls._resource_name, resource_id=id, action_name=action_name + ) + if method == "GET": + # Do not add body to GET requests, it causes 400 Bad request responses on EU site + body = None + return APIClient.submit(method, path, api_version, body, **params) + + @classmethod + def _trigger_action(cls, method, name, id=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param name: action name + :type name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param body: action body + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + api_version = getattr(cls, "_api_version", None) + if id is None: + return APIClient.submit(method, name, api_version, body) + + path = "{action_name}/{resource_id}".format(action_name=name, resource_id=id) + if method == "GET": + # Do not add body to GET requests, it causes 400 Bad request responses on EU site + body = None + return APIClient.submit(method, path, api_version, body) + + +class UpdatableAPISyntheticsSubResource(object): + """ + Update Synthetics sub resource + """ + + @classmethod + def update_synthetics_items(cls, id, params=None, **body): + """ + Update API sub-resource objects of a resource + + :param id: resource id to update sub-resource objects from + :type id: id + + :param params: request parameters + :type params: dictionary + + :param body: updated sub-resource objects attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + path = "{resource_name}/tests/{resource_id}/{sub_resource_name}".format( + resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name + ) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class UpdatableAPISyntheticsResource(object): + """ + Update Synthetics resource + """ + + @classmethod + def update_synthetics(cls, id, params=None, **body): + """ + Update an API resource object + + :param params: updated resource object source + :type params: dictionary + + :param body: updated resource object attributes + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + 
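+ # Synthetics tests are addressed as "synthetics/tests/<public_id>", hence the extra
+ # "tests" path segment here compared with the generic UpdatableAPIResource above.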
path = "{resource_name}/tests/{resource_id}".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("PUT", path, api_version, body, **params) + + +class ActionAPISyntheticsResource(object): + """ + Actionable Synthetics API Resource + """ + + @classmethod + def _trigger_synthetics_class_action(cls, method, name, id=None, params=None, **body): + """ + Trigger an action + + :param method: HTTP method to use to contact API endpoint + :type method: HTTP method string + + :param name: action name + :type name: string + + :param id: trigger the action for the specified resource object + :type id: id + + :param params: action parameters + :type params: dictionary + + :param body: action body + :type body: dictionary + + :returns: Dictionary representing the API's JSON response + """ + if params is None: + params = {} + + api_version = getattr(cls, "_api_version", None) + + if id is None: + path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=name) + else: + path = "{resource_name}/{action_name}/{resource_id}".format( + resource_name=cls._resource_name, resource_id=id, action_name=name + ) + if method == "GET": + # Do not add body to GET requests, it causes 400 Bad request responses on EU site + body = None + return APIClient.submit(method, path, api_version, body, **params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py new file mode 100644 index 0000000..2fce1dd --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py @@ -0,0 +1,71 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + ActionAPIResource, + CreateableAPIResource, + CustomUpdatableAPIResource, + DeletableAPIResource, + GetableAPIResource, + ListableAPIResource, +) + +from datadog.api.api_client import APIClient + + +class Roles( + ActionAPIResource, + CreateableAPIResource, + CustomUpdatableAPIResource, + GetableAPIResource, + ListableAPIResource, + DeletableAPIResource, +): + """ + A wrapper around Tag HTTP API. 
+ """ + + _resource_name = "roles" + _api_version = "v2" + + @classmethod + def update(cls, id, **body): + """ + Update a role's attributes + + :param id: uuid of the role + :param body: dict with type of the input, role `id`, and modified attributes + :returns: Dictionary representing the API's JSON response + """ + params = {} + return super(Roles, cls).update("PATCH", id, params=params, **body) + + @classmethod + def assign_permission(cls, id, **body): + """ + Assign permission to a role + + :param id: uuid of the role to assign permission to + :param body: dict with "type": "permissions" and uuid of permission to assign + :returns: Dictionary representing the API's JSON response + """ + params = {} + path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("POST", path, api_version, body, **params) + + @classmethod + def unassign_permission(cls, id, **body): + """ + Unassign permission from a role + + :param id: uuid of the role to unassign permission from + :param body: dict with "type": "permissions" and uuid of permission to unassign + :returns: Dictionary representing the API's JSON response + """ + params = {} + path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id) + api_version = getattr(cls, "_api_version", None) + + return APIClient.submit("DELETE", path, api_version, body, **params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py new file mode 100644 index 0000000..9367ab7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py @@ -0,0 +1,50 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ActionAPIResource, + ListableAPIResource, +) + + +class Screenboard( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + ActionAPIResource, + ListableAPIResource, +): + """ + A wrapper around Screenboard HTTP API. + """ + + _resource_name = "screen" + + @classmethod + def share(cls, board_id): + """ + Share the screenboard with given id + + :param board_id: screenboard to share + :type board_id: id + + :returns: Dictionary representing the API's JSON response + """ + return super(Screenboard, cls)._trigger_action("POST", "screen/share", board_id) + + @classmethod + def revoke(cls, board_id): + """ + Revoke a shared screenboard with given id + + :param board_id: screenboard to revoke + :type board_id: id + + :returns: Dictionary representing the API's JSON response + """ + return super(Screenboard, cls)._trigger_action("DELETE", "screen/share", board_id) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py new file mode 100644 index 0000000..72fcb9a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py @@ -0,0 +1,45 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.api.constants import CheckStatus +from datadog.api.exceptions import ApiError +from datadog.api.resources import ActionAPIResource + + +class ServiceCheck(ActionAPIResource): + """ + A wrapper around ServiceCheck HTTP API. + """ + + @classmethod + def check(cls, **body): + """ + Post check statuses for use with monitors + + :param check: text for the message + :type check: string + + :param host_name: name of the host submitting the check + :type host_name: string + + :param status: integer for the status of the check + :type status: Options: '0': OK, '1': WARNING, '2': CRITICAL, '3': UNKNOWN + + :param timestamp: timestamp of the event + :type timestamp: POSIX timestamp + + :param message: description of why this status occurred + :type message: string + + :param tags: list of tags for this check + :type tags: string list + + :returns: Dictionary representing the API's JSON response + """ + + # Validate checks, include only non-null values + for param, value in body.items(): + if param == "status" and body[param] not in CheckStatus.ALL: + raise ApiError("Invalid status, expected one of: %s" % ", ".join(str(v) for v in CheckStatus.ALL)) + + return super(ServiceCheck, cls)._trigger_action("POST", "check_run", **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py new file mode 100644 index 0000000..abb5a5d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py @@ -0,0 +1,213 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.util.format import force_to_epoch_seconds +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +) + + +class ServiceLevelObjective( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, + ActionAPIResource, +): + """ + A wrapper around Service Level Objective HTTP API. + """ + + _resource_name = "slo" + + @classmethod + def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body): + """ + Create a SLO + + :returns: created SLO details + """ + return super(ServiceLevelObjective, cls).create( + attach_host_name=False, method="POST", id=None, params=params, **body + ) + + @classmethod + def get(cls, id, **params): + """ + Get a specific SLO details. + + :param id: SLO id to get details for + :type id: str + + :returns: SLO details + """ + return super(ServiceLevelObjective, cls).get(id, **params) + + @classmethod + def get_all(cls, query=None, tags_query=None, metrics_query=None, ids=None, offset=0, limit=100, **params): + """ + Get all SLO details. + + :param query: optional search query to filter results for SLO name + :type query: str + + :param tags_query: optional search query to filter results for a single SLO tag + :type query: str + + :param metrics_query: optional search query to filter results based on SLO numerator and denominator + :type query: str + + :param ids: optional list of SLO ids to get many specific SLOs at once. 
+ :type ids: list(str) + + :param offset: offset of results to use (default 0) + :type offset: int + + :param limit: limit of results to return (default: 100) + :type limit: int + + :returns: SLOs matching the query + """ + search_terms = {} + if query: + search_terms["query"] = query + if ids: + search_terms["ids"] = ids + if tags_query: + search_terms["tags_query"] = tags_query + if metrics_query: + search_terms["metrics_query"] = metrics_query + search_terms["offset"] = offset + search_terms["limit"] = limit + + return super(ServiceLevelObjective, cls).get_all(**search_terms) + + @classmethod + def update(cls, id, params=None, **body): + """ + Update a specific SLO details. + + :param id: SLO id to update details for + :type id: str + + :returns: SLO details + """ + return super(ServiceLevelObjective, cls).update(id, params, **body) + + @classmethod + def delete(cls, id, **params): + """ + Delete a specific SLO. + + :param id: SLO id to delete + :type id: str + + :returns: SLO ids removed + """ + return super(ServiceLevelObjective, cls).delete(id, **params) + + @classmethod + def bulk_delete(cls, ops, **params): + """ + Bulk Delete Timeframes from multiple SLOs. + + :param ops: a dictionary mapping of SLO ID to timeframes to remove. + :type ops: dict(str, list(str)) + + :returns: Dictionary representing the API's JSON response + `errors` - errors with operation + `data` - updates and deletions + """ + return super(ServiceLevelObjective, cls)._trigger_class_action( + "POST", + "bulk_delete", + body=ops, + params=params, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def delete_many(cls, ids, **params): + """ + Delete Multiple SLOs + + :param ids: a list of SLO IDs to remove + :type ids: list(str) + + :returns: Dictionary representing the API's JSON response see `data` list(slo ids) && `errors` + """ + return super(ServiceLevelObjective, cls)._trigger_class_action( + "DELETE", + "", + params=params, + body={"ids": ids}, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def can_delete(cls, ids, **params): + """ + Check if the following SLOs can be safely deleted. + + This is used to check if SLO has any references to it. + + :param ids: a list of SLO IDs to check + :type ids: list(str) + + :returns: Dictionary representing the API's JSON response + "data.ok" represents a list of SLO ids that have no known references. + "errors" contains a dictionary of SLO ID to known reference(s). + """ + params["ids"] = ids + return super(ServiceLevelObjective, cls)._trigger_class_action( + "GET", + "can_delete", + params=params, + body=None, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def history(cls, id, from_ts, to_ts, **params): + """ + Get the SLO's history from the given time range. + + :param id: SLO ID to query + :type id: str + + :param from_ts: `from` timestamp in epoch seconds to query + :type from_ts: int|datetime.datetime + + :param to_ts: `to` timestamp in epoch seconds to query, must be > `from_ts` + :type to_ts: int|datetime.datetime + + :returns: Dictionary representing the API's JSON response + "data.ok" represents a list of SLO ids that have no known references. + "errors" contains a dictionary of SLO ID to known reference(s). 
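+
+ Example (the SLO id shown is illustrative):
+
+ >>> import time
+ >>> api.ServiceLevelObjective.history('slo-abc123', from_ts=int(time.time()) - 86400,
+ to_ts=int(time.time()))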
+ """ + params["id"] = id + params["from_ts"] = force_to_epoch_seconds(from_ts) + params["to_ts"] = force_to_epoch_seconds(to_ts) + return super(ServiceLevelObjective, cls)._trigger_class_action( + "GET", + "history", + id=id, + params=params, + body=None, + suppress_response_errors_on_codes=[200], + ) + + @classmethod + def search(cls, **params): + """ + Search SLOs. + + :returns: Dictionary representing the API's JSON response + """ + return super(ServiceLevelObjective, cls)._trigger_class_action("GET", "search", params=params) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py new file mode 100644 index 0000000..88c0e3a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py @@ -0,0 +1,214 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.exceptions import ApiError +from datadog.api.resources import ( + CreateableAPIResource, + GetableAPIResource, + ActionAPIResource, + UpdatableAPISyntheticsResource, + UpdatableAPISyntheticsSubResource, + ActionAPISyntheticsResource, +) + + +class Synthetics( + ActionAPIResource, + ActionAPISyntheticsResource, + CreateableAPIResource, + GetableAPIResource, + UpdatableAPISyntheticsResource, + UpdatableAPISyntheticsSubResource, +): + """ + A wrapper around Sythetics HTTP API. + """ + + _resource_name = "synthetics" + _sub_resource_name = "status" + + @classmethod + def get_test(cls, id, **params): + """ + Get test's details. + + :param id: public id of the test to retrieve + :type id: string + + :returns: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests/ + + name = "tests" + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", id=id, name=name, params=params) + + @classmethod + def get_all_tests(cls, **params): + """ + Get all tests' details. 
+ + :returns: Dictionary representing the API's JSON response + """ + + for p in ["locations", "tags"]: + if p in params and isinstance(params[p], list): + params[p] = ",".join(params[p]) + + # API path = "synthetics/tests" + + return super(Synthetics, cls).get(id="tests", params=params) + + @classmethod + def get_devices(cls, **params): + """ + Get a list of devices for browser checks + + :returns: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/browser/devices" + + name = "browser/devices" + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params) + + @classmethod + def get_locations(cls, **params): + """ + Get a list of all available locations + + :return: Dictionary representing the API's JSON response + """ + + name = "locations" + + # API path = "synthetics/locations + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params) + + @classmethod + def get_results(cls, id, **params): + """ + Get the most recent results for a test + + :param id: public id of the test to retrieve results for + :type id: id + + :return: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests//results + + path = "tests/{}/results".format(id) + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params) + + @classmethod + def get_result(cls, id, result_id, **params): + """ + Get a specific result for a given test. + + :param id: public ID of the test to retrieve the most recent result for + :type id: id + + :param result_id: result ID of the test to retrieve the most recent result for + :type result_id: id + + :returns: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests/results/ + + path = "tests/{}/results/{}".format(id, result_id) + + return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params) + + @classmethod + def create_test(cls, **params): + """ + Create a test + + :param name: A unique name for the test + :type name: string + + :param type: The type of test. Valid values are api and browser + :type type: string + + :param subtype: required for SSL test - For a SSL API test, specify ssl as the value. + :Otherwise, you should omit this argument. + :type subtype: string + + :param config: The test configuration, contains the request specification and the assertions. 
+ :type config: dict + + :param options: List of options to customize the test + :type options: dict + + :param message: A description of the test + :type message: string + + :param locations: A list of the locations to send the tests from + :type locations: list + + :param tags: A list of tags used to filter the test + :type tags: list + + :return: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests" + + return super(Synthetics, cls).create(id="tests", **params) + + @classmethod + def edit_test(cls, id, **params): + """ + Edit a test + + :param id: Public id of the test to edit + :type id: string + + :return: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests/" + + return super(Synthetics, cls).update_synthetics(id=id, **params) + + @classmethod + def start_or_pause_test(cls, id, **body): + """ + Pause a given test + + :param id: public id of the test to pause + :type id: string + + :param new_status: mew status for the test + :type id: string + + :returns: Dictionary representing the API's JSON response + """ + + # API path = "synthetics/tests//status" + + return super(Synthetics, cls).update_synthetics_items(id=id, **body) + + @classmethod + def delete_test(cls, **body): + """ + Delete a test + + :param public_ids: list of public IDs to delete corresponding tests + :type public_ids: list of strings + + :return: Dictionary representing the API's JSON response + """ + + if not isinstance(body["public_ids"], list): + raise ApiError("Parameter 'public_ids' must be a list") + + # API path = "synthetics/tests/delete + + return super(Synthetics, cls)._trigger_action("POST", name="synthetics", id="tests/delete", **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py new file mode 100644 index 0000000..2226cdb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py @@ -0,0 +1,54 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + CreateableAPIResource, + UpdatableAPIResource, + DeletableAPIResource, + GetableAPIResource, + ListableAPIResource, +) + + +class Tag(CreateableAPIResource, UpdatableAPIResource, GetableAPIResource, ListableAPIResource, DeletableAPIResource): + """ + A wrapper around Tag HTTP API. 
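+
+ Example (the host name and tags shown are illustrative):
+
+ >>> api.Tag.create('my-web-host', tags=['env:staging', 'role:web'])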
+ """ + + _resource_name = "tags/hosts" + + @classmethod + def create(cls, host, **body): + """ + Add tags to a host + + :param tags: list of tags to apply to the host + :type tags: string list + + :param source: source of the tags + :type source: string + + :returns: Dictionary representing the API's JSON response + """ + params = {} + if "source" in body: + params["source"] = body["source"] + return super(Tag, cls).create(id=host, params=params, **body) + + @classmethod + def update(cls, host, **body): + """ + Update all tags for a given host + + :param tags: list of tags to apply to the host + :type tags: string list + + :param source: source of the tags + :type source: string + + :returns: Dictionary representing the API's JSON response + """ + params = {} + if "source" in body: + params["source"] = body["source"] + return super(Tag, cls).update(id=host, params=params, **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py new file mode 100644 index 0000000..42d34da --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py @@ -0,0 +1,20 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +) + + +class Timeboard( + GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, ListableAPIResource, DeletableAPIResource +): + """ + A wrapper around Timeboard HTTP API. + """ + + _resource_name = "dash" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py new file mode 100644 index 0000000..ff0b2f2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py @@ -0,0 +1,50 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +from datadog.api.resources import ( + ActionAPIResource, + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +) + + +class User( + ActionAPIResource, + GetableAPIResource, + CreateableAPIResource, + UpdatableAPIResource, + ListableAPIResource, + DeletableAPIResource, +): + + _resource_name = "user" + + """ + A wrapper around User HTTP API. + """ + + @classmethod + def invite(cls, emails): + """ + Send an invite to join datadog to each of the email addresses in the + *emails* list. If *emails* is a string, it will be wrapped in a list and + sent. Returns a list of email addresses for which an email was sent. + + :param emails: emails addresses to invite to join datadog + :type emails: string list + + :returns: Dictionary representing the API's JSON response + """ + print("[DEPRECATION] User.invite() is deprecated. 
Use `create` instead.") + + if not isinstance(emails, list): + emails = [emails] + + body = { + "emails": emails, + } + + return super(User, cls)._trigger_action("POST", "/invite_users", **body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py new file mode 100644 index 0000000..cb4aab6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py @@ -0,0 +1,113 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import os +import warnings +import sys + +# 3p +import argparse + +# datadog +from datadog import initialize, __version__ +from datadog.dogshell.comment import CommentClient +from datadog.dogshell.common import DogshellConfig +from datadog.dogshell.dashboard_list import DashboardListClient +from datadog.dogshell.downtime import DowntimeClient +from datadog.dogshell.event import EventClient +from datadog.dogshell.host import HostClient +from datadog.dogshell.metric import MetricClient +from datadog.dogshell.monitor import MonitorClient +from datadog.dogshell.screenboard import ScreenboardClient +from datadog.dogshell.search import SearchClient +from datadog.dogshell.service_check import ServiceCheckClient +from datadog.dogshell.service_level_objective import ServiceLevelObjectiveClient +from datadog.dogshell.tag import TagClient +from datadog.dogshell.timeboard import TimeboardClient +from datadog.dogshell.dashboard import DashboardClient + + +def main(): + if sys.argv[0].endswith("dog"): + warnings.warn("dog is pending deprecation. Please use dogshell instead.", PendingDeprecationWarning) + + parser = argparse.ArgumentParser( + description="Interact with the Datadog API", formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "--config", help="location of your dogrc file (default ~/.dogrc)", default=os.path.expanduser("~/.dogrc") + ) + parser.add_argument( + "--api-key", + help="your API key, from " + "https://app.datadoghq.com/account/settings#api. " + "You can also set the environment variables DATADOG_API_KEY or DD_API_KEY", + dest="api_key", + default=os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY")), + ) + parser.add_argument( + "--application-key", + help="your Application key, from " + "https://app.datadoghq.com/account/settings#api. " + "You can also set the environment variables DATADOG_APP_KEY or DD_APP_KEY", + dest="app_key", + default=os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY")), + ) + parser.add_argument( + "--pretty", + help="pretty-print output (suitable for human consumption, " "less useful for scripting)", + dest="format", + action="store_const", + const="pretty", + ) + parser.add_argument( + "--raw", help="raw JSON as returned by the HTTP service", dest="format", action="store_const", const="raw" + ) + parser.add_argument( + "--timeout", help="time to wait in seconds before timing" " out an API call (default 10)", default=10, type=int + ) + parser.add_argument( + "-v", "--version", help="Dog API version", action="version", version="%(prog)s {0}".format(__version__) + ) + + parser.add_argument( + "--api_host", + help="Datadog site to send data, us (datadoghq.com), eu (datadoghq.eu), us3 (us3.datadoghq.com), \ + us5 (us5.datadoghq.com), ap1 (ap1.datadoghq.com), gov (ddog-gov.com), or custom url. 
default: us", + dest="api_host", + ) + + config = DogshellConfig() + + # Set up subparsers for each service + subparsers = parser.add_subparsers(title="Modes", dest="mode") + subparsers.required = True + + CommentClient.setup_parser(subparsers) + SearchClient.setup_parser(subparsers) + MetricClient.setup_parser(subparsers) + TagClient.setup_parser(subparsers) + EventClient.setup_parser(subparsers) + MonitorClient.setup_parser(subparsers) + TimeboardClient.setup_parser(subparsers) + DashboardClient.setup_parser(subparsers) + ScreenboardClient.setup_parser(subparsers) + DashboardListClient.setup_parser(subparsers) + HostClient.setup_parser(subparsers) + DowntimeClient.setup_parser(subparsers) + ServiceCheckClient.setup_parser(subparsers) + ServiceLevelObjectiveClient.setup_parser(subparsers) + + args = parser.parse_args() + + config.load(args.config, args.api_key, args.app_key, args.api_host) + + # Initialize datadog.api package + initialize(**config) + + args.func(args) + + +if __name__ == "__main__": + main() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py new file mode 100644 index 0000000..208d009 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py @@ -0,0 +1,152 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import sys + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class CommentClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("comment", help="Post, update, and delete comments.") + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post comments.") + post_parser.add_argument("handle", help="handle to post as.") + post_parser.add_argument("comment", help="comment message to post. if unset," " reads from stdin.", nargs="?") + post_parser.set_defaults(func=cls._post) + + update_parser = verb_parsers.add_parser("update", help="Update existing comments.") + update_parser.add_argument("comment_id", help="comment to update (by id)") + update_parser.add_argument("handle", help="handle to post as.") + update_parser.add_argument("comment", help="comment message to post." " if unset, reads from stdin.", nargs="?") + update_parser.set_defaults(func=cls._update) + + reply_parser = verb_parsers.add_parser("reply", help="Reply to existing comments.") + reply_parser.add_argument("comment_id", help="comment to reply to (by id)") + reply_parser.add_argument("handle", help="handle to post as.") + reply_parser.add_argument("comment", help="comment message to post." 
" if unset, reads from stdin.", nargs="?") + reply_parser.set_defaults(func=cls._reply) + + show_parser = verb_parsers.add_parser("show", help="Show comment details.") + show_parser.add_argument("comment_id", help="comment to show") + show_parser.set_defaults(func=cls._show) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + handle = args.handle + comment = args.comment + format = args.format + if comment is None: + comment = sys.stdin.read() + res = api.Comment.create(handle=handle, message=comment) + report_warnings(res) + report_errors(res) + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": + print(json.dumps(res)) + else: + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) + + @classmethod + def _update(cls, args): + handle = args.handle + comment = args.comment + id = args.comment_id + format = args.format + if comment is None: + comment = sys.stdin.read() + res = api.Comment.update(id, handle=handle, message=comment) + report_warnings(res) + report_errors(res) + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": + print(json.dumps(res)) + else: + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) + + @classmethod + def _reply(cls, args): + api._timeout = args.timeout + handle = args.handle + comment = args.comment + id = args.comment_id + format = args.format + if comment is None: + comment = sys.stdin.read() + res = api.Comment.create(handle=handle, message=comment, related_event_id=id) + report_warnings(res) + report_errors(res) + if format == "pretty": + message = res["comment"]["message"] + lines = message.split("\n") + message = "\n".join([" " + line for line in lines]) + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\n" + message) + elif format == "raw": + print(json.dumps(res)) + else: + print("id\t\t" + str(res["comment"]["id"])) + print("url\t\t" + res["comment"]["url"]) + print("resource\t" + res["comment"]["resource"]) + print("handle\t\t" + res["comment"]["handle"]) + print("message\t\t" + res["comment"]["message"].__repr__()) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + id = args.comment_id + format = args.format + res = api.Event.get(id) + report_warnings(res) + report_errors(res) + if format == "pretty": + message = res["event"]["text"] + lines = message.split("\n") + message = "\n".join([" " + line for line in 
lines]) + print("id\t\t" + str(res["event"]["id"])) + print("url\t\t" + res["event"]["url"]) + print("resource\t" + res["event"]["resource"]) + print("message\n" + message) + elif format == "raw": + print(json.dumps(res)) + else: + print("id\t\t" + str(res["event"]["id"])) + print("url\t\t" + res["event"]["url"]) + print("resource\t" + res["event"]["resource"]) + print("message\t\t" + res["event"]["text"].__repr__()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py new file mode 100644 index 0000000..251e658 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py @@ -0,0 +1,122 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +from __future__ import print_function +import os +import sys + +# datadog +from datadog.util.compat import is_p3k, configparser, IterableUserDict, get_input + + +def print_err(msg): + if is_p3k(): + print(msg + "\n", file=sys.stderr) + else: + sys.stderr.write(msg + "\n") + sys.stderr.flush() + + +def report_errors(res): + if "errors" in res: + errors = res["errors"] + if isinstance(errors, list): + for error in errors: + print_err("ERROR: {}".format(error)) + else: + print_err("ERROR: {}".format(errors)) + sys.exit(1) + return False + + +def report_warnings(res): + if "warnings" in res: + warnings = res["warnings"] + if isinstance(warnings, list): + for warning in warnings: + print_err("WARNING: {}".format(warning)) + else: + print_err("WARNING: {}".format(warnings)) + return True + return False + + +class DogshellConfig(IterableUserDict): + def load(self, config_file, api_key, app_key, api_host): + config = configparser.ConfigParser() + + if api_host is not None: + if api_host in ("datadoghq.com", "us"): + self["api_host"] = "https://api.datadoghq.com" + elif api_host in ("datadoghq.eu", "eu"): + self["api_host"] = "https://api.datadoghq.eu" + elif api_host in ("us3.datadoghq.com", "us3"): + self["api_host"] = "https://api.us3.datadoghq.com" + elif api_host in ("us5.datadoghq.com", "us5"): + self["api_host"] = "https://api.us5.datadoghq.com" + elif api_host in ("ap1.datadoghq.com", "ap1"): + self["api_host"] = "https://api.ap1.datadoghq.com" + elif api_host in ("ddog-gov.com", "gov"): + self["api_host"] = "https://api.ddog-gov.com" + else: + self["api_host"] = api_host + if api_key is not None and app_key is not None: + self["api_key"] = api_key + self["app_key"] = app_key + else: + if os.access(config_file, os.F_OK): + config.read(config_file) + if not config.has_section("Connection"): + report_errors({"errors": ["%s has no [Connection] section" % config_file]}) + else: + try: + response = None + while response is None or response.strip().lower() not in ["", "y", "n"]: + response = get_input("%s does not exist. Would you like to" " create it? [Y/n] " % config_file) + if response.strip().lower() in ["", "y"]: + # Read the api and app keys from stdin + while True: + api_key = get_input( + "What is your api key? (Get it here: " + "https://app.datadoghq.com/account/settings#api) " + ) + if api_key.isalnum(): + break + print("Datadog api keys can only contain alphanumeric characters.") + while True: + app_key = get_input( + "What is your app key? 
(Get it here: " + "https://app.datadoghq.com/account/settings#api) " + ) + if app_key.isalnum(): + break + print("Datadog app keys can only contain alphanumeric characters.") + + # Write the config file + config.add_section("Connection") + config.set("Connection", "apikey", api_key) + config.set("Connection", "appkey", app_key) + + f = open(config_file, "w") + config.write(f) + f.close() + print("Wrote %s" % config_file) + elif response.strip().lower() == "n": + # Abort + print_err("Exiting\n") + sys.exit(1) + except (KeyboardInterrupt, EOFError): + # Abort + print_err("\nExiting") + sys.exit(1) + + self["api_key"] = config.get("Connection", "apikey") + self["app_key"] = config.get("Connection", "appkey") + if config.has_section("Proxy"): + self["proxies"] = dict(config.items("Proxy")) + if config.has_option("Connection", "host_name"): + self["host_name"] = config.get("Connection", "host_name") + if config.has_option("Connection", "api_host"): + self["api_host"] = config.get("Connection", "api_host") + assert self["api_key"] is not None and self["app_key"] is not None diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py new file mode 100644 index 0000000..bc37bd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py @@ -0,0 +1,174 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import sys + +# 3p +import argparse + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings +from datadog.util.format import pretty_json + + +class DashboardClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("dashboard", help="Create, edit, and delete dashboards") + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create dashboards") + # Required arguments: + post_parser.add_argument("title", help="title for the new dashboard") + post_parser.add_argument( + "widgets", help="widget definitions as a JSON string. If unset," " reads from stdin.", nargs="?" + ) + post_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.") + # Optional arguments: + post_parser.add_argument("--description", help="Short description of the dashboard") + post_parser.add_argument( + "--read_only", + help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.", + action="store_true", + ) + post_parser.add_argument( + "--notify_list", + type=_json_string, + help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'', + ) + post_parser.add_argument( + "--template_variables", + type=_json_string, + help="A json list of template variable dicts, e.g. 
" + '\'[{"name": "host", "prefix": "host", ' + '"default": "my-host"}]\'', + ) + post_parser.set_defaults(func=cls._post) + + update_parser = verb_parsers.add_parser("update", help="Update existing dashboards") + # Required arguments: + update_parser.add_argument("dashboard_id", help="Dashboard to replace" " with the new definition") + update_parser.add_argument("title", help="New title for the dashboard") + update_parser.add_argument( + "widgets", help="Widget definitions as a JSON string." " If unset, reads from stdin", nargs="?" + ) + update_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.") + # Optional arguments: + update_parser.add_argument("--description", help="Short description of the dashboard") + update_parser.add_argument( + "--read_only", + help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.", + action="store_true", + ) + update_parser.add_argument( + "--notify_list", + type=_json_string, + help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'', + ) + update_parser.add_argument( + "--template_variables", + type=_json_string, + help="A json list of template variable dicts, e.g. " + '\'[{"name": "host", "prefix": "host", ' + '"default": "my-host"}]\'', + ) + update_parser.set_defaults(func=cls._update) + + show_parser = verb_parsers.add_parser("show", help="Show a dashboard definition") + show_parser.add_argument("dashboard_id", help="Dashboard to show") + show_parser.set_defaults(func=cls._show) + + delete_parser = verb_parsers.add_parser("delete", help="Delete dashboards") + delete_parser.add_argument("dashboard_id", help="Dashboard to delete") + delete_parser.set_defaults(func=cls._delete) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + widgets = args.widgets + if args.widgets is None: + widgets = sys.stdin.read() + widgets = json.loads(widgets) + + # Required arguments + payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type} + # Optional arguments + if args.description: + payload["description"] = args.description + if args.read_only: + payload["is_read_only"] = args.read_only + if args.notify_list: + payload["notify_list"] = args.notify_list + if args.template_variables: + payload["template_variables"] = args.template_variables + + res = api.Dashboard.create(**payload) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + widgets = args.widgets + if args.widgets is None: + widgets = sys.stdin.read() + widgets = json.loads(widgets) + + # Required arguments + payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type} + # Optional arguments + if args.description: + payload["description"] = args.description + if args.read_only: + payload["is_read_only"] = args.read_only + if args.notify_list: + payload["notify_list"] = args.notify_list + if args.template_variables: + payload["template_variables"] = args.template_variables + + res = api.Dashboard.update(args.dashboard_id, **payload) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Dashboard.get(args.dashboard_id) + report_warnings(res) + 
report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + res = api.Dashboard.delete(args.dashboard_id) + if res is not None: + report_warnings(res) + report_errors(res) + + +def _json_string(str): + try: + return json.loads(str) + except Exception: + raise argparse.ArgumentTypeError("bad json parameter") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py new file mode 100644 index 0000000..9164ba7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py @@ -0,0 +1,339 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class DashboardListClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("dashboard_list", help="Create, edit, and delete dashboard lists") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + # Create Dashboard List parser + post_parser = verb_parsers.add_parser("post", help="Create a dashboard list") + post_parser.add_argument("name", help="Name for the dashboard list") + post_parser.set_defaults(func=cls._post) + + # Update Dashboard List parser + update_parser = verb_parsers.add_parser("update", help="Update existing dashboard list") + update_parser.add_argument("dashboard_list_id", help="Dashboard list to replace with the new definition") + update_parser.add_argument("name", help="Name for the dashboard list") + update_parser.set_defaults(func=cls._update) + + # Show Dashboard List parser + show_parser = verb_parsers.add_parser("show", help="Show a dashboard list definition") + show_parser.add_argument("dashboard_list_id", help="Dashboard list to show") + show_parser.set_defaults(func=cls._show) + + # Show All Dashboard Lists parser + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all dashboard lists") + show_all_parser.set_defaults(func=cls._show_all) + + # Delete Dashboard List parser + delete_parser = verb_parsers.add_parser("delete", help="Delete existing dashboard list") + delete_parser.add_argument("dashboard_list_id", help="Dashboard list to delete") + delete_parser.set_defaults(func=cls._delete) + + # Get Dashboards for Dashboard List parser + get_dashboards_parser = verb_parsers.add_parser( + "show_dashboards", help="Show a list of all dashboards for an existing dashboard list" + ) + get_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from") + get_dashboards_parser.set_defaults(func=cls._show_dashboards) + + # Get Dashboards for Dashboard List parser (v2) + get_dashboards_v2_parser = verb_parsers.add_parser( + "show_dashboards_v2", help="Show a list of all dashboards for an existing dashboard list" + ) + get_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from") + get_dashboards_v2_parser.set_defaults(func=cls._show_dashboards_v2) + + # Add Dashboards to Dashboard List parser + add_dashboards_parser = verb_parsers.add_parser( + 
"add_dashboards", help="Add dashboards to an existing dashboard list" + ) + add_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to") + + add_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + add_dashboards_parser.set_defaults(func=cls._add_dashboards) + + # Add Dashboards to Dashboard List parser (v2) + add_dashboards_v2_parser = verb_parsers.add_parser( + "add_dashboards_v2", help="Add dashboards to an existing dashboard list" + ) + add_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to") + add_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + add_dashboards_v2_parser.set_defaults(func=cls._add_dashboards_v2) + + # Update Dashboards of Dashboard List parser + update_dashboards_parser = verb_parsers.add_parser( + "update_dashboards", help="Update dashboards of an existing dashboard list" + ) + update_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards") + update_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + update_dashboards_parser.set_defaults(func=cls._update_dashboards) + + # Update Dashboards of Dashboard List parser (v2) + update_dashboards_v2_parser = verb_parsers.add_parser( + "update_dashboards_v2", help="Update dashboards of an existing dashboard list" + ) + update_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards") + update_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + update_dashboards_v2_parser.set_defaults(func=cls._update_dashboards_v2) + + # Delete Dashboards from Dashboard List parser + delete_dashboards_parser = verb_parsers.add_parser( + "delete_dashboards", help="Delete dashboards from an existing dashboard list" + ) + delete_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from") + delete_dashboards_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. " + + '[{"type": "custom_timeboard", "id": 1234}, ' + + '{"type": "custom_screenboard", "id": 123}]', + ) + delete_dashboards_parser.set_defaults(func=cls._delete_dashboards) + + # Delete Dashboards from Dashboard List parser + delete_dashboards_v2_parser = verb_parsers.add_parser( + "delete_dashboards_v2", help="Delete dashboards from an existing dashboard list" + ) + delete_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from") + delete_dashboards_v2_parser.add_argument( + "dashboards", + help="A JSON list of dashboard dicts, e.g. 
" + + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, ' + + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]', + ) + delete_dashboards_v2_parser.set_defaults(func=cls._delete_dashboards_v2) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + name = args.name + + res = api.DashboardList.create(name=name) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + name = args.name + + res = api.DashboardList.update(dashboard_list_id, name=name) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.get(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + + res = api.DashboardList.get_all() + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.delete(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.get_items(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + + res = api.DashboardList.v2.get_items(dashboard_list_id) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _add_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.add_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _add_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.add_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.update_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + 
report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.update_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_dashboards(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.delete_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_dashboards_v2(cls, args): + api._timeout = args.timeout + format = args.format + dashboard_list_id = args.dashboard_list_id + dashboards = json.loads(args.dashboards) + + res = api.DashboardList.v2.delete_items(dashboard_list_id, dashboards=dashboards) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py new file mode 100644 index 0000000..1c53b46 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py @@ -0,0 +1,132 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class DowntimeClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("downtime", help="Create, edit, and delete downtimes") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent downtime IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create a downtime") + post_parser.add_argument("scope", help="scope to apply downtime to") + post_parser.add_argument("start", help="POSIX timestamp to start the downtime", default=None) + post_parser.add_argument("--end", help="POSIX timestamp to end the downtime", default=None) + post_parser.add_argument( + "--message", help="message to include with notifications" " for this downtime", default=None + ) + post_parser.set_defaults(func=cls._schedule_downtime) + + update_parser = verb_parsers.add_parser("update", help="Update existing downtime") + update_parser.add_argument("downtime_id", help="downtime to replace" " with the new definition") + update_parser.add_argument("--scope", help="scope to apply downtime to") + update_parser.add_argument("--start", help="POSIX timestamp to start" " the downtime", default=None) + update_parser.add_argument("--end", help="POSIX timestamp to" " end the downtime", default=None) + update_parser.add_argument( + "--message", help="message to include with notifications" " for this downtime", default=None + ) + update_parser.set_defaults(func=cls._update_downtime) + + show_parser = verb_parsers.add_parser("show", help="Show a downtime definition") + show_parser.add_argument("downtime_id", help="downtime to show") + show_parser.set_defaults(func=cls._show_downtime) + + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all downtimes") + show_all_parser.add_argument( + "--current_only", help="only return downtimes that" " are active when the request is made", default=None + ) + show_all_parser.set_defaults(func=cls._show_all_downtime) + + delete_parser = verb_parsers.add_parser("delete", help="Delete a downtime") + delete_parser.add_argument("downtime_id", help="downtime to delete") + delete_parser.set_defaults(func=cls._cancel_downtime) + + cancel_parser = verb_parsers.add_parser("cancel_by_scope", help="Cancel all downtimes with a given scope") + cancel_parser.add_argument("scope", help="The scope of the downtimes to cancel") + cancel_parser.set_defaults(func=cls._cancel_downtime_by_scope) + + @classmethod + def _schedule_downtime(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.create(scope=args.scope, start=args.start, end=args.end, message=args.message) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update_downtime(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.update( + args.downtime_id, scope=args.scope, start=args.start, end=args.end, message=args.message + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _cancel_downtime(cls, args): + api._timeout = 
args.timeout + res = api.Downtime.delete(args.downtime_id) + if res is not None: + report_warnings(res) + report_errors(res) + + @classmethod + def _show_downtime(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.get(args.downtime_id) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all_downtime(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.get_all(current_only=args.current_only) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _cancel_downtime_by_scope(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Downtime.cancel_downtime_by_scope(scope=args.scope) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py new file mode 100644 index 0000000..89d68a6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py @@ -0,0 +1,201 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import datetime +import time +import re +import sys +import json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +time_pat = re.compile(r"(?P[0-9]*\.?[0-9]+)(?P[mhd])") + + +def prettyprint_event(event): + title = event["title"] or "" + text = event.get("text", "") or "" + handle = event.get("handle", "") or "" + date = event["date_happened"] + dt = datetime.datetime.fromtimestamp(date) + link = event["url"] + + # Print + print((title + " " + text + " " + " (" + handle + ")").strip()) + print(dt.isoformat(" ") + " | " + link) + + +def print_event(event): + prettyprint_event(event) + + +def prettyprint_event_details(event): + prettyprint_event(event) + + +def print_event_details(event): + prettyprint_event(event) + + +def parse_time(timestring): + now = time.mktime(datetime.datetime.now().timetuple()) + if timestring is None: + t = now + else: + try: + t = int(timestring) + except Exception: + match = time_pat.match(timestring) + if match is None: + raise Exception + delta = float(match.group("delta")) + unit = match.group("unit") + if unit == "m": + delta = delta * 60 + if unit == "h": + delta = delta * 60 * 60 + if unit == "d": + delta = delta * 60 * 60 * 24 + t = now - int(delta) + return int(t) + + +class EventClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("event", help="Post events, get event details," " and view the event stream.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post events.") + post_parser.add_argument("title", help="event title") + post_parser.add_argument( + "--date_happened", + type=int, + help="POSIX timestamp" " when the event occurred. if unset defaults to the current time.", + ) + post_parser.add_argument("--handle", help="user to post as. if unset, submits " "as the generic API user.") + post_parser.add_argument("--priority", help='"normal" or "low". 
defaults to "normal"', default="normal") + post_parser.add_argument( + "--related_event_id", help="event to post as a child of." " if unset, posts a top-level event" + ) + post_parser.add_argument("--tags", help="comma separated list of tags") + post_parser.add_argument("--host", help="related host (default to the local host name)", default="") + post_parser.add_argument( + "--no_host", help="no host is associated with the event" " (overrides --host))", action="store_true" + ) + post_parser.add_argument("--device", help="related device (e.g. eth0, /dev/sda1)") + post_parser.add_argument("--aggregation_key", help="key to aggregate the event with") + post_parser.add_argument("--type", help="type of event, e.g. nagios, jenkins, etc.") + post_parser.add_argument("--alert_type", help='"error", "warning", "info" or "success". defaults to "info"') + post_parser.add_argument("message", help="event message body. " "if unset, reads from stdin.", nargs="?") + post_parser.set_defaults(func=cls._post) + + show_parser = verb_parsers.add_parser("show", help="Show event details.") + show_parser.add_argument("event_id", help="event to show") + show_parser.set_defaults(func=cls._show) + + stream_parser = verb_parsers.add_parser( + "stream", + help="Retrieve events from the Event Stream", + description="Stream start and end times can be specified as either a POSIX" + " timestamp (e.g. the output of `date +%s`) or as a period of" + " time in the past (e.g. '5m', '6h', '3d').", + ) + stream_parser.add_argument("start", help="start date for the stream request") + stream_parser.add_argument("end", help="end date for the stream request " "(defaults to 'now')", nargs="?") + stream_parser.add_argument("--priority", help="filter by priority." " 'normal' or 'low'. defaults to 'normal'") + stream_parser.add_argument("--sources", help="comma separated list of sources to filter by") + stream_parser.add_argument("--tags", help="comma separated list of tags to filter by") + stream_parser.set_defaults(func=cls._stream) + + @classmethod + def _post(cls, args): + """ + Post an event. 
+ """ + api._timeout = args.timeout + format = args.format + message = args.message + if message is None: + message = sys.stdin.read() + if args.tags is not None: + tags = [t.strip() for t in args.tags.split(",")] + else: + tags = None + + host = None if args.no_host else args.host + + # Submit event + res = api.Event.create( + title=args.title, + text=message, + date_happened=args.date_happened, + handle=args.handle, + priority=args.priority, + related_event_id=args.related_event_id, + tags=tags, + host=host, + device=args.device, + aggregation_key=args.aggregation_key, + source_type_name=args.type, + alert_type=args.alert_type, + ) + + # Report + report_warnings(res) + report_errors(res) + if format == "pretty": + prettyprint_event(res["event"]) + elif format == "raw": + print(json.dumps(res)) + else: + print_event(res["event"]) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Event.get(args.event_id) + report_warnings(res) + report_errors(res) + if format == "pretty": + prettyprint_event_details(res["event"]) + elif format == "raw": + print(json.dumps(res)) + else: + print_event_details(res["event"]) + + @classmethod + def _stream(cls, args): + api._timeout = args.timeout + format = args.format + if args.sources is not None: + sources = [s.strip() for s in args.sources.split(",")] + else: + sources = None + if args.tags is not None: + tags = [t.strip() for t in args.tags.split(",")] + else: + tags = None + start = parse_time(args.start) + end = parse_time(args.end) + # res = api.Event.query(start=start, end=end) + # TODO FIXME + res = api.Event.query(start=start, end=end, priority=args.priority, sources=sources, tags=tags) + report_warnings(res) + report_errors(res) + if format == "pretty": + for event in res["events"]: + prettyprint_event(event) + print() + elif format == "raw": + print(json.dumps(res)) + else: + for event in res["events"]: + print_event(event) + print() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py new file mode 100644 index 0000000..1f93a78 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py @@ -0,0 +1,61 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class HostClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("host", help="Mute, unmute hosts") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + mute_parser = verb_parsers.add_parser("mute", help="Mute a host") + mute_parser.add_argument("host_name", help="host to mute") + mute_parser.add_argument( + "--end", help="POSIX timestamp, if omitted," " host will be muted until explicitly unmuted", default=None + ) + mute_parser.add_argument("--message", help="string to associate with the" " muting of this host", default=None) + mute_parser.add_argument( + "--override", + help="true/false, if true and the host is already" " muted, will overwrite existing end on the host", + action="store_true", + ) + mute_parser.set_defaults(func=cls._mute) + + unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a host") + unmute_parser.add_argument("host_name", help="host to mute") + unmute_parser.set_defaults(func=cls._unmute) + + @classmethod + def _mute(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Host.mute(args.host_name, end=args.end, message=args.message, override=args.override) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _unmute(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Host.unmute(args.host_name) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py new file mode 100644 index 0000000..135e4b9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py @@ -0,0 +1,72 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +from collections import defaultdict + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class MetricClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("metric", help="Post metrics.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Post metrics") + post_parser.add_argument("name", help="metric name") + post_parser.add_argument("value", help="metric value (integer or decimal value)", type=float) + post_parser.add_argument( + "--host", help="scopes your metric to a specific host " "(default to the local host name)", default="" + ) + post_parser.add_argument( + "--no_host", help="no host is associated with the metric" " (overrides --host))", action="store_true" + ) + post_parser.add_argument("--device", help="scopes your metric to a specific device", default=None) + post_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + post_parser.add_argument( + "--localhostname", + help="deprecated, used to force `--host`" + " to the local hostname " + "(now default when no `--host` is specified)", + action="store_true", + ) + post_parser.add_argument( + "--type", help="type of the metric - gauge(32bit float)" " or counter(64bit integer)", default=None + ) + parser.set_defaults(func=cls._post) + + @classmethod + def _post(cls, args): + """ + Post a metric. + """ + # Format parameters + api._timeout = args.timeout + + host = None if args.no_host else args.host + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t])) + else: + tags = None + + # Submit metric + res = api.Metric.send( + metric=args.name, points=args.value, host=host, device=args.device, tags=tags, metric_type=args.type + ) + + # Report + res = defaultdict(list, res) + + if args.localhostname: + # Warn about`--localhostname` command line flag deprecation + res["warnings"].append( + u"`--localhostname` command line flag is deprecated, made default when no `--host` " + u"is specified. See the `--host` option for more information." + ) + report_warnings(res) + report_errors(res) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py new file mode 100644 index 0000000..ddc207e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py @@ -0,0 +1,431 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import argparse +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err + + +class MonitorClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("monitor", help="Create, edit, and delete monitors") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent monitor IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create a monitor") + post_parser.add_argument("type", help="type of the monitor, e.g." "'metric alert' 'service check'") + post_parser.add_argument( + "query", help="query to notify on with syntax varying " "depending on what type of monitor you are creating" + ) + post_parser.add_argument("--name", help="name of the alert", default=None) + post_parser.add_argument( + "--message", help="message to include with notifications" " for this monitor", default=None + ) + post_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + post_parser.add_argument( + "--priority", + help="Integer from 1 (high) to 5 (low) indicating alert severity.", + default=None + ) + post_parser.add_argument("--options", help="json options for the monitor", default=None) + post_parser.set_defaults(func=cls._post) + + file_post_parser = verb_parsers.add_parser("fpost", help="Create a monitor from file") + file_post_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_post_parser.set_defaults(func=cls._file_post) + + update_parser = verb_parsers.add_parser("update", help="Update existing monitor") + update_parser.add_argument("monitor_id", help="monitor to replace with the new definition") + update_parser.add_argument( + "type", + nargs="?", + help="[Deprecated] optional argument preferred" "type of the monitor, e.g. 'metric alert' 'service check'", + default=None, + ) + update_parser.add_argument( + "query", + nargs="?", + help="[Deprecated] optional argument preferred" + "query to notify on with syntax varying depending on monitor type", + default=None, + ) + update_parser.add_argument( + "--type", help="type of the monitor, e.g. 
" "'metric alert' 'service check'", default=None, dest="type_opt" + ) + update_parser.add_argument( + "--query", + help="query to notify on with syntax varying" " depending on monitor type", + default=None, + dest="query_opt", + ) + update_parser.add_argument("--name", help="name of the alert", default=None) + update_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + update_parser.add_argument( + "--message", help="message to include with " "notifications for this monitor", default=None + ) + update_parser.add_argument( + "--priority", + help="Integer from 1 (high) to 5 (low) indicating alert severity.", + default=None + ) + update_parser.add_argument("--options", help="json options for the monitor", default=None) + update_parser.set_defaults(func=cls._update) + + file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing" " monitor from file") + file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_update_parser.set_defaults(func=cls._file_update) + + show_parser = verb_parsers.add_parser("show", help="Show a monitor definition") + show_parser.add_argument("monitor_id", help="monitor to show") + show_parser.set_defaults(func=cls._show) + + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all monitors") + show_all_parser.add_argument( + "--group_states", + help="comma separated list of group states to filter by" + "(choose one or more from 'all', 'alert', 'warn', or 'no data')", + ) + show_all_parser.add_argument("--name", help="string to filter monitors by name") + show_all_parser.add_argument( + "--tags", + help="comma separated list indicating what tags, if any, " + "should be used to filter the list of monitors by scope (e.g. 'host:host0')", + ) + show_all_parser.add_argument( + "--monitor_tags", + help="comma separated list indicating what service " + "and/or custom tags, if any, should be used to filter the list of monitors", + ) + + show_all_parser.set_defaults(func=cls._show_all) + + delete_parser = verb_parsers.add_parser("delete", help="Delete a monitor") + delete_parser.add_argument("monitor_id", help="monitor to delete") + delete_parser.set_defaults(func=cls._delete) + + mute_all_parser = verb_parsers.add_parser("mute_all", help="Globally mute " "monitors (downtime over *)") + mute_all_parser.set_defaults(func=cls._mute_all) + + unmute_all_parser = verb_parsers.add_parser( + "unmute_all", help="Globally unmute " "monitors (cancel downtime over *)" + ) + unmute_all_parser.set_defaults(func=cls._unmute_all) + + mute_parser = verb_parsers.add_parser("mute", help="Mute a monitor") + mute_parser.add_argument("monitor_id", help="monitor to mute") + mute_parser.add_argument("--scope", help="scope to apply the mute to," " e.g. role:db (optional)", default=[]) + mute_parser.add_argument( + "--end", help="POSIX timestamp for when" " the mute should end (optional)", default=None + ) + mute_parser.set_defaults(func=cls._mute) + + unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a monitor") + unmute_parser.add_argument("monitor_id", help="monitor to unmute") + unmute_parser.add_argument("--scope", help="scope to unmute (must be muted), " "e.g. 
role:db", default=[]) + unmute_parser.add_argument("--all_scopes", help="clear muting across all scopes", action="store_true") + unmute_parser.set_defaults(func=cls._unmute) + + can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if you can delete some monitors") + can_delete_parser.add_argument("monitor_ids", help="monitors to check if they can be deleted") + can_delete_parser.set_defaults(func=cls._can_delete) + + validate_parser = verb_parsers.add_parser("validate", help="Validates if a monitor definition is correct") + validate_parser.add_argument("type", help="type of the monitor, e.g." "'metric alert' 'service check'") + validate_parser.add_argument("query", help="the monitor query") + validate_parser.add_argument("--name", help="name of the alert", default=None) + validate_parser.add_argument( + "--message", help="message to include with notifications" " for this monitor", default=None + ) + validate_parser.add_argument("--tags", help="comma-separated list of tags", default=None) + validate_parser.add_argument("--options", help="json options for the monitor", default=None) + validate_parser.set_defaults(func=cls._validate) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + options = None + if args.options is not None: + options = json.loads(args.options) + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None + + body = { + "type": args.type, + "query": args.query, + "name": args.name, + "message": args.message, + "options": options + } + if tags: + body["tags"] = tags + if args.priority: + body["priority"] = args.priority + + res = api.Monitor.create(**body) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_post(cls, args): + api._timeout = args.timeout + format = args.format + monitor = json.load(args.file) + body = { + "type": monitor["type"], + "query": monitor["query"], + "name": monitor["name"], + "message": monitor["message"], + "options": monitor["options"] + } + tags = monitor.get("tags", None) + if tags: + body["tags"] = tags + priority = monitor.get("priority", None) + if priority: + body["priority"] = priority + + res = api.Monitor.create(**body) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + + to_update = {} + if args.type: + if args.type_opt: + msg = "Duplicate arguments for `type`. Using optional value --type" + print_err("WARNING: {}".format(msg)) + else: + to_update["type"] = args.type + msg = "[DEPRECATION] `type` is no longer required to `update` and may be omitted" + print_err("WARNING: {}".format(msg)) + if args.query: + if args.query_opt: + msg = "Duplicate arguments for `query`. 
Using optional value --query" + print_err("WARNING: {}".format(msg)) + else: + to_update["query"] = args.query + msg = "[DEPRECATION] `query` is no longer required to `update` and may be omitted" + print_err("WARNING: {}".format(msg)) + if args.name: + to_update["name"] = args.name + if args.message: + to_update["message"] = args.message + if args.type_opt: + to_update["type"] = args.type_opt + if args.query_opt: + to_update["query"] = args.query_opt + if args.tags: + to_update["tags"] = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + if args.priority: + to_update["priority"] = args.priority + + if args.options is not None: + to_update["options"] = json.loads(args.options) + + res = api.Monitor.update(args.monitor_id, **to_update) + + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_update(cls, args): + api._timeout = args.timeout + format = args.format + monitor = json.load(args.file) + body = { + "type": monitor["type"], + "query": monitor["query"], + "name": monitor["name"], + "message": monitor["message"], + "options": monitor["options"] + } + tags = monitor.get("tags", None) + if tags: + body["tags"] = tags + priority = monitor.get("priority", None) + if priority: + body["priority"] = priority + + res = api.Monitor.update(monitor["id"], **body) + + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Monitor.get(args.monitor_id) + report_warnings(res) + report_errors(res) + + if args.string_ids: + res["id"] = str(res["id"]) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + + res = api.Monitor.get_all( + group_states=args.group_states, name=args.name, tags=args.tags, monitor_tags=args.monitor_tags + ) + report_warnings(res) + report_errors(res) + + if args.string_ids: + for d in res: + d["id"] = str(d["id"]) + + if format == "pretty": + print(pretty_json(res)) + elif format == "raw": + print(json.dumps(res)) + else: + for d in res: + print( + "\t".join( + [ + (str(d["id"])), + (cls._escape(d["message"])), + (cls._escape(d["name"])), + (str(d["options"])), + (str(d["org_id"])), + (d["query"]), + (d["type"]), + ] + ) + ) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + # TODO CHECK + res = api.Monitor.delete(args.monitor_id) + if res is not None: + report_warnings(res) + report_errors(res) + + @classmethod + def _escape(cls, s): + return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") + + @classmethod + def _mute_all(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Monitor.mute_all() + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _unmute_all(cls, args): + api._timeout = args.timeout + res = api.Monitor.unmute_all() + if res is not None: + report_warnings(res) + report_errors(res) + + @classmethod + def _mute(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Monitor.mute(args.monitor_id, scope=args.scope, end=args.end) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + 
@classmethod + def _unmute(cls, args): + api._timeout = args.timeout + res = api.Monitor.unmute(args.monitor_id, scope=args.scope, all_scopes=args.all_scopes) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _can_delete(cls, args): + api._timeout = args.timeout + monitor_ids = [i.strip() for i in args.monitor_ids.split(",") if i.strip()] + res = api.Monitor.can_delete(monitor_ids=monitor_ids) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _validate(cls, args): + api._timeout = args.timeout + format = args.format + options = None + if args.options is not None: + options = json.loads(args.options) + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None + + res = api.Monitor.validate( + type=args.type, query=args.query, name=args.name, message=args.message, tags=tags, options=options + ) + # report_warnings(res) + # report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py new file mode 100644 index 0000000..093a3e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py @@ -0,0 +1,308 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import argparse +import json +import platform +import sys +import webbrowser + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err +from datetime import datetime + + +class ScreenboardClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("screenboard", help="Create, edit, and delete screenboards.") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent screenboard IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create screenboards.") + post_parser.add_argument("title", help="title for the new screenboard") + post_parser.add_argument("description", help="short description of the screenboard") + post_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) + post_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. 
" + "[{'name': 'host', 'prefix': 'host', 'default': 'host:my-host'}]", + ) + post_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels") + post_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels") + post_parser.set_defaults(func=cls._post) + + update_parser = verb_parsers.add_parser("update", help="Update existing screenboards.") + update_parser.add_argument("screenboard_id", help="screenboard to replace " " with the new definition") + update_parser.add_argument("title", help="title for the new screenboard") + update_parser.add_argument("description", help="short description of the screenboard") + update_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) + update_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. " + "[{'name': 'host', 'prefix': 'host', 'default': " + "'host:my-host'}]", + ) + update_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels") + update_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels") + update_parser.set_defaults(func=cls._update) + + show_parser = verb_parsers.add_parser("show", help="Show a screenboard definition.") + show_parser.add_argument("screenboard_id", help="screenboard to show") + show_parser.set_defaults(func=cls._show) + + delete_parser = verb_parsers.add_parser("delete", help="Delete a screenboard.") + delete_parser.add_argument("screenboard_id", help="screenboard to delete") + delete_parser.set_defaults(func=cls._delete) + + share_parser = verb_parsers.add_parser("share", help="Share an existing screenboard's" " with a public URL.") + share_parser.add_argument("screenboard_id", help="screenboard to share") + share_parser.set_defaults(func=cls._share) + + revoke_parser = verb_parsers.add_parser("revoke", help="Revoke an existing screenboard's" " with a public URL.") + revoke_parser.add_argument("screenboard_id", help="screenboard to revoke") + revoke_parser.set_defaults(func=cls._revoke) + + pull_parser = verb_parsers.add_parser("pull", help="Pull a screenboard on the server" " into a local file") + pull_parser.add_argument("screenboard_id", help="ID of screenboard to pull") + pull_parser.add_argument("filename", help="file to pull screenboard into") + pull_parser.set_defaults(func=cls._pull) + + push_parser = verb_parsers.add_parser( + "push", help="Push updates to screenboards" " from local files to the server" + ) + push_parser.add_argument( + "--append_auto_text", + action="store_true", + dest="append_auto_text", + help="When pushing to the server, appends filename and" + " timestamp to the end of the screenboard description", + ) + push_parser.add_argument( + "file", help="screenboard files to push to the server", nargs="+", type=argparse.FileType("r") + ) + push_parser.set_defaults(func=cls._push) + + new_file_parser = verb_parsers.add_parser( + "new_file", help="Create a new screenboard" " and put its contents in a file" + ) + new_file_parser.add_argument("filename", help="name of file to create with" " empty screenboard") + new_file_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" 
+ ) + new_file_parser.set_defaults(func=cls._new_file) + + @classmethod + def _pull(cls, args): + cls._write_screen_to_file(args.screenboard_id, args.filename, args.timeout, args.format, args.string_ids) + + # TODO Is there a test for this one ? + @classmethod + def _push(cls, args): + api._timeout = args.timeout + for f in args.file: + screen_obj = json.load(f) + + if args.append_auto_text: + datetime_str = datetime.now().strftime("%x %X") + auto_text = "
\nUpdated at {0} from {1} ({2}) on {3}".format( + datetime_str, f.name, screen_obj["id"], platform.node() + ) + screen_obj["description"] += auto_text + + if "id" in screen_obj: + # Always convert to int, in case it was originally a string. + screen_obj["id"] = int(screen_obj["id"]) + res = api.Screenboard.update(**screen_obj) + else: + res = api.Screenboard.create(**screen_obj) + + if "errors" in res: + print_err("Upload of screenboard {0} from file {1} failed.".format(screen_obj["id"], f.name)) + + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + if args.format == "pretty": + print("Uploaded file {0} (screenboard {1})".format(f.name, screen_obj["id"])) + + @classmethod + def _write_screen_to_file(cls, screenboard_id, filename, timeout, format="raw", string_ids=False): + with open(filename, "w") as f: + res = api.Screenboard.get(screenboard_id) + report_warnings(res) + report_errors(res) + + screen_obj = res + if "resource" in screen_obj: + del screen_obj["resource"] + if "url" in screen_obj: + del screen_obj["url"] + + if string_ids: + screen_obj["id"] = str(screen_obj["id"]) + + json.dump(screen_obj, f, indent=2) + + if format == "pretty": + print("Downloaded screenboard {0} to file {1}".format(screenboard_id, filename)) + else: + print("{0} {1}".format(screenboard_id, filename)) + + @classmethod + def _post(cls, args): + graphs = sys.stdin.read() + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + res = api.Screenboard.create( + title=args.title, + description=args.description, + graphs=[graphs], + template_variables=args.template_variables, + width=args.width, + height=args.height, + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + + res = api.Screenboard.update( + args.screenboard_id, + board_title=args.title, + description=args.description, + widgets=graphs, + template_variables=args.template_variables, + width=args.width, + height=args.height, + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _web_view(cls, args): + dash_id = json.load(args.file)["id"] + url = api._api_host + "/dash/dash/{0}".format(dash_id) + webbrowser.open(url) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Screenboard.get(args.screenboard_id) + report_warnings(res) + report_errors(res) + + if args.string_ids: + res["id"] = str(res["id"]) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + # TODO CHECK + res = api.Screenboard.delete(args.screenboard_id) + if res is not None: + report_warnings(res) + report_errors(res) + + @classmethod + def _share(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Screenboard.share(args.screenboard_id) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _revoke(cls, args): + api._timeout = args.timeout + format = args.format + res = 
api.Screenboard.revoke(args.screenboard_id) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _new_file(cls, args): + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + res = api.Screenboard.create( + board_title=args.filename, description="Description for {0}".format(args.filename), widgets=[graphs] + ) + report_warnings(res) + report_errors(res) + + cls._write_screen_to_file(res["id"], args.filename, args.timeout, format, args.string_ids) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + +def _template_variables(tpl_var_input): + if "[" not in tpl_var_input: + return [v.strip() for v in tpl_var_input.split(",")] + else: + try: + return json.loads(tpl_var_input) + except Exception: + raise argparse.ArgumentTypeError("bad template_variable json parameter") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py new file mode 100644 index 0000000..9c1cb47 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py @@ -0,0 +1,43 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +# TODO IS there a test ? +class SearchClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("search", help="search datadog") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + query_parser = verb_parsers.add_parser("query", help="Search datadog.") + query_parser.add_argument("query", help="optionally faceted search query") + query_parser.set_defaults(func=cls._query) + + @classmethod + def _query(cls, args): + api._timeout = args.timeout + res = api.Infrastructure.search(q=args.query) + report_warnings(res) + report_errors(res) + if format == "pretty": + for facet, results in list(res["results"].items()): + for idx, result in enumerate(results): + if idx == 0: + print("\n") + print("%s\t%s" % (facet, result)) + else: + print("%s\t%s" % (" " * len(facet), result)) + elif format == "raw": + print(json.dumps(res)) + else: + for facet, results in list(res["results"].items()): + for result in results: + print("%s\t%s" % (facet, result)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py new file mode 100644 index 0000000..b30f33c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py @@ -0,0 +1,55 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# 3p +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class ServiceCheckClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("service_check", help="Perform service checks") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + check_parser = verb_parsers.add_parser("check", help="text for the message") + check_parser.add_argument("check", help="text for the message") + check_parser.add_argument("host_name", help="name of the host submitting the check") + check_parser.add_argument( + "status", + help="integer for the status of the check." " i.e: '0': OK, '1': WARNING, '2': CRITICAL, '3': UNKNOWN", + ) + check_parser.add_argument("--timestamp", help="POSIX timestamp of the event", default=None) + check_parser.add_argument("--message", help="description of why this status occurred", default=None) + check_parser.add_argument("--tags", help="comma separated list of tags", default=None) + check_parser.set_defaults(func=cls._check) + + @classmethod + def _check(cls, args): + api._timeout = args.timeout + format = args.format + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + else: + tags = None + res = api.ServiceCheck.check( + check=args.check, + host_name=args.host_name, + status=int(args.status), + timestamp=args.timestamp, + message=args.message, + tags=tags, + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py new file mode 100644 index 0000000..13ec928 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py @@ -0,0 +1,426 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import argparse +import json + +# 3p +from datadog.util.cli import ( + set_of_ints, + comma_set, + comma_list_or_empty, + parse_date_as_epoch_timestamp, +) +from datadog.util.format import pretty_json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class ServiceLevelObjectiveClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser( + "service_level_objective", + help="Create, edit, and delete service level objectives", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + create_parser = verb_parsers.add_parser("create", help="Create a SLO") + create_parser.add_argument( + "--type", + help="type of the SLO, e.g.", + choices=["metric", "monitor"], + ) + create_parser.add_argument("--name", help="name of the SLO", default=None) + create_parser.add_argument("--description", help="description of the SLO", default=None) + create_parser.add_argument( + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, + ) + create_parser.add_argument( + "--thresholds", + help="comma separated list of :[:[:[:]]", + ) + create_parser.add_argument( + "--numerator", + help="numerator metric query (sum of good events)", + default=None, + ) + create_parser.add_argument( + "--denominator", + help="denominator metric query (sum of total events)", + default=None, + ) + create_parser.add_argument( + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=None, + type=set_of_ints, + ) + create_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None) + create_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + default=None, + type=comma_list_or_empty, + ) + create_parser.set_defaults(func=cls._create) + + file_create_parser = verb_parsers.add_parser("fcreate", help="Create a SLO from file") + file_create_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_create_parser.set_defaults(func=cls._file_create) + + update_parser = verb_parsers.add_parser("update", help="Update existing SLO") + update_parser.add_argument("slo_id", help="SLO to replace with the new definition") + update_parser.add_argument( + "--type", + help="type of the SLO (must specify it's original type)", + choices=["metric", "monitor"], + ) + update_parser.add_argument("--name", help="name of the SLO", default=None) + update_parser.add_argument("--description", help="description of the SLO", default=None) + update_parser.add_argument( + "--thresholds", + help="comma separated list of :[:[:[:]]", + ) + update_parser.add_argument( + "--tags", + help="comma-separated list of tags", + default=None, + type=comma_list_or_empty, + ) + update_parser.add_argument( + "--numerator", + help="numerator metric query (sum of good events)", + default=None, + ) + update_parser.add_argument( + "--denominator", + help="denominator metric query (sum of total events)", + default=None, + ) + update_parser.add_argument( + "--monitor_ids", + help="explicit monitor_ids to use (CSV)", + default=[], + type=list, + ) + update_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None) + update_parser.add_argument( + "--groups", + help="for a single monitor you can specify the specific groups as a pipe (|) delimited string", + 
default=None, + ) + update_parser.set_defaults(func=cls._update) + + file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing SLO from file") + file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r")) + file_update_parser.set_defaults(func=cls._file_update) + + show_parser = verb_parsers.add_parser("show", help="Show a SLO definition") + show_parser.add_argument("slo_id", help="SLO to show") + show_parser.set_defaults(func=cls._show) + + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all SLOs") + show_all_parser.add_argument("--query", help="string to filter SLOs by query (see UI or documentation)") + show_all_parser.add_argument( + "--slo_ids", + help="comma separated list indicating what SLO IDs to get at once", + type=comma_set, + ) + show_all_parser.add_argument("--offset", help="offset of query pagination", default=0) + show_all_parser.add_argument("--limit", help="limit of query pagination", default=100) + show_all_parser.set_defaults(func=cls._show_all) + + delete_parser = verb_parsers.add_parser("delete", help="Delete a SLO") + delete_parser.add_argument("slo_id", help="SLO to delete") + delete_parser.set_defaults(func=cls._delete) + + delete_many_parser = verb_parsers.add_parser("delete_many", help="Delete a SLO") + delete_many_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set) + delete_many_parser.set_defaults(func=cls._delete_many) + + delete_timeframe_parser = verb_parsers.add_parser("delete_many_timeframe", help="Delete a SLO timeframe") + delete_timeframe_parser.add_argument("slo_id", help="SLO ID to update") + delete_timeframe_parser.add_argument( + "timeframes", + help="CSV of timeframes to delete, e.g. 
7d,30d,90d", + type=comma_set, + ) + delete_timeframe_parser.set_defaults(func=cls._delete_timeframe) + + can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if can delete SLOs") + can_delete_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set) + can_delete_parser.set_defaults(func=cls._can_delete) + + history_parser = verb_parsers.add_parser("history", help="Get the SLO history") + history_parser.add_argument("slo_id", help="SLO to query the history") + history_parser.add_argument( + "from_ts", + type=parse_date_as_epoch_timestamp, + help="`from` date or timestamp", + ) + history_parser.add_argument( + "to_ts", + type=parse_date_as_epoch_timestamp, + help="`to` date or timestamp", + ) + history_parser.set_defaults(func=cls._history) + + @classmethod + def _create(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"type": args.type, "name": args.name} + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()])) + params["tags"] = tags + + thresholds = [] + for threshold_str in args.thresholds.split(","): + parts = threshold_str.split(":") + timeframe = parts[0] + target = float(parts[1]) + + threshold = {"timeframe": timeframe, "target": target} + + if len(parts) > 2: + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) + params["thresholds"] = thresholds + + if args.description: + params["description"] = args.description + + if args.type == "metric": + params["query"] = { + "numerator": args.numerator, + "denominator": args.denominator, + } + elif args.monitor_search: + params["monitor_search"] = args.monitor_search + else: + params["monitor_ids"] = list(args.monitor_ids) + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + params["groups"] = groups + + if args.tags: + params["tags"] = args.tags + + res = api.ServiceLevelObjective.create(**params) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_create(cls, args): + api._timeout = args.timeout + format = args.format + slo = json.load(args.file) + res = api.ServiceLevelObjective.create(return_raw=True, **slo) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"type": args.type} + + if args.thresholds: + thresholds = [] + for threshold_str in args.thresholds.split(","): + parts = threshold_str.split(":") + timeframe = parts[0] + target = parts[1] + + threshold = {"timeframe": timeframe, "target": target} + + if len(parts) > 2: + threshold["warning"] = float(parts[2]) + + if len(parts) > 3 and parts[3]: + threshold["target_display"] = parts[3] + + if len(parts) > 4 and parts[4]: + threshold["warning_display"] = parts[4] + + thresholds.append(threshold) + params["thresholds"] = thresholds + + if args.description: + params["description"] = args.description + + if args.type == "metric": + if args.numerator and args.denominator: + params["query"] = { + "numerator": args.numerator, + "denominator": args.denominator, + } + elif args.monitor_search: + params["monitor_search"] = args.monitor_search + else: + params["monitor_ids"] = 
args.monitor_ids + if args.groups and len(args.monitor_ids) == 1: + groups = args.groups.split("|") + params["groups"] = groups + + if args.tags: + tags = sorted(set([t.strip() for t in args.tags if t.strip()])) + params["tags"] = tags + res = api.ServiceLevelObjective.update(args.slo_id, return_raw=True, **params) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _file_update(cls, args): + api._timeout = args.timeout + format = args.format + slo = json.load(args.file) + + res = api.ServiceLevelObjective.update(slo["id"], return_raw=True, **slo) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.ServiceLevelObjective.get(args.slo_id, return_raw=True) + report_warnings(res) + report_errors(res) + + if args.string_ids: + res["id"] = str(res["id"]) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + + params = {"offset": args.offset, "limit": args.limit} + if args.query: + params["query"] = args.query + else: + params["ids"] = args.slo_ids + + res = api.ServiceLevelObjective.get_all(return_raw=True, **params) + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + res = api.ServiceLevelObjective.delete(args.slo_id, return_raw=True) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_many(cls, args): + api._timeout = args.timeout + res = api.ServiceLevelObjective.delete_many(args.slo_ids) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _delete_timeframe(cls, args): + api._timeout = args.timeout + + ops = {args.slo_id: args.timeframes} + + res = api.ServiceLevelObjective.bulk_delete(ops) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _can_delete(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.can_delete(args.slo_ids) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _history(cls, args): + api._timeout = args.timeout + + res = api.ServiceLevelObjective.history(args.slo_id) + if res is not None: + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _escape(cls, s): + return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py new file mode 100644 index 0000000..3d4d2b9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py @@ -0,0 +1,120 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. 
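The `--thresholds` value parsed by `_create` and `_update` above is a comma separated list of `timeframe:target[:warning[:target_display[:warning_display]]]` entries (the placeholder names appear to have been stripped from the help strings); a small standalone sketch of that parsing, following the same logic as `_create` (the helper name is hypothetical):

def parse_thresholds(spec):
    # "7d:99.9:99.95,30d:99.5" -> list of threshold dicts, as built in _create above
    thresholds = []
    for entry in spec.split(","):
        parts = entry.split(":")
        threshold = {"timeframe": parts[0], "target": float(parts[1])}
        if len(parts) > 2:
            threshold["warning"] = float(parts[2])
        if len(parts) > 3 and parts[3]:
            threshold["target_display"] = parts[3]
        if len(parts) > 4 and parts[4]:
            threshold["warning_display"] = parts[4]
        thresholds.append(threshold)
    return thresholds

print(parse_thresholds("7d:99.9:99.95"))
# [{'timeframe': '7d', 'target': 99.9, 'warning': 99.95}]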
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings + + +class TagClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("tag", help="View and modify host tags.") + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + add_parser = verb_parsers.add_parser( + "add", help="Add a host to one or more tags.", description="Hosts can be specified by name or id." + ) + add_parser.add_argument("host", help="host to add") + add_parser.add_argument("tag", help="tag to add host to (one or more, space separated)", nargs="+") + add_parser.set_defaults(func=cls._add) + + replace_parser = verb_parsers.add_parser( + "replace", + help="Replace all tags with one or more new tags.", + description="Hosts can be specified by name or id.", + ) + replace_parser.add_argument("host", help="host to modify") + replace_parser.add_argument("tag", help="list of tags to add host to", nargs="+") + replace_parser.set_defaults(func=cls._replace) + + show_parser = verb_parsers.add_parser( + "show", help="Show host tags.", description="Hosts can be specified by name or id." + ) + show_parser.add_argument("host", help="host to show (or 'all' to show all tags)") + show_parser.set_defaults(func=cls._show) + + detach_parser = verb_parsers.add_parser( + "detach", help="Remove a host from all tags.", description="Hosts can be specified by name or id." + ) + detach_parser.add_argument("host", help="host to detach") + detach_parser.set_defaults(func=cls._detach) + + @classmethod + def _add(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Tag.create(args.host, tags=args.tag) + report_warnings(res) + report_errors(res) + if format == "pretty": + print("Tags for '%s':" % res["host"]) + for c in res["tags"]: + print(" " + c) + elif format == "raw": + print(json.dumps(res)) + else: + for c in res["tags"]: + print(c) + + @classmethod + def _replace(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Tag.update(args.host, tags=args.tag) + report_warnings(res) + report_errors(res) + if format == "pretty": + print("Tags for '%s':" % res["host"]) + for c in res["tags"]: + print(" " + c) + elif format == "raw": + print(json.dumps(res)) + else: + for c in res["tags"]: + print(c) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + if args.host == "all": + res = api.Tag.get_all() + else: + res = api.Tag.get(args.host) + report_warnings(res) + report_errors(res) + if args.host == "all": + if format == "pretty": + for tag, hosts in list(res["tags"].items()): + for host in hosts: + print(tag) + print(" " + host) + print() + elif format == "raw": + print(json.dumps(res)) + else: + for tag, hosts in list(res["tags"].items()): + for host in hosts: + print(tag + "\t" + host) + else: + if format == "pretty": + for tag in res["tags"]: + print(tag) + elif format == "raw": + print(json.dumps(res)) + else: + for tag in res["tags"]: + print(tag) + + @classmethod + def _detach(cls, args): + api._timeout = args.timeout + res = api.Tag.delete(args.host) + if res is not None: + report_warnings(res) + report_errors(res) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py new file mode 100644 
index 0000000..477a1b6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py @@ -0,0 +1,358 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import os.path +import platform +import sys +import webbrowser + +# 3p +import argparse + +# datadog +from datadog import api +from datadog.dogshell.common import report_errors, report_warnings, print_err +from datadog.util.format import pretty_json +from datetime import datetime + + +class TimeboardClient(object): + @classmethod + def setup_parser(cls, subparsers): + parser = subparsers.add_parser("timeboard", help="Create, edit, and delete timeboards") + parser.add_argument( + "--string_ids", + action="store_true", + dest="string_ids", + help="Represent timeboard IDs as strings instead of ints in JSON", + ) + + verb_parsers = parser.add_subparsers(title="Verbs", dest="verb") + verb_parsers.required = True + + post_parser = verb_parsers.add_parser("post", help="Create timeboards") + post_parser.add_argument("title", help="title for the new timeboard") + post_parser.add_argument("description", help="short description of the timeboard") + post_parser.add_argument( + "graphs", help="graph definitions as a JSON string. if unset," " reads from stdin.", nargs="?" + ) + post_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. " + "[{'name': 'host', 'prefix': 'host', " + "'default': 'host:my-host'}]'", + ) + + post_parser.set_defaults(func=cls._post) + + update_parser = verb_parsers.add_parser("update", help="Update existing timeboards") + update_parser.add_argument("timeboard_id", help="timeboard to replace" " with the new definition") + update_parser.add_argument("title", help="new title for the timeboard") + update_parser.add_argument("description", help="short description of the timeboard") + update_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin", nargs="?" + ) + update_parser.add_argument( + "--template_variables", + type=_template_variables, + default=[], + help="a json list of template variable dicts, e.g. 
" + "[{'name': 'host', 'prefix': 'host', " + "'default': 'host:my-host'}]'", + ) + update_parser.set_defaults(func=cls._update) + + show_parser = verb_parsers.add_parser("show", help="Show a timeboard definition") + show_parser.add_argument("timeboard_id", help="timeboard to show") + show_parser.set_defaults(func=cls._show) + + show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all timeboards") + show_all_parser.set_defaults(func=cls._show_all) + + pull_parser = verb_parsers.add_parser("pull", help="Pull a timeboard on the server" " into a local file") + pull_parser.add_argument("timeboard_id", help="ID of timeboard to pull") + pull_parser.add_argument("filename", help="file to pull timeboard into") + pull_parser.set_defaults(func=cls._pull) + + pull_all_parser = verb_parsers.add_parser("pull_all", help="Pull all timeboards" " into files in a directory") + pull_all_parser.add_argument("pull_dir", help="directory to pull timeboards into") + pull_all_parser.set_defaults(func=cls._pull_all) + + push_parser = verb_parsers.add_parser( + "push", help="Push updates to timeboards" " from local files to the server" + ) + push_parser.add_argument( + "--append_auto_text", + action="store_true", + dest="append_auto_text", + help="When pushing to the server, appends filename" + " and timestamp to the end of the timeboard description", + ) + push_parser.add_argument( + "file", help="timeboard files to push to the server", nargs="+", type=argparse.FileType("r") + ) + push_parser.set_defaults(func=cls._push) + + new_file_parser = verb_parsers.add_parser( + "new_file", help="Create a new timeboard" " and put its contents in a file" + ) + new_file_parser.add_argument("filename", help="name of file to create with empty timeboard") + new_file_parser.add_argument( + "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?" + ) + new_file_parser.set_defaults(func=cls._new_file) + + web_view_parser = verb_parsers.add_parser("web_view", help="View the timeboard in a web browser") + web_view_parser.add_argument("file", help="timeboard file", type=argparse.FileType("r")) + web_view_parser.set_defaults(func=cls._web_view) + + delete_parser = verb_parsers.add_parser("delete", help="Delete timeboards") + delete_parser.add_argument("timeboard_id", help="timeboard to delete") + delete_parser.set_defaults(func=cls._delete) + + @classmethod + def _pull(cls, args): + cls._write_dash_to_file(args.timeboard_id, args.filename, args.timeout, args.format, args.string_ids) + + @classmethod + def _pull_all(cls, args): + api._timeout = args.timeout + + def _title_to_filename(title): + # Get a lowercased version with most punctuation stripped out... 
+ no_punct = "".join([c for c in title.lower() if c.isalnum() or c in [" ", "_", "-"]]) + # Now replace all -'s, _'s and spaces with "_", and strip trailing _ + return no_punct.replace(" ", "_").replace("-", "_").strip("_") + + format = args.format + res = api.Timeboard.get_all() + report_warnings(res) + report_errors(res) + + if not os.path.exists(args.pull_dir): + os.mkdir(args.pull_dir, 0o755) + + used_filenames = set() + for dash_summary in res["dashes"]: + filename = _title_to_filename(dash_summary["title"]) + if filename in used_filenames: + filename = filename + "-" + dash_summary["id"] + used_filenames.add(filename) + + cls._write_dash_to_file( + dash_summary["id"], + os.path.join(args.pull_dir, filename + ".json"), + args.timeout, + format, + args.string_ids, + ) + if format == "pretty": + print( + ("\n### Total: {0} dashboards to {1} ###".format(len(used_filenames), os.path.realpath(args.pull_dir))) + ) + + @classmethod + def _new_file(cls, args): + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + res = api.Timeboard.create( + title=args.filename, description="Description for {0}".format(args.filename), graphs=[graphs] + ) + + report_warnings(res) + report_errors(res) + + cls._write_dash_to_file(res["dash"]["id"], args.filename, args.timeout, format, args.string_ids) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _write_dash_to_file(cls, dash_id, filename, timeout, format="raw", string_ids=False): + with open(filename, "w") as f: + res = api.Timeboard.get(dash_id) + report_warnings(res) + report_errors(res) + + dash_obj = res["dash"] + if "resource" in dash_obj: + del dash_obj["resource"] + if "url" in dash_obj: + del dash_obj["url"] + + if string_ids: + dash_obj["id"] = str(dash_obj["id"]) + + if not dash_obj.get("template_variables"): + dash_obj.pop("template_variables", None) + + json.dump(dash_obj, f, indent=2) + + if format == "pretty": + print(u"Downloaded dashboard {0} to file {1}".format(dash_id, filename)) + else: + print(u"{0} {1}".format(dash_id, filename)) + + @classmethod + def _push(cls, args): + api._timeout = args.timeout + for f in args.file: + try: + dash_obj = json.load(f) + except Exception as err: + raise Exception("Could not parse {0}: {1}".format(f.name, err)) + + if args.append_auto_text: + datetime_str = datetime.now().strftime("%x %X") + auto_text = "
\nUpdated at {0} from {1} ({2}) on {3}".format( + datetime_str, f.name, dash_obj["id"], platform.node() + ) + dash_obj["description"] += auto_text + tpl_vars = dash_obj.get("template_variables", []) + + if "id" in dash_obj: + # Always convert to int, in case it was originally a string. + dash_obj["id"] = int(dash_obj["id"]) + res = api.Timeboard.update( + dash_obj["id"], + title=dash_obj["title"], + description=dash_obj["description"], + graphs=dash_obj["graphs"], + template_variables=tpl_vars, + ) + else: + res = api.Timeboard.create( + title=dash_obj["title"], + description=dash_obj["description"], + graphs=dash_obj["graphs"], + template_variables=tpl_vars, + ) + + if "errors" in res: + print_err("Upload of dashboard {0} from file {1} failed.".format(dash_obj["id"], f.name)) + + report_warnings(res) + report_errors(res) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + if args.format == "pretty": + print("Uploaded file {0} (dashboard {1})".format(f.name, dash_obj["id"])) + + @classmethod + def _post(cls, args): + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + res = api.Timeboard.create( + title=args.title, description=args.description, graphs=[graphs], template_variables=args.template_variables + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _update(cls, args): + api._timeout = args.timeout + format = args.format + graphs = args.graphs + if args.graphs is None: + graphs = sys.stdin.read() + graphs = json.loads(graphs) + + res = api.Timeboard.update( + args.timeboard_id, + title=args.title, + description=args.description, + graphs=graphs, + template_variables=args.template_variables, + ) + report_warnings(res) + report_errors(res) + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Timeboard.get(args.timeboard_id) + report_warnings(res) + report_errors(res) + + if args.string_ids: + res["dash"]["id"] = str(res["dash"]["id"]) + + if format == "pretty": + print(pretty_json(res)) + else: + print(json.dumps(res)) + + @classmethod + def _show_all(cls, args): + api._timeout = args.timeout + format = args.format + res = api.Timeboard.get_all() + report_warnings(res) + report_errors(res) + + if args.string_ids: + for d in res["dashes"]: + d["id"] = str(d["id"]) + + if format == "pretty": + print(pretty_json(res)) + elif format == "raw": + print(json.dumps(res)) + else: + for d in res["dashes"]: + print("\t".join([(d["id"]), (d["resource"]), (d["title"]), cls._escape(d["description"])])) + + @classmethod + def _delete(cls, args): + api._timeout = args.timeout + res = api.Timeboard.delete(args.timeboard_id) + if res is not None: + report_warnings(res) + report_errors(res) + + @classmethod + def _web_view(cls, args): + dash_id = json.load(args.file)["id"] + url = api._api_host + "/dash/dash/{0}".format(dash_id) + webbrowser.open(url) + + @classmethod + def _escape(cls, s): + return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") if s else "" + + +def _template_variables(tpl_var_input): + if "[" not in tpl_var_input: + return [v.strip() for v in tpl_var_input.split(",")] + else: + try: + return json.loads(tpl_var_input) + except Exception: + raise argparse.ArgumentTypeError("bad 
template_variable json parameter") diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py new file mode 100644 index 0000000..25df6d9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py @@ -0,0 +1,520 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" + +Wraps shell commands and sends the result to Datadog as events. Ex: + +dogwrap -n test-job -k $API_KEY --submit_mode all "ls -lah" + +Note that you need to enclose your command in quotes to prevent python +from thinking the command line arguments belong to the python command +instead of the wrapped command. + +You can also have the script only send events if they fail: + +dogwrap -n test-job -k $API_KEY --submit_mode errors "ls -lah" + +And you can give the command a timeout too: + +dogwrap -n test-job -k $API_KEY --timeout=1 "sleep 3" + +""" +# stdlib +from __future__ import print_function + +import os +from copy import copy +import optparse +import subprocess +import sys +import threading +import time +import warnings + +# datadog +from datadog import initialize, api, __version__ +from datadog.util.compat import is_p3k + + +SUCCESS = "success" +ERROR = "error" +WARNING = "warning" + +MAX_EVENT_BODY_LENGTH = 3000 + + +class Timeout(Exception): + pass + + +class OutputReader(threading.Thread): + """ + Thread collecting the output of a subprocess, optionally forwarding it to + a given file descriptor and storing it for further retrieval. + """ + + def __init__(self, proc_out, fwd_out=None): + """ + Instantiates an OutputReader. + :param proc_out: the output to read + :type proc_out: file descriptor + :param fwd_out: the output to forward to (None to disable forwarding) + :type fwd_out: file descriptor or None + """ + threading.Thread.__init__(self) + self.daemon = True + self._out_content = b"" + self._out = proc_out + self._fwd_out = fwd_out + + def run(self): + """ + Thread's main loop: collects the output optionnally forwarding it to + the file descriptor passed in the constructor. + """ + for line in iter(self._out.readline, b""): + if self._fwd_out is not None: + self._fwd_out.write(line) + self._out_content += line + self._out.close() + + @property + def content(self): + """ + The content stored in out so far. 
(Not threadsafe, wait with .join()) + """ + return self._out_content + + +def poll_proc(proc, sleep_interval, timeout): + """ + Polls the process until it returns or a given timeout has been reached + """ + start_time = time.time() + returncode = None + while returncode is None: + returncode = proc.poll() + if time.time() - start_time > timeout: + raise Timeout() + else: + time.sleep(sleep_interval) + return returncode + + +def execute(cmd, cmd_timeout, sigterm_timeout, sigkill_timeout, proc_poll_interval, buffer_outs): + """ + Launches the process and monitors its outputs + """ + start_time = time.time() + returncode = -1 + stdout = b"" + stderr = b"" + try: + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + except Exception: + print(u"Failed to execute %s" % (repr(cmd)), file=sys.stderr) + raise + try: + # Let's that the threads collecting the output from the command in the + # background + stdout_buffer = sys.stdout.buffer if is_p3k() else sys.stdout + stderr_buffer = sys.stderr.buffer if is_p3k() else sys.stderr + out_reader = OutputReader(proc.stdout, stdout_buffer if not buffer_outs else None) + err_reader = OutputReader(proc.stderr, stderr_buffer if not buffer_outs else None) + out_reader.start() + err_reader.start() + + # Let's quietly wait from the program's completion here to get the exit + # code when it finishes + returncode = poll_proc(proc, proc_poll_interval, cmd_timeout) + except Timeout: + returncode = Timeout + sigterm_start = time.time() + print("Command timed out after %.2fs, killing with SIGTERM" % (time.time() - start_time), file=sys.stderr) + try: + proc.terminate() + try: + poll_proc(proc, proc_poll_interval, sigterm_timeout) + except Timeout: + print( + "SIGTERM timeout failed after %.2fs, killing with SIGKILL" % (time.time() - sigterm_start), + file=sys.stderr, + ) + sigkill_start = time.time() + proc.kill() + try: + poll_proc(proc, proc_poll_interval, sigkill_timeout) + except Timeout: + print( + "SIGKILL timeout failed after %.2fs, exiting" % (time.time() - sigkill_start), file=sys.stderr + ) + except OSError as e: + # Ignore OSError 3: no process found. + if e.errno != 3: + raise + + # Let's harvest the outputs collected by our background threads + # after making sure they're done reading it. + out_reader.join() + err_reader.join() + stdout = out_reader.content + stderr = err_reader.content + + duration = time.time() - start_time + + return returncode, stdout, stderr, duration + + +def trim_text(text, max_len): + """ + Trim input text to fit the `max_len` condition. + + If trim is needed: keep the first 1/3rd of the budget on the top, + and the other 2 thirds on the bottom. + """ + if len(text) <= max_len: + return text + + trimmed_text = ( + u"{top_third}\n" + u"```\n" + u"*...trimmed...*\n" + u"```\n" + u"{bottom_two_third}\n".format( + top_third=text[: max_len // 3], bottom_two_third=text[len(text) - (2 * max_len) // 3 :] + ) + ) + + return trimmed_text + + +def build_event_body(cmd, returncode, stdout, stderr, notifications): + """ + Format and return an event body. + + Note: do not exceed MAX_EVENT_BODY_LENGTH length. 
+ """ + fmt_stdout = u"" + fmt_stderr = u"" + fmt_notifications = u"" + + max_length = MAX_EVENT_BODY_LENGTH // 2 if stdout and stderr else MAX_EVENT_BODY_LENGTH + + if stdout: + fmt_stdout = u"**>>>> STDOUT <<<<**\n```\n{stdout} \n```\n".format( + stdout=trim_text(stdout.decode("utf-8", "replace"), max_length) + ) + + if stderr: + fmt_stderr = u"**>>>> STDERR <<<<**\n```\n{stderr} \n```\n".format( + stderr=trim_text(stderr.decode("utf-8", "replace"), max_length) + ) + + if notifications: + notifications = notifications.decode("utf-8", "replace") if isinstance(notifications, bytes) else notifications + fmt_notifications = u"**>>>> NOTIFICATIONS <<<<**\n\n {notifications}\n".format(notifications=notifications) + + return ( + u"%%%\n" + u"**>>>> CMD <<<<**\n```\n{command} \n```\n" + u"**>>>> EXIT CODE <<<<**\n\n {returncode}\n\n\n" + u"{stdout}" + u"{stderr}" + u"{notifications}" + u"%%%\n".format( + command=cmd, + returncode=returncode, + stdout=fmt_stdout, + stderr=fmt_stderr, + notifications=fmt_notifications, + ) + ) + + +def generate_warning_codes(option, opt, options_warning): + try: + # options_warning is a string e.g.: --warning_codes 123,456,789 + # we need to create a list from it + warning_codes = options_warning.split(",") + return warning_codes + except ValueError: + raise optparse.OptionValueError("option %s: invalid warning codes value(s): %r" % (opt, options_warning)) + + +class DogwrapOption(optparse.Option): + # https://docs.python.org/3.7/library/optparse.html#adding-new-types + TYPES = optparse.Option.TYPES + ("warning_codes",) + TYPE_CHECKER = copy(optparse.Option.TYPE_CHECKER) + TYPE_CHECKER["warning_codes"] = generate_warning_codes + + +def parse_options(raw_args=None): + """ + Parse the raw command line options into an options object and the remaining command string + """ + parser = optparse.OptionParser( + usage='%prog -n [event_name] -k [api_key] --submit_mode \ +[ all | errors | warnings] [options] "command". \n\nNote that you need to enclose your command in \ +quotes to prevent python executing as soon as there is a space in your command. \n \nNOTICE: In \ +normal mode, the whole stderr is printed before stdout, in flush_live mode they will be mixed but \ +there is not guarantee that messages sent by the command on both stderr and stdout are printed in \ +the order they were sent.', + version="%prog {0}".format(__version__), + option_class=DogwrapOption, + ) + + parser.add_option( + "-n", + "--name", + action="store", + type="string", + help="the name of the event \ +as it should appear on your Datadog stream", + ) + parser.add_option( + "-k", + "--api_key", + action="store", + type="string", + help="your DataDog API Key", + default=os.environ.get("DD_API_KEY"), + ) + parser.add_option( + "-s", + "--site", + action="store", + type="string", + default="datadoghq.com", + help="The site to send data. Accepts us (datadoghq.com), eu (datadoghq.eu), \ +us3 (us3.datadoghq.com), us5 (us5.datadoghq.com), or ap1 (ap1.datadoghq.com), \ +gov (ddog-gov.com), or custom url. default: us", + ) + parser.add_option( + "-m", + "--submit_mode", + action="store", + type="choice", + default="errors", + choices=["errors", "warnings", "all"], + help="[ all | errors | warnings ] if set \ +to error, an event will be sent only of the command exits with a non zero exit status or if it \ +times out. 
If set to warning, a list of exit codes need to be provided", + ) + parser.add_option( + "--warning_codes", + action="store", + type="warning_codes", + dest="warning_codes", + help="comma separated list of warning codes, e.g: 127,255", + ) + parser.add_option( + "-p", + "--priority", + action="store", + type="choice", + choices=["normal", "low"], + help="the priority of the event (default: 'normal')", + ) + parser.add_option( + "-t", + "--timeout", + action="store", + type="int", + default=60 * 60 * 24, + help="(in seconds) a timeout after which your command must be aborted. An \ +event will be sent to your DataDog stream (default: 24hours)", + ) + parser.add_option( + "--sigterm_timeout", + action="store", + type="int", + default=60 * 2, + help="(in seconds) When your command times out, the \ +process it triggers is sent a SIGTERM. If this sigterm_timeout is reached, it will be sent a \ +SIGKILL signal. (default: 2m)", + ) + parser.add_option( + "--sigkill_timeout", + action="store", + type="int", + default=60, + help="(in seconds) how long to wait at most after SIGKILL \ + has been sent (default: 60s)", + ) + parser.add_option( + "--proc_poll_interval", + action="store", + type="float", + default=0.5, + help="(in seconds). interval at which your command will be polled \ +(default: 500ms)", + ) + parser.add_option( + "--notify_success", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ +case of success.", + ) + parser.add_option( + "--notify_error", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ +case of error.", + ) + parser.add_option( + "--notify_warning", + action="store", + type="string", + default="", + help="a message string and @people directives to send notifications in \ + case of warning.", + ) + parser.add_option( + "-b", + "--buffer_outs", + action="store_true", + dest="buffer_outs", + default=False, + help="displays the stderr and stdout of the command only once it has \ +returned (the command outputs remains buffered in dogwrap meanwhile)", + ) + parser.add_option( + "--send_metric", + action="store_true", + dest="send_metric", + default=False, + help="sends a metric for event duration", + ) + parser.add_option( + "--tags", action="store", type="string", dest="tags", default="", help="comma separated list of tags" + ) + + options, args = parser.parse_args(args=raw_args) + + if is_p3k(): + cmd = " ".join(args) + else: + cmd = b" ".join(args).decode("utf-8") + + return options, cmd + + +def main(): + options, cmd = parse_options() + + # If silent is checked we force the outputs to be buffered (and therefore + # not forwarded to the Terminal streams) and we just avoid printing the + # buffers at the end + returncode, stdout, stderr, duration = execute( + cmd, + options.timeout, + options.sigterm_timeout, + options.sigkill_timeout, + options.proc_poll_interval, + options.buffer_outs, + ) + + if options.site in ("datadoghq.com", "us"): + api_host = "https://api.datadoghq.com" + elif options.site in ("datadoghq.eu", "eu"): + api_host = "https://api.datadoghq.eu" + elif options.site in ("us3.datadoghq.com", "us3"): + api_host = "https://api.us3.datadoghq.com" + elif options.site in ("us5.datadoghq.com", "us5"): + api_host = "https://api.us5.datadoghq.com" + elif options.site in ("ap1.datadoghq.com", "ap1"): + api_host = "https://api.ap1.datadoghq.com" + elif options.site in ("ddog-gov.com", "gov"): + api_host = 
"https://api.ddog-gov.com" + else: + api_host = options.site + + initialize(api_key=options.api_key, api_host=api_host) + host = api._host_name + + warning_codes = None + + if options.warning_codes: + # Convert warning codes from string to int since return codes will evaluate the latter + warning_codes = list(map(int, options.warning_codes)) + + if returncode == 0: + alert_type = SUCCESS + event_priority = "low" + event_title = u"[%s] %s succeeded in %.2fs" % (host, options.name, duration) + elif returncode != 0 and options.submit_mode == "warnings": + if not warning_codes: + # the list of warning codes is empty - the option was not specified + print("A comma separated list of exit codes need to be provided") + sys.exit() + elif returncode in warning_codes: + alert_type = WARNING + event_priority = "normal" + event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration) + else: + print("Command exited with a different exit code that the one(s) provided") + sys.exit() + else: + alert_type = ERROR + event_priority = "normal" + + if returncode is Timeout: + event_title = u"[%s] %s timed out after %.2fs" % (host, options.name, duration) + returncode = -1 + else: + event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration) + + notifications = "" + + if alert_type == SUCCESS and options.notify_success: + notifications = options.notify_success + elif alert_type == ERROR and options.notify_error: + notifications = options.notify_error + elif alert_type == WARNING and options.notify_warning: + notifications = options.notify_warning + + if options.tags: + tags = [t.strip() for t in options.tags.split(",")] + else: + tags = None + + event_body = build_event_body(cmd, returncode, stdout, stderr, notifications) + + event = { + "alert_type": alert_type, + "aggregation_key": options.name, + "host": host, + "priority": options.priority or event_priority, + "tags": tags, + } + + if options.buffer_outs: + if is_p3k(): + stderr = stderr.decode("utf-8") + stdout = stdout.decode("utf-8") + + print(stderr.strip(), file=sys.stderr) + print(stdout.strip(), file=sys.stdout) + + if options.submit_mode == "all" or returncode != 0: + if options.send_metric: + event_name_tag = "event_name:{}".format(options.name) + if tags: + duration_tags = tags + [event_name_tag] + else: + duration_tags = [event_name_tag] + api.Metric.send(metric="dogwrap.duration", points=duration, tags=duration_tags, type="gauge") + api.Event.create(title=event_title, text=event_body, **event) + + sys.exit(returncode) + + +if __name__ == "__main__": + if sys.argv[0].endswith("dogwrap"): + warnings.warn("dogwrap is pending deprecation. Please use dogshellwrap instead.", PendingDeprecationWarning) + main() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py new file mode 100644 index 0000000..d6fa527 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py @@ -0,0 +1,4 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datadog.dogstatsd.base import DogStatsd, statsd # noqa diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py new file mode 100644 index 0000000..1f58fe5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py @@ -0,0 +1,1398 @@ +#!/usr/bin/env python + +# Unless explicitly stated otherwise all files in this repository are licensed under +# the BSD-3-Clause License. This product includes software developed at Datadog +# (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +DogStatsd is a Python client for DogStatsd, a Statsd fork for Datadog. +""" +# Standard libraries +from random import random +import logging +import os +import socket +import errno +import threading +import time +from threading import Lock, RLock +import weakref + +try: + import queue +except ImportError: + # pypy has the same module, but capitalized. + import Queue as queue # type: ignore[no-redef] + +from typing import Optional, List, Text, Union + +# Datadog libraries +from datadog.dogstatsd.context import ( + TimedContextManagerDecorator, + DistributedContextManagerDecorator, +) +from datadog.dogstatsd.route import get_default_route +from datadog.dogstatsd.container import ContainerID +from datadog.util.compat import is_p3k, text +from datadog.util.format import normalize_tags +from datadog.version import __version__ + +# Logging +log = logging.getLogger("datadog.dogstatsd") + +# Default config +DEFAULT_HOST = "localhost" +DEFAULT_PORT = 8125 + +# Buffering-related values (in seconds) +DEFAULT_FLUSH_INTERVAL = 0.3 +MIN_FLUSH_INTERVAL = 0.0001 + +# Tag name of entity_id +ENTITY_ID_TAG_NAME = "dd.internal.entity_id" + +# Env var name of entity_id +ENTITY_ID_ENV_VAR = "DD_ENTITY_ID" + +# Env var to enable/disable sending the container ID field +ORIGIN_DETECTION_ENABLED = "DD_ORIGIN_DETECTION_ENABLED" + +# Default buffer settings based on socket type +UDP_OPTIMAL_PAYLOAD_LENGTH = 1432 +UDS_OPTIMAL_PAYLOAD_LENGTH = 8192 + +# Socket options +MIN_SEND_BUFFER_SIZE = 32 * 1024 + +# Mapping of each "DD_" prefixed environment variable to a specific tag name +DD_ENV_TAGS_MAPPING = { + ENTITY_ID_ENV_VAR: ENTITY_ID_TAG_NAME, + "DD_ENV": "env", + "DD_SERVICE": "service", + "DD_VERSION": "version", +} + +# Telemetry minimum flush interval in seconds +DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL = 10 + +# Telemetry pre-computed formatting string. Pre-computation +# increases throughput of composing the result by 2-15% from basic +# '%'-based formatting with a `join`. 
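The telemetry template defined just below expands into one `name:value|c|#tags` line per client counter, the same plain-text wire format used for ordinary counts; a trimmed-down illustration with two of the eleven counters (the counter values and tags are made up):

# Two-line excerpt of the telemetry template, filled with (value, tags) pairs
template = "\n".join([
    "datadog.dogstatsd.client.metrics:%s|c|#%s",
    "datadog.dogstatsd.client.packets_sent:%s|c|#%s",
]) + "\n"

tags = "client:py,client_version:2.x"   # illustrative client tags
print(template % (10, tags, 1, tags))
# datadog.dogstatsd.client.metrics:10|c|#client:py,client_version:2.x
# datadog.dogstatsd.client.packets_sent:1|c|#client:py,client_version:2.x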
+TELEMETRY_FORMATTING_STR = "\n".join( + [ + "datadog.dogstatsd.client.metrics:%s|c|#%s", + "datadog.dogstatsd.client.events:%s|c|#%s", + "datadog.dogstatsd.client.service_checks:%s|c|#%s", + "datadog.dogstatsd.client.bytes_sent:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped_queue:%s|c|#%s", + "datadog.dogstatsd.client.bytes_dropped_writer:%s|c|#%s", + "datadog.dogstatsd.client.packets_sent:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped_queue:%s|c|#%s", + "datadog.dogstatsd.client.packets_dropped_writer:%s|c|#%s", + ] +) + "\n" + +Stop = object() + +SUPPORTS_FORKING = hasattr(os, "register_at_fork") and not os.environ.get("DD_DOGSTATSD_DISABLE_FORK_SUPPORT", None) +TRACK_INSTANCES = not os.environ.get("DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING", None) + +_instances = weakref.WeakSet() # type: weakref.WeakSet + + +def pre_fork(): + """Prepare all client instances for a process fork. + + If SUPPORTS_FORKING is true, this will be called automatically before os.fork(). + """ + for c in _instances: + c.pre_fork() + + +def post_fork(): + """Restore all client instances after a fork. + + If SUPPORTS_FORKING is true, this will be called automatically after os.fork(). + """ + for c in _instances: + c.post_fork() + + +if SUPPORTS_FORKING: + os.register_at_fork(before=pre_fork, after_in_child=post_fork, after_in_parent=post_fork) # type: ignore + + +# pylint: disable=useless-object-inheritance,too-many-instance-attributes +# pylint: disable=too-many-arguments,too-many-locals +class DogStatsd(object): + OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3) + + def __init__( + self, + host=DEFAULT_HOST, # type: Text + port=DEFAULT_PORT, # type: int + max_buffer_size=None, # type: None + flush_interval=DEFAULT_FLUSH_INTERVAL, # type: float + disable_buffering=True, # type: bool + namespace=None, # type: Optional[Text] + constant_tags=None, # type: Optional[List[str]] + use_ms=False, # type: bool + use_default_route=False, # type: bool + socket_path=None, # type: Optional[Text] + default_sample_rate=1, # type: float + disable_telemetry=False, # type: bool + telemetry_min_flush_interval=(DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL), # type: int + telemetry_host=None, # type: Text + telemetry_port=None, # type: Union[str, int] + telemetry_socket_path=None, # type: Text + max_buffer_len=0, # type: int + container_id=None, # type: Optional[Text] + origin_detection_enabled=True, # type: bool + socket_timeout=0, # type: Optional[float] + telemetry_socket_timeout=0, # type: Optional[float] + disable_background_sender=True, # type: bool + sender_queue_size=0, # type: int + sender_queue_timeout=0, # type: Optional[float] + track_instance=True, # type: bool + ): # type: (...) -> None + """ + Initialize a DogStatsd object. + + >>> statsd = DogStatsd() + + :envvar DD_AGENT_HOST: the host of the DogStatsd server. + If set, it overrides default value. + :type DD_AGENT_HOST: string + + :envvar DD_DOGSTATSD_PORT: the port of the DogStatsd server. + If set, it overrides default value. + :type DD_DOGSTATSD_PORT: integer + + :envvar DATADOG_TAGS: Tags to attach to every metric reported by dogstatsd client. + :type DATADOG_TAGS: comma-delimited string + + :envvar DD_ENTITY_ID: Tag to identify the client entity. + :type DD_ENTITY_ID: string + + :envvar DD_ENV: the env of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. 
+ :type DD_ENV: string + + :envvar DD_SERVICE: the name of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. + :type DD_SERVICE: string + + :envvar DD_VERSION: the version of the service running the dogstatsd client. + If set, it is appended to the constant (global) tags of the statsd client. + :type DD_VERSION: string + + :envvar DD_DOGSTATSD_DISABLE: Disable any statsd metric collection (default False) + :type DD_DOGSTATSD_DISABLE: boolean + + :envvar DD_TELEMETRY_HOST: the host for the dogstatsd server we wish to submit + telemetry stats to. If set, it overrides default value. + :type DD_TELEMETRY_HOST: string + + :envvar DD_TELEMETRY_PORT: the port for the dogstatsd server we wish to submit + telemetry stats to. If set, it overrides default value. + :type DD_TELEMETRY_PORT: integer + + :envvar DD_ORIGIN_DETECTION_ENABLED: Enable/disable sending the container ID field + for origin detection. + :type DD_ORIGIN_DETECTION_ENABLED: boolean + + :envvar DD_DOGSTATSD_DISABLE_FORK_SUPPORT: Don't install global fork hooks with os.register_at_fork. + Global fork hooks then need to be called manually before and after calling os.fork. + :type DD_DOGSTATSD_DISABLE_FORK_SUPPORT: boolean + + :envvar DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: Don't register instances of this class with global fork hooks. + :type DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: boolean + + :param host: the host of the DogStatsd server. + :type host: string + + :param port: the port of the DogStatsd server. + :type port: integer + + :max_buffer_size: Deprecated option, do not use it anymore. + :type max_buffer_type: None + + :flush_interval: Amount of time in seconds that the flush thread will + wait before trying to flush the buffered metrics to the server. If set, + it overrides the default value. + :type flush_interval: float + + :disable_buffering: If set, metrics are no longered buffered by the client and + all data is sent synchronously to the server + :type disable_buffering: bool + + :param namespace: Namespace to prefix all metric names + :type namespace: string + + :param constant_tags: Tags to attach to all metrics + :type constant_tags: list of strings + + :param use_ms: Report timed values in milliseconds instead of seconds (default False) + :type use_ms: boolean + + :param use_default_route: Dynamically set the DogStatsd host to the default route + (Useful when running the client in a container) (Linux only) + :type use_default_route: boolean + + :param socket_path: Communicate with dogstatsd through a UNIX socket instead of + UDP. If set, disables UDP transmission (Linux only) + :type socket_path: string + + :param default_sample_rate: Sample rate to use by default for all metrics + :type default_sample_rate: float + + :param max_buffer_len: Maximum number of bytes to buffer before sending to the server + if sending metrics in batch. If not specified it will be adjusted to a optimal value + depending on the connection type. + :type max_buffer_len: integer + + :param disable_telemetry: Should client telemetry be disabled + :type disable_telemetry: boolean + + :param telemetry_min_flush_interval: Minimum flush interval for telemetry in seconds + :type telemetry_min_flush_interval: integer + + :param telemetry_host: the host for the dogstatsd server we wish to submit + telemetry stats to. Optional. If telemetry is enabled and this is not specified + the default host will be used. 
+ :type host: string + + :param telemetry_port: the port for the dogstatsd server we wish to submit + telemetry stats to. Optional. If telemetry is enabled and this is not specified + the default host will be used. + :type port: integer + + :param telemetry_socket_path: Submit client telemetry to dogstatsd through a UNIX + socket instead of UDP. If set, disables UDP transmission (Linux only) + :type telemetry_socket_path: string + + :param container_id: Allows passing the container ID, this will be used by the Agent to enrich + metrics with container tags. + This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0. + When configured, the provided container ID is prioritized over the container ID discovered + via Origin Detection. When DD_ENTITY_ID is set, this value is ignored. + Default: None. + :type container_id: string + + :param origin_detection_enabled: Enable/disable the client origin detection. + This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0. + When enabled, the client tries to discover its container ID and sends it to the Agent + to enrich the metrics with container tags. + Origin detection can be disabled by configuring the environment variabe DD_ORIGIN_DETECTION_ENABLED=false + The client tries to read the container ID by parsing the file /proc/self/cgroup. + This is not supported on Windows. + The client prioritizes the value passed via DD_ENTITY_ID (if set) over the container ID. + Default: True. + More on this: https://docs.datadoghq.com/developers/dogstatsd/?tab=kubernetes#origin-detection-over-udp + :type origin_detection_enabled: boolean + + :param socket_timeout: Set timeout for socket operations, in seconds. Optional. + If sets to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. + :type socket_timeout: float + + :param telemetry_socket_timeout: Set timeout for the telemetry socket operations. Optional. + Effective only if either telemetry_host or telemetry_socket_path are set. + If sets to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. + :type telemetry_socket_timeout: float + + :param disable_background_sender: Use a background thread to communicate with the dogstatsd server. Optional. + When enabled, a background thread will be used to send metric payloads to the Agent. + Applications should call stop() before exiting to make sure all pending payloads are sent. + Default: True. + :type disable_background_sender: boolean + + :param sender_queue_size: Set the maximum number of packets to queue for the sender. Optional + How may packets to queue before blocking or dropping the packet if the packet queue is already full. + Default: 0 (unlimited). + :type sender_queue_size: integer + + :param sender_queue_timeout: Set timeout for packet queue operations, in seconds. Optional. + How long the application thread is willing to wait for the queue clear up before dropping the metric packet. + If set to None, wait forever. + If set to zero drop the packet immediately if the queue is full. + Default: 0 (no wait) + :type sender_queue_timeout: float + + :param track_instance: Keep track of this instance and automatically handle cleanup when os.fork() is called, + if supported. + Default: True. 
+ :type track_instance: boolean + """ + + self._socket_lock = Lock() + + # Check for deprecated option + if max_buffer_size is not None: + log.warning("The parameter max_buffer_size is now deprecated and is not used anymore") + + # Check host and port env vars + agent_host = os.environ.get("DD_AGENT_HOST") + if agent_host and host == DEFAULT_HOST: + host = agent_host + + dogstatsd_port = os.environ.get("DD_DOGSTATSD_PORT") + if dogstatsd_port and port == DEFAULT_PORT: + try: + port = int(dogstatsd_port) + except ValueError: + log.warning( + "Port number provided in DD_DOGSTATSD_PORT env var is not an integer: \ + %s, using %s as port number", + dogstatsd_port, + port, + ) + + # Assuming environment variables always override + telemetry_host = os.environ.get("DD_TELEMETRY_HOST", telemetry_host) + telemetry_port = os.environ.get("DD_TELEMETRY_PORT", telemetry_port) or port + + # Check enabled + if os.environ.get("DD_DOGSTATSD_DISABLE") not in {"True", "true", "yes", "1"}: + self._enabled = True + else: + self._enabled = False + + # Connection + self._max_buffer_len = max_buffer_len + self.socket_timeout = socket_timeout + if socket_path is not None: + self.socket_path = socket_path # type: Optional[text] + self.host = None + self.port = None + else: + self.socket_path = None + self.host = self.resolve_host(host, use_default_route) + self.port = int(port) + + self.telemetry_socket_path = telemetry_socket_path + self.telemetry_host = None + self.telemetry_port = None + self.telemetry_socket_timeout = telemetry_socket_timeout + if not telemetry_socket_path and telemetry_host: + self.telemetry_socket_path = None + self.telemetry_host = self.resolve_host(telemetry_host, use_default_route) + self.telemetry_port = int(telemetry_port) + + # Socket + self.socket = None + self.telemetry_socket = None + self.encoding = "utf-8" + + # Options + env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag] + # Inject values of DD_* environment variables as global tags. + has_entity_id = False + for var, tag_name in DD_ENV_TAGS_MAPPING.items(): + value = os.environ.get(var, "") + if value: + env_tags.append("{name}:{value}".format(name=tag_name, value=value)) + if var == ENTITY_ID_ENV_VAR: + has_entity_id = True + if constant_tags is None: + constant_tags = [] + self.constant_tags = constant_tags + env_tags + if namespace is not None: + namespace = text(namespace) + self.namespace = namespace + self.use_ms = use_ms + self.default_sample_rate = default_sample_rate + + # Origin detection + self._container_id = None + if not has_entity_id: + origin_detection_enabled = self._is_origin_detection_enabled( + container_id, origin_detection_enabled, has_entity_id + ) + self._set_container_id(container_id, origin_detection_enabled) + + # init telemetry version + self._client_tags = [ + "client:py", + "client_version:{}".format(__version__), + ] + self._reset_telemetry() + self._telemetry_flush_interval = telemetry_min_flush_interval + self._telemetry = not disable_telemetry + self._last_flush_time = time.time() + + self._current_buffer_total_size = 0 + self._buffer = [] # type: List[Text] + self._buffer_lock = RLock() + + self._reset_buffer() + + # This lock is used for all cases where client configuration is being changed: buffering, sender mode. + self._config_lock = RLock() + + # If buffering is disabled, we bypass the buffer function. 
+ self._send = self._send_to_buffer + self._disable_buffering = disable_buffering + if self._disable_buffering: + self._send = self._send_to_server + log.debug("Statsd buffering is disabled") + + # Indicates if the process is about to fork, so we shouldn't start any new threads yet. + self._forking = False + + # Start the flush thread if buffering is enabled and the interval is above + # a reasonable range. This both prevents thrashing and allow us to use "0.0" + # as a value for disabling the automatic flush timer as well. + self._flush_interval = flush_interval + self._flush_thread_stop = threading.Event() + self._flush_thread = None + self._start_flush_thread(self._flush_interval) + + self._queue = None + self._sender_thread = None + self._sender_enabled = False + + if not disable_background_sender: + self.enable_background_sender(sender_queue_size, sender_queue_timeout) + + if TRACK_INSTANCES and track_instance: + _instances.add(self) + + @property + def socket_path(self): + return self._socket_path + + @socket_path.setter + def socket_path(self, path): + with self._socket_lock: + self._socket_path = path + if path is None: + self._transport = "udp" + self._max_payload_size = self._max_buffer_len or UDP_OPTIMAL_PAYLOAD_LENGTH + else: + self._transport = "uds" + self._max_payload_size = self._max_buffer_len or UDS_OPTIMAL_PAYLOAD_LENGTH + + def enable_background_sender(self, sender_queue_size=0, sender_queue_timeout=0): + """ + Use a background thread to communicate with the dogstatsd server. + When enabled, a background thread will be used to send metric payloads to the Agent. + + Applications should call stop() before exiting to make sure all pending payloads are sent. + + Compatible with os.fork() starting with Python 3.7. On earlier versions, compatible if applications + arrange to call pre_fork() and post_fork() module functions around calls to os.fork(). + + :param sender_queue_size: Set the maximum number of packets to queue for the sender. + How many packets to queue before blocking or dropping the packet if the packet queue is already full. + Default: 0 (unlimited). + :type sender_queue_size: integer, optional + :param sender_queue_timeout: Set timeout for packet queue operations, in seconds. + How long the application thread is willing to wait for the queue clear up before dropping the metric packet. + If set to None, wait forever. If set to zero drop the packet immediately if the queue is full. + Default: 0 (no wait). + :type sender_queue_timeout: float, optional + """ + + with self._config_lock: + self._sender_enabled = True + self._sender_queue_size = sender_queue_size + if sender_queue_timeout is None: + self._queue_blocking = True + self._queue_timeout = None + else: + self._queue_blocking = sender_queue_timeout > 0 + self._queue_timeout = max(0, sender_queue_timeout) + + self._start_sender_thread() + + def disable_background_sender(self): + """Disable background sender mode. + + This call will block until all previously queued payloads are sent. 
+ """ + with self._config_lock: + self._sender_enabled = False + self._stop_sender_thread() + + def disable_telemetry(self): + self._telemetry = False + + def enable_telemetry(self): + self._telemetry = True + + # Note: Invocations of this method should be thread-safe + def _start_flush_thread(self, flush_interval): + if self._disable_buffering or self._flush_interval <= MIN_FLUSH_INTERVAL: + log.debug("Statsd periodic buffer flush is disabled") + return + + if self._forking: + return + + if self._flush_thread is not None: + return + + def _flush_thread_loop(self, flush_interval): + while not self._flush_thread_stop.is_set(): + time.sleep(flush_interval) + self.flush() + + self._flush_thread = threading.Thread( + name="{}_flush_thread".format(self.__class__.__name__), + target=_flush_thread_loop, + args=(self, flush_interval,), + ) + self._flush_thread.daemon = True + self._flush_thread.start() + + log.debug( + "Statsd flush thread registered with period of %s", + self._flush_interval, + ) + + # Note: Invocations of this method should be thread-safe + def _stop_flush_thread(self): + if not self._flush_thread: + return + + try: + self.flush() + finally: + pass + + self._flush_thread_stop.set() + + self._flush_thread.join() + self._flush_thread = None + + self._flush_thread_stop.clear() + + def _dedicated_telemetry_destination(self): + return bool(self.telemetry_socket_path or self.telemetry_host) + + # Context manager helper + def __enter__(self): + self.open_buffer() + return self + + # Context manager helper + def __exit__(self, exc_type, value, traceback): + self.close_buffer() + + @property + def disable_buffering(self): + with self._config_lock: + return self._disable_buffering + + @disable_buffering.setter + def disable_buffering(self, is_disabled): + with self._config_lock: + # If the toggle didn't change anything, this method is a noop + if self._disable_buffering == is_disabled: + return + + self._disable_buffering = is_disabled + + # If buffering has been disabled, flush and kill the background thread + # otherwise start up the flushing thread and enable the buffering. + if is_disabled: + self._send = self._send_to_server + self._stop_flush_thread() + log.debug("Statsd buffering is disabled") + else: + self._send = self._send_to_buffer + self._start_flush_thread(self._flush_interval) + + @staticmethod + def resolve_host(host, use_default_route): + """ + Resolve the DogStatsd host. + + :param host: host + :type host: string + :param use_default_route: Use the system default route as host (overrides `host` parameter) + :type use_default_route: bool + """ + if not use_default_route: + return host + + return get_default_route() + + def get_socket(self, telemetry=False): + """ + Return a connected socket. + + Note: connect the socket before assigning it to the class instance to + avoid bad thread race conditions. 
+ """ + with self._socket_lock: + if telemetry and self._dedicated_telemetry_destination(): + if not self.telemetry_socket: + if self.telemetry_socket_path is not None: + self.telemetry_socket = self._get_uds_socket( + self.telemetry_socket_path, + self.telemetry_socket_timeout, + ) + else: + self.telemetry_socket = self._get_udp_socket( + self.telemetry_host, + self.telemetry_port, + self.telemetry_socket_timeout, + ) + + return self.telemetry_socket + + if not self.socket: + if self.socket_path is not None: + self.socket = self._get_uds_socket(self.socket_path, self.socket_timeout) + else: + self.socket = self._get_udp_socket( + self.host, + self.port, + self.socket_timeout, + ) + + return self.socket + + def set_socket_timeout(self, timeout): + """ + Set timeout for socket operations, in seconds. + + If set to zero, never wait if operation can not be completed immediately. If set to None, wait forever. + This option does not affect hostname resolution when using UDP. + """ + with self._socket_lock: + self.socket_timeout = timeout + if self.socket: + self.socket.settimeout(timeout) + + @classmethod + def _ensure_min_send_buffer_size(cls, sock, min_size=MIN_SEND_BUFFER_SIZE): + # Increase the receiving buffer size where needed (e.g. MacOS has 4k RX + # buffers which is half of the max packet size that the client will send. + if os.name == 'posix': + try: + recv_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) + if recv_buff_size <= min_size: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, min_size) + log.debug("Socket send buffer increased to %dkb", min_size / 1024) + finally: + pass + + @classmethod + def _get_uds_socket(cls, socket_path, timeout): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) + sock.settimeout(timeout) + cls._ensure_min_send_buffer_size(sock) + sock.connect(socket_path) + return sock + + @classmethod + def _get_udp_socket(cls, host, port, timeout): + log.debug("Connecting to %s:%s", host, port) + addrinfo = socket.getaddrinfo(host, port, 0, socket.SOCK_DGRAM) + # Override gai.conf order for backwrads compatibility: prefer + # v4, so that a v4-only service on hosts with both addresses + # still works. + addrinfo.sort(key=lambda v: v[0] == socket.AF_INET, reverse=True) + lastaddr = len(addrinfo) - 1 + for i, (af, ty, proto, _, addr) in enumerate(addrinfo): + sock = None + try: + sock = socket.socket(af, ty, proto) + sock.settimeout(timeout) + cls._ensure_min_send_buffer_size(sock) + sock.connect(addr) + log.debug("Connected to: %s", addr) + return sock + except Exception as e: + if sock is not None: + sock.close() + log.debug("Failed to connect to %s: %s", addr, e) + if i < lastaddr: + continue + raise e + else: + raise ValueError("getaddrinfo returned no addresses to connect to") + + def open_buffer(self, max_buffer_size=None): + """ + Open a buffer to send a batch of metrics. + + To take advantage of automatic flushing, you should use the context manager instead + + >>> with DogStatsd() as batch: + >>> batch.gauge("users.online", 123) + >>> batch.gauge("active.connections", 1001) + + Note: This method must be called before close_buffer() matching invocation. 
+ """ + + self._config_lock.acquire() + + # XXX Remove if `disable_buffering` default is changed to False + self._send = self._send_to_buffer + + if max_buffer_size is not None: + log.warning("The parameter max_buffer_size is now deprecated and is not used anymore") + + self._reset_buffer() + + def close_buffer(self): + """ + Flush the buffer and switch back to single metric packets. + + Note: This method must be called after a matching open_buffer() + invocation. + """ + try: + self.flush() + finally: + # XXX Remove if `disable_buffering` default is changed to False + if self._disable_buffering: + self._send = self._send_to_server + + self._config_lock.release() + + def _reset_buffer(self): + with self._buffer_lock: + self._current_buffer_total_size = 0 + self._buffer = [] + + def flush(self): + """ + Flush the metrics buffer by sending the data to the server. + """ + with self._buffer_lock: + # Only send packets if there are packets to send + if self._buffer: + self._send_to_server("\n".join(self._buffer)) + self._reset_buffer() + + def gauge( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Record the value of a gauge, optionally setting a list of tags and a + sample rate. + + >>> statsd.gauge("users.online", 123) + >>> statsd.gauge("active.connections", 1001, tags=["protocol:http"]) + """ + return self._report(metric, "g", value, tags, sample_rate) + + def increment( + self, + metric, # type: Text + value=1, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type: (...) -> None + """ + Increment a counter, optionally setting a value, tags and a sample + rate. + + >>> statsd.increment("page.views") + >>> statsd.increment("files.transferred", 124) + """ + self._report(metric, "c", value, tags, sample_rate) + + def decrement( + self, + metric, # type: Text + value=1, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Decrement a counter, optionally setting a value, tags and a sample + rate. + + >>> statsd.decrement("files.remaining") + >>> statsd.decrement("active.connections", 2) + """ + metric_value = -value if value else value + self._report(metric, "c", metric_value, tags, sample_rate) + + def histogram( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Sample a histogram value, optionally setting tags and a sample rate. + + >>> statsd.histogram("uploaded.file.size", 1445) + >>> statsd.histogram("album.photo.count", 26, tags=["gender:female"]) + """ + self._report(metric, "h", value, tags, sample_rate) + + def distribution( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Send a global distribution value, optionally setting tags and a sample rate. + + >>> statsd.distribution("uploaded.file.size", 1445) + >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"]) + """ + self._report(metric, "d", value, tags, sample_rate) + + def timing( + self, + metric, # type: Text + value, # type: float + tags=None, # type: Optional[List[str]] + sample_rate=None, # type: Optional[float] + ): # type(...) -> None + """ + Record a timing, optionally setting tags and a sample rate. 
+ + >>> statsd.timing("query.response.time", 1234) + """ + self._report(metric, "ms", value, tags, sample_rate) + + def timed(self, metric=None, tags=None, sample_rate=None, use_ms=None): + """ + A decorator or context manager that will measure the distribution of a + function's/context's run time. Optionally specify a list of tags or a + sample rate. If the metric is not defined as a decorator, the module + name and function name will be used. The metric is required as a context + manager. + :: + + @statsd.timed("user.query.time", sample_rate=0.5) + def get_user(user_id): + # Do what you need to ... + pass + + # Is equivalent to ... + with statsd.timed("user.query.time", sample_rate=0.5): + # Do what you need to ... + pass + + # Is equivalent to ... + start = time.time() + try: + get_user(user_id) + finally: + statsd.timing("user.query.time", time.time() - start) + """ + return TimedContextManagerDecorator(self, metric, tags, sample_rate, use_ms) + + def distributed(self, metric=None, tags=None, sample_rate=None, use_ms=None): + """ + A decorator or context manager that will measure the distribution of a + function's/context's run time using custom metric distribution. + Optionally specify a list of tags or a sample rate. If the metric is not + defined as a decorator, the module name and function name will be used. + The metric is required as a context manager. + :: + + @statsd.distributed("user.query.time", sample_rate=0.5) + def get_user(user_id): + # Do what you need to ... + pass + + # Is equivalent to ... + with statsd.distributed("user.query.time", sample_rate=0.5): + # Do what you need to ... + pass + + # Is equivalent to ... + start = time.time() + try: + get_user(user_id) + finally: + statsd.distribution("user.query.time", time.time() - start) + """ + return DistributedContextManagerDecorator(self, metric, tags, sample_rate, use_ms) + + def set(self, metric, value, tags=None, sample_rate=None): + """ + Sample a set value. + + >>> statsd.set("visitors.uniques", 999) + """ + self._report(metric, "s", value, tags, sample_rate) + + def close_socket(self): + """ + Closes connected socket if connected. + """ + with self._socket_lock: + if self.socket: + try: + self.socket.close() + except OSError as e: + log.error("Unexpected error: %s", str(e)) + self.socket = None + + if self.telemetry_socket: + try: + self.telemetry_socket.close() + except OSError as e: + log.error("Unexpected error: %s", str(e)) + self.telemetry_socket = None + + def _serialize_metric(self, metric, metric_type, value, tags, sample_rate=1): + # Create/format the metric packet + return "%s%s:%s|%s%s%s%s" % ( + (self.namespace + ".") if self.namespace else "", + metric, + value, + metric_type, + ("|@" + text(sample_rate)) if sample_rate != 1 else "", + ("|#" + ",".join(normalize_tags(tags))) if tags else "", + ("|c:" + self._container_id if self._container_id else "") + ) + + def _report(self, metric, metric_type, value, tags, sample_rate): + """ + Create a metric packet and send it. 
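# For illustration (not part of the vendored module): the plain-text datagram that
# _serialize_metric() above builds, i.e.
# <namespace>.<metric>:<value>|<type>[|@<sample_rate>][|#<tag>,...][|c:<container_id>].
from datadog.dogstatsd.base import DogStatsd

client = DogStatsd(namespace="app", constant_tags=["env:prod"])
packet = client._serialize_metric("users.online", "g", 123, client._add_constant_tags(None), 0.5)
# e.g. "app.users.online:123|g|@0.5|#env:prod" (plus any DATADOG_TAGS / DD_* env tags)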
+ + More information about the packets' format: http://docs.datadoghq.com/guides/dogstatsd/ + """ + if value is None: + return + + if self._enabled is not True: + return + + if self._telemetry: + self.metrics_count += 1 + + if sample_rate is None: + sample_rate = self.default_sample_rate + + if sample_rate != 1 and random() > sample_rate: + return + + # Resolve the full tag list + tags = self._add_constant_tags(tags) + payload = self._serialize_metric(metric, metric_type, value, tags, sample_rate) + + # Send it + self._send(payload) + + def _reset_telemetry(self): + self.metrics_count = 0 + self.events_count = 0 + self.service_checks_count = 0 + self.bytes_sent = 0 + self.bytes_dropped_queue = 0 + self.bytes_dropped_writer = 0 + self.packets_sent = 0 + self.packets_dropped_queue = 0 + self.packets_dropped_writer = 0 + self._last_flush_time = time.time() + + # Aliases for backwards compatibility. + @property + def packets_dropped(self): + return self.packets_dropped_queue + self.packets_dropped_writer + + @property + def bytes_dropped(self): + return self.bytes_dropped_queue + self.bytes_dropped_writer + + def _flush_telemetry(self): + tags = self._client_tags[:] + tags.append("client_transport:{}".format(self._transport)) + tags.extend(self.constant_tags) + telemetry_tags = ",".join(tags) + + return TELEMETRY_FORMATTING_STR % ( + self.metrics_count, + telemetry_tags, + self.events_count, + telemetry_tags, + self.service_checks_count, + telemetry_tags, + self.bytes_sent, + telemetry_tags, + self.bytes_dropped_queue + self.bytes_dropped_writer, + telemetry_tags, + self.bytes_dropped_queue, + telemetry_tags, + self.bytes_dropped_writer, + telemetry_tags, + self.packets_sent, + telemetry_tags, + self.packets_dropped_queue + self.packets_dropped_writer, + telemetry_tags, + self.packets_dropped_queue, + telemetry_tags, + self.packets_dropped_writer, + telemetry_tags, + ) + + def _is_telemetry_flush_time(self): + return self._telemetry and \ + self._last_flush_time + self._telemetry_flush_interval < time.time() + + def _send_to_server(self, packet): + # Skip the lock if the queue is None. There is no race with enable_background_sender. + if self._queue is not None: + # Prevent a race with disable_background_sender. 
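+ # (_stop_sender_thread clears self._queue while holding the same _buffer_lock, so the
+ # re-check below guarantees a packet is never enqueued after the sender has been stopped.)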
+ with self._buffer_lock:
+ if self._queue is not None:
+ try:
+ self._queue.put(packet + '\n', self._queue_blocking, self._queue_timeout)
+ except queue.Full:
+ self.packets_dropped_queue += 1
+ self.bytes_dropped_queue += 1
+ return
+
+ self._xmit_packet_with_telemetry(packet + '\n')
+
+ def _xmit_packet_with_telemetry(self, packet):
+ self._xmit_packet(packet, False)
+
+ if self._is_telemetry_flush_time():
+ telemetry = self._flush_telemetry()
+ if self._xmit_packet(telemetry, True):
+ self._reset_telemetry()
+ self.packets_sent += 1
+ self.bytes_sent += len(telemetry)
+ else:
+ # Telemetry packet has been dropped, keep telemetry data for the next flush
+ self._last_flush_time = time.time()
+ self.bytes_dropped_writer += len(telemetry)
+ self.packets_dropped_writer += 1
+
+ def _xmit_packet(self, packet, is_telemetry):
+ try:
+ if is_telemetry and self._dedicated_telemetry_destination():
+ mysocket = self.telemetry_socket or self.get_socket(telemetry=True)
+ else:
+ # If set, use socket directly
+ mysocket = self.socket or self.get_socket()
+
+ mysocket.send(packet.encode(self.encoding))
+
+ if not is_telemetry and self._telemetry:
+ self.packets_sent += 1
+ self.bytes_sent += len(packet)
+
+ return True
+ except socket.timeout:
+ # dogstatsd is overflowing, drop the packets (mimics the UDP behaviour)
+ pass
+ except (socket.herror, socket.gaierror) as socket_err:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except socket.error as socket_err:
+ if socket_err.errno == errno.EAGAIN:
+ log.debug("Socket send would block: %s, dropping the packet", socket_err)
+ elif socket_err.errno == errno.ENOBUFS:
+ log.debug("Socket buffer full: %s, dropping the packet", socket_err)
+ elif socket_err.errno == errno.EMSGSIZE:
+ log.debug(
+ "Packet size too big (size: %d): %s, dropping the packet",
+ len(packet.encode(self.encoding)),
+ socket_err)
+ else:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except Exception as exc:
+ log.error("Unexpected error: %s", str(exc))
+
+ if not is_telemetry and self._telemetry:
+ self.bytes_dropped_writer += len(packet)
+ self.packets_dropped_writer += 1
+
+ return False
+
+ def _send_to_buffer(self, packet):
+ with self._buffer_lock:
+ if self._should_flush(len(packet)):
+ self.flush()
+
+ self._buffer.append(packet)
+ # Update the current buffer length, including line break to anticipate
+ # the final packet size
+ self._current_buffer_total_size += len(packet) + 1
+
+ def _should_flush(self, length_to_be_added):
+ if self._current_buffer_total_size + length_to_be_added + 1 > self._max_payload_size:
+ return True
+ return False
+
+ @staticmethod
+ def _escape_event_content(string):
+ return string.replace("\n", "\\n")
+
+ @staticmethod
+ def _escape_service_check_message(string):
+ return string.replace("\n", "\\n").replace("m:", "m\\:")
+
+ def event(
+ self,
+ title,
+ message,
+ alert_type=None,
+ aggregation_key=None,
+ source_type_name=None,
+ date_happened=None,
+ priority=None,
+ tags=None,
+ hostname=None,
+ ):
+ """
+ Send an event. Attributes are the same as the Event API.
+ http://docs.datadoghq.com/api/ + + >>> statsd.event("Man down!", "This server needs assistance.") + >>> statsd.event("Web server restart", "The web server is up", alert_type="success") # NOQA + """ + title = DogStatsd._escape_event_content(title) + message = DogStatsd._escape_event_content(message) + + # pylint: disable=undefined-variable + if not is_p3k(): + if not isinstance(title, unicode): # noqa: F821 + title = unicode(DogStatsd._escape_event_content(title), 'utf8') # noqa: F821 + if not isinstance(message, unicode): # noqa: F821 + message = unicode(DogStatsd._escape_event_content(message), 'utf8') # noqa: F821 + + # Append all client level tags to every event + tags = self._add_constant_tags(tags) + + string = u"_e{{{},{}}}:{}|{}".format( + len(title.encode('utf8', 'replace')), + len(message.encode('utf8', 'replace')), + title, + message, + ) + + if date_happened: + string = "%s|d:%d" % (string, date_happened) + if hostname: + string = "%s|h:%s" % (string, hostname) + if aggregation_key: + string = "%s|k:%s" % (string, aggregation_key) + if priority: + string = "%s|p:%s" % (string, priority) + if source_type_name: + string = "%s|s:%s" % (string, source_type_name) + if alert_type: + string = "%s|t:%s" % (string, alert_type) + if tags: + string = "%s|#%s" % (string, ",".join(tags)) + if self._container_id: + string = "%s|c:%s" % (string, self._container_id) + + if len(string) > 8 * 1024: + raise ValueError( + u'Event "{0}" payload is too big (>=8KB). Event discarded'.format( + title + ) + ) + + if self._telemetry: + self.events_count += 1 + + self._send(string) + + def service_check( + self, + check_name, + status, + tags=None, + timestamp=None, + hostname=None, + message=None, + ): + """ + Send a service check run. + + >>> statsd.service_check("my_service.check_name", DogStatsd.WARNING) + """ + message = DogStatsd._escape_service_check_message(message) if message is not None else "" + + string = u"_sc|{0}|{1}".format(check_name, status) + + # Append all client level tags to every status check + tags = self._add_constant_tags(tags) + + if timestamp: + string = u"{0}|d:{1}".format(string, timestamp) + if hostname: + string = u"{0}|h:{1}".format(string, hostname) + if tags: + string = u"{0}|#{1}".format(string, ",".join(tags)) + if message: + string = u"{0}|m:{1}".format(string, message) + if self._container_id: + string = u"{0}|c:{1}".format(string, self._container_id) + + if self._telemetry: + self.service_checks_count += 1 + + self._send(string) + + def _add_constant_tags(self, tags): + if self.constant_tags: + if tags: + return tags + self.constant_tags + + return self.constant_tags + return tags + + def _is_origin_detection_enabled(self, container_id, origin_detection_enabled, has_entity_id): + """ + Returns whether the client should fill the container field. + If DD_ENTITY_ID is set, we don't send the container ID + If a user-defined container ID is provided, we don't ignore origin detection + as dd.internal.entity_id is prioritized over the container field for backward compatibility. + If DD_ENTITY_ID is not set, we try to fill the container field automatically unless + DD_ORIGIN_DETECTION_ENABLED is explicitly set to false. 
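# For illustration (not part of the vendored module): the datagrams built by event() and
# service_check() above follow the same pipe-delimited layout as metrics:
#
#   _e{<title_len>,<text_len>}:<title>|<text>[|d:<ts>][|h:<host>][|k:<agg_key>][|p:<priority>][|s:<source>][|t:<alert_type>][|#<tags>][|c:<container_id>]
#   _sc|<name>|<status>[|d:<ts>][|h:<host>][|#<tags>][|m:<message>][|c:<container_id>]
#
# Newlines in free-text fields are escaped to "\n", and events over 8KB raise ValueError
# before anything is sent.
from datadog.dogstatsd.base import DogStatsd, statsd

statsd.event("Forwarder deployed", "aws-dd-forwarder 3.127.0 rolled out", alert_type="success")
statsd.service_check("forwarder.heartbeat", DogStatsd.WARNING, message="queue backlog growing")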
+ """ + if not origin_detection_enabled or has_entity_id or container_id is not None: + # origin detection is explicitly disabled + # or DD_ENTITY_ID was found + # or a user-defined container ID was provided + return False + value = os.environ.get(ORIGIN_DETECTION_ENABLED, "") + return value.lower() not in {"no", "false", "0", "n", "off"} + + def _set_container_id(self, container_id, origin_detection_enabled): + """ + Initializes the container ID. + It can either be provided by the user or read from cgroups. + """ + if container_id: + self._container_id = container_id + return + if origin_detection_enabled: + try: + reader = ContainerID() + self._container_id = reader.container_id + except Exception as e: + log.debug("Couldn't get container ID: %s", str(e)) + self._container_id = None + + def _start_sender_thread(self): + if not self._sender_enabled or self._forking: + return + + if self._queue is not None: + return + + self._queue = queue.Queue(self._sender_queue_size) + + log.debug("Starting background sender thread") + self._sender_thread = threading.Thread( + name="{}_sender_thread".format(self.__class__.__name__), + target=self._sender_main_loop, + args=(self._queue,) + ) + self._sender_thread.daemon = True + self._sender_thread.start() + + def _stop_sender_thread(self): + # Lock ensures that nothing gets added to the queue after we disable it. + with self._buffer_lock: + if not self._queue: + return + self._queue.put(Stop) + self._queue = None + + self._sender_thread.join() + self._sender_thread = None + + def _sender_main_loop(self, queue): + while True: + item = queue.get() + if item is Stop: + queue.task_done() + return + self._xmit_packet_with_telemetry(item) + queue.task_done() + + def wait_for_pending(self): + """ + Flush the buffer and wait for all queued payloads to be written to the server. + """ + + self.flush() + + # Avoid race with disable_background_sender. We don't need a + # lock, just copy the value so it doesn't change between the + # check and join later. + queue = self._queue + + if queue is not None: + queue.join() + + def pre_fork(self): + """Prepare client for a process fork. + + Flush any pending payloads, stop all background threads and + close the connection. Once the function returns. + + The client should not be used from this point until + post_fork() is called. + """ + log.debug("[%d] pre_fork for %s", os.getpid(), self) + + self._forking = True + + with self._config_lock: + self._stop_flush_thread() + self._stop_sender_thread() + self.close_socket() + + def post_fork(self): + """Restore the client state after a fork.""" + + log.debug("[%d] post_fork for %s", os.getpid(), self) + + with self._socket_lock: + if self.socket or self.telemetry_socket: + log.warning("Open socket detected after fork. Call pre_fork() before os.fork().") + self.close_socket() + + self._forking = False + + with self._config_lock: + self._start_flush_thread(self._flush_interval) + self._start_sender_thread() + + def stop(self): + """Stop the client. + + Disable buffering, background sender and flush any pending payloads to the server. + + Client remains usable after this method, but sending metrics may block if socket_timeout is enabled. 
+ """ + + self.disable_background_sender() + self.disable_buffering = True + self.flush() + self.close_socket() + + +statsd = DogStatsd() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py new file mode 100644 index 0000000..fe2e71c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py @@ -0,0 +1,57 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under +# the BSD-3-Clause License. This product includes software developed at Datadog +# (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import errno +import re + + +class UnresolvableContainerID(Exception): + """ + Unable to get container ID from cgroup. + """ + + +class ContainerID(object): + """ + A reader class that retrieves the current container ID parsed from a the cgroup file. + + Returns: + object: ContainerID + + Raises: + `NotImplementedError`: No proc filesystem is found (non-Linux systems) + `UnresolvableContainerID`: Unable to read the container ID + """ + + CGROUP_PATH = "/proc/self/cgroup" + UUID_SOURCE = r"[0-9a-f]{8}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{12}" + CONTAINER_SOURCE = r"[0-9a-f]{64}" + TASK_SOURCE = r"[0-9a-f]{32}-\d+" + LINE_RE = re.compile(r"^(\d+):([^:]*):(.+)$") + CONTAINER_RE = re.compile(r"(?:.+)?({0}|{1}|{2})(?:\.scope)?$".format(UUID_SOURCE, CONTAINER_SOURCE, TASK_SOURCE)) + + def __init__(self): + self.container_id = self._read_container_id(self.CGROUP_PATH) + + def _read_container_id(self, fpath): + try: + with open(fpath, mode="r") as fp: + for line in fp: + line = line.strip() + match = self.LINE_RE.match(line) + if not match: + continue + _, _, path = match.groups() + parts = [p for p in path.split("/")] + if len(parts): + match = self.CONTAINER_RE.match(parts.pop()) + if match: + return match.group(1) + except IOError as e: + if e.errno != errno.ENOENT: + raise NotImplementedError("Unable to open {}.".format(self.CGROUP_PATH)) + except Exception as e: + raise UnresolvableContainerID("Unable to read the container ID: " + str(e)) + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py new file mode 100644 index 0000000..90e9ce9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py @@ -0,0 +1,88 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +from functools import wraps + +try: + from time import monotonic # type: ignore[attr-defined] +except ImportError: + from time import time as monotonic + +# datadog +from datadog.dogstatsd.context_async import _get_wrapped_co +from datadog.util.compat import iscoroutinefunction + + +class TimedContextManagerDecorator(object): + """ + A context manager and a decorator which will report the elapsed time in + the context OR in a function call. + """ + + def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None): + self.statsd = statsd + self.timing_func = statsd.timing + self.metric = metric + self.tags = tags + self.sample_rate = sample_rate + self.use_ms = use_ms + self.elapsed = None + + def __call__(self, func): + """ + Decorator which returns the elapsed time of the function call. 
+ + Default to the function name if metric was not provided. + """ + if not self.metric: + self.metric = "%s.%s" % (func.__module__, func.__name__) + + # Coroutines + if iscoroutinefunction(func): + return _get_wrapped_co(self, func) + + # Others + @wraps(func) + def wrapped(*args, **kwargs): + start = monotonic() + try: + return func(*args, **kwargs) + finally: + self._send(start) + + return wrapped + + def __enter__(self): + if not self.metric: + raise TypeError("Cannot used timed without a metric!") + self._start = monotonic() + return self + + def __exit__(self, type, value, traceback): + # Report the elapsed time of the context manager. + self._send(self._start) + + def _send(self, start): + elapsed = monotonic() - start + use_ms = self.use_ms if self.use_ms is not None else self.statsd.use_ms + elapsed = int(round(1000 * elapsed)) if use_ms else elapsed + self.timing_func(self.metric, elapsed, self.tags, self.sample_rate) + self.elapsed = elapsed + + def start(self): + self.__enter__() + + def stop(self): + self.__exit__(None, None, None) + + +class DistributedContextManagerDecorator(TimedContextManagerDecorator): + """ + A context manager and a decorator which will report the elapsed time in + the context OR in a function call using the custom distribution metric. + """ + + def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None): + super(DistributedContextManagerDecorator, self).__init__(statsd, metric, tags, sample_rate, use_ms) + self.timing_func = statsd.distribution diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py new file mode 100644 index 0000000..d178d4e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Decorator `timed` for coroutine methods. + +Warning: requires Python 3.5 or higher. +""" +# stdlib +import sys + + +# Wrap the Python 3.5+ function in a docstring to avoid syntax errors when +# running mypy in --py2 mode. Currently there is no way to have mypy skip an +# entire file if it has syntax errors. This solution is very hacky; another +# option is to specify the source files to process in mypy.ini (using glob +# inclusion patterns), and omit this file from the list. +# +# https://stackoverflow.com/a/57023749/3776794 +# https://github.com/python/mypy/issues/6897 +ASYNC_SOURCE = r''' +from functools import wraps +try: + from time import monotonic +except ImportError: + from time import time as monotonic + + +def _get_wrapped_co(self, func): + """ + `timed` wrapper for coroutine methods. + """ + @wraps(func) + async def wrapped_co(*args, **kwargs): + start = monotonic() + try: + result = await func(*args, **kwargs) + return result + finally: + self._send(start) + return wrapped_co +''' + + +def _get_wrapped_co(self, func): + raise NotImplementedError( + u"Decorator `timed` compatibility with coroutine functions" u" requires Python 3.5 or higher." 
+ ) + + +if sys.version_info >= (3, 5): + exec(compile(ASYNC_SOURCE, __file__, "exec")) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py new file mode 100644 index 0000000..c3fe779 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py @@ -0,0 +1,40 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Helper(s), resolve the system's default interface. +""" +# stdlib +import socket +import struct + + +class UnresolvableDefaultRoute(Exception): + """ + Unable to resolve system's default route. + """ + + +def get_default_route(): + """ + Return the system default interface using the proc filesystem. + + Returns: + string: default route + + Raises: + `NotImplementedError`: No proc filesystem is found (non-Linux systems) + `StopIteration`: No default route found + """ + try: + with open("/proc/net/route") as f: + for line in f.readlines(): + fields = line.strip().split() + if fields[1] == "00000000": + return socket.inet_ntoa(struct.pack(" 0: + should_flush = False + if should_flush: + _get_lambda_stats().flush(float("inf")) + + def __call__(self, *args, **kw): + warnings.warn("datadog_lambda_wrapper() is relocated to https://git.io/fjy8o", DeprecationWarning) + _LambdaDecorator._enter() + try: + return self.func(*args, **kw) + finally: + _LambdaDecorator._close() + + +_lambda_stats = None +datadog_lambda_wrapper = _LambdaDecorator + + +def _get_lambda_stats(): + global _lambda_stats + # This is not thread-safe, it should be called first by _LambdaDecorator + if _lambda_stats is None: + _lambda_stats = ThreadStats() + _lambda_stats.start(flush_in_greenlet=False, flush_in_thread=False) + return _lambda_stats + + +def lambda_metric(*args, **kw): + """ Alias to expose only distributions for lambda functions""" + _get_lambda_stats().distribution(*args, **kw) + + +def _init_api_client(): + """No-op GET to initialize the requests connection with DD's endpoints + + The goal here is to make the final flush faster: + we keep alive the Requests session, this means that we can re-use the connection + The consequence is that the HTTP Handshake, which can take hundreds of ms, + is now made at the beginning of a lambda instead of at the end. + + By making the initial request async, we spare a lot of execution time in the lambdas. + """ + try: + api.api_client.APIClient.submit("GET", "validate") + except Exception: + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py new file mode 100644 index 0000000..b5e7699 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py @@ -0,0 +1,511 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +ThreadStats is a tool for collecting application metrics without hindering +performance. It collects metrics in the application thread with very little overhead +and allows flushing metrics in process, in a thread or in a greenlet, depending +on your application's needs. 
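# For illustration (not part of the vendored module): the typical ThreadStats lifecycle this
# docstring describes. Unlike DogStatsd, ThreadStats submits through the Datadog HTTP API, so
# the API key must be configured first; the key below is a placeholder.
from datadog import initialize, ThreadStats

initialize(api_key="my_api_key")
stats = ThreadStats(namespace="forwarder", constant_tags=["service:aws-dd-forwarder"])
stats.start(flush_interval=10, flush_in_thread=True)
stats.increment("home.page.hits")
stats.flush()  # manual flush; the flush thread also runs every flush_interval seconds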
+""" +import atexit +import logging +import os + +# stdlib +from contextlib import contextmanager +from functools import wraps +from time import time + +try: + from time import monotonic # type: ignore[attr-defined] +except ImportError: + from time import time as monotonic + +# datadog +from datadog.api.exceptions import ApiNotInitialized +from datadog.threadstats.constants import MetricType +from datadog.threadstats.events import EventsAggregator +from datadog.threadstats.metrics import MetricsAggregator, Counter, Gauge, Histogram, Timing, Distribution, Set +from datadog.threadstats.reporters import HttpReporter + +# Loggers +log = logging.getLogger("datadog.threadstats") + +DD_ENV_TAGS_MAPPING = { + "DD_ENV": "env", + "DD_SERVICE": "service", + "DD_VERSION": "version", +} + + +class ThreadStats(object): + def __init__(self, namespace="", constant_tags=None, compress_payload=False): + """ + Initialize a threadstats object. + + :param namespace: Namespace to prefix all metric names + :type namespace: string + + :param constant_tags: Tags to attach to every metric reported by this client + :type constant_tags: list of strings + + :param compress_payload: compress the payload using zlib + :type compress_payload: bool + + :envvar DATADOG_TAGS: Tags to attach to every metric reported by ThreadStats client + :type DATADOG_TAGS: comma-delimited string + + :envvar DD_ENV: the env of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. + :type DD_ENV: string + + :envvar DD_SERVICE: the name of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. + :type DD_SERVICE: string + + :envvar DD_VERSION: the version of the service running the ThreadStats client. + If set, it is appended to the constant (global) tags of the client. + :type DD_VERSION: string + """ + # Parameters + self.namespace = namespace + env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag] + for var, tag_name in DD_ENV_TAGS_MAPPING.items(): + value = os.environ.get(var, "") + if value: + env_tags.append("{name}:{value}".format(name=tag_name, value=value)) + if constant_tags is None: + constant_tags = [] + self.constant_tags = constant_tags + env_tags + + # State + self._disabled = True + self.compress_payload = compress_payload + + def start( + self, + flush_interval=10, + roll_up_interval=10, + device=None, + flush_in_thread=True, + flush_in_greenlet=False, + disabled=False, + ): + """ + Start the ThreadStats instance with the specified metric flushing method and preferences. + + By default, metrics will be flushed in a thread. + + >>> stats.start() + + If you're running a gevent server and want to flush metrics in a + greenlet, set *flush_in_greenlet* to True. Be sure to import and monkey + patch gevent before starting ThreadStats. :: + + >>> from gevent import monkey; monkey.patch_all() + >>> stats.start(flush_in_greenlet=True) + + If you'd like to flush metrics in process, set *flush_in_thread* + to False, though you'll have to call ``flush`` manually to post metrics + to the server. :: + + >>> stats.start(flush_in_thread=False) + + If for whatever reason, you need to disable metrics collection in a + hurry, set ``disabled`` to True and metrics won't be collected or flushed. + + >>> stats.start(disabled=True) + + *Note:* Please remember to set your API key before, + using datadog module ``initialize`` method. 
+ + >>> from datadog import initialize, ThreadStats + >>> initialize(api_key="my_api_key") + >>> stats = ThreadStats() + >>> stats.start() + >>> stats.increment("home.page.hits") + + :param flush_interval: The number of seconds to wait between flushes. + :type flush_interval: int + :param flush_in_thread: True if you'd like to spawn a thread to flush metrics. + It will run every `flush_interval` seconds. + :type flush_in_thread: bool + :param flush_in_greenlet: Set to true if you'd like to flush in a gevent greenlet. + :type flush_in_greenlet: bool + :param disabled: Disable metrics collection + :type disabled: bool + """ + self.flush_interval = flush_interval + self.roll_up_interval = roll_up_interval + self.device = device + self._disabled = disabled + self._is_auto_flushing = False + + # Create an aggregator + self._metric_aggregator = MetricsAggregator(self.roll_up_interval) + self._event_aggregator = EventsAggregator() + + # The reporter is responsible for sending metrics off to their final destination. + # It's abstracted to support easy unit testing and in the near future, forwarding + # to the datadog agent. + self.reporter = HttpReporter(compress_payload=self.compress_payload) + + self._is_flush_in_progress = False + self.flush_count = 0 + if self._disabled: + log.info("ThreadStats instance is disabled. No metrics will flush.") + else: + if flush_in_greenlet: + self._start_flush_greenlet() + elif flush_in_thread: + self._start_flush_thread() + + # Flush all remaining metrics on exit + atexit.register(lambda: self.flush(float("inf"))) + + def stop(self): + if not self._is_auto_flushing: + return True + if self._flush_thread: + self._flush_thread.end() + self._is_auto_flushing = False + return True + + def event( + self, + title, + message, + alert_type=None, + aggregation_key=None, + source_type_name=None, + date_happened=None, + priority=None, + tags=None, + hostname=None, + ): + """ + Send an event. See http://docs.datadoghq.com/api/ for more info. + + >>> stats.event("Man down!", "This server needs assistance.") + >>> stats.event("The web server restarted", \ + "The web server is up again", alert_type="success") + """ + if not self._disabled: + # Append all client level tags to every event + event_tags = tags + if self.constant_tags: + if tags: + event_tags = tags + self.constant_tags + else: + event_tags = self.constant_tags + + self._event_aggregator.add_event( + title=title, + text=message, + alert_type=alert_type, + aggregation_key=aggregation_key, + source_type_name=source_type_name, + date_happened=date_happened, + priority=priority, + tags=event_tags, + host=hostname, + ) + + def gauge(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Record the current ``value`` of a metric. The most recent value in + a given flush interval will be recorded. Optionally, specify a set of + tags to associate with the metric. This should be used for sum values + such as total hard disk space, process uptime, total number of active + users, or number of rows in a database table. + + >>> stats.gauge("process.uptime", time.time() - process_start_time) + >>> stats.gauge("cache.bytes.free", cache.get_free_bytes(), tags=["version:1.0"]) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Gauge, sample_rate=sample_rate, host=host + ) + + def set(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Add ``value`` to the current set. 
The length of the set is + flushed as a gauge to Datadog. Optionally, specify a set of + tags to associate with the metric. + + >>> stats.set("example_metric.set", "value_1", tags=["environment:dev"]) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Set, sample_rate=sample_rate, host=host + ) + + def increment(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Increment the counter by the given ``value``. Optionally, specify a list of + ``tags`` to associate with the metric. This is useful for counting things + such as incrementing a counter each time a page is requested. + + >>> stats.increment('home.page.hits') + >>> stats.increment('bytes.processed', file.size()) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Counter, sample_rate=sample_rate, host=host + ) + + def decrement(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Decrement a counter, optionally setting a value, tags and a sample + rate. + + >>> stats.decrement("files.remaining") + >>> stats.decrement("active.connections", 2) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), -value, Counter, sample_rate=sample_rate, host=host + ) + + def histogram(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Sample a histogram value. Histograms will produce metrics that + describe the distribution of the recorded values, namely the maximum, minimum, + average, count and the 75/85/95/99 percentiles. Optionally, specify + a list of ``tags`` to associate with the metric. + + >>> stats.histogram("uploaded_file.size", uploaded_file.size()) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Histogram, sample_rate=sample_rate, host=host + ) + + def distribution(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Sample a distribution value. Distributions will produce metrics that + describe the distribution of the recorded values, namely the maximum, + median, average, count and the 50/75/90/95/99 percentiles. Optionally, + specify a list of ``tags`` to associate with the metric. + + >>> stats.distribution("uploaded_file.size", uploaded_file.size()) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Distribution, sample_rate=sample_rate, host=host + ) + + def timing(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None): + """ + Record a timing, optionally setting tags and a sample rate. + + >>> stats.timing("query.response.time", 1234) + """ + if not self._disabled: + self._metric_aggregator.add_point( + metric_name, tags, timestamp or time(), value, Timing, sample_rate=sample_rate, host=host + ) + + @contextmanager + def timer(self, metric_name, sample_rate=1, tags=None, host=None): + """ + A context manager that will track the distribution of the contained code's run time. + Optionally specify a list of tags to associate with the metric. + :: + + def get_user(user_id): + with stats.timer("user.query.time"): + # Do what you need to ... + pass + + # Is equivalent to ... + def get_user(user_id): + start = time.time() + try: + # Do what you need to ... 
+ pass + finally: + stats.histogram("user.query.time", time.time() - start) + """ + start = monotonic() + try: + yield + finally: + end = monotonic() + self.timing(metric_name, end - start, time(), tags=tags, sample_rate=sample_rate, host=host) + + def timed(self, metric_name, sample_rate=1, tags=None, host=None): + """ + A decorator that will track the distribution of a function's run time. + Optionally specify a list of tags to associate with the metric. + :: + + @stats.timed("user.query.time") + def get_user(user_id): + # Do what you need to ... + pass + + # Is equivalent to ... + start = time.time() + try: + get_user(user_id) + finally: + stats.histogram("user.query.time", time.time() - start) + """ + + def wrapper(func): + @wraps(func) + def wrapped(*args, **kwargs): + with self.timer(metric_name, sample_rate, tags, host): + result = func(*args, **kwargs) + return result + + return wrapped + + return wrapper + + def flush(self, timestamp=None): + """ + Flush and post all metrics to the server. Note that this is a blocking + call, so it is likely not suitable for user facing processes. In those + cases, it's probably best to flush in a thread or greenlet. + """ + try: + if self._is_flush_in_progress: + log.debug("A flush is already in progress. Skipping this one.") + return False + if self._disabled: + log.info("Not flushing because we're disabled.") + return False + + self._is_flush_in_progress = True + + # Process metrics + metrics, dists = self._get_aggregate_metrics_and_dists(timestamp or time()) + count_metrics = len(metrics) + if count_metrics: + self.flush_count += 1 + log.debug("Flush #%s sending %s metrics" % (self.flush_count, count_metrics)) + self.reporter.flush_metrics(metrics) + else: + log.debug("No metrics to flush. Continuing.") + + count_dists = len(dists) + if count_dists: + self.flush_count += 1 + log.debug("Flush #%s sending %s distributions" % (self.flush_count, count_dists)) + self.reporter.flush_distributions(dists) + else: + log.debug("No distributions to flush. Continuing.") + + # Process events + events = self._get_aggregate_events() + count_events = len(events) + if count_events: + self.flush_count += 1 + log.debug("Flush #%s sending %s events" % (self.flush_count, count_events)) + self.reporter.flush_events(events) + else: + log.debug("No events to flush. Continuing.") + except ApiNotInitialized: + raise + except Exception: + try: + log.exception("Error flushing metrics and events") + except Exception: + pass + finally: + self._is_flush_in_progress = False + + def _get_aggregate_metrics_and_dists(self, flush_time=None): + """ + Get, format and return the rolled up metrics from the aggregator. + """ + # Get rolled up metrics + rolled_up_metrics = self._metric_aggregator.flush(flush_time) + + # FIXME: emit a dictionary from the aggregator + metrics = [] + dists = [] + for timestamp, value, name, tags, host, metric_type, interval in rolled_up_metrics: + metric_tags = tags + metric_name = name + + # Append all client level tags to every metric + if self.constant_tags: + if tags: + metric_tags = tags + self.constant_tags + else: + metric_tags = self.constant_tags + + # Resolve the metric name + if self.namespace: + metric_name = self.namespace + "." 
+ name + + metric = { + "metric": metric_name, + "points": [[timestamp, value]], + "type": metric_type, + "host": host, + "device": self.device, + "tags": metric_tags, + "interval": interval, + } + if metric_type == MetricType.Distribution: + dists.append(metric) + else: + metrics.append(metric) + return (metrics, dists) + + def _get_aggregate_events(self): + # Get events + events = self._event_aggregator.flush() + return events + + def _start_flush_thread(self): + """ Start a thread to flush metrics. """ + from datadog.threadstats.periodic_timer import PeriodicTimer + + if self._is_auto_flushing: + log.info("Autoflushing already started.") + return + self._is_auto_flushing = True + + # A small helper for logging and flushing. + def flush(): + try: + log.debug("Flushing metrics in thread") + self.flush() + except Exception: + try: + log.exception("Error flushing in thread") + except Exception: + pass + + log.info("Starting flush thread with interval %s." % self.flush_interval) + self._flush_thread = PeriodicTimer(self.flush_interval, flush) + self._flush_thread.start() + + def _start_flush_greenlet(self): + if self._is_auto_flushing: + log.info("Autoflushing already started.") + return + self._is_auto_flushing = True + + import gevent + + # A small helper for flushing. + def flush(): + while True: + try: + log.debug("Flushing metrics in greenlet") + self.flush() + gevent.sleep(self.flush_interval) + except Exception: + try: + log.exception("Error flushing in greenlet") + except Exception: + pass + + log.info("Starting flush greenlet with interval %s." % self.flush_interval) + gevent.spawn(flush) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py new file mode 100644 index 0000000..63b565d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py @@ -0,0 +1,18 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + + +class MetricType(object): + Gauge = "gauge" + Counter = "counter" + Histogram = "histogram" + Rate = "rate" + Distribution = "distribution" + + +class MonitorType(object): + SERVICE_CHECK = "service check" + METRIC_ALERT = "metric alert" + QUERY_ALERT = "query alert" + ALL = (SERVICE_CHECK, METRIC_ALERT, QUERY_ALERT) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py new file mode 100644 index 0000000..a85c798 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py @@ -0,0 +1,27 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Event aggregator class. 
+""" + +from datadog.util.compat import iteritems + + +class EventsAggregator(object): + """ + A simple event aggregator + """ + + def __init__(self): + self._events = [] + + def add_event(self, **event): + # Clean empty values + event = dict((k, v) for k, v in iteritems(event) if v is not None) + self._events.append(event) + + def flush(self): + events = self._events + self._events = [] + return events diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py new file mode 100644 index 0000000..aa9fef5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py @@ -0,0 +1,203 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Metric roll-up classes. +""" +from collections import defaultdict +import random +import itertools +import threading + +from datadog.util.compat import iternext +from datadog.threadstats.constants import MetricType + + +class Metric(object): + """ + A base metric class that accepts points, slices them into time intervals + and performs roll-ups within those intervals. + """ + + def add_point(self, value): + """ Add a point to the given metric. """ + raise NotImplementedError() + + def flush(self, timestamp, interval): + """ Flush all metrics up to the given timestamp. """ + raise NotImplementedError() + + +class Set(Metric): + """ A set metric. """ + + stats_tag = "g" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.set = set() + + def add_point(self, value): + self.set.add(value) + + def flush(self, timestamp, interval): + return [(timestamp, len(self.set), self.name, self.tags, self.host, MetricType.Gauge, interval)] + + +class Gauge(Metric): + """ A gauge metric. """ + + stats_tag = "g" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.value = None + + def add_point(self, value): + self.value = value + + def flush(self, timestamp, interval): + return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Gauge, interval)] + + +class Counter(Metric): + """ A metric that tracks a counter value. """ + + stats_tag = "c" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.count = [] + + def add_point(self, value): + self.count.append(value) + + def flush(self, timestamp, interval): + count = sum(self.count, 0) + return [(timestamp, count / float(interval), self.name, self.tags, self.host, MetricType.Rate, interval)] + + +class Distribution(Metric): + """ A distribution metric. """ + + stats_tag = "d" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.value = [] + + def add_point(self, value): + self.value.append(value) + + def flush(self, timestamp, interval): + return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Distribution, interval)] + + +class Histogram(Metric): + """ A histogram metric. 
""" + + stats_tag = "h" + + def __init__(self, name, tags, host): + self.name = name + self.tags = tags + self.host = host + self.max = float("-inf") + self.min = float("inf") + self.sum = [] + self.iter_counter = itertools.count() + self.count = iternext(self.iter_counter) + self.sample_size = 1000 + self.samples = [] + self.percentiles = [0.75, 0.85, 0.95, 0.99] + + def add_point(self, value): + self.max = self.max if self.max > value else value + self.min = self.min if self.min < value else value + self.sum.append(value) + if self.count < self.sample_size: + self.samples.append(value) + else: + self.samples[random.randrange(0, self.sample_size)] = value + self.count = iternext(self.iter_counter) + + def flush(self, timestamp, interval): + if not self.count: + return [] + metrics = [ + (timestamp, self.min, "%s.min" % self.name, self.tags, self.host, MetricType.Gauge, interval), + (timestamp, self.max, "%s.max" % self.name, self.tags, self.host, MetricType.Gauge, interval), + ( + timestamp, + self.count / float(interval), + "%s.count" % self.name, + self.tags, + self.host, + MetricType.Rate, + interval, + ), + (timestamp, self.average(), "%s.avg" % self.name, self.tags, self.host, MetricType.Gauge, interval), + ] + length = len(self.samples) + self.samples.sort() + for p in self.percentiles: + val = self.samples[int(round(p * length - 1))] + name = "%s.%spercentile" % (self.name, int(p * 100)) + metrics.append((timestamp, val, name, self.tags, self.host, MetricType.Gauge, interval)) + return metrics + + def average(self): + sum_metrics = sum(self.sum, 0) + return float(sum_metrics) / self.count + + +class Timing(Histogram): + """ + A timing metric. + Inherit from Histogram to workaround and support it in API mode + """ + + stats_tag = "ms" + + +class MetricsAggregator(object): + """ + A small class to handle the roll-ups of multiple metrics at once. + """ + + def __init__(self, roll_up_interval=10): + self._lock = threading.RLock() + self._metrics = defaultdict(lambda: {}) + self._roll_up_interval = roll_up_interval + + def add_point(self, metric, tags, timestamp, value, metric_class, sample_rate=1, host=None): + # The sample rate is currently ignored for in process stuff + interval = timestamp - timestamp % self._roll_up_interval + key = (metric, host, tuple(sorted(tags)) if tags else None) + with self._lock: + if key not in self._metrics[interval]: + self._metrics[interval][key] = metric_class(metric, tags, host) + self._metrics[interval][key].add_point(value) + + def flush(self, timestamp): + """ Flush all metrics up to the given timestamp. """ + if timestamp == float("inf"): + interval = float("inf") + else: + interval = timestamp - timestamp % self._roll_up_interval + + with self._lock: + past_intervals = [i for i in self._metrics.keys() if i < interval] + metrics = [] + for i in past_intervals: + for m in list(self._metrics.pop(i).values()): + metrics += m.flush(i, self._roll_up_interval) + return metrics diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py new file mode 100644 index 0000000..ff4b583 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py @@ -0,0 +1,36 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +""" +A small class to run a task periodically in a thread. +""" + + +from threading import Thread, Event +import sys + + +class PeriodicTimer(Thread): + def __init__(self, interval, function, *args, **kwargs): + Thread.__init__(self) + self.daemon = True + assert interval > 0 + self.interval = interval + assert function + self.function = function + self.args = args + self.kwargs = kwargs + self.finished = Event() + + def end(self): + self.finished.set() + + def run(self): + while not self.finished.wait(self.interval): + try: + self.function(*self.args, **self.kwargs) + except Exception: + # If `sys` is None, it means the interpreter is shutting down + # and it's very likely the reason why we got an exception. + if sys is not None: + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py new file mode 100644 index 0000000..1324794 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py @@ -0,0 +1,34 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +""" +Reporter classes. +""" + + +from datadog import api + + +class Reporter(object): + def flush(self, metrics): + raise NotImplementedError() + + +class HttpReporter(Reporter): + def __init__(self, compress_payload=False): + self.compress_payload = compress_payload + + def flush_distributions(self, distributions): + api.Distribution.send(distributions, compress_payload=self.compress_payload) + + def flush_metrics(self, metrics): + api.Metric.send(metrics, compress_payload=self.compress_payload) + + def flush_events(self, events): + for event in events: + api.Event.create(**event) + + +class GraphiteReporter(Reporter): + def flush(self, metrics): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py new file mode 100644 index 0000000..b3017a1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py @@ -0,0 +1,3 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py new file mode 100644 index 0000000..f309980 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py @@ -0,0 +1,152 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +from datetime import datetime, timedelta +from argparse import ArgumentTypeError +import json +import re +from datadog.util.format import force_to_epoch_seconds +import time + + +def comma_list(list_str, item_func=None): + if not list_str: + raise ArgumentTypeError("Invalid comma list") + item_func = item_func or (lambda i: i) + return [item_func(i.strip()) for i in list_str.split(",") if i.strip()] + + +def comma_set(list_str, item_func=None): + return set(comma_list(list_str, item_func=item_func)) + + +def comma_list_or_empty(list_str): + if not list_str: + return [] + else: + return comma_list(list_str) + + +def list_of_ints(int_csv): + if not int_csv: + raise ArgumentTypeError("Invalid list of ints") + try: + # Try as a [1, 2, 3] list + j = json.loads(int_csv) + if isinstance(j, (list, set)): + j = [int(i) for i in j] + return j + except Exception: + pass + + try: + return [int(i.strip()) for i in int_csv.strip().split(",")] + except Exception: + raise ArgumentTypeError("Invalid list of ints: {0}".format(int_csv)) + + +def list_of_ints_and_strs(csv): + def int_or_str(item): + try: + return int(item) + except ValueError: + return item + + return comma_list(csv, int_or_str) + + +def set_of_ints(int_csv): + return set(list_of_ints(int_csv)) + + +class DateParsingError(Exception): + """Thrown if parse_date exhausts all possible parsings of a string""" + + +_date_fieldre = re.compile(r"(\d+)\s?(\w+) (ago|ahead)") + + +def _midnight(): + """ Truncate a date to midnight. Default to UTC midnight today.""" + return datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) + + +def parse_date_as_epoch_timestamp(date_str): + return parse_date(date_str, to_epoch_ts=True) + + +def _parse_date_noop_formatter(d): + """ NOOP - only here for pylint """ + return d + + +def parse_date(date_str, to_epoch_ts=False): + formatter = _parse_date_noop_formatter + if to_epoch_ts: + formatter = force_to_epoch_seconds + + if isinstance(date_str, datetime): + return formatter(date_str) + elif isinstance(date_str, time.struct_time): + return formatter(datetime.fromtimestamp(time.mktime(date_str))) + + # Parse relative dates. 
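+    # Relative forms handled below: "today", "yesterday", "tomorrow", "now",
+    # and offsets such as "2 hours ago", "1h ago" or "3 days ahead"
+    # (weeks/days/hours/minutes/seconds; single-letter unit abbreviations work).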
+ if date_str == "today": + return formatter(_midnight()) + elif date_str == "yesterday": + return formatter(_midnight() - timedelta(days=1)) + elif date_str == "tomorrow": + return formatter(_midnight() + timedelta(days=1)) + elif date_str.endswith(("ago", "ahead")): + m = _date_fieldre.match(date_str) + if m: + fields = m.groups() + else: + fields = date_str.split(" ")[1:] + num = int(fields[0]) + short_unit = fields[1] + time_direction = {"ago": -1, "ahead": 1}[fields[2]] + assert short_unit, short_unit + units = ["weeks", "days", "hours", "minutes", "seconds"] + # translate 'h' -> 'hours' + short_units = dict([(u[:1], u) for u in units]) + unit = short_units.get(short_unit, short_unit) + # translate 'hour' -> 'hours' + if unit[-1] != "s": + unit += "s" # tolerate 1 hour + assert unit in units, "'%s' not in %s" % (unit, units) + return formatter(datetime.utcnow() + time_direction * timedelta(**{unit: num})) + elif date_str == "now": + return formatter(datetime.utcnow()) + + def _from_epoch_timestamp(seconds): + print("_from_epoch_timestamp({})".format(seconds)) + return datetime.utcfromtimestamp(float(seconds)) + + def _from_epoch_ms_timestamp(millis): + print("_from_epoch_ms_timestamp({})".format(millis)) + in_sec = float(millis) / 1000.0 + print("_from_epoch_ms_timestamp({}) -> {}".format(millis, in_sec)) + return _from_epoch_timestamp(in_sec) + + # Or parse date formats (most specific to least specific) + parse_funcs = [ + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%f"), + lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S"), + lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M"), + lambda d: datetime.strptime(d, "%Y-%m-%d-%H"), + lambda d: datetime.strptime(d, "%Y-%m-%d"), + lambda d: datetime.strptime(d, "%Y-%m"), + lambda d: datetime.strptime(d, "%Y"), + _from_epoch_timestamp, # an epoch in seconds + _from_epoch_ms_timestamp, # an epoch in milliseconds + ] + + for parse_func in parse_funcs: + try: + return formatter(parse_func(date_str)) + except Exception: + pass + raise DateParsingError(u"Could not parse {0} as date".format(date_str)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py new file mode 100644 index 0000000..58927d1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py @@ -0,0 +1,135 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# flake8: noqa +""" +Imports for compatibility with Python 2, Python 3 and Google App Engine. +""" +from functools import wraps +import logging +import socket +import sys + +# Logging +log = logging.getLogger("datadog.util") + +# Note: using `sys.version_info` instead of the helper functions defined here +# so that mypy detects version-specific code paths. Currently, mypy doesn't +# support try/except imports for version-specific code paths either. 
+# +# https://mypy.readthedocs.io/en/stable/common_issues.html#python-version-and-system-platform-checks + +# Python 3.x +if sys.version_info[0] >= 3: + import builtins + from collections import UserDict as IterableUserDict + import configparser + from configparser import ConfigParser + from io import StringIO + from urllib.parse import urljoin, urlparse + import urllib.request as url_lib, urllib.error, urllib.parse + + imap = map + get_input = input + text = str + + def iteritems(d): + return iter(d.items()) + + def iternext(iter): + return next(iter) + + +# Python 2.x +else: + import __builtin__ as builtins + import ConfigParser as configparser + from configparser import ConfigParser + from cStringIO import StringIO + from itertools import imap + import urllib2 as url_lib + from urlparse import urljoin, urlparse + from UserDict import IterableUserDict + + get_input = raw_input + text = unicode + + def iteritems(d): + return d.iteritems() + + def iternext(iter): + return iter.next() + + +# Python >= 3.5 +if sys.version_info >= (3, 5): + from asyncio import iscoroutinefunction +# Others +else: + + def iscoroutinefunction(*args, **kwargs): + return False + + +# Python >= 2.7 +if sys.version_info >= (2, 7): + from logging import NullHandler +# Python 2.6.x +else: + from logging import Handler + + class NullHandler(Handler): + def emit(self, record): + pass + + +def _is_py_version_higher_than(major, minor=0): + """ + Assert that the Python version is higher than `$maj.$min`. + """ + return sys.version_info >= (major, minor) + + +def is_p3k(): + """ + Assert that Python is version 3 or higher. + """ + return _is_py_version_higher_than(3) + + +def is_higher_py32(): + """ + Assert that Python is version 3.2 or higher. + """ + return _is_py_version_higher_than(3, 2) + + +def is_higher_py35(): + """ + Assert that Python is version 3.5 or higher. + """ + return _is_py_version_higher_than(3, 5) + + +def is_pypy(): + """ + Assert that PyPy is being used (regardless of 2 or 3) + """ + return "__pypy__" in sys.builtin_module_names + + +def conditional_lru_cache(func): + """ + A decorator that conditionally enables a lru_cache of size 512 if + the version of Python can support it (>3.2) and otherwise returns + the original function + """ + if not is_higher_py32(): + return func + + log.debug("Enabling LRU cache for function %s", func.__name__) + + # pylint: disable=import-outside-toplevel + from functools import lru_cache + + return lru_cache(maxsize=512)(func) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py new file mode 100644 index 0000000..cd186bc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py @@ -0,0 +1,148 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +import os +import string +import sys + +# datadog +from datadog.util.compat import configparser, StringIO, is_p3k +from datadog.version import __version__ + +# CONSTANTS +DATADOG_CONF = "datadog.conf" + + +class CfgNotFound(Exception): + pass + + +class PathNotFound(Exception): + pass + + +def get_os(): + "Human-friendly OS name" + if sys.platform == "darwin": + return "mac" + elif sys.platform.find("freebsd") != -1: + return "freebsd" + elif sys.platform.find("linux") != -1: + return "linux" + elif sys.platform.find("win32") != -1: + return "windows" + elif sys.platform.find("sunos") != -1: + return "solaris" + else: + return sys.platform + + +def skip_leading_wsp(f): + "Works on a file, returns a file-like object" + if is_p3k(): + return StringIO("\n".join(x.strip(" ") for x in f.readlines())) + else: + return StringIO("\n".join(map(string.strip, f.readlines()))) + + +def _windows_commondata_path(): + """Return the common appdata path, using ctypes + From http://stackoverflow.com/questions/626796/\ + how-do-i-find-the-windows-common-application-data-folder-using-python + """ + import ctypes + from ctypes import wintypes, windll + + CSIDL_COMMON_APPDATA = 35 + + _SHGetFolderPath = windll.shell32.SHGetFolderPathW + _SHGetFolderPath.argtypes = [wintypes.HWND, ctypes.c_int, wintypes.HANDLE, wintypes.DWORD, wintypes.LPCWSTR] + + path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH) + _SHGetFolderPath(0, CSIDL_COMMON_APPDATA, 0, 0, path_buf) + return path_buf.value + + +def _windows_config_path(): + common_data = _windows_commondata_path() + path = os.path.join(common_data, "Datadog", DATADOG_CONF) + if os.path.exists(path): + return path + raise PathNotFound(path) + + +def _unix_config_path(): + path = os.path.join("/etc/dd-agent", DATADOG_CONF) + if os.path.exists(path): + return path + raise PathNotFound(path) + + +def _mac_config_path(): + path = os.path.join("~/.datadog-agent/agent", DATADOG_CONF) + path = os.path.expanduser(path) + if os.path.exists(path): + return path + raise PathNotFound(path) + + +def get_config_path(cfg_path=None, os_name=None): + # Check if there's an override and if it exists + if cfg_path is not None and os.path.exists(cfg_path): + return cfg_path + + if os_name is None: + os_name = get_os() + + # Check for an OS-specific path, continue on not-found exceptions + if os_name == "windows": + return _windows_config_path() + elif os_name == "mac": + return _mac_config_path() + else: + return _unix_config_path() + + +def get_config(cfg_path=None, options=None): + agentConfig = {} + + # Config handling + try: + # Find the right config file + path = os.path.realpath(__file__) + path = os.path.dirname(path) + + config_path = get_config_path(cfg_path, os_name=get_os()) + config = configparser.ConfigParser() + with open(config_path) as config_file: + if is_p3k(): + config.read_file(skip_leading_wsp(config_file)) + else: + config.readfp(skip_leading_wsp(config_file)) + + # bulk import + for option in config.options("Main"): + agentConfig[option] = config.get("Main", option) + + except Exception: + raise CfgNotFound + + return agentConfig + + +def get_pkg_version(): + """ + Resolve `datadog` package version. + + Deprecated: use `datadog.__version__` directly instead + """ + return __version__ + + +def get_version(): + """ + Resolve `datadog` package version. 
+ + Deprecated: use `datadog.__version__` directly instead + """ + return __version__ diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py new file mode 100644 index 0000000..57673ef --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py @@ -0,0 +1,24 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc + +import warnings +from functools import wraps + + +def deprecated(message): + def deprecated_decorator(func): + @wraps(func) + def deprecated_func(*args, **kwargs): + warnings.warn( + "'{0}' is a deprecated function. {1}".format(func.__name__, message), + category=DeprecationWarning, + stacklevel=2, + ) + warnings.simplefilter('default', DeprecationWarning) + + return func(*args, **kwargs) + + return deprecated_func + + return deprecated_decorator diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py new file mode 100644 index 0000000..f6b1e96 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2015-Present Datadog, Inc +# stdlib +import calendar +import datetime +import json +import re + +from datadog.util.compat import conditional_lru_cache + +TAG_INVALID_CHARS_RE = re.compile(r"[^\w\d_\-:/\.]", re.UNICODE) +TAG_INVALID_CHARS_SUBS = "_" + + +def pretty_json(obj): + return json.dumps(obj, sort_keys=True, indent=2) + + +def construct_url(host, api_version, path): + return "{}/api/{}/{}".format(host.strip("/"), api_version.strip("/"), path.strip("/")) + + +def construct_path(api_version, path): + return "{}/{}".format(api_version.strip("/"), path.strip("/")) + + +def force_to_epoch_seconds(epoch_sec_or_dt): + if isinstance(epoch_sec_or_dt, datetime.datetime): + return calendar.timegm(epoch_sec_or_dt.timetuple()) + return epoch_sec_or_dt + + +@conditional_lru_cache +def _normalize_tags_with_cache(tag_list): + return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list] + + +def normalize_tags(tag_list): + # We have to turn our input tag list into a non-mutable tuple for it to + # be hashable (and thus usable) by the @lru_cache decorator. + return _normalize_tags_with_cache(tuple(tag_list)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py new file mode 100644 index 0000000..6a1f857 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py @@ -0,0 +1,305 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2015-Present Datadog, Inc +# stdlib +import json +import logging +import re +import socket +import subprocess +import types +from typing import Dict, Optional + +# datadog +from datadog.util.compat import url_lib, is_p3k, iteritems +from datadog.util.config import get_config, get_os, CfgNotFound + +VALID_HOSTNAME_RFC_1123_PATTERN = re.compile( + r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$" +) # noqa +MAX_HOSTNAME_LEN = 255 + +log = logging.getLogger("datadog.api") + + +def is_valid_hostname(hostname): + if hostname.lower() in set( + [ + "localhost", + "localhost.localdomain", + "localhost6.localdomain6", + "ip6-localhost", + ] + ): + log.warning("Hostname: %s is local" % hostname) + return False + if len(hostname) > MAX_HOSTNAME_LEN: + log.warning("Hostname: %s is too long (max length is %s characters)" % (hostname, MAX_HOSTNAME_LEN)) + return False + if VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is None: + log.warning("Hostname: %s is not complying with RFC 1123" % hostname) + return False + return True + + +def get_hostname(hostname_from_config): + # type: (bool) -> Optional[str] + """ + Get the canonical host name this agent should identify as. This is + the authoritative source of the host name for the agent. + + Tries, in order: + + * agent config (datadog.conf, "hostname:") + * 'hostname -f' (on unix) + * socket.gethostname() + """ + + hostname = None + config = None + + # first, try the config if hostname_from_config is set to True + try: + if hostname_from_config: + config = get_config() + config_hostname = config.get("hostname") + if config_hostname and is_valid_hostname(config_hostname): + log.warning( + "Hostname lookup from agent configuration will be deprecated " + "in an upcoming version of datadogpy. Set hostname_from_config to False " + "to get rid of this warning" + ) + return config_hostname + except CfgNotFound: + log.info("No agent or invalid configuration file found") + + # Try to get GCE instance name + if hostname is None: + gce_hostname = GCE.get_hostname(config) + if gce_hostname is not None: + if is_valid_hostname(gce_hostname): + return gce_hostname + # then move on to os-specific detection + if hostname is None: + + def _get_hostname_unix(): + try: + # try fqdn + p = subprocess.Popen(["/bin/hostname", "-f"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) + out, err = p.communicate() + if p.returncode == 0: + if is_p3k(): + return out.decode("utf-8").strip() + else: + return out.strip() + except Exception: + return None + + os_name = get_os() + if os_name in ["mac", "freebsd", "linux", "solaris"]: + unix_hostname = _get_hostname_unix() + if unix_hostname and is_valid_hostname(unix_hostname): + hostname = unix_hostname + + # if we have an ec2 default hostname, see if there's an instance-id available + if hostname is not None and True in [hostname.lower().startswith(p) for p in [u"ip-", u"domu"]]: + instanceid = EC2.get_instance_id(config) + if instanceid: + hostname = instanceid + + # fall back on socket.gethostname(), socket.getfqdn() is too unreliable + if hostname is None: + try: + socket_hostname = socket.gethostname() # type: Optional[str] + except socket.error: + socket_hostname = None + if socket_hostname and is_valid_hostname(socket_hostname): + hostname = socket_hostname + + if hostname is None: + log.warning( + u"Unable to reliably determine host name. You can define one in your `hosts` file, " + u"or in `datadog.conf` file if you have Datadog Agent installed." 
+ ) + + return hostname + + +def get_ec2_instance_id(): + try: + # Remember the previous default timeout + old_timeout = socket.getdefaulttimeout() + + # Try to query the EC2 internal metadata service, but fail fast + socket.setdefaulttimeout(0.25) + + try: + return url_lib.urlopen(url_lib.Request("http://169.254.169.254/latest/" "meta-data/instance-id")).read() + finally: + # Reset the previous default timeout + socket.setdefaulttimeout(old_timeout) + except Exception: + return socket.gethostname() + + +class GCE(object): + URL = "http://169.254.169.254/computeMetadata/v1/?recursive=true" + TIMEOUT = 0.1 # second + SOURCE_TYPE_NAME = "google cloud platform" + metadata = None + + @staticmethod + def _get_metadata(agentConfig): + if GCE.metadata is not None: + return GCE.metadata + + if not agentConfig["collect_instance_metadata"]: + log.info("Instance metadata collection is disabled. Not collecting it.") + GCE.metadata = {} + return GCE.metadata + + socket_to = None + try: + socket_to = socket.getdefaulttimeout() + socket.setdefaulttimeout(GCE.TIMEOUT) + except Exception: + pass + + try: + opener = url_lib.build_opener() + opener.addheaders = [("X-Google-Metadata-Request", "True")] + GCE.metadata = json.loads(opener.open(GCE.URL).read().strip()) + + except Exception: + GCE.metadata = {} + + try: + if socket_to is None: + socket_to = 3 + socket.setdefaulttimeout(socket_to) + except Exception: + pass + return GCE.metadata + + @staticmethod + def get_hostname(agentConfig): + try: + host_metadata = GCE._get_metadata(agentConfig) + return host_metadata["instance"]["hostname"].split(".")[0] + except Exception: + return None + + +class EC2(object): + """Retrieve EC2 metadata""" + + URL = "http://169.254.169.254/latest/meta-data" + TIMEOUT = 0.1 # second + metadata = {} # type: Dict[str, str] + + @staticmethod + def get_tags(agentConfig): + if not agentConfig["collect_instance_metadata"]: + log.info("Instance metadata collection is disabled. Not collecting it.") + return [] + + socket_to = None + try: + socket_to = socket.getdefaulttimeout() + socket.setdefaulttimeout(EC2.TIMEOUT) + except Exception: + pass + + try: + iam_role = url_lib.urlopen(EC2.URL + "/iam/security-credentials").read().strip() + iam_params = json.loads( + url_lib.urlopen(EC2.URL + "/iam/security-credentials" + "/" + str(iam_role)).read().strip() + ) + from boto.ec2.connection import EC2Connection + + connection = EC2Connection( + aws_access_key_id=iam_params["AccessKeyId"], + aws_secret_access_key=iam_params["SecretAccessKey"], + security_token=iam_params["Token"], + ) + instance_object = connection.get_only_instances([EC2.metadata["instance-id"]])[0] + + EC2_tags = [u"%s:%s" % (tag_key, tag_value) for tag_key, tag_value in iteritems(instance_object.tags)] + + except Exception: + log.exception("Problem retrieving custom EC2 tags") + EC2_tags = [] + + try: + if socket_to is None: + socket_to = 3 + socket.setdefaulttimeout(socket_to) + except Exception: + pass + + return EC2_tags + + @staticmethod + def get_metadata(agentConfig): + """Use the ec2 http service to introspect the instance. 
This adds latency \ + if not running on EC2 + """ + # >>> import urllib2 + # >>> urllib2.urlopen('http://169.254.169.254/latest/', timeout=1).read() + # 'meta-data\nuser-data' + # >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data', timeout=1).read() + # 'ami-id\nami-launch-index\nami-manifest-path\nhostname\ninstance-id\nlocal-ipv4\ + # npublic-keys/\nreservation-id\nsecurity-groups' + # >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id', + # timeout=1).read() + # 'i-deadbeef' + + # Every call may add TIMEOUT seconds in latency so don't abuse this call + # python 2.4 does not support an explicit timeout argument so force it here + # Rather than monkey-patching urllib2, just lower the timeout globally for these calls + + if not agentConfig["collect_instance_metadata"]: + log.info("Instance metadata collection is disabled. Not collecting it.") + return {} + + socket_to = None + try: + socket_to = socket.getdefaulttimeout() + socket.setdefaulttimeout(EC2.TIMEOUT) + except Exception: + pass + + for k in ( + "instance-id", + "hostname", + "local-hostname", + "public-hostname", + "ami-id", + "local-ipv4", + "public-keys", + "public-ipv4", + "reservation-id", + "security-groups", + ): + try: + v = url_lib.urlopen(EC2.URL + "/" + str(k)).read().strip() + assert type(v) in (types.StringType, types.UnicodeType) and len(v) > 0, "%s is not a string" % v + EC2.metadata[k] = v + except Exception: + pass + + try: + if socket_to is None: + socket_to = 3 + socket.setdefaulttimeout(socket_to) + except Exception: + pass + + return EC2.metadata + + @staticmethod + def get_instance_id(agentConfig): + try: + return EC2.get_metadata(agentConfig).get("instance-id", None) + except Exception: + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py new file mode 100644 index 0000000..3158ac8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py @@ -0,0 +1 @@ +__version__ = "0.48.0" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE new file mode 100644 index 0000000..8263325 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE @@ -0,0 +1,203 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Datadog, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +SPDX-License-Identifier: Apache-2.0 \ No newline at end of file diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA new file mode 100644 index 0000000..b1eba7e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: datadog-lambda +Version: 5.87.0 +Summary: The Datadog AWS Lambda Library +Home-page: https://github.com/DataDog/datadog-lambda-python +License: Apache-2.0 +Keywords: datadog,aws,lambda,layer +Author: Datadog, Inc. +Author-email: dev@datadoghq.com +Requires-Python: >=3.8.0,<4 +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Provides-Extra: dev +Requires-Dist: boto3 (>=1.28.0,<2.0.0) ; extra == "dev" +Requires-Dist: datadog (>=0.41.0,<1.0.0) +Requires-Dist: ddtrace (>=2.3.1) +Requires-Dist: flake8 (>=5.0.4,<6.0.0) ; extra == "dev" +Requires-Dist: importlib_metadata ; python_version < "3.8" +Requires-Dist: nose2 (>=0.9.1,<0.10.0) ; extra == "dev" +Requires-Dist: requests (>=2.22.0,<3.0.0) ; extra == "dev" +Requires-Dist: typing_extensions (>=4.0,<5.0) ; python_version < "3.8" +Requires-Dist: urllib3 (<2.0.0) ; python_version < "3.11" +Requires-Dist: urllib3 (<2.1.0) ; python_version >= "3.11" +Requires-Dist: wrapt (>=1.11.2,<2.0.0) +Project-URL: Repository, https://github.com/DataDog/datadog-lambda-python +Description-Content-Type: text/markdown + +# datadog-lambda-python + +![build](https://github.com/DataDog/datadog-lambda-python/workflows/build/badge.svg) +[![PyPI](https://img.shields.io/pypi/v/datadog-lambda)](https://pypi.org/project/datadog-lambda/) +![PyPI - Python Version](https://img.shields.io/pypi/pyversions/datadog-lambda) +[![Slack](https://chat.datadoghq.com/badge.svg?bg=632CA6)](https://chat.datadoghq.com/) +[![License](https://img.shields.io/badge/license-Apache--2.0-blue)](https://github.com/DataDog/datadog-lambda-python/blob/main/LICENSE) + +Datadog Lambda Library for Python (3.8, 3.9, 3.10, 3.11, and 3.12) enables [enhanced Lambda metrics](https://docs.datadoghq.com/serverless/enhanced_lambda_metrics), [distributed tracing](https://docs.datadoghq.com/serverless/distributed_tracing), and [custom metric submission](https://docs.datadoghq.com/serverless/custom_metrics) from AWS Lambda functions. + +## Installation + +Follow the [installation instructions](https://docs.datadoghq.com/serverless/installation/python/), and view your function's enhanced metrics, traces and logs in Datadog. + +## Configuration + +Follow the [configuration instructions](https://docs.datadoghq.com/serverless/configuration) to tag your telemetry, capture request/response payloads, filter or scrub sensitive information from logs or traces, and more. + +For additional tracing configuration options, check out the [official documentation for Datadog trace client](https://ddtrace.readthedocs.io/en/stable/configuration.html). + +Besides the environment variables supported by dd-trace-py, the datadog-lambda-python library added following environment variables. 
+
+| Environment Variables | Description | Default Value |
+| -------------------- | ------------ | ------------- |
+| DD_ENCODE_AUTHORIZER_CONTEXT | When set to `true` for Lambda authorizers, the tracing context will be encoded into the response for propagation. Supported for NodeJS and Python. | `true` |
+| DD_DECODE_AUTHORIZER_CONTEXT | When set to `true` for Lambdas that are authorized via Lambda authorizers, it will parse and use the encoded tracing context (if found). Supported for NodeJS and Python. | `true` |
+| DD_COLD_START_TRACING | Set to `false` to disable Cold Start Tracing. Used in NodeJS and Python. | `true` |
+| DD_MIN_COLD_START_DURATION | Sets the minimum duration (in milliseconds) for a module load event to be traced via Cold Start Tracing. Number. | `3` |
+| DD_COLD_START_TRACE_SKIP_LIB | optionally skip creating Cold Start Spans for a comma-separated list of libraries. Useful to limit depth or skip known libraries. | `ddtrace.internal.compat,ddtrace.filters` |
+| DD_CAPTURE_LAMBDA_PAYLOAD | [Captures incoming and outgoing AWS Lambda payloads][1] in the Datadog APM spans for Lambda invocations. | `false` |
+| DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH | Determines the level of detail captured from AWS Lambda payloads, which are then assigned as tags for the `aws.lambda` span. It specifies the nesting depth of the JSON payload structure to process. Once the specified maximum depth is reached, the tag's value is set to the stringified value of any nested elements beyond this level.<br>For example, given the input payload:<br>{<br>"lv1" : {<br>"lv2": {<br>"lv3": "val"<br>}<br>}<br>}<br>If the depth is set to `2`, the resulting tag's key is set to `function.request.lv1.lv2` and the value is `{\"lv3\": \"val\"}`.<br>If the depth is set to `0`, the resulting tag's key is set to `function.request` and value is `{\"lv1\":{\"lv2\":{\"lv3\": \"val\"}}}` | `10` |
+
+
+## Opening Issues
+
+If you encounter a bug with this package, we want to hear about it. Before opening a new issue, search the existing issues to avoid duplicates.
+
+When opening an issue, include the Datadog Lambda Library version, Python version, and stack trace if available. In addition, include the steps to reproduce when appropriate.
+
+You can also open an issue for a feature request.
+
+## Lambda Profiling Beta
+
+Datadog's [Continuous Profiler](https://www.datadoghq.com/product/code-profiling/) is now available in beta for Python in version 4.62.0 and layer version 62 and above. This optional feature is enabled by setting the `DD_PROFILING_ENABLED` environment variable to `true`. During the beta period, profiling is available at no additional cost.
+
+The Continuous Profiler works by spawning a thread which periodically wakes up and takes a snapshot of the CPU and Heap of all running python code. This can include the profiler itself. If you want the Profiler to ignore itself, set `DD_PROFILING_IGNORE_PROFILER` to `true`.
+
+## Major Version Notes
+
+### 5.x / Layer version 86+
+- Python3.7 support has been [deprecated](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html) by AWS, and support removed from this library.
+
+### 4.x / Layer version 61+
+
+- Python3.6 support has been [deprecated](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html) by AWS, and support removed from this library.
+- `dd-trace` upgraded from 0.61 to 1.4, full release notes are available [here](https://ddtrace.readthedocs.io/en/stable/release_notes.html#v1-0-0)
+  - `get_correlation_ids()` has been changed to `get_log_correlation_context()`, which now returns a dictionary containing the active `span_id`, `trace_id`, as well as `service` and `env`.
+
+## Contributing
+
+If you find an issue with this package and have a fix, please feel free to open a pull request following the [procedures](CONTRIBUTING.md).
+
+## Community
+
+For product feedback and questions, join the `#serverless` channel in the [Datadog community on Slack](https://chat.datadoghq.com/).
+
+## License
+
+Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
+
+This product includes software developed at Datadog (https://www.datadoghq.com/). Copyright 2019 Datadog, Inc.
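As a hedged, editor-added illustration of the configuration table above (not part of the upstream README): one way these variables might be applied to an already-deployed function is with `boto3`'s `update_function_configuration` call. The function name below is hypothetical, and the `Environment` block replaces whatever variables are already set on the function, so any existing variables would need to be merged in first.

```python
# Sketch only: apply the Datadog environment variables documented above to an
# existing Lambda function. Assumes AWS credentials are configured and that a
# function named "my-instrumented-function" (hypothetical) exists.
import boto3

lambda_client = boto3.client("lambda")

lambda_client.update_function_configuration(
    FunctionName="my-instrumented-function",  # hypothetical name
    Environment={
        "Variables": {
            "DD_COLD_START_TRACING": "true",        # keep cold start tracing on
            "DD_MIN_COLD_START_DURATION": "3",      # trace module loads taking >= 3 ms
            "DD_CAPTURE_LAMBDA_PAYLOAD": "false",   # leave payload capture disabled
            "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH": "10",
        }
    },
)
```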
+ +[1]: https://www.datadoghq.com/blog/troubleshoot-lambda-function-request-response-payloads/ + diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD new file mode 100644 index 0000000..a275ee4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD @@ -0,0 +1,44 @@ +datadog_lambda-5.87.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +datadog_lambda-5.87.0.dist-info/LICENSE,sha256=4yQmjpKp1MKL7DdRDPVHkKYc2W0aezm5SIDske8oAdM,11379 +datadog_lambda-5.87.0.dist-info/METADATA,sha256=s2sPTache99ImWP7igPhfpBOuNtxNUceNYeioFEi06w,7326 +datadog_lambda-5.87.0.dist-info/RECORD,, +datadog_lambda-5.87.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +datadog_lambda-5.87.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88 +datadog_lambda/__init__.py,sha256=fE7XRhgzSgTCjt1AAocMbqtAJM9pZiLbbcCi8BWE3fQ,538 +datadog_lambda/__pycache__/__init__.cpython-311.pyc,, +datadog_lambda/__pycache__/api.cpython-311.pyc,, +datadog_lambda/__pycache__/cold_start.cpython-311.pyc,, +datadog_lambda/__pycache__/constants.cpython-311.pyc,, +datadog_lambda/__pycache__/dogstatsd.cpython-311.pyc,, +datadog_lambda/__pycache__/extension.cpython-311.pyc,, +datadog_lambda/__pycache__/handler.cpython-311.pyc,, +datadog_lambda/__pycache__/metric.cpython-311.pyc,, +datadog_lambda/__pycache__/module_name.cpython-311.pyc,, +datadog_lambda/__pycache__/patch.cpython-311.pyc,, +datadog_lambda/__pycache__/stats_writer.cpython-311.pyc,, +datadog_lambda/__pycache__/statsd_writer.cpython-311.pyc,, +datadog_lambda/__pycache__/tag_object.cpython-311.pyc,, +datadog_lambda/__pycache__/tags.cpython-311.pyc,, +datadog_lambda/__pycache__/thread_stats_writer.cpython-311.pyc,, +datadog_lambda/__pycache__/tracing.cpython-311.pyc,, +datadog_lambda/__pycache__/trigger.cpython-311.pyc,, +datadog_lambda/__pycache__/wrapper.cpython-311.pyc,, +datadog_lambda/__pycache__/xray.cpython-311.pyc,, +datadog_lambda/api.py,sha256=TFg7gCek088_C53cZQQHDoLXGlTAhP2AD8NAuWYOVco,3653 +datadog_lambda/cold_start.py,sha256=aGpWlgPdMvQkyK9kVz5pEoLIxrVa0AqZoOy5ABXyXzA,7891 +datadog_lambda/constants.py,sha256=DeujbnguBT9nDioiaYlgQQdZ6Ps53sWXmYhruLVoCHE,1669 +datadog_lambda/dogstatsd.py,sha256=HCyl72oQUSF3E4y1ivrHaGTHL9WG1asGjB1Xo2D_Abc,4769 +datadog_lambda/extension.py,sha256=zQaBioG0TrWtZvk8c9z7ANUJt_oMzeAPMG-mGPL_cMw,1199 +datadog_lambda/handler.py,sha256=r2MiZoIfTWuVAN-f6iXXIjhdtd1t7m9bTnwplVm2SEY,994 +datadog_lambda/metric.py,sha256=jk4jRgb0pwxd_c4D2zzAZ3olN_8ci64fpYk3cuxbg0U,4707 +datadog_lambda/module_name.py,sha256=5FmOCjjgjq78b6a83QePZZFmqahAoy9XHdUNWdq2D1Q,139 +datadog_lambda/patch.py,sha256=Hr_zeekk9PeAizTDFoZ_ZwTWptjgtKjl9A-XHX5kA1k,4641 +datadog_lambda/stats_writer.py,sha256=SIac96wu45AxDOZ4GraCbK3r1RKr4AFgXcEPHg1VX0A,243 +datadog_lambda/statsd_writer.py,sha256=F4SCJ6-J6YfvQNh0uQfAkP6QYiAtV3-MCsxz4QnaBBI,403 +datadog_lambda/tag_object.py,sha256=Kcys4Mo4_4vdXxq4XS7ilWpCuSQQyVRSjDejgq6RJS4,2112 +datadog_lambda/tags.py,sha256=wIG1f5iq85dq3FNV-yi-D0XwqYOx8jE0x_8Re6Ucmso,3240 +datadog_lambda/thread_stats_writer.py,sha256=fkjMDgrzwACrK_ZrCwl9mHz5U3CMLEyrsaondjdM3r8,2522 +datadog_lambda/tracing.py,sha256=r9H77-RNsmXxHA3k8yaOzmShYD4FtYtz3yrbDin36cQ,46854 +datadog_lambda/trigger.py,sha256=_Sxpy9UpMDHdw_X1hD61G4OTex7CIYQw1guFu6dzByo,12082 +datadog_lambda/wrapper.py,sha256=NrM6_TCWi4sjIHSSGAjAZV7hdF8PxJwCurRQUCnjspo,15547 
+datadog_lambda/xray.py,sha256=05-8xd3GOOIDtGaB4k2Ow1kbWn86Px2mhyKEUYIwKIc,3448 diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL new file mode 100644 index 0000000..258a6ff --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.6.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py new file mode 100644 index 0000000..20b4244 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py @@ -0,0 +1,17 @@ +import os +import logging +from datadog_lambda.cold_start import initialize_cold_start_tracing + +initialize_cold_start_tracing() + +# The minor version corresponds to the Lambda layer version. +# E.g.,, version 0.5.0 gets packaged into layer version 5. +try: + import importlib.metadata as importlib_metadata +except ModuleNotFoundError: + import importlib_metadata + +__version__ = importlib_metadata.version(__name__) + +logger = logging.getLogger(__name__) +logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper())) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py new file mode 100644 index 0000000..079f69d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py @@ -0,0 +1,93 @@ +import os +import logging +import base64 +from datadog_lambda.extension import should_use_extension + +logger = logging.getLogger(__name__) +KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName" + + +def decrypt_kms_api_key(kms_client, ciphertext): + from botocore.exceptions import ClientError + + """ + Decodes and deciphers the base64-encoded ciphertext given as a parameter using KMS. + For this to work properly, the Lambda function must have the appropriate IAM permissions. + + Args: + kms_client: The KMS client to use for decryption + ciphertext (string): The base64-encoded ciphertext to decrypt + """ + decoded_bytes = base64.b64decode(ciphertext) + + """ + When the API key is encrypted using the AWS console, the function name is added as an + encryption context. When the API key is encrypted using the AWS CLI, no encryption context + is added. We need to try decrypting the API key both with and without the encryption context. 
+ """ + # Try without encryption context, in case API key was encrypted using the AWS CLI + function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME") + try: + plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[ + "Plaintext" + ].decode("utf-8") + except ClientError: + logger.debug( + "Failed to decrypt ciphertext without encryption context, \ + retrying with encryption context" + ) + # Try with encryption context, in case API key was encrypted using the AWS Console + plaintext = kms_client.decrypt( + CiphertextBlob=decoded_bytes, + EncryptionContext={ + KMS_ENCRYPTION_CONTEXT_KEY: function_name, + }, + )["Plaintext"].decode("utf-8") + + return plaintext + + +def init_api(): + if ( + not should_use_extension + and not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true" + ): + # Make sure that this package would always be lazy-loaded/outside from the critical path + # since underlying packages are quite heavy to load + # and useless when the extension is present + from datadog import api + + if not api._api_key: + import boto3 + + DD_API_KEY_SECRET_ARN = os.environ.get("DD_API_KEY_SECRET_ARN", "") + DD_API_KEY_SSM_NAME = os.environ.get("DD_API_KEY_SSM_NAME", "") + DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "") + DD_API_KEY = os.environ.get( + "DD_API_KEY", os.environ.get("DATADOG_API_KEY", "") + ) + + if DD_API_KEY_SECRET_ARN: + api._api_key = boto3.client("secretsmanager").get_secret_value( + SecretId=DD_API_KEY_SECRET_ARN + )["SecretString"] + elif DD_API_KEY_SSM_NAME: + api._api_key = boto3.client("ssm").get_parameter( + Name=DD_API_KEY_SSM_NAME, WithDecryption=True + )["Parameter"]["Value"] + elif DD_KMS_API_KEY: + kms_client = boto3.client("kms") + api._api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY) + else: + api._api_key = DD_API_KEY + + logger.debug("Setting DATADOG_API_KEY of length %d", len(api._api_key)) + + # Set DATADOG_HOST, to send data to a non-default Datadog datacenter + api._api_host = os.environ.get( + "DATADOG_HOST", "https://api." 
+ os.environ.get("DD_SITE", "datadoghq.com") + ) + logger.debug("Setting DATADOG_HOST to %s", api._api_host) + + # Unmute exceptions from datadog api client, so we can catch and handle them + api._mute = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py new file mode 100644 index 0000000..9da02e7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py @@ -0,0 +1,252 @@ +import time +import os +from typing import List, Hashable +import logging + +logger = logging.getLogger(__name__) + +_cold_start = True +_proactive_initialization = False +_lambda_container_initialized = False + + +def set_cold_start(init_timestamp_ns): + """Set the value of the cold start global + + This should be executed once per Lambda execution before the execution + """ + global _cold_start + global _lambda_container_initialized + global _proactive_initialization + if not _lambda_container_initialized: + now = time.time_ns() + if (now - init_timestamp_ns) // 1_000_000_000 > 10: + _cold_start = False + _proactive_initialization = True + else: + _cold_start = not _lambda_container_initialized + else: + _cold_start = False + _proactive_initialization = False + _lambda_container_initialized = True + + +def is_cold_start(): + """Returns the value of the global cold_start""" + return _cold_start + + +def is_proactive_init(): + """Returns the value of the global proactive_initialization""" + return _proactive_initialization + + +def is_new_sandbox(): + return is_cold_start() or is_proactive_init() + + +def get_cold_start_tag(): + """Returns the cold start tag to be used in metrics""" + return "cold_start:{}".format(str(is_cold_start()).lower()) + + +def get_proactive_init_tag(): + """Returns the proactive init tag to be used in metrics""" + return "proactive_initialization:{}".format(str(is_proactive_init()).lower()) + + +class ImportNode(object): + def __init__(self, module_name, full_file_path, start_time_ns, end_time_ns=None): + self.module_name = module_name + self.full_file_path = full_file_path + self.start_time_ns = start_time_ns + self.end_time_ns = end_time_ns + self.children = [] + + +root_nodes: List[ImportNode] = [] +import_stack: List[ImportNode] = [] +already_wrapped_loaders = set() + + +def reset_node_stacks(): + global root_nodes + root_nodes = [] + global import_stack + import_stack = [] + + +def push_node(module_name, file_path): + node = ImportNode(module_name, file_path, time.time_ns()) + global import_stack + if import_stack: + import_stack[-1].children.append(node) + import_stack.append(node) + + +def pop_node(module_name): + global import_stack + if not import_stack: + return + node = import_stack.pop() + if node.module_name != module_name: + return + end_time_ns = time.time_ns() + node.end_time_ns = end_time_ns + if not import_stack: # import_stack empty, a root node has been found + global root_nodes + root_nodes.append(node) + + +def wrap_exec_module(original_exec_module): + def wrapped_method(module): + should_pop = False + try: + spec = module.__spec__ + push_node(spec.name, spec.origin) + should_pop = True + except Exception: + pass + try: + return original_exec_module(module) + finally: + if should_pop: + pop_node(spec.name) + + return wrapped_method + + +def wrap_find_spec(original_find_spec): + def wrapped_find_spec(*args, **kwargs): + spec = original_find_spec(*args, **kwargs) + if spec is None: + return None + loader = getattr(spec, "loader", None) + if ( + loader is not 
None + and isinstance(loader, Hashable) + and loader not in already_wrapped_loaders + ): + if hasattr(loader, "exec_module"): + try: + loader.exec_module = wrap_exec_module(loader.exec_module) + already_wrapped_loaders.add(loader) + except Exception as e: + logger.debug("Failed to wrap the loader. %s", e) + return spec + + return wrapped_find_spec + + +def initialize_cold_start_tracing(): + if ( + is_new_sandbox() + and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true" + and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true" + ): + from sys import meta_path + + for importer in meta_path: + try: + importer.find_spec = wrap_find_spec(importer.find_spec) + except Exception: + pass + + +class ColdStartTracer(object): + def __init__( + self, + tracer, + function_name, + current_span_start_time_ns, + trace_ctx, + min_duration_ms: int, + ignored_libs: List[str] = None, + ): + if ignored_libs is None: + ignored_libs = [] + self._tracer = tracer + self.function_name = function_name + self.current_span_start_time_ns = current_span_start_time_ns + self.min_duration_ms = min_duration_ms + self.trace_ctx = trace_ctx + self.ignored_libs = ignored_libs + self.need_to_reactivate_context = True + + def trace(self, root_nodes: List[ImportNode] = root_nodes): + if not root_nodes: + return + cold_start_span_start_time_ns = root_nodes[0].start_time_ns + cold_start_span_end_time_ns = min( + root_nodes[-1].end_time_ns, self.current_span_start_time_ns + ) + cold_start_span = self.create_cold_start_span(cold_start_span_start_time_ns) + while root_nodes: + root_node = root_nodes.pop() + self.trace_tree(root_node, cold_start_span) + self.finish_span(cold_start_span, cold_start_span_end_time_ns) + + def trace_tree(self, import_node: ImportNode, parent_span): + if ( + import_node.end_time_ns - import_node.start_time_ns + < self.min_duration_ms * 1e6 + or import_node.module_name in self.ignored_libs + ): + return + + span = self.start_span( + "aws.lambda.import", import_node.module_name, import_node.start_time_ns + ) + tags = { + "resource_names": import_node.module_name, + "resource.name": import_node.module_name, + "filename": import_node.full_file_path, + "operation_name": self.get_operation_name(import_node.full_file_path), + } + span.set_tags(tags) + if parent_span: + span.parent_id = parent_span.span_id + for child_node in import_node.children: + self.trace_tree(child_node, span) + self.finish_span(span, import_node.end_time_ns) + + def create_cold_start_span(self, start_time_ns): + span = self.start_span("aws.lambda.load", self.function_name, start_time_ns) + tags = { + "resource_names": self.function_name, + "resource.name": self.function_name, + "operation_name": "aws.lambda.load", + } + span.set_tags(tags) + return span + + def start_span(self, span_type, resource, start_time_ns): + if self.need_to_reactivate_context: + self._tracer.context_provider.activate( + self.trace_ctx + ) # reactivate required after each finish() call + self.need_to_reactivate_context = False + span_kwargs = { + "service": "aws.lambda", + "resource": resource, + "span_type": span_type, + } + span = self._tracer.trace(span_type, **span_kwargs) + span.start_ns = start_time_ns + return span + + def finish_span(self, span, finish_time_ns): + span.finish(finish_time_ns / 1e9) + self.need_to_reactivate_context = True + + def get_operation_name(self, filename: str): + if filename is None: + return "aws.lambda.import_core_module" + if not isinstance(filename, str): + return "aws.lambda.import" + if 
filename.startswith("/opt/"): + return "aws.lambda.import_layer" + elif filename.startswith("/var/lang/"): + return "aws.lambda.import_runtime" + else: + return "aws.lambda.import" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py new file mode 100644 index 0000000..fd8afb3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py @@ -0,0 +1,53 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +# Datadog trace sampling priority + + +class SamplingPriority(object): + USER_REJECT = -1 + AUTO_REJECT = 0 + AUTO_KEEP = 1 + USER_KEEP = 2 + + +# Datadog trace headers +class TraceHeader(object): + TRACE_ID = "x-datadog-trace-id" + PARENT_ID = "x-datadog-parent-id" + SAMPLING_PRIORITY = "x-datadog-sampling-priority" + + +# X-Ray subsegment to save Datadog trace metadata +class XraySubsegment(object): + NAME = "datadog-metadata" + TRACE_KEY = "trace" + LAMBDA_FUNCTION_TAGS_KEY = "lambda_function_tags" + NAMESPACE = "datadog" + + +# TraceContextSource of datadog context. The DD_MERGE_XRAY_TRACES +# feature uses this to determine when to use X-Ray as the parent +# trace. +class TraceContextSource(object): + XRAY = "xray" + EVENT = "event" + DDTRACE = "ddtrace" + + +# X-Ray deamon +class XrayDaemon(object): + XRAY_TRACE_ID_HEADER_NAME = "_X_AMZN_TRACE_ID" + XRAY_DAEMON_ADDRESS = "AWS_XRAY_DAEMON_ADDRESS" + FUNCTION_NAME_HEADER_NAME = "AWS_LAMBDA_FUNCTION_NAME" + + +class Headers(object): + Parent_Span_Finish_Time = "x-datadog-parent-span-finish-time" + + # For one request from the client, the event.requestContext.requestIds in the authorizer lambda + # invocation and the main function invocation are IDENTICAL. Therefore we can use it to tell + # whether current invocation is the actual original authorizing request or a cached request. + Authorizing_Request_Id = "x-datadog-authorizing-requestid" diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py new file mode 100644 index 0000000..a627492 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py @@ -0,0 +1,143 @@ +import logging +import os +import socket +import errno +import re +from threading import Lock + + +MIN_SEND_BUFFER_SIZE = 32 * 1024 +log = logging.getLogger("datadog_lambda.dogstatsd") + + +class DogStatsd(object): + def __init__(self): + self._socket_lock = Lock() + self.socket_path = None + self.host = "localhost" + self.port = 8125 + self.socket = None + self.encoding = "utf-8" + + def get_socket(self, telemetry=False): + """ + Return a connected socket. + + Note: connect the socket before assigning it to the class instance to + avoid bad thread race conditions. + """ + with self._socket_lock: + self.socket = self._get_udp_socket( + self.host, + self.port, + ) + return self.socket + + @classmethod + def _ensure_min_send_buffer_size(cls, sock, min_size=MIN_SEND_BUFFER_SIZE): + # Increase the receiving buffer size where needed (e.g. MacOS has 4k RX + # buffers which is half of the max packet size that the client will send. 
+ if os.name == "posix": + try: + recv_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF) + if recv_buff_size <= min_size: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, min_size) + log.debug("Socket send buffer increased to %dkb", min_size / 1024) + finally: + pass + + @classmethod + def _get_udp_socket(cls, host, port): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(0) + cls._ensure_min_send_buffer_size(sock) + sock.connect((host, port)) + + return sock + + def distribution(self, metric, value, tags=None): + """ + Send a global distribution value, optionally setting tags. + + >>> statsd.distribution("uploaded.file.size", 1445) + >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"]) + """ + self._report(metric, "d", value, tags) + + def close_socket(self): + """ + Closes connected socket if connected. + """ + with self._socket_lock: + if self.socket: + try: + self.socket.close() + except OSError as e: + log.error("Unexpected error: %s", str(e)) + self.socket = None + + def normalize_tags(self, tag_list): + TAG_INVALID_CHARS_RE = re.compile(r"[^\w\d_\-:/\.]", re.UNICODE) + TAG_INVALID_CHARS_SUBS = "_" + return [ + re.sub(TAG_INVALID_CHARS_RE, TAG_INVALID_CHARS_SUBS, tag) + for tag in tag_list + ] + + def _serialize_metric(self, metric, metric_type, value, tags): + # Create/format the metric packet + return "%s:%s|%s%s" % ( + metric, + value, + metric_type, + ("|#" + ",".join(self.normalize_tags(tags))) if tags else "", + ) + + def _report(self, metric, metric_type, value, tags): + if value is None: + return + + payload = self._serialize_metric(metric, metric_type, value, tags) + + # Send it + self._send_to_server(payload) + + def _send_to_server(self, packet): + try: + mysocket = self.socket or self.get_socket() + mysocket.send(packet.encode(self.encoding)) + return True + except socket.timeout: + # dogstatsd is overflowing, drop the packets (mimicks the UDP behaviour) + pass + except (socket.herror, socket.gaierror) as socket_err: + log.warning( + "Error submitting packet: %s, dropping the packet and closing the socket", + socket_err, + ) + self.close_socket() + except socket.error as socket_err: + if socket_err.errno == errno.EAGAIN: + log.debug( + "Socket send would block: %s, dropping the packet", socket_err + ) + elif socket_err.errno == errno.ENOBUFS: + log.debug("Socket buffer full: %s, dropping the packet", socket_err) + elif socket_err.errno == errno.EMSGSIZE: + log.debug( + "Packet size too big (size: %d): %s, dropping the packet", + len(packet.encode(self.encoding)), + socket_err, + ) + else: + log.warning( + "Error submitting packet: %s, dropping the packet and closing the socket", + socket_err, + ) + self.close_socket() + except Exception as e: + log.error("Unexpected error: %s", str(e)) + return False + + +statsd = DogStatsd() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py new file mode 100644 index 0000000..d66848f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py @@ -0,0 +1,42 @@ +import logging +from os import path + +try: + # only available in python 3 + # not an issue since the extension is not compatible with python 2.x runtime + # https://docs.aws.amazon.com/lambda/latest/dg/using-extensions.html + import urllib.request +except ImportError: + # safe since both calls to urllib are protected with try/expect and will return false + urllib = None + +AGENT_URL = "http://127.0.0.1:8124" 
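+# Local HTTP endpoint served by the Datadog Lambda Extension; the hello and flush paths below are relative to it.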
+HELLO_PATH = "/lambda/hello" +FLUSH_PATH = "/lambda/flush" +EXTENSION_PATH = "/opt/extensions/datadog-agent" + +logger = logging.getLogger(__name__) + + +def is_extension_running(): + if not path.exists(EXTENSION_PATH): + return False + try: + urllib.request.urlopen(AGENT_URL + HELLO_PATH) + except Exception as e: + logger.debug("Extension is not running, returned with error %s", e) + return False + return True + + +def flush_extension(): + try: + req = urllib.request.Request(AGENT_URL + FLUSH_PATH, "".encode("ascii")) + urllib.request.urlopen(req) + except Exception as e: + logger.debug("Failed to flush extension, returned with error %s", e) + return False + return True + + +should_use_extension = is_extension_running() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py new file mode 100644 index 0000000..09cc5e7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py @@ -0,0 +1,31 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2020 Datadog, Inc. + +from __future__ import absolute_import +from importlib import import_module + +import os +from datadog_lambda.wrapper import datadog_lambda_wrapper +from datadog_lambda.module_name import modify_module_name + + +class HandlerError(Exception): + pass + + +path = os.environ.get("DD_LAMBDA_HANDLER", None) +if path is None: + raise HandlerError( + "DD_LAMBDA_HANDLER is not defined. Can't use prebuilt datadog handler" + ) +parts = path.rsplit(".", 1) +if len(parts) != 2: + raise HandlerError("Value %s for DD_LAMBDA_HANDLER has invalid format." % path) + + +(mod_name, handler_name) = parts +modified_mod_name = modify_module_name(mod_name) +handler_module = import_module(modified_mod_name) +handler = datadog_lambda_wrapper(getattr(handler_module, handler_name)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py new file mode 100644 index 0000000..ca23ed9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py @@ -0,0 +1,136 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +import os +import json +import time +import logging + +from datadog_lambda.extension import should_use_extension +from datadog_lambda.tags import get_enhanced_metrics_tags, tag_dd_lambda_layer +from datadog_lambda.api import init_api + +logger = logging.getLogger(__name__) + +lambda_stats = None + +init_api() + +if should_use_extension: + from datadog_lambda.statsd_writer import StatsDWriter + + lambda_stats = StatsDWriter() +else: + # Periodical flushing in a background thread is NOT guaranteed to succeed + # and leads to data loss. When disabled, metrics are only flushed at the + # end of invocation. To make metrics submitted from a long-running Lambda + # function available sooner, consider using the Datadog Lambda extension. 
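+ # Setting DD_FLUSH_IN_THREAD=true re-enables the periodic background flush despite the caveat above.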
+ from datadog_lambda.thread_stats_writer import ThreadStatsWriter + + flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true" + lambda_stats = ThreadStatsWriter(flush_in_thread) + + +def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False): + """ + Submit a data point to Datadog distribution metrics. + https://docs.datadoghq.com/graphing/metrics/distributions/ + + When DD_FLUSH_TO_LOG is True, write metric to log, and + wait for the Datadog Log Forwarder Lambda function to submit + the metrics asynchronously. + + Otherwise, the metrics will be submitted to the Datadog API + periodically and at the end of the function execution in a + background thread. + + Note that if the extension is present, it will override the DD_FLUSH_TO_LOG value + and always use the layer to send metrics to the extension + """ + flush_to_logs = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true" + tags = tag_dd_lambda_layer(tags) + + if should_use_extension: + logger.debug( + "Sending metric %s value %s to Datadog via extension", metric_name, value + ) + lambda_stats.distribution(metric_name, value, tags=tags, timestamp=timestamp) + else: + if flush_to_logs or force_async: + write_metric_point_to_stdout( + metric_name, value, timestamp=timestamp, tags=tags + ) + else: + lambda_stats.distribution( + metric_name, value, tags=tags, timestamp=timestamp + ) + + +def write_metric_point_to_stdout(metric_name, value, timestamp=None, tags=[]): + """Writes the specified metric point to standard output""" + logger.debug( + "Sending metric %s value %s to Datadog via log forwarder", metric_name, value + ) + print( + json.dumps( + { + "m": metric_name, + "v": value, + "e": timestamp or int(time.time()), + "t": tags, + } + ) + ) + + +def flush_stats(): + lambda_stats.flush() + + +def are_enhanced_metrics_enabled(): + """Check env var to find if enhanced metrics should be submitted + + Returns: + boolean for whether enhanced metrics are enabled + """ + # DD_ENHANCED_METRICS defaults to true + return os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true" + + +def submit_enhanced_metric(metric_name, lambda_context): + """Submits the enhanced metric with the given name + + Args: + metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors" + lambda_context (dict): Lambda context dict passed to the function by AWS + """ + if not are_enhanced_metrics_enabled(): + logger.debug( + "Not submitting enhanced metric %s because enhanced metrics are disabled", + metric_name, + ) + return + tags = get_enhanced_metrics_tags(lambda_context) + metric_name = "aws.lambda.enhanced." + metric_name + # Enhanced metrics always use an async submission method, (eg logs or extension). 
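+ # force_async=True makes lambda_metric write the point to stdout for the log forwarder unless the extension is running,
+ # e.g. a JSON line of the form {"m": "aws.lambda.enhanced.invocations", "v": 1, "e": <timestamp>, "t": [...tags...]}.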
+ lambda_metric(metric_name, 1, timestamp=None, tags=tags, force_async=True) + + +def submit_invocations_metric(lambda_context): + """Increment aws.lambda.enhanced.invocations by 1, applying runtime, layer, and cold_start tags + + Args: + lambda_context (dict): Lambda context dict passed to the function by AWS + """ + submit_enhanced_metric("invocations", lambda_context) + + +def submit_errors_metric(lambda_context): + """Increment aws.lambda.enhanced.errors by 1, applying runtime, layer, and cold_start tags + + Args: + lambda_context (dict): Lambda context dict passed to the function by AWS + """ + submit_enhanced_metric("errors", lambda_context) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py new file mode 100644 index 0000000..9e4a93e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py @@ -0,0 +1,3 @@ +def modify_module_name(module_name): + """Returns a valid modified module to get imported""" + return ".".join(module_name.split("/")) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py new file mode 100644 index 0000000..0f6d28e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py @@ -0,0 +1,159 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. + +import json +import os +import sys +import logging +import zlib + +from wrapt import wrap_function_wrapper as wrap +from wrapt.importer import when_imported +from ddtrace import patch_all as patch_all_dd + +from datadog_lambda.tracing import ( + get_dd_trace_context, + dd_tracing_enabled, +) +from collections.abc import MutableMapping + +logger = logging.getLogger(__name__) + +_http_patched = False +_requests_patched = False +_integration_tests_patched = False + + +def patch_all(): + """ + Patch third-party libraries for tracing. + """ + _patch_for_integration_tests() + + if dd_tracing_enabled: + patch_all_dd() + else: + _patch_http() + _ensure_patch_requests() + + +def _patch_for_integration_tests(): + """ + Patch `requests` to log the outgoing requests for integration tests. + """ + global _integration_tests_patched + is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true" + if not _integration_tests_patched and is_in_tests: + wrap("requests", "Session.send", _log_request) + _integration_tests_patched = True + + +def _patch_http(): + """ + Patch `http.client` (Python 3) module. + """ + global _http_patched + http_module = "http.client" + if not _http_patched: + _http_patched = True + wrap(http_module, "HTTPConnection.request", _wrap_http_request) + + logger.debug("Patched %s", http_module) + + +def _ensure_patch_requests(): + """ + `requests` is third-party, may not be installed or used, + but ensure it gets patched if installed and used. + """ + if "requests" in sys.modules: + # already imported, patch now + _patch_requests(sys.modules["requests"]) + else: + # patch when imported + when_imported("requests")(_patch_requests) + + +def _patch_requests(module): + """ + Patch the high-level HTTP client module `requests` + if it's installed. 
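+ Patching happens at most once per runtime, guarded by the module-level _requests_patched flag.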
+ """ + global _requests_patched + if not _requests_patched: + _requests_patched = True + try: + wrap("requests", "Session.request", _wrap_requests_request) + logger.debug("Patched requests") + except Exception: + logger.debug("Failed to patch requests", exc_info=True) + + +def _wrap_requests_request(func, instance, args, kwargs): + """ + Wrap `requests.Session.request` to inject the Datadog trace headers + into the outgoing requests. + """ + context = get_dd_trace_context() + if "headers" in kwargs and isinstance(kwargs["headers"], MutableMapping): + kwargs["headers"].update(context) + elif len(args) >= 5 and isinstance(args[4], MutableMapping): + args[4].update(context) + else: + kwargs["headers"] = context + + return func(*args, **kwargs) + + +def _wrap_http_request(func, instance, args, kwargs): + """ + Wrap `http.client` (python3) to inject + the Datadog trace headers into the outgoing requests. + """ + context = get_dd_trace_context() + if "headers" in kwargs and isinstance(kwargs["headers"], MutableMapping): + kwargs["headers"].update(context) + elif len(args) >= 4 and isinstance(args[3], MutableMapping): + args[3].update(context) + else: + kwargs["headers"] = context + + return func(*args, **kwargs) + + +def _log_request(func, instance, args, kwargs): + request = kwargs.get("request") or args[0] + _print_request_string(request) + return func(*args, **kwargs) + + +def _print_request_string(request): + """Print the request so that it can be checked in integration tests + + Only used by integration tests. + """ + method = request.method + url = request.url + + # Sort the datapoints POSTed by their name so that snapshots always align + data = request.body or "{}" + # If payload is compressed, decompress it so we can parse it + if request.headers.get("Content-Encoding") == "deflate": + data = zlib.decompress(data) + data_dict = json.loads(data) + data_dict.get("series", []).sort(key=lambda series: series.get("metric")) + sorted_data = json.dumps(data_dict) + + # Sort headers to prevent any differences in ordering + headers = request.headers or {} + sorted_headers = sorted( + "{}:{}".format(key, value) for key, value in headers.items() + ) + sorted_header_str = json.dumps(sorted_headers) + print( + "HTTP {} {} Headers: {} Data: {}".format( + method, url, sorted_header_str, sorted_data + ) + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py new file mode 100644 index 0000000..d3919c3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py @@ -0,0 +1,9 @@ +class StatsWriter: + def distribution(self, metric_name, value, tags=[], timestamp=None): + raise NotImplementedError() + + def flush(self): + raise NotImplementedError() + + def stop(self): + raise NotImplementedError() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py new file mode 100644 index 0000000..33843dc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py @@ -0,0 +1,17 @@ +from datadog_lambda.stats_writer import StatsWriter +from datadog_lambda.dogstatsd import statsd + + +class StatsDWriter(StatsWriter): + """ + Writes distribution metrics using StatsD protocol + """ + + def distribution(self, metric_name, value, tags=[], timestamp=None): + statsd.distribution(metric_name, value, tags=tags) + + def flush(self): + pass + + def stop(self): + pass diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py new file mode 100644 index 0000000..ec1c5a6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py @@ -0,0 +1,68 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2021 Datadog, Inc. + +from decimal import Decimal +import json +import logging + +redactable_keys = ["authorization", "x-authorization", "password", "token"] +max_depth = 10 +logger = logging.getLogger(__name__) + + +def tag_object(span, key, obj, depth=0): + if obj is None: + return span.set_tag(key, obj) + if depth >= max_depth: + return span.set_tag(key, _redact_val(key, str(obj)[0:5000])) + depth += 1 + if _should_try_string(obj): + parsed = None + try: + parsed = json.loads(obj) + return tag_object(span, key, parsed, depth) + except ValueError: + redacted = _redact_val(key, obj[0:5000]) + return span.set_tag(key, redacted) + if isinstance(obj, int) or isinstance(obj, float) or isinstance(obj, Decimal): + return span.set_tag(key, str(obj)) + if isinstance(obj, list): + for k, v in enumerate(obj): + formatted_key = "{}.{}".format(key, k) + tag_object(span, formatted_key, v, depth) + return + if hasattr(obj, "items"): + for k, v in obj.items(): + formatted_key = "{}.{}".format(key, k) + tag_object(span, formatted_key, v, depth) + return + if hasattr(obj, "to_dict"): + for k, v in obj.to_dict().items(): + formatted_key = "{}.{}".format(key, k) + tag_object(span, formatted_key, v, depth) + return + try: + value_as_str = str(obj) + except Exception: + value_as_str = "UNKNOWN" + return span.set_tag(key, value_as_str) + + +def _should_try_string(obj): + try: + if isinstance(obj, str) or isinstance(obj, unicode): + return True + except NameError: + if isinstance(obj, bytes): + return True + + return False + + +def _redact_val(k, v): + split_key = k.split(".").pop() or k + if split_key in redactable_keys: + return "redacted" + return v diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py new file mode 100644 index 0000000..cdaeb4e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py @@ -0,0 +1,104 @@ +import sys + +from platform import python_version_tuple + +from datadog_lambda import __version__ +from datadog_lambda.cold_start import get_cold_start_tag + + +def _format_dd_lambda_layer_tag(): + """ + Formats the dd_lambda_layer tag, e.g., 'dd_lambda_layer:datadog-python39_1' + """ + runtime = "python{}{}".format(sys.version_info[0], sys.version_info[1]) + return "dd_lambda_layer:datadog-{}_{}".format(runtime, __version__) + + +def tag_dd_lambda_layer(tags): + """ + Used by lambda_metric to insert the dd_lambda_layer tag + """ + dd_lambda_layer_tag = _format_dd_lambda_layer_tag() + if tags: + return tags + [dd_lambda_layer_tag] + else: + return [dd_lambda_layer_tag] + + +def parse_lambda_tags_from_arn(lambda_context): + """Generate the list of lambda tags based on the data in the arn + Args: + lambda_context: Aws lambda context object + ex: lambda_context.arn = arn:aws:lambda:us-east-1:123597598159:function:my-lambda:1 + """ + # Set up flag for extra testing to distinguish between a version or alias + hasAlias = False + # Cap the number of times to spli + split_arn = lambda_context.invoked_function_arn.split(":") + + 
if len(split_arn) > 7: + hasAlias = True + _, _, _, region, account_id, _, function_name, alias = split_arn + else: + _, _, _, region, account_id, _, function_name = split_arn + + # Add the standard tags to a list + tags = [ + "region:{}".format(region), + "account_id:{}".format(account_id), + "functionname:{}".format(function_name), + ] + + # Check if we have a version or alias + if hasAlias: + # If $Latest, drop the $ for datadog tag convention. A lambda alias can't start with $ + if alias.startswith("$"): + alias = alias[1:] + # Versions are numeric. Aliases need the executed version tag + elif not check_if_number(alias): + tags.append("executedversion:{}".format(lambda_context.function_version)) + # create resource tag with function name and alias/version + resource = "resource:{}:{}".format(function_name, alias) + else: + # Resource is only the function name otherwise + resource = "resource:{}".format(function_name) + + tags.append(resource) + + return tags + + +def get_runtime_tag(): + """Get the runtime tag from the current Python version""" + major_version, minor_version, _ = python_version_tuple() + + return "runtime:python{major}.{minor}".format( + major=major_version, minor=minor_version + ) + + +def get_library_version_tag(): + """Get datadog lambda library tag""" + return "datadog_lambda:v{}".format(__version__) + + +def get_enhanced_metrics_tags(lambda_context): + """Get the list of tags to apply to enhanced metrics""" + return parse_lambda_tags_from_arn(lambda_context) + [ + get_cold_start_tag(), + "memorysize:{}".format(lambda_context.memory_limit_in_mb), + get_runtime_tag(), + get_library_version_tag(), + ] + + +def check_if_number(alias): + """ + Check if the alias is a version or number. + Python 2 has no easy way to test this like Python 3 + """ + try: + float(alias) + return True + except ValueError: + return False diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py new file mode 100644 index 0000000..bfcf3c9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py @@ -0,0 +1,65 @@ +import logging + +# Make sure that this package would always be lazy-loaded/outside from the critical path +# since underlying packages are quite heavy to load and useless when the extension is present +from datadog.threadstats import ThreadStats +from datadog_lambda.stats_writer import StatsWriter + +logger = logging.getLogger(__name__) + + +class ThreadStatsWriter(StatsWriter): + """ + Writes distribution metrics using the ThreadStats class + """ + + def __init__(self, flush_in_thread): + self.thread_stats = ThreadStats(compress_payload=True) + self.thread_stats.start(flush_in_thread=flush_in_thread) + + def distribution(self, metric_name, value, tags=[], timestamp=None): + self.thread_stats.distribution( + metric_name, value, tags=tags, timestamp=timestamp + ) + + def flush(self): + """ "Flush distributions from ThreadStats to Datadog. + Modified based on `datadog.threadstats.base.ThreadStats.flush()`, + to gain better control over exception handling. + """ + _, dists = self.thread_stats._get_aggregate_metrics_and_dists(float("inf")) + count_dists = len(dists) + if not count_dists: + logger.debug("No distributions to flush. 
Continuing.") + + self.thread_stats.flush_count += 1 + logger.debug( + "Flush #%s sending %s distributions", + self.thread_stats.flush_count, + count_dists, + ) + try: + self.thread_stats.reporter.flush_distributions(dists) + except Exception as e: + # The nature of the root issue https://bugs.python.org/issue41345 is complex, + # but comprehensive tests suggest that it is safe to retry on this specific error. + if type(e).__name__ == "ClientError" and "RemoteDisconnected" in str(e): + logger.debug( + "Retry flush #%s due to RemoteDisconnected", + self.thread_stats.flush_count, + ) + try: + self.thread_stats.reporter.flush_distributions(dists) + except Exception: + logger.debug( + "Flush #%s failed after retry", + self.thread_stats.flush_count, + exc_info=True, + ) + else: + logger.debug( + "Flush #%s failed", self.thread_stats.flush_count, exc_info=True + ) + + def stop(self): + self.thread_stats.stop() diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py new file mode 100644 index 0000000..dc7e32b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py @@ -0,0 +1,1308 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. +import hashlib +import logging +import os +import json +import base64 +from datetime import datetime, timezone +from typing import Optional, Dict + +from datadog_lambda.metric import submit_errors_metric + +try: + from typing import Literal +except ImportError: + # Literal was added to typing in python 3.8 + from typing_extensions import Literal + +from datadog_lambda.constants import ( + SamplingPriority, + TraceContextSource, + XrayDaemon, + Headers, +) +from datadog_lambda.xray import ( + send_segment, + parse_xray_header, +) +from ddtrace import tracer, patch, Span +from ddtrace import __version__ as ddtrace_version +from ddtrace.propagation.http import HTTPPropagator +from ddtrace.context import Context +from datadog_lambda import __version__ as datadog_lambda_version +from datadog_lambda.trigger import ( + _EventSource, + parse_event_source, + get_first_record, + EventTypes, + EventSubtypes, +) + +dd_trace_otel_enabled = ( + os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true" +) +if dd_trace_otel_enabled: + from opentelemetry.trace import set_tracer_provider + from ddtrace.opentelemetry import TracerProvider + + set_tracer_provider(TracerProvider()) + + +logger = logging.getLogger(__name__) + +dd_trace_context = None +dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true" +if dd_tracing_enabled: + # Enable the telemetry client if the user has opted in + if ( + os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower() + == "true" + ): + from ddtrace.internal.telemetry import telemetry_writer + + telemetry_writer.enable() + +propagator = HTTPPropagator() + + +def _convert_xray_trace_id(xray_trace_id): + """ + Convert X-Ray trace id (hex)'s last 63 bits to a Datadog trace id (int). + """ + return 0x7FFFFFFFFFFFFFFF & int(xray_trace_id[-16:], 16) + + +def _convert_xray_entity_id(xray_entity_id): + """ + Convert X-Ray (sub)segement id (hex) to a Datadog span id (int). + """ + return int(xray_entity_id, 16) + + +def _convert_xray_sampling(xray_sampled): + """ + Convert X-Ray sampled (True/False) to its Datadog counterpart. 
+ """ + return SamplingPriority.USER_KEEP if xray_sampled else SamplingPriority.USER_REJECT + + +def _get_xray_trace_context(): + if not is_lambda_context(): + return None + + xray_trace_entity = parse_xray_header( + os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "") + ) + if xray_trace_entity is None: + return None + trace_context = Context( + trace_id=_convert_xray_trace_id(xray_trace_entity.get("trace_id")), + span_id=_convert_xray_entity_id(xray_trace_entity.get("parent_id")), + sampling_priority=_convert_xray_sampling(xray_trace_entity.get("sampled")), + ) + logger.debug( + "Converted trace context %s from X-Ray segment %s", + trace_context, + ( + xray_trace_entity["trace_id"], + xray_trace_entity["parent_id"], + xray_trace_entity["sampled"], + ), + ) + return trace_context + + +def _get_dd_trace_py_context(): + span = tracer.current_span() + if not span: + return None + + logger.debug( + "found dd trace context: %s", (span.context.trace_id, span.context.span_id) + ) + return span.context + + +def _is_context_complete(context): + return ( + context + and context.trace_id + and context.span_id + and context.sampling_priority is not None + ) + + +def create_dd_dummy_metadata_subsegment( + subsegment_metadata_value, subsegment_metadata_key +): + """ + Create a Datadog subsegment to pass the Datadog trace context or Lambda function + tags into its metadata field, so the X-Ray trace can be converted to a Datadog + trace in the Datadog backend with the correct context. + """ + send_segment(subsegment_metadata_key, subsegment_metadata_value) + + +def extract_context_from_lambda_context(lambda_context): + """ + Extract Datadog trace context from the `client_context` attr + from the Lambda `context` object. + + dd_trace libraries inject this trace context on synchronous invocations + """ + dd_data = None + client_context = lambda_context.client_context + if client_context and client_context.custom: + dd_data = client_context.custom + if "_datadog" in client_context.custom: + # Legacy trace propagation dict + dd_data = client_context.custom.get("_datadog") + return propagator.extract(dd_data) + + +def extract_context_from_http_event_or_context( + event, + lambda_context, + event_source: _EventSource, + decode_authorizer_context: bool = True, +): + """ + Extract Datadog trace context from the `headers` key in from the Lambda + `event` object. + + Falls back to lambda context if no trace data is found in the `headers` + """ + if decode_authorizer_context: + is_http_api = event_source.equals( + EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API + ) + injected_authorizer_data = get_injected_authorizer_data(event, is_http_api) + context = propagator.extract(injected_authorizer_data) + if _is_context_complete(context): + return context + + headers = event.get("headers") + context = propagator.extract(headers) + + if not _is_context_complete(context): + return extract_context_from_lambda_context(lambda_context) + + return context + + +def create_sns_event(message): + return { + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "Sns": message, + } + ] + } + + +def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context): + """ + Extract Datadog trace context from an SQS event. + + The extraction chain goes as follows: + EB => SQS (First records body contains EB context), or + SNS => SQS (First records body contains SNS context), or + SQS or SNS (`messageAttributes` for SQS context, + `MessageAttributes` for SNS context), else + Lambda Context. 
+ + Falls back to lambda context if no trace data is found in the SQS message attributes. + """ + + # EventBridge => SQS + try: + context = _extract_context_from_eventbridge_sqs_event(event) + if _is_context_complete(context): + return context + except Exception: + logger.debug("Failed extracting context as EventBridge to SQS.") + + try: + first_record = event.get("Records")[0] + + # logic to deal with SNS => SQS event + if "body" in first_record: + body_str = first_record.get("body", {}) + try: + body = json.loads(body_str) + if body.get("Type", "") == "Notification" and "TopicArn" in body: + logger.debug("Found SNS message inside SQS event") + first_record = get_first_record(create_sns_event(body)) + except Exception: + first_record = event.get("Records")[0] + pass + + msg_attributes = first_record.get( + "messageAttributes", + first_record.get("Sns", {}).get("MessageAttributes", {}), + ) + dd_payload = msg_attributes.get("_datadog", {}) + # SQS uses dataType and binaryValue/stringValue + # SNS uses Type and Value + dd_json_data_type = dd_payload.get("Type", dd_payload.get("dataType", "")) + if dd_json_data_type == "Binary": + dd_json_data = dd_payload.get( + "binaryValue", + dd_payload.get("Value", r"{}"), + ) + dd_json_data = base64.b64decode(dd_json_data) + elif dd_json_data_type == "String": + dd_json_data = dd_payload.get( + "stringValue", + dd_payload.get("Value", r"{}"), + ) + else: + logger.debug( + "Datadog Lambda Python only supports extracting trace" + "context from String or Binary SQS/SNS message attributes" + ) + return extract_context_from_lambda_context(lambda_context) + dd_data = json.loads(dd_json_data) + return propagator.extract(dd_data) + except Exception as e: + logger.debug("The trace extractor returned with error %s", e) + return extract_context_from_lambda_context(lambda_context) + + +def _extract_context_from_eventbridge_sqs_event(event): + """ + Extracts Datadog trace context from an SQS event triggered by + EventBridge. + + This is only possible if first record in `Records` contains a + `body` field which contains the EventBridge `detail` as a JSON string. + """ + first_record = event.get("Records")[0] + body_str = first_record.get("body") + body = json.loads(body_str) + detail = body.get("detail") + dd_context = detail.get("_datadog") + return propagator.extract(dd_context) + + +def extract_context_from_eventbridge_event(event, lambda_context): + """ + Extract datadog trace context from an EventBridge message's Details. + This is only possible if Details is a JSON string. 
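+ The injected context is read from the event's detail["_datadog"] field.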
+ """ + try: + detail = event.get("detail") + dd_context = detail.get("_datadog") + if not dd_context: + return extract_context_from_lambda_context(lambda_context) + return propagator.extract(dd_context) + except Exception as e: + logger.debug("The trace extractor returned with error %s", e) + return extract_context_from_lambda_context(lambda_context) + + +def extract_context_from_kinesis_event(event, lambda_context): + """ + Extract datadog trace context from a Kinesis Stream's base64 encoded data string + """ + try: + record = get_first_record(event) + data = record.get("kinesis", {}).get("data", None) + if data: + b64_bytes = data.encode("ascii") + str_bytes = base64.b64decode(b64_bytes) + data_str = str_bytes.decode("ascii") + data_obj = json.loads(data_str) + dd_ctx = data_obj.get("_datadog") + + if not dd_ctx: + return extract_context_from_lambda_context(lambda_context) + + return propagator.extract(dd_ctx) + except Exception as e: + logger.debug("The trace extractor returned with error %s", e) + return extract_context_from_lambda_context(lambda_context) + + +def _deterministic_md5_hash(s: str) -> int: + """MD5 here is to generate trace_id, not for any encryption.""" + hex_number = hashlib.md5(s.encode("ascii")).hexdigest() + binary = bin(int(hex_number, 16)) + binary_str = str(binary) + binary_str_remove_0b = binary_str[2:].rjust(128, "0") + most_significant_64_bits_without_leading_1 = "0" + binary_str_remove_0b[1:-64] + result = int(most_significant_64_bits_without_leading_1, 2) + if result == 0: + return 1 + return result + + +def extract_context_from_step_functions(event, lambda_context): + """ + Only extract datadog trace context when Step Functions Context Object is injected + into lambda's event dict. + """ + try: + execution_id = event.get("Execution").get("Id") + state_name = event.get("State").get("Name") + state_entered_time = event.get("State").get("EnteredTime") + trace_id = _deterministic_md5_hash(execution_id) + parent_id = _deterministic_md5_hash( + execution_id + "#" + state_name + "#" + state_entered_time + ) + sampling_priority = SamplingPriority.AUTO_KEEP + return Context( + trace_id=trace_id, span_id=parent_id, sampling_priority=sampling_priority + ) + except Exception as e: + logger.debug("The Step Functions trace extractor returned with error %s", e) + return extract_context_from_lambda_context(lambda_context) + + +def extract_context_custom_extractor(extractor, event, lambda_context): + """ + Extract Datadog trace context using a custom trace extractor function + """ + try: + ( + trace_id, + parent_id, + sampling_priority, + ) = extractor(event, lambda_context) + return Context( + trace_id=int(trace_id), + span_id=int(parent_id), + sampling_priority=int(sampling_priority), + ) + except Exception as e: + logger.debug("The trace extractor returned with error %s", e) + + +def is_authorizer_response(response) -> bool: + try: + return ( + response is not None + and response["principalId"] + and response["policyDocument"] + ) + except (KeyError, AttributeError): + pass + except Exception as e: + logger.debug("unknown error while checking is_authorizer_response %s", e) + return False + + +def get_injected_authorizer_data(event, is_http_api) -> dict: + try: + authorizer_headers = event.get("requestContext", {}).get("authorizer") + if not authorizer_headers: + return None + + dd_data_raw = ( + authorizer_headers.get("lambda", {}).get("_datadog") + if is_http_api + else authorizer_headers.get("_datadog") + ) + + if not dd_data_raw: + return None + + injected_data = 
json.loads(base64.b64decode(dd_data_raw)) + + # Lambda authorizer's results can be cached. But the payload will still have the injected + # data in cached requests. How to distinguish cached case and ignore the injected data ? + # APIGateway automatically injects a integrationLatency data in some cases. If it's >0 we + # know that it's not cached. But integrationLatency is not available for Http API case. In + # that case, we use the injected Authorizing_Request_Id to tell if it's cached. But token + # authorizers don't pass on the requestId. The Authorizing_Request_Id can't work for all + # cases neither. As a result, we combine both methods as shown below. + if authorizer_headers.get("integrationLatency", 0) > 0 or event.get( + "requestContext", {} + ).get("requestId") == injected_data.get(Headers.Authorizing_Request_Id): + return injected_data + else: + return None + + except Exception as e: + logger.debug("Failed to check if invocated by an authorizer. error %s", e) + return None + + +def extract_dd_trace_context( + event, lambda_context, extractor=None, decode_authorizer_context: bool = True +): + """ + Extract Datadog trace context from the Lambda `event` object. + + Write the context to a global `dd_trace_context`, so the trace + can be continued on the outgoing requests with the context injected. + """ + global dd_trace_context + trace_context_source = None + event_source = parse_event_source(event) + + if extractor is not None: + context = extract_context_custom_extractor(extractor, event, lambda_context) + elif isinstance(event, (set, dict)) and "headers" in event: + context = extract_context_from_http_event_or_context( + event, lambda_context, event_source, decode_authorizer_context + ) + elif event_source.equals(EventTypes.SNS) or event_source.equals(EventTypes.SQS): + context = extract_context_from_sqs_or_sns_event_or_context( + event, lambda_context + ) + elif event_source.equals(EventTypes.EVENTBRIDGE): + context = extract_context_from_eventbridge_event(event, lambda_context) + elif event_source.equals(EventTypes.KINESIS): + context = extract_context_from_kinesis_event(event, lambda_context) + elif event_source.equals(EventTypes.STEPFUNCTIONS): + context = extract_context_from_step_functions(event, lambda_context) + else: + context = extract_context_from_lambda_context(lambda_context) + + if _is_context_complete(context): + logger.debug("Extracted Datadog trace context from event or context") + dd_trace_context = context + trace_context_source = TraceContextSource.EVENT + else: + # AWS Lambda runtime caches global variables between invocations, + # reset to avoid using the context from the last invocation. + dd_trace_context = _get_xray_trace_context() + if dd_trace_context: + trace_context_source = TraceContextSource.XRAY + logger.debug("extracted dd trace context %s", dd_trace_context) + return dd_trace_context, trace_context_source, event_source + + +def get_dd_trace_context_obj(): + """ + Return the Datadog trace context to be propagated on the outgoing requests. + + If the Lambda function is invoked by a Datadog-traced service, a Datadog + trace context may already exist, and it should be used. Otherwise, use the + current X-Ray trace entity, or the dd-trace-py context if DD_TRACE_ENABLED is true. + + Most of widely-used HTTP clients are patched to inject the context + automatically, but this function can be used to manually inject the trace + context to an outgoing request. 
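+ When both a Datadog trace context and an X-Ray segment are present, the X-Ray span id is used as the parent id of the returned context.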
+ """ + if dd_tracing_enabled: + dd_trace_py_context = _get_dd_trace_py_context() + if _is_context_complete(dd_trace_py_context): + return dd_trace_py_context + + global dd_trace_context + + try: + xray_context = _get_xray_trace_context() # xray (sub)segment + except Exception as e: + logger.debug( + "get_dd_trace_context couldn't read from segment from x-ray, with error %s" + % e + ) + if not xray_context: + return None + + if not _is_context_complete(dd_trace_context): + return xray_context + + logger.debug("Set parent id from xray trace context: %s", xray_context.span_id) + return Context( + trace_id=dd_trace_context.trace_id, + span_id=xray_context.span_id, + sampling_priority=dd_trace_context.sampling_priority, + meta=dd_trace_context._meta.copy(), + metrics=dd_trace_context._metrics.copy(), + ) + + +def get_dd_trace_context(): + """ + Return the Datadog trace context to be propagated on the outgoing requests, + as a dict of headers. + """ + headers = {} + context = get_dd_trace_context_obj() + if not _is_context_complete(context): + return headers + propagator.inject(context, headers) + return headers + + +def set_correlation_ids(): + """ + Create a dummy span, and overrides its trace_id and span_id, to make + ddtrace.helpers.get_log_correlation_context() return a dict containing the correct ids for both + auto and manual log correlations. + + TODO: Remove me when Datadog tracer is natively supported in Lambda. + """ + if not is_lambda_context(): + logger.debug("set_correlation_ids is only supported in LambdaContext") + return + if dd_tracing_enabled: + logger.debug("using ddtrace implementation for spans") + return + + context = get_dd_trace_context_obj() + if not _is_context_complete(context): + return + + tracer.context_provider.activate(context) + tracer.trace("dummy.span") + logger.debug("correlation ids set") + + +def inject_correlation_ids(): + """ + Override the formatter of LambdaLoggerHandler to inject datadog trace and + span id for log correlation. + + For manual injections to custom log handlers, use `ddtrace.helpers.get_log_correlation_context` + to retrieve a dict containing correlation ids (trace_id, span_id). + """ + # Override the log format of the AWS provided LambdaLoggerHandler + root_logger = logging.getLogger() + for handler in root_logger.handlers: + if handler.__class__.__name__ == "LambdaLoggerHandler" and isinstance( + handler.formatter, logging.Formatter + ): + handler.setFormatter( + logging.Formatter( + "[%(levelname)s]\t%(asctime)s.%(msecs)dZ\t%(aws_request_id)s\t" + "[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s]\t%(message)s\n", + "%Y-%m-%dT%H:%M:%S", + ) + ) + + # Patch `logging.Logger.makeRecord` to actually inject correlation ids + patch(logging=True) + + logger.debug("logs injection configured") + + +def is_lambda_context(): + """ + Return True if the X-Ray context is `LambdaContext`, rather than the + regular `Context` (e.g., when testing lambda functions locally). 
+ """ + return os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME, "") != "" + + +def set_dd_trace_py_root(trace_context_source, merge_xray_traces): + if trace_context_source == TraceContextSource.EVENT or merge_xray_traces: + context = Context( + trace_id=dd_trace_context.trace_id, + span_id=dd_trace_context.span_id, + sampling_priority=dd_trace_context.sampling_priority, + ) + if merge_xray_traces: + xray_context = _get_xray_trace_context() + if xray_context and xray_context.span_id: + context.span_id = xray_context.span_id + + tracer.context_provider.activate(context) + logger.debug( + "Set dd trace root context to: %s", + (context.trace_id, context.span_id), + ) + + +def create_inferred_span( + event, + context, + event_source: _EventSource = None, + decode_authorizer_context: bool = True, +): + if event_source is None: + event_source = parse_event_source(event) + try: + if event_source.equals( + EventTypes.API_GATEWAY, subtype=EventSubtypes.API_GATEWAY + ): + logger.debug("API Gateway event detected. Inferring a span") + return create_inferred_span_from_api_gateway_event( + event, context, decode_authorizer_context + ) + elif event_source.equals(EventTypes.LAMBDA_FUNCTION_URL): + logger.debug("Function URL event detected. Inferring a span") + return create_inferred_span_from_lambda_function_url_event(event, context) + elif event_source.equals( + EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API + ): + logger.debug("HTTP API event detected. Inferring a span") + return create_inferred_span_from_http_api_event( + event, context, decode_authorizer_context + ) + elif event_source.equals( + EventTypes.API_GATEWAY, subtype=EventSubtypes.WEBSOCKET + ): + logger.debug("API Gateway Websocket event detected. Inferring a span") + return create_inferred_span_from_api_gateway_websocket_event( + event, context, decode_authorizer_context + ) + elif event_source.equals(EventTypes.SQS): + logger.debug("SQS event detected. Inferring a span") + return create_inferred_span_from_sqs_event(event, context) + elif event_source.equals(EventTypes.SNS): + logger.debug("SNS event detected. Inferring a span") + return create_inferred_span_from_sns_event(event, context) + elif event_source.equals(EventTypes.KINESIS): + logger.debug("Kinesis event detected. Inferring a span") + return create_inferred_span_from_kinesis_event(event, context) + elif event_source.equals(EventTypes.DYNAMODB): + logger.debug("Dynamodb event detected. Inferring a span") + return create_inferred_span_from_dynamodb_event(event, context) + elif event_source.equals(EventTypes.S3): + logger.debug("S3 event detected. Inferring a span") + return create_inferred_span_from_s3_event(event, context) + elif event_source.equals(EventTypes.EVENTBRIDGE): + logger.debug("Eventbridge event detected. Inferring a span") + return create_inferred_span_from_eventbridge_event(event, context) + except Exception as e: + logger.debug( + "Unable to infer span. Detected type: %s. 
Reason: %s", + event_source.to_string(), + e, + ) + return None + logger.debug("Unable to infer a span: unknown event type") + return None + + +def create_service_mapping(val): + new_service_mapping = {} + for entry in val.split(","): + parts = entry.split(":") + if len(parts) == 2: + key = parts[0].strip() + value = parts[1].strip() + if key != value and key and value: + new_service_mapping[key] = value + return new_service_mapping + + +def determine_service_name(service_mapping, specific_key, generic_key, default_value): + service_name = service_mapping.get(specific_key) + if service_name is None: + service_name = service_mapping.get(generic_key, default_value) + return service_name + + +service_mapping = {} +# Initialization code +service_mapping_str = os.getenv("DD_SERVICE_MAPPING", "") +service_mapping = create_service_mapping(service_mapping_str) + + +def create_inferred_span_from_lambda_function_url_event(event, context): + request_context = event.get("requestContext") + api_id = request_context.get("apiId") + domain = request_context.get("domainName") + service_name = determine_service_name(service_mapping, api_id, "lambda_url", domain) + method = request_context.get("http", {}).get("method") + path = request_context.get("http", {}).get("path") + resource = "{0} {1}".format(method, path) + tags = { + "operation_name": "aws.lambda.url", + "http.url": domain + path, + "endpoint": path, + "http.method": method, + "resource_names": domain + path, + "request_id": context.aws_request_id, + } + request_time_epoch = request_context.get("timeEpoch") + args = { + "service": service_name, + "resource": resource, + "span_type": "http", + } + tracer.set_tags( + {"_dd.origin": "lambda"} + ) # function urls don't count as lambda_inferred, + # because they're in the same service as the inferring lambda function + span = tracer.trace("aws.lambda.url", **args) + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync") + if span: + span.set_tags(tags) + span.start = request_time_epoch / 1000 + return span + + +def is_api_gateway_invocation_async(event): + return event.get("headers", {}).get("X-Amz-Invocation-Type") == "Event" + + +def insert_upstream_authorizer_span( + kwargs_to_start_span, other_tags_for_span, start_time_ns, finish_time_ns +): + """Insert the authorizer span. 
+ Without this: parent span --child-> inferred span + With this insertion: parent span --child-> upstreamAuthorizerSpan --child-> inferred span + + Args: + kwargs_to_start_span (Dict): the same keyword arguments used for the inferred span + other_tags_for_span (Dict): the same tag keyword arguments used for the inferred span + start_time_ns (int): the start time of the span in nanoseconds + finish_time_ns (int): the finish time of the sapn in nanoseconds + """ + trace_ctx = tracer.current_trace_context() + upstream_authorizer_span = tracer.trace( + "aws.apigateway.authorizer", **kwargs_to_start_span + ) + upstream_authorizer_span.set_tags(other_tags_for_span) + upstream_authorizer_span.set_tag("operation_name", "aws.apigateway.authorizer") + # always sync for the authorizer invocation + InferredSpanInfo.set_tags_to_span(upstream_authorizer_span, synchronicity="sync") + upstream_authorizer_span.start_ns = int(start_time_ns) + upstream_authorizer_span.finish(finish_time_ns / 1e9) + # trace context needs to be set again as it is reset by finish() + tracer.context_provider.activate(trace_ctx) + return upstream_authorizer_span + + +def process_injected_data(event, request_time_epoch_ms, args, tags): + """ + This covers the ApiGateway RestAPI and Websocket cases. It doesn't cover Http API cases. + """ + injected_authorizer_data = get_injected_authorizer_data(event, False) + if injected_authorizer_data: + try: + start_time_ns = int( + injected_authorizer_data.get(Headers.Parent_Span_Finish_Time) + ) + finish_time_ns = ( + request_time_epoch_ms + + ( + int( + event["requestContext"]["authorizer"].get( + "integrationLatency", 0 + ) + ) + ) + ) * 1e6 + upstream_authorizer_span = insert_upstream_authorizer_span( + args, tags, start_time_ns, finish_time_ns + ) + return upstream_authorizer_span, finish_time_ns + except Exception as e: + logger.debug( + "Unable to insert authorizer span. 
Continue to generate the main span.\ + Reason: %s", + e, + ) + return None, None + else: + return None, None + + +def create_inferred_span_from_api_gateway_websocket_event( + event, context, decode_authorizer_context: bool = True +): + request_context = event.get("requestContext") + domain = request_context.get("domainName") + endpoint = request_context.get("routeKey") + api_id = request_context.get("apiId") + + service_name = determine_service_name( + service_mapping, api_id, "lambda_api_gateway", domain + ) + tags = { + "operation_name": "aws.apigateway.websocket", + "http.url": domain + endpoint, + "endpoint": endpoint, + "resource_names": endpoint, + "apiid": api_id, + "apiname": api_id, + "stage": request_context.get("stage"), + "request_id": context.aws_request_id, + "connection_id": request_context.get("connectionId"), + "event_type": request_context.get("eventType"), + "message_direction": request_context.get("messageDirection"), + } + request_time_epoch_ms = int(request_context.get("requestTimeEpoch")) + if is_api_gateway_invocation_async(event): + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + else: + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync") + args = { + "service": service_name, + "resource": endpoint, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + upstream_authorizer_span = None + finish_time_ns = None + if decode_authorizer_context: + upstream_authorizer_span, finish_time_ns = process_injected_data( + event, request_time_epoch_ms, args, tags + ) + span = tracer.trace("aws.apigateway.websocket", **args) + if span: + span.set_tags(tags) + span.start_ns = int( + finish_time_ns + if finish_time_ns is not None + else request_time_epoch_ms * 1e6 + ) + if upstream_authorizer_span: + span.parent_id = upstream_authorizer_span.span_id + return span + + +def create_inferred_span_from_api_gateway_event( + event, context, decode_authorizer_context: bool = True +): + request_context = event.get("requestContext") + domain = request_context.get("domainName", "") + api_id = request_context.get("apiId") + service_name = determine_service_name( + service_mapping, api_id, "lambda_api_gateway", domain + ) + method = event.get("httpMethod") + path = event.get("path") + resource = "{0} {1}".format(method, path) + tags = { + "operation_name": "aws.apigateway.rest", + "http.url": domain + path, + "endpoint": path, + "http.method": method, + "resource_names": resource, + "apiid": api_id, + "apiname": api_id, + "stage": request_context.get("stage"), + "request_id": context.aws_request_id, + } + request_time_epoch_ms = int(request_context.get("requestTimeEpoch")) + if is_api_gateway_invocation_async(event): + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + else: + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync") + args = { + "service": service_name, + "resource": resource, + "span_type": "http", + } + tracer.set_tags({"_dd.origin": "lambda"}) + upstream_authorizer_span = None + finish_time_ns = None + if decode_authorizer_context: + upstream_authorizer_span, finish_time_ns = process_injected_data( + event, request_time_epoch_ms, args, tags + ) + span = tracer.trace("aws.apigateway", **args) + if span: + span.set_tags(tags) + # start time pushed by the inserted authorizer span + span.start_ns = int( + finish_time_ns + if finish_time_ns is not None + else request_time_epoch_ms * 1e6 + ) + if upstream_authorizer_span: + span.parent_id = upstream_authorizer_span.span_id + 
return span + + +def create_inferred_span_from_http_api_event( + event, context, decode_authorizer_context: bool = True +): + request_context = event.get("requestContext") + domain = request_context.get("domainName") + api_id = request_context.get("apiId") + service_name = determine_service_name( + service_mapping, api_id, "lambda_api_gateway", domain + ) + method = request_context.get("http", {}).get("method") + path = event.get("rawPath") + resource = "{0} {1}".format(method, path) + tags = { + "operation_name": "aws.httpapi", + "endpoint": path, + "http.url": domain + path, + "http.method": request_context.get("http", {}).get("method"), + "http.protocol": request_context.get("http", {}).get("protocol"), + "http.source_ip": request_context.get("http", {}).get("sourceIp"), + "http.user_agent": request_context.get("http", {}).get("userAgent"), + "resource_names": resource, + "request_id": context.aws_request_id, + "apiid": api_id, + "apiname": api_id, + "stage": request_context.get("stage"), + } + request_time_epoch_ms = int(request_context.get("timeEpoch")) + if is_api_gateway_invocation_async(event): + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + else: + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync") + args = { + "service": service_name, + "resource": resource, + "span_type": "http", + } + tracer.set_tags({"_dd.origin": "lambda"}) + inferred_span_start_ns = request_time_epoch_ms * 1e6 + if decode_authorizer_context: + injected_authorizer_data = get_injected_authorizer_data(event, True) + if injected_authorizer_data: + inferred_span_start_ns = injected_authorizer_data.get( + Headers.Parent_Span_Finish_Time + ) + span = tracer.trace("aws.httpapi", **args) + if span: + span.set_tags(tags) + span.start_ns = int(inferred_span_start_ns) + return span + + +def create_inferred_span_from_sqs_event(event, context): + trace_ctx = tracer.current_trace_context() + + event_record = get_first_record(event) + event_source_arn = event_record.get("eventSourceARN") + queue_name = event_source_arn.split(":")[-1] + service_name = determine_service_name( + service_mapping, queue_name, "lambda_sqs", "sqs" + ) + tags = { + "operation_name": "aws.sqs", + "resource_names": queue_name, + "queuename": queue_name, + "event_source_arn": event_source_arn, + "receipt_handle": event_record.get("receiptHandle"), + "sender_id": event_record.get("attributes", {}).get("SenderId"), + } + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + request_time_epoch = event_record.get("attributes", {}).get("SentTimestamp") + args = { + "service": service_name, + "resource": queue_name, + "span_type": "web", + } + start_time = int(request_time_epoch) / 1000 + + upstream_span = None + if "body" in event_record: + body_str = event_record.get("body", {}) + try: + body = json.loads(body_str) + + # logic to deal with SNS => SQS event + if body.get("Type", "") == "Notification" and "TopicArn" in body: + logger.debug("Found SNS message inside SQS event") + upstream_span = create_inferred_span_from_sns_event( + create_sns_event(body), context + ) + upstream_span.finish(finish_time=start_time) + + # EventBridge => SQS + elif body.get("detail"): + detail = body.get("detail") + if detail.get("_datadog"): + logger.debug("Found an EventBridge message inside SQS event") + upstream_span = create_inferred_span_from_eventbridge_event( + body, context + ) + upstream_span.finish(finish_time=start_time) + + except Exception as e: + logger.debug( + "Unable to create upstream 
span from SQS message, with error %s" % e + ) + pass + + # trace context needs to be set again as it is reset + # when sns_span.finish executes + tracer.context_provider.activate(trace_ctx) + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.sqs", **args) + if span: + span.set_tags(tags) + span.start = start_time + if upstream_span: + span.parent_id = upstream_span.span_id + + return span + + +def create_inferred_span_from_sns_event(event, context): + event_record = get_first_record(event) + sns_message = event_record.get("Sns") + topic_arn = event_record.get("Sns", {}).get("TopicArn") + topic_name = topic_arn.split(":")[-1] + service_name = determine_service_name( + service_mapping, topic_name, "lambda_sns", "sns" + ) + tags = { + "operation_name": "aws.sns", + "resource_names": topic_name, + "topicname": topic_name, + "topic_arn": topic_arn, + "message_id": sns_message.get("MessageId"), + "type": sns_message.get("Type"), + } + + # Subject not available in SNS => SQS scenario + if "Subject" in sns_message and sns_message["Subject"]: + tags["subject"] = sns_message.get("Subject") + + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + sns_dt_format = "%Y-%m-%dT%H:%M:%S.%fZ" + timestamp = event_record.get("Sns", {}).get("Timestamp") + dt = datetime.strptime(timestamp, sns_dt_format) + + args = { + "service": service_name, + "resource": topic_name, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.sns", **args) + if span: + span.set_tags(tags) + span.start = dt.replace(tzinfo=timezone.utc).timestamp() + return span + + +def create_inferred_span_from_kinesis_event(event, context): + event_record = get_first_record(event) + event_source_arn = event_record.get("eventSourceARN") + event_id = event_record.get("eventID") + stream_name = event_source_arn.split(":")[-1] + shard_id = event_id.split(":")[0] + service_name = determine_service_name( + service_mapping, stream_name, "lambda_kinesis", "kinesis" + ) + tags = { + "operation_name": "aws.kinesis", + "resource_names": stream_name, + "streamname": stream_name, + "shardid": shard_id, + "event_source_arn": event_source_arn, + "event_id": event_id, + "event_name": event_record.get("eventName"), + "event_version": event_record.get("eventVersion"), + "partition_key": event_record.get("kinesis", {}).get("partitionKey"), + } + InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async") + request_time_epoch = event_record.get("kinesis", {}).get( + "approximateArrivalTimestamp" + ) + + args = { + "service": service_name, + "resource": stream_name, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.kinesis", **args) + if span: + span.set_tags(tags) + span.start = request_time_epoch + return span + + +def create_inferred_span_from_dynamodb_event(event, context): + event_record = get_first_record(event) + event_source_arn = event_record.get("eventSourceARN") + table_name = event_source_arn.split("/")[1] + service_name = determine_service_name( + service_mapping, table_name, "lambda_dynamodb", "dynamodb" + ) + dynamodb_message = event_record.get("dynamodb") + tags = { + "operation_name": "aws.dynamodb", + "resource_names": table_name, + "tablename": table_name, + "event_source_arn": event_source_arn, + "event_id": event_record.get("eventID"), + "event_name": event_record.get("eventName"), + "event_version": event_record.get("eventVersion"), + "stream_view_type": dynamodb_message.get("StreamViewType"), + 
"size_bytes": str(dynamodb_message.get("SizeBytes")), + } + InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self") + request_time_epoch = event_record.get("dynamodb", {}).get( + "ApproximateCreationDateTime" + ) + args = { + "service": service_name, + "resource": table_name, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.dynamodb", **args) + if span: + span.set_tags(tags) + + span.start = int(request_time_epoch) + return span + + +def create_inferred_span_from_s3_event(event, context): + event_record = get_first_record(event) + bucket_name = event_record.get("s3", {}).get("bucket", {}).get("name") + service_name = determine_service_name( + service_mapping, bucket_name, "lambda_s3", "s3" + ) + tags = { + "operation_name": "aws.s3", + "resource_names": bucket_name, + "event_name": event_record.get("eventName"), + "bucketname": bucket_name, + "bucket_arn": event_record.get("s3", {}).get("bucket", {}).get("arn"), + "object_key": event_record.get("s3", {}).get("object", {}).get("key"), + "object_size": str(event_record.get("s3", {}).get("object", {}).get("size")), + "object_etag": event_record.get("s3", {}).get("object", {}).get("eTag"), + } + InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self") + dt_format = "%Y-%m-%dT%H:%M:%S.%fZ" + timestamp = event_record.get("eventTime") + dt = datetime.strptime(timestamp, dt_format) + + args = { + "service": service_name, + "resource": bucket_name, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.s3", **args) + if span: + span.set_tags(tags) + span.start = dt.replace(tzinfo=timezone.utc).timestamp() + return span + + +def create_inferred_span_from_eventbridge_event(event, context): + source = event.get("source") + service_name = determine_service_name( + service_mapping, source, "lambda_eventbridge", "eventbridge" + ) + tags = { + "operation_name": "aws.eventbridge", + "resource_names": source, + "detail_type": event.get("detail-type"), + } + InferredSpanInfo.set_tags( + tags, + synchronicity="async", + tag_source="self", + ) + dt_format = "%Y-%m-%dT%H:%M:%SZ" + timestamp = event.get("time") + dt = datetime.strptime(timestamp, dt_format) + + args = { + "service": service_name, + "resource": source, + "span_type": "web", + } + tracer.set_tags({"_dd.origin": "lambda"}) + span = tracer.trace("aws.eventbridge", **args) + if span: + span.set_tags(tags) + span.start = dt.replace(tzinfo=timezone.utc).timestamp() + return span + + +def create_function_execution_span( + context, + function_name, + is_cold_start, + is_proactive_init, + trace_context_source, + merge_xray_traces, + trigger_tags, + parent_span=None, +): + tags = {} + if context: + function_arn = (context.invoked_function_arn or "").lower() + tk = function_arn.split(":") + function_arn = ":".join(tk[0:7]) if len(tk) > 7 else function_arn + function_version = tk[7] if len(tk) > 7 else "$LATEST" + tags = { + "cold_start": str(is_cold_start).lower(), + "function_arn": function_arn, + "function_version": function_version, + "request_id": context.aws_request_id, + "resource_names": context.function_name, + "functionname": context.function_name.lower() + if context.function_name + else None, + "datadog_lambda": datadog_lambda_version, + "dd_trace": ddtrace_version, + "span.name": "aws.lambda", + } + if is_proactive_init: + tags["proactive_initialization"] = str(is_proactive_init).lower() + if trace_context_source == TraceContextSource.XRAY and merge_xray_traces: + 
tags["_dd.parent_source"] = trace_context_source
+ tags.update(trigger_tags)
+ args = {
+ "service": "aws.lambda",
+ "resource": function_name,
+ "span_type": "serverless",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.lambda", **args)
+ if span:
+ span.set_tags(tags)
+ if parent_span:
+ span.parent_id = parent_span.span_id
+ return span
+
+
+def mark_trace_as_error_for_5xx_responses(context, status_code, span):
+ if len(status_code) == 3 and status_code.startswith("5"):
+ submit_errors_metric(context)
+ if span:
+ span.error = 1
+
+
+class InferredSpanInfo(object):
+ BASE_NAME = "_inferred_span"
+ SYNCHRONICITY = f"{BASE_NAME}.synchronicity"
+ TAG_SOURCE = f"{BASE_NAME}.tag_source"
+
+ @staticmethod
+ def set_tags(
+ tags: Dict[str, str],
+ synchronicity: Optional[Literal["sync", "async"]] = None,
+ tag_source: Optional[Literal["lambda", "self"]] = None,
+ ):
+ if synchronicity is not None:
+ tags[InferredSpanInfo.SYNCHRONICITY] = str(synchronicity)
+ if tag_source is not None:
+ tags[InferredSpanInfo.TAG_SOURCE] = str(tag_source)
+
+ @staticmethod
+ def set_tags_to_span(
+ span: Span,
+ synchronicity: Optional[Literal["sync", "async"]] = None,
+ tag_source: Optional[Literal["lambda", "self"]] = None,
+ ):
+ if synchronicity is not None:
+ span.set_tags({InferredSpanInfo.SYNCHRONICITY: synchronicity})
+ if tag_source is not None:
+ span.set_tags({InferredSpanInfo.TAG_SOURCE: str(tag_source)})
+
+ @staticmethod
+ def is_async(span: Span) -> bool:
+ if not span:
+ return False
+ try:
+ return span.get_tag(InferredSpanInfo.SYNCHRONICITY) == "async"
+ except Exception as e:
+ logger.debug(
+ "Unable to read the %s tag, returning False. \
+ Reason: %s.",
+ InferredSpanInfo.SYNCHRONICITY,
+ e,
+ )
+ return False
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py new file mode 100644 index 0000000..bbb44b3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py @@ -0,0 +1,352 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import base64
+import gzip
+import json
+from io import BytesIO, BufferedReader
+from enum import Enum
+from typing import Any
+
+
+class _stringTypedEnum(Enum):
+ """
+ _stringTypedEnum provides a type-hinted convenience function for getting the string value of
+ an enum.
+ """
+
+ def get_string(self) -> str:
+ return self.value
+
+
+class EventTypes(_stringTypedEnum):
+ """
+ EventTypes is an enum of Lambda event types we care about.
+ """
+
+ UNKNOWN = "unknown"
+ API_GATEWAY = "api-gateway"
+ APPSYNC = "appsync"
+ ALB = "application-load-balancer"
+ CLOUDWATCH_LOGS = "cloudwatch-logs"
+ CLOUDWATCH_EVENTS = "cloudwatch-events"
+ CLOUDFRONT = "cloudfront"
+ DYNAMODB = "dynamodb"
+ EVENTBRIDGE = "eventbridge"
+ KINESIS = "kinesis"
+ LAMBDA_FUNCTION_URL = "lambda-function-url"
+ S3 = "s3"
+ SNS = "sns"
+ SQS = "sqs"
+ STEPFUNCTIONS = "states"
+
+
+class EventSubtypes(_stringTypedEnum):
+ """
+ EventSubtypes is an enum of Lambda event subtypes.
+ Currently, API Gateway event subtypes are supported,
+ e.g. HTTP-API and Websocket events vs vanilla API-Gateway events.
+ """ + + NONE = "none" + API_GATEWAY = "api-gateway" # regular API Gateway + WEBSOCKET = "websocket" + HTTP_API = "http-api" + + +class _EventSource: + """ + _EventSource holds an event's type and subtype. + """ + + def __init__( + self, + event_type: EventTypes, + subtype: EventSubtypes = EventSubtypes.NONE, + ): + self.event_type = event_type + self.subtype = subtype + + def to_string(self) -> str: + """ + to_string returns the string representation of an _EventSource. + Since to_string was added to support trigger tagging, + the event's subtype will never be included in the string. + """ + return self.event_type.get_string() + + def equals( + self, event_type: EventTypes, subtype: EventSubtypes = EventSubtypes.NONE + ) -> bool: + """ + equals provides syntactic sugar to determine whether this _EventSource has a given type + and subtype. + Unknown events will never equal other events. + """ + if self.event_type == EventTypes.UNKNOWN: + return False + if self.event_type != event_type: + return False + if self.subtype != subtype: + return False + return True + + +def get_aws_partition_by_region(region): + if region.startswith("us-gov-"): + return "aws-us-gov" + if region.startswith("cn-"): + return "aws-cn" + return "aws" + + +def get_first_record(event): + records = event.get("Records") + if records and len(records) > 0: + return records[0] + + +def parse_event_source(event: dict) -> _EventSource: + """Determines the source of the trigger event""" + if type(event) is not dict: + return _EventSource(EventTypes.UNKNOWN) + + event_source = _EventSource(EventTypes.UNKNOWN) + + request_context = event.get("requestContext") + if request_context and request_context.get("stage"): + if "domainName" in request_context and detect_lambda_function_url_domain( + request_context.get("domainName") + ): + return _EventSource(EventTypes.LAMBDA_FUNCTION_URL) + event_source = _EventSource(EventTypes.API_GATEWAY) + if "httpMethod" in event: + event_source.subtype = EventSubtypes.API_GATEWAY + if "routeKey" in event: + event_source.subtype = EventSubtypes.HTTP_API + if event.get("requestContext", {}).get("messageDirection"): + event_source.subtype = EventSubtypes.WEBSOCKET + + if request_context and request_context.get("elb"): + event_source = _EventSource(EventTypes.ALB) + + if event.get("awslogs"): + event_source = _EventSource(EventTypes.CLOUDWATCH_LOGS) + + if event.get("detail-type"): + event_source = _EventSource(EventTypes.EVENTBRIDGE) + + event_detail = event.get("detail") + has_event_categories = ( + isinstance(event_detail, dict) + and event_detail.get("EventCategories") is not None + ) + if event.get("source") == "aws.events" or has_event_categories: + event_source = _EventSource(EventTypes.CLOUDWATCH_EVENTS) + + if "Execution" in event and "StateMachine" in event and "State" in event: + event_source = _EventSource(EventTypes.STEPFUNCTIONS) + + event_record = get_first_record(event) + if event_record: + aws_event_source = event_record.get( + "eventSource", event_record.get("EventSource") + ) + + if aws_event_source == "aws:dynamodb": + event_source = _EventSource(EventTypes.DYNAMODB) + if aws_event_source == "aws:kinesis": + event_source = _EventSource(EventTypes.KINESIS) + if aws_event_source == "aws:s3": + event_source = _EventSource(EventTypes.S3) + if aws_event_source == "aws:sns": + event_source = _EventSource(EventTypes.SNS) + if aws_event_source == "aws:sqs": + event_source = _EventSource(EventTypes.SQS) + + if event_record.get("cf"): + event_source = _EventSource(EventTypes.CLOUDFRONT) + + 
return event_source + + +def detect_lambda_function_url_domain(domain: str) -> bool: + # e.g. "etsn5fibjr.lambda-url.eu-south-1.amazonaws.com" + domain_parts = domain.split(".") + if len(domain_parts) < 2: + return False + return domain_parts[1] == "lambda-url" + + +def parse_event_source_arn(source: _EventSource, event: dict, context: Any) -> str: + """ + Parses the trigger event for an available ARN. If an ARN field is not provided + in the event we stitch it together. + """ + split_function_arn = context.invoked_function_arn.split(":") + region = split_function_arn[3] + account_id = split_function_arn[4] + aws_arn = get_aws_partition_by_region(region) + + event_record = get_first_record(event) + # e.g. arn:aws:s3:::lambda-xyz123-abc890 + if source.to_string() == "s3": + return event_record.get("s3", {}).get("bucket", {}).get("arn") + + # e.g. arn:aws:sns:us-east-1:123456789012:sns-lambda + if source.to_string() == "sns": + return event_record.get("Sns", {}).get("TopicArn") + + # e.g. arn:aws:cloudfront::123456789012:distribution/ABC123XYZ + if source.event_type == EventTypes.CLOUDFRONT: + distribution_id = ( + event_record.get("cf", {}).get("config", {}).get("distributionId") + ) + return "arn:{}:cloudfront::{}:distribution/{}".format( + aws_arn, account_id, distribution_id + ) + + # e.g. arn:aws:lambda:::url:: + if source.equals(EventTypes.LAMBDA_FUNCTION_URL): + function_name = "" + if len(split_function_arn) >= 7: + function_name = split_function_arn[6] + function_arn = f"arn:aws:lambda:{region}:{account_id}:url:{function_name}" + function_qualifier = "" + if len(split_function_arn) >= 8: + function_qualifier = split_function_arn[7] + function_arn = function_arn + f":{function_qualifier}" + return function_arn + + # e.g. arn:aws:apigateway:us-east-1::/restapis/xyz123/stages/default + if source.event_type == EventTypes.API_GATEWAY: + request_context = event.get("requestContext") + return "arn:{}:apigateway:{}::/restapis/{}/stages/{}".format( + aws_arn, region, request_context.get("apiId"), request_context.get("stage") + ) + + # e.g. arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/lambda-xyz/123 + if source.event_type == EventTypes.ALB: + request_context = event.get("requestContext") + return request_context.get("elb", {}).get("targetGroupArn") + + # e.g. arn:aws:logs:us-west-1:123456789012:log-group:/my-log-group-xyz + if source.event_type == EventTypes.CLOUDWATCH_LOGS: + with gzip.GzipFile( + fileobj=BytesIO(base64.b64decode(event.get("awslogs", {}).get("data"))) + ) as decompress_stream: + data = b"".join(BufferedReader(decompress_stream)) + logs = json.loads(data) + log_group = logs.get("logGroup", "cloudwatch") + return "arn:{}:logs:{}:{}:log-group:{}".format( + aws_arn, region, account_id, log_group + ) + + # e.g. 
arn:aws:events:us-east-1:123456789012:rule/my-schedule + if source.event_type == EventTypes.CLOUDWATCH_EVENTS and event.get("resources"): + return event.get("resources")[0] + + +def get_event_source_arn(source: _EventSource, event: dict, context: Any) -> str: + event_source_arn = event.get("eventSourceARN") or event.get("eventSourceArn") + + event_record = get_first_record(event) + if event_record: + event_source_arn = event_record.get("eventSourceARN") or event_record.get( + "eventSourceArn" + ) + + if event_source_arn is None: + event_source_arn = parse_event_source_arn(source, event, context) + + return event_source_arn + + +def extract_http_tags(event): + """ + Extracts HTTP facet tags from the triggering event + """ + http_tags = {} + request_context = event.get("requestContext") + path = event.get("path") + method = event.get("httpMethod") + if request_context and request_context.get("stage"): + if request_context.get("domainName"): + http_tags["http.url"] = request_context.get("domainName") + + path = request_context.get("path") + method = request_context.get("httpMethod") + # Version 2.0 HTTP API Gateway + apigateway_v2_http = request_context.get("http") + if event.get("version") == "2.0" and apigateway_v2_http: + path = apigateway_v2_http.get("path") + method = apigateway_v2_http.get("method") + + if path: + http_tags["http.url_details.path"] = path + if method: + http_tags["http.method"] = method + + headers = event.get("headers") + if headers and headers.get("Referer"): + http_tags["http.referer"] = headers.get("Referer") + + return http_tags + + +def extract_trigger_tags(event: dict, context: Any) -> dict: + """ + Parses the trigger event object to get tags to be added to the span metadata + """ + trigger_tags = {} + event_source = parse_event_source(event) + if event_source.to_string() is not None and event_source.to_string() != "unknown": + trigger_tags["function_trigger.event_source"] = event_source.to_string() + + event_source_arn = get_event_source_arn(event_source, event, context) + if event_source_arn: + trigger_tags["function_trigger.event_source_arn"] = event_source_arn + + if event_source.event_type in [ + EventTypes.API_GATEWAY, + EventTypes.ALB, + EventTypes.LAMBDA_FUNCTION_URL, + ]: + trigger_tags.update(extract_http_tags(event)) + + return trigger_tags + + +def extract_http_status_code_tag(trigger_tags, response): + """ + If the Lambda was triggered by API Gateway, Lambda Function URL, or ALB, + add the returned status code as a tag to the function execution span. + """ + if trigger_tags is None: + return + str_event_source = trigger_tags.get("function_trigger.event_source") + # it would be cleaner if each event type was a constant object that + # knew some properties about itself like this. 
+ str_http_triggers = [ + et.value + for et in [ + EventTypes.API_GATEWAY, + EventTypes.LAMBDA_FUNCTION_URL, + EventTypes.ALB, + ] + ] + if str_event_source not in str_http_triggers: + return + + status_code = "200" + if response is None: + # Return a 502 status if no response is found + status_code = "502" + elif hasattr(response, "get"): + status_code = response.get("statusCode") + elif hasattr(response, "status_code"): + status_code = response.status_code + + return str(status_code) diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py new file mode 100644 index 0000000..73d1788 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py @@ -0,0 +1,395 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019 Datadog, Inc. +import base64 +import os +import logging +import traceback +from importlib import import_module +import json +from time import time_ns + +from datadog_lambda.extension import should_use_extension, flush_extension +from datadog_lambda.cold_start import ( + set_cold_start, + is_cold_start, + is_proactive_init, + is_new_sandbox, + ColdStartTracer, +) +from datadog_lambda.constants import ( + TraceContextSource, + XraySubsegment, + Headers, + TraceHeader, +) +from datadog_lambda.metric import ( + flush_stats, + submit_invocations_metric, + submit_errors_metric, +) +from datadog_lambda.module_name import modify_module_name +from datadog_lambda.patch import patch_all +from datadog_lambda.tracing import ( + extract_dd_trace_context, + create_dd_dummy_metadata_subsegment, + inject_correlation_ids, + dd_tracing_enabled, + mark_trace_as_error_for_5xx_responses, + set_correlation_ids, + set_dd_trace_py_root, + create_function_execution_span, + create_inferred_span, + InferredSpanInfo, + is_authorizer_response, + tracer, +) +from datadog_lambda.trigger import ( + extract_trigger_tags, + extract_http_status_code_tag, +) + +profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true" +if profiling_env_var: + from ddtrace.profiling import profiler + +logger = logging.getLogger(__name__) + +DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG" +DD_LOGS_INJECTION = "DD_LOGS_INJECTION" +DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES" +AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME" +DD_LOCAL_TEST = "DD_LOCAL_TEST" +DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR" +DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES" +DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT" +DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT" +DD_COLD_START_TRACING = "DD_COLD_START_TRACING" +DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION" +DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB" +DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD" +DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH" +DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME" +DD_SERVICE = "DD_SERVICE" +DD_ENV = "DD_ENV" + + +def get_env_as_int(env_key, default_value: int) -> int: + try: + return int(os.environ.get(env_key, default_value)) + except Exception as e: + logger.warn( + f"Failed to parse {env_key} as int. Using default value: {default_value}. 
Error: {e}" + ) + return default_value + + +dd_capture_lambda_payload_enabled = ( + os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true" +) + +if dd_capture_lambda_payload_enabled: + import datadog_lambda.tag_object as tag_object + + tag_object.max_depth = get_env_as_int( + DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth + ) + +env_env_var = os.environ.get(DD_ENV, None) + +init_timestamp_ns = time_ns() + +""" +Usage: + +import requests +from datadog_lambda.wrapper import datadog_lambda_wrapper +from datadog_lambda.metric import lambda_metric + +@datadog_lambda_wrapper +def my_lambda_handle(event, context): + lambda_metric("my_metric", 10) + requests.get("https://www.datadoghq.com") +""" + + +class _NoopDecorator(object): + def __init__(self, func): + self.func = func + + def __call__(self, *args, **kwargs): + return self.func(*args, **kwargs) + + +class _LambdaDecorator(object): + """ + Decorator to automatically initialize Datadog API client, flush metrics, + and extracts/injects trace context. + """ + + _force_wrap = False + + def __new__(cls, func): + """ + If the decorator is accidentally applied to the same function multiple times, + wrap only once. + + If _force_wrap, always return a real decorator, useful for unit tests. + """ + try: + if cls._force_wrap or not isinstance(func, _LambdaDecorator): + wrapped = super(_LambdaDecorator, cls).__new__(cls) + logger.debug("datadog_lambda_wrapper wrapped") + return wrapped + else: + logger.debug("datadog_lambda_wrapper already wrapped") + return _NoopDecorator(func) + except Exception as e: + logger.error(format_err_with_traceback(e)) + return func + + def __init__(self, func): + """Executes when the wrapped function gets wrapped""" + try: + self.func = func + self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true" + self.logs_injection = ( + os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true" + ) + self.merge_xray_traces = ( + os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true" + ) + self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function") + self.service = os.environ.get(DD_SERVICE, None) + self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None) + self.trace_extractor = None + self.span = None + self.inferred_span = None + depends_on_dd_tracing_enabled = ( + lambda original_boolean: dd_tracing_enabled and original_boolean + ) + self.make_inferred_span = depends_on_dd_tracing_enabled( + os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true" + ) + self.encode_authorizer_context = depends_on_dd_tracing_enabled( + os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true" + ) + self.decode_authorizer_context = depends_on_dd_tracing_enabled( + os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true" + ) + self.cold_start_tracing = depends_on_dd_tracing_enabled( + os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true" + ) + self.min_cold_start_trace_duration = get_env_as_int( + DD_MIN_COLD_START_DURATION, 3 + ) + self.local_testing_mode = os.environ.get( + DD_LOCAL_TEST, "false" + ).lower() in ("true", "1") + self.cold_start_trace_skip_lib = [ + "ddtrace.internal.compat", + "ddtrace.filters", + ] + if DD_COLD_START_TRACE_SKIP_LIB in os.environ: + try: + self.cold_start_trace_skip_lib = os.environ[ + DD_COLD_START_TRACE_SKIP_LIB + ].split(",") + except Exception: + logger.debug(f"Malformatted for env {DD_COLD_START_TRACE_SKIP_LIB}") + self.response = None + if profiling_env_var: + self.prof = 
profiler.Profiler(env=env_env_var, service=self.service) + if self.extractor_env: + extractor_parts = self.extractor_env.rsplit(".", 1) + if len(extractor_parts) == 2: + (mod_name, extractor_name) = extractor_parts + modified_extractor_name = modify_module_name(mod_name) + extractor_module = import_module(modified_extractor_name) + self.trace_extractor = getattr(extractor_module, extractor_name) + + # Inject trace correlation ids to logs + if self.logs_injection: + inject_correlation_ids() + + # This prevents a breaking change in ddtrace v0.49 regarding the service name + # in requests-related spans + os.environ[DD_REQUESTS_SERVICE_NAME] = os.environ.get( + DD_SERVICE, "aws.lambda" + ) + # Patch third-party libraries for tracing + patch_all() + + logger.debug("datadog_lambda_wrapper initialized") + except Exception as e: + logger.error(format_err_with_traceback(e)) + + def __call__(self, event, context, **kwargs): + """Executes when the wrapped function gets called""" + self._before(event, context) + try: + self.response = self.func(event, context, **kwargs) + return self.response + except Exception: + submit_errors_metric(context) + if self.span: + self.span.set_traceback() + raise + finally: + self._after(event, context) + + def _inject_authorizer_span_headers(self, request_id): + reference_span = self.inferred_span if self.inferred_span else self.span + assert reference_span.finished + # the finish_time_ns should be set as the end of the inferred span if it exist + # or the end of the current span + finish_time_ns = ( + reference_span.start_ns + reference_span.duration_ns + if reference_span is not None + and hasattr(reference_span, "start_ns") + and hasattr(reference_span, "duration_ns") + else time_ns() + ) + injected_headers = {} + source_span = self.inferred_span if self.inferred_span else self.span + span_context = source_span.context + injected_headers[TraceHeader.TRACE_ID] = str(span_context.trace_id) + injected_headers[TraceHeader.PARENT_ID] = str(span_context.span_id) + sampling_priority = span_context.sampling_priority + if sampling_priority is not None: + injected_headers[TraceHeader.SAMPLING_PRIORITY] = str( + span_context.sampling_priority + ) + injected_headers[Headers.Parent_Span_Finish_Time] = finish_time_ns + if request_id is not None: + injected_headers[Headers.Authorizing_Request_Id] = request_id + datadog_data = base64.b64encode(json.dumps(injected_headers).encode()).decode() + self.response.setdefault("context", {}) + self.response["context"]["_datadog"] = datadog_data + + def _before(self, event, context): + try: + self.response = None + set_cold_start(init_timestamp_ns) + submit_invocations_metric(context) + self.trigger_tags = extract_trigger_tags(event, context) + # Extract Datadog trace context and source from incoming requests + dd_context, trace_context_source, event_source = extract_dd_trace_context( + event, + context, + extractor=self.trace_extractor, + decode_authorizer_context=self.decode_authorizer_context, + ) + self.event_source = event_source + # Create a Datadog X-Ray subsegment with the trace context + if dd_context and trace_context_source == TraceContextSource.EVENT: + create_dd_dummy_metadata_subsegment( + { + "trace-id": str(dd_context.trace_id), + "parent-id": str(dd_context.span_id), + "sampling-priority": str(dd_context.sampling_priority), + }, + XraySubsegment.TRACE_KEY, + ) + + if dd_tracing_enabled: + set_dd_trace_py_root(trace_context_source, self.merge_xray_traces) + if self.make_inferred_span: + self.inferred_span = 
create_inferred_span( + event, context, event_source, self.decode_authorizer_context + ) + self.span = create_function_execution_span( + context, + self.function_name, + is_cold_start(), + is_proactive_init(), + trace_context_source, + self.merge_xray_traces, + self.trigger_tags, + parent_span=self.inferred_span, + ) + else: + set_correlation_ids() + if profiling_env_var and is_new_sandbox(): + self.prof.start(stop_on_exit=False, profile_children=True) + logger.debug("datadog_lambda_wrapper _before() done") + except Exception as e: + logger.error(format_err_with_traceback(e)) + + def _after(self, event, context): + try: + status_code = extract_http_status_code_tag(self.trigger_tags, self.response) + if status_code: + self.trigger_tags["http.status_code"] = status_code + mark_trace_as_error_for_5xx_responses(context, status_code, self.span) + + # Create a new dummy Datadog subsegment for function trigger tags so we + # can attach them to X-Ray spans when hybrid tracing is used + if self.trigger_tags: + create_dd_dummy_metadata_subsegment( + self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY + ) + should_trace_cold_start = self.cold_start_tracing and is_new_sandbox() + if should_trace_cold_start: + trace_ctx = tracer.current_trace_context() + + if self.span: + if dd_capture_lambda_payload_enabled: + tag_object.tag_object(self.span, "function.request", event) + tag_object.tag_object(self.span, "function.response", self.response) + + if status_code: + self.span.set_tag("http.status_code", status_code) + self.span.finish() + + if self.inferred_span: + if status_code: + self.inferred_span.set_tag("http.status_code", status_code) + + if self.service: + self.inferred_span.set_tag("peer.service", self.service) + + if InferredSpanInfo.is_async(self.inferred_span) and self.span: + self.inferred_span.finish(finish_time=self.span.start) + else: + self.inferred_span.finish() + + if should_trace_cold_start: + try: + following_span = self.span or self.inferred_span + ColdStartTracer( + tracer, + self.function_name, + following_span.start_ns, + trace_ctx, + self.min_cold_start_trace_duration, + self.cold_start_trace_skip_lib, + ).trace() + except Exception as e: + logger.debug("Failed to create cold start spans. %s", e) + + if not self.flush_to_log or should_use_extension: + flush_stats() + if should_use_extension and self.local_testing_mode: + # when testing locally, the extension does not know when an + # invocation completes because it does not have access to the + # logs api + flush_extension() + + if self.encode_authorizer_context and is_authorizer_response(self.response): + self._inject_authorizer_span_headers( + event.get("requestContext", {}).get("requestId") + ) + logger.debug("datadog_lambda_wrapper _after() done") + except Exception as e: + logger.error(format_err_with_traceback(e)) + + +def format_err_with_traceback(e): + return "Error {}. 
Traceback: {}".format( + e, traceback.format_exc().replace("\n", "\r") + ) + + +datadog_lambda_wrapper = _LambdaDecorator diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py new file mode 100644 index 0000000..88d108f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py @@ -0,0 +1,118 @@ +import os +import logging +import json +import binascii +import time +import socket + +from datadog_lambda.constants import XrayDaemon, XraySubsegment, TraceContextSource + +logger = logging.getLogger(__name__) + + +def get_xray_host_port(address): + if address == "": + logger.debug("X-Ray daemon env var not set, not sending sub-segment") + return None + parts = address.split(":") + if len(parts) <= 1: + logger.debug("X-Ray daemon env var not set, not sending sub-segment") + return None + port = int(parts[1]) + host = parts[0] + return (host, port) + + +def send(host_port_tuple, payload): + sock = None + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(0) + sock.connect(host_port_tuple) + sock.send(payload.encode("utf-8")) + except Exception as e_send: + logger.error("Error occurred submitting to xray daemon: %s", str(e_send)) + try: + sock.close() + except Exception as e_close: + logger.error("Error while closing the socket: %s", str(e_close)) + + +def build_segment_payload(payload): + if payload is None: + return None + return '{"format": "json", "version": 1}' + "\n" + payload + + +def parse_xray_header(raw_trace_id): + # Example: + # Root=1-5e272390-8c398be037738dc042009320;Parent=94ae789b969f1cc5;Sampled=1;Lineage=c6c5b1b9:0 + logger.debug("Reading trace context from env var %s", raw_trace_id) + if len(raw_trace_id) == 0: + return None + parts = raw_trace_id.split(";") + if len(parts) < 3: + return None + root = parts[0].replace("Root=", "") + parent = parts[1].replace("Parent=", "") + sampled = parts[2].replace("Sampled=", "") + if ( + len(root) == len(parts[0]) + or len(parent) == len(parts[1]) + or len(sampled) == len(parts[2]) + ): + return None + return { + "parent_id": parent, + "trace_id": root, + "sampled": sampled, + "source": TraceContextSource.XRAY, + } + + +def generate_random_id(): + return binascii.b2a_hex(os.urandom(8)).decode("utf-8") + + +def build_segment(context, key, metadata): + segment = json.dumps( + { + "id": generate_random_id(), + "trace_id": context["trace_id"], + "parent_id": context["parent_id"], + "name": XraySubsegment.NAME, + "start_time": time.time(), + "end_time": time.time(), + "type": "subsegment", + "metadata": { + XraySubsegment.NAMESPACE: { + key: metadata, + } + }, + } + ) + return segment + + +def send_segment(key, metadata): + host_port_tuple = get_xray_host_port( + os.environ.get(XrayDaemon.XRAY_DAEMON_ADDRESS, "") + ) + if host_port_tuple is None: + return None + context = parse_xray_header( + os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "") + ) + if context is None: + logger.debug( + "Failed to create segment since it was not possible to get trace context from header" + ) + return None + + # Skip adding segment, if the xray trace is going to be sampled away. 
+ if context["sampled"] == "0": + logger.debug("Skipping sending metadata, x-ray trace was sampled out") + return None + segment = build_segment(context, key, metadata) + segment_payload = build_segment_payload(segment) + send(host_port_tuple, segment_payload) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE new file mode 100644 index 0000000..7e153db --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2020 DataDog, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv new file mode 100644 index 0000000..66c2263 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv @@ -0,0 +1,3 @@ +Component,Origin,License,Copyright +import,numpy,BSD-3-Clause,Copyright (c) 2005-2020 NumPy Developers.; All rights reserved. +import,setuptools,MIT,Copyright (c) 2016 Jason R Coombs diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA new file mode 100644 index 0000000..3716a35 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA @@ -0,0 +1,166 @@ +Metadata-Version: 2.1 +Name: ddsketch +Version: 2.0.4 +Summary: Distributed quantile sketches +Home-page: http://github.com/datadog/sketches-py +Download-URL: https://github.com/DataDog/sketches-py/archive/v1.0.tar.gz +Author: Jee Rim, Charles-Philippe Masson, Homin Lee +Author-email: jee.rim@datadoghq.com, charles.masson@datadoghq.com, homin@datadoghq.com +Keywords: ddsketch,quantile,sketch +Classifier: Programming Language :: Python :: 3 +Classifier: License :: OSI Approved :: Apache Software License +Requires-Python: >=2.7 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: LICENSE-3rdparty.csv +License-File: NOTICE +Requires-Dist: six +Requires-Dist: typing ; python_version < "3.5" +Requires-Dist: protobuf (<4.21.0,>=3.0.0) ; python_version < "3.7" +Requires-Dist: protobuf (>=3.0.0) ; python_version >= "3.7" + +# ddsketch + +This repo contains the Python implementation of the distributed quantile sketch +algorithm DDSketch [1]. DDSketch has relative-error guarantees for any quantile +q in [0, 1]. That is if the true value of the qth-quantile is `x` then DDSketch +returns a value `y` such that `|x-y| / x < e` where `e` is the relative error +parameter. (The default here is set to 0.01.) 
DDSketch is also fully mergeable,
+ meaning that multiple sketches from distributed systems can be combined in a
+ central node.
+
+ Our default implementation, `DDSketch`, is guaranteed [1] to not grow too large
+ in size for any data that can be described by a distribution whose tails are
+ sub-exponential.
+
+ We also provide implementations (`LogCollapsingLowestDenseDDSketch` and
+ `LogCollapsingHighestDenseDDSketch`) where the q-quantile will be accurate up to
+ the specified relative error for q that is not too small (or large). Concretely,
+ the q-quantile will be accurate up to the specified relative error as long as it
+ belongs to one of the `m` bins kept by the sketch. If the data is time in
+ seconds, the default of `m = 2048` covers 80 microseconds to 1 year.
+
+ ## Installation
+
+ To install this package, run `pip install ddsketch`, or clone the repo and run
+ `python setup.py install`. This package depends on `numpy` and `protobuf`. (The
+ protobuf dependency can be removed if it's not applicable.)
+
+ ## Usage
+ ```
+ from ddsketch import DDSketch
+
+ sketch = DDSketch()
+ ```
+ Add values to the sketch
+ ```
+ import numpy as np
+
+ values = np.random.normal(size=500)
+ for v in values:
+ sketch.add(v)
+ ```
+ Find the quantiles of `values` to within the relative error.
+ ```
+ quantiles = [sketch.get_quantile_value(q) for q in [0.5, 0.75, 0.9, 1]]
+ ```
+ Merge another `DDSketch` into `sketch`.
+ ```
+ another_sketch = DDSketch()
+ other_values = np.random.normal(size=500)
+ for v in other_values:
+ another_sketch.add(v)
+ sketch.merge(another_sketch)
+ ```
+ The quantiles of `values` concatenated with `other_values` are still accurate to within the relative error.
+
+ ## Development
+
+ To work on ddsketch, a Python interpreter must be installed. It is recommended to use the provided development
+ container (requires [docker](https://www.docker.com/)) which includes all the required Python interpreters.
+
+ docker-compose run dev
+
+ Or, if developing outside of docker, it is recommended to use a virtual environment:
+
+ pip install virtualenv
+ virtualenv --python=3 .venv
+ source .venv/bin/activate
+
+
+ ### Testing
+
+ To run the tests, install `riot`:
+
+ pip install riot
+
+ Replace the Python version with the interpreter(s) available.
+
+ # Run tests with Python 3.9
+ riot run -p3.9 test
+
+ ### Release notes
+
+ New features, bug fixes, deprecations and other breaking changes must have
+ release notes included.
+
+ To generate a release note for the change:
+
+ riot run reno new
+
+ Edit the generated file to include notes on the changes made in the commit/PR
+ and commit it.
+
+
+ ### Formatting
+
+ Format code with
+
+ riot run fmt
+
+
+ ### Type-checking
+
+ Type checking is done with [mypy](http://mypy-lang.org/):
+
+ riot run mypy
+
+
+ ### Linting
+
+ Lint the code with [flake8](https://flake8.pycqa.org/en/latest/):
+
+ riot run flake8
+
+
+ ### Protobuf
+
+ The protobuf is stored in the go repository: https://github.com/DataDog/sketches-go/blob/master/ddsketch/pb/ddsketch.proto
+
+ Install the minimum required protoc and generate the Python code:
+
+ ```sh
+ docker run -v $PWD:/code -it ubuntu:18.04 /bin/bash
+ apt update && apt install protobuf-compiler # default is 3.0.0
+ protoc --proto_path=ddsketch/pb/ --python_out=ddsketch/pb/ ddsketch/pb/ddsketch.proto
+ ```
+
+
+ ### Releasing
+
+ 1. 
Generate the release notes and use [`pandoc`](https://pandoc.org/) to format +them for Github: +```bash + git checkout master && git pull + riot run -s reno report --no-show-source | pandoc -f rst -t gfm --wrap=none +``` + Copy the output into a new release: https://github.com/DataDog/sketches-py/releases/new. + +2. Enter a tag for the release (following [`semver`](https://semver.org)) (eg. `v1.1.3`, `v1.0.3`, `v1.2.0`). +3. Use the tag without the `v` as the title. +4. Save the release as a draft and pass the link to someone else to give a quick review. +5. If all looks good hit publish + + +## References +[1] Charles Masson and Jee E Rim and Homin K. Lee. DDSketch: A fast and fully-mergeable quantile sketch with relative-error guarantees. PVLDB, 12(12): 2195-2205, 2019. (The code referenced in the paper, including our implementation of the the Greenwald-Khanna (GK) algorithm, can be found at: https://github.com/DataDog/sketches-py/releases/tag/v0.1 ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE new file mode 100644 index 0000000..035c9ad --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE @@ -0,0 +1,4 @@ +Datadog sketches-py +Copyright 2020 Datadog, Inc. + +This product includes software developed at Datadog (https://www.datadoghq.com/). diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD new file mode 100644 index 0000000..d951ffa --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD @@ -0,0 +1,30 @@ +ddsketch-2.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ddsketch-2.0.4.dist-info/LICENSE,sha256=T0-WFEYXjD5IYjlmlH0JAbfqTHa_YvAl875ydhaqdKA,554 +ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv,sha256=z16O1RgqAgDTZqLSzos-HZ1vqH1cqaR6Vm9qDz5Fhuc,201 +ddsketch-2.0.4.dist-info/METADATA,sha256=cMuoMMwqZ0i1m3iMwqDBdgsWokDVcY4BzUxZj0cZe68,5456 +ddsketch-2.0.4.dist-info/NOTICE,sha256=rVyH-sbkieAzCC_Ni4rDz0feqaG-7tWHwlj1aIfH33Q,132 +ddsketch-2.0.4.dist-info/RECORD,, +ddsketch-2.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddsketch-2.0.4.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +ddsketch-2.0.4.dist-info/top_level.txt,sha256=pPjwA4dRqDmExS6WSOHicanUpoHB-b9852iZwDUkNcI,9 +ddsketch/__init__.py,sha256=IYdg8QtcIE_l66Ze1TiC18XaVZwAR9k8YOSY_2rkMYs,717 +ddsketch/__pycache__/__init__.cpython-311.pyc,, +ddsketch/__pycache__/__version.cpython-311.pyc,, +ddsketch/__pycache__/_version.cpython-311.pyc,, +ddsketch/__pycache__/ddsketch.cpython-311.pyc,, +ddsketch/__pycache__/mapping.cpython-311.pyc,, +ddsketch/__pycache__/store.cpython-311.pyc,, +ddsketch/__version.py,sha256=O-_Pobu2r8B57xjWutMfz4LJDMMon23xJAyWJe3yPAQ,176 +ddsketch/_version.py,sha256=HUdEupMtRv7sb1QCczoihjq-kz5jF4rDbew15qDFB-g,504 +ddsketch/ddsketch.py,sha256=ha-eunudNhueaVJGDB9VF13QGhi1W1ncqAto5XDGtnI,11444 +ddsketch/mapping.py,sha256=FBs2PdhLAQB3F28GpWfpAGRpDcnt-FM-n8fIdtC0JYM,7759 +ddsketch/pb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddsketch/pb/__pycache__/__init__.cpython-311.pyc,, +ddsketch/pb/__pycache__/ddsketch_pb2.cpython-311.pyc,, +ddsketch/pb/__pycache__/ddsketch_pre319_pb2.cpython-311.pyc,, +ddsketch/pb/__pycache__/proto.cpython-311.pyc,, +ddsketch/pb/ddsketch_pb2.py,sha256=y_5bB9hMZyDZQtNvSzRRxMua-BYBtarjSA6qLkuyA54,3580 
+ddsketch/pb/ddsketch_pre319_pb2.py,sha256=lqYZ8DaWHl6Sgb5MW2rb4ztJQbE7Su-g5yqkClTmICA,10087 +ddsketch/pb/proto.py,sha256=S9PYWyGTpmB2XI_RJIdxLw9VA-b_OB1YQ-P7VXHcvNk,3315 +ddsketch/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddsketch/store.py,sha256=2yZvF78s65blAon-TuS_vRJiea4nD71P9Bo01rUuP7w,17515 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt new file mode 100644 index 0000000..292b4e0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +ddsketch diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store new file mode 100644 index 0000000..c5e11fb Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py new file mode 100644 index 0000000..fcf1e10 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py @@ -0,0 +1,24 @@ +from ._version import get_version +from .ddsketch import DDSketch +from .ddsketch import LogCollapsingHighestDenseDDSketch +from .ddsketch import LogCollapsingLowestDenseDDSketch +from .mapping import CubicallyInterpolatedMapping +from .mapping import LinearlyInterpolatedMapping +from .mapping import LogarithmicMapping +from .store import CollapsingHighestDenseStore +from .store import CollapsingLowestDenseStore + + +__version__ = get_version() + + +__all__ = [ + "DDSketch", + "LogCollapsingLowestDenseDDSketch", + "LogCollapsingHighestDenseDDSketch", + "CubicallyInterpolatedMapping", + "LinearlyInterpolatedMapping", + "LogarithmicMapping", + "CollapsingHighestDenseStore", + "CollapsingLowestDenseStore", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py new file mode 100644 index 0000000..51765be --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py @@ -0,0 +1,5 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +__version__ = version = '2.0.4' +__version_tuple__ = version_tuple = (2, 0, 4) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py new file mode 100644 index 0000000..1c65a0d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py @@ -0,0 +1,17 @@ +def get_version(): + # type: () -> str + """Return the package version. + + The write_to functionality of setuptools_scm is used (see setup.py) + to output the version to ddsketch/__version.py which we attempt to import. + + This is done to avoid the expensive overhead of importing pkg_resources. 
+ """ + try: + from .__version import version + + return version + except ImportError: + import pkg_resources + + return pkg_resources.get_distribution(__name__).version diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py new file mode 100644 index 0000000..ba72562 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py @@ -0,0 +1,316 @@ +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2020 Datadog, Inc. + +"""A quantile sketch with relative-error guarantees. This sketch computes +quantile values with an approximation error that is relative to the actual +quantile value. It works on both negative and non-negative input values. + +For instance, using DDSketch with a relative accuracy guarantee set to 1%, if +the expected quantile value is 100, the computed quantile value is guaranteed to +be between 99 and 101. If the expected quantile value is 1000, the computed +quantile value is guaranteed to be between 990 and 1010. + +DDSketch works by mapping floating-point input values to bins and counting the +number of values for each bin. The underlying structure that keeps track of bin +counts is store. + +The memory size of the sketch depends on the range that is covered by the input +values: the larger that range, the more bins are needed to keep track of the +input values. As a rough estimate, if working on durations with a relative +accuracy of 2%, about 2kB (275 bins) are needed to cover values between 1 +millisecond and 1 minute, and about 6kB (802 bins) to cover values between 1 +nanosecond and 1 day. + +The size of the sketch can be have a fail-safe upper-bound by using collapsing +stores. As shown in +the DDSketch paper +the likelihood of a store collapsing when using the default bound is vanishingly +small for most data. + +DDSketch implementations are also available in: +Go +Python +JavaScript +""" +import typing + +from .mapping import LogarithmicMapping +from .store import CollapsingHighestDenseStore +from .store import CollapsingLowestDenseStore +from .store import DenseStore + + +if typing.TYPE_CHECKING: + from typing import Optional + + from .mapping import KeyMapping + from .store import Store + + +DEFAULT_REL_ACC = 0.01 # "alpha" in the paper +DEFAULT_BIN_LIMIT = 2048 + + +class BaseDDSketch(object): + """The base implementation of DDSketch with neither mapping nor storage specified. + + Args: + mapping (mapping.KeyMapping): map btw values and store bins + store (store.Store): storage for positive values + negative_store (store.Store): storage for negative values + zero_count (float): The count of zero values + + Attributes: + relative_accuracy (float): the accuracy guarantee; referred to as alpha + in the paper. (0. < alpha < 1.) 
+ + count: the number of values seen by the sketch + min: the minimum value seen by the sketch + max: the maximum value seen by the sketch + sum: the sum of the values seen by the sketch + """ + + def __init__( + self, + mapping, + store, + negative_store, + zero_count, + ): + # type: (KeyMapping, Store, Store, float) -> None + self._mapping = mapping + self._store = store + self._negative_store = negative_store + self._zero_count = zero_count + + self._relative_accuracy = mapping.relative_accuracy + self._count = self._negative_store.count + self._zero_count + self._store.count + self._min = float("+inf") + self._max = float("-inf") + self._sum = 0.0 + + def __repr__(self): + # type: () -> str + return ( + "store: {}, negative_store: {}, " + "zero_count: {}, count: {}, " + "sum: {}, min: {}, max: {}" + ).format( + self._store, + self._negative_store, + self._zero_count, + self._count, + self._sum, + self._min, + self._max, + ) + + @property + def count(self): + return self._count + + @property + def name(self): + # type: () -> str + """str: name of the sketch""" + return "DDSketch" + + @property + def num_values(self): + # type: () -> float + """Return the number of values in the sketch.""" + return self._count + + @property + def avg(self): + # type: () -> float + """Return the exact average of the values added to the sketch.""" + return self._sum / self._count + + @property + def sum(self): # noqa: A003 + # type: () -> float + """Return the exact sum of the values added to the sketch.""" + return self._sum + + def add(self, val, weight=1.0): + # type: (float, float) -> None + """Add a value to the sketch.""" + if weight <= 0.0: + raise ValueError("weight must be a positive float, got %r" % weight) + + if val > self._mapping.min_possible: + self._store.add(self._mapping.key(val), weight) + elif val < -self._mapping.min_possible: + self._negative_store.add(self._mapping.key(-val), weight) + else: + self._zero_count += weight + + # Keep track of summary stats + self._count += weight + self._sum += val * weight + if val < self._min: + self._min = val + if val > self._max: + self._max = val + + def get_quantile_value(self, quantile): + # type: (float) -> Optional[float] + """Return the approximate value at the specified quantile. + + Args: + quantile (float): 0 <= q <=1 + + Returns: + the value at the specified quantile or None if the sketch is empty + """ + if quantile < 0 or quantile > 1 or self._count == 0: + return None + + rank = quantile * (self._count - 1) + if rank < self._negative_store.count: + reversed_rank = self._negative_store.count - rank - 1 + key = self._negative_store.key_at_rank(reversed_rank, lower=False) + quantile_value = -self._mapping.value(key) + elif rank < self._zero_count + self._negative_store.count: + return 0 + else: + key = self._store.key_at_rank( + rank - self._zero_count - self._negative_store.count + ) + quantile_value = self._mapping.value(key) + return quantile_value + + def merge(self, sketch): + # type: (BaseDDSketch) -> None + """Merge the given sketch into this one. After this operation, this sketch + encodes the values that were added to both this and the input sketch. 
+ """ + if not self._mergeable(sketch): + raise ValueError( + "Cannot merge two DDSketches with different parameters, got %r and %r" + % (self._mapping.gamma, sketch._mapping.gamma) + ) + + if sketch.count == 0: + return + + if self._count == 0: + self._copy(sketch) + return + + # Merge the stores + self._store.merge(sketch._store) + self._negative_store.merge(sketch._negative_store) + self._zero_count += sketch._zero_count + + # Merge summary stats + self._count += sketch._count + self._sum += sketch._sum + if sketch._min < self._min: + self._min = sketch._min + if sketch._max > self._max: + self._max = sketch._max + + def _mergeable(self, other): + # type: (BaseDDSketch) -> bool + """Two sketches can be merged only if their gammas are equal.""" + return self._mapping.gamma == other._mapping.gamma + + def _copy(self, sketch): + # type: (BaseDDSketch) -> None + """Copy the input sketch into this one""" + self._store.copy(sketch._store) + self._negative_store.copy(sketch._negative_store) + self._zero_count = sketch._zero_count + self._min = sketch._min + self._max = sketch._max + self._count = sketch._count + self._sum = sketch._sum + + +class DDSketch(BaseDDSketch): + """The default implementation of BaseDDSketch, with optimized memory usage at + the cost of lower ingestion speed, using an unlimited number of bins. The + number of bins will not exceed a reasonable number unless the data is + distributed with tails heavier than any subexponential. + (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf) + """ + + def __init__(self, relative_accuracy=None): + # type: (Optional[float]) -> None + # Make sure the parameters are valid + if relative_accuracy is None: + relative_accuracy = DEFAULT_REL_ACC + + mapping = LogarithmicMapping(relative_accuracy) + store = DenseStore() + negative_store = DenseStore() + super(DDSketch, self).__init__( + mapping=mapping, + store=store, + negative_store=negative_store, + zero_count=0.0, + ) + + +class LogCollapsingLowestDenseDDSketch(BaseDDSketch): + """Implementation of BaseDDSketch with optimized memory usage at the cost of + lower ingestion speed, using a limited number of bins. When the maximum + number of bins is reached, bins with lowest indices are collapsed, which + causes the relative accuracy to be lost on the lowest quantiles. For the + default bin limit, collapsing is unlikely to occur unless the data is + distributed with tails heavier than any subexponential. + (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf) + """ + + def __init__(self, relative_accuracy=None, bin_limit=None): + # type: (Optional[float], Optional[int]) -> None + # Make sure the parameters are valid + if relative_accuracy is None: + relative_accuracy = DEFAULT_REL_ACC + + if bin_limit is None or bin_limit < 0: + bin_limit = DEFAULT_BIN_LIMIT + + mapping = LogarithmicMapping(relative_accuracy) + store = CollapsingLowestDenseStore(bin_limit) + negative_store = CollapsingLowestDenseStore(bin_limit) + super(LogCollapsingLowestDenseDDSketch, self).__init__( + mapping=mapping, + store=store, + negative_store=negative_store, + zero_count=0.0, + ) + + +class LogCollapsingHighestDenseDDSketch(BaseDDSketch): + """Implementation of BaseDDSketch with optimized memory usage at the cost of + lower ingestion speed, using a limited number of bins. When the maximum + number of bins is reached, bins with highest indices are collapsed, which + causes the relative accuracy to be lost on the highest quantiles. 
For the + default bin limit, collapsing is unlikely to occur unless the data is + distributed with tails heavier than any subexponential. + (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf) + """ + + def __init__(self, relative_accuracy=None, bin_limit=None): + # type: (Optional[float], Optional[int]) -> None + # Make sure the parameters are valid + if relative_accuracy is None: + relative_accuracy = DEFAULT_REL_ACC + + if bin_limit is None or bin_limit < 0: + bin_limit = DEFAULT_BIN_LIMIT + + mapping = LogarithmicMapping(relative_accuracy) + store = CollapsingHighestDenseStore(bin_limit) + negative_store = CollapsingHighestDenseStore(bin_limit) + super(LogCollapsingHighestDenseDDSketch, self).__init__( + mapping=mapping, + store=store, + negative_store=negative_store, + zero_count=0.0, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py new file mode 100644 index 0000000..4599385 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py @@ -0,0 +1,216 @@ +from __future__ import division + + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2020 Datadog, Inc. + +"""A mapping between values and integer indices that imposes relative accuracy +guarantees. Specifically, for any value `minIndexableValue() < value < +maxIndexableValue` implementations of `KeyMapping` must be such that +`value(key(v))` is close to `v` with a relative error that is less than +`relative_accuracy`. + +In implementations of KeyMapping, there is generally a trade-off between the +cost of computing the key and the number of keys that are required to cover a +given range of values (memory optimality). The most memory-optimal mapping is +the LogarithmicMapping, but it requires the costly evaluation of the logarithm +when computing the index. Other mappings can approximate the logarithmic +mapping, while being less computationally costly. +""" +from abc import ABCMeta +from abc import abstractmethod +import math +import sys + +import six + + +class KeyMapping(six.with_metaclass(ABCMeta)): + """ + Args: + relative_accuracy (float): the accuracy guarantee; referred to as alpha + in the paper. (0. < alpha < 1.) + offset (float): an offset that can be used to shift all bin keys + Attributes: + gamma (float): the base for the exponential buckets. 
gamma = (1 + alpha) / (1 - alpha) + min_possible: the smallest value the sketch can distinguish from 0 + max_possible: the largest value the sketch can handle + _multiplier (float): used for calculating log_gamma(value) initially, _multiplier = 1 / log(gamma) + """ + + def __init__(self, relative_accuracy, offset=0.0): + # type: (float, float) -> None + if relative_accuracy <= 0 or relative_accuracy >= 1: + raise ValueError( + "Relative accuracy must be between 0 and 1, got %r" % relative_accuracy + ) + self.relative_accuracy = relative_accuracy + self._offset = offset + + gamma_mantissa = 2 * relative_accuracy / (1 - relative_accuracy) + self.gamma = 1 + gamma_mantissa + self._multiplier = 1 / math.log1p(gamma_mantissa) + self.min_possible = sys.float_info.min * self.gamma + self.max_possible = sys.float_info.max / self.gamma + + @classmethod + def from_gamma_offset(cls, gamma, offset): + # type: (float, float) -> KeyMapping + """Constructor used by pb.proto""" + relative_accuracy = (gamma - 1.0) / (gamma + 1.0) + return cls(relative_accuracy, offset=offset) + + @abstractmethod + def _log_gamma(self, value): + # type: (float) -> float + """Return (an approximation of) the logarithm of the value base gamma""" + + @abstractmethod + def _pow_gamma(self, value): + # type: (float) -> float + """Return (an approximation of) gamma to the power value""" + + def key(self, value): + # type: (float) -> int + """ + Args: + value (float) + Returns: + int: the key specifying the bucket for value + """ + return int(math.ceil(self._log_gamma(value)) + self._offset) + + def value(self, key): + # type: (int) -> float + """ + Args: + key (int) + Returns: + float: the value represented by the bucket specified by the key + """ + return self._pow_gamma(key - self._offset) * (2.0 / (1 + self.gamma)) + + +class LogarithmicMapping(KeyMapping): + """A memory-optimal KeyMapping, i.e., given a targeted relative accuracy, it + requires the least number of keys to cover a given range of values. This is + done by logarithmically mapping floating-point values to integers. + """ + + def __init__(self, relative_accuracy, offset=0.0): + # type: (float, float) -> None + super(LogarithmicMapping, self).__init__(relative_accuracy, offset=offset) + self._multiplier *= math.log(2) + + def _log_gamma(self, value): + # type: (float) -> float + return math.log(value, 2) * self._multiplier + + def _pow_gamma(self, value): + # type: (float) -> float + return math.pow(2.0, value / self._multiplier) + + +def _cbrt(x): + # type: (float) -> float + y = float(abs(x) ** (1.0 / 3.0)) + if x < 0: + return -y + return y + + +class LinearlyInterpolatedMapping(KeyMapping): + """A fast KeyMapping that approximates the memory-optimal + LogarithmicMapping by extracting the floor value of the logarithm to the + base 2 from the binary representations of floating-point values and + linearly interpolating the logarithm in-between. + """ + + def _log2_approx(self, value): + # type: (float) -> float + """Approximates log2 by s + f + where v = (s+1) * 2 ** f for s in [0, 1) + + frexp(v) returns m and e s.t. 
+ v = m * 2 ** e ; (m in [0.5, 1) or 0.0) + so we adjust m and e accordingly + """ + mantissa, exponent = math.frexp(value) + significand = 2 * mantissa - 1 + return significand + (exponent - 1) + + def _exp2_approx(self, value): + # type: (float) -> float + """Inverse of _log2_approx""" + exponent = int(math.floor(value) + 1) + mantissa = (value - exponent + 2) / 2.0 + return math.ldexp(mantissa, exponent) + + def _log_gamma(self, value): + # type: (float) -> float + return self._log2_approx(value) * self._multiplier + + def _pow_gamma(self, value): + # type: (float) -> float + return self._exp2_approx(value / self._multiplier) + + +class CubicallyInterpolatedMapping(KeyMapping): + """A fast KeyMapping that approximates the memory-optimal LogarithmicMapping by + extracting the floor value of the logarithm to the base 2 from the binary + representations of floating-point values and cubically interpolating the + logarithm in-between. + + More detailed documentation of this method can be found in: + sketches-java + """ + + A = 6.0 / 35.0 + B = -3.0 / 5.0 + C = 10.0 / 7.0 + + def __init__(self, relative_accuracy, offset=0.0): + # type: (float, float) -> None + super(CubicallyInterpolatedMapping, self).__init__( + relative_accuracy, offset=offset + ) + self._multiplier /= self.C + + def _cubic_log2_approx(self, value): + # type: (float) -> float + """Approximates log2 using a cubic polynomial""" + mantissa, exponent = math.frexp(value) + significand = 2 * mantissa - 1 + return ( + (self.A * significand + self.B) * significand + self.C + ) * significand + (exponent - 1) + + def _cubic_exp2_approx(self, value): + # type: (float) -> float + # Derived from Cardano's formula + exponent = int(math.floor(value)) + delta_0 = self.B * self.B - 3 * self.A * self.C + delta_1 = ( + 2.0 * self.B * self.B * self.B + - 9.0 * self.A * self.B * self.C + - 27.0 * self.A * self.A * (value - exponent) + ) + cardano = _cbrt( + (delta_1 - ((delta_1 * delta_1 - 4 * delta_0 * delta_0 * delta_0) ** 0.5)) + / 2.0 + ) + significand_plus_one = ( + -(self.B + cardano + delta_0 / cardano) / (3.0 * self.A) + 1.0 + ) + mantissa = significand_plus_one / 2 + return math.ldexp(mantissa, exponent + 1) + + def _log_gamma(self, value): + # type: (float) -> float + return self._cubic_log2_approx(value) * self._multiplier + + def _pow_gamma(self, value): + # type: (float) -> float + return self._cubic_exp2_approx(value / self._multiplier) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py new file mode 100644 index 0000000..81525b2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
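# ---------------------------------------------------------------------------
# Editor's illustration (not part of the vendored forwarder code): a minimal,
# hedged usage sketch for the DDSketch and KeyMapping classes added above in
# ddsketch/ddsketch.py and ddsketch/mapping.py. The values and the 1% relative
# accuracy below are illustrative assumptions only.
from ddsketch import DDSketch
from ddsketch.mapping import LogarithmicMapping

# Build a sketch with a 1% relative-accuracy guarantee (alpha = 0.01) and feed it values.
sketch = DDSketch(relative_accuracy=0.01)
for latency_ms in (1.2, 3.4, 3.5, 7.9, 120.0, 450.0):
    sketch.add(latency_ms)

p50 = sketch.get_quantile_value(0.5)   # within 1% of the true median
p99 = sketch.get_quantile_value(0.99)  # within 1% of the true p99

# Sketches built with the same relative accuracy (hence the same gamma) can be
# merged; the result is as if every value had been added to a single sketch.
other = DDSketch(relative_accuracy=0.01)
other.add(9000.0)
sketch.merge(other)

# The underlying mapping guarantees that value(key(v)) is within the relative
# accuracy of v, which is what bounds the quantile error.
mapping = LogarithmicMapping(relative_accuracy=0.01)
for v in (0.001, 1.0, 42.0, 1e6):
    rel_err = abs(mapping.value(mapping.key(v)) - v) / v
    assert rel_err <= 0.01 + 1e-9  # tiny epsilon for floating-point rounding
# ---------------------------------------------------------------------------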
+# source: ddsketch.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x64\x64sketch.proto\"}\n\x08\x44\x44Sketch\x12\x1e\n\x07mapping\x18\x01 \x01(\x0b\x32\r.IndexMapping\x12\x1e\n\x0epositiveValues\x18\x02 \x01(\x0b\x32\x06.Store\x12\x1e\n\x0enegativeValues\x18\x03 \x01(\x0b\x32\x06.Store\x12\x11\n\tzeroCount\x18\x04 \x01(\x01\"\xa7\x01\n\x0cIndexMapping\x12\r\n\x05gamma\x18\x01 \x01(\x01\x12\x13\n\x0bindexOffset\x18\x02 \x01(\x01\x12\x32\n\rinterpolation\x18\x03 \x01(\x0e\x32\x1b.IndexMapping.Interpolation\"?\n\rInterpolation\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\r\n\tQUADRATIC\x10\x02\x12\t\n\x05\x43UBIC\x10\x03\"\xa6\x01\n\x05Store\x12(\n\tbinCounts\x18\x01 \x03(\x0b\x32\x15.Store.BinCountsEntry\x12\x1f\n\x13\x63ontiguousBinCounts\x18\x02 \x03(\x01\x42\x02\x10\x01\x12 \n\x18\x63ontiguousBinIndexOffset\x18\x03 \x01(\x11\x1a\x30\n\x0e\x42inCountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x62\x06proto3') + + + +_DDSKETCH = DESCRIPTOR.message_types_by_name['DDSketch'] +_INDEXMAPPING = DESCRIPTOR.message_types_by_name['IndexMapping'] +_STORE = DESCRIPTOR.message_types_by_name['Store'] +_STORE_BINCOUNTSENTRY = _STORE.nested_types_by_name['BinCountsEntry'] +_INDEXMAPPING_INTERPOLATION = _INDEXMAPPING.enum_types_by_name['Interpolation'] +DDSketch = _reflection.GeneratedProtocolMessageType('DDSketch', (_message.Message,), { + 'DESCRIPTOR' : _DDSKETCH, + '__module__' : 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:DDSketch) + }) +_sym_db.RegisterMessage(DDSketch) + +IndexMapping = _reflection.GeneratedProtocolMessageType('IndexMapping', (_message.Message,), { + 'DESCRIPTOR' : _INDEXMAPPING, + '__module__' : 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:IndexMapping) + }) +_sym_db.RegisterMessage(IndexMapping) + +Store = _reflection.GeneratedProtocolMessageType('Store', (_message.Message,), { + + 'BinCountsEntry' : _reflection.GeneratedProtocolMessageType('BinCountsEntry', (_message.Message,), { + 'DESCRIPTOR' : _STORE_BINCOUNTSENTRY, + '__module__' : 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:Store.BinCountsEntry) + }) + , + 'DESCRIPTOR' : _STORE, + '__module__' : 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:Store) + }) +_sym_db.RegisterMessage(Store) +_sym_db.RegisterMessage(Store.BinCountsEntry) + +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _STORE_BINCOUNTSENTRY._options = None + _STORE_BINCOUNTSENTRY._serialized_options = b'8\001' + _STORE.fields_by_name['contiguousBinCounts']._options = None + _STORE.fields_by_name['contiguousBinCounts']._serialized_options = b'\020\001' + _DDSKETCH._serialized_start=18 + _DDSKETCH._serialized_end=143 + _INDEXMAPPING._serialized_start=146 + _INDEXMAPPING._serialized_end=313 + _INDEXMAPPING_INTERPOLATION._serialized_start=250 + _INDEXMAPPING_INTERPOLATION._serialized_end=313 + _STORE._serialized_start=316 + _STORE._serialized_end=482 + _STORE_BINCOUNTSENTRY._serialized_start=434 + _STORE_BINCOUNTSENTRY._serialized_end=482 +# @@protoc_insertion_point(module_scope) diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py new file mode 100644 index 0000000..4a6d3ef --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py @@ -0,0 +1,283 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: ddsketch.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='ddsketch.proto', + package='', + syntax='proto3', + serialized_pb=_b('\n\x0e\x64\x64sketch.proto\"}\n\x08\x44\x44Sketch\x12\x1e\n\x07mapping\x18\x01 \x01(\x0b\x32\r.IndexMapping\x12\x1e\n\x0epositiveValues\x18\x02 \x01(\x0b\x32\x06.Store\x12\x1e\n\x0enegativeValues\x18\x03 \x01(\x0b\x32\x06.Store\x12\x11\n\tzeroCount\x18\x04 \x01(\x01\"\xa7\x01\n\x0cIndexMapping\x12\r\n\x05gamma\x18\x01 \x01(\x01\x12\x13\n\x0bindexOffset\x18\x02 \x01(\x01\x12\x32\n\rinterpolation\x18\x03 \x01(\x0e\x32\x1b.IndexMapping.Interpolation\"?\n\rInterpolation\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\r\n\tQUADRATIC\x10\x02\x12\t\n\x05\x43UBIC\x10\x03\"\xa6\x01\n\x05Store\x12(\n\tbinCounts\x18\x01 \x03(\x0b\x32\x15.Store.BinCountsEntry\x12\x1f\n\x13\x63ontiguousBinCounts\x18\x02 \x03(\x01\x42\x02\x10\x01\x12 \n\x18\x63ontiguousBinIndexOffset\x18\x03 \x01(\x11\x1a\x30\n\x0e\x42inCountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x62\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_INDEXMAPPING_INTERPOLATION = _descriptor.EnumDescriptor( + name='Interpolation', + full_name='IndexMapping.Interpolation', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LINEAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUADRATIC', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CUBIC', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=250, + serialized_end=313, +) +_sym_db.RegisterEnumDescriptor(_INDEXMAPPING_INTERPOLATION) + + +_DDSKETCH = _descriptor.Descriptor( + name='DDSketch', + full_name='DDSketch', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mapping', full_name='DDSketch.mapping', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='positiveValues', full_name='DDSketch.positiveValues', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='negativeValues', full_name='DDSketch.negativeValues', index=2, + number=3, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='zeroCount', full_name='DDSketch.zeroCount', index=3, + number=4, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18, + serialized_end=143, +) + + +_INDEXMAPPING = _descriptor.Descriptor( + name='IndexMapping', + full_name='IndexMapping', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='gamma', full_name='IndexMapping.gamma', index=0, + number=1, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='indexOffset', full_name='IndexMapping.indexOffset', index=1, + number=2, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='interpolation', full_name='IndexMapping.interpolation', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _INDEXMAPPING_INTERPOLATION, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=146, + serialized_end=313, +) + + +_STORE_BINCOUNTSENTRY = _descriptor.Descriptor( + name='BinCountsEntry', + full_name='Store.BinCountsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='Store.BinCountsEntry.key', index=0, + number=1, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='Store.BinCountsEntry.value', index=1, + number=2, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=434, + serialized_end=482, +) + +_STORE = _descriptor.Descriptor( + name='Store', + full_name='Store', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='binCounts', full_name='Store.binCounts', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='contiguousBinCounts', full_name='Store.contiguousBinCounts', index=1, + number=2, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='contiguousBinIndexOffset', full_name='Store.contiguousBinIndexOffset', index=2, + number=3, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_STORE_BINCOUNTSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=316, + serialized_end=482, +) + +_DDSKETCH.fields_by_name['mapping'].message_type = _INDEXMAPPING +_DDSKETCH.fields_by_name['positiveValues'].message_type = _STORE +_DDSKETCH.fields_by_name['negativeValues'].message_type = _STORE +_INDEXMAPPING.fields_by_name['interpolation'].enum_type = _INDEXMAPPING_INTERPOLATION +_INDEXMAPPING_INTERPOLATION.containing_type = _INDEXMAPPING +_STORE_BINCOUNTSENTRY.containing_type = _STORE +_STORE.fields_by_name['binCounts'].message_type = _STORE_BINCOUNTSENTRY +DESCRIPTOR.message_types_by_name['DDSketch'] = _DDSKETCH +DESCRIPTOR.message_types_by_name['IndexMapping'] = _INDEXMAPPING +DESCRIPTOR.message_types_by_name['Store'] = _STORE + +DDSketch = _reflection.GeneratedProtocolMessageType('DDSketch', (_message.Message,), dict( + DESCRIPTOR = _DDSKETCH, + __module__ = 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:DDSketch) + )) +_sym_db.RegisterMessage(DDSketch) + +IndexMapping = _reflection.GeneratedProtocolMessageType('IndexMapping', (_message.Message,), dict( + DESCRIPTOR = _INDEXMAPPING, + __module__ = 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:IndexMapping) + )) +_sym_db.RegisterMessage(IndexMapping) + +Store = _reflection.GeneratedProtocolMessageType('Store', (_message.Message,), dict( + + BinCountsEntry = _reflection.GeneratedProtocolMessageType('BinCountsEntry', (_message.Message,), dict( + DESCRIPTOR = _STORE_BINCOUNTSENTRY, + __module__ = 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:Store.BinCountsEntry) + )) + , + DESCRIPTOR = _STORE, + __module__ = 'ddsketch_pb2' + # @@protoc_insertion_point(class_scope:Store) + )) +_sym_db.RegisterMessage(Store) +_sym_db.RegisterMessage(Store.BinCountsEntry) + + +_STORE_BINCOUNTSENTRY.has_options = True +_STORE_BINCOUNTSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_STORE.fields_by_name['contiguousBinCounts'].has_options = True +_STORE.fields_by_name['contiguousBinCounts']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +# @@protoc_insertion_point(module_scope) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py new file mode 100644 index 0000000..ebccfd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py @@ -0,0 +1,104 @@ +from ddsketch.ddsketch import BaseDDSketch +from ..mapping import ( + CubicallyInterpolatedMapping, + LinearlyInterpolatedMapping, + LogarithmicMapping, +) +from ..store import DenseStore + +import google.protobuf + + +pb_version = tuple(map(int, google.protobuf.__version__.split(".")[0:2])) + +if pb_version >= (3, 19, 0): + 
import ddsketch.pb.ddsketch_pb2 as pb +else: + import ddsketch.pb.ddsketch_pre319_pb2 as pb + + +class KeyMappingProto: + @classmethod + def _proto_interpolation(cls, mapping): + if type(mapping) is LogarithmicMapping: + return pb.IndexMapping.NONE + if type(mapping) is LinearlyInterpolatedMapping: + return pb.IndexMapping.LINEAR + if type(mapping) is CubicallyInterpolatedMapping: + return pb.IndexMapping.CUBIC + + @classmethod + def to_proto(cls, mapping): + """serialize to protobuf""" + return pb.IndexMapping( + gamma=mapping.gamma, + indexOffset=mapping._offset, + interpolation=cls._proto_interpolation(mapping), + ) + + @classmethod + def from_proto(cls, proto): + """deserialize from protobuf""" + if proto.interpolation == pb.IndexMapping.NONE: + return LogarithmicMapping.from_gamma_offset(proto.gamma, proto.indexOffset) + elif proto.interpolation == pb.IndexMapping.LINEAR: + return LinearlyInterpolatedMapping.from_gamma_offset( + proto.gamma, proto.indexOffset + ) + elif proto.interpolation == pb.IndexMapping.CUBIC: + return CubicallyInterpolatedMapping.from_gamma_offset( + proto.gamma, proto.indexOffset + ) + else: + raise ValueError("Unrecognized interpolation %r" % proto.interpolation) + + +class StoreProto: + """Currently only supports DenseStore""" + + @classmethod + def to_proto(cls, store): + """serialize to protobuf""" + return pb.Store( + contiguousBinCounts=store.bins, contiguousBinIndexOffset=store.offset + ) + + @classmethod + def from_proto(cls, proto): + """deserialize from protobuf""" + store = DenseStore() + index = proto.contiguousBinIndexOffset + store.offset = index + for count in proto.contiguousBinCounts: + store.add(index, count) + index += 1 + return store + + +class DDSketchProto: + @classmethod + def to_proto(self, ddsketch): + """serialize to protobuf""" + return pb.DDSketch( + mapping=KeyMappingProto.to_proto(ddsketch._mapping), + positiveValues=StoreProto.to_proto(ddsketch._store), + negativeValues=StoreProto.to_proto(ddsketch._negative_store), + zeroCount=ddsketch._zero_count, + ) + + @classmethod + def from_proto(cls, proto): + """deserialize from protobuf + + N.B., The current protobuf loses any min/max/sum/avg information. + """ + mapping = KeyMappingProto.from_proto(proto.mapping) + negative_store = StoreProto.from_proto(proto.negativeValues) + store = StoreProto.from_proto(proto.positiveValues) + zero_count = proto.zeroCount + return BaseDDSketch( + mapping=mapping, + store=store, + negative_store=negative_store, + zero_count=zero_count, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/py.typed b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py new file mode 100644 index 0000000..b9fbb48 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py @@ -0,0 +1,504 @@ +from __future__ import division + + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2020 Datadog, Inc. + +""" +Stores map integers to counters. They can be seen as a collection of bins. +We start with 128 bins and grow the store in chunks of 128 unless specified +otherwise. 
+""" + +import abc +import math +import typing + + +if typing.TYPE_CHECKING: + from typing import List + from typing import Optional + +import six + + +CHUNK_SIZE = 128 + + +class _NegativeIntInfinity(int): + def __ge__(self, x): + return False + + __gt__ = __ge__ + + def __lt__(self, x): + return True + + __le__ = __lt__ + + +class _PositiveIntInfinity(int): + def __ge__(self, x): + return True + + __gt__ = __ge__ + + def __lt__(self, x): + return False + + __le__ = __lt__ + + +_neg_infinity = _NegativeIntInfinity() +_pos_infinity = _PositiveIntInfinity() + + +class Store(six.with_metaclass(abc.ABCMeta)): + """The basic specification of a store + + Attributes: + count (float): the sum of the counts for the bins + min_key (int): the minimum key bin + max_key (int): the maximum key bin + """ + + def __init__(self): + # type: () -> None + self.count = 0 # type: float + self.min_key = _pos_infinity # type: int + self.max_key = _neg_infinity # type: int + + @abc.abstractmethod + def copy(self, store): + """Copies the input store into this one.""" + + @abc.abstractmethod + def length(self): + # type: () -> int + """Return the number of bins.""" + + @abc.abstractmethod + def add(self, key, weight=1.0): + # type: (int, float) -> None + """Updates the counter at the specified index key, growing the number of bins if + necessary. + """ + + @abc.abstractmethod + def key_at_rank(self, rank, lower=True): + # type: (float, bool) -> int + """Return the key for the value at given rank. + + E.g., if the non-zero bins are [1, 1] for keys a, b with no offset + + if lower = True: + key_at_rank(x) = a for x in [0, 1) + key_at_rank(x) = b for x in [1, 2) + + if lower = False: + key_at_rank(x) = a for x in (-1, 0] + key_at_rank(x) = b for x in (0, 1] + """ + + @abc.abstractmethod + def merge(self, store): + # type: (Store) -> None + """Merge another store into this one. This should be equivalent as running the + add operations that have been run on the other store on this one. + """ + + +class DenseStore(Store): + """A dense store that keeps all the bins between the bin for the min_key and the + bin for the max_key. 
+ + Args: + chunk_size (int, optional): the number of bins to grow by + + Attributes: + count (int): the sum of the counts for the bins + min_key (int): the minimum key bin + max_key (int): the maximum key bin + offset (int): the difference btw the keys and the index in which they are stored + bins (List[float]): the bins + """ + + def __init__(self, chunk_size=CHUNK_SIZE): + # type: (int) -> None + super(DenseStore, self).__init__() + + self.chunk_size = chunk_size # type: int + self.offset = 0 # type: int + self.bins = [] # type: List[float] + + def __repr__(self): + # type: () -> str + repr_str = "{" + for i, sbin in enumerate(self.bins): + repr_str += "%s: %s, " % (i + self.offset, sbin) + repr_str += "}}, min_key:%s, max_key:%s, offset:%s" % ( + self.min_key, + self.max_key, + self.offset, + ) + return repr_str + + def copy(self, store): + # type: (DenseStore) -> None + self.bins = store.bins[:] + self.count = store.count + self.min_key = store.min_key + self.max_key = store.max_key + self.offset = store.offset + + def length(self): + # type: () -> int + """Return the number of bins.""" + return len(self.bins) + + def add(self, key, weight=1.0): + # type: (int, float) -> None + idx = self._get_index(key) + self.bins[idx] += weight + self.count += weight + + def _get_index(self, key): + # type: (int) -> int + """Calculate the bin index for the key, extending the range if necessary.""" + if key < self.min_key: + self._extend_range(key) + elif key > self.max_key: + self._extend_range(key) + + return key - self.offset + + def _get_new_length(self, new_min_key, new_max_key): + # type: (int, int) -> int + desired_length = new_max_key - new_min_key + 1 + return self.chunk_size * int(math.ceil(desired_length / self.chunk_size)) + + def _extend_range(self, key, second_key=None): + # type: (int, Optional[int]) -> None + """Grow the bins as necessary and call _adjust""" + if second_key is None: + second_key = key + new_min_key = min(key, second_key, self.min_key) + new_max_key = max(key, second_key, self.max_key) + + if self.length() == 0: + # initialize bins + self.bins = [0.0] * self._get_new_length(new_min_key, new_max_key) + self.offset = new_min_key + self._adjust(new_min_key, new_max_key) + + elif new_min_key >= self.min_key and new_max_key < self.offset + self.length(): + # no need to change the range; just update min/max keys + self.min_key = new_min_key + self.max_key = new_max_key + + else: + # grow the bins + new_length = self._get_new_length(new_min_key, new_max_key) + if new_length > self.length(): + self.bins.extend([0.0] * (new_length - self.length())) + self._adjust(new_min_key, new_max_key) + + def _adjust(self, new_min_key, new_max_key): + # type: (int, int) -> None + """Adjust the bins, the offset, the min_key, and max_key, without resizing the + bins, in order to try making it fit the specified range. 
+ """ + self._center_bins(new_min_key, new_max_key) + self.min_key = new_min_key + self.max_key = new_max_key + + def _shift_bins(self, shift): + # type: (int) -> None + """Shift the bins; this changes the offset.""" + if shift > 0: + self.bins = self.bins[:-shift] + self.bins[:0] = [0.0] * shift + else: + self.bins = self.bins[abs(shift) :] + self.bins.extend([0.0] * abs(shift)) + self.offset -= shift + + def _center_bins(self, new_min_key, new_max_key): + # type: (int, int) -> None + """Center the bins; this changes the offset.""" + middle_key = new_min_key + (new_max_key - new_min_key + 1) // 2 + self._shift_bins(self.offset + self.length() // 2 - middle_key) + + def key_at_rank(self, rank, lower=True): + # type: (float, bool) -> int + running_ct = 0.0 + for i, bin_ct in enumerate(self.bins): + running_ct += bin_ct + if (lower and running_ct > rank) or (not lower and running_ct >= rank + 1): + return i + self.offset + + return self.max_key + + def merge(self, store): # type: ignore[override] + # type: (DenseStore) -> None + if store.count == 0: + return + + if self.count == 0: + self.copy(store) + return + + if store.min_key < self.min_key or store.max_key > self.max_key: + self._extend_range(store.min_key, store.max_key) + + for key in range(store.min_key, store.max_key + 1): + self.bins[key - self.offset] += store.bins[key - store.offset] + + self.count += store.count + + +class CollapsingLowestDenseStore(DenseStore): + """A dense store that keeps all the bins between the bin for the min_key and the + bin for the max_key, but collapsing the left-most bins if the number of bins + exceeds the bin_limit + + Args: + bin_limit (int): the maximum number of bins + chunk_size (int, optional): the number of bins to grow by + + Attributes: + count (int): the sum of the counts for the bins + min_key (int): the minimum key bin + max_key (int): the maximum key bin + offset (int): the difference btw the keys and the index in which they are stored + bins (List[int]): the bins + """ + + def __init__(self, bin_limit, chunk_size=CHUNK_SIZE): + # type: (int, int) -> None + super(CollapsingLowestDenseStore, self).__init__() + self.bin_limit = bin_limit + self.is_collapsed = False + + def copy(self, store): # type: ignore[override] + # type: (CollapsingLowestDenseStore) -> None + self.bin_limit = store.bin_limit + self.is_collapsed = store.is_collapsed + super(CollapsingLowestDenseStore, self).copy(store) + + def _get_new_length(self, new_min_key, new_max_key): + # type: (int, int) -> int + desired_length = new_max_key - new_min_key + 1 + return min( + self.chunk_size * int(math.ceil(desired_length / self.chunk_size)), + self.bin_limit, + ) + + def _get_index(self, key): + # type: (int) -> int + """Calculate the bin index for the key, extending the range if necessary.""" + if key < self.min_key: + if self.is_collapsed: + return 0 + + self._extend_range(key) + if self.is_collapsed: + return 0 + elif key > self.max_key: + self._extend_range(key) + + return key - self.offset + + def _adjust(self, new_min_key, new_max_key): + # type: (int, int) -> None + """Override. Adjust the bins, the offset, the min_key, and max_key, without + resizing the bins, in order to try making it fit the specified + range. Collapse to the left if necessary. + """ + if new_max_key - new_min_key + 1 > self.length(): + # The range of keys is too wide, the lowest bins need to be collapsed. 
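            # Editor's note (illustrative, assuming the store has already grown to the
            # default bin_limit of 2048 bins): if new_max_key is 5000, new_min_key is
            # clamped to 5000 - 2048 + 1 = 2953 below, and the counts of any existing
            # keys below 2953 are folded into the bin for key 2953, so accuracy is lost
            # only on the lowest quantiles.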
+ new_min_key = new_max_key - self.length() + 1 + + if new_min_key >= self.max_key: + # put everything in the first bin + self.offset = new_min_key + self.min_key = new_min_key + self.bins[:] = [0.0] * self.length() + self.bins[0] = self.count + else: + shift = self.offset - new_min_key + if shift < 0: + collapse_start_index = self.min_key - self.offset + collapse_end_index = new_min_key - self.offset + collapsed_count = sum( + self.bins[collapse_start_index:collapse_end_index] + ) + self.bins[collapse_start_index:collapse_end_index] = [0.0] * ( + new_min_key - self.min_key + ) + self.bins[collapse_end_index] += collapsed_count + self.min_key = new_min_key + # shift the buckets to make room for new_max_key + self._shift_bins(shift) + else: + self.min_key = new_min_key + # shift the buckets to make room for new_min_key + self._shift_bins(shift) + + self.max_key = new_max_key + self.is_collapsed = True + else: + self._center_bins(new_min_key, new_max_key) + self.min_key = new_min_key + self.max_key = new_max_key + + def merge(self, store): # type: ignore[override] + # type: (CollapsingLowestDenseStore) -> None # type: ignore[override] + """Override.""" + if store.count == 0: + return + + if self.count == 0: + self.copy(store) + return + + if store.min_key < self.min_key or store.max_key > self.max_key: + self._extend_range(store.min_key, store.max_key) + + collapse_start_idx = store.min_key - store.offset + collapse_end_idx = min(self.min_key, store.max_key + 1) - store.offset + if collapse_end_idx > collapse_start_idx: + collapse_count = sum(store.bins[collapse_start_idx:collapse_end_idx]) + self.bins[0] += collapse_count + else: + collapse_end_idx = collapse_start_idx + + for key in range(collapse_end_idx + store.offset, store.max_key + 1): + self.bins[key - self.offset] += store.bins[key - store.offset] + + self.count += store.count + + +class CollapsingHighestDenseStore(DenseStore): + """A dense store that keeps all the bins between the bin for the min_key and the + bin for the max_key, but collapsing the right-most bins if the number of bins + exceeds the bin_limit + + Args: + bin_limit (int): the maximum number of bins + chunk_size (int, optional): the number of bins to grow by + + Attributes: + count (int): the sum of the counts for the bins + min_key (int): the minimum key bin + max_key (int): the maximum key bin + offset (int): the difference btw the keys and the index in which they are stored + bins (List[int]): the bins + """ + + def __init__(self, bin_limit, chunk_size=CHUNK_SIZE): + super(CollapsingHighestDenseStore, self).__init__() + self.bin_limit = bin_limit + self.is_collapsed = False + + def copy(self, store): # type: ignore[override] + # type: (CollapsingHighestDenseStore) -> None + self.bin_limit = store.bin_limit + self.is_collapsed = store.is_collapsed + super(CollapsingHighestDenseStore, self).copy(store) + + def _get_new_length(self, new_min_key, new_max_key): + # type: (int, int) -> int + desired_length = new_max_key - new_min_key + 1 + # For some reason mypy can't infer that min(int, int) is an int, so cast it. 
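        # Editor's note (illustrative arithmetic): with the default chunk_size of 128, a
        # desired_length of 300 rounds up to 3 * 128 = 384 bins, and the result is then
        # capped at bin_limit so a collapsing store never grows past its configured limit.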
+ return int( + min( + self.chunk_size * int(math.ceil(desired_length / self.chunk_size)), + self.bin_limit, + ) + ) + + def _get_index(self, key): + # type: (int) -> int + """Calculate the bin index for the key, extending the range if necessary""" + if key > self.max_key: + if self.is_collapsed: + return self.length() - 1 + + self._extend_range(key) + if self.is_collapsed: + return self.length() - 1 + elif key < self.min_key: + self._extend_range(key) + return key - self.offset + + def _adjust(self, new_min_key, new_max_key): + # type: (int, int) -> None + """Override. Adjust the bins, the offset, the min_key, and max_key, without + resizing the bins, in order to try making it fit the specified + range. Collapse to the left if necessary. + """ + if new_max_key - new_min_key + 1 > self.length(): + # The range of keys is too wide, the lowest bins need to be collapsed. + new_max_key = new_min_key + self.length() - 1 + + if new_max_key <= self.min_key: + # put everything in the last bin + self.offset = new_min_key + self.max_key = new_max_key + self.bins[:] = [0.0] * self.length() + self.bins[-1] = self.count + else: + shift = self.offset - new_min_key + if shift > 0: + collapse_start_index = new_max_key - self.offset + 1 + collapse_end_index = self.max_key - self.offset + 1 + collapsed_count = sum( + self.bins[collapse_start_index:collapse_end_index] + ) + self.bins[collapse_start_index:collapse_end_index] = [0.0] * ( + self.max_key - new_max_key + ) + self.bins[collapse_start_index - 1] += collapsed_count + self.max_key = new_max_key + # shift the buckets to make room for new_max_key + self._shift_bins(shift) + else: + self.max_key = new_max_key + # shift the buckets to make room for new_min_key + self._shift_bins(shift) + + self.min_key = new_min_key + self.is_collapsed = True + else: + self._center_bins(new_min_key, new_max_key) + self.min_key = new_min_key + self.max_key = new_max_key + + def merge(self, store): # type: ignore[override] + # type: (CollapsingHighestDenseStore) -> None # type: ignore[override] + """Override.""" + if store.count == 0: + return + + if self.count == 0: + self.copy(store) + return + + if store.min_key < self.min_key or store.max_key > self.max_key: + self._extend_range(store.min_key, store.max_key) + + collapse_end_idx = store.max_key - store.offset + 1 + collapse_start_idx = max(self.max_key + 1, store.min_key) - store.offset + if collapse_end_idx > collapse_start_idx: + collapse_count = sum(store.bins[collapse_start_idx:collapse_end_idx]) + self.bins[-1] += collapse_count + else: + collapse_start_idx = collapse_end_idx + + for key in range(store.min_key, collapse_start_idx + store.offset): + self.bins[key - self.offset] += store.bins[key - store.offset] + + self.count += store.count diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE new file mode 100644 index 0000000..5f8fd63 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE @@ -0,0 +1,6 @@ +## License + +This work is dual-licensed under Apache 2.0 or BSD3. +You may select, at your option, one of the above-listed licenses. 
+ +`SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause` diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache new file mode 100644 index 0000000..bff56b5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache @@ -0,0 +1,200 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016 Datadog, Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3 b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3 new file mode 100644 index 0000000..e8f3a81 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3 @@ -0,0 +1,24 @@ +Copyright (c) 2016, Datadog +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Datadog nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL DATADOG BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA new file mode 100644 index 0000000..fd46eae --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA @@ -0,0 +1,68 @@ +Metadata-Version: 2.1 +Name: ddtrace +Version: 2.6.5 +Summary: Datadog APM client library +Home-page: https://github.com/DataDog/dd-trace-py +Author: Datadog, Inc. +Author-email: "Datadog, Inc." 
+License: LICENSE.BSD3 +Project-URL: Bug Tracker, https://github.com/DataDog/dd-trace-py/issues +Project-URL: Changelog, https://github.com/DataDog/dd-trace-py/releases +Project-URL: Documentation, https://ddtrace.readthedocs.io/en/stable/ +Project-URL: Homepage, https://github.com/DataDog/dd-trace-py +Project-URL: Source Code, https://github.com/DataDog/dd-trace-py/ +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: LICENSE.Apache +License-File: LICENSE.BSD3 +License-File: NOTICE +Requires-Dist: attrs >=20 +Requires-Dist: cattrs +Requires-Dist: ddsketch >=2.0.1 +Requires-Dist: envier +Requires-Dist: opentelemetry-api >=1 +Requires-Dist: protobuf >=3 +Requires-Dist: six >=1.12.0 +Requires-Dist: typing-extensions +Requires-Dist: xmltodict >=0.12 +Requires-Dist: importlib-metadata <=6.5.0 ; python_version < "3.8" +Requires-Dist: bytecode ~=0.13.0 ; python_version == "3.7" +Requires-Dist: setuptools ; python_version >= "3.12" +Requires-Dist: bytecode ; python_version >= "3.8" +Provides-Extra: opentracing +Requires-Dist: opentracing >=2.0.0 ; extra == 'opentracing' + +# `ddtrace` + +[![CircleCI](https://circleci.com/gh/DataDog/dd-trace-py/tree/main.svg?style=svg)](https://circleci.com/gh/DataDog/dd-trace-py/tree/main) +[![PypiVersions](https://img.shields.io/pypi/v/ddtrace.svg)](https://pypi.org/project/ddtrace/) +[![Pyversions](https://img.shields.io/pypi/pyversions/ddtrace.svg?style=flat)](https://pypi.org/project/ddtrace/) + +bits python + +This library powers [Distributed Tracing](https://docs.datadoghq.com/tracing/), + [Continuous Profiling](https://docs.datadoghq.com/tracing/profiler/), + [Error Tracking](https://docs.datadoghq.com/tracing/error_tracking/), + [Continuous Integration Visibility](https://docs.datadoghq.com/continuous_integration/), + [Deployment Tracking](https://docs.datadoghq.com/tracing/deployment_tracking/), + [Code Hotspots](https://docs.datadoghq.com/tracing/profiler/connect_traces_and_profiles/), + [Dynamic Instrumentation](https://docs.datadoghq.com/dynamic_instrumentation/), + and more. + +To get started with tracing, check out the [product documentation][setup docs] or the [glossary][visualization docs]. + +For advanced usage and configuration information, check out the [library documentation][api docs]. + +To get started as a contributor, see [the contributing docs](https://ddtrace.readthedocs.io/en/stable/contributing.html) first. + +[setup docs]: https://docs.datadoghq.com/tracing/setup/python/ +[api docs]: https://ddtrace.readthedocs.io/ +[visualization docs]: https://docs.datadoghq.com/tracing/visualization/ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE new file mode 100644 index 0000000..732c748 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE @@ -0,0 +1,4 @@ +Datadog dd-trace-py +Copyright 2016-Present Datadog, Inc. + +This product includes software developed at Datadog, Inc. (https://www.datadoghq.com/). 
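> Note on the RECORD hunk that follows: a wheel `RECORD` file lists every installed file as `path,sha256=<urlsafe-base64 digest>,size` (hash and size are left empty for the RECORD itself and for `.pyc` files). Since this PR vendors the package wholesale, the entries below can be used to check that nothing was altered in transit. The sketch below is illustrative only and not part of the forwarder or this module; the function name and paths are made up for the example.

```python
# Illustrative sketch: verify entries from a vendored wheel RECORD file.
# RECORD format (wheel spec): path,sha256=<urlsafe-base64 digest, no padding>,size
import base64
import csv
import hashlib
from pathlib import Path


def verify_record(record_path, site_dir):
    """Return paths whose on-disk contents do not match the RECORD entries."""
    mismatches = []
    with open(record_path, newline="") as fh:
        for path, digest, size in csv.reader(fh):
            if not digest:  # RECORD lists itself and .pyc files without a hash
                continue
            algo, _, expected = digest.partition("=")
            data = (Path(site_dir) / path).read_bytes()
            actual = (
                base64.urlsafe_b64encode(hashlib.new(algo, data).digest())
                .rstrip(b"=")
                .decode()
            )
            if actual != expected or (size and int(size) != len(data)):
                mismatches.append(path)
    return mismatches


# Example invocation (paths are hypothetical):
# verify_record("ddtrace-2.6.5.dist-info/RECORD", "lambdas/aws-dd-forwarder-3.127.0")
```

This only spot-checks integrity of the vendored tree; it does not replace reviewing the upstream release itself.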
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD new file mode 100644 index 0000000..57017c5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD @@ -0,0 +1,1391 @@ +../../bin/ddtrace-run,sha256=-2hcsI92sxn_Du4qKKiIz_MEFpezi6TBsB7AVMn47U8,236 +ddtrace-2.6.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ddtrace-2.6.5.dist-info/LICENSE,sha256=OZvn-IQ0kjk6HmSP8Yi2WQqZBzGRJQmvBO5w9KBdD3o,186 +ddtrace-2.6.5.dist-info/LICENSE.Apache,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342 +ddtrace-2.6.5.dist-info/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493 +ddtrace-2.6.5.dist-info/METADATA,sha256=tGHBBqBIyOkA1eJ3mPq8FY6G12qN2-hl-Ef-axiZK0w,3287 +ddtrace-2.6.5.dist-info/NOTICE,sha256=Wmf6iXVNfb58zWLK5pIkcbqMflb7pl38JhxjMwmjtyc,146 +ddtrace-2.6.5.dist-info/RECORD,, +ddtrace-2.6.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace-2.6.5.dist-info/WHEEL,sha256=AI1yqBLEPcVKWn5Ls2uPawjbqPXPFTYdQLSdN8WFCJw,152 +ddtrace-2.6.5.dist-info/entry_points.txt,sha256=1t-yacpd7hsx2aKKB7_O34U414M5pPnqp-tu0XmflB8,337 +ddtrace-2.6.5.dist-info/top_level.txt,sha256=jPd7qTCAnWevz7DZiI0jdVlnFB3cautvluLsO-iMgQY,8 +ddtrace/__init__.py,sha256=bBRAg-TKVYa9IaNrop8whGGNAGY4PSofcN_FxgrIeHY,1754 +ddtrace/__pycache__/__init__.cpython-311.pyc,, +ddtrace/__pycache__/_hooks.cpython-311.pyc,, +ddtrace/__pycache__/_logger.cpython-311.pyc,, +ddtrace/__pycache__/_monkey.cpython-311.pyc,, +ddtrace/__pycache__/_version.cpython-311.pyc,, +ddtrace/__pycache__/auto.cpython-311.pyc,, +ddtrace/__pycache__/constants.cpython-311.pyc,, +ddtrace/__pycache__/context.cpython-311.pyc,, +ddtrace/__pycache__/data_streams.cpython-311.pyc,, +ddtrace/__pycache__/filters.cpython-311.pyc,, +ddtrace/__pycache__/pin.cpython-311.pyc,, +ddtrace/__pycache__/provider.cpython-311.pyc,, +ddtrace/__pycache__/sampler.cpython-311.pyc,, +ddtrace/__pycache__/sampling_rule.cpython-311.pyc,, +ddtrace/__pycache__/span.cpython-311.pyc,, +ddtrace/__pycache__/tracer.cpython-311.pyc,, +ddtrace/__pycache__/version.cpython-311.pyc,, +ddtrace/_hooks.py,sha256=VW8lblk-yD8rBtvU7dPetjAfaeEtUVDuJCrGbPEnwMI,3865 +ddtrace/_logger.py,sha256=T55nFe_TYEfQK0NmTb513ux6yurm5s1qAWc3j-ZIA8g,3658 +ddtrace/_monkey.py,sha256=AC5dmFiLBVrA2Hl3ZJmfhSNOcGQMsX7aUFKfYBv_o3M,8905 +ddtrace/_trace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/_trace/__pycache__/__init__.cpython-311.pyc,, +ddtrace/_trace/__pycache__/_limits.cpython-311.pyc,, +ddtrace/_trace/_limits.py,sha256=L2N27bqVnpOn2T53HDn-0rD43ek7GNuyS6piZgP-2tM,92 +ddtrace/_version.py,sha256=D_wLNXauKIfiJpn1EENjWxSq66DO-yHjqpgfDoH3wzY,411 +ddtrace/appsec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_asm_request_context.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_capabilities.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_constants.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_deduplications.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_handlers.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_metrics.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_processor.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_remoteconfiguration.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_trace_utils.cpython-311.pyc,, +ddtrace/appsec/__pycache__/_utils.cpython-311.pyc,, 
+ddtrace/appsec/_api_security/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_api_security/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_api_security/__pycache__/api_manager.cpython-311.pyc,, +ddtrace/appsec/_api_security/api_manager.py,sha256=g8L3zogrDhJQGgdxULXdigviGnmzD1pEF2Oc9yVLQqQ,6217 +ddtrace/appsec/_asm_request_context.py,sha256=K4MArRjpn9AI0g5563dd2tQyu2V-FhkdELspeTtm7Wo,18228 +ddtrace/appsec/_capabilities.py,sha256=NL1qG7JMitdrEF53kkUKiXLMyvDOCIQ0Jp9s3KsUo38,2513 +ddtrace/appsec/_constants.py,sha256=pUaW4ys3NpzgbZa_GB8gOBD8Zl6lUlpNHMA8DzrLQ8s,8237 +ddtrace/appsec/_ddwaf/__init__.py,sha256=zQKZHtgw2QQv3SQ_5gGgV2z79T4C1KSqOFkCgojS6vQ,7685 +ddtrace/appsec/_ddwaf/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_ddwaf/__pycache__/ddwaf_types.cpython-311.pyc,, +ddtrace/appsec/_ddwaf/ddwaf_types.py,sha256=rCTezCO2D7dskFQ14_2Q7VodGBgjaOEWjGY8jZSkrY0,16417 +ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so,sha256=N_HdHd9oTea4Ep9fyse9AUgfHoddwt9OKJfo5iir2Oc,2195976 +ddtrace/appsec/_deduplications.py,sha256=balBc-3vYEsb9qsajcMTqZ0TYM8eaAyygDymuUTQJTA,1096 +ddtrace/appsec/_handlers.py,sha256=Z9y5pNYejwxbAEwHA1cmRwONHKgoUzJEJvmfdIk4bNU,14559 +ddtrace/appsec/_iast/__init__.py,sha256=115d3innxqsf_N85LwpM47F_rBlBENHipfV4Rh1u_DM,2528 +ddtrace/appsec/_iast/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_input_info.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_loader.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_metrics.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_overhead_control_engine.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_patch.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_patch_modules.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_taint_dict.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_taint_utils.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/_utils.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/constants.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/processor.cpython-311.pyc,, +ddtrace/appsec/_iast/__pycache__/reporter.cpython-311.pyc,, +ddtrace/appsec/_iast/_ast/__init__.py,sha256=1oLL20yLB1GL9IbFiZD8OReDqiCpFr-yetIR6x1cNkI,23 +ddtrace/appsec/_iast/_ast/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_ast/__pycache__/ast_patching.cpython-311.pyc,, +ddtrace/appsec/_iast/_ast/__pycache__/visitor.cpython-311.pyc,, +ddtrace/appsec/_iast/_ast/ast_patching.py,sha256=EiFFgzy-ICs9EWYDPRRUrYezp_X-HNpwyijhq0kNQxk,5319 +ddtrace/appsec/_iast/_ast/visitor.py,sha256=i4rF8ijsM2Rp57zkBkcG-fB0aEH_GRuRuRtZyUgd_9g,31555 +ddtrace/appsec/_iast/_input_info.py,sha256=fd2GRmY1bsjrm3jFXu_dgwerOlKWO3B785FG-XhAgxQ,447 +ddtrace/appsec/_iast/_loader.py,sha256=48dE2QjNYEwYPXiy4BTtm83ucDuDYAkklFTGs55ZyCw,863 +ddtrace/appsec/_iast/_metrics.py,sha256=B6gK0mDidUAk3jfOC0CuI2Apk2yuIh-r9eI_FlC3qhA,5336 +ddtrace/appsec/_iast/_overhead_control_engine.py,sha256=BI0oXr3XLRWW4Pds5B57LmXiOOKpiM6y_Y89l4H6ZLs,4451 +ddtrace/appsec/_iast/_patch.py,sha256=oVKBEfsTpPSIb0sLEalQppf4ccXQmPNKQCiko713INQ,5764 +ddtrace/appsec/_iast/_patch_modules.py,sha256=v-C0yqWrEpr_NpRry8jufU2ZCD2Kyqa8F0SYgQV_Ff8,789 +ddtrace/appsec/_iast/_patches/__pycache__/json_tainting.cpython-311.pyc,, +ddtrace/appsec/_iast/_patches/json_tainting.py,sha256=BS4KA9M5q5gozp0A3nRwQkbh6cM09PAgWvxP4flPmWA,2881 +ddtrace/appsec/_iast/_stacktrace.c,sha256=c-iYHfUnsbJQhIvpYVMJP1sJYOlGgbW1gicnjyYI0Tg,4745 
+ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so,sha256=fxKX5mOHeE5nSZyInZXStAuTrO-PuH_Zzn9f_CwPq6I,16976 +ddtrace/appsec/_iast/_taint_dict.py,sha256=aX-Orr9g0HkASLmEBg57z8oLP9NBzo6C6FEpkHSfVHE,522 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectExtend.cpp,sha256=lTYdMHMeKdke5vadPDMg9dtGMLwcwnb2r1F1P1dorxs,1310 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectExtend.h,sha256=SiBtZrE70Jl2Slk4rPyyxQC7OgPDU5SRNrbNDIs_iwQ,157 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectFormat.cpp,sha256=ALl3sNgE9DWte8NAWMBTMObUEOM_6xJO4wGKl12Jhkk,2095 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectFormat.h,sha256=BWDsHh9McH3GdvzjA8ljNDXb-uHQ7a3EdsPpGlBNjGw,448 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectIndex.cpp,sha256=AS1_-KYkTbSceURhDJNKy1AOvsZ_n0HAsHsy4Fasw90,1282 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectIndex.h,sha256=g58F_438ciqqhKtAY4yqEheDZJgv93lBCCZ1tCgpgFo,281 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectJoin.cpp,sha256=NyoC_13HIJtS9ERrg9wCwCtL7SmDzvQBk_EmtthoSHA,7175 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectJoin.h,sha256=Nm7Zqs_LhHyttysjV9wu86UME3a_dgEMa6le6k-xRFs,240 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectOperatorAdd.cpp,sha256=Q11TfgGUnOO5D3n7W5AzED7cru1BmfeYbvSAcUiesgM,4129 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectOperatorAdd.h,sha256=bmkqJlXgpphoOqBTOh098MWmjkp-kDDPRDMjyJwWtbg,213 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectSlice.cpp,sha256=oC7zPY2n_3Nmz8GRglXY27Pz543FRXvhCb5oNvyjsZY,3853 +ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectSlice.h,sha256=xBy5qunF4kNnBh1yQEJUNkHPZGx3q3yHC55OP1QKClc,163 +ddtrace/appsec/_iast/_taint_tracking/Aspects/Helpers.cpp,sha256=7158WBwsJioeb52e_InpKzkYhYd07MgYQSm4OYO7Hy4,12777 +ddtrace/appsec/_iast/_taint_tracking/Aspects/Helpers.h,sha256=PxvMWJPKAv52IhrgY2fNjWDd2n6NmHV7pdfcVaU7FZM,1834 +ddtrace/appsec/_iast/_taint_tracking/Aspects/_aspects_exports.h,sha256=W9eoyYf-sNInZU1bVn23UcQ0E-Ghut8KMTANmkXH0Bk,412 +ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt,sha256=SlR7CmiJ9ui_KtFvVLRBHg4pCrf7_6IGPxiHFNoOJjY,2305 +ddtrace/appsec/_iast/_taint_tracking/Constants.h,sha256=8pRUHCymtbzoIFKGdLnmCKDPzLY9BM6ombe0p0LLRGo,134 +ddtrace/appsec/_iast/_taint_tracking/Initializer/Initializer.cpp,sha256=81vx8MXRYX2XM3OLEuaiaCSHiXjbq0YxEpyy46s0wGM,6471 +ddtrace/appsec/_iast/_taint_tracking/Initializer/Initializer.h,sha256=M1TmaVeUl0ZCH3pQ8SEI69q7K-6vwHZi811TQLxWIxs,4548 +ddtrace/appsec/_iast/_taint_tracking/Initializer/_initializer.h,sha256=LwQAUIDDAx-Yk7_k1O6hoRGek6VoAqnMPjzYM-bV_8E,307 +ddtrace/appsec/_iast/_taint_tracking/README.txt,sha256=TLWoDrzE3qfLe88EvUQkJthIRrWYN6JG9oXTw7YDZqs,821 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/Source.cpp,sha256=rC9Eg4StydtdxwAJNaNs1CQAwi8M_WHxzlZ_3yd1sXM,2847 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/Source.h,sha256=A5BgphieMkFVAn3G8QBv5E7sV5kJ5CMI2OZbRuSuiFk,3347 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintRange.cpp,sha256=3k88GA3ONPvp3cR40ARkkprmqX-b3TdrLOSEjMit6Tw,13939 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintRange.h,sha256=78w6fhKoIvLf4sak3UGgTEMvCHNFBM4eG7H_3182Ewg,3983 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintedObject.cpp,sha256=V_u-Z3mFcwLR2nZJRElVjZMomGjJjf01ZICQNI9H-JQ,4014 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintedObject.h,sha256=yMKiLV9RupcamBpHbePvGRmpO8lGMYwBbGzsjMasPRU,1615 +ddtrace/appsec/_iast/_taint_tracking/TaintTracking/_taint_tracking.h,sha256=EVO6qgu6MyZtxsTWiomXsOpl_NaxLTT-299jwo9lpxE,474 
+ddtrace/appsec/_iast/_taint_tracking/TaintedOps/TaintedOps.cpp,sha256=_1E1VqgfvXXoTrpw__J6uch8na5RuRTJvpZf9f6aZq0,1152 +ddtrace/appsec/_iast/_taint_tracking/TaintedOps/TaintedOps.h,sha256=UsV-E2mlzpGNELnbgJ1SY07CJz3BbziufhC_p2JtVOk,1032 +ddtrace/appsec/_iast/_taint_tracking/Utils/StringUtils.cpp,sha256=sKtcQp_RI8Ayi8U4m6JX-UlDFGeShvV8cprvSPVtO94,4290 +ddtrace/appsec/_iast/_taint_tracking/Utils/StringUtils.h,sha256=jPiwMj7Lo3T4vnaTlQL8Y8WESVaHvbno0lxQ5LP9flk,684 +ddtrace/appsec/_iast/_taint_tracking/__init__.py,sha256=V2yjQL_pdVkwyQOmRFJk3U-5stEKWy_YKJCMGSbMzzY,5048 +ddtrace/appsec/_iast/_taint_tracking/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/__pycache__/aspects.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_native.cpp,sha256=7gFaQS6h_dw53_AGvbmnwJWDhq5R33DohTzPfNT5WCA,3611 +ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so,sha256=-miouZYVKFSgF-pfMLVgoFGT9c26WLvUTmX081jgY-Y,633224 +ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_iast/_taint_tracking/_vendor/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt,sha256=X4_B7XO6EzkqC94TVFELCWz_CS0kxMJIo31_ohjS_gY,12067 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/attr.h,sha256=QPjH7BfhL8QFwHHkrDak8gNOLMlb1itAO5fobjdoLp8,24334 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/buffer_info.h,sha256=m_VE_hfWPKl-KgUZy9aVQdPg1xtoaDaBgkurIX7aGig,7750 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/cast.h,sha256=Rcq-l1HCfMIyBxbJSM041wpu3EpZBVChfgVdEpdnqC0,67312 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/chrono.h,sha256=A23naeloqn-1NKVAABOsJtHU9Vz8lfvrAICuLk-7qBM,8458 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/common.h,sha256=ATg9Bt1pwF8qnNuI086fprM4CUTdrZdk_g2HXE1Sf6A,120 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/complex.h,sha256=AaDZ-rEmK4tFaue-K9P5y3TxxnaQF6JwZ_6LAzkdLQI,2096 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/class.h,sha256=Y2IzTplhE5KiMiBlzWSAovCQaI_1M0dlsoDYCTpB5Hg,28518 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/common.h,sha256=rACKWPmqUkdizSQMftuCkLCZBXGbVSiuRv_h0uaY-k4,53480 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/descr.h,sha256=k1nvytx1zhMh8ERL2xS8Unbxcio5fa7eZIqnTsZ0orE,5962 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/init.h,sha256=vYO2nScstnYiCn4Kh57IKrOwpNkQ9gqME4foF03JU6A,17859 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/internals.h,sha256=Az8HDKl3QU-KEOapdwHYifjWpwbej4Js5wKvSspAjQk,28221 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/type_caster_base.h,sha256=LC91ejtvXjGQ0DaUFrYN3ChE1agf9Y2hHDs7byTbZa8,48364 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/typeid.h,sha256=jw5pr9m72vkDsloT8vxl9wj17VJGcEdXDyziBlt89Js,1625 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen.h,sha256=-HmSA1kgwCQ-GHUt7PHtTEc-vxqw9xARpF8PHWJip28,316 
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/common.h,sha256=dIeqmK7IzW5K4k2larPnA1A863rDp38U9YbNIwiIyYk,378 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/matrix.h,sha256=CS8NpkZI8Y8ty0NFQC7GZcUlM5o8_1Abv1GbGltsbkA,32135 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/tensor.h,sha256=jLtNZKXr7MWFplt9x3qnHdO73jNZlAqT40Hb4FPabnk,18442 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/embed.h,sha256=xD-oEg56PadTig9a8FOcMgbsL64jaie7hwG3y6DWPEI,13459 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eval.h,sha256=7re-O2Eor1yD0Q_KgFkHIjKD17ejzII687Yszl9_KfE,4731 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/functional.h,sha256=cXDJUS0Y_1GBbOK4Nn13exhkZsAQWx408HZ-PFBmbJo,5002 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/gil.h,sha256=RZkkMm0E9PQlHXW6xkBIhM7VBeCvmyJlPVQNaSJMUQQ,8262 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/iostream.h,sha256=K5rPXoCYN325r1PptcJCIhPhgtRtTJQjMr7bvUIOwxk,8862 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/numpy.h,sha256=G-hxJJom5roJ7s_hTiG1Mq9NxpZ6BOzK03fLXUQuH30,79725 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/operators.h,sha256=224RoAXcv1la4NNY9rQ3aD_AeC8S9ZKx3HVK1O8B4MU,9103 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/options.h,sha256=qXvmnj--9fZSp56NYefnB3W5V17ppHlY1Srgo3DNBpw,2734 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/pybind11.h,sha256=V1zKPVpON-t2yGVQigySYMALadMx-ux7eZ_xC0ILg9c,126706 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/pytypes.h,sha256=mshHowCgq91Dt06atf5C6DFhRSWbUwYBgIl21-2usco,98455 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl.h,sha256=dajIEYNnTimX5uYYLm0TzYesxq87JakWZ5KWCzbET2I,15477 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl/filesystem.h,sha256=refLexmdcqOM6Qjo9QMB6heA5bQ7GZrP6DCvVBv0R1M,4185 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl_bind.h,sha256=835YP_4OkcKTkNOaY-GUUXIDf86GSpN65lVivP0M4TY,29897 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/type_caster_pyobject_ptr.h,sha256=H7pKBYTvUlibiJQEcKmeAkygSQwoCkuIyukNSDmVq-U,1929 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py,sha256=4-WhH9Ac6P8D_FqnflpOch8XlaZrkXbe95FspbMvwu0,429 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py,sha256=ATLlhFlhBxDXxxXEfnf2F1RcRhuWN1ziMwbmrGuhif0,1544 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/__main__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/_version.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/commands.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/setup_helpers.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py,sha256=wUJj-zKbescYMsNA17iZHUMgzy99ahCucWDV4KMhPCg,228 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py,sha256=iJBFWhXHqlC_JMAgMjMIn6H_hizvorS572upGU1roGA,1207 
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py,sha256=CSDjuh2T5zlcZHqII8tO7HZcz8-qmMlUfCo23_WESaw,17475 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake,sha256=7D4GwE3lgw_0y-NMGqkGS9aTEXFteGbj3ZgXlXr3g2A,2449 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake,sha256=liSnYcMw1gAxI-AZGVS0CJJsOQ2bGcDcG3LbCR5sta8,3105 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake,sha256=1dEMOUQxj-xTQzlQLBBXMT4DTScNJFquOQGAWiJawCA,11190 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake,sha256=eUsNj6YqO3mMffEtUBFFgNGkeiNL-2tNgwkutkam7MQ,817 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/codespell_ignore_lines_from_errors.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/libsize.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/make_changelog.cpython-311.pyc,, +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh,sha256=TigulaRClaWcR-fjZLt0PtH9oncUdvsDnOxFyp9CjX8,1423 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in,sha256=39wpRXJHm-hEzGGkil1HbBFPITu9JOzV5xVt46JLcBE,952 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py,sha256=bTs7QS1-reWL04cS6C-Fh4F-TTXBgLIhemO4gfRaIgo,1117 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py,sha256=3MBZDCi0-kdKei_6RcTbmVJgtmT4udB-WIF-mOPLBD4,1031 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py,sha256=W1WAntnxxI8vWp6JPikaiY6FToN4vpYcXFBSJhP7ZdM,1311 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in,sha256=O_qrWegwZzC4WaSJ5pCnoeTCRt6-z1KRrb0gElWoBYo,196 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake,sha256=JXy2aActygFERWyazURxTk0jW1MTcYtvaOL_8KFjNWw,14449 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in,sha256=AkLEzM2gv7T49m5w32CnB1ez6uxX9P2_fUdypNc_MPo,7101 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake,sha256=2mZ2ZUkFMsDk2JQU5hRwIjamCsljarTQu8CM8mY7P8A,8960 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake,sha256=kRoc1SvQl6NRZEHHuM-NDrfyMJF1HbgL-TiTu__Dkw8,8361 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml,sha256=JPALGLInEk7BiomriJ9xCKQW6vzO82rAvFSn1125GMA,94 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in,sha256=01t6jThpKlPyD3SJGWPj1aiejGAzGtzlm_XjRY4fsHM,2104 +ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in,sha256=XNB-0uhzvP6MP0mePkM0oWcIPpjBdwgVCeseEhTvX34,1234 +ddtrace/appsec/_iast/_taint_tracking/aspects.py,sha256=JCVHjDKVFmlnkOYOhPj6ijzDUhr299kUlF4eCiO6bjA,34809 +ddtrace/appsec/_iast/_taint_tracking/clean.sh,sha256=RF6OT4iCwc19nMNWVTK_Ka6GweEqc4Cucz_2BLsZ7-k,231 +ddtrace/appsec/_iast/_taint_utils.py,sha256=98Zd6pFR-bpWkTntbBOS_fph_EjJJY55C6KO36ldzO8,18317 +ddtrace/appsec/_iast/_utils.py,sha256=2r3bykNHgeooPO1rWL5at0K38QFq0UysF_pb5x_WOoc,3478 
+ddtrace/appsec/_iast/constants.py,sha256=Z8nH-Y1aUd7OB633ehFGfESr5mcoLZnGv2mwb0jcCw4,1907 +ddtrace/appsec/_iast/processor.py,sha256=MVPSNBuE0eKZnCMNPLaDbxcMNe26Vzeh4WcwyruDaoo,2921 +ddtrace/appsec/_iast/reporter.py,sha256=ERwuOWSP8wWrauRgSbtoerRxmM-IVqoh6je3fB1nloc,2811 +ddtrace/appsec/_iast/taint_sinks/__init__.py,sha256=xDTH7yfOCdm84VRsHDvmCJSTFwY-FXxbFVs1pPjxXeQ,147 +ddtrace/appsec/_iast/taint_sinks/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/_base.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/ast_taint.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/command_injection.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/insecure_cookie.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/path_traversal.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/sql_injection.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/ssrf.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_cipher.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_hash.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_randomness.cpython-311.pyc,, +ddtrace/appsec/_iast/taint_sinks/_base.py,sha256=EDZseIRIo_Or3YhVw6ACs42r8Ro-QtKLQh1tiTLvcoI,12126 +ddtrace/appsec/_iast/taint_sinks/ast_taint.py,sha256=amvb3i-w_Wln0c_jLICoS7GRTxegunYFFGyIaN98uTY,1723 +ddtrace/appsec/_iast/taint_sinks/command_injection.py,sha256=v0W6TbctjKz_0Rru3j1ROvT8IiSR9fjC5Z14R81YPA8,9623 +ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py,sha256=Rg2lp-JQUMT7QwDrrcScBph4Upj-MsS_JcE9Yajx59g,2545 +ddtrace/appsec/_iast/taint_sinks/path_traversal.py,sha256=eODEuVCuA0koBJynvQSxXTWxYXPlB8a2taeqlaCFzKo,2456 +ddtrace/appsec/_iast/taint_sinks/sql_injection.py,sha256=EZIl8U4DeWEYL9PpO76mQzjIAmTpR_Th4FumjXyvesE,1503 +ddtrace/appsec/_iast/taint_sinks/ssrf.py,sha256=FoQivFkvcHGCUHEdlnMpUIjitGIAq1_2vPaoh1ds6Wg,7558 +ddtrace/appsec/_iast/taint_sinks/weak_cipher.py,sha256=XpCBpZOGKNdls7fNS3N684rWQ5Q9SEESv3pTQu-0JYw,6302 +ddtrace/appsec/_iast/taint_sinks/weak_hash.py,sha256=CnxlkN_Yar52lZyJtvpjrFFsyFzjaJfveSoi3zf-B3c,6694 +ddtrace/appsec/_iast/taint_sinks/weak_randomness.py,sha256=28UXtKlDYspbXMlj4raZF-l_oFoTn5YKSMqjTfCXQ8E,445 +ddtrace/appsec/_metrics.py,sha256=clpTcUMi8rYlwmCfa4c0d6qzEwYKTeTKI8i7T_Xeaek,4374 +ddtrace/appsec/_processor.py,sha256=PYnoaPb6lHGVIEkud5WZQ9HADJhI5F88jb6wP-Mg_RE,17693 +ddtrace/appsec/_python_info/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/appsec/_python_info/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__init__.py,sha256=KMAkWzD1EKBveQs9pDbEQrbCLk-TVqTo2lSzYL2yWW0,634 +ddtrace/appsec/_python_info/stdlib/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py310.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py311.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py36.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py37.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py38.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py39.cpython-311.pyc,, +ddtrace/appsec/_python_info/stdlib/module_names_py310.py,sha256=ngWzLCBwy9ByT-cyVIjyBSLc4WDYCRYu1vvP5Ke9fRg,3176 +ddtrace/appsec/_python_info/stdlib/module_names_py311.py,sha256=2IiNW7Hgew3aaUMD9my6yRB9ufexn2C8wJXGXbP_RTk,3177 
+ddtrace/appsec/_python_info/stdlib/module_names_py36.py,sha256=4LjKXzxt67jF6chR7if5s1UQYgoqYEkUlHbqeeBUldc,3209 +ddtrace/appsec/_python_info/stdlib/module_names_py37.py,sha256=szgt5Nxtx2yWlaeepsgROy1V7LT3nUHXFiM9da9bMmY,3233 +ddtrace/appsec/_python_info/stdlib/module_names_py38.py,sha256=3MCI6Um6QWjdS1Vorn4WQxyAbZaaGYf8eYVdpH720Ho,3218 +ddtrace/appsec/_python_info/stdlib/module_names_py39.py,sha256=jvuaSW0_7wtPnmIP6BdPSCwc1SKcw5EcBmnndAhnQ0A,3206 +ddtrace/appsec/_remoteconfiguration.py,sha256=4oFpe_6LloYsA2aRVGqLX818wPio4wfXhuDPQRH-fuQ,10647 +ddtrace/appsec/_trace_utils.py,sha256=UDUADGGe7_DFLbTMgyqU-BSPpJg9DZUv76UMKwFKeRk,12733 +ddtrace/appsec/_utils.py,sha256=Srs9dBgdSZubMfOBCCVuWQtSFPixADzt_MteL459GEs,5478 +ddtrace/appsec/iast/__init__.py,sha256=CPD2rJbZh290-1clJzq3J1E0d9dpNjvR3cWyL6nLuGc,72 +ddtrace/appsec/iast/__pycache__/__init__.cpython-311.pyc,, +ddtrace/appsec/rules.json,sha256=8KQsRDdq1ouTNZfTXt1LFdmXH45ZHAPUpjhRRTOShc4,278159 +ddtrace/appsec/trace_utils/__init__.py,sha256=oWT5uFME10wKUiQX0GDnWWAXOvmPOrPl7otH7DlUlhQ,666 +ddtrace/appsec/trace_utils/__pycache__/__init__.cpython-311.pyc,, +ddtrace/auto.py,sha256=PrVqH550rIK7CLn3O8eUpOM6ufWI8EaFVu45li8KR_k,601 +ddtrace/bootstrap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/bootstrap/__pycache__/__init__.cpython-311.pyc,, +ddtrace/bootstrap/__pycache__/preload.cpython-311.pyc,, +ddtrace/bootstrap/__pycache__/sitecustomize.cpython-311.pyc,, +ddtrace/bootstrap/preload.py,sha256=8pkqcDru6SeC6Y4ZOa6k9wp-zS9dJzcoCD-roiOmuL0,4111 +ddtrace/bootstrap/sitecustomize.py,sha256=YSiZiOMB7VOPbmPO8F9AhZmljf8VmblNI-vERyHQMGg,6790 +ddtrace/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/commands/__pycache__/__init__.cpython-311.pyc,, +ddtrace/commands/__pycache__/ddtrace_run.cpython-311.pyc,, +ddtrace/commands/ddtrace_run.py,sha256=4snfPPvjISyj9238hkf1dic7MSoac7CIOIDtmYNEJK4,4585 +ddtrace/constants.py,sha256=PUmBvEkUlKfxKeuVqBrGDNbq21V0iB6hl9pyfmqLZaM,1708 +ddtrace/context.py,sha256=TAaybV1PrIr0MOUYvnsAoSg59C1gM44ahI1mD_eLhKk,9543 +ddtrace/contrib/__init__.py,sha256=qFyfkycuuYfhxPcJIHW0Q5hBRwsfUaabDanAKm7fVqU,244 +ddtrace/contrib/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/__pycache__/trace_utils.cpython-311.pyc,, +ddtrace/contrib/__pycache__/trace_utils_async.cpython-311.pyc,, +ddtrace/contrib/__pycache__/trace_utils_redis.cpython-311.pyc,, +ddtrace/contrib/aiobotocore/__init__.py,sha256=9bV_yu3Eff-SRRAvPykek5LpkJtq7zV36lMfCBdXuJo,1023 +ddtrace/contrib/aiobotocore/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aiobotocore/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aiobotocore/patch.py,sha256=vjWEkkS5ql98QRi9CTB2QPoqZUPM9u1yy7--t3DosF0,6347 +ddtrace/contrib/aiohttp/__init__.py,sha256=UmZ0ex1doI_bvV2MXui2PagPEZhJancBv4k4nETUHfg,3061 +ddtrace/contrib/aiohttp/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aiohttp/__pycache__/middlewares.cpython-311.pyc,, +ddtrace/contrib/aiohttp/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aiohttp/middlewares.py,sha256=RqDs9dWPJjMqyqqDT2eTP5YMi73lGcBwWvD-K1DgnUQ,6769 +ddtrace/contrib/aiohttp/patch.py,sha256=2uWWp6vFJYQ5gay5nM2u448dsTi6Hiyu9K8JCzny1eY,5107 +ddtrace/contrib/aiohttp_jinja2/__init__.py,sha256=ZvNEC7eSdy2UWiSJhCYoc8STU2OTgw6lHIJZMM1f0Rs,711 +ddtrace/contrib/aiohttp_jinja2/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aiohttp_jinja2/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aiohttp_jinja2/patch.py,sha256=rxsbERf6uyDmKu3JLZb7WPwzDcAO2-_8zWsRgi9rWq4,2023 
+ddtrace/contrib/aiomysql/__init__.py,sha256=3zK56C7PWr5uYGl5oMSYrEwlKZSICy4Hl0A3ggZ__Os,1338 +ddtrace/contrib/aiomysql/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aiomysql/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aiomysql/patch.py,sha256=alWBpZ6ZBs4xy2i8MHzg7qOdjw98P6BSABTHgGbecs0,5849 +ddtrace/contrib/aiopg/__init__.py,sha256=Of3jYJ4jsNnNUDLNkstkRYWnSJ29hpOdj-GwagxZ8Hg,827 +ddtrace/contrib/aiopg/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aiopg/__pycache__/connection.cpython-311.pyc,, +ddtrace/contrib/aiopg/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aiopg/connection.py,sha256=90sl-eDM5QpbIIuM0SkmXDTKHnFRyU0D6E2L8Kr_-Do,4393 +ddtrace/contrib/aiopg/patch.py,sha256=2P8XOpJXmQsc0nhAAzjue_ddAF0x13_L0mDq_JjExDo,1887 +ddtrace/contrib/aioredis/__init__.py,sha256=C5EeJsQPTJHIILnPqRU-q28nHZZaByptetMmHcG6Fys,2222 +ddtrace/contrib/aioredis/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aioredis/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aioredis/patch.py,sha256=tvDMaHqJglMm2_9N4tQxNOYgHCzSe8zyWWeMAYDmOxI,8603 +ddtrace/contrib/algoliasearch/__init__.py,sha256=Hw78TlIYgBQEA_YyfYMMwIrorhSLIcW1A0iE5bA9rOo,974 +ddtrace/contrib/algoliasearch/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/algoliasearch/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/algoliasearch/patch.py,sha256=NoZcMQP-QNrf5oaGgafwNlINaptyvI9GmCMuLQKqJso,5696 +ddtrace/contrib/aredis/__init__.py,sha256=AF8GAoHfbKiChvZtDd8A6yKjLV39CvEbVK_yhWCX92o,2030 +ddtrace/contrib/aredis/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aredis/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aredis/patch.py,sha256=D2mggX9PFLqcwnZq4s7g4MxLLZcAWN3xPb2zXhy0Frc,2783 +ddtrace/contrib/asgi/__init__.py,sha256=Wrd3MJD8DOzh3ntszQ7BckhPedB8EAjHTf9CuxUFt-c,2145 +ddtrace/contrib/asgi/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/asgi/__pycache__/middleware.cpython-311.pyc,, +ddtrace/contrib/asgi/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/asgi/middleware.py,sha256=p0NMvCMzbMgten6AC5Q3mVHYvAaJAiUQLCjMAAH7wCc,11437 +ddtrace/contrib/asgi/utils.py,sha256=A0YA0ZMhryqiM4jXkeWenF320SCJmXIHy4GQzl-fhXQ,3287 +ddtrace/contrib/asyncio/__init__.py,sha256=rPEAg2IyIY8Ar2O3s1p9X-n3KA-Prx1bbK_LKRgsWs4,2553 +ddtrace/contrib/asyncio/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/asyncio/__pycache__/compat.cpython-311.pyc,, +ddtrace/contrib/asyncio/__pycache__/helpers.cpython-311.pyc,, +ddtrace/contrib/asyncio/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/asyncio/__pycache__/provider.cpython-311.pyc,, +ddtrace/contrib/asyncio/__pycache__/wrappers.cpython-311.pyc,, +ddtrace/contrib/asyncio/compat.py,sha256=HgAfoCMH2TTyooC4siI5pH2472-mFzC8-yrAExm0m7U,278 +ddtrace/contrib/asyncio/helpers.py,sha256=G1L7FBnpFVBkn5m9dEFHdXLG1ECY2LANpJAN2H3_nEU,3102 +ddtrace/contrib/asyncio/patch.py,sha256=QcFgpIKsiNB5kRO2mSXTT_bdTP44zhzKF-Js_3GPHYo,463 +ddtrace/contrib/asyncio/provider.py,sha256=CIZMUeR3Glm-WrO7PQH0OUMUSUXjI-YMMv73zueWFpA,2855 +ddtrace/contrib/asyncio/wrappers.py,sha256=svG3-7JQ9kM1pFcKYCji-LRGGqe5ZDUTFx4h3qkDpsM,953 +ddtrace/contrib/asyncpg/__init__.py,sha256=cGQezxZJ2UYMlK1ypBzQ3WuVksl9l1UQbBIc1RngNJM,1322 +ddtrace/contrib/asyncpg/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/asyncpg/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/asyncpg/patch.py,sha256=JIkh5ZdxjWjoDjYypfWNIuvPhOQbDX5zslLjil8AYVo,4664 +ddtrace/contrib/aws_lambda/__init__.py,sha256=h0Wsf5SvBEy9bJVhVSNBPICTFBhtgHsGnzJUU8m8TQI,1434 
+ddtrace/contrib/aws_lambda/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/aws_lambda/__pycache__/_cold_start.cpython-311.pyc,, +ddtrace/contrib/aws_lambda/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/aws_lambda/_cold_start.py,sha256=nOvLzOrLto40M_JlV2NY_7qfI-HbhcaMiMwDr1MHTXI,482 +ddtrace/contrib/aws_lambda/patch.py,sha256=EitsmJ9HAlA2e414488WIpNJ0xtEDcCHl5mD8KJ4bzY,9218 +ddtrace/contrib/boto/__init__.py,sha256=IfPRmsVDN_OiiLPZAQg3oXgTrRGC9Jw1E2BcYNtU3vs,1019 +ddtrace/contrib/boto/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/boto/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/boto/patch.py,sha256=rAWuYvUvCpVk67pnM68uA6KhpVL7yQdksYmCH91KEO8,6881 +ddtrace/contrib/botocore/__init__.py,sha256=f-8_Ra8c1IUM3mK63D300YYUjeZY3dvQfovBYkjOeFM,3870 +ddtrace/contrib/botocore/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/botocore/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/botocore/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/botocore/patch.py,sha256=w8cSKtZLuZxEtNDRmyDCmKBhMLGumGjWBN7L5FWulE4,11389 +ddtrace/contrib/botocore/services/__pycache__/bedrock.cpython-311.pyc,, +ddtrace/contrib/botocore/services/__pycache__/kinesis.cpython-311.pyc,, +ddtrace/contrib/botocore/services/__pycache__/sqs.cpython-311.pyc,, +ddtrace/contrib/botocore/services/__pycache__/stepfunctions.cpython-311.pyc,, +ddtrace/contrib/botocore/services/bedrock.py,sha256=w7xujld-YvXFXYNH-gWmJAec5-ar397OvDj7e-BPOhw,15713 +ddtrace/contrib/botocore/services/kinesis.py,sha256=879NrScMaMaZ90q3FQNYQVnkmL-T5PX_qlIfHw6xXek,7430 +ddtrace/contrib/botocore/services/sqs.py,sha256=2pN--HSpNZl6eB_W1hINRBaetKe1YMx1V0vkMlnoY4Q,10521 +ddtrace/contrib/botocore/services/stepfunctions.py,sha256=NOjLyVlJqb0qKEh0ze1KsOMLHI1Ef63SBMc2zaGx8HI,4122 +ddtrace/contrib/botocore/utils.py,sha256=iUUVH1O_UWBrlK-7pUz4EQM61KYb1FhnQgac7ghEcTo,9177 +ddtrace/contrib/bottle/__init__.py,sha256=wy6alj22Uo4lNzk8BQUEnA__lGx7P7Beqxp2yiAF-Ug,1140 +ddtrace/contrib/bottle/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/bottle/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/bottle/__pycache__/trace.cpython-311.pyc,, +ddtrace/contrib/bottle/patch.py,sha256=9OzWL40cv559g1gAyN0RctZH_yiRteJUqhvYYjd4y7E,861 +ddtrace/contrib/bottle/trace.py,sha256=wXEhP6piR2oEemPFXsRE4bOR48Pz6V3JZoyc7Yblh5M,4246 +ddtrace/contrib/cassandra/__init__.py,sha256=LeXodkO4NBuOndLu-iZ0oPT4XZsG71-keSw7w5wux0A,1190 +ddtrace/contrib/cassandra/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/cassandra/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/cassandra/__pycache__/session.cpython-311.pyc,, +ddtrace/contrib/cassandra/patch.py,sha256=gTEULNQXWMvEH4VQHcvBthVH3XGWytnHLMCum2XcjeQ,89 +ddtrace/contrib/cassandra/session.py,sha256=KkpujlPSpcev1pvdaU6iVQR4e-qheqqL3ve5eDiCI0M,10272 +ddtrace/contrib/celery/__init__.py,sha256=Kgp_FgcEiQF4VOVE0TqIRo5OqwssjvNePtF34ujRz6Q,1583 +ddtrace/contrib/celery/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/celery/__pycache__/app.cpython-311.pyc,, +ddtrace/contrib/celery/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/celery/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/celery/__pycache__/signals.cpython-311.pyc,, +ddtrace/contrib/celery/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/celery/app.py,sha256=t4tRJ3o2ZN8s-q3_Fyp9e3W95ugxL65WfTTcj_ARmfk,3340 +ddtrace/contrib/celery/constants.py,sha256=2ULjjEu1VmKSb26zHXWcWhScx3uGP_nc7SoxRdo_jkk,470 +ddtrace/contrib/celery/patch.py,sha256=5Jc37i_k0w1juOz6TFyVnyAty9n8hEhQ1zeZTsUBSIQ,1138 
+ddtrace/contrib/celery/signals.py,sha256=YyM7Oex2_1aDfx1sIahMh7a4sC3umEDaSnvKaNp8QoI,8211 +ddtrace/contrib/celery/utils.py,sha256=ICRnxclfoa-OqWypL-YivweF02MvGvHGX9csKnAJ_GU,4545 +ddtrace/contrib/cherrypy/__init__.py,sha256=GU1w6levb2eE_O4_L8F5KXSNeKSYrD_tzsVS1sXJEXQ,1642 +ddtrace/contrib/cherrypy/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/cherrypy/__pycache__/middleware.cpython-311.pyc,, +ddtrace/contrib/cherrypy/middleware.py,sha256=5P_rk9yVIZ_8ccUtXAfsCK4BTHTHrPfaiALM0ZTj4lA,5743 +ddtrace/contrib/consul/__init__.py,sha256=0i1qNV6XNYcA1Se0_X0h5y3e60lPzorE_-d0cDQexlE,907 +ddtrace/contrib/consul/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/consul/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/consul/patch.py,sha256=Y3gY9ku8OBveB4o2O2y14BhZnHBUYrPgnA_thM6Jdtw,2685 +ddtrace/contrib/coverage/__init__.py,sha256=94_a-BjnFq_5-89Q293HWZirZsklo-dnmmXAIItW9a4,914 +ddtrace/contrib/coverage/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/coverage/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/coverage/__pycache__/data.cpython-311.pyc,, +ddtrace/contrib/coverage/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/coverage/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/coverage/constants.py,sha256=6llF4KSvJb8GfOYvQUdRlMjZN23Y8n3YNcECnkWcpjM,33 +ddtrace/contrib/coverage/data.py,sha256=l-JnkQjfIw_xakUxD6xeohdndOPXLf-7WWuTsX90wCw,100 +ddtrace/contrib/coverage/patch.py,sha256=3EIgmvgmpa-jP87J7XtNK9NjCXr5sWlVIBPHEqD4-fo,1604 +ddtrace/contrib/coverage/utils.py,sha256=g_O_N-Fef9859SpUxS64voJh01Pad48t2iD1WeQjWOg,664 +ddtrace/contrib/dbapi/__init__.py,sha256=fcSUo-w_COulEPTpZ0O2B1uEo1uV8YW3C8OOWvFipAY,14002 +ddtrace/contrib/dbapi/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/dbapi_async/__init__.py,sha256=igsTeNZG69SzZe09FvjnVxwYG5K5PFfDTECxF6noF1c,11190 +ddtrace/contrib/dbapi_async/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/django/__init__.py,sha256=JWv5O5uYYbZlagVDmEBhyvozo5Jm9EMpxywp45_jk7o,5488 +ddtrace/contrib/django/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/django/__pycache__/_asgi.cpython-311.pyc,, +ddtrace/contrib/django/__pycache__/compat.cpython-311.pyc,, +ddtrace/contrib/django/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/django/__pycache__/restframework.cpython-311.pyc,, +ddtrace/contrib/django/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/django/_asgi.py,sha256=qzP0RaYXUaEhMa0QfF2hU7hdMt6XSQU76ZzozeZJLKE,1390 +ddtrace/contrib/django/compat.py,sha256=AAjJn7NXuQMNFsyrcSc_B7B1NyugPTB-wEUSvz3knOY,908 +ddtrace/contrib/django/patch.py,sha256=TTJSayW6SpRGgQBgFYSfRsOfEIWYZTRtL3HipiYNPoY,34624 +ddtrace/contrib/django/restframework.py,sha256=pZ0I_FEwZA2L0yg3aUAw1yWRUXpQi-ccCRy3PPxlFvs,1155 +ddtrace/contrib/django/utils.py,sha256=K61VSWZA6ooTxFS0NGE9UsbnVWFoCgMN05NzUsbEN1M,16558 +ddtrace/contrib/dogpile_cache/__init__.py,sha256=rnl4KhUBRUcZC7FNnu1g9gcJZu78FOgNsZfuN2di_CU,1653 +ddtrace/contrib/dogpile_cache/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/dogpile_cache/__pycache__/lock.cpython-311.pyc,, +ddtrace/contrib/dogpile_cache/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/dogpile_cache/__pycache__/region.cpython-311.pyc,, +ddtrace/contrib/dogpile_cache/lock.py,sha256=zmEG-OVH_veGeupY4UdxAZ6kERgjmpHZLjUMupFT6vc,1636 +ddtrace/contrib/dogpile_cache/patch.py,sha256=e8wiG1obluHLLPObyb9l9t_TajaRy7FnhqGeksv_mHk,1875 +ddtrace/contrib/dogpile_cache/region.py,sha256=DGO_uSOTEM3lAyug5bTFo1Pf0_NgNPliRW9vhkrwUWw,1990 
+ddtrace/contrib/elasticsearch/__init__.py,sha256=dVviFwaMSxlUctu7yARm2yezD2W4bzirnLu2U551lds,1526 +ddtrace/contrib/elasticsearch/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/elasticsearch/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/elasticsearch/__pycache__/quantize.cpython-311.pyc,, +ddtrace/contrib/elasticsearch/patch.py,sha256=D2UuaxvwsTahHGUphybiB8urrGUjRMUf-qpoIvrlvfE,8926 +ddtrace/contrib/elasticsearch/quantize.py,sha256=wdiGa9N2CcPXw614lwb6-kCTBrxvkubtmb4KI2S9H5c,1052 +ddtrace/contrib/falcon/__init__.py,sha256=XbjcXYfnw7lLbgJh9u-2ylkut7UtQ2O29z6v-TzYCBs,1512 +ddtrace/contrib/falcon/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/falcon/__pycache__/middleware.cpython-311.pyc,, +ddtrace/contrib/falcon/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/falcon/middleware.py,sha256=qIOvgkBJLsYaJMW60NQsrokNL91UFDe4lx1zamkU6dA,4444 +ddtrace/contrib/falcon/patch.py,sha256=tM-0kfBFRW4F0DlkD46Lz_KGqK6wD0jyh_61ZIrvLjw,1217 +ddtrace/contrib/fastapi/__init__.py,sha256=7j0YDwboKR_ij9DP-qjB99mpUC9lkCUf3Yyyf1SDtac,1978 +ddtrace/contrib/fastapi/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/fastapi/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/fastapi/patch.py,sha256=uaii_6XeyBizbCT7VFN8lMBi2ixSBFsqsx7Ls_HFr9w,3401 +ddtrace/contrib/flask/__init__.py,sha256=3w7QicHrmgLmQD7GkCXX56a_-UnBquxDzW3TiweLvE0,2864 +ddtrace/contrib/flask/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/flask/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/flask/__pycache__/wrappers.cpython-311.pyc,, +ddtrace/contrib/flask/patch.py,sha256=n0Sxkz5t4T1g82lfafUWwWXdKM1V9wyeUaHs3aeTEcw,19694 +ddtrace/contrib/flask/wrappers.py,sha256=6jphZzUVDvfJuc_adDTZ7o3ih_y7u1A7LK2BeoIgg8M,3187 +ddtrace/contrib/flask_cache/__init__.py,sha256=MfOIBgrD0uPsa0b2lfoG-G3Mnzti5S8PY51P0V1aW2g,1702 +ddtrace/contrib/flask_cache/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/flask_cache/__pycache__/tracers.cpython-311.pyc,, +ddtrace/contrib/flask_cache/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/flask_cache/tracers.py,sha256=NXbnigTR_MXnVpzvDvaST0Kx7HzgLZELvQBTvNDFDyQ,6752 +ddtrace/contrib/flask_cache/utils.py,sha256=V6nc8kZKp_dhQFyco_gx3ez0FCZYfF6bcYLf_hgw5eE,2186 +ddtrace/contrib/flask_login/__init__.py,sha256=29ErfIxFnpOOKktBl-Ag9keQWFTlZTT-tPivqncowBc,1919 +ddtrace/contrib/flask_login/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/flask_login/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/flask_login/patch.py,sha256=Gfp2jwW-4gBvd38Qs_Sb2vdW1RRS6cN-ZfwgUj6Txwk,3626 +ddtrace/contrib/futures/__init__.py,sha256=RBln83ltDdVksGsSFM8rsDv9m9RsECNAQify_Rrkf3s,979 +ddtrace/contrib/futures/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/futures/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/futures/__pycache__/threading.cpython-311.pyc,, +ddtrace/contrib/futures/patch.py,sha256=_CgJp1Zf5pxHzqdQuYaxfDc1DpEvm1GZz9jQS-VMPhI,1106 +ddtrace/contrib/futures/threading.py,sha256=KsSEBnoKn6Qy_2T8AGN8JneN7RBoQrmWGWxN6Cep9iE,1822 +ddtrace/contrib/gevent/__init__.py,sha256=lwZFVcW4MMkAu6YiD8hOBj1qqxSLwVUy8ni6ENaRpJ0,1962 +ddtrace/contrib/gevent/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/gevent/__pycache__/greenlet.cpython-311.pyc,, +ddtrace/contrib/gevent/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/gevent/__pycache__/provider.cpython-311.pyc,, +ddtrace/contrib/gevent/greenlet.py,sha256=rEzvw7pibHIXNe8tOK4Jm9J7Cb4pK6_nt-me8p6lNVE,2241 +ddtrace/contrib/gevent/patch.py,sha256=XiU7qr-im4NYZmwTUAPL4221Ii7-Fc-I6XN_zQDDp8c,2538 
+ddtrace/contrib/gevent/provider.py,sha256=wWh_EnT4xdpekgdJhiqGWnTj2CXyUfLlYJAVYX2WrjA,1468 +ddtrace/contrib/graphql/__init__.py,sha256=oCrbJUcgUTbC-Evfg4Xb5d8RtHQEBs_xlt1v38JY3-8,1482 +ddtrace/contrib/graphql/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/graphql/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/graphql/patch.py,sha256=DIJXmXJUJPJRlQ1x7DZhqktcNcbSVKy9sprc3aB91UA,11437 +ddtrace/contrib/grpc/__init__.py,sha256=g5ckIPN9aK0aTIzWi624l9t-5mtxKLKyENjALrTm_iQ,2248 +ddtrace/contrib/grpc/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/aio_client_interceptor.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/aio_server_interceptor.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/client_interceptor.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/server_interceptor.cpython-311.pyc,, +ddtrace/contrib/grpc/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/grpc/aio_client_interceptor.py,sha256=JuAd5dzJMYDqj3Y6kbZVvZLj5YU4WofAA0GvrtK_K1w,9950 +ddtrace/contrib/grpc/aio_server_interceptor.py,sha256=QxFDDHUpplTcDMyAuCMsHXWKe4uNzcK2ILWU1TMZgXw,12388 +ddtrace/contrib/grpc/client_interceptor.py,sha256=paMEhofuD2CPb8yQH2ZgYK5JKbH92jIQ7E1if6VvW4k,10747 +ddtrace/contrib/grpc/constants.py,sha256=g4pTnEmFqAabsQBLDCvKoB5lKZCtZc-ah2qs6VEVOPw,1016 +ddtrace/contrib/grpc/patch.py,sha256=HqfgTPXP2pBiUpnCn2UtYxL57nZy5rJwWlbppnMUUJk,7526 +ddtrace/contrib/grpc/server_interceptor.py,sha256=NWQmtbk8NjZ1qYWJEqrCpb5UokaX4ZV8uhPTkxGw15I,5161 +ddtrace/contrib/grpc/utils.py,sha256=FAfO5UyMg2nkLZ_NxVzR3_z8v3Z6tqIytNsn6L6tsdQ,2970 +ddtrace/contrib/gunicorn/__init__.py,sha256=tEinb0tU3N8cW111sI9gRlL5dNpL_hkhWwd8lzX5s6A,421 +ddtrace/contrib/gunicorn/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/httplib/__init__.py,sha256=wol5d0NIeR9jhdqC0CXkMx42wkAh989LypeaUOHPKmU,1604 +ddtrace/contrib/httplib/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/httplib/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/httplib/patch.py,sha256=m-ooHqTSl1aL5RUZpVO8n7O3RhEqRdwndzIILROZAto,8130 +ddtrace/contrib/httpx/__init__.py,sha256=sDbsKCk_Yv_ZsJaKWU7nYDh5uCW2tVCZkOUlyjihrfU,2556 +ddtrace/contrib/httpx/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/httpx/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/httpx/patch.py,sha256=-cLHcKUWnpLy9hxRnc94yxtiTLpBjwWquvgupMT6n_w,6951 +ddtrace/contrib/jinja2/__init__.py,sha256=sbeozQZfnw1pyXNKhwefKO-X8Le1cI6fEawccPuLyDo,1273 +ddtrace/contrib/jinja2/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/jinja2/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/jinja2/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/jinja2/constants.py,sha256=BpRqO-bQIWzrHFO7DtJAZ4v7lH7HpYWjYri7yCOG-Ts,35 +ddtrace/contrib/jinja2/patch.py,sha256=bFKs5VUF1LICVwCJkSJ6EOkFi3pZNSUJyA8CCf3EAnY,3483 +ddtrace/contrib/kafka/__init__.py,sha256=d2GeIQqFVrPhkO_JAKvgPx73iW2RjqOAzAggyydnb4s,1375 +ddtrace/contrib/kafka/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/kafka/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/kafka/patch.py,sha256=m2BTr7FrWEAU_LbkXafddxsCZiKqEhw-HL3Z_affW9k,11518 +ddtrace/contrib/kombu/__init__.py,sha256=SWh0sC8gHZ71qvDd5j2TiOZQ1h2ECnSCONntoSOhgbo,1671 +ddtrace/contrib/kombu/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/kombu/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/kombu/__pycache__/patch.cpython-311.pyc,, 
+ddtrace/contrib/kombu/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/kombu/constants.py,sha256=RhhLOyKsbyFLnko4wvuyDQvBv_wy0CMN27k4akMSlak,26 +ddtrace/contrib/kombu/patch.py,sha256=QEz5P8WCVjVo0SjV1e9cLOFq2kpoPQ1ZPN5X3nublnU,5818 +ddtrace/contrib/kombu/utils.py,sha256=xjAzg2zr7o74Iy7Bc2vIQvfvm_W82ZzvbiBHLNtCa7E,1101 +ddtrace/contrib/langchain/__init__.py,sha256=iy5GzZMp1ApEevdyPrtQBqZ9Z81RWmlLCBlZD7FE8Zk,6459 +ddtrace/contrib/langchain/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/langchain/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/langchain/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/langchain/constants.py,sha256=hgeiUEiR5fjYj7w2uxO1osAesux0172juwSES-gE2uY,2478 +ddtrace/contrib/langchain/patch.py,sha256=zAhRvOnX0vXC0hgr-lToG8sti4NF92Z6VeYwOmtdk2I,36845 +ddtrace/contrib/logbook/__init__.py,sha256=lm-ge5wJYMIzmW7Vm_uu0IgElav1tgiSGruvCCvKZRo,2578 +ddtrace/contrib/logbook/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/logbook/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/logbook/patch.py,sha256=Np-C41AcLIDyDBgbqlSgX75vb2cz3k6jUnxCP0Ms5sI,2327 +ddtrace/contrib/logging/__init__.py,sha256=Q1NLFHbcjRFBLg05AQ96Qm-vsA-o3cPYTAOYS3b-G7I,2145 +ddtrace/contrib/logging/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/logging/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/logging/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/logging/constants.py,sha256=kOOXgHFz3wb4Im9UuARht1TPZaOpVA_QvzM0j0bHJs4,227 +ddtrace/contrib/logging/patch.py,sha256=zGfI465HqorDyuV5tfBna-sCyHavpHRzaCnaoOyPW9k,5077 +ddtrace/contrib/loguru/__init__.py,sha256=gs8t4FTGLbWDfsafRbWkBxNQsCPKc1G9b0X6B94xgsU,2671 +ddtrace/contrib/loguru/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/loguru/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/loguru/patch.py,sha256=2OaCdYyumFeRlUjhN7pFfLDbBXdhjeFeR8A40nJXRww,2835 +ddtrace/contrib/mako/__init__.py,sha256=hXN21PGRMpV9RkGA3-ws5j_YaXXrBaKJp2Kcwze9LpQ,616 +ddtrace/contrib/mako/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/mako/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/mako/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/mako/constants.py,sha256=BpRqO-bQIWzrHFO7DtJAZ4v7lH7HpYWjYri7yCOG-Ts,35 +ddtrace/contrib/mako/patch.py,sha256=rUg6jdkaDxLobRFyHnFNfeziSP2coaojPd4QfMgrSIM,2298 +ddtrace/contrib/mariadb/__init__.py,sha256=yV-9X6B-EnmFd8CbqcfZWu6s-WPML5vtJJ9mE4YTcOw,1716 +ddtrace/contrib/mariadb/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/mariadb/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/mariadb/patch.py,sha256=tQnNB0Zo9xhxmPV6XFmRRJ12eoKO3gq8TwJpCKOVY3U,1501 +ddtrace/contrib/molten/__init__.py,sha256=QsJLSom7Lphx2S4uofnr7XN_7Ge8SGPljCO312AqqGo,1271 +ddtrace/contrib/molten/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/molten/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/molten/__pycache__/wrappers.cpython-311.pyc,, +ddtrace/contrib/molten/patch.py,sha256=_x1b6d6kG9FOePSj-CuHTZ9iE9NV947Ut_WkEVluQCE,5838 +ddtrace/contrib/molten/wrappers.py,sha256=xpd3ygO-4rnuQWhtXEdJYq1IJa5V6qv0zPK10xCafmQ,4018 +ddtrace/contrib/mongoengine/__init__.py,sha256=1Buh90LANDKZiIREQFKm__Ai5LBx92XSk72nkkPrgnI,897 +ddtrace/contrib/mongoengine/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/mongoengine/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/mongoengine/__pycache__/trace.cpython-311.pyc,, +ddtrace/contrib/mongoengine/patch.py,sha256=Zg18ToqJFeGIASS52ZbF7N4nMDXc2G4GRf2Q89YSCnk,327 
+ddtrace/contrib/mongoengine/trace.py,sha256=pweTTRGYAKhUoyIqyZEJjobCc2CmSwDfj5T4NZvikzk,1269 +ddtrace/contrib/mysql/__init__.py,sha256=4LqJc0VntMxCQzxWJBSy7WJYUq5U9ihZRe6fx25olWc,2070 +ddtrace/contrib/mysql/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/mysql/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/mysql/patch.py,sha256=9O1RUsLCN-KTeOgGm5lJfVZvh_nKehb5iU-yOMoiLic,1895 +ddtrace/contrib/mysqldb/__init__.py,sha256=-PtsHQA_acRQSd1msdEEh09RKpB5U-Zf0Zd4qXaMM-4,2418 +ddtrace/contrib/mysqldb/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/mysqldb/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/mysqldb/patch.py,sha256=_p8AaBY4tbcPekQEp1rsSFgbP0OswmzMLynzeMQzz9E,3286 +ddtrace/contrib/openai/__init__.py,sha256=gqjPSjrQfizPnu2MHoSzpgAA1WPTth8XBpTAlx6Qe4I,8349 +ddtrace/contrib/openai/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/openai/__pycache__/_endpoint_hooks.cpython-311.pyc,, +ddtrace/contrib/openai/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/openai/__pycache__/utils.cpython-311.pyc,, +ddtrace/contrib/openai/_endpoint_hooks.py,sha256=Nd45jIy0DYhr4DxJBcO929eDeAERuI0VxHznjE9hLJ8,38541 +ddtrace/contrib/openai/patch.py,sha256=gOQ8a3uyjPdqCH-z3nUS43lModo60gzUrmRhn0wMyuU,15797 +ddtrace/contrib/openai/utils.py,sha256=Sc37PAWkqwwlBzqCvCTTGWF4wRwTjIUf6FGN79iegWs,4308 +ddtrace/contrib/psycopg/__init__.py,sha256=TpbJmEjFAg2f44h6tJNNRHqG5NtTk6jVP3wv-HzVZvU,1646 +ddtrace/contrib/psycopg/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/async_connection.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/async_cursor.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/connection.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/cursor.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/extensions.cpython-311.pyc,, +ddtrace/contrib/psycopg/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/psycopg/async_connection.py,sha256=C5J8oNkn8qDnxcofPHC3DsX_pt_f0tX9R_s_m0kF8Qw,2784 +ddtrace/contrib/psycopg/async_cursor.py,sha256=5_b6hgUMogq_k0Pw9UqyWY8A0qh4pny4ntuBLb7gXrg,1147 +ddtrace/contrib/psycopg/connection.py,sha256=hWCZ5FA8qOrpv9S1J8iwIwqUnQMtINeyOR50h0JUyi0,4271 +ddtrace/contrib/psycopg/cursor.py,sha256=8yQREYRPy97Z4wTsj1hhP8d0eAMXONss3SDmpGE4Nb0,1041 +ddtrace/contrib/psycopg/extensions.py,sha256=6jnUb4otunC0A57CmdO48XutTaCW7YfQxlqP8XDNR0Y,6777 +ddtrace/contrib/psycopg/patch.py,sha256=oPHIT22RhYAz6jzNOjazgFoHOfMF96sDMLGKFSRqvOs,8132 +ddtrace/contrib/pylibmc/__init__.py,sha256=peMKPM5-dPINxamo8OM0AaXtRMNRcD1HUjZVsz2Mo0Q,1017 +ddtrace/contrib/pylibmc/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pylibmc/__pycache__/addrs.cpython-311.pyc,, +ddtrace/contrib/pylibmc/__pycache__/client.cpython-311.pyc,, +ddtrace/contrib/pylibmc/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pylibmc/addrs.py,sha256=S1wHEMy_T_zZWom_p6_u9eyC1W39nn_sXE9gC9nsTGg,365 +ddtrace/contrib/pylibmc/client.py,sha256=aNZuTtUMYfMYwE3EqHDIl4RzOXBPM9_oQ0wXogZb76E,7087 +ddtrace/contrib/pylibmc/patch.py,sha256=ui1_WRVbZ8Pyt3i5R8hugd4mmnV7PwDfgWDNYEQn3Lg,285 +ddtrace/contrib/pymemcache/__init__.py,sha256=TJltXVZEH9LxVdztLjJkRclsqv3ciFcAFy2oDtimfSg,1433 +ddtrace/contrib/pymemcache/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pymemcache/__pycache__/client.cpython-311.pyc,, +ddtrace/contrib/pymemcache/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pymemcache/client.py,sha256=4rtTVAoReUOZPqdf200BxrDn9__GvhGDhmroku1ogiU,12267 +ddtrace/contrib/pymemcache/patch.py,sha256=zX8yxueKVhU5m-LuZ9rtb7NtFf67evCu8VNf2GY9eNc,1528 
+ddtrace/contrib/pymongo/__init__.py,sha256=2efOGg1MJ1JvlBZMqa0wBa0plAZn3SlmYNCpYVL9jaE,1497 +ddtrace/contrib/pymongo/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pymongo/__pycache__/client.cpython-311.pyc,, +ddtrace/contrib/pymongo/__pycache__/parse.cpython-311.pyc,, +ddtrace/contrib/pymongo/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pymongo/client.py,sha256=Lks6ebPx9frA7I8UPiLBUyTBgNDug4xUuLgRNOQNz8Y,13064 +ddtrace/contrib/pymongo/parse.py,sha256=kJccHF7TKlWnfrN75duPAkRQeKph75h3LyzON0y_cBs,6292 +ddtrace/contrib/pymongo/patch.py,sha256=r7_VAeSxRcExucmo3IxRrn9SFBNmyFoPTAfKKytHLu4,2611 +ddtrace/contrib/pymysql/__init__.py,sha256=-aqmsgZRHaYRNMzUeCKZntkuVWxdmxIRwXm_S_zdR5Y,1674 +ddtrace/contrib/pymysql/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pymysql/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pymysql/patch.py,sha256=dszSvQTkBVdX726O4eC4xOLCxPeqMqbcpsPstWZEpOg,1552 +ddtrace/contrib/pynamodb/__init__.py,sha256=vz_iCDyV1Q8n2hltDBwY1D4X99CJVGhRxfvBd4Mcruo,1019 +ddtrace/contrib/pynamodb/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pynamodb/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pynamodb/patch.py,sha256=B9Xn4Z5H013kUD2zAHuylodiVMYkWjNXlGYlZJbpW_g,3395 +ddtrace/contrib/pyodbc/__init__.py,sha256=LwxVnw4ji9JZyqY0i_KPUdQDm34BL6FWqKrMcLn3Vtc,1604 +ddtrace/contrib/pyodbc/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pyodbc/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pyodbc/patch.py,sha256=DpRKnDFDx6RQkXjRlkRyvFcKTjmid2FrxPrCrY5tsc4,1689 +ddtrace/contrib/pyramid/__init__.py,sha256=H30eVR2iOoWisvpFupS7VTMXiOqEyC5eQ3O3LakUhnI,1710 +ddtrace/contrib/pyramid/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pyramid/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/pyramid/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/pyramid/__pycache__/trace.cpython-311.pyc,, +ddtrace/contrib/pyramid/constants.py,sha256=69TDS3jZ0kf6M7WlNT60Nv4qOWIiGEcbsCeG8_Qp9vw,310 +ddtrace/contrib/pyramid/patch.py,sha256=oqYCKckucQOeVkpLMo0lKa6gW6EuMewMraD_Ls2NECU,3773 +ddtrace/contrib/pyramid/trace.py,sha256=lAhsmH5Mp7XyaJq9scjNWt9fAwP61pV8WzAkS9pvp2I,5774 +ddtrace/contrib/pytest/__init__.py,sha256=zVzjTq1hiBAEQc-WBmsKFe8yfiITAmr5sfd84hDH9DU,2123 +ddtrace/contrib/pytest/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pytest/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/pytest/__pycache__/newhooks.cpython-311.pyc,, +ddtrace/contrib/pytest/__pycache__/plugin.cpython-311.pyc,, +ddtrace/contrib/pytest/constants.py,sha256=pqR9gHBrmyU5CyU7My2zeB9KR5BPOQOQv87KJA0FGEw,294 +ddtrace/contrib/pytest/newhooks.py,sha256=kVTKsDqYKrw_VM7MffQAJYKiuHnqUbPa7Qwrk8Wq8pE,1021 +ddtrace/contrib/pytest/plugin.py,sha256=VfR_r106w9p_Tsfkp8AxENRQrHDuQjl6bmNlnbVVHcc,38415 +ddtrace/contrib/pytest_bdd/__init__.py,sha256=4z2Vsify_2RmMeBl-tEBVhxDvOiYm9egr6mh_YKJMM4,1088 +ddtrace/contrib/pytest_bdd/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pytest_bdd/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/pytest_bdd/__pycache__/plugin.cpython-311.pyc,, +ddtrace/contrib/pytest_bdd/constants.py,sha256=hJUsSNAs87r1DIS41T39abbDG3pJpTNC52e2ZPtkvOk,55 +ddtrace/contrib/pytest_bdd/plugin.py,sha256=1tC3dk5ThGqLJk7JhVsvHgrL7yl_CZyWDG5bo0x9wBE,5191 +ddtrace/contrib/pytest_benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/contrib/pytest_benchmark/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/pytest_benchmark/__pycache__/constants.cpython-311.pyc,, 
+ddtrace/contrib/pytest_benchmark/__pycache__/plugin.cpython-311.pyc,, +ddtrace/contrib/pytest_benchmark/constants.py,sha256=Qldd-8g3tRjpdgVTMMwL4i49_ROoJ3t0VB9eJ4FeiJo,2134 +ddtrace/contrib/pytest_benchmark/plugin.py,sha256=DkAtkQoQps8AOLNC7NzcsTn2heto0ywkGkE5rxVsVXE,1302 +ddtrace/contrib/redis/__init__.py,sha256=eVvzamumY2gjIPmKfiNk9-ZglLg59Aq6KFkhukVQV3o,1981 +ddtrace/contrib/redis/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/redis/__pycache__/asyncio_patch.cpython-311.pyc,, +ddtrace/contrib/redis/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/redis/asyncio_patch.py,sha256=tK-gFeBzd3ohwNKf_0Ha3xnC_eeI2XNhGFavGKNR6uU,1595 +ddtrace/contrib/redis/patch.py,sha256=OJ53h-fbNY4bBHIpvm-n1pL4-QDdFB5IIi513J4pmWU,6933 +ddtrace/contrib/rediscluster/__init__.py,sha256=vAhdTYKt8Cet0L1GkSZbPsEyepRT1sw1uQ6rlMAMSjk,1837 +ddtrace/contrib/rediscluster/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/rediscluster/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/rediscluster/patch.py,sha256=5dKj2-Qi91yvHtDo1CcUErojELrWaW_yLRSbmQkehBo,4191 +ddtrace/contrib/requests/__init__.py,sha256=hSf34AnfweSRwnPd5vSuyAFGfxgXz7MvSfNdLX85PsA,2280 +ddtrace/contrib/requests/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/requests/__pycache__/connection.cpython-311.pyc,, +ddtrace/contrib/requests/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/requests/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/requests/__pycache__/session.cpython-311.pyc,, +ddtrace/contrib/requests/connection.py,sha256=14QyMr_458cXPmQBFqpFn1hJJ6eLqao05_IBU5lvPmA,5288 +ddtrace/contrib/requests/constants.py,sha256=HY71FxxxJKkCVoCNspOczEf9Ji6QcVDTeDCgYyuA0qU,29 +ddtrace/contrib/requests/patch.py,sha256=prHMxIef4MsTO2bAHUWH7E4sMAQ-nwoZJIb5tDg912w,1448 +ddtrace/contrib/requests/session.py,sha256=_vtXsnkadx3J6cJXjDcz39VKPQIzMVLWf0XyB-hOXm8,512 +ddtrace/contrib/rq/__init__.py,sha256=J9hChEpXvRKNvcUsIe4HdXbRTtqGMcnSn5-iSGFlv24,8596 +ddtrace/contrib/rq/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/sanic/__init__.py,sha256=yzXGvBedAy-_llonmMe6mroKNE29J0AmZMbo5Kanw2k,1830 +ddtrace/contrib/sanic/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/sanic/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/sanic/patch.py,sha256=BbFMt18dpIbq3OEnr1W5NHtSo204HERGnZfcx7ZKjGg,9868 +ddtrace/contrib/snowflake/__init__.py,sha256=BD1iYTVozgcwANvNG5keqz4HXNV9ez9_dTUbef7bNko,1916 +ddtrace/contrib/snowflake/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/snowflake/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/snowflake/patch.py,sha256=YhjNOrjyW4s18SaoFpHDrJ6kk_fIpgQ62w_xZDZvcWs,2715 +ddtrace/contrib/sqlalchemy/__init__.py,sha256=RovvKRxQQuWTjEnRAqw4QZ8tKZtpd5kLyOoJDsZoD_0,1159 +ddtrace/contrib/sqlalchemy/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/sqlalchemy/__pycache__/engine.cpython-311.pyc,, +ddtrace/contrib/sqlalchemy/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/sqlalchemy/engine.py,sha256=bsULXVgNUp7mZmDcDTGSkZbmA2y2DIL-4opqd3z-OVo,5475 +ddtrace/contrib/sqlalchemy/patch.py,sha256=sahiUKP2g9PazD2cCvD4tzowi2EkyqrBtGklcPqTJ_E,798 +ddtrace/contrib/sqlite3/__init__.py,sha256=vP1mIts57RrXXdpdveCnCTWxb54ldQRxBMZ2vy9jODc,1604 +ddtrace/contrib/sqlite3/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/sqlite3/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/sqlite3/patch.py,sha256=UPRCErN3pdWiZaBM2lZSWEfPJt9Xd9PoV11BDVRcA2A,2999 +ddtrace/contrib/starlette/__init__.py,sha256=ohVJF2DVGOi-hWv71b-xUmifIsjrbpQjEZbUDM-i7yc,2317 
+ddtrace/contrib/starlette/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/starlette/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/starlette/patch.py,sha256=iOM0W857ZkHcugmEqwtVPirmijq_DzGVFLeCyAdqR5E,7074 +ddtrace/contrib/structlog/__init__.py,sha256=JJpgY9efpqxiV7D1YqtQL_THivsg1vAJDPgde2_rZ78,1721 +ddtrace/contrib/structlog/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/structlog/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/structlog/patch.py,sha256=ql3QPna9bCZVQ-_OloNMafW_JJxQOJ5jWcLdnZbTNhU,3133 +ddtrace/contrib/subprocess/__init__.py,sha256=mzY7mir2dYoaumFAWtOG-qTlnc2dbB1gtNDwmeDR81s,1012 +ddtrace/contrib/subprocess/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/subprocess/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/subprocess/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/subprocess/constants.py,sha256=4f8Nsp5Q7RoYe1jOzh_4o-1gNsZ2mgP4bJgM75ILnwI,585 +ddtrace/contrib/subprocess/patch.py,sha256=bGbFSp1ZSr4d_OEFb3V97yS6RnhDAz-yU0xvl9v4Sd0,14442 +ddtrace/contrib/tornado/__init__.py,sha256=eLRgK_bPQrsB9aMLxEMIDvYplUB4Shoxjx2zFF39Kp4,4605 +ddtrace/contrib/tornado/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/application.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/decorators.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/handlers.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/stack_context.cpython-311.pyc,, +ddtrace/contrib/tornado/__pycache__/template.cpython-311.pyc,, +ddtrace/contrib/tornado/application.py,sha256=Ivn9_sQC6mM9bhkbyx9yBeUSf_on90Jchwwxfd_Rvlc,1843 +ddtrace/contrib/tornado/constants.py,sha256=PsZUkpzC0uUfshu3QrhvPMj5b669K9w1pXGHgitaK00,205 +ddtrace/contrib/tornado/decorators.py,sha256=qc8t-_uSVKmUERUQOLuNb7j1C5Q1TyGeoy-RK1PNaYA,3368 +ddtrace/contrib/tornado/handlers.py,sha256=jspe5TDJ4vP_p1S6_r4sa1gTSdgsntAQmpW3MbR1CQo,5569 +ddtrace/contrib/tornado/patch.py,sha256=7KGmMwD2AS-4BcBbYa6uiekZZ4_sD0jhxqAkl4-m7U0,1998 +ddtrace/contrib/tornado/stack_context.py,sha256=eKJjaSAlLu78-OvFK2tt8CXAskIyL2LBcaetmLI8tyo,6067 +ddtrace/contrib/tornado/template.py,sha256=7fLfkYziu9djuCl8F0hw_qC3fUzLoLFKkMuSvDXgGSs,1166 +ddtrace/contrib/trace_utils.py,sha256=urNH62swJDJca9ivwGqXmFuRsvAb3_mrXhVLFRNP3Ak,25317 +ddtrace/contrib/trace_utils_async.py,sha256=1b0AQ-_qk97P3Y-mLYLMzl2yo32SNjkhaKiSJ0WdmUw,1059 +ddtrace/contrib/trace_utils_redis.py,sha256=Zr_SiSVXUge57Nb5huQ0o3htyHLdP3zOIRIl2heW9aU,7256 +ddtrace/contrib/unittest/__init__.py,sha256=ItDIm3eH7jHfSeDZ8c4yguf8_WtFt2flkiJ2gVHhO9c,1250 +ddtrace/contrib/unittest/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/unittest/__pycache__/constants.cpython-311.pyc,, +ddtrace/contrib/unittest/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/unittest/constants.py,sha256=FqTAdWU1ZqPhTMuZ19Dx1a0XW6FP1jwHylw9ZhRzwPo,246 +ddtrace/contrib/unittest/patch.py,sha256=3nQGZfz-FadYwTj6Tv8MlH4DfbKPiN6X84OIP6dnAWw,35366 +ddtrace/contrib/urllib3/__init__.py,sha256=AMtocBCa7f3H5MgtZTjg1YFX5Wrw2R9fYF6IeE-8ypg,1597 +ddtrace/contrib/urllib3/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/urllib3/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/urllib3/patch.py,sha256=Y8N4R0GrMuqfSoB9uq3NopEBEslKrfhzMa41hzNS7u0,5290 +ddtrace/contrib/vertica/__init__.py,sha256=H1U-TD0OD6KmZYitew-MvYuMowAGzkXtnV9-chGZW0I,1420 +ddtrace/contrib/vertica/__pycache__/__init__.cpython-311.pyc,, 
+ddtrace/contrib/vertica/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/vertica/patch.py,sha256=tKpfdxR4eQ9TZfIZ_yw7KtGQiZe6JOExcEieoiQZqGA,8760 +ddtrace/contrib/wsgi/__init__.py,sha256=1-5ThMEpTx-Jt55Ni1DM4M06j8wTf0zuBB9NjTMd_gQ,868 +ddtrace/contrib/wsgi/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/wsgi/__pycache__/wsgi.cpython-311.pyc,, +ddtrace/contrib/wsgi/wsgi.py,sha256=Q8LFKYBoCATkvMQdXNsYdZ5JPof1-iSDrR4T44IhiPU,10523 +ddtrace/contrib/yaaredis/__init__.py,sha256=UiSo4UQXYrsP3Pcc9Yo4IuwHKwD6myL2Eh2BF160kug,2060 +ddtrace/contrib/yaaredis/__pycache__/__init__.cpython-311.pyc,, +ddtrace/contrib/yaaredis/__pycache__/patch.cpython-311.pyc,, +ddtrace/contrib/yaaredis/patch.py,sha256=DJFQivzCYqO7QdjzNQ8CP2N9qOSF0DqC6h8CStU0_bQ,2787 +ddtrace/data_streams.py,sha256=y5qpNXHaOgd2wLl5nVImreZW0nfHeLtfQS-osStQZh0,1663 +ddtrace/debugging/__init__.py,sha256=6q2f6Z5_A_PhEu8k70s4mTFy1DWlGAWx3zWEi2x9DVY,806 +ddtrace/debugging/__pycache__/__init__.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_async.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_config.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_debugger.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_encoding.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_expressions.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_metrics.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_redaction.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_safety.cpython-311.pyc,, +ddtrace/debugging/__pycache__/_uploader.cpython-311.pyc,, +ddtrace/debugging/_async.py,sha256=oAs-UL8NMhrI6GhgecrfRF2fRKG9eko1AMotJ_ZvlvM,758 +ddtrace/debugging/_config.py,sha256=OdbGrspuwbOl9x7u3LbVK_U0u88IfICUIVigJHxqcYY,246 +ddtrace/debugging/_debugger.py,sha256=XEq2UcOqptVL2-CdwNJlLCgDtT4lsSNpEp4HrjU0JS4,31180 +ddtrace/debugging/_encoding.py,sha256=K5ZH75NHm7Qq0ddwG2uKZKqdAPQmqLxNW_kzW51GADo,9307 +ddtrace/debugging/_exception/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/debugging/_exception/__pycache__/__init__.cpython-311.pyc,, +ddtrace/debugging/_exception/__pycache__/auto_instrument.cpython-311.pyc,, +ddtrace/debugging/_exception/auto_instrument.py,sha256=dKHEb1JCwJ1c1wd5ZIR3EFkSYuLLSM2sCBdR007t1wc,7011 +ddtrace/debugging/_expressions.py,sha256=T0MLsnLqsggS12C0M36lpnIRp4Gb5MApfsOZ2bcTSFI,13171 +ddtrace/debugging/_function/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/debugging/_function/__pycache__/__init__.cpython-311.pyc,, +ddtrace/debugging/_function/__pycache__/discovery.cpython-311.pyc,, +ddtrace/debugging/_function/__pycache__/store.cpython-311.pyc,, +ddtrace/debugging/_function/discovery.py,sha256=EsAUjo3WIgL9wOgHjnSyExn4opKM_S-w6O05YoYOWMs,8740 +ddtrace/debugging/_function/store.py,sha256=3EKl9AxFDWwI308InM5H-tmgX61AtgagWM08srKJFsU,4340 +ddtrace/debugging/_metrics.py,sha256=Q8MKihuNSIxLLhYy0s3jJs6r7S8owsao7eQBhxC4LZs,223 +ddtrace/debugging/_probe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/debugging/_probe/__pycache__/__init__.cpython-311.pyc,, +ddtrace/debugging/_probe/__pycache__/model.cpython-311.pyc,, +ddtrace/debugging/_probe/__pycache__/registry.cpython-311.pyc,, +ddtrace/debugging/_probe/__pycache__/remoteconfig.cpython-311.pyc,, +ddtrace/debugging/_probe/__pycache__/status.cpython-311.pyc,, +ddtrace/debugging/_probe/model.py,sha256=GOKZjdNH3XD4uPpE9GK6io0XgH043ajqZlG1FoRQ02Y,7585 +ddtrace/debugging/_probe/registry.py,sha256=GB8xaPo-Oqis411ThJQ6hjNrVUeYnX181xM3TZOo7QE,6877 
+ddtrace/debugging/_probe/remoteconfig.py,sha256=CoPjHvVL3FR9_FBvFUxBw8ntA68kHQP4FXGao7jD1m0,13092 +ddtrace/debugging/_probe/status.py,sha256=O8vXceuim7R1BEKLctJ9d1eW28qx_2JvvRa2-IbQBfs,5317 +ddtrace/debugging/_redaction.py,sha256=X4qtpuhAfSNvBMJISO8hdS0bNB2T280je8_lDTlLFxs,4182 +ddtrace/debugging/_safety.py,sha256=aBd-fyAs7wqf30LiCfmFG5CzR-xbXb7osnh2nf8W61A,2152 +ddtrace/debugging/_signal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/debugging/_signal/__pycache__/__init__.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/collector.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/metric_sample.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/model.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/snapshot.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/tracing.cpython-311.pyc,, +ddtrace/debugging/_signal/__pycache__/utils.cpython-311.pyc,, +ddtrace/debugging/_signal/collector.py,sha256=6esIpqf4jkRVMX32Gi6jQXre2VaspOLV2orByKxjH8w,4323 +ddtrace/debugging/_signal/metric_sample.py,sha256=Pj49TDe3iqFv1eNM-jsFoFVGtc0GaYjor7-1ZkGmuXA,2773 +ddtrace/debugging/_signal/model.py,sha256=7D5rOwtxZtIjB-qR__hZs7JKIJsL9hehwE5jX39Kvrk,4942 +ddtrace/debugging/_signal/snapshot.py,sha256=IRl3zyqv2-HCPzQSV4u7WRc6x_DI61SFGJT5uWI5ags,8547 +ddtrace/debugging/_signal/tracing.py,sha256=rHXS4w6V_AGGbXIJzmSUR8BMWjfqxl3wTA8xaXwRlic,5677 +ddtrace/debugging/_signal/utils.py,sha256=48H65Um7jj7qfK0INw8RvTpbfPMz_yfaK7y2BG3cCXI,9759 +ddtrace/debugging/_uploader.py,sha256=tadYtxcBizkxsXL37FT1Ft5fb8QQYrKao8VIf9COyxE,3546 +ddtrace/ext/__init__.py,sha256=0Ax8C94xQ7C80xKs2_xYzP6RVESKZH2DB3rM1MjyLKQ,473 +ddtrace/ext/__pycache__/__init__.cpython-311.pyc,, +ddtrace/ext/__pycache__/aws.cpython-311.pyc,, +ddtrace/ext/__pycache__/cassandra.cpython-311.pyc,, +ddtrace/ext/__pycache__/ci.cpython-311.pyc,, +ddtrace/ext/__pycache__/consul.cpython-311.pyc,, +ddtrace/ext/__pycache__/db.cpython-311.pyc,, +ddtrace/ext/__pycache__/elasticsearch.cpython-311.pyc,, +ddtrace/ext/__pycache__/git.cpython-311.pyc,, +ddtrace/ext/__pycache__/http.cpython-311.pyc,, +ddtrace/ext/__pycache__/kafka.cpython-311.pyc,, +ddtrace/ext/__pycache__/kombu.cpython-311.pyc,, +ddtrace/ext/__pycache__/memcached.cpython-311.pyc,, +ddtrace/ext/__pycache__/mongo.cpython-311.pyc,, +ddtrace/ext/__pycache__/net.cpython-311.pyc,, +ddtrace/ext/__pycache__/redis.cpython-311.pyc,, +ddtrace/ext/__pycache__/sql.cpython-311.pyc,, +ddtrace/ext/__pycache__/test.cpython-311.pyc,, +ddtrace/ext/__pycache__/user.cpython-311.pyc,, +ddtrace/ext/aws.py,sha256=0t5-ftzdd4eCkpG4udnbvONnSe648nfsuH_D914oGCg,3538 +ddtrace/ext/cassandra.py,sha256=nT1jltcvXRSDddqWHWhFD3cxInDhhKd0ouGghNV3zgQ,191 +ddtrace/ext/ci.py,sha256=4VuQNSLToF0QSM8zt-GrX0BMjy6qxVIyODXT-I-IyhE,21950 +ddtrace/ext/consul.py,sha256=6Vq9cGCCOimwFMyQRR1rx0mnEWYKWn3JSYVihLnkPhk,76 +ddtrace/ext/db.py,sha256=dW0pOWQYQlAri4goGdlVIcDzmx6GjpURPXAMEoOvzt4,325 +ddtrace/ext/elasticsearch.py,sha256=1NbDiLzJYpzs0KVC7W2q8MsHkcayjFjZwasFP6mRSkI,211 +ddtrace/ext/git.py,sha256=BibJ42uXDj9Wnkg2JchffvUgJRiM-OOytHvQS-1GGaQ,14225 +ddtrace/ext/http.py,sha256=TegstAKHfmYMdxQBDdTKd8wHGGNK44xWq2JlECo7evg,447 +ddtrace/ext/kafka.py,sha256=Fwb5SfjCSAS5Q1cunqzTxpGD8MgwskrTJr_ofTYedmg,349 +ddtrace/ext/kombu.py,sha256=JqzezXNOH_wlcrQzNAOeM87v4CnJPFXrNeK9hX-UDTk,228 +ddtrace/ext/memcached.py,sha256=9OhpONkQtGpcQBAcX6AxdMmvWk7J7A8aJfknfemGZ4M,98 +ddtrace/ext/mongo.py,sha256=xLojJ7vdDhEpYaFLcYASCC2iUM0pPcZ83o-7JSgnmgc,96 +ddtrace/ext/net.py,sha256=jkuYQt8qHAceRflb0QNud6ramJXrrLUoSYgrf-7dOo0,215 
+ddtrace/ext/redis.py,sha256=MCy19md0SCMkAXohiBxHmeye3f-3WXwbqpfyp9W9DoA,296 +ddtrace/ext/sql.py,sha256=PWbg2AmP3SFey-knezYm71HbFh76KV9K_JKhi0HaRN8,2297 +ddtrace/ext/test.py,sha256=Jlg8nZCsz2PKQWzEM7ZAAMeBypS9yn-bqW4XAlErp0s,2082 +ddtrace/ext/user.py,sha256=v3K0RLPK3xmQFS-jAq2oYclb09TKEsFc61fAJyZQ3DA,149 +ddtrace/filters.py,sha256=YfvhPWtyirFGQibO9Zp7q3GRci9T17CEPqJMbdm3H8A,2602 +ddtrace/internal/README.md,sha256=1uhyNSgCgy-mhrKDfkBxHD76hPffLFZFRGH7ma6JRdk,323 +ddtrace/internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/internal/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/__pycache__/agent.cpython-311.pyc,, +ddtrace/internal/__pycache__/assembly.cpython-311.pyc,, +ddtrace/internal/__pycache__/atexit.cpython-311.pyc,, +ddtrace/internal/__pycache__/codeowners.cpython-311.pyc,, +ddtrace/internal/__pycache__/compat.cpython-311.pyc,, +ddtrace/internal/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/__pycache__/debug.cpython-311.pyc,, +ddtrace/internal/__pycache__/dogstatsd.cpython-311.pyc,, +ddtrace/internal/__pycache__/encoding.cpython-311.pyc,, +ddtrace/internal/__pycache__/forksafe.cpython-311.pyc,, +ddtrace/internal/__pycache__/gitmetadata.cpython-311.pyc,, +ddtrace/internal/__pycache__/glob_matching.cpython-311.pyc,, +ddtrace/internal/__pycache__/hostname.cpython-311.pyc,, +ddtrace/internal/__pycache__/http.cpython-311.pyc,, +ddtrace/internal/__pycache__/injection.cpython-311.pyc,, +ddtrace/internal/__pycache__/log_writer.cpython-311.pyc,, +ddtrace/internal/__pycache__/logger.cpython-311.pyc,, +ddtrace/internal/__pycache__/metrics.cpython-311.pyc,, +ddtrace/internal/__pycache__/module.cpython-311.pyc,, +ddtrace/internal/__pycache__/packages.cpython-311.pyc,, +ddtrace/internal/__pycache__/periodic.cpython-311.pyc,, +ddtrace/internal/__pycache__/rate_limiter.cpython-311.pyc,, +ddtrace/internal/__pycache__/safety.cpython-311.pyc,, +ddtrace/internal/__pycache__/sampling.cpython-311.pyc,, +ddtrace/internal/__pycache__/service.cpython-311.pyc,, +ddtrace/internal/__pycache__/sma.cpython-311.pyc,, +ddtrace/internal/__pycache__/tracemethods.cpython-311.pyc,, +ddtrace/internal/__pycache__/uds.cpython-311.pyc,, +ddtrace/internal/__pycache__/uwsgi.cpython-311.pyc,, +ddtrace/internal/_encoding.c,sha256=A7JX7UkPMykG0eNxozk2RM2JVtv-Zda0zvJT7x9FYq0,1637460 +ddtrace/internal/_encoding.cpython-311-x86_64-linux-gnu.so,sha256=k8xvHXdyGo8wTGqkavFJtqdiGV9j-7zMWzojzTg2lgs,357568 +ddtrace/internal/_encoding.pyi,sha256=JNUuxuoZz2D3nKo5lGDUsxA9u1fS6MSffYD8he_hnz8,1063 +ddtrace/internal/_encoding.pyx,sha256=8J2uckAmCWnfzxmqdHx6Lh5mJItwbmGdmM0E29BASGA,36236 +ddtrace/internal/_rand.c,sha256=4UADi5JunmqpTSh5HHjAHiyJLz9mk3ZyYygHrf8XYPQ,296384 +ddtrace/internal/_rand.cpython-311-x86_64-linux-gnu.so,sha256=tTEd4T3xjRJAnmmH9Px71Pqo9olFkujGslzCFyIs38I,64144 +ddtrace/internal/_rand.pyi,sha256=VQEVI10a3YVrb7Bg5adUaUkI0J70QWdt6E0Bt5dH_MY,127 +ddtrace/internal/_rand.pyx,sha256=BXiqrnuT7h3wRsrWN4jhC1bFxfwyHMrDq4Bbxs1yZ5w,5107 +ddtrace/internal/_stdint.h,sha256=DY0jy2g-deZsm-PfT3vWeiGgMoldI8AZX-nQ2qnLSVo,109 +ddtrace/internal/_tagset.c,sha256=xf0v8CcBZcRVul9rgwB7l4y4Qgu4S5E6LNgphbTyox8,434000 +ddtrace/internal/_tagset.cpython-311-x86_64-linux-gnu.so,sha256=GnY8j-hJdHDzv8XllxAb6--gvDcn0RsIEPnuGKXKr9s,118400 +ddtrace/internal/_tagset.pyi,sha256=J-05rpWA3TH8ex_nckcYq8mY0yO1enHCKsm8f_XmL7Y,624 +ddtrace/internal/_tagset.pyx,sha256=6UeR66ZK3PbiCWb_-Sj0srNuxOy8HQmOfztaTDLJGD4,7483 +ddtrace/internal/_utils.h,sha256=eX_-G0pyK96mZ60uiUPeLsNqX5r_9PfMmy9y8fq3hSs,191 
+ddtrace/internal/_utils.pxd,sha256=XmIW76IjCJ-yipWMi97KuUYu3LRU_LjHN4YxuEN-W4g,77 +ddtrace/internal/agent.py,sha256=pbfCcLDqpFUbbxtRgGJORAf0SzyiO83s4Z0bxELvvao,3304 +ddtrace/internal/assembly.py,sha256=mcRrXrHGk_caBpsGugJ-pj3EgY4M67AMCPFGjenP7Vw,9198 +ddtrace/internal/atexit.py,sha256=EBzMjEW4_gZ32QkrSQjkDmt9nY_MMspYaUFUVrRVtGA,2607 +ddtrace/internal/buff_converter.h,sha256=lQM0fFqpHtXpaprDrIE9bDXqIlCFbpt_h6nzJS1eRjk,658 +ddtrace/internal/ci_visibility/__init__.py,sha256=C5YraQmpFIH2KMYneZNA2FsBOFTtUKWtCN_xYi-5ODQ,353 +ddtrace/internal/ci_visibility/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/coverage.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/encoder.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/filters.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/git_client.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/recorder.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/utils.cpython-311.pyc,, +ddtrace/internal/ci_visibility/__pycache__/writer.cpython-311.pyc,, +ddtrace/internal/ci_visibility/constants.py,sha256=YjMp_YtDGMr776Hi00vexvde0Mep_EJ1GfvPL36Re-s,1967 +ddtrace/internal/ci_visibility/coverage.py,sha256=syHQAPVkWaS5jn_8SSDFcYSmP5PFVz64MpOFSjkJjHQ,5744 +ddtrace/internal/ci_visibility/encoder.py,sha256=9z2RQIen80ly3uqJAUxnHmpKElwtOa_WTsWDzUHtJbo,8005 +ddtrace/internal/ci_visibility/filters.py,sha256=1ayfP0sPlKGweR51MrNKY78b8hoqniGZpxLybYuEXQ4,1211 +ddtrace/internal/ci_visibility/git_client.py,sha256=8gCcNM5XkD6FlCcxH5JF1f14G94pdtHl2pTxYB294RU,21375 +ddtrace/internal/ci_visibility/recorder.py,sha256=kfdA8pDlHf3nONOxWRodKeE8jbpS3a8zZAmouaE4jAk,25338 +ddtrace/internal/ci_visibility/telemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/internal/ci_visibility/telemetry/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/ci_visibility/telemetry/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/ci_visibility/telemetry/__pycache__/git.cpython-311.pyc,, +ddtrace/internal/ci_visibility/telemetry/__pycache__/utils.cpython-311.pyc,, +ddtrace/internal/ci_visibility/telemetry/constants.py,sha256=NKfiMnsYyUE2UuFGy7GG6jCd1Ur3PiLocjAWh_l8HlE,1384 +ddtrace/internal/ci_visibility/telemetry/git.py,sha256=ic7wWqiPGNiDe9QhpXFhZwUXj0KNsVF_W1D6j8VkP4A,4137 +ddtrace/internal/ci_visibility/telemetry/utils.py,sha256=DEVzVw2llci3NqBRam5lbixKWsiL94l5MUBXJ7tZviY,491 +ddtrace/internal/ci_visibility/utils.py,sha256=wbefZtB9Z_YD3cyTGA4mbWnvHSbBVyTj3DP49DKOyjg,5257 +ddtrace/internal/ci_visibility/writer.py,sha256=1YDHO_UEOh5zXlAb4FmbFZMqhY4IZmENc7Hdm0wU3do,5401 +ddtrace/internal/codeowners.py,sha256=9uses9up7Jq24yGi_qulebiwn2M9Cfx6riQgS5E8pvA,7361 +ddtrace/internal/compat.py,sha256=nRCBVmjlDgxO5ea7bntNuE39heegORK6jph2I5c2dXE,13558 +ddtrace/internal/constants.py,sha256=lBoJ_oZQPv6Vs6izrvOjeZ4XJ5kY3f8ZbaxlBjoL7qo,4521 +ddtrace/internal/core/__init__.py,sha256=8CmrXGXXXHtCJS8LiS83jM1hoj759nai4JQZPgxrXEU,12372 +ddtrace/internal/core/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/core/__pycache__/event_hub.cpython-311.pyc,, +ddtrace/internal/core/event_hub.py,sha256=fIXb8T_h1VBOoF0LAmWe8AQeNmnt5x8GB0Ndj9T7PvI,3530 +ddtrace/internal/datadog/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/internal/datadog/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/datadog/profiling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+ddtrace/internal/datadog/profiling/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/datadog/profiling/__pycache__/ddup.cpython-311.pyc,, +ddtrace/internal/datadog/profiling/__pycache__/utils.cpython-311.pyc,, +ddtrace/internal/datadog/profiling/_ddup.cpp,sha256=OBhXOKGg_eigy87R85OhkILx1Z6D-SQS7szgV_IlpUE,555023 +ddtrace/internal/datadog/profiling/_ddup.cpython-311-x86_64-linux-gnu.so,sha256=TMkHk1LIW2XuFhFDMcsuOaOOqPLZr92s94xOXjidKv0,3667048 +ddtrace/internal/datadog/profiling/_ddup.pyi,sha256=5BDwwZ0wFSK1OA6jyt_3N-Xq62FP0gP0l4yEP0J_dGo,1181 +ddtrace/internal/datadog/profiling/_ddup.pyx,sha256=oBPnQND4FKaFK2_6H-2KCbcxVpZEMKC1kCkEsY6iY5c,6914 +ddtrace/internal/datadog/profiling/ddup.py,sha256=r-5tknAytd52G7r-zhy6VaJFv4yy_FwrEX1hO8XWn4o,2383 +ddtrace/internal/datadog/profiling/include/exporter.hpp,sha256=a9O2sqkmbcKe6L0doPcwM3hLzx_D0PABi81Jaz775N4,8438 +ddtrace/internal/datadog/profiling/include/interface.hpp,sha256=2fIXtkft9g-gO91qScunLEPshfMzJFoiNi7Dr45NJjQ,2117 +ddtrace/internal/datadog/profiling/src/exporter.cpp,sha256=v0_wQ0JEDl0esx92ySC0WSf6kh1yDWb_4GNuIOYyN20,19149 +ddtrace/internal/datadog/profiling/src/interface.cpp,sha256=I05bqNo4gaCN_kSKhHCd1p5b77uUzTa7pNaH7BiSv-s,6736 +ddtrace/internal/datadog/profiling/utils.py,sha256=ibJDCIZyJAK2qiUFIQzJkHSj-hSJMoWM18uJy7iOYBY,819 +ddtrace/internal/datastreams/__init__.py,sha256=Fb7HvVbejdmBFjPMQyPmdGY1Pho2kRozukRBKD5KE9k,817 +ddtrace/internal/datastreams/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/botocore.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/encoding.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/fnv.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/kafka.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/kombu.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/processor.cpython-311.pyc,, +ddtrace/internal/datastreams/__pycache__/utils.cpython-311.pyc,, +ddtrace/internal/datastreams/botocore.py,sha256=Q1iyGAwVSstVF-Kprl8O4KRbPzThxxL9YPOX5dhsoSo,6761 +ddtrace/internal/datastreams/encoding.py,sha256=SSLbXfaLgxR-jVPx6NnPYfbHbAafqEtHna66VEhzrPA,969 +ddtrace/internal/datastreams/fnv.py,sha256=EfOCo9miVh3ohlsQpqPMa7PvFeG124cwSv1aaP4R4_c,779 +ddtrace/internal/datastreams/kafka.py,sha256=fvMTF7t0TnEjZkDoZtCnVGKe5phZuGce7WR73-bmfbE,4737 +ddtrace/internal/datastreams/kombu.py,sha256=kgFGfXfbp6NSUPRuQaT7g2pJY0xrgLQT44NZl6-HHn0,2050 +ddtrace/internal/datastreams/processor.py,sha256=1ycLrFk-fsHqR__w6TgemUY9QVBC4dkZlBfrT8sNTio,18767 +ddtrace/internal/datastreams/utils.py,sha256=0AU8Wwly06bjP5lSCixR2SZpgTvi7sybYItEsjQk0cM,612 +ddtrace/internal/debug.py,sha256=FeWWWKI7g4xHlMbngqNaYFqsU6c4lE2Dxo18Fpx067A,10650 +ddtrace/internal/dogstatsd.py,sha256=efN0ySeNqX07_oFtQpJiKPmX6QDFHWWUrF2ywXV5zSc,1038 +ddtrace/internal/encoding.py,sha256=h5aSO2sNajjq5XeR-XzQj69lcrPmS_zCZDq85otwHRk,4632 +ddtrace/internal/forksafe.py,sha256=jwvxSkHG5ZfEml47mtIXhxOcqMcQ0HDkteTWriQpQrI,3896 +ddtrace/internal/gitmetadata.py,sha256=pXd1Mv-olBtaCt93LVYaN9Oy0h9i5_rTt4M1H1CxNmU,5197 +ddtrace/internal/glob_matching.py,sha256=BRhphDAbDGKvKqfMd-Z2m84ByljJbJX1nDhz-N_UBiI,1625 +ddtrace/internal/hostname.py,sha256=JxcUHAgbWLtnOAjghUb0MkQ4gHiFka15PwsDSDYJexk,285 +ddtrace/internal/http.py,sha256=F8XLr90sCfPG_KuAPwppsOdJoSwYzWjJB97Yaon3egw,1103 +ddtrace/internal/injection.py,sha256=FqCQipq4LnE_terTZomcnFDdhtnKHmc6Jl4DJY3P_k0,6546 +ddtrace/internal/llmobs/__init__.py,sha256=yVpxe-9lyZ-SOjbgkP2kvsOOmXTwUMna6QpQHYimBOU,62 +ddtrace/internal/llmobs/__pycache__/__init__.cpython-311.pyc,, 
+ddtrace/internal/llmobs/__pycache__/writer.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/__init__.py,sha256=pptHo2prPaGFp2wFjWJmd-s3s0Zt5nbsTd-5nWOXBk8,261 +ddtrace/internal/llmobs/integrations/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/__pycache__/base.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/__pycache__/bedrock.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/__pycache__/langchain.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/__pycache__/openai.cpython-311.pyc,, +ddtrace/internal/llmobs/integrations/base.py,sha256=_S42QhGOAlBrSNx2u5RBfJx_UpO0BcHUWGJKx4nToIs,10029 +ddtrace/internal/llmobs/integrations/bedrock.py,sha256=MCSeBcazo9Z_kSeao_Ifn9DeaK1Iq7TUOMpY1F6HOSM,2997 +ddtrace/internal/llmobs/integrations/langchain.py,sha256=tZ7UaWCLU7ZFqRxPoDCxdMcSA83TJRjHkOi63pM6VnA,3091 +ddtrace/internal/llmobs/integrations/openai.py,sha256=0kzXxufGC0pZ1N-a1FQ0uQyNaGZQirSFcZumxkmxzD4,10146 +ddtrace/internal/llmobs/writer.py,sha256=w4Cvh8BfI7wiTgzl8dvU_fORzKddirjaXwnf_opX5Co,4858 +ddtrace/internal/log_writer.py,sha256=rp2JATtkdLWmINqPd_PkUT58qGH5v5BROrMhiLNRmtA,3675 +ddtrace/internal/logger.py,sha256=fpAztptlezDIA80oyoZ-N1ZMZjoUj_nLgRk8m9dA9FM,7597 +ddtrace/internal/metrics.py,sha256=qpPVykItcuEs_Y6GGxFyxf10FQPlYYDgIVDW1Fe3QiU,3151 +ddtrace/internal/module.py,sha256=n6LUvLreMXwkjkXUDJdiemGMKv77-8eB0Ump0fnKE-E,19554 +ddtrace/internal/pack.h,sha256=VnFr2XJ8XJHaYh62dJSsrNhmgZPJBgOxSkoIuNgfoQg,2534 +ddtrace/internal/pack_template.h,sha256=dIcX-OWgFLwJBeVRxKmShwhs4nvGfYUrqDewJAFyvjw,39017 +ddtrace/internal/packages.py,sha256=Mvl9Grw00vXEXFdetAYmEeYn_qH025XDmfDiuvavLEQ,4343 +ddtrace/internal/periodic.py,sha256=BYrTDM5fA8t7CPQcmtlkfnCMfT41ecvuUk2cC0mi1wY,5180 +ddtrace/internal/processor/__init__.py,sha256=P6T_QmsAqT9rtTOOkgtDGQtzMmLmhR_uI9kGpclZrJE,2342 +ddtrace/internal/processor/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/processor/__pycache__/endpoint_call_counter.cpython-311.pyc,, +ddtrace/internal/processor/__pycache__/stats.cpython-311.pyc,, +ddtrace/internal/processor/__pycache__/trace.cpython-311.pyc,, +ddtrace/internal/processor/endpoint_call_counter.py,sha256=pIAEWYdt0CERXV-9SBPlEqTfA7uyF_8oE1FoYw4Zd8E,1394 +ddtrace/internal/processor/stats.py,sha256=SP3aWerJD-47MeUFG7PgxU8ZIpGwJRvgdLwUBUgAU-4,9324 +ddtrace/internal/processor/trace.py,sha256=xfJDag2Bbv0v4KY9KCj7WK41X0USCIYvvaXKJGcQ2a0,15789 +ddtrace/internal/rate_limiter.py,sha256=Da2oKLfGF7RD-NvMBm3WzTMEETqha5ANP1fEqeaELHw,8974 +ddtrace/internal/remoteconfig/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/internal/remoteconfig/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/_connectors.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/_publishers.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/_pubsub.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/_subscribers.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/client.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/utils.cpython-311.pyc,, +ddtrace/internal/remoteconfig/__pycache__/worker.cpython-311.pyc,, +ddtrace/internal/remoteconfig/_connectors.py,sha256=JorVJOVB6Z7_IzZmj_ILq_UYwX4PUCdXG3Z5cOR5CXU,3386 +ddtrace/internal/remoteconfig/_publishers.py,sha256=E2wv4iRGD16Ky9M-d3Tx5iFXMkl781QeamPiR0uXD40,5479 +ddtrace/internal/remoteconfig/_pubsub.py,sha256=R29hzWRueVHSLrhfzqrPWZDIPqqMAkhNgLdlL3plUW4,5456 
+ddtrace/internal/remoteconfig/_subscribers.py,sha256=kMWhI_qN41Y4HGou4y3oFGH92sj0PqV4g8n_RAQ9ceM,2212 +ddtrace/internal/remoteconfig/client.py,sha256=uG9HoFfjq45gu3I3uwnH1Umt89-Hbpb_CZMtyOmzvFU,22235 +ddtrace/internal/remoteconfig/constants.py,sha256=cUe9H0q6O1wMYA4fyM54CbfgtqAmTJDeRGp6qSN9vyU,83 +ddtrace/internal/remoteconfig/utils.py,sha256=x6lFRbYU9baAU4wpD8EApw8ZLTk5eo9HIur9oMYh010,132 +ddtrace/internal/remoteconfig/worker.py,sha256=LzVerzeL6N3lU1qhJHvPxJZkjjZNDt3-d8ltLb3U554,6498 +ddtrace/internal/runtime/__init__.py,sha256=mtD3VXezLQmvYO1gpGVwPVDQjSIfh27fnCI-wkq45_Y,1049 +ddtrace/internal/runtime/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/collector.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/container.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/metric_collectors.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/runtime_metrics.cpython-311.pyc,, +ddtrace/internal/runtime/__pycache__/tag_collectors.cpython-311.pyc,, +ddtrace/internal/runtime/collector.py,sha256=ZqeQ2lVLgMXrmisQPE4Nm-g0fbjfdVP1vsDeEMICzDM,3117 +ddtrace/internal/runtime/constants.py,sha256=tm1iOlpgtGM0a0uXubjnoDWUCMqB1DzjoNTFm0Z5ZaY,1036 +ddtrace/internal/runtime/container.py,sha256=fCd_Ge1SB0chqPaQptciMs-RvrCd2dUnX4qxC1q6O90,3838 +ddtrace/internal/runtime/metric_collectors.py,sha256=YxTwAQVJJlnjp543RMLGN9vxK202q6Zz3EXERNOx-VQ,2930 +ddtrace/internal/runtime/runtime_metrics.py,sha256=Bouu18-AyB6DEef-qzTK1y3RNY6wQPnsyiVJSrs05J8,5738 +ddtrace/internal/runtime/tag_collectors.py,sha256=7syAU56F5Y8snWyY3vDxo4fxswysiH11Tu7_59nHNR4,2338 +ddtrace/internal/safety.py,sha256=R9SVrTeahuaPcpKUvCuXVr9DB5x9MipRCqGfa_ydj6c,4099 +ddtrace/internal/sampling.py,sha256=rs5MVlbcuiZ29YSXthjrTOIf9WLdrVPlUzNTuIG0qBs,10577 +ddtrace/internal/schema/__init__.py,sha256=X8UqZKBuI8aRpQdJ6JjynG0oqMjKay78P5SCOkZLbfo,2558 +ddtrace/internal/schema/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/schema/__pycache__/span_attribute_schema.cpython-311.pyc,, +ddtrace/internal/schema/span_attribute_schema.py,sha256=TH6cLDqAhG9SLUsjJZTW0TdpFc1Un8ghgCX7TRYSOZU,3693 +ddtrace/internal/serverless/__init__.py,sha256=3y6rA68TpzSYSK4T96vrPK3l5jiEIETFYJyaqNifhmo,1791 +ddtrace/internal/serverless/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/serverless/__pycache__/mini_agent.cpython-311.pyc,, +ddtrace/internal/serverless/mini_agent.py,sha256=NPyY8h-mgtf_HUN1U6lTxIBVVa1lQxKGPXYensUADW4,1507 +ddtrace/internal/service.py,sha256=47l3CMtCbGkVjKQcd41MNKRVyikEa0tm-aFOTghacC0,2957 +ddtrace/internal/sma.py,sha256=CZJTxxoBnFeMN0_xgn-cms3y3BQfMCBTX0Z_H4N1WVY,1601 +ddtrace/internal/sysdep.h,sha256=d2ZWwMOPyhrZctSVL52Ctvrg4jaOGiPjZKOCe8NJLgU,7597 +ddtrace/internal/telemetry/__init__.py,sha256=VpcHpiqW0mZmK2WNc6PhoMn9IHE1DCTzUjgQS6-j1sw,2596 +ddtrace/internal/telemetry/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/telemetry/__pycache__/constants.cpython-311.pyc,, +ddtrace/internal/telemetry/__pycache__/data.cpython-311.pyc,, +ddtrace/internal/telemetry/__pycache__/metrics.cpython-311.pyc,, +ddtrace/internal/telemetry/__pycache__/metrics_namespaces.cpython-311.pyc,, +ddtrace/internal/telemetry/__pycache__/writer.cpython-311.pyc,, +ddtrace/internal/telemetry/constants.py,sha256=AaJz5TFGQ0HP6d_FheUPkAxcUI-WtnIf0KRnHTg5Erw,3868 +ddtrace/internal/telemetry/data.py,sha256=mrM-D2nZrDKne7C25g13nIXBcwH-s2poZhG6yXYiIw4,3992 +ddtrace/internal/telemetry/metrics.py,sha256=FZPRofKBa7MYmZ6aIFG_GcCa9RurYKsvK_AEEB7Au9w,5074 
+ddtrace/internal/telemetry/metrics_namespaces.py,sha256=ZdofJKRQwo4khl3IM_v6xMxyfYx77gLbCTXVEREy_Ak,2495 +ddtrace/internal/telemetry/writer.py,sha256=m0j9-glfvqquDwbFMUiXsPVppBKyZg9bZBut9w5OUac,33941 +ddtrace/internal/tracemethods.py,sha256=tt9_-ghcywLVpDvs38B-epdvZaJLcftt2bn5ME8sVkk,4229 +ddtrace/internal/uds.py,sha256=Pcf8sWhGzh-MCcEMwlyaDsq7M-OQqH4DryRpHIP1D6c,848 +ddtrace/internal/utils/__init__.py,sha256=pAW2h7Vl9oJGDGEl9mvk_mjTuAwbUxP7Nmluf6UNLpk,2822 +ddtrace/internal/utils/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/attrdict.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/cache.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/config.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/deprecations.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/formats.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/http.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/importlib.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/inspection.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/retry.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/signals.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/time.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/version.cpython-311.pyc,, +ddtrace/internal/utils/__pycache__/wrappers.cpython-311.pyc,, +ddtrace/internal/utils/attrdict.py,sha256=-zaPkaVMhBzwWABgVs1MM6R5At53aCEDzebuWMeeD1M,1181 +ddtrace/internal/utils/cache.py,sha256=hupOr0tIsga9X4hNE6eIKSy40wP7rU_tm8lgzg4C3I4,4189 +ddtrace/internal/utils/config.py,sha256=T-t1Wj8qUOmQ98TBaZNb9o10X9zaLMEYphwNniNLBCw,446 +ddtrace/internal/utils/deprecations.py,sha256=l2a0vrVpTd9lm1DI9KFf8WdD0_YxzNWk9uvUnokefbs,369 +ddtrace/internal/utils/formats.py,sha256=7k6w76X8oGmnsSircnNcyyejrHDjT980LA5xI5N3ycA,5180 +ddtrace/internal/utils/http.py,sha256=wqctaZP4cvwCDGi3sbGQLITFAgpbZX7uYaMVnvlu1Qc,16031 +ddtrace/internal/utils/importlib.py,sha256=nfCdh0p50JqvpBjTI093rMgSzAaJQNWBmXRU05tiuJg,1404 +ddtrace/internal/utils/inspection.py,sha256=C2pLZr6GBHmlk1PAIQrNOdp_NVUTcErjyzJWD29cq3k,4450 +ddtrace/internal/utils/retry.py,sha256=3FxbFjM1C23tcbAzsYq0Z7Qzdqi3mqhAvumTUl6V7-U,1657 +ddtrace/internal/utils/signals.py,sha256=PZSn4_YNMALZXLhpo333n2LijDcegNYrAQJibXPvnQw,866 +ddtrace/internal/utils/time.py,sha256=lmK2CUxarFj4IMF-18QspZ08iAIb48FoTx-Jjj-xj28,4113 +ddtrace/internal/utils/version.py,sha256=HATPlVAyNyIHXip-0_-8uBhYnomBzYm1Tr0XYsGcLts,2937 +ddtrace/internal/utils/wrappers.py,sha256=9sF0cdQQb7NTZ5fAbiXNgs-Z57mFkK6iv5u4Q04mBOw,632 +ddtrace/internal/uwsgi.py,sha256=ZpAZEcrP9F32dM5uAO2xgXDW8Rzo3iv1j5Ih6BMojco,2770 +ddtrace/internal/wrapping/__init__.py,sha256=24xDiCnr0Atilv8Dj16ORl78O6Jw1X2nZiMFSecISec,9840 +ddtrace/internal/wrapping/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/wrapping/__pycache__/asyncs.cpython-311.pyc,, +ddtrace/internal/wrapping/__pycache__/generators.cpython-311.pyc,, +ddtrace/internal/wrapping/asyncs.py,sha256=vD66Tf836TbXCYE2SxxoD7U-fXICgJVYQcAaTVX93f4,19872 +ddtrace/internal/wrapping/generators.py,sha256=q7TrbVGf4Pl3kEqrjCbzmLC1NiL3GGCUeihFBTjvGqE,13579 +ddtrace/internal/writer/__init__.py,sha256=rOoVAxiRLPg-F6K_7AatS8iwWlQr8Fqtxi-AWE2adYY,431 +ddtrace/internal/writer/__pycache__/__init__.cpython-311.pyc,, +ddtrace/internal/writer/__pycache__/writer.cpython-311.pyc,, +ddtrace/internal/writer/__pycache__/writer_client.cpython-311.pyc,, +ddtrace/internal/writer/writer.py,sha256=fdW961Rmt_OoljdMme-o31SK_OP9ArLmaNCNIR4l7C8,24201 
+ddtrace/internal/writer/writer_client.py,sha256=yY0T5VbCzRF_xWHVyKxlTuOrmK9oXYPUQSfGiRhYeb0,1198 +ddtrace/opentelemetry/__init__.py,sha256=SHqVDpXH5Od-0tffHvOX41YI5eBc16I043MXgUaWXEw,3455 +ddtrace/opentelemetry/__pycache__/__init__.cpython-311.pyc,, +ddtrace/opentelemetry/__pycache__/_context.cpython-311.pyc,, +ddtrace/opentelemetry/__pycache__/_span.cpython-311.pyc,, +ddtrace/opentelemetry/__pycache__/_trace.cpython-311.pyc,, +ddtrace/opentelemetry/_context.py,sha256=7ofcRQ8jMlpfEXsOrGTr70_pet_wQLL7XaYR44DLb9Q,3881 +ddtrace/opentelemetry/_span.py,sha256=Iu3WTVLh0DcZao5GOVjiO3TWICo98QBM-mMpjhF-fLE,11663 +ddtrace/opentelemetry/_trace.py,sha256=HEBJ3g-Sv7N5167v0NcHmpx56mqF5xLzUbiHkcIY5JM,6039 +ddtrace/opentracer/__init__.py,sha256=Z2NreXCsKituj_xkrBrxox6dJME-HY5dAQdoPrT6P4A,121 +ddtrace/opentracer/__pycache__/__init__.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/helpers.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/settings.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/span.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/span_context.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/tags.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/tracer.cpython-311.pyc,, +ddtrace/opentracer/__pycache__/utils.cpython-311.pyc,, +ddtrace/opentracer/helpers.py,sha256=3Ayxc9IgQPRxgSwEFb2ChKJmfVpn_syv1crODBUYFcg,491 +ddtrace/opentracer/propagation/__init__.py,sha256=dM61Rvq_oyksFQii8bRAUFjm2CZXksIsSf_5fSCjoQw,71 +ddtrace/opentracer/propagation/__pycache__/__init__.cpython-311.pyc,, +ddtrace/opentracer/propagation/__pycache__/binary.cpython-311.pyc,, +ddtrace/opentracer/propagation/__pycache__/http.cpython-311.pyc,, +ddtrace/opentracer/propagation/__pycache__/propagator.cpython-311.pyc,, +ddtrace/opentracer/propagation/__pycache__/text.cpython-311.pyc,, +ddtrace/opentracer/propagation/binary.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/opentracer/propagation/http.py,sha256=arRf_w6-nP5gi_bJKWpeC-8G8rXTxalUF1eazBwJveo,2562 +ddtrace/opentracer/propagation/propagator.py,sha256=It4jZZ0bf2yEwngyHLyXofoFd6WxVz1HqNmUTwziAdY,230 +ddtrace/opentracer/propagation/text.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/opentracer/settings.py,sha256=yinHnUraYQvARuRbs-CqpBKwg3pncm6umSdk8vT33VQ,1005 +ddtrace/opentracer/span.py,sha256=5RZDaDrNA6Gp_oCOBNA13cJsjKFVHirjnUVdQ-5UIuI,6494 +ddtrace/opentracer/span_context.py,sha256=RIsesab64GauFgWnolw9E4lDP9I6HTVsI2veg8hqbIw,2262 +ddtrace/opentracer/tags.py,sha256=J7h3lOXHloO69vDaMlERZCUxuEDnb8pz36jSKImPslE,466 +ddtrace/opentracer/tracer.py,sha256=SXwuAE6_4t1dM2bYp5rTYBJupBGh7EkWhq0jPdOajhE,16164 +ddtrace/opentracer/utils.py,sha256=oNiXe2G49awvP4CsYngbzmCpfVycF_u0LSv4RXFa8OQ,2171 +ddtrace/pin.py,sha256=cAXUJEMZdqWXyR9_oRqn7RmswXNv4Z-Oxm-mK3OMsdQ,7267 +ddtrace/profiling/__init__.py,sha256=55iYcZRG6WJMVRH5cF_iocmCVlPa3HyIVayeqimh9-Y,583 +ddtrace/profiling/__pycache__/__init__.cpython-311.pyc,, +ddtrace/profiling/__pycache__/_asyncio.cpython-311.pyc,, +ddtrace/profiling/__pycache__/_traceback.cpython-311.pyc,, +ddtrace/profiling/__pycache__/auto.cpython-311.pyc,, +ddtrace/profiling/__pycache__/event.cpython-311.pyc,, +ddtrace/profiling/__pycache__/profiler.cpython-311.pyc,, +ddtrace/profiling/__pycache__/recorder.cpython-311.pyc,, +ddtrace/profiling/__pycache__/scheduler.cpython-311.pyc,, +ddtrace/profiling/_asyncio.py,sha256=uQn3gCMVjTEAuZuzr6VUxz4aAp2pdLAz9olRXtL6m_U,1852 +ddtrace/profiling/_build.c,sha256=-IPUlnGO13bHG4wHBSnixKctNjQHPy8sB3JQd4SfyeY,166988 
+ddtrace/profiling/_build.cpython-311-x86_64-linux-gnu.so,sha256=ALhe1kK1EOtef-zXPnx9TkQb_cxdKLR4oKJE610ywtw,28336 +ddtrace/profiling/_build.pyi,sha256=qScjYCL2Bcn8kdVuBq80FWiOFoWnLAnUTER93amencA,58 +ddtrace/profiling/_build.pyx,sha256=3ScNzOvIqQswf00e3gswCDAJ2_vcvXzEAxKYTXiFuM0,146 +ddtrace/profiling/_threading.c,sha256=UEgDY9-nf_9GKN50BrzVp0_UWv6kJWPAx2L8L1eiBZg,477347 +ddtrace/profiling/_threading.cpython-311-x86_64-linux-gnu.so,sha256=WStL_UjyA8AeoFa2_6i9g1creGOC0n9gDNoF2ve4kPQ,120872 +ddtrace/profiling/_threading.pyi,sha256=KiFDR9TLKLjmCuQN6V26nBThaF6nGuyOlzGC8IKa4Vg,379 +ddtrace/profiling/_threading.pyx,sha256=4rW6W_5qQ10ycisDqr3cUXh8vUvkJNHNDPKQn3G1fAw,4884 +ddtrace/profiling/_traceback.py,sha256=gtTV5xW0HKO2azPwCNlx2PdJ0KnM4TcKo7EeJm8cSrU,111 +ddtrace/profiling/auto.py,sha256=9FKbtu11hUCOsN4PrtRkDTVXRGBSIh2REzpaOhBW0tA,294 +ddtrace/profiling/bootstrap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/profiling/bootstrap/__pycache__/__init__.cpython-311.pyc,, +ddtrace/profiling/bootstrap/__pycache__/sitecustomize.cpython-311.pyc,, +ddtrace/profiling/bootstrap/sitecustomize.py,sha256=zSmUmKEOjy4hq1Jiw3yIrhGUQRCl7W3LAHLlLbRbvU0,431 +ddtrace/profiling/collector/__init__.py,sha256=vRaJ6PmEuxDlatggriSI4DoiWR_oRH-oN8YKZ_6s2iU,2091 +ddtrace/profiling/collector/__pycache__/__init__.cpython-311.pyc,, +ddtrace/profiling/collector/__pycache__/_lock.cpython-311.pyc,, +ddtrace/profiling/collector/__pycache__/asyncio.cpython-311.pyc,, +ddtrace/profiling/collector/__pycache__/memalloc.cpython-311.pyc,, +ddtrace/profiling/collector/__pycache__/stack_event.cpython-311.pyc,, +ddtrace/profiling/collector/__pycache__/threading.cpython-311.pyc,, +ddtrace/profiling/collector/_lock.py,sha256=JGnZlXdf6sJrlxImsblKGsNBFkwdtiagsK7CbGRB64Q,7993 +ddtrace/profiling/collector/_memalloc.c,sha256=Ml4NnFRwLG94t38ohmCz6gzoY-JkniR0w2c_0wbCl_E,16129 +ddtrace/profiling/collector/_memalloc.cpython-311-x86_64-linux-gnu.so,sha256=eeYgPdCBc-HqNT-ipxO18TfL3a7muo9CQmobrpuL1ZU,28688 +ddtrace/profiling/collector/_memalloc.pyi,sha256=ENw5uCdruLqV36nO1QabiG-od5pEpelbFQSeOIMdlbA,463 +ddtrace/profiling/collector/_memalloc_heap.c,sha256=RZzQo-eQBrof43OUgV7-BsGbpVXgtgeNGDyCIvPkZxg,7257 +ddtrace/profiling/collector/_memalloc_heap.h,sha256=6u-TDLfnzpWonCYBxNBuP-m3cvYXIcFhciQlnh7sTVw,765 +ddtrace/profiling/collector/_memalloc_reentrant.h,sha256=0aoD1DdLXUSl81Bhp2Su5sIy1xVz8uQOvrj9Mmi1uIQ,1055 +ddtrace/profiling/collector/_memalloc_tb.c,sha256=Dl5TeSDTDxqLtFHlMV-nHzsuCRiSDBmTYnc9UTHD0C8,6574 +ddtrace/profiling/collector/_memalloc_tb.h,sha256=vOjplniALMGIaJa1w7pXyY1Tja-9dfc0UgcXZF5IIyg,1517 +ddtrace/profiling/collector/_pymacro.h,sha256=qpyla0EzoHaikKmLVUG4uRYHf0Qop3r1RuWC_kwKQ-w,344 +ddtrace/profiling/collector/_task.c,sha256=jTkmFu0scXUaptjL2s1ztWsOYmKIbD8_YgSk3j8DcAM,471728 +ddtrace/profiling/collector/_task.cpython-311-x86_64-linux-gnu.so,sha256=MX8pEldhkEUgbBzxW3oG0CA6zBS-S6Ijj9vU-gusPZY,120712 +ddtrace/profiling/collector/_task.pyi,sha256=S3p21udMqXIsTNSVTxdKjoxXR_nDXQK3jnk478lB14U,241 +ddtrace/profiling/collector/_task.pyx,sha256=l-gF6mqMeW1VDkBCTX37m4_jjSpAATvkXBxM9OPlxF0,4655 +ddtrace/profiling/collector/_traceback.c,sha256=I8oYytqC0EncJMwytbNfYJJW1p_d7aoq5F8QUHpgcnE,361265 +ddtrace/profiling/collector/_traceback.cpython-311-x86_64-linux-gnu.so,sha256=oQ77Q3v1v0CVlgAb90hcoPoZvmb6Xsqd2nr5KDoOWIE,87768 +ddtrace/profiling/collector/_traceback.pyi,sha256=pFYKACbj-50H5U2OUNYdiQt-N9rWBE3lKJ4la_7kCN8,302 +ddtrace/profiling/collector/_traceback.pyx,sha256=72dvyn_meGBX09H9DoOpWJkU82Yzq5ylzWnIX8kl1XE,3591 
+ddtrace/profiling/collector/_utils.h,sha256=zVoIoSfw9BSin0AO9OnkfyNKG_VGCU5hV4KWlCXatvM,10273 +ddtrace/profiling/collector/asyncio.py,sha256=IRhMR3xsYY8JTThQFOXaRNIFTTwCWznF3bAGObJYDyo,1250 +ddtrace/profiling/collector/memalloc.py,sha256=TUAO6s1BEVW0rJAjs7TEuRlskQSqF87NFnFieSs-6m8,8544 +ddtrace/profiling/collector/stack.c,sha256=96rpcYisDuZWJi6OdNW8ijCRDPixQYlRDNIf1MVhd_8,970390 +ddtrace/profiling/collector/stack.cpython-311-x86_64-linux-gnu.so,sha256=oNCm9K-aMulWmO6xsw4ui2z2JV_fV8A4fdBem5vcDzw,300672 +ddtrace/profiling/collector/stack.pyi,sha256=S1UO1iCbwIrI3V5GYplF1dXsxUDqW7R40T0tw-m37Rk,166 +ddtrace/profiling/collector/stack.pyx,sha256=MuBGNubfA3Mqfx0Lgc7cg_-WVfO3aWoSD7Z5QHl-NDs,20601 +ddtrace/profiling/collector/stack_event.py,sha256=2NzWb6KzjCZLVAhZq_LmJbs1OWSGKraBJXMKVvz1Q8E,547 +ddtrace/profiling/collector/threading.py,sha256=b9TK0j_nBX71GePtrtWSUDGYtXJvQq9Eg5gAPEcHJTo,944 +ddtrace/profiling/event.py,sha256=sRnYKPC15VEDnjcmqFPumMuosVfO-deOv7z1NZ_0jas,2272 +ddtrace/profiling/exporter/__init__.py,sha256=eXAfV1Fa6YIpsJLI23wZP_3CQOlKsj-qFPK6jtZvSmI,950 +ddtrace/profiling/exporter/__pycache__/__init__.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/file.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/http.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/pprof_312_pb2.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/pprof_319_pb2.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/pprof_3_pb2.cpython-311.pyc,, +ddtrace/profiling/exporter/__pycache__/pprof_421_pb2.cpython-311.pyc,, +ddtrace/profiling/exporter/file.py,sha256=09gHkRknzieU6r4TXMD0eb2oJ93pl0W00cvK068bwvY,1292 +ddtrace/profiling/exporter/http.py,sha256=uUaOuXVJo5YGRRQxh3lrH_gAz3FdlFpbGRRVB-ss66A,9387 +ddtrace/profiling/exporter/pprof.c,sha256=ynWy8C3UZ9Fq0gWSpSSBzZ2sI_4a8wZRB3-aYEVtuGE,1467390 +ddtrace/profiling/exporter/pprof.cpython-311-x86_64-linux-gnu.so,sha256=PbajgOQxi__H25pvJmEKAWUJbQN1ZVLwILT_hCPtcbE,516968 +ddtrace/profiling/exporter/pprof.proto,sha256=iWP1lCz5SeZQVec025rSgjwR6pX8WReAtRv3Eho0lJ4,8841 +ddtrace/profiling/exporter/pprof.pyi,sha256=eEfLtOjXlwXwGOJ1nS75_C4TTFfSbinYL6r9D_iDgW8,3917 +ddtrace/profiling/exporter/pprof.pyx,sha256=tHuHWOdNTPZiluBHTYybuIwyfIGH_s_8TogpEN6C1Ow,28222 +ddtrace/profiling/exporter/pprof_312_pb2.py,sha256=HuWbIP1UWvcAU5EhOYmXt42aW-5UfoD0oyQ-CPongs4,28539 +ddtrace/profiling/exporter/pprof_319_pb2.py,sha256=NDbRjoKkKw25uVofgPBYEvhbdSGKLUA_7zEemvrZXfE,6062 +ddtrace/profiling/exporter/pprof_3_pb2.py,sha256=lj4Su40oadwotRGrDGW8lG774Vs7d-Pu7AsEt_hzSuI,24821 +ddtrace/profiling/exporter/pprof_421_pb2.py,sha256=jgpfHGCEvbfvXF3ChGF5Iw-S6XDQ3yu3_2FB7LVGQCI,3736 +ddtrace/profiling/profiler.py,sha256=KN7irjGLDWjSRfzPmMfK1F5mBilivG7wsSue74r2hF0,14943 +ddtrace/profiling/recorder.py,sha256=XTHoRz1y6hNa25jkY3QZ2dU3nQMQsEwbXJCqhYutUUo,3127 +ddtrace/profiling/scheduler.py,sha256=IcaSJ86hm45ItRJczmY3lBCsl7OsGtqd0m68Epm3ehM,4089 +ddtrace/propagation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/propagation/__pycache__/__init__.cpython-311.pyc,, +ddtrace/propagation/__pycache__/_database_monitoring.cpython-311.pyc,, +ddtrace/propagation/__pycache__/_utils.cpython-311.pyc,, +ddtrace/propagation/__pycache__/http.cpython-311.pyc,, +ddtrace/propagation/_database_monitoring.py,sha256=OlGXOdBCzyAcipWY3w0UpOsdR5R1FMqxYNNql6uiT4w,3768 +ddtrace/propagation/_utils.py,sha256=EdttZRhZ85wNtZJ5nOKz50JWTW9RNqbKck_rGF3cNts,976 +ddtrace/propagation/http.py,sha256=IYjOXcKdRkcsYc9Uijub9OkF_GVf_2wFRfnFIxYIfn4,40548 
+ddtrace/provider.py,sha256=g-sJdm0Sa52I5cuiBZOemFNXxW816f6TscVZmYG0m7Q,5764 +ddtrace/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/runtime/__init__.py,sha256=Aif4-abbGD3jRhKav4seFbLglncJhqtwLMyCK7mWnSw,1795 +ddtrace/runtime/__pycache__/__init__.cpython-311.pyc,, +ddtrace/sampler.py,sha256=5aVQ4zc54HjOSc0BoHXyAL5kNx9MyM3AutzhqqEo-l4,12314 +ddtrace/sampling_rule.py,sha256=WXoE3fRi10-E1_IN63UxlNL1P_lpbvLFH2ljCQbtEp8,8074 +ddtrace/settings/__init__.py,sha256=K7w4Jo7H8WFNSUlCP_cTwhHHG4C-oO8LlC7yBpYb4x4,319 +ddtrace/settings/__pycache__/__init__.cpython-311.pyc,, +ddtrace/settings/__pycache__/_database_monitoring.cpython-311.pyc,, +ddtrace/settings/__pycache__/asm.cpython-311.pyc,, +ddtrace/settings/__pycache__/config.cpython-311.pyc,, +ddtrace/settings/__pycache__/dynamic_instrumentation.cpython-311.pyc,, +ddtrace/settings/__pycache__/exception_debugging.cpython-311.pyc,, +ddtrace/settings/__pycache__/exceptions.cpython-311.pyc,, +ddtrace/settings/__pycache__/http.cpython-311.pyc,, +ddtrace/settings/__pycache__/integration.cpython-311.pyc,, +ddtrace/settings/__pycache__/peer_service.cpython-311.pyc,, +ddtrace/settings/__pycache__/profiling.cpython-311.pyc,, +ddtrace/settings/_database_monitoring.py,sha256=PdUCzgpCEsOmJVv2hjLQ3_lePN_SuTu-Cxbdg-oHKFo,400 +ddtrace/settings/asm.py,sha256=fRgtjQ0PjrwlGdqeb1UVylQnuTyU4y08QWqCkUFala0,3913 +ddtrace/settings/config.py,sha256=2b2phe_FII2UTPFyTw1-3h9Ey4tadaVivN0YNjjdEWo,33175 +ddtrace/settings/dynamic_instrumentation.py,sha256=zQJhp0GRBjV18X2d7reZHV1xqkQobtqFxYx1P_XdwoM,3853 +ddtrace/settings/exception_debugging.py,sha256=pOSY-q8hsIiGwuSphZJOnUPayhQWu0acdRKGFVn0Dnw,330 +ddtrace/settings/exceptions.py,sha256=w0BejP6qaqnGpwCBsn8oJ65gEmTg1ESDmvDrKS_YRDo,163 +ddtrace/settings/http.py,sha256=5zdHBrGd21h048yGjXwzM-KcmBMV46u1YH7b7no-Q9c,2976 +ddtrace/settings/integration.py,sha256=S-OknNhNTnkwth-Vz3MR-p7nvINrBTg65yZKnDgY0iY,5956 +ddtrace/settings/peer_service.py,sha256=vIEu-APjj66OUfurtBIdyYabsHfY6qkgZjoOTOZA2fE,1587 +ddtrace/settings/profiling.py,sha256=2PlfUgC4qxeanCENiNSD7ouC0O8LhyQl4Z6lheKfNfU,6084 +ddtrace/sourcecode/__init__.py,sha256=u-FB5twfJUJiYiPbptsm-QkAmJfKXIqxb-6P20wzNOQ,1055 +ddtrace/sourcecode/__pycache__/__init__.cpython-311.pyc,, +ddtrace/sourcecode/__pycache__/_utils.cpython-311.pyc,, +ddtrace/sourcecode/__pycache__/setuptools_auto.cpython-311.pyc,, +ddtrace/sourcecode/_utils.py,sha256=W3czbBD5ZOm_pkaGJMzZJiVr2KlMqRswMrU50kB9Q-4,1777 +ddtrace/sourcecode/setuptools_auto.py,sha256=5Y-U4pYFylJ5OtKXUpH7wbPUxZsHITVqEamZnrc4c-I,722 +ddtrace/span.py,sha256=6Br4M9muWOHqYzxOGbiVbaYRskId6mbBwouNuGMz0HI,21471 +ddtrace/tracer.py,sha256=usToTIk7INNewSGttvrV2jYYJwcrfAvxapmwPodGz7w,43015 +ddtrace/tracing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/tracing/__pycache__/__init__.cpython-311.pyc,, +ddtrace/tracing/__pycache__/_span_link.cpython-311.pyc,, +ddtrace/tracing/__pycache__/trace_handlers.cpython-311.pyc,, +ddtrace/tracing/_span_link.py,sha256=2XIDqE0d5wW1CQBJTo8mMavb0kFfsAvl5SqiEcvSR_c,3411 +ddtrace/tracing/trace_handlers.py,sha256=ZyFyz98IR3pTQVas7Ah2NqlJ9arBHRYi5bxOK9OLD28,23324 +ddtrace/vendor/__init__.py,sha256=zqXumNo12FW-Pe66cv93BkDDTnSO3Se_ZOCgKguxIKE,2962 +ddtrace/vendor/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/contextvars/__init__.py,sha256=TRvgsEausfPDhHwEBJ-z2tLSMWRC6E_088v4ODkcnXg,3898 +ddtrace/vendor/contextvars/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__init__.py,sha256=Q8OE09M7ZpXatj2Ft47FVL_ClA5KY5upavDWXg1DKtE,2175 
+ddtrace/vendor/debtcollector/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__pycache__/_utils.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__pycache__/moves.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__pycache__/removals.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__pycache__/renames.cpython-311.pyc,, +ddtrace/vendor/debtcollector/__pycache__/updating.cpython-311.pyc,, +ddtrace/vendor/debtcollector/_utils.py,sha256=OXhJruEi9X2H0EaVIcNkByltmf2VKkczf2IAfOB71SE,6346 +ddtrace/vendor/debtcollector/moves.py,sha256=tapV2utvk2OtYcM_joe_l3Yg3XuTtEZkbeVT-tfKcCQ,8421 +ddtrace/vendor/debtcollector/removals.py,sha256=0q5MvpcXKP_W1Jx6Qp4URBwYqYc3q_rsJRgKT3VW-Kw,13890 +ddtrace/vendor/debtcollector/renames.py,sha256=Lok-0KYHlVCCho9vkFsFPFI6VtgUzk0z5NdUyEUH9wA,1715 +ddtrace/vendor/debtcollector/updating.py,sha256=2x54f0_UKhs_SsGHWzt0zpEFq6M0_RzmC7O42ZXaFLo,2426 +ddtrace/vendor/dogstatsd/__init__.py,sha256=IEA4PcepGTcsZQUetyoN10380qLjmtou7rX-CPURmW4,312 +ddtrace/vendor/dogstatsd/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/base.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/compat.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/container.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/context.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/context_async.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/format.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/__pycache__/route.cpython-311.pyc,, +ddtrace/vendor/dogstatsd/base.py,sha256=AwYw0ZKoF-BzGj1LugWxvvUHgg-RK3zujKPyyJ_88FM,46735 +ddtrace/vendor/dogstatsd/compat.py,sha256=BVmqw2TtupHEis35I9Cl_FVcb2wd1mGEiEADVWA-eJk,996 +ddtrace/vendor/dogstatsd/container.py,sha256=0doQtobT4ID8GKDwa-jUjUFr_NTsf0jgc2joaUT0y7o,2052 +ddtrace/vendor/dogstatsd/context.py,sha256=FU8kb8meKfgzSB6igyEM6iByqq2u0Hpr98FBqbt8Wco,2844 +ddtrace/vendor/dogstatsd/context_async.py,sha256=wJgbf9n_pHaN95I0I1RoxycjoK18L0ZBGUVrzcVsW4M,1543 +ddtrace/vendor/dogstatsd/format.py,sha256=maACZlLz8DuSv1sNyhQQQtgswyLPiR98HiHU6cwhxRE,1025 +ddtrace/vendor/dogstatsd/route.py,sha256=VOoCuD5XD9PPtEydVjpbz_FldgGEOd8Yazpt2YoVD-U,1253 +ddtrace/vendor/monotonic/__init__.py,sha256=1wJOetpAPQUteaP7IxAelyChpkITsxZf-eV4V2bTHrA,7117 +ddtrace/vendor/monotonic/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ddtrace/vendor/packaging/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/packaging/__pycache__/_structures.cpython-311.pyc,, +ddtrace/vendor/packaging/__pycache__/version.cpython-311.pyc,, +ddtrace/vendor/packaging/_structures.py,sha256=DCpKtb7u94_oqgVsIJQTrTyZcb3Gz7sSGbk9vYDMME0,1418 +ddtrace/vendor/packaging/version.py,sha256=MKL8nbKLPLGPouIwFvwSVnYRzNpkMo5AIcsa6LGqDF8,12219 +ddtrace/vendor/psutil/__init__.py,sha256=zLnjfXMB3aOXBkgwVbiaTzFYKvDpY3bRBS12BryUMn8,90581 +ddtrace/vendor/psutil/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_common.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_compat.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_psaix.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_psbsd.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_pslinux.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_psosx.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_psposix.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_pssunos.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/_pswindows.cpython-311.pyc,, +ddtrace/vendor/psutil/__pycache__/setup.cpython-311.pyc,, 
+ddtrace/vendor/psutil/_common.py,sha256=eA2kZtQPvlmMpKw_Fz3lswXSg35tthoM6OAb2UhdNNQ,20224 +ddtrace/vendor/psutil/_compat.py,sha256=c9jBW_7ZcDyl562gIeIh53drZ-oUVLPobOnuAJRhY2w,11191 +ddtrace/vendor/psutil/_psaix.py,sha256=IDY57Ybv5k4eSKH50lD7EJ9slfRoPJ-SLKyniXQFvkw,18564 +ddtrace/vendor/psutil/_psbsd.py,sha256=kvDbgjD38KtIZDhsTNbmMowvjSmKs701D-aYr_5wPQE,30566 +ddtrace/vendor/psutil/_pslinux.py,sha256=N-p3vkd-QG9132CIihCIZ47mPa_vwBWkHZf6x3pI6Xg,79839 +ddtrace/vendor/psutil/_psosx.py,sha256=JbNktzY5i5xQJTWNWdWbFSRPBZneb4_34Pm6GKyDiZs,17196 +ddtrace/vendor/psutil/_psposix.py,sha256=sQajYsNSDFV0HqN3GFf7Rvh8vu9eQLbzMpD2eqgakVk,6159 +ddtrace/vendor/psutil/_pssunos.py,sha256=ZayYw299DPsmA8TzA7UpuFuigW49OC0KrdrU4A1hOlY,25109 +ddtrace/vendor/psutil/_psutil_aix.c,sha256=fEoiu23Up7TxlsiUfeSWSubKRAO8te5we63MBZ6vOWI,31034 +ddtrace/vendor/psutil/_psutil_bsd.c,sha256=neGG1AtSb2f2TOSQ8w99XDn51yvjWjOZENvs5HsDv40,34969 +ddtrace/vendor/psutil/_psutil_common.c,sha256=HecdlMru0pRZAhuORp9xDelN4RaVK4muio4qS0Hfx-M,3136 +ddtrace/vendor/psutil/_psutil_common.h,sha256=mJvKu0yDQYOTpzbfkXnLOk3mFPBxAF56qsyyck29e9o,870 +ddtrace/vendor/psutil/_psutil_linux.c,sha256=PvXPmGMPnxWkqtnHwkOAZhIdfkISpPyklpGQc0la-kI,18930 +ddtrace/vendor/psutil/_psutil_linux.cpython-311-x86_64-linux-gnu.so,sha256=IyZF2O26ibj5PviSho1j16ljMmCgLvMkWyNsJFPO0v8,29376 +ddtrace/vendor/psutil/_psutil_osx.c,sha256=2HZi7AKgRu7lDVu6WYiJZ0Yc0c5Z0zffOzsbeuw1z6o,59036 +ddtrace/vendor/psutil/_psutil_posix.c,sha256=sHiCGhhXyF7t7AYpHvjsDXjVvc3NY3poNZpvNBa3CDo,18620 +ddtrace/vendor/psutil/_psutil_posix.cpython-311-x86_64-linux-gnu.so,sha256=E5JUP3XQTr8MqqYT125bNBaD5KnOZrZycT_DDGNWxxE,22696 +ddtrace/vendor/psutil/_psutil_posix.h,sha256=uWkIEoeQUPIFOuiMzvMo5XzPYkM6JtXsFVPNzyntlu0,256 +ddtrace/vendor/psutil/_psutil_sunos.c,sha256=-LKjVI0TP_n676lCoA-Wrh3u0GvplkrH1Oqc49uuO84,51184 +ddtrace/vendor/psutil/_psutil_windows.c,sha256=ACXdZpmW6Z8onahjDuHRYaBwnX9f0ddgxTsGLP7RPms,114213 +ddtrace/vendor/psutil/_pswindows.py,sha256=A2mlRzUvPxPrj8LETN25MAn6fRcLp12L-RP7bwP2TKM,37400 +ddtrace/vendor/psutil/arch/aix/common.c,sha256=zwuWitDFSTJzYjZVWWqGhrpyhueVxK5Yf0YU1KtCRzc,2284 +ddtrace/vendor/psutil/arch/aix/common.h,sha256=eLKtDGdfcV1kC_h2Wl4dc0kg2wr-qKs5WKIAoKiUx_o,894 +ddtrace/vendor/psutil/arch/aix/ifaddrs.c,sha256=RLyTuG2zstr53zze2KaS2qy0cnvnZ7GaB8ZGI7QD7vA,3839 +ddtrace/vendor/psutil/arch/aix/ifaddrs.h,sha256=Bq9yYVe8ggnStKukX9g7yhSv9teB9aLf3Ms_YSzwNaA,767 +ddtrace/vendor/psutil/arch/aix/net_connections.c,sha256=dw2HRw8Z9XCNeiWmcXk1mj_AukSrG2P_7zGwqNtfj20,8134 +ddtrace/vendor/psutil/arch/aix/net_connections.h,sha256=FWI-ALW2lInyvex3FPEB4Gnb_veTCtaxZah0g0jwyh4,355 +ddtrace/vendor/psutil/arch/aix/net_kernel_structs.h,sha256=3qhQr5WOzoQNjfCTatC6f1m0uJvYQKhvpA3kbW_O5N4,4059 +ddtrace/vendor/psutil/arch/freebsd/proc_socks.c,sha256=UcKuLJ3V5crqeB6z7KfQYWiNBzFaIBODDESMZ0iZwrI,10925 +ddtrace/vendor/psutil/arch/freebsd/proc_socks.h,sha256=8zCu73GZa_RUeX7DZVy--EDPAE2yzvMmW0L7KqrM1fk,263 +ddtrace/vendor/psutil/arch/freebsd/specific.c,sha256=l_QUghmeq9EFP-iamfFF1dRbkf8F-JM3lNtyXgV4ZSk,31802 +ddtrace/vendor/psutil/arch/freebsd/specific.h,sha256=2w0cLpvd91ZhLaFoRF436ag19e7JMqEtrN3QK62jlbY,1563 +ddtrace/vendor/psutil/arch/freebsd/sys_socks.c,sha256=PGWKfERuBDcIX1xEFT6TMotaFfRENknsUjNnpO6zsi0,9986 +ddtrace/vendor/psutil/arch/freebsd/sys_socks.h,sha256=_VQx5Cch72yHa5a73s7-6XVbY15kO_DOZ6vJb6Wj7Yg,265 +ddtrace/vendor/psutil/arch/netbsd/socks.c,sha256=NI4eN62mpbqgbxm2uJnMdUrqSPf9cL8k53wGFxNvYzk,12227 
+ddtrace/vendor/psutil/arch/netbsd/socks.h,sha256=N8aNAVjqpoywUwhlRSXxe0dxQMOaBL2JU-y7vQuOmNs,331 +ddtrace/vendor/psutil/arch/netbsd/specific.c,sha256=cwyxa9_JWAf5m3x3Wvl-JA63F8HbXeu8AEvIiLImkeI,17351 +ddtrace/vendor/psutil/arch/netbsd/specific.h,sha256=DiMug_IhzJMkSwJKRcLHWrJJlc3XhPLLFFvfdDyT_qI,1220 +ddtrace/vendor/psutil/arch/openbsd/specific.c,sha256=vPG1fdC8EePuBbg5BodoOB1CHUOKKlmJsYx1_QjNwJ0,22349 +ddtrace/vendor/psutil/arch/openbsd/specific.h,sha256=c0_IddDQN7zplGtruCD_4-oPfgjQZxS7S9o2KG6DG5k,1086 +ddtrace/vendor/psutil/arch/osx/process_info.c,sha256=sLR5Vb4TYvdcoER5RxtVlTns-9FuhnU2GPYAT_gl4_c,9875 +ddtrace/vendor/psutil/arch/osx/process_info.h,sha256=dPBFqGTCs34kI8Mw1w-Ot3QhS6PrnDVkGKAkRM5C-lg,602 +ddtrace/vendor/psutil/arch/solaris/environ.c,sha256=UDkpvJMkAjPPUAYhcg7OOmuW_hxbz7W2HOa-a9vbwOU,10188 +ddtrace/vendor/psutil/arch/solaris/environ.h,sha256=aQIDTp6uPYLPCyd7qPK1go_BQAthD7IyG2kH2zwjcZM,511 +ddtrace/vendor/psutil/arch/solaris/v10/ifaddrs.c,sha256=73ckKxa0IQjH38Wawr-G5_Tb922goQQZ4eXqDbQKsZg,3253 +ddtrace/vendor/psutil/arch/solaris/v10/ifaddrs.h,sha256=VpaMZuVkenSRVoCBbqaAaaZnC9S6JI-VFiShSv747Go,567 +ddtrace/vendor/psutil/arch/windows/global.c,sha256=FgYmyGBKIP0HrngAra05m_kkbWGiscb2FQ4PQ7wwTMY,6654 +ddtrace/vendor/psutil/arch/windows/global.h,sha256=ROqv8VNajiYBUJ2ltJrsX9Ic57590BB8hyDZqMVV4Hc,1841 +ddtrace/vendor/psutil/arch/windows/inet_ntop.c,sha256=0Iqg1WKrC5mZ84xBVuqeBKmFm4U2iZCBr3jalGNct5s,1407 +ddtrace/vendor/psutil/arch/windows/inet_ntop.h,sha256=REJAYcwsuTpXo5KDh7Fd37EiTM882S0xL9LPjsqC9v8,575 +ddtrace/vendor/psutil/arch/windows/ntextapi.h,sha256=SF141Jz80K5EWZgKilLMtt3UcsW7cet3P8xzoW5Sd1k,13760 +ddtrace/vendor/psutil/arch/windows/process_handles.c,sha256=OYyUdPT2Dh924Mj4MPWsDIg3hDz-Gp4NJv7gH8OWpcc,14681 +ddtrace/vendor/psutil/arch/windows/process_handles.h,sha256=3XlBjAsF8Fw77dnaBiMUR6rpkIhF30h0yb_-vogZ9Jc,293 +ddtrace/vendor/psutil/arch/windows/process_info.c,sha256=Aursf0zYxWuktaFmtUjOqrnxXgCxW-X13chE28bhA0Y,29326 +ddtrace/vendor/psutil/arch/windows/process_info.h,sha256=29okjO4nZXHhag1M8ZEy2tYUdSZNdV9RoVFNh4zmo_I,1000 +ddtrace/vendor/psutil/arch/windows/security.c,sha256=nwkIAw66q-D9TrMrrmmp6529w3PE5YeGZ2IgyzXKOoc,3672 +ddtrace/vendor/psutil/arch/windows/security.h,sha256=OEwyZUlqk6Ipsmdg6ho7E1XULgJlciOteIr4Kxf-9nc,365 +ddtrace/vendor/psutil/arch/windows/services.c,sha256=rW4LUZZiFfLdNI6I5lgEAobbJesEvrSJBrYeWEs41q8,13168 +ddtrace/vendor/psutil/arch/windows/services.h,sha256=4Wo7Tn_AgeXd6UQ68uGphuFxxxeXc0u734skNmzHG1s,730 +ddtrace/vendor/psutil/arch/windows/wmi.c,sha256=TWJJmrLfNLZ-8Lj0SBMPMe2J5gUNmF3TF-O3uNsFLEI,3408 +ddtrace/vendor/psutil/arch/windows/wmi.h,sha256=E-fBKzcyeLaj3e2b_26jxtchpm0X7YVvKkHp-ix3BPA,282 +ddtrace/vendor/psutil/setup.py,sha256=Ze5fZ_gj3Y6e4i3Wb446CgVdiICXpfLYkZVvlllusqo,7931 +ddtrace/vendor/sqlcommenter/__init__.py,sha256=w33CHxf0qVmKrWygWIRc_bw_vK6ukSXvEli5p22XQL0,1723 +ddtrace/vendor/sqlcommenter/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/wrapt/LICENSE,sha256=-ORXVVFm6-jW4-VBEREdObh8EdASF-dmJ7gpCSKYLUQ,1303 +ddtrace/vendor/wrapt/__init__.py,sha256=Bh0h33Iapc_qaoLWsWfaXK5xJz9KJExF7gQKIWYdSsg,1200 +ddtrace/vendor/wrapt/__pycache__/__init__.cpython-311.pyc,, +ddtrace/vendor/wrapt/__pycache__/arguments.cpython-311.pyc,, +ddtrace/vendor/wrapt/__pycache__/decorators.cpython-311.pyc,, +ddtrace/vendor/wrapt/__pycache__/importer.cpython-311.pyc,, +ddtrace/vendor/wrapt/__pycache__/setup.cpython-311.pyc,, +ddtrace/vendor/wrapt/__pycache__/wrappers.cpython-311.pyc,, 
+ddtrace/vendor/wrapt/_wrappers.c,sha256=X3MBBnD_-XcLbVoRW73-9vQ5Qbl5J5SCUH-unjzjBRk,95815 +ddtrace/vendor/wrapt/_wrappers.cpython-311-x86_64-linux-gnu.so,sha256=f9xbe8ViWi0KK0yz3fZjQOsS4tqXCZ1P8Jg6BxSa9mQ,65368 +ddtrace/vendor/wrapt/arguments.py,sha256=RF0nTEdPzPIewJ-jnSY42i4JSzK3ctjPABV1SJxLymg,1746 +ddtrace/vendor/wrapt/decorators.py,sha256=gNy1PVq9NNVDAB9tujaAVhb0xtVKSSzqT-hdGFeWM34,21332 +ddtrace/vendor/wrapt/importer.py,sha256=yxFgVg6-lRTbSVJ2oZbw1TPCtB98fIF4A_qi_Dh2JRc,9981 +ddtrace/vendor/wrapt/setup.py,sha256=CF2p_6VhgEGASbK2JH_MARGMt3GHe6uADKRqu573QY0,191 +ddtrace/vendor/wrapt/wrappers.py,sha256=ofd5HIVcZ8-YCcMH1SCeUcxsueYhRLcBDByrP_366yQ,35222 +ddtrace/version.py,sha256=uvdArJQL_C2QEfxXrcI1hs28bkcgLWfgwzpLUj50HI0,527 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL new file mode 100644 index 0000000..4497ba5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: false +Tag: cp311-cp311-manylinux_2_17_x86_64 +Tag: cp311-cp311-manylinux2014_x86_64 + diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt new file mode 100644 index 0000000..a2ad0bb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt @@ -0,0 +1,10 @@ +[console_scripts] +ddtrace-run = ddtrace.commands.ddtrace_run:main + +[opentelemetry_context] +ddcontextvars_context = ddtrace.opentelemetry._context:DDRuntimeContext + +[pytest11] +ddtrace = ddtrace.contrib.pytest.plugin +ddtrace.pytest_bdd = ddtrace.contrib.pytest_bdd.plugin +ddtrace.pytest_benchmark = ddtrace.contrib.pytest_benchmark.plugin diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt new file mode 100644 index 0000000..749bf29 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt @@ -0,0 +1 @@ +ddtrace diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store new file mode 100644 index 0000000..78912b3 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py new file mode 100644 index 0000000..cab7b7a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py @@ -0,0 +1,58 @@ +import sys + +LOADED_MODULES = frozenset(sys.modules.keys()) + +from ddtrace.internal.module import ModuleWatchdog + +ModuleWatchdog.install() + +# Acquire a reference to the threading module. Some parts of the library (e.g. +# the profiler) might be enabled programmatically and therefore might end up +# getting a reference to the tracee's threading module. By storing a reference +# to the threading module used by ddtrace here, we make it easy for those parts +# to get a reference to the right threading module. 
+import threading as _threading + +from ._logger import configure_ddtrace_logger + + +# configure ddtrace logger before other modules log +configure_ddtrace_logger() # noqa: E402 + +from .settings import _config as config + +if config._telemetry_enabled: + from ddtrace.internal import telemetry + + telemetry.install_excepthook() + # In order to support 3.12, we start the writer upon initialization. + # See https://github.com/python/cpython/pull/104826. + # Telemetry events will only be sent after the `app-started` is queued. + # This will occur when the agent writer starts. + telemetry.telemetry_writer.enable() + +from ._monkey import patch # noqa: E402 +from ._monkey import patch_all # noqa: E402 +from .internal.utils.deprecations import DDTraceDeprecationWarning # noqa: E402 +from .pin import Pin # noqa: E402 +from .settings import _config as config # noqa: E402 +from .span import Span # noqa: E402 +from .tracer import Tracer # noqa: E402 +from .version import get_version # noqa: E402 + + +__version__ = get_version() + +# a global tracer instance with integration settings +tracer = Tracer() + +__all__ = [ + "patch", + "patch_all", + "Pin", + "Span", + "tracer", + "Tracer", + "config", + "DDTraceDeprecationWarning", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py new file mode 100644 index 0000000..07bc149 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py @@ -0,0 +1,137 @@ +import collections +from copy import deepcopy +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Set # noqa:F401 + +import attr + +from .internal.logger import get_logger + + +try: + from typing import DefaultDict # noqa:F401 +except ImportError: + from collections import defaultdict as DefaultDict + +log = get_logger(__name__) + + +@attr.s(slots=True) +class Hooks(object): + """ + Hooks configuration object is used for registering and calling hook functions + + Example:: + + @config.falcon.hooks.on('request') + def on_request(span, request, response): + pass + """ + + _hooks = attr.ib(init=False, factory=lambda: collections.defaultdict(set), type=DefaultDict[str, Set]) + + def __deepcopy__(self, memodict=None): + hooks = Hooks() + hooks._hooks = deepcopy(self._hooks, memodict) + return hooks + + def register( + self, + hook, # type: Any + func=None, # type: Optional[Callable] + ): + # type: (...) -> Optional[Callable[..., Any]] + """ + Function used to register a hook for the provided name. 
+ + Example:: + + def on_request(span, request, response): + pass + + config.falcon.hooks.register('request', on_request) + + + If no function is provided then a decorator is returned:: + + @config.falcon.hooks.register('request') + def on_request(span, request, response): + pass + + :param hook: The name of the hook to register the function for + :type hook: object + :param func: The function to register, or ``None`` if a decorator should be returned + :type func: function, None + :returns: Either a function decorator if ``func is None``, otherwise ``None`` + :rtype: function, None + """ + # If they didn't provide a function, then return a decorator + if not func: + + def wrapper(func): + self.register(hook, func) + return func + + return wrapper + self._hooks[hook].add(func) + return None + + # Provide shorthand `on` method for `register` + # >>> @config.falcon.hooks.on('request') + # def on_request(span, request, response): + # pass + on = register + + def deregister( + self, + hook, # type: Any + func, # type: Callable + ): + # type: (...) -> None + """ + Function to deregister a function from a hook it was registered under + + Example:: + + @config.falcon.hooks.on('request') + def on_request(span, request, response): + pass + + config.falcon.hooks.deregister('request', on_request) + + :param hook: The name of the hook to register the function for + :type hook: object + :param func: Function hook to register + :type func: function + """ + if hook in self._hooks: + try: + self._hooks[hook].remove(func) + except KeyError: + pass + + def emit( + self, + hook, # type: Any + *args, # type: Any + **kwargs, # type: Any + ): + # type: (...) -> None + """ + Function used to call registered hook functions. + + :param hook: The hook to call functions for + :type hook: str + :param args: Positional arguments to pass to the hook functions + :type args: list + :param kwargs: Keyword arguments to pass to the hook functions + :type kwargs: dict + """ + # Call registered hooks + for func in self._hooks.get(hook, ()): + try: + func(*args, **kwargs) + except Exception: + log.error("Failed to run hook %s function %s", hook, func, exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py new file mode 100644 index 0000000..f7989f8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py @@ -0,0 +1,88 @@ +import logging +from logging.handlers import RotatingFileHandler +import os + +from ddtrace.internal.utils.formats import asbool + + +DD_LOG_FORMAT = "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] {}- %(message)s".format( + "[dd.service=%(dd.service)s dd.env=%(dd.env)s dd.version=%(dd.version)s" + " dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] " +) + +DEFAULT_FILE_SIZE_BYTES = 15 << 20 # 15 MB + + +def configure_ddtrace_logger(): + # type: () -> None + """Configures ddtrace log levels and file paths. + + Customization is possible with the environment variables: + ``DD_TRACE_DEBUG``, ``DD_TRACE_LOG_FILE_LEVEL``, and ``DD_TRACE_LOG_FILE`` + + By default, when none of the settings have been changed, ddtrace loggers + inherit from the root logger in the logging module and no logs are written to a file. + + When DD_TRACE_DEBUG has been enabled: + - Logs are propagated up so that they appear in the application logs if a file path wasn't provided + - Logs are routed to a file when DD_TRACE_LOG_FILE is specified, using the log level in DD_TRACE_LOG_FILE_LEVEL. 
+ - Child loggers inherit from the parent ddtrace logger + + Note(s): + 1) The ddtrace-run logs under commands/ddtrace_run do not follow DD_TRACE_LOG_FILE if DD_TRACE_DEBUG is enabled. + This is because ddtrace-run calls ``logging.basicConfig()`` when DD_TRACE_DEBUG is enabled, so + this configuration is not applied. + 2) Python 2: If the application is using DD_TRACE_DEBUG=true, logging will need to be configured, + ie: ``logging.basicConfig()``. + + """ + ddtrace_logger = logging.getLogger("ddtrace") + if asbool(os.environ.get("DD_TRACE_LOG_STREAM_HANDLER", "true")): + ddtrace_logger.addHandler(logging.StreamHandler()) + + _configure_ddtrace_debug_logger(ddtrace_logger) + _configure_ddtrace_file_logger(ddtrace_logger) + + +def _configure_ddtrace_debug_logger(logger): + if asbool(os.environ.get("DD_TRACE_DEBUG", "false")): + logger.setLevel(logging.DEBUG) + logger.debug("debug mode has been enabled for the ddtrace logger") + + +def _configure_ddtrace_file_logger(logger): + log_file_level = os.environ.get("DD_TRACE_LOG_FILE_LEVEL", "DEBUG").upper() + try: + file_log_level_value = getattr(logging, log_file_level) + except AttributeError: + raise ValueError( + "DD_TRACE_LOG_FILE_LEVEL is invalid. Log level must be CRITICAL/ERROR/WARNING/INFO/DEBUG.", + log_file_level, + ) + + log_path = os.environ.get("DD_TRACE_LOG_FILE") + if log_path is not None: + log_path = os.path.abspath(log_path) + max_file_bytes = int(os.environ.get("DD_TRACE_LOG_FILE_SIZE_BYTES", DEFAULT_FILE_SIZE_BYTES)) + num_backup = 1 + ddtrace_file_handler = RotatingFileHandler( + filename=log_path, mode="a", maxBytes=max_file_bytes, backupCount=num_backup + ) + log_format = "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] - %(message)s" + log_formatter = logging.Formatter(log_format) + ddtrace_file_handler.setLevel(file_log_level_value) + ddtrace_file_handler.setFormatter(log_formatter) + logger.addHandler(ddtrace_file_handler) + logger.debug("ddtrace logs will be routed to %s", log_path) + + +def _configure_log_injection(): + """ + Ensures that logging is patched before we inject trace information into logs. 
+ """ + from ddtrace import patch + + patch(logging=True) + ddtrace_logger = logging.getLogger("ddtrace") + for handler in ddtrace_logger.handlers: + handler.setFormatter(logging.Formatter(DD_LOG_FORMAT)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py new file mode 100644 index 0000000..6090e9e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py @@ -0,0 +1,273 @@ +import importlib +import os +import threading +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.vendor.wrapt.importer import when_imported + +from .internal.logger import get_logger +from .internal.utils import formats +from .settings import _config as config +from .settings.asm import config as asm_config + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import List # noqa:F401 + from typing import Union # noqa:F401 + + +log = get_logger(__name__) + +# Default set of modules to automatically patch or not +PATCH_MODULES = { + "aioredis": True, + "aiomysql": True, + "aredis": True, + "asyncio": True, + "boto": True, + "botocore": True, + "bottle": True, + "cassandra": True, + "celery": True, + "consul": True, + "django": True, + "elasticsearch": True, + "algoliasearch": True, + "futures": True, + "gevent": True, + "graphql": True, + "grpc": True, + "httpx": True, + "kafka": True, + "mongoengine": True, + "mysql": True, + "mysqldb": True, + "pymysql": True, + "mariadb": True, + "psycopg": True, + "pylibmc": True, + "pymemcache": True, + "pymongo": True, + "redis": True, + "rediscluster": True, + "requests": True, + "rq": True, + "sanic": True, + "snowflake": False, + "sqlalchemy": False, # Prefer DB client instrumentation + "sqlite3": True, + "aiohttp": True, # requires asyncio (Python 3.4+) + "aiohttp_jinja2": True, + "aiopg": True, + "aiobotocore": False, + "httplib": False, + "urllib3": False, + "vertica": True, + "molten": True, + "jinja2": True, + "mako": True, + "flask": True, + "flask_login": True, + "kombu": False, + "starlette": True, + # Ignore some web framework integrations that might be configured explicitly in code + "falcon": True, + "pyramid": True, + # Auto-enable logging if the environment variable DD_LOGS_INJECTION is true + "logbook": config.logs_injection, + "logging": config.logs_injection, + "loguru": config.logs_injection, + "structlog": config.logs_injection, + "pynamodb": True, + "pyodbc": True, + "fastapi": True, + "dogpile_cache": True, + "yaaredis": True, + "asyncpg": True, + "aws_lambda": True, # patch only in AWS Lambda environments + "tornado": False, + "openai": True, + "langchain": True, + "subprocess": True, + "unittest": True, + "coverage": False, +} + + +# this information would make sense to live in the contrib modules, +# but that would mean getting it would require importing those modules, +# which we need to avoid until as late as possible. +CONTRIB_DEPENDENCIES = { + "tornado": ("futures",), +} + + +_LOCK = threading.Lock() +_PATCHED_MODULES = set() + +# Module names that need to be patched for a given integration. If the module +# name coincides with the integration name, then there is no need to add an +# entry here. 
+_MODULES_FOR_CONTRIB = { + "elasticsearch": ( + "elasticsearch", + "elasticsearch1", + "elasticsearch2", + "elasticsearch5", + "elasticsearch6", + "elasticsearch7", + # Starting with version 8, the default transport which is what we + # actually patch is found in the separate elastic_transport package + "elastic_transport", + "opensearchpy", + ), + "psycopg": ( + "psycopg", + "psycopg2", + ), + "snowflake": ("snowflake.connector",), + "cassandra": ("cassandra.cluster",), + "dogpile_cache": ("dogpile.cache",), + "mysqldb": ("MySQLdb",), + "futures": ("concurrent.futures.thread",), + "vertica": ("vertica_python",), + "aws_lambda": ("datadog_lambda",), + "httplib": ("http.client",), + "kafka": ("confluent_kafka",), +} + + +DEFAULT_MODULES_PREFIX = "ddtrace.contrib" + + +class PatchException(Exception): + """Wraps regular `Exception` class when patching modules""" + + pass + + +class ModuleNotFoundException(PatchException): + pass + + +def _on_import_factory(module, prefix="ddtrace.contrib", raise_errors=True, patch_indicator=True): + # type: (str, str, bool, Union[bool, List[str]]) -> Callable[[Any], None] + """Factory to create an import hook for the provided module name""" + + def on_import(hook): + if config._telemetry_enabled: + from .internal import telemetry + # Import and patch module + path = "%s.%s" % (prefix, module) + try: + imported_module = importlib.import_module(path) + except Exception as e: + if raise_errors: + raise + error_msg = "failed to import ddtrace module %r when patching on import" % (path,) + log.error(error_msg, exc_info=True) + if config._telemetry_enabled: + telemetry.telemetry_writer.add_integration(module, False, PATCH_MODULES.get(module) is True, error_msg) + telemetry.telemetry_writer.add_count_metric( + "tracers", "integration_errors", 1, (("integration_name", module), ("error_type", type(e).__name__)) + ) + else: + imported_module.patch() + if config._telemetry_enabled: + if hasattr(imported_module, "get_versions"): + versions = imported_module.get_versions() + for name, v in versions.items(): + telemetry.telemetry_writer.add_integration( + name, True, PATCH_MODULES.get(module) is True, "", version=v + ) + else: + version = imported_module.get_version() + telemetry.telemetry_writer.add_integration( + module, True, PATCH_MODULES.get(module) is True, "", version=version + ) + + if hasattr(imported_module, "patch_submodules"): + imported_module.patch_submodules(patch_indicator) + + return on_import + + +def patch_all(**patch_modules): + # type: (bool) -> None + """Automatically patches all available modules. + + In addition to ``patch_modules``, an override can be specified via an + environment variable, ``DD_TRACE__ENABLED`` for each module. + + ``patch_modules`` have the highest precedence for overriding. + + :param dict patch_modules: Override whether particular modules are patched or not. + + >>> patch_all(redis=False, cassandra=False) + """ + modules = PATCH_MODULES.copy() + + # The enabled setting can be overridden by environment variables + for module, _enabled in modules.items(): + env_var = "DD_TRACE_%s_ENABLED" % module.upper() + if env_var in os.environ: + modules[module] = formats.asbool(os.environ[env_var]) + + # Enable all dependencies for the module + if modules[module]: + for dep in CONTRIB_DEPENDENCIES.get(module, ()): + modules[dep] = True + + # Arguments take precedence over the environment and the defaults. 
+ modules.update(patch_modules) + + patch(raise_errors=False, **modules) + if asm_config._iast_enabled: + from ddtrace.appsec._iast._patch_modules import patch_iast + + patch_iast() + + +def patch(raise_errors=True, patch_modules_prefix=DEFAULT_MODULES_PREFIX, **patch_modules): + # type: (bool, str, Union[List[str], bool]) -> None + """Patch only a set of given modules. + + :param bool raise_errors: Raise error if one patch fail. + :param dict patch_modules: List of modules to patch. + + >>> patch(psycopg=True, elasticsearch=True) + """ + contribs = {c: patch_indicator for c, patch_indicator in patch_modules.items() if patch_indicator} + for contrib, patch_indicator in contribs.items(): + # Check if we have the requested contrib. + if not os.path.isfile(os.path.join(os.path.dirname(__file__), "contrib", contrib, "__init__.py")): + if raise_errors: + raise ModuleNotFoundException( + "integration module ddtrace.contrib.%s does not exist, " + "module will not have tracing available" % contrib + ) + modules_to_patch = _MODULES_FOR_CONTRIB.get(contrib, (contrib,)) + for module in modules_to_patch: + # Use factory to create handler to close over `module` and `raise_errors` values from this loop + when_imported(module)( + _on_import_factory(contrib, raise_errors=raise_errors, patch_indicator=patch_indicator) + ) + + # manually add module to patched modules + with _LOCK: + _PATCHED_MODULES.add(contrib) + + log.info( + "Configured ddtrace instrumentation for %s integration(s). The following modules have been patched: %s", + len(contribs), + ",".join(contribs), + ) + + +def _get_patched_modules(): + # type: () -> List[str] + """Get the list of patched modules""" + with _LOCK: + return sorted(_PATCHED_MODULES) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py new file mode 100644 index 0000000..2d773d0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py @@ -0,0 +1,6 @@ +""" +Limits for trace data. +""" + +MAX_SPAN_META_KEY_LEN = 200 +MAX_SPAN_META_VALUE_LEN = 25000 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py new file mode 100644 index 0000000..9c520e5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py @@ -0,0 +1,16 @@ +# file generated by setuptools_scm +# don't change, don't track in version control +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple, Union + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+else: + VERSION_TUPLE = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE + +__version__ = version = '2.6.5' +__version_tuple__ = version_tuple = (2, 6, 5) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py new file mode 100644 index 0000000..60e3b14 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py @@ -0,0 +1,163 @@ +import base64 +import gzip +import json +import sys +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace._trace._limits import MAX_SPAN_META_VALUE_LEN +from ddtrace.appsec import _processor as appsec_processor +from ddtrace.appsec._asm_request_context import add_context_callback +from ddtrace.appsec._asm_request_context import call_waf_callback +from ddtrace.appsec._asm_request_context import remove_context_callback +from ddtrace.appsec._constants import API_SECURITY +from ddtrace.appsec._constants import SPAN_DATA_NAMES +from ddtrace.internal.logger import get_logger +from ddtrace.internal.metrics import Metrics +from ddtrace.internal.service import Service +from ddtrace.settings.asm import config as asm_config + + +if TYPE_CHECKING: + from typing import Optional # noqa:F401 + + +log = get_logger(__name__) +metrics = Metrics(namespace="datadog.api_security") +_sentinel = object() + + +class TooLargeSchemaException(Exception): + pass + + +class APIManager(Service): + COLLECTED = [ + ("REQUEST_HEADERS_NO_COOKIES", API_SECURITY.REQUEST_HEADERS_NO_COOKIES, dict), + ("REQUEST_COOKIES", API_SECURITY.REQUEST_COOKIES, dict), + ("REQUEST_QUERY", API_SECURITY.REQUEST_QUERY, dict), + ("REQUEST_PATH_PARAMS", API_SECURITY.REQUEST_PATH_PARAMS, dict), + ("REQUEST_BODY", API_SECURITY.REQUEST_BODY, None), + ("RESPONSE_HEADERS_NO_COOKIES", API_SECURITY.RESPONSE_HEADERS_NO_COOKIES, dict), + ("RESPONSE_BODY", API_SECURITY.RESPONSE_BODY, None), + ] + + _instance = None # type: Optional[APIManager] + + SAMPLE_START_VALUE = 1.0 - sys.float_info.epsilon + + @classmethod + def enable(cls): + # type: () -> None + if cls._instance is not None: + log.debug("%s already enabled", cls.__name__) + return + + log.debug("Enabling %s", cls.__name__) + metrics.enable() + cls._instance = cls() + cls._instance.start() + log.debug("%s enabled", cls.__name__) + + @classmethod + def disable(cls): + # type: () -> None + if cls._instance is None: + log.debug("%s not enabled", cls.__name__) + return + + log.debug("Disabling %s", cls.__name__) + cls._instance.stop() + cls._instance = None + metrics.disable() + log.debug("%s disabled", cls.__name__) + + def __init__(self): + # type: () -> None + super(APIManager, self).__init__() + + self.current_sampling_value = self.SAMPLE_START_VALUE + self._schema_meter = metrics.get_meter("schema") + log.debug("%s initialized", self.__class__.__name__) + + def _stop_service(self): + # type: () -> None + remove_context_callback(self._schema_callback, global_callback=True) + + def _start_service(self): + # type: () -> None + add_context_callback(self._schema_callback, 
global_callback=True) + + def _should_collect_schema(self, env): + method = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_METHOD) + route = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_ROUTE) + sample_rate = asm_config._api_security_sample_rate + # Framework is not fully supported + if not method or not route: + log.debug("unsupported groupkey for api security [method %s] [route %s]", bool(method), bool(route)) + return False + # Rate limit per route + self.current_sampling_value += sample_rate + if self.current_sampling_value >= 1.0: + self.current_sampling_value -= 1.0 + return True + return False + + def _schema_callback(self, env): + from ddtrace.appsec._utils import _appsec_apisec_features_is_active + + if env.span is None or not _appsec_apisec_features_is_active(): + return + root = env.span._local_root or env.span + if not root or any(meta_name in root._meta for _, meta_name, _ in self.COLLECTED): + return + + try: + if not self._should_collect_schema(env): + return + except Exception: + log.warning("Failed to sample request for schema generation", exc_info=True) + + # we need the request content type on the span + try: + headers = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, _sentinel) + if headers is not _sentinel: + appsec_processor._set_headers(root, headers, kind="request") + except Exception: + log.debug("Failed to enrich request span with headers", exc_info=True) + + waf_payload = {} + for address, _, transform in self.COLLECTED: + if not asm_config._api_security_parse_response_body and address == "RESPONSE_BODY": + continue + value = env.waf_addresses.get(SPAN_DATA_NAMES[address], _sentinel) + if value is _sentinel: + log.debug("no value for %s", address) + continue + if transform is not None: + value = transform(value) + waf_payload[address] = value + if waf_payload: + waf_payload["PROCESSOR_SETTINGS"] = {"extract-schema": True} + result = call_waf_callback(waf_payload) + if result is None: + return + for meta, schema in result.items(): + b64_gzip_content = b"" + try: + b64_gzip_content = base64.b64encode( + gzip.compress(json.dumps(schema, separators=",:").encode()) + ).decode() + if len(b64_gzip_content) >= MAX_SPAN_META_VALUE_LEN: + raise TooLargeSchemaException + root._meta[meta] = b64_gzip_content + except Exception as e: + self._schema_meter.increment("errors", tags={"exc": e.__class__.__name__, "address": address}) + self._log_limiter.limit( + log.warning, + "Failed to get schema from %r [schema length=%d]:\n%s", + address, + len(b64_gzip_content), + repr(value)[:256], + exc_info=True, + ) + self._schema_meter.increment("spans") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py new file mode 100644 index 0000000..1e1f28e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py @@ -0,0 +1,549 @@ +import contextlib +import functools +import json +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generator +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from urllib import parse + +from ddtrace.appsec import _handlers +from ddtrace.appsec._constants import APPSEC +from ddtrace.appsec._constants import SPAN_DATA_NAMES +from ddtrace.appsec._constants import WAF_CONTEXT_NAMES +from ddtrace.appsec._iast._utils import _is_iast_enabled +from ddtrace.internal import core +from ddtrace.internal.constants import 
REQUEST_PATH_PARAMS +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config +from ddtrace.span import Span + + +log = get_logger(__name__) + +# Stopgap module for providing ASM context for the blocking features wrapping some contextvars. + +_WAF_ADDRESSES = "waf_addresses" +_CALLBACKS = "callbacks" +_TELEMETRY = "telemetry" +_CONTEXT_CALL = "context" +_WAF_CALL = "waf_run" +_BLOCK_CALL = "block" +_WAF_RESULTS = "waf_results" + + +GLOBAL_CALLBACKS: Dict[str, List[Callable]] = {} + + +class ASM_Environment: + """ + an object of this class contains all asm data (waf and telemetry) + for a single request. It is bound to a single asm request context. + It is contained into a ContextVar. + """ + + def __init__(self, active: bool = False): + self.active: bool = active + self.span: Optional[Span] = None + self.span_asm_context: Optional[contextlib.AbstractContextManager] = None + self.waf_addresses: Dict[str, Any] = {} + self.callbacks: Dict[str, Any] = {} + self.telemetry: Dict[str, Any] = {} + self.addresses_sent: Set[str] = set() + self.must_call_globals: bool = True + self.waf_triggers: List[Dict[str, Any]] = [] + + +def _get_asm_context() -> ASM_Environment: + env = core.get_item("asm_env") + if env is None: + env = ASM_Environment() + core.set_item("asm_env", env) + return env + + +def free_context_available() -> bool: + env = _get_asm_context() + return env.active and env.span is None + + +def in_context() -> bool: + env = _get_asm_context() + return env.active + + +def is_blocked() -> bool: + try: + env = _get_asm_context() + if not env.active or env.span is None: + return False + return bool(core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=env.span)) + except BaseException: + return False + + +def register(span: Span, span_asm_context=None) -> None: + env = _get_asm_context() + if not env.active: + log.debug("registering a span with no active asm context") + return + env.span = span + env.span_asm_context = span_asm_context + + +def unregister(span: Span) -> None: + env = _get_asm_context() + if env.span_asm_context is not None and env.span is span: + env.span_asm_context.__exit__(None, None, None) + elif env.span is span and env.must_call_globals: + # needed for api security flushing information before end of the span + for function in GLOBAL_CALLBACKS.get(_CONTEXT_CALL, []): + function(env) + env.must_call_globals = False + + +def flush_waf_triggers(env: ASM_Environment) -> None: + if env.waf_triggers and env.span: + root_span = env.span._local_root or env.span + old_tags = root_span.get_tag(APPSEC.JSON) + if old_tags is not None: + try: + new_json = json.loads(old_tags) + if "triggers" not in new_json: + new_json["triggers"] = [] + new_json["triggers"].extend(env.waf_triggers) + except BaseException: + new_json = {"triggers": env.waf_triggers} + else: + new_json = {"triggers": env.waf_triggers} + root_span.set_tag_str(APPSEC.JSON, json.dumps(new_json, separators=(",", ":"))) + + env.waf_triggers = [] + + +GLOBAL_CALLBACKS[_CONTEXT_CALL] = [flush_waf_triggers] + + +class _DataHandler: + """ + An object of this class is created by each asm request context. + It handles the creation and destruction of ASM_Environment object. + It allows the ASM context to be reentrant. 
+ """ + + main_id = 0 + + def __init__(self): + _DataHandler.main_id += 1 + env = ASM_Environment(True) + + self._id = _DataHandler.main_id + self.active = True + self.execution_context = core.ExecutionContext(__name__, **{"asm_env": env}) + + env.telemetry[_WAF_RESULTS] = [], [], [] + env.callbacks[_CONTEXT_CALL] = [] + + def finalise(self): + if self.active: + env = self.execution_context.get_item("asm_env") + callbacks = GLOBAL_CALLBACKS.get(_CONTEXT_CALL, []) if env.must_call_globals else [] + env.must_call_globals = False + if env is not None and env.callbacks is not None and env.callbacks.get(_CONTEXT_CALL): + callbacks += env.callbacks.get(_CONTEXT_CALL) + if callbacks: + if env is not None: + for function in callbacks: + function(env) + self.execution_context.end() + self.active = False + + +def set_value(category: str, address: str, value: Any) -> None: + env = _get_asm_context() + if not env.active: + log.debug("setting %s address %s with no active asm context", category, address) + return + asm_context_attr = getattr(env, category, None) + if asm_context_attr is not None: + asm_context_attr[address] = value + + +def set_headers_response(headers: Any) -> None: + if headers is not None: + set_waf_address(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, headers, _get_asm_context().span) + + +def set_body_response(body_response): + # local import to avoid circular import + from ddtrace.appsec._utils import parse_response_body + + parsed_body = parse_response_body(body_response) + + if parse_response_body is not None: + set_waf_address(SPAN_DATA_NAMES.RESPONSE_BODY, parsed_body) + + +def set_waf_address(address: str, value: Any, span: Optional[Span] = None) -> None: + if address == SPAN_DATA_NAMES.REQUEST_URI_RAW: + parse_address = parse.urlparse(value) + no_scheme = parse.ParseResult("", "", *parse_address[2:]) + waf_value = parse.urlunparse(no_scheme) + set_value(_WAF_ADDRESSES, address, waf_value) + else: + set_value(_WAF_ADDRESSES, address, value) + if span is None: + span = _get_asm_context().span + if span: + core.set_item(address, value, span=span) + + +def get_value(category: str, address: str, default: Any = None) -> Any: + env = _get_asm_context() + if not env.active: + log.debug("getting %s address %s with no active asm context", category, address) + return default + asm_context_attr = getattr(env, category, None) + if asm_context_attr is not None: + return asm_context_attr.get(address, default) + return default + + +def get_waf_address(address: str, default: Any = None) -> Any: + return get_value(_WAF_ADDRESSES, address, default=default) + + +def get_waf_addresses(default: Any = None) -> Any: + env = _get_asm_context() + if not env.active: + log.debug("getting WAF addresses with no active asm context") + return default + return env.waf_addresses + + +def add_context_callback(function, global_callback: bool = False) -> None: + if global_callback: + callbacks = GLOBAL_CALLBACKS.setdefault(_CONTEXT_CALL, []) + else: + callbacks = get_value(_CALLBACKS, _CONTEXT_CALL) + if callbacks is not None: + callbacks.append(function) + + +def remove_context_callback(function, global_callback: bool = False) -> None: + if global_callback: + callbacks = GLOBAL_CALLBACKS.get(_CONTEXT_CALL) + else: + callbacks = get_value(_CALLBACKS, _CONTEXT_CALL) + if callbacks: + callbacks[:] = list([cb for cb in callbacks if cb != function]) + + +def set_waf_callback(value) -> None: + set_value(_CALLBACKS, _WAF_CALL, value) + + +def call_waf_callback(custom_data: Optional[Dict[str, Any]] = None) -> 
Optional[Dict[str, str]]: + if not asm_config._asm_enabled: + return None + callback = get_value(_CALLBACKS, _WAF_CALL) + if callback: + return callback(custom_data) + else: + log.warning("WAF callback called but not set") + return None + + +def set_ip(ip: Optional[str]) -> None: + if ip is not None: + set_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP, ip, _get_asm_context().span) + + +def get_ip() -> Optional[str]: + return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HTTP_IP) + + +# Note: get/set headers use Any since we just carry the headers here without changing or using them +# and different frameworks use different types that we don't want to force it into a Mapping at the +# early point set_headers is usually called + + +def set_headers(headers: Any) -> None: + if headers is not None: + set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, headers, _get_asm_context().span) + + +def get_headers() -> Optional[Any]: + return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, {}) + + +def set_headers_case_sensitive(case_sensitive: bool) -> None: + set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, case_sensitive, _get_asm_context().span) + + +def get_headers_case_sensitive() -> bool: + return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, False) # type : ignore + + +def set_block_request_callable(_callable: Optional[Callable], *_) -> None: + """ + Sets a callable that could be use to do a best-effort to block the request. If + the callable need any params, like headers, they should be curried with + functools.partial. + """ + if _callable: + set_value(_CALLBACKS, _BLOCK_CALL, _callable) + + +def block_request() -> None: + """ + Calls or returns the stored block request callable, if set. 
+ """ + _callable = get_value(_CALLBACKS, _BLOCK_CALL) + if _callable: + _callable() + else: + log.debug("Block request called but block callable not set by framework") + + +def get_data_sent() -> Set[str]: + env = _get_asm_context() + if not env.active: + log.debug("getting addresses sent with no active asm context") + return set() + return env.addresses_sent + + +def asm_request_context_set( + remote_ip: Optional[str] = None, + headers: Any = None, + headers_case_sensitive: bool = False, + block_request_callable: Optional[Callable] = None, +) -> None: + set_ip(remote_ip) + set_headers(headers) + set_headers_case_sensitive(headers_case_sensitive) + set_block_request_callable(block_request_callable) + + +def set_waf_results(result_data, result_info, is_blocked) -> None: + three_lists = get_waf_results() + if three_lists is not None: + list_results_data, list_result_info, list_is_blocked = three_lists + list_results_data.append(result_data) + list_result_info.append(result_info) + list_is_blocked.append(is_blocked) + + +def get_waf_results() -> Optional[Tuple[List[Any], List[Any], List[bool]]]: + return get_value(_TELEMETRY, _WAF_RESULTS) + + +def reset_waf_results() -> None: + set_value(_TELEMETRY, _WAF_RESULTS, ([], [], [])) + + +def store_waf_results_data(data) -> None: + if not data: + return + env = _get_asm_context() + if not env.active: + log.debug("storing waf results data with no active asm context") + return + if not env.span: + log.debug("storing waf results data with no active span") + return + for d in data: + d["span_id"] = env.span.span_id + env.waf_triggers.extend(data) + + +@contextlib.contextmanager +def asm_request_context_manager( + remote_ip: Optional[str] = None, + headers: Any = None, + headers_case_sensitive: bool = False, + block_request_callable: Optional[Callable] = None, +) -> Generator[Optional[_DataHandler], None, None]: + """ + The ASM context manager + """ + resources = _start_context(remote_ip, headers, headers_case_sensitive, block_request_callable) + if resources is not None: + try: + yield resources + finally: + _end_context(resources) + else: + yield None + + +def _start_context( + remote_ip: Optional[str], headers: Any, headers_case_sensitive: bool, block_request_callable: Optional[Callable] +) -> Optional[_DataHandler]: + if asm_config._asm_enabled or asm_config._iast_enabled: + resources = _DataHandler() + if asm_config._asm_enabled: + asm_request_context_set(remote_ip, headers, headers_case_sensitive, block_request_callable) + _handlers.listen() + listen_context_handlers() + return resources + return None + + +def _on_context_started(ctx): + resources = _start_context( + ctx.get_item("remote_addr"), + ctx.get_item("headers"), + ctx.get_item("headers_case_sensitive"), + ctx.get_item("block_request_callable"), + ) + ctx.set_item("resources", resources) + + +def _end_context(resources): + resources.finalise() + core.set_item("asm_env", None) + + +def _on_context_ended(ctx): + resources = ctx.get_item("resources") + if resources is not None: + _end_context(resources) + + +core.on("context.started.wsgi.__call__", _on_context_started) +core.on("context.ended.wsgi.__call__", _on_context_ended) +core.on("context.started.django.traced_get_response", _on_context_started) +core.on("context.ended.django.traced_get_response", _on_context_ended) +core.on("django.traced_get_response.pre", set_block_request_callable) + + +def _on_wrapped_view(kwargs): + return_value = [None, None] + # if Appsec is enabled, we can try to block as we have the path parameters at that 
point + if asm_config._asm_enabled and in_context(): + log.debug("Flask WAF call for Suspicious Request Blocking on request") + if kwargs: + set_waf_address(REQUEST_PATH_PARAMS, kwargs) + call_waf_callback() + if is_blocked(): + callback_block = get_value(_CALLBACKS, "flask_block") + return_value[0] = callback_block + + # If IAST is enabled, taint the Flask function kwargs (path parameters) + if _is_iast_enabled() and kwargs: + from ddtrace.appsec._iast._taint_tracking import OriginType + from ddtrace.appsec._iast._taint_tracking import taint_pyobject + from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return return_value + + _kwargs = {} + for k, v in kwargs.items(): + _kwargs[k] = taint_pyobject( + pyobject=v, source_name=k, source_value=v, source_origin=OriginType.PATH_PARAMETER + ) + return_value[1] = _kwargs + return return_value + + +def _on_set_request_tags(request, span, flask_config): + if _is_iast_enabled(): + from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source + from ddtrace.appsec._iast._taint_tracking import OriginType + from ddtrace.appsec._iast._taint_utils import taint_structure + from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor + + _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME) + _set_metric_iast_instrumented_source(OriginType.COOKIE) + + if not AppSecIastSpanProcessor.is_span_analyzed(span._local_root or span): + return + + request.cookies = taint_structure( + request.cookies, + OriginType.COOKIE_NAME, + OriginType.COOKIE, + override_pyobject_tainted=True, + ) + + +def _on_pre_tracedrequest(ctx): + _on_set_request_tags(ctx.get_item("flask_request"), ctx["current_span"], ctx.get_item("flask_config")) + block_request_callable = ctx.get_item("block_request_callable") + current_span = ctx["current_span"] + if asm_config._asm_enabled: + set_block_request_callable(functools.partial(block_request_callable, current_span)) + if core.get_item(WAF_CONTEXT_NAMES.BLOCKED): + block_request() + + +def _set_headers_and_response(response, headers, *_): + if not asm_config._asm_enabled: + return + + from ddtrace.appsec._utils import _appsec_apisec_features_is_active + + if _appsec_apisec_features_is_active(): + if headers: + # start_response was not called yet, set the HTTP response headers earlier + if isinstance(headers, dict): + list_headers = list(headers.items()) + else: + list_headers = list(headers) + set_headers_response(list_headers) + if response and asm_config._api_security_parse_response_body: + set_body_response(response) + + +def _call_waf_first(integration, *_): + if not asm_config._asm_enabled: + return + + log.debug("%s WAF call for Suspicious Request Blocking on request", integration) + return call_waf_callback() + + +def _call_waf(integration, *_): + if not asm_config._asm_enabled: + return + + log.debug("%s WAF call for Suspicious Request Blocking on response", integration) + return call_waf_callback() + + +def _on_block_decided(callback): + if not asm_config._asm_enabled: + return + + set_value(_CALLBACKS, "flask_block", callback) + + +def _get_headers_if_appsec(): + if asm_config._asm_enabled: + return get_headers() + + +def listen_context_handlers(): + core.on("flask.finalize_request.post", _set_headers_and_response) + core.on("flask.wrapped_view", _on_wrapped_view, "callback_and_args") + core.on("flask._patched_request", _on_pre_tracedrequest) + core.on("wsgi.block_decided", _on_block_decided) + core.on("flask.start_response", 
_call_waf_first, "waf") + + core.on("django.start_response.post", _call_waf_first) + core.on("django.finalize_response", _call_waf) + core.on("django.after_request_headers", _get_headers_if_appsec, "headers") + core.on("django.extract_body", _get_headers_if_appsec, "headers") + core.on("django.after_request_headers.finalize", _set_headers_and_response) + core.on("flask.set_request_tags", _on_set_request_tags) + + core.on("asgi.start_request", _call_waf_first) + core.on("asgi.start_response", _call_waf) + core.on("asgi.finalize_response", _set_headers_and_response) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py new file mode 100644 index 0000000..b5ac759 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py @@ -0,0 +1,77 @@ +import base64 +import enum +import os +from typing import Optional + +import ddtrace +from ddtrace.appsec._utils import _appsec_rc_features_is_enabled +from ddtrace.settings.asm import config as asm_config + + +def _appsec_rc_file_is_not_static(): + return "DD_APPSEC_RULES" not in os.environ + + +def _asm_feature_is_required(): + flags = _rc_capabilities() + return Flags.ASM_ACTIVATION in flags or Flags.ASM_API_SECURITY_SAMPLE_RATE in flags + + +class Flags(enum.IntFlag): + ASM_ACTIVATION = 1 << 1 + ASM_IP_BLOCKING = 1 << 2 + ASM_DD_RULES = 1 << 3 + ASM_EXCLUSIONS = 1 << 4 + ASM_REQUEST_BLOCKING = 1 << 5 + ASM_ASM_RESPONSE_BLOCKING = 1 << 6 + ASM_USER_BLOCKING = 1 << 7 + ASM_CUSTOM_RULES = 1 << 8 + ASM_CUSTOM_BLOCKING_RESPONSE = 1 << 9 + ASM_TRUSTED_IPS = 1 << 10 + ASM_API_SECURITY_SAMPLE_RATE = 1 << 11 + + +_ALL_ASM_BLOCKING = ( + Flags.ASM_IP_BLOCKING + | Flags.ASM_DD_RULES + | Flags.ASM_EXCLUSIONS + | Flags.ASM_REQUEST_BLOCKING + | Flags.ASM_ASM_RESPONSE_BLOCKING + | Flags.ASM_USER_BLOCKING + | Flags.ASM_CUSTOM_RULES + | Flags.ASM_CUSTOM_RULES + | Flags.ASM_CUSTOM_BLOCKING_RESPONSE +) + + +def _rc_capabilities(test_tracer: Optional[ddtrace.Tracer] = None) -> Flags: + tracer = ddtrace.tracer if test_tracer is None else test_tracer + value = Flags(0) + if ddtrace.config._remote_config_enabled: + if _appsec_rc_features_is_enabled(): + value |= Flags.ASM_ACTIVATION + if tracer._appsec_processor and _appsec_rc_file_is_not_static(): + value |= _ALL_ASM_BLOCKING + if asm_config._api_security_enabled: + value |= Flags.ASM_API_SECURITY_SAMPLE_RATE + return value + + +def _appsec_rc_capabilities(test_tracer: Optional[ddtrace.Tracer] = None) -> str: + r"""return the bit representation of the composed capabilities in base64 + bit 0: Reserved + bit 1: ASM 1-click Activation + bit 2: ASM Ip blocking + + Int Number -> binary number -> bytes representation -> base64 representation + ASM Activation: + 2 -> 10 -> b'\x02' -> "Ag==" + ASM Ip blocking: + 4 -> 100 -> b'\x04' -> "BA==" + ASM Activation and ASM Ip blocking: + 6 -> 110 -> b'\x06' -> "Bg==" + ... 
+ 256 -> 100000000 -> b'\x01\x00' -> b'AQA=' + """ + value = _rc_capabilities(test_tracer=test_tracer) + return base64.b64encode(value.to_bytes((value.bit_length() + 7) // 8, "big")).decode() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py new file mode 100644 index 0000000..fc0f4a4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py @@ -0,0 +1,216 @@ +import os +from typing import Any +from typing import Iterator + +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.constants import REQUEST_PATH_PARAMS +from ddtrace.internal.constants import RESPONSE_HEADERS +from ddtrace.internal.constants import STATUS_403_TYPE_AUTO + + +class Constant_Class(type): + """ + metaclass for Constant Classes + - You can access constants with APPSEC.ENV or APPSEC["ENV"] + - Direct assignment will fail: APPSEC.ENV = "something" raise TypeError, like other immutable types + - Constant Classes can be iterated: + for constant_name, constant_value in APPSEC: ... + """ + + def __setattr__(self, __name: str, __value: Any) -> None: + raise TypeError("Constant class does not support item assignment: %s.%s" % (self.__name__, __name)) + + def __iter__(self) -> Iterator[str]: + def aux(): + for t in self.__dict__.items(): + if not t[0].startswith("_"): + yield t + + return aux() + + def get(self, k: str, default: Any = None) -> Any: + return self.__dict__.get(k, default) + + def __contains__(self, k: str) -> bool: + return k in self.__dict__ + + def __getitem__(self, k: str) -> Any: + return self.__dict__[k] + + +class APPSEC(metaclass=Constant_Class): + """Specific constants for AppSec""" + + ENV = "DD_APPSEC_ENABLED" + ENABLED = "_dd.appsec.enabled" + JSON = "_dd.appsec.json" + EVENT_RULE_VERSION = "_dd.appsec.event_rules.version" + EVENT_RULE_ERRORS = "_dd.appsec.event_rules.errors" + EVENT_RULE_LOADED = "_dd.appsec.event_rules.loaded" + EVENT_RULE_ERROR_COUNT = "_dd.appsec.event_rules.error_count" + WAF_DURATION = "_dd.appsec.waf.duration" + WAF_DURATION_EXT = "_dd.appsec.waf.duration_ext" + WAF_TIMEOUTS = "_dd.appsec.waf.timeouts" + WAF_VERSION = "_dd.appsec.waf.version" + ORIGIN_VALUE = "appsec" + CUSTOM_EVENT_PREFIX = "appsec.events" + USER_LOGIN_EVENT_PREFIX = "_dd.appsec.events.users.login" + USER_LOGIN_EVENT_PREFIX_PUBLIC = "appsec.events.users.login" + USER_LOGIN_EVENT_SUCCESS_TRACK = "appsec.events.users.login.success.track" + USER_LOGIN_EVENT_FAILURE_TRACK = "appsec.events.users.login.failure.track" + USER_SIGNUP_EVENT = "appsec.events.users.signup.track" + AUTO_LOGIN_EVENTS_SUCCESS_MODE = "_dd.appsec.events.users.login.success.auto.mode" + AUTO_LOGIN_EVENTS_FAILURE_MODE = "_dd.appsec.events.users.login.failure.auto.mode" + BLOCKED = "appsec.blocked" + EVENT = "appsec.event" + AUTOMATIC_USER_EVENTS_TRACKING = "DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING" + USER_MODEL_LOGIN_FIELD = "DD_USER_MODEL_LOGIN_FIELD" + USER_MODEL_EMAIL_FIELD = "DD_USER_MODEL_EMAIL_FIELD" + USER_MODEL_NAME_FIELD = "DD_USER_MODEL_NAME_FIELD" + + +class IAST(metaclass=Constant_Class): + """Specific constants for IAST""" + + ENV = "DD_IAST_ENABLED" + ENV_DEBUG = "_DD_IAST_DEBUG" + TELEMETRY_REPORT_LVL = "DD_IAST_TELEMETRY_VERBOSITY" + LAZY_TAINT = "_DD_IAST_LAZY_TAINT" + JSON = "_dd.iast.json" + ENABLED = "_dd.iast.enabled" + CONTEXT_KEY = "_iast_data" + PATCH_MODULES = "_DD_IAST_PATCH_MODULES" + DENY_MODULES = "_DD_IAST_DENY_MODULES" + SEP_MODULES = "," + REQUEST_IAST_ENABLED = 
"_dd.iast.request_enabled" + + +class IAST_SPAN_TAGS(metaclass=Constant_Class): + """Specific constants for IAST span tags""" + + TELEMETRY_REQUEST_TAINTED = "_dd.iast.telemetry.request.tainted" + TELEMETRY_EXECUTED_SINK = "_dd.iast.telemetry.executed.sink" + + +class WAF_DATA_NAMES(metaclass=Constant_Class): + """string names used by the waf library for requesting data from requests""" + + REQUEST_BODY = "server.request.body" + REQUEST_QUERY = "server.request.query" + REQUEST_HEADERS_NO_COOKIES = "server.request.headers.no_cookies" + REQUEST_URI_RAW = "server.request.uri.raw" + REQUEST_METHOD = "server.request.method" + REQUEST_PATH_PARAMS = "server.request.path_params" + REQUEST_COOKIES = "server.request.cookies" + REQUEST_HTTP_IP = "http.client_ip" + REQUEST_USER_ID = "usr.id" + RESPONSE_STATUS = "server.response.status" + RESPONSE_HEADERS_NO_COOKIES = "server.response.headers.no_cookies" + RESPONSE_BODY = "server.response.body" + PROCESSOR_SETTINGS = "waf.context.processor" + + +class SPAN_DATA_NAMES(metaclass=Constant_Class): + """string names used by the library for tagging data from requests in context or span""" + + REQUEST_BODY = "http.request.body" + REQUEST_QUERY = "http.request.query" + REQUEST_HEADERS_NO_COOKIES = "http.request.headers" + REQUEST_HEADERS_NO_COOKIES_CASE = "http.request.headers_case_sensitive" + REQUEST_URI_RAW = "http.request.uri" + REQUEST_ROUTE = "http.request.route" + REQUEST_METHOD = "http.request.method" + REQUEST_PATH_PARAMS = REQUEST_PATH_PARAMS + REQUEST_COOKIES = "http.request.cookies" + REQUEST_HTTP_IP = "http.request.remote_ip" + REQUEST_USER_ID = "usr.id" + RESPONSE_STATUS = "http.response.status" + RESPONSE_HEADERS_NO_COOKIES = RESPONSE_HEADERS + RESPONSE_BODY = "http.response.body" + + +class API_SECURITY(metaclass=Constant_Class): + """constants related to API Security""" + + ENABLED = "_dd.appsec.api_security.enabled" + ENV_VAR_ENABLED = "DD_API_SECURITY_ENABLED" + PARSE_RESPONSE_BODY = "DD_API_SECURITY_PARSE_RESPONSE_BODY" + REQUEST_HEADERS_NO_COOKIES = "_dd.appsec.s.req.headers" + REQUEST_COOKIES = "_dd.appsec.s.req.cookies" + REQUEST_QUERY = "_dd.appsec.s.req.query" + REQUEST_PATH_PARAMS = "_dd.appsec.s.req.params" + REQUEST_BODY = "_dd.appsec.s.req.body" + RESPONSE_HEADERS_NO_COOKIES = "_dd.appsec.s.res.headers" + RESPONSE_BODY = "_dd.appsec.s.res.body" + SAMPLE_RATE = "DD_API_SECURITY_REQUEST_SAMPLE_RATE" + MAX_PAYLOAD_SIZE = 0x1000000 # 16MB maximum size + + +class WAF_CONTEXT_NAMES(metaclass=Constant_Class): + """string names used by the library for tagging data from requests in context""" + + RESULTS = "http.request.waf.results" + BLOCKED = HTTP_REQUEST_BLOCKED + CALLBACK = "http.request.waf.callback" + + +class WAF_ACTIONS(metaclass=Constant_Class): + """string identifier for actions returned by the waf""" + + BLOCK = "block" + PARAMETERS = "parameters" + TYPE = "type" + ID = "id" + DEFAULT_PARAMETERS = STATUS_403_TYPE_AUTO + BLOCK_ACTION = "block_request" + REDIRECT_ACTION = "redirect_request" + DEFAULT_ACTIONS = { + BLOCK: { + ID: BLOCK, + TYPE: BLOCK_ACTION, + PARAMETERS: DEFAULT_PARAMETERS, + } + } + + +class PRODUCTS(metaclass=Constant_Class): + """string identifier for remote config products""" + + ASM = "ASM" + ASM_DATA = "ASM_DATA" + ASM_DD = "ASM_DD" + ASM_FEATURES = "ASM_FEATURES" + + +class LOGIN_EVENTS_MODE(metaclass=Constant_Class): + """ + string identifier for the mode of the user login events. Can be: + DISABLED: automatic login events are disabled. 
+ SAFE: automatic login events are enabled but will only store non-PII fields (id, pk uid...) + EXTENDED: automatic login events are enabled and will store potentially PII fields (username, + email, ...). + SDK: manually issued login events using the SDK. + """ + + DISABLED = "disabled" + SAFE = "safe" + EXTENDED = "extended" + SDK = "sdk" + + +class DEFAULT(metaclass=Constant_Class): + ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) + RULES = os.path.join(ROOT_DIR, "rules.json") + TRACE_RATE_LIMIT = 100 + WAF_TIMEOUT = 5.0 # float (milliseconds) + APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP = ( + rb"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?" + rb"(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization" + ) + APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = ( + rb"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)" + rb"key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)" + rb'(?:\s*=[^;]|"\s*:\s*"[^"]+")|bearer\s+[a-z0-9\._\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}' + rb"|ey[I-L][\w=-]+\.ey[I-L][\w=-]+(?:\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY[\-]{5}[^\-]+[\-]" + rb"{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}" + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py new file mode 100644 index 0000000..cee6eda --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py @@ -0,0 +1,214 @@ +import ctypes +import time +from typing import Any +from typing import Dict +from typing import List +from typing import Optional + +from ddtrace.appsec._constants import DEFAULT +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +LOGGER = get_logger(__name__) + +if asm_config._asm_libddwaf_available: + try: + from .ddwaf_types import DDWafRulesType + from .ddwaf_types import _observator + from .ddwaf_types import ddwaf_config + from .ddwaf_types import ddwaf_context_capsule + from .ddwaf_types import ddwaf_get_version + from .ddwaf_types import ddwaf_object + from .ddwaf_types import ddwaf_object_free + from .ddwaf_types import ddwaf_result + from .ddwaf_types import ddwaf_run + from .ddwaf_types import py_ddwaf_context_init + from .ddwaf_types import py_ddwaf_init + from .ddwaf_types import py_ddwaf_known_addresses + from .ddwaf_types import py_ddwaf_update + + _DDWAF_LOADED = True + except BaseException: + _DDWAF_LOADED = False + LOGGER.warning("DDWaf features disabled. 
WARNING: Dynamic Library not loaded", exc_info=True) +else: + _DDWAF_LOADED = False + + +class DDWaf_result(object): + __slots__ = ["data", "actions", "runtime", "total_runtime", "timeout", "truncation", "derivatives"] + + def __init__( + self, + data: Optional[str], + actions: List[str], + runtime: float, + total_runtime: float, + timeout: bool, + truncation: int, + derivatives: Dict[str, Any], + ): + self.data = data + self.actions = actions + self.runtime = runtime + self.total_runtime = total_runtime + self.timeout = timeout + self.truncation = truncation + self.derivatives = derivatives + + +class DDWaf_info(object): + __slots__ = ["loaded", "failed", "errors", "version"] + + def __init__(self, loaded: int, failed: int, errors: Dict[str, Any], version: str): + self.loaded = loaded + self.failed = failed + self.errors = errors + self.version = version + + def __repr__(self): + return "{loaded: %d, failed: %d, errors: %s, version: %s}" % ( + self.loaded, + self.failed, + str(self.errors), + self.version, + ) + + +if _DDWAF_LOADED: + + class DDWaf(object): + def __init__( + self, + ruleset_map: Dict[str, Any], + obfuscation_parameter_key_regexp: bytes, + obfuscation_parameter_value_regexp: bytes, + ): + config = ddwaf_config( + key_regex=obfuscation_parameter_key_regexp, value_regex=obfuscation_parameter_value_regexp + ) + diagnostics = ddwaf_object() + ruleset_map_object = ddwaf_object.create_without_limits(ruleset_map) + self._handle = py_ddwaf_init(ruleset_map_object, ctypes.byref(config), ctypes.byref(diagnostics)) + self._set_info(diagnostics) + info = self.info + if not self._handle or info.failed: + # We keep the handle alive in case of errors, as some valid rules can be loaded + # at the same time some invalid ones are rejected + LOGGER.debug( + "DDWAF.__init__: invalid rules\n ruleset: %s\nloaded:%s\nerrors:%s\n", + ruleset_map_object.struct, + info.failed, + info.errors, + ) + ddwaf_object_free(ctypes.byref(ruleset_map_object)) + + @property + def required_data(self) -> List[str]: + return py_ddwaf_known_addresses(self._handle) if self._handle else [] + + def _set_info(self, diagnostics: ddwaf_object) -> None: + info_struct = diagnostics.struct + rules = info_struct.get("rules", {}) if info_struct else {} # type: ignore + errors_result = rules.get("errors", {}) + version = info_struct.get("ruleset_version", "") if info_struct else "" # type: ignore + self._info = DDWaf_info(len(rules.get("loaded", [])), len(rules.get("failed", [])), errors_result, version) + ddwaf_object_free(diagnostics) + + @property + def info(self) -> DDWaf_info: + return self._info + + def update_rules(self, new_rules: Dict[str, DDWafRulesType]) -> bool: + """update the rules of the WAF instance. 
return True if an error occurs.""" + rules = ddwaf_object.create_without_limits(new_rules) + diagnostics = ddwaf_object() + result = py_ddwaf_update(self._handle, rules, diagnostics) + self._set_info(diagnostics) + ddwaf_object_free(rules) + if result: + LOGGER.debug("DDWAF.update_rules success.\ninfo %s", self.info) + self._handle = result + return True + else: + LOGGER.debug("DDWAF.update_rules: keeping the previous handle.") + return False + + def _at_request_start(self) -> Optional[ddwaf_context_capsule]: + ctx = None + if self._handle: + ctx = py_ddwaf_context_init(self._handle) + if not ctx: + LOGGER.debug("DDWaf._at_request_start: failure to create the context.") + return ctx + + def _at_request_end(self) -> None: + pass + + def run( + self, + ctx: ddwaf_context_capsule, + data: DDWafRulesType, + timeout_ms: float = DEFAULT.WAF_TIMEOUT, + ) -> DDWaf_result: + start = time.time() + if not ctx: + LOGGER.debug("DDWaf.run: dry run. no context created.") + return DDWaf_result(None, [], 0, (time.time() - start) * 1e6, False, 0, {}) + + result = ddwaf_result() + observator = _observator() + wrapper = ddwaf_object(data, observator=observator) + error = ddwaf_run(ctx.ctx, wrapper, None, ctypes.byref(result), int(timeout_ms * 1000)) + if error < 0: + LOGGER.debug("run DDWAF error: %d\ninput %s\nerror %s", error, wrapper.struct, self.info.errors) + return DDWaf_result( + result.events.struct, + result.actions.struct, + result.total_runtime / 1e3, + (time.time() - start) * 1e6, + result.timeout, + observator.truncation, + result.derivatives.struct, + ) + + def version() -> str: + return ddwaf_get_version().decode("UTF-8") + +else: + # Mockup of the DDWaf class doing nothing + class DDWaf(object): # type: ignore + required_data: List[str] = [] + info: DDWaf_info = DDWaf_info(0, 0, {}, "") + + def __init__( + self, + rules: Dict[str, Any], + obfuscation_parameter_key_regexp: bytes, + obfuscation_parameter_value_regexp: bytes, + ): + self._handle = None + + def run( + self, + ctx: Any, + data: Any, + timeout_ms: float = DEFAULT.WAF_TIMEOUT, + ) -> DDWaf_result: + LOGGER.debug("DDWaf features disabled. dry run") + return DDWaf_result(None, [], 0.0, 0.0, False, 0, {}) + + def update_rules(self, _: Dict[str, Any]) -> bool: + LOGGER.debug("DDWaf features disabled. dry update") + return False + + def _at_request_start(self) -> None: + return None + + def _at_request_end(self) -> None: + pass + + def version() -> str: + LOGGER.debug("DDWaf features disabled. null version") + return "0.0.0" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py new file mode 100644 index 0000000..aecfd79 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py @@ -0,0 +1,555 @@ +import ctypes +import ctypes.util +from enum import IntEnum +from platform import machine +from platform import system +from typing import Any +from typing import Dict +from typing import List +from typing import Union + +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +DDWafRulesType = Union[None, int, str, List[Any], Dict[str, Any]] + +log = get_logger(__name__) + +# +# Dynamic loading of libddwaf. 
For now it requires the file or a link to be in current directory +# + +if system() == "Linux": + try: + ctypes.CDLL(ctypes.util.find_library("rt"), mode=ctypes.RTLD_GLOBAL) + except BaseException: # nosec + pass + +ARCHI = machine().lower() + +# 32-bit-Python on 64-bit-Windows + +ddwaf = ctypes.CDLL(asm_config._asm_libddwaf) +# +# Constants +# + +DDWAF_MAX_STRING_LENGTH = 4096 +DDWAF_MAX_CONTAINER_DEPTH = 20 +DDWAF_MAX_CONTAINER_SIZE = 256 +DDWAF_NO_LIMIT = 1 << 31 +DDWAF_DEPTH_NO_LIMIT = 1000 +_TRUNC_STRING_LENGTH = 1 +_TRUNC_CONTAINER_DEPTH = 4 +_TRUNC_CONTAINER_SIZE = 2 + + +class DDWAF_OBJ_TYPE(IntEnum): + DDWAF_OBJ_INVALID = 0 + # Value shall be decoded as a int64_t (or int32_t on 32bits platforms). + DDWAF_OBJ_SIGNED = 1 << 0 + # Value shall be decoded as a uint64_t (or uint32_t on 32bits platforms). + DDWAF_OBJ_UNSIGNED = 1 << 1 + # Value shall be decoded as a UTF-8 string of length nbEntries. + DDWAF_OBJ_STRING = 1 << 2 + # Value shall be decoded as an array of ddwaf_object of length nbEntries, each item having no parameterName. + DDWAF_OBJ_ARRAY = 1 << 3 + # Value shall be decoded as an array of ddwaf_object of length nbEntries, each item having a parameterName. + DDWAF_OBJ_MAP = 1 << 4 + # Value shall be decode as bool + DDWAF_OBJ_BOOL = 1 << 5 + # 64-bit float (or double) type + DDWAF_OBJ_FLOAT = 1 << 6 + # Null type, only used for its semantical value + DDWAF_OBJ_NULL = 1 << 7 + + +class DDWAF_RET_CODE(IntEnum): + DDWAF_ERR_INTERNAL = -3 + DDWAF_ERR_INVALID_OBJECT = -2 + DDWAF_ERR_INVALID_ARGUMENT = -1 + DDWAF_OK = 0 + DDWAF_MATCH = 1 + + +class DDWAF_LOG_LEVEL(IntEnum): + DDWAF_LOG_TRACE = 0 + DDWAF_LOG_DEBUG = 1 + DDWAF_LOG_INFO = 2 + DDWAF_LOG_WARN = 3 + DDWAF_LOG_ERROR = 4 + DDWAF_LOG_OFF = 5 + + +# +# Objects Definitions +# + +# obj_struct = DDWafRulesType + + +class _observator: + def __init__(self): + self.truncation = 0 + + +# to allow cyclic references, ddwaf_object fields are defined later +class ddwaf_object(ctypes.Structure): + # "type" define how to read the "value" union field + # defined in ddwaf.h + # 1 is intValue + # 2 is uintValue + # 4 is stringValue as UTF-8 encoded + # 8 is array of length "nbEntries" without parameterName + # 16 is a map : array of length "nbEntries" with parameterName + # 32 is boolean + + def __init__( + self, + struct: DDWafRulesType = None, + observator: _observator = _observator(), # noqa : B008 + max_objects: int = DDWAF_MAX_CONTAINER_SIZE, + max_depth: int = DDWAF_MAX_CONTAINER_DEPTH, + max_string_length: int = DDWAF_MAX_STRING_LENGTH, + ) -> None: + def truncate_string(string: bytes) -> bytes: + if len(string) > max_string_length - 1: + observator.truncation |= _TRUNC_STRING_LENGTH + # difference of 1 to take null char at the end on the C side into account + return string[: max_string_length - 1] + return string + + if isinstance(struct, bool): + ddwaf_object_bool(self, struct) + elif isinstance(struct, int): + ddwaf_object_signed(self, struct) + elif isinstance(struct, str): + ddwaf_object_string(self, truncate_string(struct.encode("UTF-8", errors="ignore"))) + elif isinstance(struct, bytes): + ddwaf_object_string(self, truncate_string(struct)) + elif isinstance(struct, float): + ddwaf_object_float(self, struct) + elif isinstance(struct, list): + if max_depth <= 0: + observator.truncation |= _TRUNC_CONTAINER_DEPTH + max_objects = 0 + array = ddwaf_object_array(self) + for counter_object, elt in enumerate(struct): + if counter_object >= max_objects: + observator.truncation |= _TRUNC_CONTAINER_SIZE + break + obj = ddwaf_object( + 
elt, + observator=observator, + max_objects=max_objects, + max_depth=max_depth - 1, + max_string_length=max_string_length, + ) + ddwaf_object_array_add(array, obj) + elif isinstance(struct, dict): + if max_depth <= 0: + observator.truncation |= _TRUNC_CONTAINER_DEPTH + max_objects = 0 + map_o = ddwaf_object_map(self) + # order is unspecified and could lead to problems if max_objects is reached + for counter_object, (key, val) in enumerate(struct.items()): + if not isinstance(key, (bytes, str)): # discards non string keys + continue + if counter_object >= max_objects: + observator.truncation |= _TRUNC_CONTAINER_SIZE + break + res_key = truncate_string(key.encode("UTF-8", errors="ignore") if isinstance(key, str) else key) + obj = ddwaf_object( + val, + observator=observator, + max_objects=max_objects, + max_depth=max_depth - 1, + max_string_length=max_string_length, + ) + ddwaf_object_map_add(map_o, res_key, obj) + elif struct is not None: + ddwaf_object_string(self, truncate_string(str(struct).encode("UTF-8", errors="ignore"))) + else: + ddwaf_object_null(self) + + @classmethod + def create_without_limits(cls, struct: DDWafRulesType) -> "ddwaf_object": + return cls(struct, max_objects=DDWAF_NO_LIMIT, max_depth=DDWAF_DEPTH_NO_LIMIT, max_string_length=DDWAF_NO_LIMIT) + + @property + def struct(self) -> DDWafRulesType: + """Generate a python structure from ddwaf_object""" + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_STRING: + return self.value.stringValue.decode("UTF-8", errors="ignore") + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_MAP: + return { + self.value.array[i].parameterName.decode("UTF-8", errors="ignore"): self.value.array[i].struct + for i in range(self.nbEntries) + } + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_ARRAY: + return [self.value.array[i].struct for i in range(self.nbEntries)] + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_SIGNED: + return self.value.intValue + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_UNSIGNED: + return self.value.uintValue + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_BOOL: + return self.value.boolean + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_FLOAT: + return self.value.f64 + if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_NULL or self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_INVALID: + return None + log.debug("ddwaf_object struct: unknown object type: %s", repr(type(self.type))) + return None + + def __repr__(self): + return repr(self.struct) + + +ddwaf_object_p = ctypes.POINTER(ddwaf_object) + + +class ddwaf_value(ctypes.Union): + _fields_ = [ + ("stringValue", ctypes.c_char_p), + ("uintValue", ctypes.c_ulonglong), + ("intValue", ctypes.c_longlong), + ("array", ddwaf_object_p), + ("boolean", ctypes.c_bool), + ("f64", ctypes.c_double), + ] + + +ddwaf_object._fields_ = [ + ("parameterName", ctypes.c_char_p), + ("parameterNameLength", ctypes.c_uint64), + ("value", ddwaf_value), + ("nbEntries", ctypes.c_uint64), + ("type", ctypes.c_int), +] + + +class ddwaf_result(ctypes.Structure): + _fields_ = [ + ("timeout", ctypes.c_bool), + ("events", ddwaf_object), + ("actions", ddwaf_object), + ("derivatives", ddwaf_object), + ("total_runtime", ctypes.c_uint64), + ] + + def __repr__(self): + return "total_runtime=%r, events=%r, timeout=%r, action=[%r]" % ( + self.total_runtime, + self.events.struct, + self.timeout.struct, + self.actions, + ) + + def __del__(self): + try: + ddwaf_result_free(self) + except TypeError: + pass + + +ddwaf_result_p = ctypes.POINTER(ddwaf_result) + + +class ddwaf_config_limits(ctypes.Structure): + _fields_ = [ + ("max_container_size", ctypes.c_uint32), + 
("max_container_depth", ctypes.c_uint32), + ("max_string_length", ctypes.c_uint32), + ] + + +class ddwaf_config_obfuscator(ctypes.Structure): + _fields_ = [ + ("key_regex", ctypes.c_char_p), + ("value_regex", ctypes.c_char_p), + ] + + +ddwaf_object_free_fn = ctypes.CFUNCTYPE(None, ddwaf_object_p) +ddwaf_object_free = ddwaf_object_free_fn( + ("ddwaf_object_free", ddwaf), + ((1, "object"),), +) + + +class ddwaf_config(ctypes.Structure): + _fields_ = [ + ("limits", ddwaf_config_limits), + ("obfuscator", ddwaf_config_obfuscator), + ("free_fn", ddwaf_object_free_fn), + ] + # TODO : initial value of free_fn + + def __init__( + self, + max_container_size: int = 0, + max_container_depth: int = 0, + max_string_length: int = 0, + key_regex: bytes = b"", + value_regex: bytes = b"", + free_fn=ddwaf_object_free, + ) -> None: + self.limits.max_container_size = max_container_size + self.limits.max_container_depth = max_container_depth + self.limits.max_string_length = max_string_length + self.obfuscator.key_regex = key_regex + self.obfuscator.value_regex = value_regex + self.free_fn = free_fn + + +ddwaf_config_p = ctypes.POINTER(ddwaf_config) + + +ddwaf_handle = ctypes.c_void_p # may stay as this because it's mainly an abstract type in the interface +ddwaf_context = ctypes.c_void_p # may stay as this because it's mainly an abstract type in the interface + + +class ddwaf_handle_capsule: + def __init__(self, handle: ddwaf_handle) -> None: + self.handle = handle + self.free_fn = ddwaf_destroy + + def __del__(self): + if self.handle: + try: + self.free_fn(self.handle) + except TypeError: + pass + self.handle = None + + def __bool__(self): + return bool(self.handle) + + +class ddwaf_context_capsule: + def __init__(self, ctx: ddwaf_context) -> None: + self.ctx = ctx + self.free_fn = ddwaf_context_destroy + + def __del__(self): + if self.ctx: + try: + self.free_fn(self.ctx) + except TypeError: + pass + self.ctx = None + + def __bool__(self): + return bool(self.ctx) + + +ddwaf_log_cb = ctypes.POINTER( + ctypes.CFUNCTYPE( + None, ctypes.c_int, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_uint, ctypes.c_char_p, ctypes.c_uint64 + ) +) + + +# +# Functions Prototypes (creating python counterpart function from C function with ) +# + +ddwaf_init = ctypes.CFUNCTYPE(ddwaf_handle, ddwaf_object_p, ddwaf_config_p, ddwaf_object_p)( + ("ddwaf_init", ddwaf), + ( + (1, "ruleset_map"), + (1, "config", None), + (1, "diagnostics", None), + ), +) + + +def py_ddwaf_init(ruleset_map: ddwaf_object, config, info) -> ddwaf_handle_capsule: + return ddwaf_handle_capsule(ddwaf_init(ruleset_map, config, info)) + + +ddwaf_update = ctypes.CFUNCTYPE(ddwaf_handle, ddwaf_handle, ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_update", ddwaf), + ( + (1, "handle"), + (1, "ruleset_map"), + (1, "diagnostics", None), + ), +) + + +def py_ddwaf_update(handle: ddwaf_handle_capsule, ruleset_map: ddwaf_object, info) -> ddwaf_handle_capsule: + return ddwaf_handle_capsule(ddwaf_update(handle.handle, ruleset_map, ctypes.byref(info))) + + +ddwaf_destroy = ctypes.CFUNCTYPE(None, ddwaf_handle)( + ("ddwaf_destroy", ddwaf), + ((1, "handle"),), +) + +ddwaf_known_addresses = ctypes.CFUNCTYPE( + ctypes.POINTER(ctypes.c_char_p), ddwaf_handle, ctypes.POINTER(ctypes.c_uint32) +)( + ("ddwaf_known_addresses", ddwaf), + ( + (1, "handle"), + (1, "size"), + ), +) + + +def py_ddwaf_known_addresses(handle: ddwaf_handle_capsule) -> List[str]: + size = ctypes.c_uint32() + obj = ddwaf_known_addresses(handle.handle, ctypes.byref(size)) + return [obj[i].decode("UTF-8") for i in 
range(size.value)] + + +ddwaf_context_init = ctypes.CFUNCTYPE(ddwaf_context, ddwaf_handle)( + ("ddwaf_context_init", ddwaf), + ((1, "handle"),), +) + + +def py_ddwaf_context_init(handle: ddwaf_handle_capsule) -> ddwaf_context_capsule: + return ddwaf_context_capsule(ddwaf_context_init(handle.handle)) + + +ddwaf_run = ctypes.CFUNCTYPE( + ctypes.c_int, ddwaf_context, ddwaf_object_p, ddwaf_object_p, ddwaf_result_p, ctypes.c_uint64 +)(("ddwaf_run", ddwaf), ((1, "context"), (1, "persistent_data"), (1, "ephemeral_data"), (1, "result"), (1, "timeout"))) + +ddwaf_context_destroy = ctypes.CFUNCTYPE(None, ddwaf_context)( + ("ddwaf_context_destroy", ddwaf), + ((1, "context"),), +) + +ddwaf_result_free = ctypes.CFUNCTYPE(None, ddwaf_result_p)( + ("ddwaf_result_free", ddwaf), + ((1, "result"),), +) + +ddwaf_object_invalid = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_object_invalid", ddwaf), + ((3, "object"),), +) + +ddwaf_object_string = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_char_p)( + ("ddwaf_object_string", ddwaf), + ( + (3, "object"), + (1, "string"), + ), +) + +# object_string variants not used + +ddwaf_object_string_from_unsigned = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_uint64)( + ("ddwaf_object_string_from_unsigned", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + +ddwaf_object_string_from_signed = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_int64)( + ("ddwaf_object_string_from_signed", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + +ddwaf_object_unsigned = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_uint64)( + ("ddwaf_object_unsigned", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + +ddwaf_object_signed = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_int64)( + ("ddwaf_object_signed", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + +# object_(un)signed_forced : not used ? 
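# Illustrative sketch, not part of the vendored file: the ctypes prototype
# pattern used for the ddwaf_* bindings above, demonstrated against the C
# runtime so it can run without libddwaf. Assumes a Linux or macOS process
# whose global symbol table exposes libc's abs(); the names below are
# placeholders chosen for the example.
import ctypes

libc = ctypes.CDLL(None)  # None loads symbols already linked into the process

# CFUNCTYPE(restype, *argtypes) builds a prototype; instantiating it with
# ((symbol_name, dll), paramflags) binds the named C function, which is the
# same form used for ddwaf_init, ddwaf_run, etc. above. A paramflag
# direction of 1 marks a plain input argument.
c_abs = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_int)(("abs", libc), ((1, "value"),))

assert c_abs(-7) == 7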
+ +ddwaf_object_bool = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_bool)( + ("ddwaf_object_bool", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + + +ddwaf_object_float = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_double)( + ("ddwaf_object_float", ddwaf), + ( + (3, "object"), + (1, "value"), + ), +) + +ddwaf_object_null = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_object_null", ddwaf), + ((3, "object"),), +) + +ddwaf_object_array = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_object_array", ddwaf), + ((3, "object"),), +) + +ddwaf_object_map = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_object_map", ddwaf), + ((3, "object"),), +) + +ddwaf_object_array_add = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_object_p, ddwaf_object_p)( + ("ddwaf_object_array_add", ddwaf), + ( + (1, "array"), + (1, "object"), + ), +) + +ddwaf_object_map_add = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_object_p, ctypes.c_char_p, ddwaf_object_p)( + ("ddwaf_object_map_add", ddwaf), + ( + (1, "map"), + (1, "key"), + (1, "object"), + ), +) + +# unused because accessible from python part +# ddwaf_object_type +# ddwaf_object_size +# ddwaf_object_length +# ddwaf_object_get_key +# ddwaf_object_get_string +# ddwaf_object_get_unsigned +# ddwaf_object_get_signed +# ddwaf_object_get_index +# ddwaf_object_get_bool https://github.com/DataDog/libddwaf/commit/7dc68dacd972ae2e2a3c03a69116909c98dbd9cb +# ddwaf_object_get_float + + +ddwaf_get_version = ctypes.CFUNCTYPE(ctypes.c_char_p)( + ("ddwaf_get_version", ddwaf), + (), +) + + +ddwaf_set_log_cb = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_log_cb, ctypes.c_int)( + ("ddwaf_set_log_cb", ddwaf), + ( + (1, "cb"), + (1, "min_level"), + ), +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so new file mode 100644 index 0000000..ac482e1 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py new file mode 100644 index 0000000..b3d9c07 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py @@ -0,0 +1,32 @@ +import os +import time +from typing import Dict + +from ddtrace.internal.utils.formats import asbool + + +class deduplication: + _time_lapse = 3600 + + def __init__(self, func): + self.func = func + self._last_timestamp: float = time.time() + self.reported_logs: Dict[int, float] = dict() + + def get_last_time_reported(self, raw_log_hash: int) -> float: + return self.reported_logs.get(raw_log_hash, 0.0) + + def is_deduplication_enabled(self) -> bool: + return asbool(os.environ.get("_DD_APPSEC_DEDUPLICATION_ENABLED", "true")) + + def __call__(self, *args, **kwargs): + result = None + if self.is_deduplication_enabled() is False: + result = self.func(*args, **kwargs) + else: + raw_log_hash = hash("".join([str(arg) for arg in args])) + last_reported_timestamp = self.get_last_time_reported(raw_log_hash) + if time.time() > last_reported_timestamp: + result = self.func(*args, **kwargs) + self.reported_logs[raw_log_hash] = time.time() + self._time_lapse + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py new file mode 100644 index 0000000..32efb83 --- 
/dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py @@ -0,0 +1,377 @@ +import functools +import io +import json + +import xmltodict + +from ddtrace.appsec._constants import SPAN_DATA_NAMES +from ddtrace.appsec._iast._patch import if_iast_taint_returned_object_for +from ddtrace.appsec._iast._patch import if_iast_taint_yield_tuple_for +from ddtrace.appsec._iast._utils import _is_iast_enabled +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanTypes +from ddtrace.internal import core +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.http import parse_form_multipart +from ddtrace.settings.asm import config as asm_config +from ddtrace.vendor.wrapt import when_imported +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + + +log = get_logger(__name__) +_BODY_METHODS = {"POST", "PUT", "DELETE", "PATCH"} + + +def _get_content_length(environ): + content_length = environ.get("CONTENT_LENGTH") + transfer_encoding = environ.get("HTTP_TRANSFER_ENCODING") + + if transfer_encoding == "chunked" or content_length is None: + return None + + try: + return max(0, int(content_length)) + except ValueError: + return 0 + + +# set_http_meta + + +def _on_set_http_meta( + span, + request_ip, + raw_uri, + route, + method, + request_headers, + request_cookies, + parsed_query, + request_path_params, + request_body, + status_code, + response_headers, + response_cookies, +): + if _is_iast_enabled(): + from ddtrace.appsec._iast.taint_sinks.insecure_cookie import asm_check_cookies + + if response_cookies: + asm_check_cookies(response_cookies) + + if asm_config._asm_enabled and span.span_type == SpanTypes.WEB: + # avoid circular import + from ddtrace.appsec._asm_request_context import set_waf_address + + status_code = str(status_code) if status_code is not None else None + + addresses = [ + (SPAN_DATA_NAMES.REQUEST_HTTP_IP, request_ip), + (SPAN_DATA_NAMES.REQUEST_URI_RAW, raw_uri), + (SPAN_DATA_NAMES.REQUEST_ROUTE, route), + (SPAN_DATA_NAMES.REQUEST_METHOD, method), + (SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, request_headers), + (SPAN_DATA_NAMES.REQUEST_COOKIES, request_cookies), + (SPAN_DATA_NAMES.REQUEST_QUERY, parsed_query), + (SPAN_DATA_NAMES.REQUEST_PATH_PARAMS, request_path_params), + (SPAN_DATA_NAMES.REQUEST_BODY, request_body), + (SPAN_DATA_NAMES.RESPONSE_STATUS, status_code), + (SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, response_headers), + ] + for k, v in addresses: + if v is not None: + set_waf_address(k, v, span) + + +core.on("set_http_meta_for_asm", _on_set_http_meta) + + +# ASGI + + +async def _on_asgi_request_parse_body(receive, headers): + if asm_config._asm_enabled: + data_received = await receive() + body = data_received.get("body", b"") + + async def receive(): + return data_received + + content_type = headers.get("content-type") or headers.get("Content-Type") + try: + if content_type in ("application/json", "text/json"): + if body is None or body == b"": + req_body = None + else: + req_body = json.loads(body.decode()) + elif content_type in ("application/xml", "text/xml"): + req_body = xmltodict.parse(body) + elif content_type == "text/plain": + req_body = None + else: + req_body = parse_form_multipart(body.decode(), headers) or None + return receive, req_body + except BaseException: + return receive, None + + return receive, None + + +# FLASK + + +def _on_request_span_modifier( + ctx, flask_config, request, environ, _HAS_JSON_MIXIN, flask_version, 
flask_version_str, exception_type +): + req_body = None + if asm_config._asm_enabled and request.method in _BODY_METHODS: + content_type = request.content_type + wsgi_input = environ.get("wsgi.input", "") + + # Copy wsgi input if not seekable + if wsgi_input: + try: + seekable = wsgi_input.seekable() + except AttributeError: + seekable = False + if not seekable: + # https://gist.github.com/mitsuhiko/5721547 + # Provide wsgi.input as an end-of-file terminated stream. + # In that case wsgi.input_terminated is set to True + # and an app is required to read to the end of the file and disregard CONTENT_LENGTH for reading. + if environ.get("wsgi.input_terminated"): + body = wsgi_input.read() + else: + content_length = _get_content_length(environ) + body = wsgi_input.read(content_length) if content_length else b"" + environ["wsgi.input"] = io.BytesIO(body) + + try: + if content_type in ("application/json", "text/json"): + if _HAS_JSON_MIXIN and hasattr(request, "json") and request.json: + req_body = request.json + elif request.data is None or request.data == b"": + req_body = None + else: + req_body = json.loads(request.data.decode("UTF-8")) + elif content_type in ("application/xml", "text/xml"): + req_body = xmltodict.parse(request.get_data()) + elif hasattr(request, "form"): + req_body = request.form.to_dict() + else: + # no raw body + req_body = None + except ( + exception_type, + AttributeError, + RuntimeError, + TypeError, + ValueError, + json.JSONDecodeError, + xmltodict.expat.ExpatError, + xmltodict.ParsingInterrupted, + ): + log.debug("Failed to parse request body", exc_info=True) + finally: + # Reset wsgi input to the beginning + if wsgi_input: + if seekable: + wsgi_input.seek(0) + else: + environ["wsgi.input"] = io.BytesIO(body) + return req_body + + +def _on_request_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + if _is_iast_enabled(): + try: + from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source + from ddtrace.appsec._iast._taint_tracking import OriginType + from ddtrace.appsec._iast._taint_tracking import taint_pyobject + from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor + + _set_metric_iast_instrumented_source(OriginType.PATH) + _set_metric_iast_instrumented_source(OriginType.QUERY) + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return + + # TODO: instance.query_string = ?? 
+ instance.query_string = taint_pyobject( + pyobject=instance.query_string, + source_name=OriginType.QUERY, + source_value=instance.query_string, + source_origin=OriginType.QUERY, + ) + instance.path = taint_pyobject( + pyobject=instance.path, + source_name=OriginType.PATH, + source_value=instance.path, + source_origin=OriginType.PATH, + ) + except Exception: + log.debug("Unexpected exception while tainting pyobject", exc_info=True) + + +def _on_flask_patch(flask_version): + if _is_iast_enabled(): + try: + from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source + from ddtrace.appsec._iast._taint_tracking import OriginType + + _w( + "werkzeug.datastructures", + "Headers.items", + functools.partial(if_iast_taint_yield_tuple_for, (OriginType.HEADER_NAME, OriginType.HEADER)), + ) + _set_metric_iast_instrumented_source(OriginType.HEADER_NAME) + _set_metric_iast_instrumented_source(OriginType.HEADER) + + _w( + "werkzeug.datastructures", + "ImmutableMultiDict.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), + ) + _set_metric_iast_instrumented_source(OriginType.PARAMETER) + + _w( + "werkzeug.datastructures", + "EnvironHeaders.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.HEADER), + ) + _set_metric_iast_instrumented_source(OriginType.HEADER) + + _w("werkzeug.wrappers.request", "Request.__init__", _on_request_init) + _w( + "werkzeug.wrappers.request", + "Request.get_data", + functools.partial(if_iast_taint_returned_object_for, OriginType.BODY), + ) + _set_metric_iast_instrumented_source(OriginType.BODY) + + if flask_version < (2, 0, 0): + _w( + "werkzeug._internal", + "_DictAccessorProperty.__get__", + functools.partial(if_iast_taint_returned_object_for, OriginType.QUERY), + ) + _set_metric_iast_instrumented_source(OriginType.QUERY) + except Exception: + log.debug("Unexpected exception while patch IAST functions", exc_info=True) + + +def _on_flask_blocked_request(_): + core.set_item(HTTP_REQUEST_BLOCKED, True) + + +def _on_django_func_wrapped(fn_args, fn_kwargs, first_arg_expected_type, *_): + # If IAST is enabled and we're wrapping a Django view call, taint the kwargs (view's + # path parameters) + if _is_iast_enabled() and fn_args and isinstance(fn_args[0], first_arg_expected_type): + from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401 + from ddtrace.appsec._iast._taint_tracking import is_pyobject_tainted + from ddtrace.appsec._iast._taint_tracking import taint_pyobject + from ddtrace.appsec._iast._taint_utils import taint_structure + from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return + + http_req = fn_args[0] + + http_req.COOKIES = taint_structure(http_req.COOKIES, OriginType.COOKIE_NAME, OriginType.COOKIE) + http_req.GET = taint_structure(http_req.GET, OriginType.PARAMETER_NAME, OriginType.PARAMETER) + http_req.POST = taint_structure(http_req.POST, OriginType.BODY, OriginType.BODY) + if not is_pyobject_tainted(getattr(http_req, "_body", None)): + http_req._body = taint_pyobject( + http_req.body, + source_name="body", + source_value=http_req.body, + source_origin=OriginType.BODY, + ) + + http_req.headers = taint_structure(http_req.headers, OriginType.HEADER_NAME, OriginType.HEADER) + http_req.path = taint_pyobject( + http_req.path, source_name="path", source_value=http_req.path, source_origin=OriginType.PATH + ) + http_req.path_info = taint_pyobject( + http_req.path_info, + source_name="path", + 
source_value=http_req.path, + source_origin=OriginType.PATH, + ) + http_req.environ["PATH_INFO"] = taint_pyobject( + http_req.environ["PATH_INFO"], + source_name="path", + source_value=http_req.path, + source_origin=OriginType.PATH, + ) + http_req.META = taint_structure(http_req.META, OriginType.HEADER_NAME, OriginType.HEADER) + if fn_kwargs: + try: + for k, v in fn_kwargs.items(): + fn_kwargs[k] = taint_pyobject( + v, source_name=k, source_value=v, source_origin=OriginType.PATH_PARAMETER + ) + except Exception: + log.debug("IAST: Unexpected exception while tainting path parameters", exc_info=True) + + +def _on_wsgi_environ(wrapped, _instance, args, kwargs): + if _is_iast_enabled(): + if not args: + return wrapped(*args, **kwargs) + + from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source + from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401 + from ddtrace.appsec._iast._taint_utils import taint_structure + from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor + + _set_metric_iast_instrumented_source(OriginType.HEADER_NAME) + _set_metric_iast_instrumented_source(OriginType.HEADER) + # we instrument those sources on _on_django_func_wrapped + _set_metric_iast_instrumented_source(OriginType.PATH_PARAMETER) + _set_metric_iast_instrumented_source(OriginType.PATH) + _set_metric_iast_instrumented_source(OriginType.COOKIE) + _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME) + _set_metric_iast_instrumented_source(OriginType.PARAMETER) + _set_metric_iast_instrumented_source(OriginType.PARAMETER_NAME) + _set_metric_iast_instrumented_source(OriginType.BODY) + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return wrapped(*args, **kwargs) + + return wrapped(*((taint_structure(args[0], OriginType.HEADER_NAME, OriginType.HEADER),) + args[1:]), **kwargs) + + return wrapped(*args, **kwargs) + + +def _on_django_patch(): + try: + from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401 + + when_imported("django.http.request")( + lambda m: trace_utils.wrap( + m, + "QueryDict.__getitem__", + functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER), + ) + ) + except Exception: + log.debug("Unexpected exception while patch IAST functions", exc_info=True) + + +def listen(): + core.on("flask.request_call_modifier", _on_request_span_modifier, "request_body") + core.on("flask.request_init", _on_request_init) + core.on("flask.blocked_request_callable", _on_flask_blocked_request) + + +core.on("django.func.wrapped", _on_django_func_wrapped) +core.on("django.wsgi_environ", _on_wsgi_environ, "wrapped_result") +core.on("django.patch", _on_django_patch) +core.on("flask.patch", _on_flask_patch) + +core.on("asgi.request.parse.body", _on_asgi_request_parse_body, "await_receive_and_body") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py new file mode 100644 index 0000000..ddbdd58 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py @@ -0,0 +1,74 @@ +"""IAST (interactive application security testing) analyzes code for security vulnerabilities. + +To add new vulnerabilities analyzers (Taint sink) we should update `IAST_PATCH` in +`ddtrace/appsec/iast/_patch_modules.py` + +Create new file with the same name: `ddtrace/appsec/iast/taint_sinks/[my_new_vulnerability].py` + +Then, implement the `patch()` function and its wrappers. 
+ +In order to have the better performance, the Overhead control engine (OCE) helps us to control the overhead of our +wrapped functions. We should create a class that inherit from `ddtrace.appsec._iast.taint_sinks._base.VulnerabilityBase` +and register with `ddtrace.appsec._iast.oce`. + +@oce.register +class MyVulnerability(VulnerabilityBase): + vulnerability_type = "MyVulnerability" + evidence_type = "kind_of_Vulnerability" + +Before that, we should decorate our wrappers with `wrap` method and +report the vulnerabilities with `report` method. OCE will manage the number of requests, number of vulnerabilities +to reduce the overhead. + +@WeakHash.wrap +def wrapped_function(wrapped, instance, args, kwargs): + # type: (Callable, str, Any, Any, Any) -> Any + WeakHash.report( + evidence_value=evidence, + ) + return wrapped(*args, **kwargs) +""" # noqa: RST201, RST213, RST210 +import inspect +import sys + +from ddtrace.internal.logger import get_logger + +from ._overhead_control_engine import OverheadControl +from ._utils import _is_iast_enabled + + +log = get_logger(__name__) + +oce = OverheadControl() + + +def ddtrace_iast_flask_patch(): + """ + Patch the code inside the Flask main app source code file (typically "app.py") so + IAST/Custom Code propagation works also for the functions and methods defined inside it. + This must be called on the top level or inside the `if __name__ == "__main__"` + and must be before the `app.run()` call. It also requires `DD_IAST_ENABLED` to be + activated. + """ + if not _is_iast_enabled(): + return + + from ._ast.ast_patching import astpatch_module + + module_name = inspect.currentframe().f_back.f_globals["__name__"] + module = sys.modules[module_name] + try: + module_path, patched_ast = astpatch_module(module, remove_flask_run=True) + except Exception: + log.debug("Unexpected exception while AST patching", exc_info=True) + return + + compiled_code = compile(patched_ast, module_path, "exec") + exec(compiled_code, module.__dict__) # nosec B102 + sys.modules[module_name] = compiled_code + + +__all__ = [ + "oce", + "ddtrace_iast_flask_patch", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py new file mode 100644 index 0000000..9eb516a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 + +import ast +import codecs +import os +import re +from sys import builtin_module_names +from types import ModuleType +from typing import TYPE_CHECKING # noqa:F401 +from typing import Tuple + + +if TYPE_CHECKING: + from typing import Optional # noqa:F401 + +from ddtrace.appsec._constants import IAST +from ddtrace.appsec._python_info.stdlib import _stdlib_for_python_version +from ddtrace.internal.logger import get_logger +from ddtrace.internal.module import origin + +from .visitor import AstVisitor + + +# Prefixes for modules where IAST patching is allowed +IAST_ALLOWLIST = ("tests.appsec.iast",) # type: tuple[str, ...] +IAST_DENYLIST = ("ddtrace", "pkg_resources") # type: tuple[str, ...] 
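# Illustrative usage sketch, not part of the vendored file: how an
# application's Flask entry point would call ddtrace_iast_flask_patch(), as
# described in the docstring of ddtrace/appsec/_iast/__init__.py above.
# Assumes Flask and this ddtrace build are installed and DD_IAST_ENABLED is
# set in the environment; the app and route are placeholders.
from flask import Flask

from ddtrace.appsec._iast import ddtrace_iast_flask_patch

app = Flask(__name__)


@app.route("/")
def index():
    return "ok"


if __name__ == "__main__":
    # Must run before app.run(); with DD_IAST_ENABLED unset it returns early.
    ddtrace_iast_flask_patch()
    app.run()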
+ + +if IAST.PATCH_MODULES in os.environ: + IAST_ALLOWLIST += tuple(os.environ[IAST.PATCH_MODULES].split(IAST.SEP_MODULES)) + +if IAST.DENY_MODULES in os.environ: + IAST_DENYLIST += tuple(os.environ[IAST.DENY_MODULES].split(IAST.SEP_MODULES)) + + +ENCODING = "" + +log = get_logger(__name__) + + +def get_encoding(module_path): # type: (str) -> str + """ + First tries to detect the encoding for the file, + otherwise, returns global encoding default + """ + global ENCODING + if not ENCODING: + try: + ENCODING = codecs.lookup("utf-8-sig").name + except LookupError: + ENCODING = codecs.lookup("utf-8").name + return ENCODING + + +try: + import importlib.metadata as il_md +except ImportError: + import importlib_metadata as il_md # type: ignore[no-redef] + + +def _build_installed_package_names_list(): # type: (...) -> set[str] + return { + ilmd_d.metadata["name"] for ilmd_d in il_md.distributions() if ilmd_d is not None and ilmd_d.files is not None + } + + +_NOT_PATCH_MODULE_NAMES = ( + _build_installed_package_names_list() | _stdlib_for_python_version() | set(builtin_module_names) +) + + +def _in_python_stdlib_or_third_party(module_name): # type: (str) -> bool + return module_name.split(".")[0].lower() in [x.lower() for x in _NOT_PATCH_MODULE_NAMES] + + +def _should_iast_patch(module_name): # type: (str) -> bool + """ + select if module_name should be patch from the longuest prefix that match in allow or deny list. + if a prefix is in both list, deny is selected. + """ + max_allow = max((len(prefix) for prefix in IAST_ALLOWLIST if module_name.startswith(prefix)), default=-1) + max_deny = max((len(prefix) for prefix in IAST_DENYLIST if module_name.startswith(prefix)), default=-1) + diff = max_allow - max_deny + return diff > 0 or (diff == 0 and not _in_python_stdlib_or_third_party(module_name)) + + +def visit_ast( + source_text, # type: str + module_path, # type: str + module_name="", # type: str +): # type: (...) -> Optional[str] + parsed_ast = ast.parse(source_text, module_path) + + visitor = AstVisitor( + filename=module_path, + module_name=module_name, + ) + modified_ast = visitor.visit(parsed_ast) + + if not visitor.ast_modified: + return None + + ast.fix_missing_locations(modified_ast) + return modified_ast + + +_FLASK_INSTANCE_REGEXP = re.compile(r"(\S*)\s*=.*Flask\(.*") + + +def _remove_flask_run(text): # type (str) -> str + """ + Find and remove flask app.run() call. This is used for patching + the app.py file and exec'ing to replace the module without creating + a new instance. + """ + flask_instance_name = re.search(_FLASK_INSTANCE_REGEXP, text) + groups = flask_instance_name.groups() + if not groups: + return text + + instance_name = groups[-1] + new_text = re.sub(instance_name + r"\.run\(.*\)", "pass", text) + return new_text + + +def astpatch_module(module: ModuleType, remove_flask_run: bool = False) -> Tuple[str, str]: + module_name = module.__name__ + module_path = str(origin(module)) + try: + if os.stat(module_path).st_size == 0: + # Don't patch empty files like __init__.py + log.debug("empty file: %s", module_path) + return "", "" + except OSError: + log.debug("astpatch_source couldn't find the file: %s", module_path, exc_info=True) + return "", "" + + # Get the file extension, if it's dll, os, pyd, dyn, dynlib: return + # If its pyc or pyo, change to .py and check that the file exists. If not, + # return with warning. 
+ _, module_ext = os.path.splitext(module_path) + + if module_ext.lower() not in {".pyo", ".pyc", ".pyw", ".py"}: + # Probably native or built-in module + log.debug("extension not supported: %s for: %s", module_ext, module_path) + return "", "" + + with open(module_path, "r", encoding=get_encoding(module_path)) as source_file: + try: + source_text = source_file.read() + except UnicodeDecodeError: + log.debug("unicode decode error for file: %s", module_path, exc_info=True) + return "", "" + + if len(source_text.strip()) == 0: + # Don't patch empty files like __init__.py + log.debug("empty file: %s", module_path) + return "", "" + + if remove_flask_run: + source_text = _remove_flask_run(source_text) + + new_source = visit_ast( + source_text, + module_path, + module_name=module_name, + ) + if new_source is None: + log.debug("file not ast patched: %s", module_path) + return "", "" + + return module_path, new_source diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py new file mode 100644 index 0000000..d7a4b08 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py @@ -0,0 +1,759 @@ +#!/usr/bin/env python3 +from _ast import Expr +from _ast import ImportFrom +import ast +import copy +import sys +from typing import Any # noqa:F401 +from typing import List # noqa:F401 +from typing import Set # noqa:F401 + +from .._metrics import _set_metric_iast_instrumented_propagation +from ..constants import DEFAULT_PATH_TRAVERSAL_FUNCTIONS +from ..constants import DEFAULT_WEAK_RANDOMNESS_FUNCTIONS + + +PY3 = sys.version_info[0] >= 3 +PY30_37 = sys.version_info >= (3, 0, 0) and sys.version_info < (3, 8, 0) +PY38_PLUS = sys.version_info >= (3, 8, 0) +PY39_PLUS = sys.version_info >= (3, 9, 0) + +CODE_TYPE_FIRST_PARTY = "first_party" +CODE_TYPE_DD = "datadog" +CODE_TYPE_SITE_PACKAGES = "site_packages" +CODE_TYPE_STDLIB = "stdlib" +TAINT_SINK_FUNCTION_REPLACEMENT = "ddtrace_taint_sinks.ast_function" + + +def _mark_avoid_convert_recursively(node): + if node is not None: + node.avoid_convert = True + for child in ast.iter_child_nodes(node): + _mark_avoid_convert_recursively(child) + + +class AstVisitor(ast.NodeTransformer): + def __init__( + self, + filename="", + module_name="", + ): + # Offset caused by inserted lines. 
Will be adjusted in visit_Generic + self._aspects_spec = { + "definitions_module": "ddtrace.appsec._iast._taint_tracking.aspects", + "alias_module": "ddtrace_aspects", + "functions": { + "str": "ddtrace_aspects.str_aspect", + "bytes": "ddtrace_aspects.bytes_aspect", + "bytearray": "ddtrace_aspects.bytearray_aspect", + "ddtrace_iast_flask_patch": "ddtrace_aspects.empty_func", # To avoid recursion + }, + "stringalike_methods": { + "decode": "ddtrace_aspects.decode_aspect", + "join": "ddtrace_aspects.join_aspect", + "encode": "ddtrace_aspects.encode_aspect", + "extend": "ddtrace_aspects.bytearray_extend_aspect", + "upper": "ddtrace_aspects.upper_aspect", + "lower": "ddtrace_aspects.lower_aspect", + "replace": "ddtrace_aspects.replace_aspect", + "swapcase": "ddtrace_aspects.swapcase_aspect", + "title": "ddtrace_aspects.title_aspect", + "capitalize": "ddtrace_aspects.capitalize_aspect", + "casefold": "ddtrace_aspects.casefold_aspect", + "translate": "ddtrace_aspects.translate_aspect", + "format": "ddtrace_aspects.format_aspect", + "format_map": "ddtrace_aspects.format_map_aspect", + "zfill": "ddtrace_aspects.zfill_aspect", + "ljust": "ddtrace_aspects.ljust_aspect", + }, + # Replacement function for indexes and ranges + "slices": { + "index": "ddtrace_aspects.index_aspect", + "slice": "ddtrace_aspects.slice_aspect", + }, + # Replacement functions for modules + "module_functions": { + # "BytesIO": "ddtrace_aspects.stringio_aspect", + # "StringIO": "ddtrace_aspects.stringio_aspect", + # "format": "ddtrace_aspects.format_aspect", + # "format_map": "ddtrace_aspects.format_map_aspect", + }, + "operators": { + ast.Add: "ddtrace_aspects.add_aspect", + "FORMAT_VALUE": "ddtrace_aspects.format_value_aspect", + ast.Mod: "ddtrace_aspects.modulo_aspect", + "BUILD_STRING": "ddtrace_aspects.build_string_aspect", + }, + "excluded_from_patching": { + # Key: module being patched + # Value: dict with more info + "django.utils.formats": { + # Key: called functions that won't be patched. E.g.: for this module + # not a single call for format on any function will be patched. + # + # Value: function definitions. E.g.: we won't patch any Call node inside + # the iter_format_modules(). If we, for example, had 'foo': ('bar', 'baz') + # it would mean that we wouldn't patch any call to foo() done inside the + # bar() or baz() function definitions. 
+ "format": ("",), + "": ("iter_format_modules",), + }, + "django.utils.log": { + "": ("",), + }, + "django.utils.html": {"": ("format_html", "format_html_join")}, + }, + # This is a set since all functions will be replaced by taint_sink_functions + "taint_sinks": { + "weak_randomness": DEFAULT_WEAK_RANDOMNESS_FUNCTIONS, + "path_traversal": DEFAULT_PATH_TRAVERSAL_FUNCTIONS, + "other": { + "load", + "run", + "path", + "exit", + "sleep", + "socket", + }, + # These explicitly WON'T be replaced by taint_sink_function: + "disabled": { + "__new__", + "__init__", + "__dir__", + "__repr__", + "super", + }, + }, + } + self._sinkpoints_spec = { + "definitions_module": "ddtrace.appsec._iast.taint_sinks", + "alias_module": "ddtrace_taint_sinks", + "functions": { + "open": "ddtrace_taint_sinks.open_path_traversal", + }, + } + self._sinkpoints_functions = self._sinkpoints_spec["functions"] + self.ast_modified = False + self.filename = filename + self.module_name = module_name + + self._aspect_index = self._aspects_spec["slices"]["index"] + self._aspect_slice = self._aspects_spec["slices"]["slice"] + self._aspect_functions = self._aspects_spec["functions"] + self._aspect_operators = self._aspects_spec["operators"] + self._aspect_methods = self._aspects_spec["stringalike_methods"] + self._aspect_modules = self._aspects_spec["module_functions"] + self._aspect_format_value = self._aspects_spec["operators"]["FORMAT_VALUE"] + self._aspect_build_string = self._aspects_spec["operators"]["BUILD_STRING"] + self.excluded_functions = self._aspects_spec["excluded_from_patching"].get(self.module_name, {}) + + # Sink points + self._taint_sink_replace_any = self._merge_taint_sinks( + self._aspects_spec["taint_sinks"]["other"], + self._aspects_spec["taint_sinks"]["weak_randomness"], + *[functions for module, functions in self._aspects_spec["taint_sinks"]["path_traversal"].items()], + ) + self._taint_sink_replace_disabled = self._aspects_spec["taint_sinks"]["disabled"] + + self.dont_patch_these_functionsdefs = set() + for _, v in self.excluded_functions.items(): + if v: + for i in v: + self.dont_patch_these_functionsdefs.add(i) + + # This will be enabled when we find a module and function where we avoid doing + # replacements and enabled again on all the others + self.replacements_disabled_for_functiondef = False + + self.codetype = CODE_TYPE_FIRST_PARTY + if "ast/tests/fixtures" in self.filename: + self.codetype = CODE_TYPE_FIRST_PARTY + elif "ddtrace" in self.filename and ("site-packages" in self.filename or "dist-packages" in self.filename): + self.codetype = CODE_TYPE_DD + elif "site-packages" in self.filename or "dist-packages" in self.filename: + self.codetype = CODE_TYPE_SITE_PACKAGES + elif "lib/python" in self.filename: + self.codetype = CODE_TYPE_STDLIB + + @staticmethod + def _merge_taint_sinks(*args_functions: Set[str]) -> Set[str]: + merged_set = set() + + for functions in args_functions: + merged_set.update(functions) + + return merged_set + + def _is_string_node(self, node): # type: (Any) -> bool + if PY30_37 and isinstance(node, ast.Bytes): + return True + + if PY3 and (isinstance(node, ast.Constant) and isinstance(node.value, (str, bytes, bytearray))): + return True + + return False + + def _is_numeric_node(self, node): # type: (Any) -> bool + if PY30_37 and isinstance(node, ast.Num): + return True + + if PY38_PLUS and (isinstance(node, ast.Constant) and isinstance(node.value, (int, float))): + return True + + return False + + def _is_node_constant_or_binop(self, node): # type: (Any) -> bool + return 
self._is_string_node(node) or self._is_numeric_node(node) or isinstance(node, ast.BinOp) + + def _is_call_excluded(self, func_name_node): # type: (str) -> bool + if not self.excluded_functions: + return False + excluded_for_caller = self.excluded_functions.get(func_name_node, tuple()) + self.excluded_functions.get( + "", tuple() + ) + return "" in excluded_for_caller or self._current_function_name in excluded_for_caller + + def _is_string_format_with_literals(self, call_node): + # type: (ast.Call) -> bool + return ( + self._is_string_node(call_node.func.value) # type: ignore[attr-defined] + and call_node.func.attr == "format" # type: ignore[attr-defined] + and all(map(self._is_node_constant_or_binop, call_node.args)) + and all(map(lambda x: self._is_node_constant_or_binop(x.value), call_node.keywords)) + ) + + def _get_function_name(self, call_node, is_function): # type: (ast.Call, bool) -> str + if is_function: + return call_node.func.id # type: ignore[attr-defined] + # If the call is to a method + elif type(call_node.func) == ast.Name: + return call_node.func.id + + return call_node.func.attr # type: ignore[attr-defined] + + def _should_replace_with_taint_sink(self, call_node, is_function): # type: (ast.Call, bool) -> bool + function_name = self._get_function_name(call_node, is_function) + + if function_name in self._taint_sink_replace_disabled: + return False + + return any(allowed in function_name for allowed in self._taint_sink_replace_any) + + def _add_original_function_as_arg(self, call_node, is_function): # type: (ast.Call, bool) -> Any + """ + Creates the arguments for the original function + """ + function_name = self._get_function_name(call_node, is_function) + function_name_arg = ( + self._name_node(call_node, function_name, ctx=ast.Load()) if is_function else copy.copy(call_node.func) + ) + + # Arguments for stack info change from: + # my_function(self, *args, **kwargs) + # to: + # _add_original_function_as_arg(function_name=my_function, self, *args, **kwargs) + new_args = [ + function_name_arg, + ] + call_node.args + + return new_args + + def _node(self, type_, pos_from_node, **kwargs): + # type: (Any, Any, Any) -> Any + """ + Abstract some basic differences in node structure between versions + """ + + # Some nodes (like Module) dont have position + lineno = getattr(pos_from_node, "lineno", 1) + col_offset = getattr(pos_from_node, "col_offset", 0) + + if PY30_37: + # No end_lineno or end_pos_offset + return type_(lineno=lineno, col_offset=col_offset, **kwargs) + + # Py38+ + end_lineno = getattr(pos_from_node, "end_lineno", 1) + end_col_offset = getattr(pos_from_node, "end_col_offset", 0) + + return type_( + lineno=lineno, end_lineno=end_lineno, col_offset=col_offset, end_col_offset=end_col_offset, **kwargs + ) + + def _name_node(self, from_node, _id, ctx=ast.Load()): # noqa: B008 + # type: (Any, str, Any) -> ast.Name + return self._node( + ast.Name, + from_node, + id=_id, + ctx=ctx, + ) + + def _attr_node(self, from_node, attr, ctx=ast.Load()): # noqa: B008 + # type: (Any, str, Any) -> ast.Name + attr_attr = "" + name_attr = "" + if attr: + aspect_split = attr.split(".") + if len(aspect_split) > 1: + attr_attr = aspect_split[1] + name_attr = aspect_split[0] + + name_node = self._name_node(from_node, name_attr, ctx=ctx) + return self._node(ast.Attribute, from_node, attr=attr_attr, ctx=ctx, value=name_node) + + def _assign_node(self, from_node, targets, value): # type: (Any, List[Any], Any) -> Any + return self._node( + ast.Assign, + from_node, + targets=targets, + value=value, 
+ type_comment=None, + ) + + def find_insert_position(self, module_node): # type: (ast.Module) -> int + insert_position = 0 + from_future_import_found = False + import_found = False + + # Check all nodes that are "from __future__ import...", as we must insert after them. + # + # Caveat: + # - body_node.lineno doesn't work because a large docstring changes the lineno + # but not the position in the nodes (i.e. this can happen: lineno==52, position==2) + # TODO: Test and implement cases with docstrings before future imports, etc. + for body_node in module_node.body: + insert_position += 1 + if isinstance(body_node, ImportFrom) and body_node.module == "__future__": + import_found = True + from_future_import_found = True + # As soon as we start a non-futuristic import we can stop looking + elif isinstance(body_node, ImportFrom): + import_found = True + elif isinstance(body_node, Expr) and not import_found: + continue + elif from_future_import_found: + insert_position -= 1 + break + else: + break + + if not from_future_import_found: + # No futuristic import found, reset the position to 0 + insert_position = 0 + + return insert_position + + def _none_constant(self, from_node, ctx=ast.Load()): # noqa: B008 + # type: (Any, Any) -> Any + if PY30_37: + return ast.NameConstant(lineno=from_node.lineno, col_offset=from_node.col_offset, value=None) + + # 3.8+ + return ast.Constant( + lineno=from_node.lineno, + col_offset=from_node.col_offset, + end_lineno=from_node.end_lineno, + end_col_offset=from_node.end_col_offset, + value=None, + kind=None, + ) + + def _int_constant(self, from_node, value): + return ast.Constant( + lineno=from_node.lineno, + col_offset=from_node.col_offset, + end_lineno=getattr(from_node, "end_lineno", from_node.lineno), + end_col_offset=from_node.col_offset + 1, + value=value, + kind=None, + ) + + def _call_node(self, from_node, func, args): # type: (Any, Any, List[Any]) -> Any + return self._node(ast.Call, from_node, func=func, args=args, keywords=[]) + + def visit_Module(self, module_node): + # type: (ast.Module) -> Any + """ + Insert the import statement for the replacements module + """ + insert_position = self.find_insert_position(module_node) + + definitions_module = self._aspects_spec["definitions_module"] + replacements_import = self._node( + ast.Import, + module_node, + names=[ + ast.alias( + lineno=1, + col_offset=0, + name=definitions_module, + asname=self._aspects_spec["alias_module"], + ) + ], + ) + module_node.body.insert(insert_position, replacements_import) + + definitions_module = self._sinkpoints_spec["definitions_module"] + replacements_import = self._node( + ast.Import, + module_node, + names=[ + ast.alias( + lineno=1, + col_offset=0, + name=definitions_module, + asname=self._sinkpoints_spec["alias_module"], + ) + ], + ) + module_node.body.insert(insert_position, replacements_import) + # Must be called here instead of the start so the line offset is already + # processed + self.generic_visit(module_node) + return module_node + + def visit_FunctionDef(self, def_node): + # type: (ast.FunctionDef) -> Any + """ + Special case for some tests which would enter in a patching + loop otherwise when visiting the check functions + """ + self.replacements_disabled_for_functiondef = def_node.name in self.dont_patch_these_functionsdefs + + if hasattr(def_node.args, "vararg") and def_node.args.vararg: + if def_node.args.vararg.annotation: + _mark_avoid_convert_recursively(def_node.args.vararg.annotation) + + if hasattr(def_node.args, "kwarg") and def_node.args.kwarg: + if 
def_node.args.kwarg.annotation: + _mark_avoid_convert_recursively(def_node.args.kwarg.annotation) + + if hasattr(def_node, "returns"): + _mark_avoid_convert_recursively(def_node.returns) + + for i in def_node.args.args: + if hasattr(i, "annotation"): + _mark_avoid_convert_recursively(i.annotation) + + if hasattr(def_node.args, "kwonlyargs"): + for i in def_node.args.kwonlyargs: + if hasattr(i, "annotation"): + _mark_avoid_convert_recursively(i.annotation) + + if hasattr(def_node.args, "posonlyargs"): + for i in def_node.args.posonlyargs: + if hasattr(i, "annotation"): + _mark_avoid_convert_recursively(i.annotation) + + self.generic_visit(def_node) + self._current_function_name = None + + return def_node + + def visit_Call(self, call_node): # type: (ast.Call) -> Any + """ + Replace a call or method + """ + self.generic_visit(call_node) + func_member = call_node.func + call_modified = False + if self.replacements_disabled_for_functiondef: + return call_node + + if isinstance(func_member, ast.Name) and func_member.id: + # Normal function call with func=Name(...), just change the name + func_name_node = func_member.id + aspect = self._aspect_functions.get(func_name_node) + if aspect: + # Send 0 as flag_added_args value + call_node.args.insert(0, self._int_constant(call_node, 0)) + # Insert original function name as first parameter + call_node.args = self._add_original_function_as_arg(call_node, True) + # Substitute function call + call_node.func = self._attr_node(call_node, aspect) + self.ast_modified = call_modified = True + else: + sink_point = self._sinkpoints_functions.get(func_name_node) + if sink_point: + call_node.func = self._attr_node(call_node, sink_point) + self.ast_modified = call_modified = True + # Call [attr] -> Attribute [value]-> Attribute [value]-> Attribute + # a.b.c.method() + # replaced_method(a.b.c) + elif isinstance(func_member, ast.Attribute): + # Method call: + method_name = func_member.attr + + if self._is_call_excluded(method_name): + # Early return if method is excluded + return call_node + + if self._is_string_format_with_literals(call_node): + return call_node + + aspect = self._aspect_methods.get(method_name) + + if aspect: + # Move the Attribute.value to 'args' + new_arg = func_member.value + call_node.args.insert(0, new_arg) + # Send 1 as flag_added_args value + call_node.args.insert(0, self._int_constant(call_node, 1)) + + # Insert None as first parameter instead of a.b.c.method + # to avoid unexpected side effects such as a.b.read(4).method + call_node.args.insert(0, self._none_constant(call_node)) + + # Create a new Name node for the replacement and set it as node.func + call_node.func = self._attr_node(call_node, aspect) + self.ast_modified = call_modified = True + + elif hasattr(func_member.value, "id") or hasattr(func_member.value, "attr"): + aspect = self._aspect_modules.get(method_name, None) + if aspect: + # Send 0 as flag_added_args value + call_node.args.insert(0, self._int_constant(call_node, 0)) + # Move the Function to 'args' + call_node.args.insert(0, call_node.func) + + # Create a new Name node for the replacement and set it as node.func + call_node.func = self._attr_node(call_node, aspect) + self.ast_modified = call_modified = True + + if self.codetype == CODE_TYPE_FIRST_PARTY: + # Function replacement case + if isinstance(call_node.func, ast.Name): + aspect = self._should_replace_with_taint_sink(call_node, True) + if aspect: + # Send 0 as flag_added_args value + call_node.args.insert(0, self._int_constant(call_node, 0)) + call_node.args = 
self._add_original_function_as_arg(call_node, False) + call_node.func = self._attr_node(call_node, TAINT_SINK_FUNCTION_REPLACEMENT) + self.ast_modified = call_modified = True + + # Method replacement case + elif isinstance(call_node.func, ast.Attribute): + aspect = self._should_replace_with_taint_sink(call_node, False) + if aspect: + # Send 0 as flag_added_args value + call_node.args.insert(0, self._int_constant(call_node, 0)) + # Create a new Name node for the replacement and set it as node.func + call_node.args = self._add_original_function_as_arg(call_node, False) + call_node.func = self._attr_node(call_node, TAINT_SINK_FUNCTION_REPLACEMENT) + self.ast_modified = call_modified = True + + if call_modified: + _set_metric_iast_instrumented_propagation() + + return call_node + + def visit_BinOp(self, call_node): # type: (ast.BinOp) -> Any + """ + Replace a binary operator + """ + self.generic_visit(call_node) + operator = call_node.op + + aspect = self._aspect_operators.get(operator.__class__) + if aspect: + self.ast_modified = True + _set_metric_iast_instrumented_propagation() + + return ast.Call(self._attr_node(call_node, aspect), [call_node.left, call_node.right], []) + + return call_node + + def visit_FormattedValue(self, fmt_value_node): # type: (ast.FormattedValue) -> Any + """ + Visit a FormattedValue node which are the constituent atoms for the + JoinedStr which are used to implement f-strings. + """ + + self.generic_visit(fmt_value_node) + + if hasattr(fmt_value_node, "value") and self._is_node_constant_or_binop(fmt_value_node.value): + return fmt_value_node + + func_name_node = self._attr_node(fmt_value_node, self._aspect_format_value) + + options_int = self._node( + ast.Constant, + fmt_value_node, + value=fmt_value_node.conversion, + kind=None, + ) + + format_spec = fmt_value_node.format_spec if fmt_value_node.format_spec else self._none_constant(fmt_value_node) + call_node = self._call_node( + fmt_value_node, + func=func_name_node, + args=[fmt_value_node.value, options_int, format_spec], + ) + + self.ast_modified = True + _set_metric_iast_instrumented_propagation() + return call_node + + def visit_JoinedStr(self, joinedstr_node): # type: (ast.JoinedStr) -> Any + """ + Replaced the JoinedStr AST node with a Call to the replacement function. Most of + the work inside fstring is done by visit_FormattedValue above. + """ + self.generic_visit(joinedstr_node) + + if all( + map( + lambda x: isinstance(x, ast.FormattedValue) or self._is_node_constant_or_binop(x), + joinedstr_node.values, + ) + ): + return joinedstr_node + + func_name_node = self._attr_node( + joinedstr_node, + self._aspect_build_string, + ctx=ast.Load(), + ) + call_node = self._call_node( + joinedstr_node, + func=func_name_node, + args=joinedstr_node.values, + ) + + self.ast_modified = True + _set_metric_iast_instrumented_propagation() + return call_node + + def visit_AugAssign(self, augassign_node): # type: (ast.AugAssign) -> Any + """Replace an inplace add or multiply.""" + if isinstance(augassign_node.target, ast.Subscript): + # Can't augassign to function call, ignore this node + augassign_node.target.avoid_convert = True # type: ignore[attr-defined] + self.generic_visit(augassign_node) + return augassign_node + + # TODO: Replace an inplace add or multiply (+= / *=) + return augassign_node + + def visit_Assign(self, assign_node): # type: (ast.Assign) -> Any + """ + Add the ignore marks for left-side subscripts or list/tuples to avoid problems + later with the visit_Subscript node. 
+ """ + if isinstance(assign_node.value, ast.Subscript): + if hasattr(assign_node.value, "value") and hasattr(assign_node.value.value, "id"): + # Best effort to avoid converting type definitions + if assign_node.value.value.id in ( + "Callable", + "Dict", + "Generator", + "List", + "Optional", + "Sequence", + "Tuple", + "Type", + "TypeVar", + "Union", + ): + _mark_avoid_convert_recursively(assign_node.value) + + for target in assign_node.targets: + if isinstance(target, ast.Subscript): + # We can't assign to a function call, which is anyway going to rewrite + # the index destination so we just ignore that target + target.avoid_convert = True # type: ignore[attr-defined] + elif isinstance(target, (List, ast.Tuple)): + # Same for lists/tuples on the left side of the assignment + for element in target.elts: + if isinstance(element, ast.Subscript): + element.avoid_convert = True # type: ignore[attr-defined] + + # Create a normal assignment. This way we decompose multiple assignments + self.generic_visit(assign_node) + return assign_node + + def visit_Delete(self, assign_node): # type: (ast.Delete) -> Any + # del replaced_index(foo, bar) would fail so avoid converting the right hand side + # since it's going to be deleted anyway + + for target in assign_node.targets: + if isinstance(target, ast.Subscript): + target.avoid_convert = True # type: ignore[attr-defined] + + self.generic_visit(assign_node) + return assign_node + + def visit_AnnAssign(self, node): # type: (ast.AnnAssign) -> Any + # AnnAssign is a type annotation, we don't need to convert it + # and we avoid converting any subscript inside it. + _mark_avoid_convert_recursively(node) + self.generic_visit(node) + return node + + def visit_ClassDef(self, node): # type: (ast.ClassDef) -> Any + for i in node.bases: + _mark_avoid_convert_recursively(i) + + self.generic_visit(node) + return node + + def visit_Subscript(self, subscr_node): # type: (ast.Subscript) -> Any + """ + Turn an indexes[1] and slices[0:1:2] into the replacement function call + Optimization: dont convert if the indexes are strings + """ + self.generic_visit(subscr_node) + + # We mark nodes to avoid_convert (check visit_Delete, visit_AugAssign, visit_Assign) due to complex + # expressions that raise errors when try to replace with index aspects + if hasattr(subscr_node, "avoid_convert"): + return subscr_node + + # Optimization: String literal slices and indexes are not patched + if self._is_string_node(subscr_node.value): + return subscr_node + + attr_node = self._attr_node(subscr_node, "") + + call_node = self._call_node( + subscr_node, + func=attr_node, + args=[], + ) + if isinstance(subscr_node.slice, ast.Slice): + # Slice[0:1:2]. The other cases in this if are Indexes[0] + aspect_split = self._aspect_slice.split(".") + call_node.func.attr = aspect_split[1] + call_node.func.value.id = aspect_split[0] + none_node = self._none_constant(subscr_node) + lower = none_node if subscr_node.slice.lower is None else subscr_node.slice.lower + upper = none_node if subscr_node.slice.upper is None else subscr_node.slice.upper + step = none_node if subscr_node.slice.step is None else subscr_node.slice.step + call_node.args.extend([subscr_node.value, lower, upper, step]) + self.ast_modified = True + elif PY39_PLUS: + if self._is_string_node(subscr_node.slice): + return subscr_node + # In Py39+ the if subscr_node.slice member is not a Slice, is directly an unwrapped value + # for the index (e.g. 
Constant for a number, Name for a var, etc) + aspect_split = self._aspect_index.split(".") + call_node.func.attr = aspect_split[1] + call_node.func.value.id = aspect_split[0] + call_node.args.extend([subscr_node.value, subscr_node.slice]) + # TODO: python 3.8 isn't working correctly with index_aspect, tests raise: + # corrupted size vs. prev_size in fastbins + # Test failed with exit code -6 + # https://app.circleci.com/pipelines/github/DataDog/dd-trace-py/46665/workflows/3cf1257c-feaf-4653-bb9c-fb840baa1776/jobs/3031799 + # elif isinstance(subscr_node.slice, ast.Index): + # if self._is_string_node(subscr_node.slice.value): # type: ignore[attr-defined] + # return subscr_node + # aspect_split = self._aspect_index.split(".") + # call_node.func.attr = aspect_split[1] + # call_node.func.value.id = aspect_split[0] + # call_node.args.extend([subscr_node.value, subscr_node.slice.value]) # type: ignore[attr-defined] + else: + return subscr_node + + self.ast_modified = True + return call_node diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py new file mode 100644 index 0000000..64a11c8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py @@ -0,0 +1,13 @@ +class Input_info(object): + __slots__ = ["name", "value", "origin"] + + def __init__(self, name, value, origin): + self.name = name + self.value = value + self.origin = origin + + def __eq__(self, other): + return self.name == other.name and self.value == other.value and self.origin == other.origin + + def __repr__(self): + return "input_info(%s, %s, %s)" % (str(self.name), str(self.value), str(self.origin)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py new file mode 100644 index 0000000..24da1ee --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 + +from ddtrace.internal.logger import get_logger + +from ._ast.ast_patching import astpatch_module +from ._utils import _is_iast_enabled + + +log = get_logger(__name__) + + +IS_IAST_ENABLED = _is_iast_enabled() + + +def _exec_iast_patched_module(module_watchdog, module): + patched_source = None + if IS_IAST_ENABLED: + try: + module_path, patched_source = astpatch_module(module) + except Exception: + log.debug("Unexpected exception while AST patching", exc_info=True) + patched_source = None + + if patched_source: + # Patched source is executed instead of original module + compiled_code = compile(patched_source, module_path, "exec") + exec(compiled_code, module.__dict__) # nosec B102 + elif module_watchdog.loader is not None: + module_watchdog.loader.exec_module(module) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py new file mode 100644 index 0000000..b1fad73 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py @@ -0,0 +1,163 @@ +import os +import sys +import traceback +from typing import Dict + +from ddtrace.appsec._constants import IAST +from ddtrace.appsec._constants import IAST_SPAN_TAGS +from ddtrace.appsec._deduplications import deduplication +from ddtrace.internal import telemetry +from ddtrace.internal.logger import get_logger +from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_IAST + + +log = get_logger(__name__) + +TELEMETRY_OFF_NAME = 
"OFF" +TELEMETRY_DEBUG_NAME = "DEBUG" +TELEMETRY_MANDATORY_NAME = "MANDATORY" +TELEMETRY_INFORMATION_NAME = "INFORMATION" + +TELEMETRY_DEBUG_VERBOSITY = 10 +TELEMETRY_INFORMATION_VERBOSITY = 20 +TELEMETRY_MANDATORY_VERBOSITY = 30 +TELEMETRY_OFF_VERBOSITY = 40 + +METRICS_REPORT_LVLS = ( + (TELEMETRY_DEBUG_VERBOSITY, TELEMETRY_DEBUG_NAME), + (TELEMETRY_INFORMATION_VERBOSITY, TELEMETRY_INFORMATION_NAME), + (TELEMETRY_MANDATORY_VERBOSITY, TELEMETRY_MANDATORY_NAME), + (TELEMETRY_OFF_VERBOSITY, TELEMETRY_OFF_NAME), +) + +_IAST_SPAN_METRICS: Dict[str, int] = {} + + +def get_iast_metrics_report_lvl(*args, **kwargs): + report_lvl_name = os.environ.get(IAST.TELEMETRY_REPORT_LVL, TELEMETRY_INFORMATION_NAME).upper() + report_lvl = 3 + for lvl, lvl_name in METRICS_REPORT_LVLS: + if report_lvl_name == lvl_name: + return lvl + return report_lvl + + +def metric_verbosity(lvl): + def wrapper(f): + if lvl >= get_iast_metrics_report_lvl(): + try: + return f + except Exception: + log.warning("Error reporting IAST metrics", exc_info=True) + return lambda: None # noqa: E731 + + return wrapper + + +@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) +@deduplication +def _set_iast_error_metric(msg): + # type: (str) -> None + # Due to format_exc and format_exception returns the error and the last frame + try: + exception_type, exception_instance, _traceback_list = sys.exc_info() + res = [] + # first 3 frames are this function, the exception in aspects and the error line + res.extend(traceback.format_stack(limit=10)[:-3]) + + # get the frame with the error and the error message + result = traceback.format_exception(exception_type, exception_instance, _traceback_list) + res.extend(result[1:]) + + stack_trace = "".join(res) + tags = { + "lib_language": "python", + } + telemetry.telemetry_writer.add_log("ERROR", msg, stack_trace=stack_trace, tags=tags) + except Exception: + log.warning("Error reporting ASM WAF logs metrics", exc_info=True) + + +@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) +def _set_metric_iast_instrumented_source(source_type): + from ._taint_tracking._native.taint_tracking import origin_to_str # noqa: F401 + + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.source", 1, (("source_type", origin_to_str(source_type)),) + ) + + +@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) +def _set_metric_iast_instrumented_propagation(): + telemetry.telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.propagation", 1) + + +@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY) +def _set_metric_iast_instrumented_sink(vulnerability_type, counter=1): + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.sink", counter, (("vulnerability_type", vulnerability_type),) + ) + + +@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY) +def _set_metric_iast_executed_source(source_type): + from ._taint_tracking._native.taint_tracking import origin_to_str # noqa: F401 + + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_IAST, "executed.source", 1, (("source_type", origin_to_str(source_type)),) + ) + + +@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY) +def _set_metric_iast_executed_sink(vulnerability_type): + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_IAST, "executed.sink", 1, (("vulnerability_type", vulnerability_type),) + ) + + +def _request_tainted(): + from ._taint_tracking import num_objects_tainted + + return num_objects_tainted() + + 
+@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY) +def _set_metric_iast_request_tainted(): + total_objects_tainted = _request_tainted() + if total_objects_tainted > 0: + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_IAST, "request.tainted", total_objects_tainted + ) + + +def _set_span_tag_iast_request_tainted(span): + total_objects_tainted = _request_tainted() + + if total_objects_tainted > 0: + span.set_tag(IAST_SPAN_TAGS.TELEMETRY_REQUEST_TAINTED, total_objects_tainted) + + +def _set_span_tag_iast_executed_sink(span): + data = get_iast_span_metrics() + + if data is not None: + for key, value in data.items(): + if key.startswith(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK): + span.set_tag(key, value) + + reset_iast_span_metrics() + + +def increment_iast_span_metric(prefix: str, metric_key: str, counter: int = 1) -> None: + data = get_iast_span_metrics() + full_key = prefix + "." + metric_key.lower() + result = data.get(full_key, 0) + data[full_key] = result + counter + + +def get_iast_span_metrics() -> Dict: + return _IAST_SPAN_METRICS + + +def reset_iast_span_metrics() -> None: + _IAST_SPAN_METRICS.clear() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py new file mode 100644 index 0000000..7862375 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py @@ -0,0 +1,133 @@ +""" +The Overhead control engine (OCE) is an element that by design ensures that the overhead does not go over a maximum +limit. It will measure operations being executed in a request and it will deactivate detection +(and therefore reduce the overhead to nearly 0) if a certain threshold is reached. +""" +import os +import threading +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.sampler import RateSampler + + +if TYPE_CHECKING: # pragma: no cover + from typing import Set # noqa:F401 + from typing import Tuple # noqa:F401 + from typing import Type # noqa:F401 + + from ddtrace.span import Span # noqa:F401 + +log = get_logger(__name__) + + +def get_request_sampling_value(): # type: () -> float + # Percentage of requests analyzed by IAST (default: 30%) + return float(os.environ.get("DD_IAST_REQUEST_SAMPLING", 30.0)) + + +MAX_REQUESTS = int(os.environ.get("DD_IAST_MAX_CONCURRENT_REQUESTS", 2)) +MAX_VULNERABILITIES_PER_REQUEST = int(os.environ.get("DD_IAST_VULNERABILITIES_PER_REQUEST", 2)) + + +class Operation(object): + """Common operation related to Overhead Control Engine (OCE). Every vulnerabilities/taint_sinks should inherit + from this class. OCE instance calls these methods to control the overhead produced in each request. 
+ """ + + _lock = threading.Lock() + _vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST + _reported_vulnerabilities = set() # type: Set[Tuple[str, int]] + + @classmethod + def reset(cls): + cls._vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST + cls._reported_vulnerabilities = set() + + @classmethod + def acquire_quota(cls): + # type: () -> bool + cls._lock.acquire() + result = False + if cls._vulnerability_quota > 0: + cls._vulnerability_quota -= 1 + result = True + cls._lock.release() + return result + + @classmethod + def increment_quota(cls): + # type: () -> bool + cls._lock.acquire() + result = False + if cls._vulnerability_quota < MAX_VULNERABILITIES_PER_REQUEST: + cls._vulnerability_quota += 1 + result = True + cls._lock.release() + return result + + @classmethod + def has_quota(cls): + # type: () -> bool + cls._lock.acquire() + result = cls._vulnerability_quota > 0 + cls._lock.release() + return result + + @classmethod + def is_not_reported(cls, filename, lineno): + # type: (str, int) -> bool + vulnerability_id = (filename, lineno) + if vulnerability_id in cls._reported_vulnerabilities: + return False + + cls._reported_vulnerabilities.add(vulnerability_id) + return True + + +class OverheadControl(object): + """This class is meant to control the overhead introduced by IAST analysis. + The goal is to do sampling at different levels of the IAST analysis (per process, per request, etc) + """ + + _lock = threading.Lock() + _request_quota = MAX_REQUESTS + _vulnerabilities = set() # type: Set[Type[Operation]] + _sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0) + + def reconfigure(self): + self._sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0) + + def acquire_request(self, span): + # type: (Span) -> bool + """Decide whether if IAST analysis will be done for this request. + - Block a request's quota at start of the request to limit simultaneous requests analyzed. + - Use sample rating to analyze only a percentage of the total requests (30% by default). + """ + if self._request_quota <= 0 or not self._sampler.sample(span): + return False + + with self._lock: + if self._request_quota <= 0: + return False + + self._request_quota -= 1 + + return True + + def release_request(self): + """increment request's quota at end of the request.""" + with self._lock: + self._request_quota += 1 + self.vulnerabilities_reset_quota() + + def register(self, klass): + # type: (Type[Operation]) -> Type[Operation] + """Register vulnerabilities/taint_sinks. 
This set of elements will restart for each request.""" + self._vulnerabilities.add(klass) + return klass + + def vulnerabilities_reset_quota(self): + # type: () -> None + for k in self._vulnerabilities: + k.reset() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py new file mode 100644 index 0000000..d6d8fef --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py @@ -0,0 +1,177 @@ +import ctypes +import gc +import sys +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.vendor.wrapt import FunctionWrapper +from ddtrace.vendor.wrapt import resolve_path + +from ._utils import _is_iast_enabled + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import Dict # noqa:F401 + from typing import Optional # noqa:F401 + + +_DD_ORIGINAL_ATTRIBUTES = {} # type: Dict[Any, Any] + +log = get_logger(__name__) + + +def set_and_check_module_is_patched(module_str, default_attr="_datadog_patch"): + # type: (str, str) -> Optional[bool] + try: + __import__(module_str) + module = sys.modules[module_str] + if getattr(module, default_attr, False): + return False + setattr(module, default_attr, True) + except ImportError: + pass + return True + + +def set_module_unpatched(module_str, default_attr="_datadog_patch"): + # type: (str, str) -> None + try: + __import__(module_str) + module = sys.modules[module_str] + setattr(module, default_attr, False) + except ImportError: + pass + + +def try_wrap_function_wrapper(module, name, wrapper): + # type: (str, str, Callable) -> None + try: + wrap_object(module, name, FunctionWrapper, (wrapper,)) + except (ImportError, AttributeError): + log.debug("IAST patching. Module %s.%s not exists", module, name) + + +def try_unwrap(module, name): + try: + (parent, attribute, _) = resolve_path(module, name) + if (parent, attribute) in _DD_ORIGINAL_ATTRIBUTES: + original = _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)] + apply_patch(parent, attribute, original) + del _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)] + except ModuleNotFoundError: + pass + + +def apply_patch(parent, attribute, replacement): + try: + current_attribute = getattr(parent, attribute) + # Avoid overwriting the original function if we call this twice + if not isinstance(current_attribute, FunctionWrapper): + _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)] = current_attribute + setattr(parent, attribute, replacement) + except (TypeError, AttributeError): + patch_builtins(parent, attribute, replacement) + + +def wrap_object(module, name, factory, args=(), kwargs=None): + if kwargs is None: + kwargs = {} + (parent, attribute, original) = resolve_path(module, name) + wrapper = factory(original, *args, **kwargs) + apply_patch(parent, attribute, wrapper) + return wrapper + + +def patchable_builtin(klass): + refs = gc.get_referents(klass.__dict__) + return refs[0] + + +def patch_builtins(klass, attr, value): + """Based on forbiddenfruit package: + https://github.com/clarete/forbiddenfruit/blob/master/forbiddenfruit/__init__.py#L421 + --- + Patch a built-in `klass` with `attr` set to `value` + + This function monkey-patches the built-in python object `attr` adding a new + attribute to it. You can add any kind of argument to the `class`. + + It's possible to attach methods as class methods, just do the following: + + >>> def myclassmethod(cls): + ... 
return cls(1.5) + >>> curse(float, "myclassmethod", classmethod(myclassmethod)) + >>> float.myclassmethod() + 1.5 + + Methods will be automatically bound, so don't forget to add a self + parameter to them, like this: + + >>> def hello(self): + ... return self * 2 + >>> curse(str, "hello", hello) + >>> "yo".hello() + "yoyo" + """ + dikt = patchable_builtin(klass) + + old_value = dikt.get(attr, None) + old_name = "_c_%s" % attr # do not use .format here, it breaks py2.{5,6} + + # Patch the thing + dikt[attr] = value + + if old_value: + dikt[old_name] = old_value + + try: + dikt[attr].__name__ = old_value.__name__ + except (AttributeError, TypeError): # py2.5 will raise `TypeError` + pass + try: + dikt[attr].__qualname__ = old_value.__qualname__ + except AttributeError: + pass + + ctypes.pythonapi.PyType_Modified(ctypes.py_object(klass)) + + +def if_iast_taint_returned_object_for(origin, wrapped, instance, args, kwargs): + value = wrapped(*args, **kwargs) + + if _is_iast_enabled(): + try: + from ._taint_tracking import is_pyobject_tainted + from ._taint_tracking import taint_pyobject + from .processor import AppSecIastSpanProcessor + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return value + + if not is_pyobject_tainted(value): + name = str(args[0]) if len(args) else "http.request.body" + return taint_pyobject(pyobject=value, source_name=name, source_value=value, source_origin=origin) + except Exception: + log.debug("Unexpected exception while tainting pyobject", exc_info=True) + return value + + +def if_iast_taint_yield_tuple_for(origins, wrapped, instance, args, kwargs): + if _is_iast_enabled(): + from ._taint_tracking import taint_pyobject + from .processor import AppSecIastSpanProcessor + + if not AppSecIastSpanProcessor.is_span_analyzed(): + for key, value in wrapped(*args, **kwargs): + yield key, value + + for key, value in wrapped(*args, **kwargs): + new_key = taint_pyobject(pyobject=key, source_name=key, source_value=key, source_origin=origins[0]) + new_value = taint_pyobject(pyobject=value, source_name=key, source_value=value, source_origin=origins[1]) + yield new_key, new_value + + else: + for key, value in wrapped(*args, **kwargs): + yield key, value diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py new file mode 100644 index 0000000..05a6900 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py @@ -0,0 +1,27 @@ +from ddtrace.vendor.wrapt.importer import when_imported + + +IAST_PATCH = { + "command_injection": True, + "path_traversal": True, + "weak_cipher": True, + "weak_hash": True, +} + + +def patch_iast(patch_modules=IAST_PATCH): + """Load IAST vulnerabilities sink points. 
+ + IAST_PATCH: list of implemented vulnerabilities + """ + # TODO: Devise the correct patching strategy for IAST + from ddtrace._monkey import _on_import_factory + + for module in (m for m, e in patch_modules.items() if e): + when_imported("hashlib")( + _on_import_factory(module, prefix="ddtrace.appsec._iast.taint_sinks", raise_errors=False) + ) + + when_imported("json")( + _on_import_factory("json_tainting", prefix="ddtrace.appsec._iast._patches", raise_errors=False) + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py new file mode 100644 index 0000000..0984b7a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py @@ -0,0 +1,82 @@ +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + +from .._patch import set_and_check_module_is_patched +from .._patch import set_module_unpatched +from .._patch import try_unwrap +from .._patch import try_wrap_function_wrapper + + +log = get_logger(__name__) + + +_DEFAULT_ATTR = "_datadog_json_tainting_patch" + + +def get_version(): + # type: () -> str + return "" + + +def unpatch_iast(): + # type: () -> None + set_module_unpatched("json", default_attr=_DEFAULT_ATTR) + try_unwrap("json", "loads") + if asm_config._iast_lazy_taint: + try_unwrap("json.encoder", "JSONEncoder.default") + try_unwrap("simplejson.encoder", "JSONEncoder.default") + + +def patch(): + # type: () -> None + """Wrap functions which interact with file system.""" + if not set_and_check_module_is_patched("json", default_attr=_DEFAULT_ATTR): + return + try_wrap_function_wrapper("json", "loads", wrapped_loads) + if asm_config._iast_lazy_taint: + try_wrap_function_wrapper("json.encoder", "JSONEncoder.default", patched_json_encoder_default) + try_wrap_function_wrapper("simplejson.encoder", "JSONEncoder.default", patched_json_encoder_default) + + +def wrapped_loads(wrapped, instance, args, kwargs): + from .._taint_utils import taint_structure + + obj = wrapped(*args, **kwargs) + if asm_config._iast_enabled: + try: + from .._taint_tracking import get_tainted_ranges + from .._taint_tracking import is_pyobject_tainted + from .._taint_tracking import taint_pyobject + from ..processor import AppSecIastSpanProcessor + + if not AppSecIastSpanProcessor.is_span_analyzed(): + return obj + + if is_pyobject_tainted(args[0]) and obj: + # tainting object + ranges = get_tainted_ranges(args[0]) + if not ranges: + return obj + # take the first source as main source + source = ranges[0].source + if isinstance(obj, dict): + obj = taint_structure(obj, source.origin, source.origin) + elif isinstance(obj, list): + obj = taint_structure(obj, source.origin, source.origin) + elif isinstance(obj, (str, bytes, bytearray)): + obj = taint_pyobject(obj, source.name, source.value, source.origin) + pass + except Exception: + log.debug("Unexpected exception while reporting vulnerability", exc_info=True) + raise + return obj + + +def patched_json_encoder_default(original_func, instance, args, kwargs): + from .._taint_utils import LazyTaintDict + from .._taint_utils import LazyTaintList + + if isinstance(args[0], (LazyTaintList, LazyTaintDict)): + return args[0]._obj + + return original_func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..28a86bf Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py new file mode 100644 index 0000000..97df240 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +# +from typing import TYPE_CHECKING # noqa:F401 + + +if TYPE_CHECKING: + from typing import Dict # noqa:F401 + from typing import Tuple # noqa:F401 + + from ._taint_tracking import Source # noqa:F401 + +_IAST_TAINT_DICT = {} # type: Dict[int, Tuple[Tuple[Source, int, int],...]] + + +def get_taint_dict(): # type: () -> Dict[int, Tuple[Tuple[Source, int, int],...]] + return _IAST_TAINT_DICT + + +def clear_taint_mapping(): # type: () -> None + global _IAST_TAINT_DICT + _IAST_TAINT_DICT = {} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt new file mode 100644 index 0000000..9caa9fc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt @@ -0,0 +1,76 @@ +cmake_minimum_required(VERSION 3.19) +include(FetchContent) + +set(APP_NAME _native) +option(BUILD_MACOS "Build for MacOS" OFF) + +project(${APP_NAME}) + +set(CMAKE_CXX_STANDARD 17) + +# -U_FORTIFY_SOURCE to fix a bug in alpine and pybind11 +# https://github.com/pybind/pybind11/issues/1650 +# https://gitlab.alpinelinux.org/alpine/aports/-/issues/8626 +add_compile_options(-fPIC -fexceptions -fvisibility=hidden -fpermissive -pthread -Wall -Wno-unknown-pragmas -U_FORTIFY_SOURCE) + +if(BUILD_MACOS) + # https://pybind11.readthedocs.io/en/stable/compiling.html#building-manually + message(STATUS "Compile options for MacOS") + add_link_options(-ldl -undefined dynamic_lookup) +else() + message(STATUS "Compile options for Linux/Win") +endif(BUILD_MACOS) +unset(BUILD_MACOS CACHE) + +if(CMAKE_BUILD_TYPE STREQUAL "Release") + message("Release mode: using abseil") + FetchContent_Declare( + absl + URL "https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.1.zip" + ) + FetchContent_MakeAvailable(absl) +else() + message("Debug mode: not using abseil") +endif() + +include_directories(".") + +file(GLOB SOURCE_FILES "*.cpp" + "Aspects/*.cpp" + "Initializer/*.cpp" + "TaintedOps/*.cpp" + "TaintTracking/*.cpp" + "Utils/*.cpp") +file(GLOB HEADER_FILES "*.h" + "Aspects/*.h" + "Initializer/*.h" + "TaintedOps/*.h" + "TaintTracking/*.h" + "Utils/*.h" + ) + +# Debug messages +message(STATUS "PYTHON_LIBRARIES = ${Python_LIBRARIES}") +message(STATUS "PYTHON_EXECUTABLE = ${Python_EXECUTABLE}") +message(STATUS "PYTHON_INCLUDE_DIRS = ${Python_INCLUDE_DIRS}") +message(STATUS "Python_EXECUTABLE = ${Python_EXECUTABLE}") + +add_subdirectory(_vendor/pybind11) + +pybind11_add_module(_native SHARED ${SOURCE_FILES} ${HEADER_FILES}) +get_filename_component(PARENT_DIR ${CMAKE_CURRENT_LIST_DIR} DIRECTORY) +set_target_properties( + _native + PROPERTIES + LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}" +) + +if(CMAKE_BUILD_TYPE STREQUAL "Release") + target_link_libraries(${APP_NAME} PRIVATE absl::node_hash_map) +endif() + +install(TARGETS _native DESTINATION + LIBRARY DESTINATION ${LIB_INSTALL_DIR} 
+ ARCHIVE DESTINATION ${LIB_INSTALL_DIR} + RUNTIME DESTINATION ${LIB_INSTALL_DIR} +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt new file mode 100644 index 0000000..f7f67f2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt @@ -0,0 +1,37 @@ +# Compile extension with Cmake + +```bash +sh clean.sh +cmake -DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python3.11 . && \ + make -j _native && \ + mv lib_native.so _native.so +``` + +## Verify compilation was correctly + +```bash +python3.11 +``` +```python +from _native import Source, TaintRange +source = Source(name="aaa", value="bbbb", origin="ccc") +source = Source("aaa", "bbbb", "ccc") +``` + +## Clean Cmake folders + +```bash +./clean.sh +``` + + +## Debug with Valgrind + +wget http://svn.python.org/projects/python/trunk/Misc/valgrind-python.supp + +valgrind --tool=memcheck --suppressions=ddtrace/appsec/_iast/_taint_tracking/valgrind-python.supp \ +python ddtrace/appsec/_iast/_taint_tracking/bench_overload.py --log-file="valgrind_bench_overload.out" + +# Debug with gdb + +gdb --args python -m pytest tests/appsec/iast/test_command_injection.py diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py new file mode 100644 index 0000000..9506d7a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py @@ -0,0 +1,143 @@ +# #!/usr/bin/env python3 +# flake8: noqa +from typing import TYPE_CHECKING + +from .._metrics import _set_metric_iast_executed_source +from .._utils import _is_python_version_supported + + +if _is_python_version_supported(): + from .. 
import oce + from ._native import ops + from ._native.aspect_helpers import _convert_escaped_text_to_tainted_text + from ._native.aspect_helpers import as_formatted_evidence + from ._native.aspect_helpers import common_replace + from ._native.aspect_format import _format_aspect + from ._native.aspect_helpers import parse_params + from ._native.initializer import active_map_addreses_size + from ._native.initializer import create_context + from ._native.initializer import debug_taint_map + from ._native.initializer import destroy_context + from ._native.initializer import initializer_size + from ._native.initializer import num_objects_tainted + from ._native.initializer import reset_context + from ._native.taint_tracking import OriginType + from ._native.taint_tracking import Source + from ._native.taint_tracking import TagMappingMode + from ._native.taint_tracking import are_all_text_all_ranges + from ._native.taint_tracking import get_range_by_hash + from ._native.taint_tracking import get_ranges + from ._native.taint_tracking import is_notinterned_notfasttainted_unicode + from ._native.taint_tracking import is_tainted + from ._native.taint_tracking import origin_to_str + from ._native.taint_tracking import set_fast_tainted_if_notinterned_unicode + from ._native.taint_tracking import set_ranges + from ._native.taint_tracking import copy_ranges_from_strings + from ._native.taint_tracking import copy_and_shift_ranges_from_strings + from ._native.taint_tracking import shift_taint_range + from ._native.taint_tracking import shift_taint_ranges + from ._native.taint_tracking import str_to_origin + from ._native.taint_tracking import taint_range as TaintRange + + new_pyobject_id = ops.new_pyobject_id + set_ranges_from_values = ops.set_ranges_from_values + is_pyobject_tainted = is_tainted + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import List + from typing import Tuple + from typing import Union + + +__all__ = [ + "_convert_escaped_text_to_tainted_text", + "new_pyobject_id", + "setup", + "Source", + "OriginType", + "TagMappingMode", + "TaintRange", + "get_ranges", + "set_ranges", + "copy_ranges_from_strings", + "copy_and_shift_ranges_from_strings", + "are_all_text_all_ranges", + "shift_taint_range", + "shift_taint_ranges", + "get_range_by_hash", + "is_notinterned_notfasttainted_unicode", + "set_fast_tainted_if_notinterned_unicode", + "aspect_helpers", + "reset_context", + "destroy_context", + "initializer_size", + "active_map_addreses_size", + "create_context", + "str_to_origin", + "origin_to_str", + "common_replace", + "_format_aspect", + "as_formatted_evidence", + "parse_params", + "num_objects_tainted", + "debug_taint_map", +] + + +def taint_pyobject(pyobject, source_name, source_value, source_origin=None): + # type: (Any, Any, Any, OriginType) -> Any + + # Pyobject must be Text with len > 1 + if not pyobject or not isinstance(pyobject, (str, bytes, bytearray)): + return pyobject + + if isinstance(source_name, (bytes, bytearray)): + source_name = str(source_name, encoding="utf8", errors="ignore") + if isinstance(source_name, OriginType): + source_name = origin_to_str(source_name) + + if isinstance(source_value, (bytes, bytearray)): + source_value = str(source_value, encoding="utf8", errors="ignore") + if source_origin is None: + source_origin = OriginType.PARAMETER + + pyobject_newid = set_ranges_from_values(pyobject, len(pyobject), source_name, source_value, source_origin) + _set_metric_iast_executed_source(source_origin) + return pyobject_newid + + 
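
The `taint_ranges_as_evidence_info` helper a little further down converts a tainted string and its taint ranges into the value-parts structure used as vulnerability evidence. The following standalone sketch mirrors that splitting logic with plain named tuples standing in for the native `TaintRange`/`Source` objects (`FakeSource`, `FakeRange` and `split_into_value_parts` are illustrative names, not part of this diff):

```python
from collections import namedtuple

FakeSource = namedtuple("FakeSource", ["name", "value", "origin"])
FakeRange = namedtuple("FakeRange", ["start", "length", "source"])


def split_into_value_parts(text, ranges):
    # Ranges are assumed ordered and non-overlapping, as in the original helper.
    value_parts, sources, current_pos = [], [], 0
    for r in ranges:
        if r.start > current_pos:
            value_parts.append({"value": text[current_pos:r.start]})
        if r.source not in sources:
            sources.append(r.source)
        value_parts.append(
            {"value": text[r.start:r.start + r.length], "source": sources.index(r.source)}
        )
        current_pos = r.start + r.length
    if current_pos < len(text):
        value_parts.append({"value": text[current_pos:]})
    return value_parts, sources


src = FakeSource("http.request.parameter", "bob", "parameter")
query = "SELECT * FROM users WHERE name = 'bob'"
parts, sources = split_into_value_parts(query, [FakeRange(34, 3, src)])
# parts == [{"value": "SELECT * FROM users WHERE name = '"},
#           {"value": "bob", "source": 0},
#           {"value": "'"}]
```
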
+def taint_pyobject_with_ranges(pyobject, ranges): # type: (Any, tuple) -> None + set_ranges(pyobject, tuple(ranges)) + + +def get_tainted_ranges(pyobject): # type: (Any) -> tuple + return get_ranges(pyobject) + + +def taint_ranges_as_evidence_info(pyobject): + # type: (Any) -> Tuple[List[Dict[str, Union[Any, int]]], list[Source]] + value_parts = [] + sources = [] + current_pos = 0 + tainted_ranges = get_tainted_ranges(pyobject) + if not len(tainted_ranges): + return ([{"value": pyobject}], []) + + for _range in tainted_ranges: + if _range.start > current_pos: + value_parts.append({"value": pyobject[current_pos : _range.start]}) + + if _range.source not in sources: + sources.append(_range.source) + + value_parts.append( + {"value": pyobject[_range.start : _range.start + _range.length], "source": sources.index(_range.source)} + ) + current_pos = _range.start + _range.length + + if current_pos < len(pyobject): + value_parts.append({"value": pyobject[current_pos:]}) + + return value_parts, sources diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..ee286c9 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt new file mode 100644 index 0000000..87ec103 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt @@ -0,0 +1,322 @@ +# CMakeLists.txt -- Build system for the pybind11 modules +# +# Copyright (c) 2015 Wenzel Jakob +# +# All rights reserved. Use of this source code is governed by a +# BSD-style license that can be found in the LICENSE file. + +cmake_minimum_required(VERSION 3.5) + +# The `cmake_minimum_required(VERSION 3.5...3.26)` syntax does not work with +# some versions of VS that have a patched CMake 3.11. 
This forces us to emulate +# the behavior using the following workaround: +if(${CMAKE_VERSION} VERSION_LESS 3.26) + cmake_policy(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}) +else() + cmake_policy(VERSION 3.26) +endif() + +# Avoid infinite recursion if tests include this as a subdirectory +if(DEFINED PYBIND11_MASTER_PROJECT) + return() +endif() + +# Extract project version from source +file(STRINGS "${CMAKE_CURRENT_SOURCE_DIR}/include/pybind11/detail/common.h" + pybind11_version_defines REGEX "#define PYBIND11_VERSION_(MAJOR|MINOR|PATCH) ") + +foreach(ver ${pybind11_version_defines}) + if(ver MATCHES [[#define PYBIND11_VERSION_(MAJOR|MINOR|PATCH) +([^ ]+)$]]) + set(PYBIND11_VERSION_${CMAKE_MATCH_1} "${CMAKE_MATCH_2}") + endif() +endforeach() + +if(PYBIND11_VERSION_PATCH MATCHES [[\.([a-zA-Z0-9]+)$]]) + set(pybind11_VERSION_TYPE "${CMAKE_MATCH_1}") +endif() +string(REGEX MATCH "^[0-9]+" PYBIND11_VERSION_PATCH "${PYBIND11_VERSION_PATCH}") + +project( + pybind11 + LANGUAGES CXX + VERSION "${PYBIND11_VERSION_MAJOR}.${PYBIND11_VERSION_MINOR}.${PYBIND11_VERSION_PATCH}") + +# Standard includes +include(GNUInstallDirs) +include(CMakePackageConfigHelpers) +include(CMakeDependentOption) + +if(NOT pybind11_FIND_QUIETLY) + message(STATUS "pybind11 v${pybind11_VERSION} ${pybind11_VERSION_TYPE}") +endif() + +# Check if pybind11 is being used directly or via add_subdirectory +if(CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR) + ### Warn if not an out-of-source builds + if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR) + set(lines + "You are building in-place. If that is not what you intended to " + "do, you can clean the source directory with:\n" + "rm -r CMakeCache.txt CMakeFiles/ cmake_uninstall.cmake pybind11Config.cmake " + "pybind11ConfigVersion.cmake tests/CMakeFiles/\n") + message(AUTHOR_WARNING ${lines}) + endif() + + set(PYBIND11_MASTER_PROJECT ON) + + if(OSX AND CMAKE_VERSION VERSION_LESS 3.7) + # Bug in macOS CMake < 3.7 is unable to download catch + message(WARNING "CMAKE 3.7+ needed on macOS to download catch, and newer HIGHLY recommended") + elseif(WINDOWS AND CMAKE_VERSION VERSION_LESS 3.8) + # Only tested with 3.8+ in CI. + message(WARNING "CMAKE 3.8+ tested on Windows, previous versions untested") + endif() + + message(STATUS "CMake ${CMAKE_VERSION}") + + if(CMAKE_CXX_STANDARD) + set(CMAKE_CXX_EXTENSIONS OFF) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + endif() + + set(pybind11_system "") + + set_property(GLOBAL PROPERTY USE_FOLDERS ON) +else() + set(PYBIND11_MASTER_PROJECT OFF) + set(pybind11_system SYSTEM) +endif() + +# Options +option(PYBIND11_INSTALL "Install pybind11 header files?" ${PYBIND11_MASTER_PROJECT}) +option(PYBIND11_TEST "Build pybind11 test suite?" 
${PYBIND11_MASTER_PROJECT}) +option(PYBIND11_NOPYTHON "Disable search for Python" OFF) +option(PYBIND11_SIMPLE_GIL_MANAGEMENT + "Use simpler GIL management logic that does not support disassociation" OFF) +set(PYBIND11_INTERNALS_VERSION + "" + CACHE STRING "Override the ABI version, may be used to enable the unstable ABI.") + +if(PYBIND11_SIMPLE_GIL_MANAGEMENT) + add_compile_definitions(PYBIND11_SIMPLE_GIL_MANAGEMENT) +endif() + +cmake_dependent_option( + USE_PYTHON_INCLUDE_DIR + "Install pybind11 headers in Python include directory instead of default installation prefix" + OFF "PYBIND11_INSTALL" OFF) + +cmake_dependent_option(PYBIND11_FINDPYTHON "Force new FindPython" OFF + "NOT CMAKE_VERSION VERSION_LESS 3.12" OFF) + +# NB: when adding a header don't forget to also add it to setup.py +set(PYBIND11_HEADERS + include/pybind11/detail/class.h + include/pybind11/detail/common.h + include/pybind11/detail/descr.h + include/pybind11/detail/init.h + include/pybind11/detail/internals.h + include/pybind11/detail/type_caster_base.h + include/pybind11/detail/typeid.h + include/pybind11/attr.h + include/pybind11/buffer_info.h + include/pybind11/cast.h + include/pybind11/chrono.h + include/pybind11/common.h + include/pybind11/complex.h + include/pybind11/options.h + include/pybind11/eigen.h + include/pybind11/eigen/common.h + include/pybind11/eigen/matrix.h + include/pybind11/eigen/tensor.h + include/pybind11/embed.h + include/pybind11/eval.h + include/pybind11/gil.h + include/pybind11/iostream.h + include/pybind11/functional.h + include/pybind11/numpy.h + include/pybind11/operators.h + include/pybind11/pybind11.h + include/pybind11/pytypes.h + include/pybind11/stl.h + include/pybind11/stl_bind.h + include/pybind11/stl/filesystem.h + include/pybind11/type_caster_pyobject_ptr.h) + +# Compare with grep and warn if mismatched +if(PYBIND11_MASTER_PROJECT AND NOT CMAKE_VERSION VERSION_LESS 3.12) + file( + GLOB_RECURSE _pybind11_header_check + LIST_DIRECTORIES false + RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" + CONFIGURE_DEPENDS "include/pybind11/*.h") + set(_pybind11_here_only ${PYBIND11_HEADERS}) + set(_pybind11_disk_only ${_pybind11_header_check}) + list(REMOVE_ITEM _pybind11_here_only ${_pybind11_header_check}) + list(REMOVE_ITEM _pybind11_disk_only ${PYBIND11_HEADERS}) + if(_pybind11_here_only) + message(AUTHOR_WARNING "PYBIND11_HEADERS has extra files:" ${_pybind11_here_only}) + endif() + if(_pybind11_disk_only) + message(AUTHOR_WARNING "PYBIND11_HEADERS is missing files:" ${_pybind11_disk_only}) + endif() +endif() + +# CMake 3.12 added list(TRANSFORM PREPEND +# But we can't use it yet +string(REPLACE "include/" "${CMAKE_CURRENT_SOURCE_DIR}/include/" PYBIND11_HEADERS + "${PYBIND11_HEADERS}") + +# Cache variable so this can be used in parent projects +set(pybind11_INCLUDE_DIR + "${CMAKE_CURRENT_LIST_DIR}/include" + CACHE INTERNAL "Directory where pybind11 headers are located") + +# Backward compatible variable for add_subdirectory mode +if(NOT PYBIND11_MASTER_PROJECT) + set(PYBIND11_INCLUDE_DIR + "${pybind11_INCLUDE_DIR}" + CACHE INTERNAL "") +endif() + +# Note: when creating targets, you cannot use if statements at configure time - +# you need generator expressions, because those will be placed in the target file. +# You can also place ifs *in* the Config.in, but not here. 
+ +# This section builds targets, but does *not* touch Python +# Non-IMPORT targets cannot be defined twice +if(NOT TARGET pybind11_headers) + # Build the headers-only target (no Python included): + # (long name used here to keep this from clashing in subdirectory mode) + add_library(pybind11_headers INTERFACE) + add_library(pybind11::pybind11_headers ALIAS pybind11_headers) # to match exported target + add_library(pybind11::headers ALIAS pybind11_headers) # easier to use/remember + + target_include_directories( + pybind11_headers ${pybind11_system} INTERFACE $ + $) + + target_compile_features(pybind11_headers INTERFACE cxx_inheriting_constructors cxx_user_literals + cxx_right_angle_brackets) + if(NOT "${PYBIND11_INTERNALS_VERSION}" STREQUAL "") + target_compile_definitions( + pybind11_headers INTERFACE "PYBIND11_INTERNALS_VERSION=${PYBIND11_INTERNALS_VERSION}") + endif() +else() + # It is invalid to install a target twice, too. + set(PYBIND11_INSTALL OFF) +endif() + +include("${CMAKE_CURRENT_SOURCE_DIR}/tools/pybind11Common.cmake") +# https://github.com/jtojnar/cmake-snips/#concatenating-paths-when-building-pkg-config-files +# TODO: cmake 3.20 adds the cmake_path() function, which obsoletes this snippet +include("${CMAKE_CURRENT_SOURCE_DIR}/tools/JoinPaths.cmake") + +# Relative directory setting +if(USE_PYTHON_INCLUDE_DIR AND DEFINED Python_INCLUDE_DIRS) + file(RELATIVE_PATH CMAKE_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_PREFIX} ${Python_INCLUDE_DIRS}) +elseif(USE_PYTHON_INCLUDE_DIR AND DEFINED PYTHON_INCLUDE_DIR) + file(RELATIVE_PATH CMAKE_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_PREFIX} ${PYTHON_INCLUDE_DIRS}) +endif() + +if(PYBIND11_INSTALL) + install(DIRECTORY ${pybind11_INCLUDE_DIR}/pybind11 DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + set(PYBIND11_CMAKECONFIG_INSTALL_DIR + "${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}" + CACHE STRING "install path for pybind11Config.cmake") + + if(IS_ABSOLUTE "${CMAKE_INSTALL_INCLUDEDIR}") + set(pybind11_INCLUDEDIR "${CMAKE_INSTALL_FULL_INCLUDEDIR}") + else() + set(pybind11_INCLUDEDIR "\$\{PACKAGE_PREFIX_DIR\}/${CMAKE_INSTALL_INCLUDEDIR}") + endif() + + configure_package_config_file( + tools/${PROJECT_NAME}Config.cmake.in "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" + INSTALL_DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR}) + + if(CMAKE_VERSION VERSION_LESS 3.14) + # Remove CMAKE_SIZEOF_VOID_P from ConfigVersion.cmake since the library does + # not depend on architecture specific settings or libraries. 
+ set(_PYBIND11_CMAKE_SIZEOF_VOID_P ${CMAKE_SIZEOF_VOID_P}) + unset(CMAKE_SIZEOF_VOID_P) + + write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake + VERSION ${PROJECT_VERSION} + COMPATIBILITY AnyNewerVersion) + + set(CMAKE_SIZEOF_VOID_P ${_PYBIND11_CMAKE_SIZEOF_VOID_P}) + else() + # CMake 3.14+ natively supports header-only libraries + write_basic_package_version_file( + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake + VERSION ${PROJECT_VERSION} + COMPATIBILITY AnyNewerVersion ARCH_INDEPENDENT) + endif() + + install( + FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake + tools/FindPythonLibsNew.cmake + tools/pybind11Common.cmake + tools/pybind11Tools.cmake + tools/pybind11NewTools.cmake + DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR}) + + if(NOT PYBIND11_EXPORT_NAME) + set(PYBIND11_EXPORT_NAME "${PROJECT_NAME}Targets") + endif() + + install(TARGETS pybind11_headers EXPORT "${PYBIND11_EXPORT_NAME}") + + install( + EXPORT "${PYBIND11_EXPORT_NAME}" + NAMESPACE "pybind11::" + DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR}) + + # pkg-config support + if(NOT prefix_for_pc_file) + set(prefix_for_pc_file "${CMAKE_INSTALL_PREFIX}") + endif() + join_paths(includedir_for_pc_file "\${prefix}" "${CMAKE_INSTALL_INCLUDEDIR}") + configure_file("${CMAKE_CURRENT_SOURCE_DIR}/tools/pybind11.pc.in" + "${CMAKE_CURRENT_BINARY_DIR}/pybind11.pc" @ONLY) + install(FILES "${CMAKE_CURRENT_BINARY_DIR}/pybind11.pc" + DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig/") + + # Uninstall target + if(PYBIND11_MASTER_PROJECT) + configure_file("${CMAKE_CURRENT_SOURCE_DIR}/tools/cmake_uninstall.cmake.in" + "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY) + + add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P + ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) + endif() +endif() + +# BUILD_TESTING takes priority, but only if this is the master project +if(PYBIND11_MASTER_PROJECT AND DEFINED BUILD_TESTING) + if(BUILD_TESTING) + if(_pybind11_nopython) + message(FATAL_ERROR "Cannot activate tests in NOPYTHON mode") + else() + add_subdirectory(tests) + endif() + endif() +else() + if(PYBIND11_TEST) + if(_pybind11_nopython) + message(FATAL_ERROR "Cannot activate tests in NOPYTHON mode") + else() + add_subdirectory(tests) + endif() + endif() +endif() + +# Better symmetry with find_package(pybind11 CONFIG) mode. +if(NOT PYBIND11_MASTER_PROJECT) + set(pybind11_FOUND + TRUE + CACHE INTERNAL "True if pybind11 and all required components found on the system") +endif() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py new file mode 100644 index 0000000..7c10b30 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py @@ -0,0 +1,17 @@ +import sys + +if sys.version_info < (3, 6): # noqa: UP036 + msg = "pybind11 does not support Python < 3.6. 2.9 was the last release supporting Python 2.7 and 3.5." 
+ raise ImportError(msg) + + +from ._version import __version__, version_info +from .commands import get_cmake_dir, get_include, get_pkgconfig_dir + +__all__ = ( + "version_info", + "__version__", + "get_include", + "get_cmake_dir", + "get_pkgconfig_dir", +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py new file mode 100644 index 0000000..180665c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py @@ -0,0 +1,62 @@ +# pylint: disable=missing-function-docstring + +import argparse +import sys +import sysconfig + +from ._version import __version__ +from .commands import get_cmake_dir, get_include, get_pkgconfig_dir + + +def print_includes() -> None: + dirs = [ + sysconfig.get_path("include"), + sysconfig.get_path("platinclude"), + get_include(), + ] + + # Make unique but preserve order + unique_dirs = [] + for d in dirs: + if d and d not in unique_dirs: + unique_dirs.append(d) + + print(" ".join("-I" + d for d in unique_dirs)) + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + "--version", + action="version", + version=__version__, + help="Print the version and exit.", + ) + parser.add_argument( + "--includes", + action="store_true", + help="Include flags for both pybind11 and Python headers.", + ) + parser.add_argument( + "--cmakedir", + action="store_true", + help="Print the CMake module directory, ideal for setting -Dpybind11_ROOT in CMake.", + ) + parser.add_argument( + "--pkgconfigdir", + action="store_true", + help="Print the pkgconfig directory, ideal for setting $PKG_CONFIG_PATH.", + ) + args = parser.parse_args() + if not sys.argv[1:]: + parser.print_help() + if args.includes: + print_includes() + if args.cmakedir: + print(get_cmake_dir()) + if args.pkgconfigdir: + print(get_pkgconfig_dir()) + + +if __name__ == "__main__": + main() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py new file mode 100644 index 0000000..9280fa0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py @@ -0,0 +1,12 @@ +from typing import Union + + +def _to_int(s: str) -> Union[int, str]: + try: + return int(s) + except ValueError: + return s + + +__version__ = "2.11.1" +version_info = tuple(_to_int(s) for s in __version__.split(".")) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py new file mode 100644 index 0000000..b11690f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py @@ -0,0 +1,37 @@ +import os + +DIR = os.path.abspath(os.path.dirname(__file__)) + + +def get_include(user: bool = False) -> str: # noqa: ARG001 + """ + Return the path to the pybind11 include directory. The historical "user" + argument is unused, and may be removed. 
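+
+    A typical (illustrative) use is building a compiler include flag,
+    e.g. ``"-I" + get_include()``.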
+ """ + installed_path = os.path.join(DIR, "include") + source_path = os.path.join(os.path.dirname(DIR), "include") + return installed_path if os.path.exists(installed_path) else source_path + + +def get_cmake_dir() -> str: + """ + Return the path to the pybind11 CMake module directory. + """ + cmake_installed_path = os.path.join(DIR, "share", "cmake", "pybind11") + if os.path.exists(cmake_installed_path): + return cmake_installed_path + + msg = "pybind11 not installed, installation required to access the CMake files" + raise ImportError(msg) + + +def get_pkgconfig_dir() -> str: + """ + Return the path to the pybind11 pkgconfig directory. + """ + pkgconfig_installed_path = os.path.join(DIR, "share", "pkgconfig") + if os.path.exists(pkgconfig_installed_path): + return pkgconfig_installed_path + + msg = "pybind11 not installed, installation required to access the pkgconfig files" + raise ImportError(msg) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py new file mode 100644 index 0000000..aeeee9d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py @@ -0,0 +1,498 @@ +""" +This module provides helpers for C++11+ projects using pybind11. + +LICENSE: + +Copyright (c) 2016 Wenzel Jakob , All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" + +# IMPORTANT: If you change this file in the pybind11 repo, also review +# setup_helpers.pyi for matching changes. +# +# If you copy this file in, you don't +# need the .pyi file; it's just an interface file for static type checkers. 
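+#
+# A minimal usage sketch (not part of upstream pybind11): the module name
+# "example" and source path "src/main.cpp" below are placeholders.
+#
+#   from setuptools import setup
+#   from pybind11.setup_helpers import Pybind11Extension, build_ext
+#
+#   ext_modules = [Pybind11Extension("example", ["src/main.cpp"], cxx_std=17)]
+#   setup(name="example", ext_modules=ext_modules,
+#         cmdclass={"build_ext": build_ext})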
+ +import contextlib +import os +import platform +import shlex +import shutil +import sys +import sysconfig +import tempfile +import threading +import warnings +from functools import lru_cache +from pathlib import Path +from typing import ( + Any, + Callable, + Dict, + Iterable, + Iterator, + List, + Optional, + Tuple, + TypeVar, + Union, +) + +try: + from setuptools import Extension as _Extension + from setuptools.command.build_ext import build_ext as _build_ext +except ImportError: + from distutils.command.build_ext import build_ext as _build_ext # type: ignore[assignment] + from distutils.extension import Extension as _Extension # type: ignore[assignment] + +import distutils.ccompiler +import distutils.errors + +WIN = sys.platform.startswith("win32") and "mingw" not in sysconfig.get_platform() +MACOS = sys.platform.startswith("darwin") +STD_TMPL = "/std:c++{}" if WIN else "-std=c++{}" + + +# It is recommended to use PEP 518 builds if using this module. However, this +# file explicitly supports being copied into a user's project directory +# standalone, and pulling pybind11 with the deprecated setup_requires feature. +# If you copy the file, remember to add it to your MANIFEST.in, and add the current +# directory into your path if it sits beside your setup.py. + + +class Pybind11Extension(_Extension): + """ + Build a C++11+ Extension module with pybind11. This automatically adds the + recommended flags when you init the extension and assumes C++ sources - you + can further modify the options yourself. + + The customizations are: + + * ``/EHsc`` and ``/bigobj`` on Windows + * ``stdlib=libc++`` on macOS + * ``visibility=hidden`` and ``-g0`` on Unix + + Finally, you can set ``cxx_std`` via constructor or afterwards to enable + flags for C++ std, and a few extra helper flags related to the C++ standard + level. It is _highly_ recommended you either set this, or use the provided + ``build_ext``, which will search for the highest supported extension for + you if the ``cxx_std`` property is not set. Do not set the ``cxx_std`` + property more than once, as flags are added when you set it. Set the + property to None to disable the addition of C++ standard flags. + + If you want to add pybind11 headers manually, for example for an exact + git checkout, then set ``include_pybind11=False``. + """ + + # flags are prepended, so that they can be further overridden, e.g. by + # ``extra_compile_args=["-g"]``. 
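+    # For illustration (hypothetical values): a caller passing
+    #   Pybind11Extension("example", ["example.cpp"], extra_compile_args=["-O3"])
+    # keeps "-O3" after the defaults added below, so it takes effect last.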
+ + def _add_cflags(self, flags: List[str]) -> None: + self.extra_compile_args[:0] = flags + + def _add_ldflags(self, flags: List[str]) -> None: + self.extra_link_args[:0] = flags + + def __init__(self, *args: Any, **kwargs: Any) -> None: + self._cxx_level = 0 + cxx_std = kwargs.pop("cxx_std", 0) + + if "language" not in kwargs: + kwargs["language"] = "c++" + + include_pybind11 = kwargs.pop("include_pybind11", True) + + super().__init__(*args, **kwargs) + + # Include the installed package pybind11 headers + if include_pybind11: + # If using setup_requires, this fails the first time - that's okay + try: + import pybind11 + + pyinc = pybind11.get_include() + + if pyinc not in self.include_dirs: + self.include_dirs.append(pyinc) + except ModuleNotFoundError: + pass + + self.cxx_std = cxx_std + + cflags = [] + if WIN: + cflags += ["/EHsc", "/bigobj"] + else: + cflags += ["-fvisibility=hidden"] + env_cflags = os.environ.get("CFLAGS", "") + env_cppflags = os.environ.get("CPPFLAGS", "") + c_cpp_flags = shlex.split(env_cflags) + shlex.split(env_cppflags) + if not any(opt.startswith("-g") for opt in c_cpp_flags): + cflags += ["-g0"] + self._add_cflags(cflags) + + @property + def cxx_std(self) -> int: + """ + The CXX standard level. If set, will add the required flags. If left at + 0, it will trigger an automatic search when pybind11's build_ext is + used. If None, will have no effect. Besides just the flags, this may + add a macos-min 10.9 or 10.14 flag if MACOSX_DEPLOYMENT_TARGET is + unset. + """ + return self._cxx_level + + @cxx_std.setter + def cxx_std(self, level: int) -> None: + if self._cxx_level: + warnings.warn( + "You cannot safely change the cxx_level after setting it!", stacklevel=2 + ) + + # MSVC 2015 Update 3 and later only have 14 (and later 17) modes, so + # force a valid flag here. + if WIN and level == 11: + level = 14 + + self._cxx_level = level + + if not level: + return + + cflags = [STD_TMPL.format(level)] + ldflags = [] + + if MACOS and "MACOSX_DEPLOYMENT_TARGET" not in os.environ: + # C++17 requires a higher min version of macOS. An earlier version + # (10.12 or 10.13) can be set manually via environment variable if + # you are careful in your feature usage, but 10.14 is the safest + # setting for general use. However, never set higher than the + # current macOS version! + current_macos = tuple(int(x) for x in platform.mac_ver()[0].split(".")[:2]) + desired_macos = (10, 9) if level < 17 else (10, 14) + macos_string = ".".join(str(x) for x in min(current_macos, desired_macos)) + macosx_min = f"-mmacosx-version-min={macos_string}" + cflags += [macosx_min] + ldflags += [macosx_min] + + self._add_cflags(cflags) + self._add_ldflags(ldflags) + + +# Just in case someone clever tries to multithread +tmp_chdir_lock = threading.Lock() + + +@contextlib.contextmanager +def tmp_chdir() -> Iterator[str]: + "Prepare and enter a temporary directory, cleanup when done" + + # Threadsafe + with tmp_chdir_lock: + olddir = os.getcwd() + try: + tmpdir = tempfile.mkdtemp() + os.chdir(tmpdir) + yield tmpdir + finally: + os.chdir(olddir) + shutil.rmtree(tmpdir) + + +# cf http://bugs.python.org/issue26689 +def has_flag(compiler: Any, flag: str) -> bool: + """ + Return the flag if a flag name is supported on the + specified compiler, otherwise None (can be used as a boolean). + If multiple flags are passed, return the first that matches. + """ + + with tmp_chdir(): + fname = Path("flagcheck.cpp") + # Don't trigger -Wunused-parameter. 
+ fname.write_text("int main (int, char **) { return 0; }", encoding="utf-8") + + try: + compiler.compile([str(fname)], extra_postargs=[flag]) + except distutils.errors.CompileError: + return False + return True + + +# Every call will cache the result +cpp_flag_cache = None + + +@lru_cache() +def auto_cpp_level(compiler: Any) -> Union[str, int]: + """ + Return the max supported C++ std level (17, 14, or 11). Returns latest on Windows. + """ + + if WIN: + return "latest" + + levels = [17, 14, 11] + + for level in levels: + if has_flag(compiler, STD_TMPL.format(level)): + return level + + msg = "Unsupported compiler -- at least C++11 support is needed!" + raise RuntimeError(msg) + + +class build_ext(_build_ext): # noqa: N801 + """ + Customized build_ext that allows an auto-search for the highest supported + C++ level for Pybind11Extension. This is only needed for the auto-search + for now, and is completely optional otherwise. + """ + + def build_extensions(self) -> None: + """ + Build extensions, injecting C++ std for Pybind11Extension if needed. + """ + + for ext in self.extensions: + if hasattr(ext, "_cxx_level") and ext._cxx_level == 0: + ext.cxx_std = auto_cpp_level(self.compiler) + + super().build_extensions() + + +def intree_extensions( + paths: Iterable[str], package_dir: Optional[Dict[str, str]] = None +) -> List[Pybind11Extension]: + """ + Generate Pybind11Extensions from source files directly located in a Python + source tree. + + ``package_dir`` behaves as in ``setuptools.setup``. If unset, the Python + package root parent is determined as the first parent directory that does + not contain an ``__init__.py`` file. + """ + exts = [] + + if package_dir is None: + for path in paths: + parent, _ = os.path.split(path) + while os.path.exists(os.path.join(parent, "__init__.py")): + parent, _ = os.path.split(parent) + relname, _ = os.path.splitext(os.path.relpath(path, parent)) + qualified_name = relname.replace(os.path.sep, ".") + exts.append(Pybind11Extension(qualified_name, [path])) + return exts + + for path in paths: + for prefix, parent in package_dir.items(): + if path.startswith(parent): + relname, _ = os.path.splitext(os.path.relpath(path, parent)) + qualified_name = relname.replace(os.path.sep, ".") + if prefix: + qualified_name = prefix + "." + qualified_name + exts.append(Pybind11Extension(qualified_name, [path])) + break + else: + msg = ( + f"path {path} is not a child of any of the directories listed " + f"in 'package_dir' ({package_dir})" + ) + raise ValueError(msg) + + return exts + + +def naive_recompile(obj: str, src: str) -> bool: + """ + This will recompile only if the source file changes. It does not check + header files, so a more advanced function or Ccache is better if you have + editable header files in your package. + """ + return os.stat(obj).st_mtime < os.stat(src).st_mtime + + +def no_recompile(obg: str, src: str) -> bool: # noqa: ARG001 + """ + This is the safest but slowest choice (and is the default) - will always + recompile sources. 
+ """ + return True + + +S = TypeVar("S", bound="ParallelCompile") + +CCompilerMethod = Callable[ + [ + distutils.ccompiler.CCompiler, + List[str], + Optional[str], + Optional[Union[Tuple[str], Tuple[str, Optional[str]]]], + Optional[List[str]], + bool, + Optional[List[str]], + Optional[List[str]], + Optional[List[str]], + ], + List[str], +] + + +# Optional parallel compile utility +# inspired by: http://stackoverflow.com/questions/11013851/speeding-up-build-process-with-distutils +# and: https://github.com/tbenthompson/cppimport/blob/stable/cppimport/build_module.py +# and NumPy's parallel distutils module: +# https://github.com/numpy/numpy/blob/master/numpy/distutils/ccompiler.py +class ParallelCompile: + """ + Make a parallel compile function. Inspired by + numpy.distutils.ccompiler.CCompiler.compile and cppimport. + + This takes several arguments that allow you to customize the compile + function created: + + envvar: + Set an environment variable to control the compilation threads, like + NPY_NUM_BUILD_JOBS + default: + 0 will automatically multithread, or 1 will only multithread if the + envvar is set. + max: + The limit for automatic multithreading if non-zero + needs_recompile: + A function of (obj, src) that returns True when recompile is needed. No + effect in isolated mode; use ccache instead, see + https://github.com/matplotlib/matplotlib/issues/1507/ + + To use:: + + ParallelCompile("NPY_NUM_BUILD_JOBS").install() + + or:: + + with ParallelCompile("NPY_NUM_BUILD_JOBS"): + setup(...) + + By default, this assumes all files need to be recompiled. A smarter + function can be provided via needs_recompile. If the output has not yet + been generated, the compile will always run, and this function is not + called. + """ + + __slots__ = ("envvar", "default", "max", "_old", "needs_recompile") + + def __init__( + self, + envvar: Optional[str] = None, + default: int = 0, + max: int = 0, # pylint: disable=redefined-builtin + needs_recompile: Callable[[str, str], bool] = no_recompile, + ) -> None: + self.envvar = envvar + self.default = default + self.max = max + self.needs_recompile = needs_recompile + self._old: List[CCompilerMethod] = [] + + def function(self) -> CCompilerMethod: + """ + Builds a function object usable as distutils.ccompiler.CCompiler.compile. + """ + + def compile_function( + compiler: distutils.ccompiler.CCompiler, + sources: List[str], + output_dir: Optional[str] = None, + macros: Optional[Union[Tuple[str], Tuple[str, Optional[str]]]] = None, + include_dirs: Optional[List[str]] = None, + debug: bool = False, + extra_preargs: Optional[List[str]] = None, + extra_postargs: Optional[List[str]] = None, + depends: Optional[List[str]] = None, + ) -> Any: + # These lines are directly from distutils.ccompiler.CCompiler + macros, objects, extra_postargs, pp_opts, build = compiler._setup_compile( # type: ignore[attr-defined] + output_dir, macros, include_dirs, sources, depends, extra_postargs + ) + cc_args = compiler._get_cc_args(pp_opts, debug, extra_preargs) # type: ignore[attr-defined] + + # The number of threads; start with default. + threads = self.default + + # Determine the number of compilation threads, unless set by an environment variable. 
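+            # For example, a hypothetical invocation such as
+            #   NPY_NUM_BUILD_JOBS=4 pip install .
+            # (when envvar="NPY_NUM_BUILD_JOBS") takes precedence over the default.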
+ if self.envvar is not None: + threads = int(os.environ.get(self.envvar, self.default)) + + def _single_compile(obj: Any) -> None: + try: + src, ext = build[obj] + except KeyError: + return + + if not os.path.exists(obj) or self.needs_recompile(obj, src): + compiler._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) # type: ignore[attr-defined] + + try: + # Importing .synchronize checks for platforms that have some multiprocessing + # capabilities but lack semaphores, such as AWS Lambda and Android Termux. + import multiprocessing.synchronize + from multiprocessing.pool import ThreadPool + except ImportError: + threads = 1 + + if threads == 0: + try: + threads = multiprocessing.cpu_count() + threads = self.max if self.max and self.max < threads else threads + except NotImplementedError: + threads = 1 + + if threads > 1: + with ThreadPool(threads) as pool: + for _ in pool.imap_unordered(_single_compile, objects): + pass + else: + for ob in objects: + _single_compile(ob) + + return objects + + return compile_function + + def install(self: S) -> S: + """ + Installs the compile function into distutils.ccompiler.CCompiler.compile. + """ + distutils.ccompiler.CCompiler.compile = self.function() # type: ignore[assignment] + return self + + def __enter__(self: S) -> S: + self._old.append(distutils.ccompiler.CCompiler.compile) + return self.install() + + def __exit__(self, *args: Any) -> None: + distutils.ccompiler.CCompiler.compile = self._old.pop() # type: ignore[assignment] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake new file mode 100644 index 0000000..5d3fcbf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake @@ -0,0 +1,76 @@ +# - Find the Catch test framework or download it (single header) +# +# This is a quick module for internal use. It assumes that Catch is +# REQUIRED and that a minimum version is provided (not EXACT). If +# a suitable version isn't found locally, the single header file +# will be downloaded and placed in the build dir: PROJECT_BINARY_DIR. 
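+#
+# A typical invocation (the version number is shown purely for illustration):
+#   set(DOWNLOAD_CATCH ON)
+#   find_package(Catch 2.13.9)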
+# +# This code sets the following variables: +# CATCH_INCLUDE_DIR - path to catch.hpp +# CATCH_VERSION - version number + +option(DOWNLOAD_CATCH "Download catch2 if not found") + +if(NOT Catch_FIND_VERSION) + message(FATAL_ERROR "A version number must be specified.") +elseif(Catch_FIND_REQUIRED) + message(FATAL_ERROR "This module assumes Catch is not required.") +elseif(Catch_FIND_VERSION_EXACT) + message(FATAL_ERROR "Exact version numbers are not supported, only minimum.") +endif() + +# Extract the version number from catch.hpp +function(_get_catch_version) + file( + STRINGS "${CATCH_INCLUDE_DIR}/catch.hpp" version_line + REGEX "Catch v.*" + LIMIT_COUNT 1) + if(version_line MATCHES "Catch v([0-9]+)\\.([0-9]+)\\.([0-9]+)") + set(CATCH_VERSION + "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}" + PARENT_SCOPE) + endif() +endfunction() + +# Download the single-header version of Catch +function(_download_catch version destination_dir) + message(STATUS "Downloading catch v${version}...") + set(url https://github.com/philsquared/Catch/releases/download/v${version}/catch.hpp) + file( + DOWNLOAD ${url} "${destination_dir}/catch.hpp" + STATUS status + LOG log) + list(GET status 0 error) + if(error) + string(REPLACE "\n" "\n " log " ${log}") + message(FATAL_ERROR "Could not download URL:\n" " ${url}\n" "Log:\n" "${log}") + endif() + set(CATCH_INCLUDE_DIR + "${destination_dir}" + CACHE INTERNAL "") +endfunction() + +# Look for catch locally +find_path( + CATCH_INCLUDE_DIR + NAMES catch.hpp + PATH_SUFFIXES catch2) +if(CATCH_INCLUDE_DIR) + _get_catch_version() +endif() + +# Download the header if it wasn't found or if it's outdated +if(NOT CATCH_VERSION OR CATCH_VERSION VERSION_LESS ${Catch_FIND_VERSION}) + if(DOWNLOAD_CATCH) + _download_catch(${Catch_FIND_VERSION} "${PROJECT_BINARY_DIR}/catch/") + _get_catch_version() + else() + set(CATCH_FOUND FALSE) + return() + endif() +endif() + +add_library(Catch2::Catch2 IMPORTED INTERFACE) +set_property(TARGET Catch2::Catch2 PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${CATCH_INCLUDE_DIR}") + +set(CATCH_FOUND TRUE) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake new file mode 100644 index 0000000..83625d9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake @@ -0,0 +1,86 @@ +# - Try to find Eigen3 lib +# +# This module supports requiring a minimum version, e.g. you can do +# find_package(Eigen3 3.1.2) +# to require version 3.1.2 or newer of Eigen3. +# +# Once done this will define +# +# EIGEN3_FOUND - system has eigen lib with correct version +# EIGEN3_INCLUDE_DIR - the eigen include directory +# EIGEN3_VERSION - eigen version + +# Copyright (c) 2006, 2007 Montel Laurent, +# Copyright (c) 2008, 2009 Gael Guennebaud, +# Copyright (c) 2009 Benoit Jacob +# Redistribution and use is allowed according to the terms of the 2-clause BSD license. 
+ +if(NOT Eigen3_FIND_VERSION) + if(NOT Eigen3_FIND_VERSION_MAJOR) + set(Eigen3_FIND_VERSION_MAJOR 2) + endif(NOT Eigen3_FIND_VERSION_MAJOR) + if(NOT Eigen3_FIND_VERSION_MINOR) + set(Eigen3_FIND_VERSION_MINOR 91) + endif(NOT Eigen3_FIND_VERSION_MINOR) + if(NOT Eigen3_FIND_VERSION_PATCH) + set(Eigen3_FIND_VERSION_PATCH 0) + endif(NOT Eigen3_FIND_VERSION_PATCH) + + set(Eigen3_FIND_VERSION + "${Eigen3_FIND_VERSION_MAJOR}.${Eigen3_FIND_VERSION_MINOR}.${Eigen3_FIND_VERSION_PATCH}") +endif(NOT Eigen3_FIND_VERSION) + +macro(_eigen3_check_version) + file(READ "${EIGEN3_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h" _eigen3_version_header) + + string(REGEX MATCH "define[ \t]+EIGEN_WORLD_VERSION[ \t]+([0-9]+)" _eigen3_world_version_match + "${_eigen3_version_header}") + set(EIGEN3_WORLD_VERSION "${CMAKE_MATCH_1}") + string(REGEX MATCH "define[ \t]+EIGEN_MAJOR_VERSION[ \t]+([0-9]+)" _eigen3_major_version_match + "${_eigen3_version_header}") + set(EIGEN3_MAJOR_VERSION "${CMAKE_MATCH_1}") + string(REGEX MATCH "define[ \t]+EIGEN_MINOR_VERSION[ \t]+([0-9]+)" _eigen3_minor_version_match + "${_eigen3_version_header}") + set(EIGEN3_MINOR_VERSION "${CMAKE_MATCH_1}") + + set(EIGEN3_VERSION ${EIGEN3_WORLD_VERSION}.${EIGEN3_MAJOR_VERSION}.${EIGEN3_MINOR_VERSION}) + if(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION}) + set(EIGEN3_VERSION_OK FALSE) + else(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION}) + set(EIGEN3_VERSION_OK TRUE) + endif(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION}) + + if(NOT EIGEN3_VERSION_OK) + + message(STATUS "Eigen3 version ${EIGEN3_VERSION} found in ${EIGEN3_INCLUDE_DIR}, " + "but at least version ${Eigen3_FIND_VERSION} is required") + endif(NOT EIGEN3_VERSION_OK) +endmacro(_eigen3_check_version) + +if(EIGEN3_INCLUDE_DIR) + + # in cache already + _eigen3_check_version() + set(EIGEN3_FOUND ${EIGEN3_VERSION_OK}) + +else(EIGEN3_INCLUDE_DIR) + if(NOT DEFINED KDE4_INCLUDE_DIR) + set(KDE4_INCLUDE_DIR "") + endif() + + find_path( + EIGEN3_INCLUDE_DIR + NAMES signature_of_eigen3_matrix_library + PATHS ${CMAKE_INSTALL_PREFIX}/include ${KDE4_INCLUDE_DIR} + PATH_SUFFIXES eigen3 eigen) + + if(EIGEN3_INCLUDE_DIR) + _eigen3_check_version() + endif(EIGEN3_INCLUDE_DIR) + + include(FindPackageHandleStandardArgs) + find_package_handle_standard_args(Eigen3 DEFAULT_MSG EIGEN3_INCLUDE_DIR EIGEN3_VERSION_OK) + + mark_as_advanced(EIGEN3_INCLUDE_DIR) + +endif(EIGEN3_INCLUDE_DIR) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake new file mode 100644 index 0000000..ce558d4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake @@ -0,0 +1,287 @@ +# - Find python libraries +# This module finds the libraries corresponding to the Python interpreter +# FindPythonInterp provides. +# This code sets the following variables: +# +# PYTHONLIBS_FOUND - have the Python libs been found +# PYTHON_PREFIX - path to the Python installation +# PYTHON_LIBRARIES - path to the python library +# PYTHON_INCLUDE_DIRS - path to where Python.h is found +# PYTHON_MODULE_EXTENSION - lib extension, e.g. 
'.so' or '.pyd' +# PYTHON_MODULE_PREFIX - lib name prefix: usually an empty string +# PYTHON_SITE_PACKAGES - path to installation site-packages +# PYTHON_IS_DEBUG - whether the Python interpreter is a debug build +# +# Thanks to talljimbo for the patch adding the 'LDVERSION' config +# variable usage. + +#============================================================================= +# Copyright 2001-2009 Kitware, Inc. +# Copyright 2012 Continuum Analytics, Inc. +# +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the names of Kitware, Inc., the Insight Software Consortium, +# nor the names of their contributors may be used to endorse or promote +# products derived from this software without specific prior written +# permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#============================================================================= + +# Checking for the extension makes sure that `LibsNew` was found and not just `Libs`. +if(PYTHONLIBS_FOUND AND PYTHON_MODULE_EXTENSION) + return() +endif() + +if(PythonLibsNew_FIND_QUIETLY) + set(_pythonlibs_quiet QUIET) +else() + set(_pythonlibs_quiet "") +endif() + +if(PythonLibsNew_FIND_REQUIRED) + set(_pythonlibs_required REQUIRED) +endif() + +# Check to see if the `python` command is present and from a virtual +# environment, conda, or GHA activation - if it is, try to use that. + +if(NOT DEFINED PYTHON_EXECUTABLE) + if(DEFINED ENV{VIRTUAL_ENV}) + find_program( + PYTHON_EXECUTABLE python + PATHS "$ENV{VIRTUAL_ENV}" "$ENV{VIRTUAL_ENV}/bin" + NO_DEFAULT_PATH) + elseif(DEFINED ENV{CONDA_PREFIX}) + find_program( + PYTHON_EXECUTABLE python + PATHS "$ENV{CONDA_PREFIX}" "$ENV{CONDA_PREFIX}/bin" + NO_DEFAULT_PATH) + elseif(DEFINED ENV{pythonLocation}) + find_program( + PYTHON_EXECUTABLE python + PATHS "$ENV{pythonLocation}" "$ENV{pythonLocation}/bin" + NO_DEFAULT_PATH) + endif() + if(NOT PYTHON_EXECUTABLE) + unset(PYTHON_EXECUTABLE) + endif() +endif() + +# Use the Python interpreter to find the libs. 
+if(NOT PythonLibsNew_FIND_VERSION) + set(PythonLibsNew_FIND_VERSION "3.6") +endif() + +find_package(PythonInterp ${PythonLibsNew_FIND_VERSION} ${_pythonlibs_required} + ${_pythonlibs_quiet}) + +if(NOT PYTHONINTERP_FOUND) + set(PYTHONLIBS_FOUND FALSE) + set(PythonLibsNew_FOUND FALSE) + return() +endif() + +# According to https://stackoverflow.com/questions/646518/python-how-to-detect-debug-interpreter +# testing whether sys has the gettotalrefcount function is a reliable, cross-platform +# way to detect a CPython debug interpreter. +# +# The library suffix is from the config var LDVERSION sometimes, otherwise +# VERSION. VERSION will typically be like "2.7" on unix, and "27" on windows. +execute_process( + COMMAND + "${PYTHON_EXECUTABLE}" "-c" " +import sys;import struct; +import sysconfig as s +USE_SYSCONFIG = sys.version_info >= (3, 10) +if not USE_SYSCONFIG: + from distutils import sysconfig as ds +print('.'.join(str(v) for v in sys.version_info)); +print(sys.prefix); +if USE_SYSCONFIG: + scheme = s.get_default_scheme() + if scheme == 'posix_local': + # Debian's default scheme installs to /usr/local/ but we want to find headers in /usr/ + scheme = 'posix_prefix' + print(s.get_path('platinclude', scheme)) + print(s.get_path('platlib')) + print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO')) +else: + print(ds.get_python_inc(plat_specific=True)); + print(ds.get_python_lib(plat_specific=True)); + print(ds.get_config_var('EXT_SUFFIX') or ds.get_config_var('SO')); +print(hasattr(sys, 'gettotalrefcount')+0); +print(struct.calcsize('@P')); +print(s.get_config_var('LDVERSION') or s.get_config_var('VERSION')); +print(s.get_config_var('LIBDIR') or ''); +print(s.get_config_var('MULTIARCH') or ''); +" + RESULT_VARIABLE _PYTHON_SUCCESS + OUTPUT_VARIABLE _PYTHON_VALUES + ERROR_VARIABLE _PYTHON_ERROR_VALUE) + +if(NOT _PYTHON_SUCCESS MATCHES 0) + if(PythonLibsNew_FIND_REQUIRED) + message(FATAL_ERROR "Python config failure:\n${_PYTHON_ERROR_VALUE}") + endif() + set(PYTHONLIBS_FOUND FALSE) + set(PythonLibsNew_FOUND FALSE) + return() +endif() + +option( + PYBIND11_PYTHONLIBS_OVERWRITE + "Overwrite cached values read from Python library (classic search). Turn off if cross-compiling and manually setting these values." 
+ ON) +# Can manually set values when cross-compiling +macro(_PYBIND11_GET_IF_UNDEF lst index name) + if(PYBIND11_PYTHONLIBS_OVERWRITE OR NOT DEFINED "${name}") + list(GET "${lst}" "${index}" "${name}") + endif() +endmacro() + +# Convert the process output into a list +if(WIN32) + string(REGEX REPLACE "\\\\" "/" _PYTHON_VALUES ${_PYTHON_VALUES}) +endif() +string(REGEX REPLACE ";" "\\\\;" _PYTHON_VALUES ${_PYTHON_VALUES}) +string(REGEX REPLACE "\n" ";" _PYTHON_VALUES ${_PYTHON_VALUES}) +_pybind11_get_if_undef(_PYTHON_VALUES 0 _PYTHON_VERSION_LIST) +_pybind11_get_if_undef(_PYTHON_VALUES 1 PYTHON_PREFIX) +_pybind11_get_if_undef(_PYTHON_VALUES 2 PYTHON_INCLUDE_DIR) +_pybind11_get_if_undef(_PYTHON_VALUES 3 PYTHON_SITE_PACKAGES) +_pybind11_get_if_undef(_PYTHON_VALUES 4 PYTHON_MODULE_EXTENSION) +_pybind11_get_if_undef(_PYTHON_VALUES 5 PYTHON_IS_DEBUG) +_pybind11_get_if_undef(_PYTHON_VALUES 6 PYTHON_SIZEOF_VOID_P) +_pybind11_get_if_undef(_PYTHON_VALUES 7 PYTHON_LIBRARY_SUFFIX) +_pybind11_get_if_undef(_PYTHON_VALUES 8 PYTHON_LIBDIR) +_pybind11_get_if_undef(_PYTHON_VALUES 9 PYTHON_MULTIARCH) + +# Make sure the Python has the same pointer-size as the chosen compiler +# Skip if CMAKE_SIZEOF_VOID_P is not defined +# This should be skipped for (non-Apple) cross-compiles (like EMSCRIPTEN) +if(NOT CMAKE_CROSSCOMPILING + AND CMAKE_SIZEOF_VOID_P + AND (NOT "${PYTHON_SIZEOF_VOID_P}" STREQUAL "${CMAKE_SIZEOF_VOID_P}")) + if(PythonLibsNew_FIND_REQUIRED) + math(EXPR _PYTHON_BITS "${PYTHON_SIZEOF_VOID_P} * 8") + math(EXPR _CMAKE_BITS "${CMAKE_SIZEOF_VOID_P} * 8") + message(FATAL_ERROR "Python config failure: Python is ${_PYTHON_BITS}-bit, " + "chosen compiler is ${_CMAKE_BITS}-bit") + endif() + set(PYTHONLIBS_FOUND FALSE) + set(PythonLibsNew_FOUND FALSE) + return() +endif() + +# The built-in FindPython didn't always give the version numbers +string(REGEX REPLACE "\\." ";" _PYTHON_VERSION_LIST ${_PYTHON_VERSION_LIST}) +list(GET _PYTHON_VERSION_LIST 0 PYTHON_VERSION_MAJOR) +list(GET _PYTHON_VERSION_LIST 1 PYTHON_VERSION_MINOR) +list(GET _PYTHON_VERSION_LIST 2 PYTHON_VERSION_PATCH) +set(PYTHON_VERSION "${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}.${PYTHON_VERSION_PATCH}") + +# Make sure all directory separators are '/' +string(REGEX REPLACE "\\\\" "/" PYTHON_PREFIX "${PYTHON_PREFIX}") +string(REGEX REPLACE "\\\\" "/" PYTHON_INCLUDE_DIR "${PYTHON_INCLUDE_DIR}") +string(REGEX REPLACE "\\\\" "/" PYTHON_SITE_PACKAGES "${PYTHON_SITE_PACKAGES}") + +if(DEFINED PYTHON_LIBRARY) + # Don't write to PYTHON_LIBRARY if it's already set +elseif(CMAKE_HOST_WIN32) + set(PYTHON_LIBRARY "${PYTHON_PREFIX}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib") + + # when run in a venv, PYTHON_PREFIX points to it. But the libraries remain in the + # original python installation. They may be found relative to PYTHON_INCLUDE_DIR. 
+ if(NOT EXISTS "${PYTHON_LIBRARY}") + get_filename_component(_PYTHON_ROOT ${PYTHON_INCLUDE_DIR} DIRECTORY) + set(PYTHON_LIBRARY "${_PYTHON_ROOT}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib") + endif() + + # if we are in MSYS & MINGW, and we didn't find windows python lib, look for system python lib + if(DEFINED ENV{MSYSTEM} + AND MINGW + AND NOT EXISTS "${PYTHON_LIBRARY}") + if(PYTHON_MULTIARCH) + set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}") + else() + set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}") + endif() + unset(PYTHON_LIBRARY) + find_library( + PYTHON_LIBRARY + NAMES "python${PYTHON_LIBRARY_SUFFIX}" + PATHS ${_PYTHON_LIBS_SEARCH} + NO_DEFAULT_PATH) + endif() + + # raise an error if the python libs are still not found. + if(NOT EXISTS "${PYTHON_LIBRARY}") + message(FATAL_ERROR "Python libraries not found") + endif() + +else() + if(PYTHON_MULTIARCH) + set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}") + else() + set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}") + endif() + #message(STATUS "Searching for Python libs in ${_PYTHON_LIBS_SEARCH}") + # Probably this needs to be more involved. It would be nice if the config + # information the python interpreter itself gave us were more complete. + find_library( + PYTHON_LIBRARY + NAMES "python${PYTHON_LIBRARY_SUFFIX}" + PATHS ${_PYTHON_LIBS_SEARCH} + NO_DEFAULT_PATH) + + # If all else fails, just set the name/version and let the linker figure out the path. + if(NOT PYTHON_LIBRARY) + set(PYTHON_LIBRARY python${PYTHON_LIBRARY_SUFFIX}) + endif() +endif() + +mark_as_advanced(PYTHON_LIBRARY PYTHON_INCLUDE_DIR) + +# We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the +# cache entries because they are meant to specify the location of a single +# library. We now set the variables listed by the documentation for this +# module. 
+set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}") +set(PYTHON_LIBRARIES "${PYTHON_LIBRARY}") +if(NOT PYTHON_DEBUG_LIBRARY) + set(PYTHON_DEBUG_LIBRARY "") +endif() +set(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}") + +find_package_message(PYTHON "Found PythonLibs: ${PYTHON_LIBRARIES}" + "${PYTHON_EXECUTABLE}${PYTHON_VERSION_STRING}") + +set(PYTHONLIBS_FOUND TRUE) +set(PythonLibsNew_FOUND TRUE) + +if(NOT PYTHON_MODULE_PREFIX) + set(PYTHON_MODULE_PREFIX "") +endif() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake new file mode 100644 index 0000000..c68d91b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake @@ -0,0 +1,23 @@ +# This module provides function for joining paths +# known from most languages +# +# SPDX-License-Identifier: (MIT OR CC0-1.0) +# Copyright 2020 Jan Tojnar +# https://github.com/jtojnar/cmake-snips +# +# Modelled after Python’s os.path.join +# https://docs.python.org/3.7/library/os.path.html#os.path.join +# Windows not supported +function(join_paths joined_path first_path_segment) + set(temp_path "${first_path_segment}") + foreach(current_segment IN LISTS ARGN) + if(NOT ("${current_segment}" STREQUAL "")) + if(IS_ABSOLUTE "${current_segment}") + set(temp_path "${current_segment}") + else() + set(temp_path "${temp_path}/${current_segment}") + endif() + endif() + endforeach() + set(${joined_path} "${temp_path}" PARENT_SCOPE) +endfunction() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh new file mode 100644 index 0000000..6d83252 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# Script to check include/test code for common pybind11 code style errors. +# +# This script currently checks for +# +# 1. missing space between keyword and parenthesis, e.g.: for(, if(, while( +# 2. Missing space between right parenthesis and brace, e.g. 'for (...){' +# 3. opening brace on its own line. It should always be on the same line as the +# if/while/for/do statement. +# +# Invoke as: tools/check-style.sh +# + +check_style_errors=0 +IFS=$'\n' + + +found="$(grep '\<\(if\|for\|while\|catch\)(\|){' "$@" -rn --color=always)" +if [ -n "$found" ]; then + echo -e '\033[31;01mError: found the following coding style problems:\033[0m' + check_style_errors=1 + echo "${found//^/ /}" +fi + +found="$(awk ' +function prefix(filename, lineno) { + return " \033[35m" filename "\033[36m:\033[32m" lineno "\033[36m:\033[0m" +} +function mark(pattern, string) { sub(pattern, "\033[01;31m&\033[0m", string); return string } +last && /^\s*{/ { + print prefix(FILENAME, FNR-1) mark("\\)\\s*$", last) + print prefix(FILENAME, FNR) mark("^\\s*{", $0) + last="" +} +{ last = /(if|for|while|catch|switch)\s*\(.*\)\s*$/ ? 
$0 : "" } +' "$(find include -type f)" "$@")" +if [ -n "$found" ]; then + check_style_errors=1 + echo -e '\033[31;01mError: braces should occur on the same line as the if/while/.. statement. Found issues in the following files:\033[0m' + echo "$found" +fi + +exit $check_style_errors diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in new file mode 100644 index 0000000..1e5d2bb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in @@ -0,0 +1,23 @@ +# Source: https://gitlab.kitware.com/cmake/community/-/wikis/FAQ#can-i-do-make-uninstall-with-cmake + +if(NOT EXISTS "@CMAKE_BINARY_DIR@/install_manifest.txt") + message(FATAL_ERROR "Cannot find install manifest: @CMAKE_BINARY_DIR@/install_manifest.txt") +endif() + +file(READ "@CMAKE_BINARY_DIR@/install_manifest.txt" files) +string(REGEX REPLACE "\n" ";" files "${files}") +foreach(file ${files}) + message(STATUS "Uninstalling $ENV{DESTDIR}${file}") + if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}") + exec_program( + "@CMAKE_COMMAND@" ARGS + "-E remove \"$ENV{DESTDIR}${file}\"" + OUTPUT_VARIABLE rm_out + RETURN_VALUE rm_retval) + if(NOT "${rm_retval}" STREQUAL 0) + message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}") + endif() + else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}") + message(STATUS "File $ENV{DESTDIR}${file} does not exist.") + endif() +endforeach() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py new file mode 100644 index 0000000..4ec9add --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py @@ -0,0 +1,39 @@ +"""Simple script for rebuilding .codespell-ignore-lines + +Usage: + +cat < /dev/null > .codespell-ignore-lines +pre-commit run --all-files codespell >& /tmp/codespell_errors.txt +python3 tools/codespell_ignore_lines_from_errors.py /tmp/codespell_errors.txt > .codespell-ignore-lines + +git diff to review changes, then commit, push. 
+""" + +import sys +from typing import List + + +def run(args: List[str]) -> None: + assert len(args) == 1, "codespell_errors.txt" + cache = {} + done = set() + with open(args[0]) as f: + lines = f.read().splitlines() + + for line in sorted(lines): + i = line.find(" ==> ") + if i > 0: + flds = line[:i].split(":") + if len(flds) >= 2: + filename, line_num = flds[:2] + if filename not in cache: + with open(filename) as f: + cache[filename] = f.read().splitlines() + supp = cache[filename][int(line_num) - 1] + if supp not in done: + print(supp) + done.add(supp) + + +if __name__ == "__main__": + run(args=sys.argv[1:]) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py new file mode 100644 index 0000000..1ac9afb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py @@ -0,0 +1,36 @@ +import os +import sys + +# Internal build script for generating debugging test .so size. +# Usage: +# python libsize.py file.so save.txt -- displays the size of file.so and, if save.txt exists, compares it to the +# size in it, then overwrites save.txt with the new size for future runs. + +if len(sys.argv) != 3: + sys.exit("Invalid arguments: usage: python libsize.py file.so save.txt") + +lib = sys.argv[1] +save = sys.argv[2] + +if not os.path.exists(lib): + sys.exit(f"Error: requested file ({lib}) does not exist") + +libsize = os.path.getsize(lib) + +print("------", os.path.basename(lib), "file size:", libsize, end="") + +if os.path.exists(save): + with open(save) as sf: + oldsize = int(sf.readline()) + + if oldsize > 0: + change = libsize - oldsize + if change == 0: + print(" (no change)") + else: + print(f" (change of {change:+} bytes = {change / oldsize:+.2%})") +else: + print() + +with open(save, "w") as sf: + sf.write(str(libsize)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py new file mode 100644 index 0000000..b5bd832 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +import re + +import ghapi.all +from rich import print +from rich.syntax import Syntax + +ENTRY = re.compile( + r""" + Suggested \s changelog \s entry: + .* + ```rst + \s* + (.*?) + \s* + ``` +""", + re.DOTALL | re.VERBOSE, +) + +print() + + +api = ghapi.all.GhApi(owner="pybind", repo="pybind11") + +issues_pages = ghapi.page.paged( + api.issues.list_for_repo, labels="needs changelog", state="closed" +) +issues = (issue for page in issues_pages for issue in page) +missing = [] + +for issue in issues: + changelog = ENTRY.findall(issue.body or "") + if not changelog or not changelog[0]: + missing.append(issue) + else: + (msg,) = changelog + if not msg.startswith("* "): + msg = "* " + msg + if not msg.endswith("."): + msg += "." 
+ + msg += f"\n `#{issue.number} <{issue.html_url}>`_" + + print(Syntax(msg, "rst", theme="ansi_light", word_wrap=True)) + print() + +if missing: + print() + print("[blue]" + "-" * 30) + print() + + for issue in missing: + print(f"[red bold]Missing:[/red bold][red] {issue.title}") + print(f"[red] {issue.html_url}\n") + + print("[bold]Template:\n") + msg = "## Suggested changelog entry:\n\n```rst\n\n```" + print(Syntax(msg, "md", theme="ansi_light")) + +print() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in new file mode 100644 index 0000000..402f0b3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in @@ -0,0 +1,7 @@ +prefix=@prefix_for_pc_file@ +includedir=@includedir_for_pc_file@ + +Name: @PROJECT_NAME@ +Description: Seamless operability between C++11 and Python +Version: @PROJECT_VERSION@ +Cflags: -I${includedir} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake new file mode 100644 index 0000000..308d1b7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake @@ -0,0 +1,405 @@ +#[======================================================[.rst + +Adds the following targets:: + + pybind11::pybind11 - link to headers and pybind11 + pybind11::module - Adds module links + pybind11::embed - Adds embed links + pybind11::lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set) + pybind11::thin_lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set) + pybind11::python_link_helper - Adds link to Python libraries + pybind11::windows_extras - MSVC bigobj and mp for building multithreaded + pybind11::opt_size - avoid optimizations that increase code size + +Adds the following functions:: + + pybind11_strip(target) - strip target after building on linux/macOS + pybind11_find_import(module) - See if a module is installed. + +#]======================================================] + +# CMake 3.10 has an include_guard command, but we can't use that yet +# include_guard(global) (pre-CMake 3.10) +if(TARGET pybind11::pybind11) + return() +endif() + +# If we are in subdirectory mode, all IMPORTED targets must be GLOBAL. If we +# are in CONFIG mode, they should be "normal" targets instead. +# In CMake 3.11+ you can promote a target to global after you create it, +# which might be simpler than this check. 
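+# For illustration, that promotion would look roughly like:
+#   set_property(TARGET pybind11::headers PROPERTY IMPORTED_GLOBAL TRUE)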
+get_property(
+  is_config
+  TARGET pybind11::headers
+  PROPERTY IMPORTED)
+if(NOT is_config)
+  set(optional_global GLOBAL)
+endif()
+
+# If not run in Python mode, we still would like this to at least
+# include pybind11's include directory:
+set(pybind11_INCLUDE_DIRS
+    "${pybind11_INCLUDE_DIR}"
+    CACHE INTERNAL "Include directory for pybind11 (Python not requested)")
+
+# --------------------- Shared targets ----------------------------
+
+# Build an interface library target:
+add_library(pybind11::pybind11 IMPORTED INTERFACE ${optional_global})
+set_property(
+  TARGET pybind11::pybind11
+  APPEND
+  PROPERTY INTERFACE_LINK_LIBRARIES pybind11::headers)
+
+# Build a module target:
+add_library(pybind11::module IMPORTED INTERFACE ${optional_global})
+set_property(
+  TARGET pybind11::module
+  APPEND
+  PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# Build an embed library target:
+add_library(pybind11::embed IMPORTED INTERFACE ${optional_global})
+set_property(
+  TARGET pybind11::embed
+  APPEND
+  PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# --------------------------- link helper ---------------------------
+
+add_library(pybind11::python_link_helper IMPORTED INTERFACE ${optional_global})
+
+if(CMAKE_VERSION VERSION_LESS 3.13)
+  # In CMake 3.11+, you can set INTERFACE properties via the normal methods, and
+  # this would be simpler.
+  set_property(
+    TARGET pybind11::python_link_helper
+    APPEND
+    PROPERTY INTERFACE_LINK_LIBRARIES "$<$<PLATFORM_ID:Darwin>:-undefined dynamic_lookup>")
+else()
+  # link_options was added in 3.13+
+  # This is safer, because you are ensured the deduplication pass in CMake will not consider
+  # these separate and remove one but not the other.
+  set_property(
+    TARGET pybind11::python_link_helper
+    APPEND
+    PROPERTY INTERFACE_LINK_OPTIONS "$<$<PLATFORM_ID:Darwin>:LINKER:-undefined,dynamic_lookup>")
+endif()
+
+# ------------------------ Windows extras -------------------------
+
+add_library(pybind11::windows_extras IMPORTED INTERFACE ${optional_global})
+
+if(MSVC) # That's also clang-cl
+  # /bigobj is needed for bigger binding projects due to the limit to 64k
+  # addressable sections
+  set_property(
+    TARGET pybind11::windows_extras
+    APPEND
+    PROPERTY INTERFACE_COMPILE_OPTIONS $<$<COMPILE_LANGUAGE:CXX>:/bigobj>)
+
+  # /MP enables multithreaded builds (relevant when there are many files) for MSVC
+  if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # no Clang no Intel
+    if(CMAKE_VERSION VERSION_LESS 3.11)
+      set_property(
+        TARGET pybind11::windows_extras
+        APPEND
+        PROPERTY INTERFACE_COMPILE_OPTIONS $<$<NOT:$<CONFIG:Debug>>:/MP>)
+    else()
+      # Only set these options for C++ files. This is important so that, for
+      # instance, projects that include other types of source files like CUDA
+      # .cu files don't get these options propagated to nvcc since that would
+      # cause the build to fail.
+      set_property(
+        TARGET pybind11::windows_extras
+        APPEND
+        PROPERTY INTERFACE_COMPILE_OPTIONS
+                 $<$<NOT:$<CONFIG:Debug>>:$<$<COMPILE_LANGUAGE:CXX>:/MP>>)
+    endif()
+  endif()
+endif()
+
+# ----------------------- Optimize binary size --------------------------
+
+add_library(pybind11::opt_size IMPORTED INTERFACE ${optional_global})
+
+if(MSVC)
+  set(PYBIND11_OPT_SIZE /Os)
+else()
+  set(PYBIND11_OPT_SIZE -Os)
+endif()
+
+set_property(
+  TARGET pybind11::opt_size
+  APPEND
+  PROPERTY INTERFACE_COMPILE_OPTIONS $<$<CONFIG:Release>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:MinSizeRel>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:RelWithDebInfo>:${PYBIND11_OPT_SIZE}>)
+
+# ----------------------- Legacy option --------------------------
+
+# Warn or error if old variable name used
+if(PYBIND11_CPP_STANDARD)
+  string(REGEX MATCH [[..$]] VAL "${PYBIND11_CPP_STANDARD}")
+  if(CMAKE_CXX_STANDARD)
+    if(NOT CMAKE_CXX_STANDARD STREQUAL VAL)
+      message(WARNING "CMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD} does not match "
+                      "PYBIND11_CPP_STANDARD=${PYBIND11_CPP_STANDARD}, "
+                      "please remove PYBIND11_CPP_STANDARD from your cache")
+    endif()
+  else()
+    set(supported_standards 11 14 17 20)
+    if("${VAL}" IN_LIST supported_standards)
+      message(WARNING "USE -DCMAKE_CXX_STANDARD=${VAL} instead of PYBIND11_CPP_STANDARD")
+      set(CMAKE_CXX_STANDARD
+          ${VAL}
+          CACHE STRING "From PYBIND11_CPP_STANDARD")
+    else()
+      message(FATAL_ERROR "PYBIND11_CPP_STANDARD should be replaced with CMAKE_CXX_STANDARD "
+                          "(last two chars: ${VAL} not understood as a valid CXX std)")
+    endif()
+  endif()
+endif()
+
+# --------------------- Python specifics -------------------------
+
+# CMake 3.27 removes the classic FindPythonInterp if CMP0148 is NEW
+if(CMAKE_VERSION VERSION_LESS "3.27")
+  set(_pybind11_missing_old_python "OLD")
+else()
+  cmake_policy(GET CMP0148 _pybind11_missing_old_python)
+endif()
+
+# Check to see which Python mode we are in, new, old, or no python
+if(PYBIND11_NOPYTHON)
+  set(_pybind11_nopython ON)
+elseif(
+  _pybind11_missing_old_python STREQUAL "NEW"
+  OR PYBIND11_FINDPYTHON
+  OR Python_FOUND
+  OR Python2_FOUND
+  OR Python3_FOUND)
+  # New mode
+  include("${CMAKE_CURRENT_LIST_DIR}/pybind11NewTools.cmake")
+
+else()
+
+  # Classic mode
+  include("${CMAKE_CURRENT_LIST_DIR}/pybind11Tools.cmake")
+
+endif()
+
+# --------------------- pybind11_find_import -------------------------------
+
+if(NOT _pybind11_nopython)
+  # Check to see if modules are importable. Use REQUIRED to force an error if
+  # one of the modules is not found. <package_name>_FOUND will be set if the
+  # package was found (underscores replace dashes if present). QUIET will hide
+  # the found message, and VERSION will require a minimum version. A successful
+  # find will cache the result.
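+  #
+  # Example call (the package name and version are placeholders):
+  #   pybind11_find_import(numpy VERSION 1.20)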
+ function(pybind11_find_import PYPI_NAME) + # CMake variables need underscores (PyPI doesn't care) + string(REPLACE "-" "_" NORM_PYPI_NAME "${PYPI_NAME}") + + # Return if found previously + if(${NORM_PYPI_NAME}_FOUND) + return() + endif() + + set(options "REQUIRED;QUIET") + set(oneValueArgs "VERSION") + cmake_parse_arguments(ARG "${options}" "${oneValueArgs}" "" ${ARGN}) + + if(ARG_REQUIRED) + set(status_level FATAL_ERROR) + else() + set(status_level WARNING) + endif() + + execute_process( + COMMAND + ${${_Python}_EXECUTABLE} -c + "from pkg_resources import get_distribution; print(get_distribution('${PYPI_NAME}').version)" + RESULT_VARIABLE RESULT_PRESENT + OUTPUT_VARIABLE PKG_VERSION + ERROR_QUIET) + + string(STRIP "${PKG_VERSION}" PKG_VERSION) + + # If a result is present, this failed + if(RESULT_PRESENT) + set(${NORM_PYPI_NAME}_FOUND + ${NORM_PYPI_NAME}-NOTFOUND + CACHE INTERNAL "") + # Always warn or error + message( + ${status_level} + "Missing: ${PYPI_NAME} ${ARG_VERSION}\nTry: ${${_Python}_EXECUTABLE} -m pip install ${PYPI_NAME}" + ) + else() + if(ARG_VERSION AND PKG_VERSION VERSION_LESS ARG_VERSION) + message( + ${status_level} + "Version incorrect: ${PYPI_NAME} ${PKG_VERSION} found, ${ARG_VERSION} required - try upgrading" + ) + else() + set(${NORM_PYPI_NAME}_FOUND + YES + CACHE INTERNAL "") + set(${NORM_PYPI_NAME}_VERSION + ${PKG_VERSION} + CACHE INTERNAL "") + endif() + if(NOT ARG_QUIET) + message(STATUS "Found ${PYPI_NAME} ${PKG_VERSION}") + endif() + endif() + if(NOT ARG_VERSION OR (NOT PKG_VERSION VERSION_LESS ARG_VERSION)) + # We have successfully found a good version, cache to avoid calling again. + endif() + endfunction() +endif() + +# --------------------- LTO ------------------------------- + +include(CheckCXXCompilerFlag) + +# Checks whether the given CXX/linker flags can compile and link a cxx file. +# cxxflags and linkerflags are lists of flags to use. The result variable is a +# unique variable name for each set of flags: the compilation result will be +# cached base on the result variable. If the flags work, sets them in +# cxxflags_out/linkerflags_out internal cache variables (in addition to +# ${result}). 
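+# For illustration, a call such as
+#   _pybind11_return_if_cxx_and_linker_flags_work(HAS_FLTO "-flto" "-flto"
+#                                                 PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+# only sets the two output variables when "-flto" both compiles and links.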
+function(_pybind11_return_if_cxx_and_linker_flags_work result cxxflags linkerflags cxxflags_out + linkerflags_out) + set(CMAKE_REQUIRED_LIBRARIES ${linkerflags}) + check_cxx_compiler_flag("${cxxflags}" ${result}) + if(${result}) + set(${cxxflags_out} + "${cxxflags}" + PARENT_SCOPE) + set(${linkerflags_out} + "${linkerflags}" + PARENT_SCOPE) + endif() +endfunction() + +function(_pybind11_generate_lto target prefer_thin_lto) + if(MINGW) + message(STATUS "${target} disabled (problems with undefined symbols for MinGW for now)") + return() + endif() + + if(CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang") + set(cxx_append "") + set(linker_append "") + if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND NOT APPLE) + # Clang Gold plugin does not support -Os; append -O3 to MinSizeRel builds to override it + set(linker_append ";$<$:-O3>") + elseif(CMAKE_CXX_COMPILER_ID MATCHES "GNU" AND NOT MINGW) + set(cxx_append ";-fno-fat-lto-objects") + endif() + + if(CMAKE_SYSTEM_PROCESSOR MATCHES "ppc64le" OR CMAKE_SYSTEM_PROCESSOR MATCHES "mips64") + set(NO_FLTO_ARCH TRUE) + else() + set(NO_FLTO_ARCH FALSE) + endif() + + if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" + AND prefer_thin_lto + AND NOT NO_FLTO_ARCH) + _pybind11_return_if_cxx_and_linker_flags_work( + HAS_FLTO_THIN "-flto=thin${cxx_append}" "-flto=thin${linker_append}" + PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS) + endif() + + if(NOT HAS_FLTO_THIN AND NOT NO_FLTO_ARCH) + _pybind11_return_if_cxx_and_linker_flags_work( + HAS_FLTO "-flto${cxx_append}" "-flto${linker_append}" PYBIND11_LTO_CXX_FLAGS + PYBIND11_LTO_LINKER_FLAGS) + endif() + elseif(CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM") + # IntelLLVM equivalent to LTO is called IPO; also IntelLLVM is WIN32/UNIX + # WARNING/HELP WANTED: This block of code is currently not covered by pybind11 GitHub Actions! + if(WIN32) + _pybind11_return_if_cxx_and_linker_flags_work( + HAS_INTEL_IPO "-Qipo" "-Qipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS) + else() + _pybind11_return_if_cxx_and_linker_flags_work( + HAS_INTEL_IPO "-ipo" "-ipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS) + endif() + elseif(CMAKE_CXX_COMPILER_ID MATCHES "Intel") + # Intel equivalent to LTO is called IPO + _pybind11_return_if_cxx_and_linker_flags_work(HAS_INTEL_IPO "-ipo" "-ipo" + PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS) + elseif(MSVC) + # cmake only interprets libraries as linker flags when they start with a - (otherwise it + # converts /LTCG to \LTCG as if it was a Windows path). 
Luckily MSVC supports passing flags + # with - instead of /, even if it is a bit non-standard: + _pybind11_return_if_cxx_and_linker_flags_work(HAS_MSVC_GL_LTCG "/GL" "-LTCG" + PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS) + endif() + + # Enable LTO flags if found, except for Debug builds + if(PYBIND11_LTO_CXX_FLAGS) + # CONFIG takes multiple values in CMake 3.19+, until then we have to use OR + set(is_debug "$,$>") + set(not_debug "$") + set(cxx_lang "$") + if(MSVC AND CMAKE_VERSION VERSION_LESS 3.11) + set(genex "${not_debug}") + else() + set(genex "$") + endif() + set_property( + TARGET ${target} + APPEND + PROPERTY INTERFACE_COMPILE_OPTIONS "$<${genex}:${PYBIND11_LTO_CXX_FLAGS}>") + if(CMAKE_PROJECT_NAME STREQUAL "pybind11") + message(STATUS "${target} enabled") + endif() + else() + if(CMAKE_PROJECT_NAME STREQUAL "pybind11") + message(STATUS "${target} disabled (not supported by the compiler and/or linker)") + endif() + endif() + + if(PYBIND11_LTO_LINKER_FLAGS) + if(CMAKE_VERSION VERSION_LESS 3.11) + set_property( + TARGET ${target} + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>") + else() + set_property( + TARGET ${target} + APPEND + PROPERTY INTERFACE_LINK_OPTIONS "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>") + endif() + endif() +endfunction() + +if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION) + add_library(pybind11::lto IMPORTED INTERFACE ${optional_global}) + _pybind11_generate_lto(pybind11::lto FALSE) + + add_library(pybind11::thin_lto IMPORTED INTERFACE ${optional_global}) + _pybind11_generate_lto(pybind11::thin_lto TRUE) +endif() + +# ---------------------- pybind11_strip ----------------------------- + +function(pybind11_strip target_name) + # Strip unnecessary sections of the binary on Linux/macOS + if(CMAKE_STRIP) + if(APPLE) + set(x_opt -x) + endif() + + add_custom_command( + TARGET ${target_name} + POST_BUILD + COMMAND ${CMAKE_STRIP} ${x_opt} $) + endif() +endfunction() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in new file mode 100644 index 0000000..5734f43 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in @@ -0,0 +1,233 @@ +#[=============================================================================[.rst: + +pybind11Config.cmake +#################### + +Exported variables +================== + +This module sets the following variables in your project: + +``pybind11_FOUND`` + true if pybind11 and all required components found on the system +``pybind11_VERSION`` + pybind11 version in format Major.Minor.Release +``pybind11_VERSION_TYPE`` + pybind11 version type (``dev*`` or empty for a release) +``pybind11_INCLUDE_DIRS`` + Directories where pybind11 and python headers are located. +``pybind11_INCLUDE_DIR`` + Directory where pybind11 headers are located. +``pybind11_DEFINITIONS`` + Definitions necessary to use pybind11, namely USING_pybind11. +``pybind11_LIBRARIES`` + Compile flags and python libraries (as needed) to link against. +``pybind11_LIBRARY`` + Empty. + +Available components: None + + +Exported targets +================ + +If pybind11 is found, this module defines the following ``IMPORTED`` +interface library targets: + +``pybind11::module`` + for extension modules. +``pybind11::embed`` + for embedding the Python interpreter. 
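A minimal sketch of consuming the exported variables and the two primary targets listed above (target and source names are hypothetical; fuller per-mode examples follow below):

.. code-block:: cmake

   find_package(pybind11 CONFIG REQUIRED)

   # Exported variables documented above
   message(STATUS "pybind11 ${pybind11_VERSION}${pybind11_VERSION_TYPE}")
   message(STATUS "pybind11 headers in ${pybind11_INCLUDE_DIR}")

   # An extension module and an embedding executable
   add_library(my_ext MODULE my_ext.cpp)
   target_link_libraries(my_ext PRIVATE pybind11::module)

   add_executable(my_host my_host.cpp)
   target_link_libraries(my_host PRIVATE pybind11::embed)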
+ +Python headers, libraries (as needed by platform), and the C++ standard +are attached to the target. + +Advanced targets are also supplied - these are primary for users building +complex applications, and they are available in all modes: + +``pybind11::headers`` + Just the pybind11 headers and minimum compile requirements. +``pybind11::pybind11`` + Python headers too. +``pybind11::python_link_helper`` + Just the "linking" part of ``pybind11:module``, for CMake < 3.15. +``pybind11::thin_lto`` + An alternative to ``INTERPROCEDURAL_OPTIMIZATION``. +``pybind11::lto`` + An alternative to ``INTERPROCEDURAL_OPTIMIZATION`` (also avoids thin LTO on clang). +``pybind11::windows_extras`` + Adds bigobj and mp for MSVC. + +Modes +===== + +There are two modes provided; classic, which is built on the old Python +discovery packages in CMake, or the new FindPython mode, which uses FindPython +from 3.12+ forward (3.15+ _highly_ recommended). If you set the minimum or +maximum version of CMake to 3.27+, then FindPython is the default (since +FindPythonInterp/FindPythonLibs has been removed via policy `CMP0148`). + +New FindPython mode +^^^^^^^^^^^^^^^^^^^ + +To activate this mode, either call ``find_package(Python COMPONENTS Interpreter Development)`` +before finding this package, or set the ``PYBIND11_FINDPYTHON`` variable to ON. In this mode, +you can either use the basic targets, or use the FindPython tools: + +.. code-block:: cmake + + find_package(Python COMPONENTS Interpreter Development) + find_package(pybind11 CONFIG) + + # pybind11 method: + pybind11_add_module(MyModule1 src1.cpp) + + # Python method: + Python_add_library(MyModule2 src2.cpp) + target_link_libraries(MyModule2 pybind11::headers) + set_target_properties(MyModule2 PROPERTIES + INTERPROCEDURAL_OPTIMIZATION ON + CXX_VISIBILITY_PRESET ON + VISIBILITY_INLINES_HIDDEN ON) + +If you build targets yourself, you may be interested in stripping the output +for reduced size; this is the one other feature that the helper function gives you. + +Classic mode +^^^^^^^^^^^^ + +Set PythonLibsNew variables to influence python detection and +CMAKE_CXX_STANDARD to influence standard setting. + +.. code-block:: cmake + + find_package(pybind11 CONFIG REQUIRED) + + # Create an extension module + add_library(mylib MODULE main.cpp) + target_link_libraries(mylib PUBLIC pybind11::module) + + # Or embed the Python interpreter into an executable + add_executable(myexe main.cpp) + target_link_libraries(myexe PUBLIC pybind11::embed) + + +Hints +===== + +The following variables can be set to guide the search for this package: + +``pybind11_DIR`` + CMake variable, set to directory containing this Config file. +``CMAKE_PREFIX_PATH`` + CMake variable, set to root directory of this package. +``PATH`` + Environment variable, set to bin directory of this package. +``CMAKE_DISABLE_FIND_PACKAGE_pybind11`` + CMake variable, disables ``find_package(pybind11)`` when not ``REQUIRED``, + perhaps to force internal build. + +Commands +======== + +pybind11_add_module +^^^^^^^^^^^^^^^^^^^ + +This module defines the following commands to assist with creating Python modules: + +.. code-block:: cmake + + pybind11_add_module( + [STATIC|SHARED|MODULE] + [THIN_LTO] [OPT_SIZE] [NO_EXTRAS] [WITHOUT_SOABI] + ... + ) + +Add a module and setup all helpers. You can select the type of the library; the +default is ``MODULE``. There are several options: + +``OPT_SIZE`` + Optimize for size, even if the ``CMAKE_BUILD_TYPE`` is not ``MinSizeRel``. 
+``THIN_LTO`` + Use thin TLO instead of regular if there's a choice (pybind11's selection + is disabled if ``CMAKE_INTERPROCEDURAL_OPTIMIZATIONS`` is set). +``WITHOUT_SOABI`` + Disable the SOABI component (``PYBIND11_NEWPYTHON`` mode only). +``NO_EXTRAS`` + Disable all extras, exit immediately after making the module. + +pybind11_strip +^^^^^^^^^^^^^^ + +.. code-block:: cmake + + pybind11_strip() + +Strip a target after building it (linux/macOS), called by ``pybind11_add_module``. + +pybind11_extension +^^^^^^^^^^^^^^^^^^ + +.. code-block:: cmake + + pybind11_extension() + +Sets the Python extension name correctly for Python on your platform, called by +``pybind11_add_module``. + +pybind11_find_import(module) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: cmake + + pybind11_find_import( [VERSION ] [REQUIRED] [QUIET]) + +See if a module is installed. Use the registered name (the one on PyPI). You +can specify a ``VERSION``, and you can specify ``REQUIRED`` or ``QUIET``. Only available if +``NOPYTHON`` mode is not active. Sets ``module_VERSION`` and ``module_FOUND``. Caches the +result once a valid install is found. + +Suggested usage +=============== + +Using ``find_package`` with version info is not recommended except for release versions. + +.. code-block:: cmake + + find_package(pybind11 CONFIG) + find_package(pybind11 2.9 EXACT CONFIG REQUIRED) + +#]=============================================================================] +@PACKAGE_INIT@ + +# Location of pybind11/pybind11.h +# This will be relative unless explicitly set as absolute +set(pybind11_INCLUDE_DIR "@pybind11_INCLUDEDIR@") + +set(pybind11_LIBRARY "") +set(pybind11_DEFINITIONS USING_pybind11) +set(pybind11_VERSION_TYPE "@pybind11_VERSION_TYPE@") + +check_required_components(pybind11) + +if(TARGET pybind11::python_link_helper) + # This has already been setup elsewhere, such as with a previous call or + # add_subdirectory + return() +endif() + +include("${CMAKE_CURRENT_LIST_DIR}/pybind11Targets.cmake") + +# Easier to use / remember +add_library(pybind11::headers IMPORTED INTERFACE) +set_target_properties(pybind11::headers PROPERTIES INTERFACE_LINK_LIBRARIES + pybind11::pybind11_headers) + +include("${CMAKE_CURRENT_LIST_DIR}/pybind11Common.cmake") + +if(NOT pybind11_FIND_QUIETLY) + message( + STATUS + "Found pybind11: ${pybind11_INCLUDE_DIR} (found version \"${pybind11_VERSION}${pybind11_VERSION_TYPE}\")" + ) +endif() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake new file mode 100644 index 0000000..7d7424a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake @@ -0,0 +1,256 @@ +# tools/pybind11NewTools.cmake -- Build system for the pybind11 modules +# +# Copyright (c) 2020 Wenzel Jakob and Henry Schreiner +# +# All rights reserved. Use of this source code is governed by a +# BSD-style license that can be found in the LICENSE file. 
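Before the FindPython-based implementation that follows, a hedged sketch of how the options documented above combine in practice; the target and source names are invented for illustration:

.. code-block:: cmake

   find_package(pybind11 CONFIG REQUIRED)

   # OPT_SIZE adds /Os (MSVC) or -Os even when CMAKE_BUILD_TYPE is not
   # MinSizeRel; THIN_LTO prefers -flto=thin where the toolchain allows it.
   pybind11_add_module(fastmod MODULE src/fastmod.cpp OPT_SIZE THIN_LTO)

   # NO_EXTRAS returns right after the module is created, so LTO, size
   # optimisation and post-build stripping are all skipped ...
   pybind11_add_module(rawmod MODULE src/rawmod.cpp NO_EXTRAS)

   # ... but stripping can still be requested explicitly (Linux/macOS only).
   pybind11_strip(rawmod)

NO_EXTRAS is mainly useful when a project wants full control over optimisation and post-processing of the module target.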
+ +if(CMAKE_VERSION VERSION_LESS 3.12) + message(FATAL_ERROR "You cannot use the new FindPython module with CMake < 3.12") +endif() + +include_guard(DIRECTORY) + +get_property( + is_config + TARGET pybind11::headers + PROPERTY IMPORTED) + +if(pybind11_FIND_QUIETLY) + set(_pybind11_quiet QUIET) +else() + set(_pybind11_quiet "") +endif() + +if(NOT Python_FOUND AND NOT Python3_FOUND) + if(NOT DEFINED Python_FIND_IMPLEMENTATIONS) + set(Python_FIND_IMPLEMENTATIONS CPython PyPy) + endif() + + # GitHub Actions like activation + if(NOT DEFINED Python_ROOT_DIR AND DEFINED ENV{pythonLocation}) + set(Python_ROOT_DIR "$ENV{pythonLocation}") + endif() + + find_package(Python 3.6 REQUIRED COMPONENTS Interpreter Development ${_pybind11_quiet}) + + # If we are in submodule mode, export the Python targets to global targets. + # If this behavior is not desired, FindPython _before_ pybind11. + if(NOT is_config) + set_property(TARGET Python::Python PROPERTY IMPORTED_GLOBAL TRUE) + set_property(TARGET Python::Interpreter PROPERTY IMPORTED_GLOBAL TRUE) + if(TARGET Python::Module) + set_property(TARGET Python::Module PROPERTY IMPORTED_GLOBAL TRUE) + endif() + endif() +endif() + +if(Python_FOUND) + set(_Python + Python + CACHE INTERNAL "" FORCE) +elseif(Python3_FOUND) + set(_Python + Python3 + CACHE INTERNAL "" FORCE) +endif() + +if(PYBIND11_MASTER_PROJECT) + if(${_Python}_INTERPRETER_ID MATCHES "PyPy") + message(STATUS "PyPy ${${_Python}_PyPy_VERSION} (Py ${${_Python}_VERSION})") + else() + message(STATUS "${_Python} ${${_Python}_VERSION}") + endif() +endif() + +# If a user finds Python, they may forget to include the Interpreter component +# and the following two steps require it. It is highly recommended by CMake +# when finding development libraries anyway, so we will require it. +if(NOT DEFINED ${_Python}_EXECUTABLE) + message( + FATAL_ERROR + "${_Python} was found without the Interpreter component. Pybind11 requires this component.") + +endif() + +if(NOT ${_Python}_EXECUTABLE STREQUAL PYBIND11_PYTHON_EXECUTABLE_LAST) + # Detect changes to the Python version/binary in subsequent CMake runs, and refresh config if needed + unset(PYTHON_IS_DEBUG CACHE) + unset(PYTHON_MODULE_EXTENSION CACHE) + set(PYBIND11_PYTHON_EXECUTABLE_LAST + "${${_Python}_EXECUTABLE}" + CACHE INTERNAL "Python executable during the last CMake run") +endif() + +if(NOT DEFINED PYTHON_IS_DEBUG) + # Debug check - see https://stackoverflow.com/questions/646518/python-how-to-detect-debug-Interpreter + execute_process( + COMMAND "${${_Python}_EXECUTABLE}" "-c" + "import sys; sys.exit(hasattr(sys, 'gettotalrefcount'))" + RESULT_VARIABLE _PYTHON_IS_DEBUG) + set(PYTHON_IS_DEBUG + "${_PYTHON_IS_DEBUG}" + CACHE INTERNAL "Python debug status") +endif() + +# Get the suffix - SO is deprecated, should use EXT_SUFFIX, but this is +# required for PyPy3 (as of 7.3.1) +if(NOT DEFINED PYTHON_MODULE_EXTENSION) + execute_process( + COMMAND + "${${_Python}_EXECUTABLE}" "-c" + "import sys, importlib; s = importlib.import_module('distutils.sysconfig' if sys.version_info < (3, 10) else 'sysconfig'); print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO'))" + OUTPUT_VARIABLE _PYTHON_MODULE_EXTENSION + ERROR_VARIABLE _PYTHON_MODULE_EXTENSION_ERR + OUTPUT_STRIP_TRAILING_WHITESPACE) + + if(_PYTHON_MODULE_EXTENSION STREQUAL "") + message( + FATAL_ERROR "pybind11 could not query the module file extension, likely the 'distutils'" + "package is not installed. 
Full error message:\n${_PYTHON_MODULE_EXTENSION_ERR}") + endif() + + # This needs to be available for the pybind11_extension function + set(PYTHON_MODULE_EXTENSION + "${_PYTHON_MODULE_EXTENSION}" + CACHE INTERNAL "") +endif() + +# Python debug libraries expose slightly different objects before 3.8 +# https://docs.python.org/3.6/c-api/intro.html#debugging-builds +# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib +if(PYTHON_IS_DEBUG) + set_property( + TARGET pybind11::pybind11 + APPEND + PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG) +endif() + +# Check on every access - since Python can change - do nothing in that case. + +if(DEFINED ${_Python}_INCLUDE_DIRS) + # Only add Python for build - must be added during the import for config + # since it has to be re-discovered. + # + # This needs to be a target to be included after the local pybind11 + # directory, just in case there there is an installed pybind11 sitting + # next to Python's includes. It also ensures Python is a SYSTEM library. + add_library(pybind11::python_headers INTERFACE IMPORTED) + set_property( + TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES + "$") + set_property( + TARGET pybind11::pybind11 + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers) + set(pybind11_INCLUDE_DIRS + "${pybind11_INCLUDE_DIR}" "${${_Python}_INCLUDE_DIRS}" + CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located") +endif() + +# In CMake 3.18+, you can find these separately, so include an if +if(TARGET ${_Python}::Python) + set_property( + TARGET pybind11::embed + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Python) +endif() + +# CMake 3.15+ has this +if(TARGET ${_Python}::Module) + set_property( + TARGET pybind11::module + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Module) +else() + set_property( + TARGET pybind11::module + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_link_helper) +endif() + +# WITHOUT_SOABI and WITH_SOABI will disable the custom extension handling used by pybind11. +# WITH_SOABI is passed on to python_add_library. +function(pybind11_add_module target_name) + cmake_parse_arguments(PARSE_ARGV 1 ARG + "STATIC;SHARED;MODULE;THIN_LTO;OPT_SIZE;NO_EXTRAS;WITHOUT_SOABI" "" "") + + if(ARG_STATIC) + set(lib_type STATIC) + elseif(ARG_SHARED) + set(lib_type SHARED) + else() + set(lib_type MODULE) + endif() + + if("${_Python}" STREQUAL "Python") + python_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS}) + elseif("${_Python}" STREQUAL "Python3") + python3_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS}) + else() + message(FATAL_ERROR "Cannot detect FindPython version: ${_Python}") + endif() + + target_link_libraries(${target_name} PRIVATE pybind11::headers) + + if(lib_type STREQUAL "MODULE") + target_link_libraries(${target_name} PRIVATE pybind11::module) + else() + target_link_libraries(${target_name} PRIVATE pybind11::embed) + endif() + + if(MSVC) + target_link_libraries(${target_name} PRIVATE pybind11::windows_extras) + endif() + + # -fvisibility=hidden is required to allow multiple modules compiled against + # different pybind versions to work properly, and for some features (e.g. + # py::module_local). We force it on everything inside the `pybind11` + # namespace; also turning it on for a pybind module compilation here avoids + # potential warnings or issues from having mixed hidden/non-hidden types. 
+ if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET) + set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden") + endif() + + if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET) + set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden") + endif() + + # If we don't pass a WITH_SOABI or WITHOUT_SOABI, use our own default handling of extensions + if(NOT ARG_WITHOUT_SOABI AND NOT "WITH_SOABI" IN_LIST ARG_UNPARSED_ARGUMENTS) + pybind11_extension(${target_name}) + endif() + + if(ARG_NO_EXTRAS) + return() + endif() + + if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION) + if(ARG_THIN_LTO) + target_link_libraries(${target_name} PRIVATE pybind11::thin_lto) + else() + target_link_libraries(${target_name} PRIVATE pybind11::lto) + endif() + endif() + + # Use case-insensitive comparison to match the result of $ + string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE) + if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO) + # Strip unnecessary sections of the binary on Linux/macOS + pybind11_strip(${target_name}) + endif() + + if(MSVC) + target_link_libraries(${target_name} PRIVATE pybind11::windows_extras) + endif() + + if(ARG_OPT_SIZE) + target_link_libraries(${target_name} PRIVATE pybind11::opt_size) + endif() +endfunction() + +function(pybind11_extension name) + # The extension is precomputed + set_target_properties(${name} PROPERTIES PREFIX "" SUFFIX "${PYTHON_MODULE_EXTENSION}") + +endfunction() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake new file mode 100644 index 0000000..66ad00a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake @@ -0,0 +1,233 @@ +# tools/pybind11Tools.cmake -- Build system for the pybind11 modules +# +# Copyright (c) 2020 Wenzel Jakob +# +# All rights reserved. Use of this source code is governed by a +# BSD-style license that can be found in the LICENSE file. + +# include_guard(global) (pre-CMake 3.10) +if(TARGET pybind11::python_headers) + return() +endif() + +# Built-in in CMake 3.5+ +include(CMakeParseArguments) + +if(pybind11_FIND_QUIETLY) + set(_pybind11_quiet QUIET) +else() + set(_pybind11_quiet "") +endif() + +# If this is the first run, PYTHON_VERSION can stand in for PYBIND11_PYTHON_VERSION +if(NOT DEFINED PYBIND11_PYTHON_VERSION AND DEFINED PYTHON_VERSION) + message(WARNING "Set PYBIND11_PYTHON_VERSION to search for a specific version, not " + "PYTHON_VERSION (which is an output). Assuming that is what you " + "meant to do and continuing anyway.") + set(PYBIND11_PYTHON_VERSION + "${PYTHON_VERSION}" + CACHE STRING "Python version to use for compiling modules") + unset(PYTHON_VERSION) + unset(PYTHON_VERSION CACHE) +elseif(DEFINED PYBIND11_PYTHON_VERSION) + # If this is set as a normal variable, promote it + set(PYBIND11_PYTHON_VERSION + "${PYBIND11_PYTHON_VERSION}" + CACHE STRING "Python version to use for compiling modules") +else() + # Make an empty cache variable. 
+ set(PYBIND11_PYTHON_VERSION + "" + CACHE STRING "Python version to use for compiling modules") +endif() + +# A user can set versions manually too +set(Python_ADDITIONAL_VERSIONS + "3.11;3.10;3.9;3.8;3.7;3.6" + CACHE INTERNAL "") + +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}") +find_package(PythonLibsNew ${PYBIND11_PYTHON_VERSION} MODULE REQUIRED ${_pybind11_quiet}) +list(REMOVE_AT CMAKE_MODULE_PATH -1) + +# Makes a normal variable a cached variable +macro(_PYBIND11_PROMOTE_TO_CACHE NAME) + set(_tmp_ptc "${${NAME}}") + # CMake 3.21 complains if a cached variable is shadowed by a normal one + unset(${NAME}) + set(${NAME} + "${_tmp_ptc}" + CACHE INTERNAL "") +endmacro() + +# Cache variables so pybind11_add_module can be used in parent projects +_pybind11_promote_to_cache(PYTHON_INCLUDE_DIRS) +_pybind11_promote_to_cache(PYTHON_LIBRARIES) +_pybind11_promote_to_cache(PYTHON_MODULE_PREFIX) +_pybind11_promote_to_cache(PYTHON_MODULE_EXTENSION) +_pybind11_promote_to_cache(PYTHON_VERSION_MAJOR) +_pybind11_promote_to_cache(PYTHON_VERSION_MINOR) +_pybind11_promote_to_cache(PYTHON_VERSION) +_pybind11_promote_to_cache(PYTHON_IS_DEBUG) + +if(PYBIND11_MASTER_PROJECT) + if(PYTHON_MODULE_EXTENSION MATCHES "pypy") + if(NOT DEFINED PYPY_VERSION) + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -c + [=[import sys; sys.stdout.write(".".join(map(str, sys.pypy_version_info[:3])))]=] + OUTPUT_VARIABLE pypy_version) + set(PYPY_VERSION + ${pypy_version} + CACHE INTERNAL "") + endif() + message(STATUS "PYPY ${PYPY_VERSION} (Py ${PYTHON_VERSION})") + else() + message(STATUS "PYTHON ${PYTHON_VERSION}") + endif() +endif() + +# Only add Python for build - must be added during the import for config since +# it has to be re-discovered. +# +# This needs to be an target to it is included after the local pybind11 +# directory, just in case there are multiple versions of pybind11, we want the +# one we expect. +add_library(pybind11::python_headers INTERFACE IMPORTED) +set_property(TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES + "$") +set_property( + TARGET pybind11::pybind11 + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers) + +set(pybind11_INCLUDE_DIRS + "${pybind11_INCLUDE_DIR}" "${PYTHON_INCLUDE_DIRS}" + CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located") + +# Python debug libraries expose slightly different objects before 3.8 +# https://docs.python.org/3.6/c-api/intro.html#debugging-builds +# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib +if(PYTHON_IS_DEBUG) + set_property( + TARGET pybind11::pybind11 + APPEND + PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG) +endif() + +# The <3.11 code here does not support release/debug builds at the same time, like on vcpkg +if(CMAKE_VERSION VERSION_LESS 3.11) + set_property( + TARGET pybind11::module + APPEND + PROPERTY + INTERFACE_LINK_LIBRARIES + pybind11::python_link_helper + "$<$,$>:$>" + ) + + set_property( + TARGET pybind11::embed + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11 $) +else() + # The IMPORTED INTERFACE library here is to ensure that "debug" and "release" get processed outside + # of a generator expression - https://gitlab.kitware.com/cmake/cmake/-/issues/18424, as they are + # target_link_library keywords rather than real libraries. 
+ add_library(pybind11::_ClassicPythonLibraries IMPORTED INTERFACE) + target_link_libraries(pybind11::_ClassicPythonLibraries INTERFACE ${PYTHON_LIBRARIES}) + target_link_libraries( + pybind11::module + INTERFACE + pybind11::python_link_helper + "$<$,$>:pybind11::_ClassicPythonLibraries>") + + target_link_libraries(pybind11::embed INTERFACE pybind11::pybind11 + pybind11::_ClassicPythonLibraries) +endif() + +function(pybind11_extension name) + # The prefix and extension are provided by FindPythonLibsNew.cmake + set_target_properties(${name} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}" + SUFFIX "${PYTHON_MODULE_EXTENSION}") +endfunction() + +# Build a Python extension module: +# pybind11_add_module( [MODULE | SHARED] [EXCLUDE_FROM_ALL] +# [NO_EXTRAS] [THIN_LTO] [OPT_SIZE] source1 [source2 ...]) +# +function(pybind11_add_module target_name) + set(options "MODULE;SHARED;EXCLUDE_FROM_ALL;NO_EXTRAS;SYSTEM;THIN_LTO;OPT_SIZE") + cmake_parse_arguments(ARG "${options}" "" "" ${ARGN}) + + if(ARG_MODULE AND ARG_SHARED) + message(FATAL_ERROR "Can't be both MODULE and SHARED") + elseif(ARG_SHARED) + set(lib_type SHARED) + else() + set(lib_type MODULE) + endif() + + if(ARG_EXCLUDE_FROM_ALL) + set(exclude_from_all EXCLUDE_FROM_ALL) + else() + set(exclude_from_all "") + endif() + + add_library(${target_name} ${lib_type} ${exclude_from_all} ${ARG_UNPARSED_ARGUMENTS}) + + target_link_libraries(${target_name} PRIVATE pybind11::module) + + if(ARG_SYSTEM) + message( + STATUS + "Warning: this does not have an effect - use NO_SYSTEM_FROM_IMPORTED if using imported targets" + ) + endif() + + pybind11_extension(${target_name}) + + # -fvisibility=hidden is required to allow multiple modules compiled against + # different pybind versions to work properly, and for some features (e.g. + # py::module_local). We force it on everything inside the `pybind11` + # namespace; also turning it on for a pybind module compilation here avoids + # potential warnings or issues from having mixed hidden/non-hidden types. + if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET) + set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden") + endif() + + if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET) + set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden") + endif() + + if(ARG_NO_EXTRAS) + return() + endif() + + if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION) + if(ARG_THIN_LTO) + target_link_libraries(${target_name} PRIVATE pybind11::thin_lto) + else() + target_link_libraries(${target_name} PRIVATE pybind11::lto) + endif() + endif() + + # Use case-insensitive comparison to match the result of $ + string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE) + if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO) + pybind11_strip(${target_name}) + endif() + + if(MSVC) + target_link_libraries(${target_name} PRIVATE pybind11::windows_extras) + endif() + + if(ARG_OPT_SIZE) + target_link_libraries(${target_name} PRIVATE pybind11::opt_size) + endif() +endfunction() + +# Provide general way to call common Python commands in "common" file. 
+set(_Python + PYTHON + CACHE INTERNAL "" FORCE) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml new file mode 100644 index 0000000..8fe2f47 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in new file mode 100644 index 0000000..885ac5c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +# Setup script for pybind11-global (in the sdist or in tools/setup_global.py in the repository) +# This package is targeted for easy use from CMake. + +import glob +import os +import re + +# Setuptools has to be before distutils +from setuptools import setup + +from distutils.command.install_headers import install_headers + +class InstallHeadersNested(install_headers): + def run(self): + headers = self.distribution.headers or [] + for header in headers: + # Remove pybind11/include/ + short_header = header.split("/", 2)[-1] + + dst = os.path.join(self.install_dir, os.path.dirname(short_header)) + self.mkpath(dst) + (out, _) = self.copy_file(header, dst) + self.outfiles.append(out) + + +main_headers = glob.glob("pybind11/include/pybind11/*.h") +detail_headers = glob.glob("pybind11/include/pybind11/detail/*.h") +eigen_headers = glob.glob("pybind11/include/pybind11/eigen/*.h") +stl_headers = glob.glob("pybind11/include/pybind11/stl/*.h") +cmake_files = glob.glob("pybind11/share/cmake/pybind11/*.cmake") +pkgconfig_files = glob.glob("pybind11/share/pkgconfig/*.pc") +headers = main_headers + detail_headers + stl_headers + eigen_headers + +cmdclass = {"install_headers": InstallHeadersNested} +$extra_cmd + +# This will _not_ affect installing from wheels, +# only building wheels or installing from SDist. 
+# Primarily intended on Windows, where this is sometimes +# customized (for example, conda-forge uses Library/) +base = os.environ.get("PYBIND11_GLOBAL_PREFIX", "") + +# Must have a separator +if base and not base.endswith("/"): + base += "/" + +setup( + name="pybind11_global", + version="$version", + packages=[], + headers=headers, + data_files=[ + (base + "share/cmake/pybind11", cmake_files), + (base + "share/pkgconfig", pkgconfig_files), + (base + "include/pybind11", main_headers), + (base + "include/pybind11/detail", detail_headers), + (base + "include/pybind11/eigen", eigen_headers), + (base + "include/pybind11/stl", stl_headers), + ], + cmdclass=cmdclass, +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in new file mode 100644 index 0000000..6358cc7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +# Setup script (in the sdist or in tools/setup_main.py in the repository) + +from setuptools import setup + +cmdclass = {} +$extra_cmd + +setup( + name="pybind11", + version="$version", + download_url='https://github.com/pybind/pybind11/tarball/v$version', + packages=[ + "pybind11", + "pybind11.include.pybind11", + "pybind11.include.pybind11.detail", + "pybind11.include.pybind11.eigen", + "pybind11.include.pybind11.stl", + "pybind11.share.cmake.pybind11", + "pybind11.share.pkgconfig", + ], + package_data={ + "pybind11": ["py.typed"], + "pybind11.include.pybind11": ["*.h"], + "pybind11.include.pybind11.detail": ["*.h"], + "pybind11.include.pybind11.eigen": ["*.h"], + "pybind11.include.pybind11.stl": ["*.h"], + "pybind11.share.cmake.pybind11": ["*.cmake"], + "pybind11.share.pkgconfig": ["*.pc"], + }, + extras_require={ + "global": ["pybind11_global==$version"] + }, + entry_points={ + "console_scripts": [ + "pybind11-config = pybind11.__main__:main", + ], + "pipx.run": [ + "pybind11 = pybind11.__main__:main", + ] + }, + cmdclass=cmdclass +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py new file mode 100644 index 0000000..f52651d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py @@ -0,0 +1,942 @@ +from builtins import bytearray as builtin_bytearray +from builtins import bytes as builtin_bytes +from builtins import str as builtin_str +import codecs +from types import BuiltinFunctionType +from typing import TYPE_CHECKING +from typing import Any + +from .._metrics import _set_iast_error_metric +from .._taint_tracking import TagMappingMode +from .._taint_tracking import TaintRange +from .._taint_tracking import _convert_escaped_text_to_tainted_text +from .._taint_tracking import _format_aspect +from .._taint_tracking import are_all_text_all_ranges +from .._taint_tracking import as_formatted_evidence +from .._taint_tracking import common_replace +from .._taint_tracking import copy_and_shift_ranges_from_strings +from .._taint_tracking import copy_ranges_from_strings +from .._taint_tracking import get_ranges +from .._taint_tracking import get_tainted_ranges +from .._taint_tracking import is_pyobject_tainted +from .._taint_tracking import new_pyobject_id +from .._taint_tracking import parse_params +from 
.._taint_tracking import set_ranges +from .._taint_tracking import shift_taint_range +from .._taint_tracking import taint_pyobject_with_ranges +from .._taint_tracking._native import aspects # noqa: F401 + + +if TYPE_CHECKING: + from typing import Callable # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Sequence # noqa:F401 + from typing import Tuple # noqa:F401 + from typing import Union # noqa:F401 + + TEXT_TYPE = Union[str, bytes, bytearray] + +TEXT_TYPES = (str, bytes, bytearray) + + +_add_aspect = aspects.add_aspect +_extend_aspect = aspects.extend_aspect +_index_aspect = aspects.index_aspect +_join_aspect = aspects.join_aspect +_slice_aspect = aspects.slice_aspect + +__all__ = ["add_aspect", "str_aspect", "bytearray_extend_aspect", "decode_aspect", "encode_aspect"] + + +def add_aspect(op1, op2): + if not isinstance(op1, TEXT_TYPES) or not isinstance(op2, TEXT_TYPES) or type(op1) != type(op2): + return op1 + op2 + return _add_aspect(op1, op2) + + +def str_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> str + if orig_function: + if orig_function != builtin_str: + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + result = builtin_str(*args, **kwargs) + else: + result = args[0].str(*args[1:], **kwargs) + + if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]): + try: + if isinstance(args[0], (bytes, bytearray)): + encoding = parse_params(1, "encoding", "utf-8", *args, **kwargs) + errors = parse_params(2, "errors", "strict", *args, **kwargs) + check_offset = args[0].decode(encoding, errors) + else: + check_offset = args[0] + offset = result.index(check_offset) + copy_and_shift_ranges_from_strings(args[0], result, offset) + except Exception as e: + _set_iast_error_metric("IAST propagation error. str_aspect. {}".format(e)) + return result + + +def bytes_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> bytes + if orig_function: + if orig_function != builtin_bytes: + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + result = builtin_bytes(*args, **kwargs) + else: + result = args[0].bytes(*args[1:], **kwargs) + + if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]): + try: + copy_ranges_from_strings(args[0], result) + except Exception as e: + _set_iast_error_metric("IAST propagation error. bytes_aspect. {}".format(e)) + return result + + +def bytearray_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> bytearray + if orig_function: + if orig_function != builtin_bytearray: + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + result = builtin_bytearray(*args, **kwargs) + else: + result = args[0].bytearray(*args[1:], **kwargs) + + if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]): + try: + copy_ranges_from_strings(args[0], result) + except Exception as e: + _set_iast_error_metric("IAST propagation error. bytearray_aspect. 
{}".format(e)) + return result + + +def join_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> Any + if not orig_function: + orig_function = args[0].join + if not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + if not args: + return orig_function(*args, **kwargs) + + joiner = args[0] + args = args[flag_added_args:] + if not isinstance(joiner, TEXT_TYPES): + return joiner.join(*args, **kwargs) + try: + return _join_aspect(joiner, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. join_aspect. {}".format(e)) + return joiner.join(*args, **kwargs) + + +def index_aspect(candidate_text, index) -> Any: + result = candidate_text[index] + + if not isinstance(candidate_text, TEXT_TYPES) or not isinstance(index, int): + return result + + try: + return _index_aspect(candidate_text, index) + except Exception as e: + _set_iast_error_metric("IAST propagation error. index_aspect. {}".format(e)) + return result + + +def slice_aspect(candidate_text, start, stop, step) -> Any: + if ( + not isinstance(candidate_text, TEXT_TYPES) + or (start is not None and not isinstance(start, int)) + or (stop is not None and not isinstance(stop, int)) + or (step is not None and not isinstance(step, int)) + ): + return candidate_text[start:stop:step] + result = candidate_text[start:stop:step] + try: + new_result = _slice_aspect(candidate_text, start, stop, step) + if new_result != result: + raise Exception("Propagation result %r is different to candidate_text[slice] %r" % (new_result, result)) + return new_result + except Exception as e: + _set_iast_error_metric("IAST propagation error. slice_aspect. {}".format(e)) + return result + + +def bytearray_extend_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> Any + if orig_function and not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + if len(args) < 2: + # If we're not receiving at least 2 arguments, means the call was + # ``x.extend()`` and not ``x.extend(y)`` + # so either not the extend we're looking for, or no changes in taint ranges. + return args[0].extend(*args[1:], **kwargs) + + op1 = args[0] + op2 = args[1] + if not isinstance(op1, bytearray) or not isinstance(op2, (bytearray, bytes)): + return op1.extend(*args[1:], **kwargs) + try: + return _extend_aspect(op1, op2) + except Exception as e: + _set_iast_error_metric("IAST propagation error. extend_aspect. 
{}".format(e)) + return op1.extend(op2) + + +def modulo_aspect(candidate_text, candidate_tuple): + # type: (Any, Any) -> Any + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text % candidate_tuple + + try: + if isinstance(candidate_tuple, tuple): + parameter_list = candidate_tuple + else: + parameter_list = (candidate_tuple,) + + ranges_orig, candidate_text_ranges = are_all_text_all_ranges(candidate_text, parameter_list) + if not ranges_orig: + return candidate_text % candidate_tuple + + return _convert_escaped_text_to_tainted_text( + as_formatted_evidence( + candidate_text, + candidate_text_ranges, + tag_mapping_function=TagMappingMode.Mapper, + ) + % tuple( + as_formatted_evidence( + parameter, + tag_mapping_function=TagMappingMode.Mapper, + ) + if isinstance(parameter, TEXT_TYPES) + else parameter + for parameter in parameter_list + ), + ranges_orig=ranges_orig, + ) + except Exception as e: + _set_iast_error_metric("IAST propagation error. modulo_aspect. {}".format(e)) + return candidate_text % candidate_tuple + + +def build_string_aspect(*args): # type: (List[Any]) -> str + return join_aspect("".join, 1, "", args) + + +def ljust_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> Union[str, bytes, bytearray] + if not orig_function: + orig_function = args[0].ljust + if not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + + result = candidate_text.ljust(*args, **kwargs) + + if not isinstance(candidate_text, TEXT_TYPES): + return result + + try: + ranges_new = get_ranges(candidate_text) + fillchar = parse_params(1, "fillchar", " ", *args, **kwargs) + fillchar_ranges = get_ranges(fillchar) + if ranges_new is None or (not ranges_new and not fillchar_ranges): + return result + + if fillchar_ranges: + # Can only be one char, so we create one range to cover from the start to the end + ranges_new = ranges_new + [shift_taint_range(fillchar_ranges[0], len(candidate_text))] + + new_result = candidate_text.ljust(parse_params(0, "width", None, *args, **kwargs), fillchar) + taint_pyobject_with_ranges(new_result, ranges_new) + return new_result + except Exception as e: + _set_iast_error_metric("IAST propagation error. ljust_aspect. 
{}".format(e)) + + return result + + +def zfill_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], int, Any, Any) -> Any + if orig_function and not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + + result = candidate_text.zfill(*args, **kwargs) + + if not isinstance(candidate_text, TEXT_TYPES): + return result + + try: + ranges_orig = get_ranges(candidate_text) + if not ranges_orig: + return result + prefix = candidate_text[0] in ("-", "+") + + difflen = len(result) - len(candidate_text) + ranges_new = [] # type: List[TaintRange] + ranges_new_append = ranges_new.append + ranges_new_extend = ranges_new.extend + + for r in ranges_orig: + if not prefix or r.start > 0: + ranges_new_append(TaintRange(start=r.start + difflen, length=r.length, source=r.source)) + else: + ranges_new_extend( + [ + TaintRange(start=0, length=1, source=r.source), + TaintRange(start=r.start + difflen + 1, length=r.length - 1, source=r.source), + ] + ) + taint_pyobject_with_ranges(result, tuple(ranges_new)) + except Exception as e: + _set_iast_error_metric("IAST propagation error. format_aspect. {}".format(e)) + + return result + + +def format_aspect( + orig_function, # type: Optional[Callable] + flag_added_args, # type: int + *args, # type: Any + **kwargs, # type: Dict[str, Any] +): # type: (...) -> str + if not orig_function: + orig_function = args[0].format + + if not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + if not args: + return orig_function(*args, **kwargs) + + candidate_text = args[0] # type: str + args = args[flag_added_args:] + + result = candidate_text.format(*args, **kwargs) + + if not isinstance(candidate_text, TEXT_TYPES): + return result + + try: + params = tuple(args) + tuple(kwargs.values()) + new_result = _format_aspect(candidate_text, params, *args, **kwargs) + if new_result != result: + raise Exception("Propagation result %r is different to candidate_text.format %r" % (new_result, result)) + return new_result + except Exception as e: + _set_iast_error_metric("IAST propagation error. format_aspect. 
{}".format(e)) + + return result + + +def format_map_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> str + if orig_function and not isinstance(orig_function, BuiltinFunctionType): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + if orig_function and not args: + return orig_function(*args, **kwargs) + + candidate_text = args[0] # type: str + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.format_map(*args, **kwargs) + + try: + mapping = parse_params(0, "mapping", None, *args, **kwargs) + mapping_tuple = tuple(mapping if not isinstance(mapping, dict) else mapping.values()) + ranges_orig, candidate_text_ranges = are_all_text_all_ranges( + candidate_text, + args + mapping_tuple, + ) + if not ranges_orig: + return candidate_text.format_map(*args, **kwargs) + + return _convert_escaped_text_to_tainted_text( + as_formatted_evidence( + candidate_text, candidate_text_ranges, tag_mapping_function=TagMappingMode.Mapper + ).format_map( + { + key: as_formatted_evidence(value, tag_mapping_function=TagMappingMode.Mapper) + if isinstance(value, TEXT_TYPES) + else value + for key, value in mapping.items() + } + ), + ranges_orig=ranges_orig, + ) + except Exception as e: + _set_iast_error_metric("IAST propagation error. format_map_aspect. {}".format(e)) + return candidate_text.format_map(*args, **kwargs) + + +def repr_aspect(orig_function, flag_added_args, *args, **kwargs): + # type: (Optional[Callable], Any, Any, Any) -> Any + + # DEV: We call this function directly passing None as orig_function + if orig_function is not None and not ( + orig_function is repr or getattr(orig_function, "__name__", None) == "__repr__" + ): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + result = repr(*args, **kwargs) + + if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]): + try: + if isinstance(args[0], bytes): + check_offset = ascii(args[0])[2:-1] + elif isinstance(args[0], bytearray): + check_offset = ascii(args[0])[12:-2] + else: + check_offset = args[0] + try: + offset = result.index(check_offset) + except ValueError: + offset = 0 + + copy_and_shift_ranges_from_strings(args[0], result, offset, len(check_offset)) + except Exception as e: + _set_iast_error_metric("IAST propagation error. repr_aspect. {}".format(e)) + return result + + +def format_value_aspect( + element, # type: Any + options=0, # type: int + format_spec=None, # type: Optional[str] +): # type: (...) 
-> str + if options == 115: + new_text = str_aspect(str, 0, element) + elif options == 114: + # TODO: use our repr once we have implemented it + new_text = repr_aspect(repr, 0, element) + elif options == 97: + new_text = ascii(element) + else: + new_text = element + if not isinstance(new_text, TEXT_TYPES): + return format(new_text) + + try: + if format_spec: + # Apply formatting + text_ranges = get_tainted_ranges(new_text) + if text_ranges: + new_new_text = ("{:%s}" % format_spec).format(new_text) + try: + new_ranges = list() + for text_range in text_ranges: + new_ranges.append(shift_taint_range(text_range, new_new_text.index(new_text))) + if new_ranges: + taint_pyobject_with_ranges(new_new_text, tuple(new_ranges)) + return new_new_text + except ValueError: + return ("{:%s}" % format_spec).format(new_text) + else: + return ("{:%s}" % format_spec).format(new_text) + else: + return str_aspect(str, 0, new_text) + except Exception as e: + _set_iast_error_metric("IAST propagation error. format_value_aspect. {}".format(e)) + return new_text + + +def incremental_translation(self, incr_coder, funcode, empty): + tainted_ranges = iter(get_tainted_ranges(self)) + result_list, new_ranges = [], [] + result_length, i = 0, 0 + tainted_range = next(tainted_ranges, None) + tainted_new_length = 0 + in_tainted = False + tainted_start = 0 + bytes_iterated = 0 + try: + for i in range(len(self)): + if tainted_range is None: + # no more tainted ranges, finish decoding all at once + new_prod = funcode(self[i:]) + result_list.append(new_prod) + break + if i == tainted_range.start: + # start new tainted range + tainted_start = bytes_iterated + tainted_new_length = 0 + in_tainted = True + + new_prod = funcode(self[i : i + 1]) + result_list.append(new_prod) + result_length += len(new_prod) + + if in_tainted: + tainted_new_length += len(new_prod) + else: + bytes_iterated += len(new_prod) + + if i + 1 == tainted_range.start + tainted_range.length and tainted_new_length > 0: + # end range. Do no taint partial multi-bytes character that comes next. + new_ranges.append( + TaintRange( + start=tainted_start, + length=tainted_new_length, + source=tainted_range.source, + ) + ) + + tainted_range = next(tainted_ranges, None) + result_list.append(funcode(self[:0], True)) + except UnicodeDecodeError as e: + offset = -len(incr_coder.getstate()[0]) + raise UnicodeDecodeError(e.args[0], self, i + e.args[2] + offset, i + e.args[3] + offset, *e.args[4:]) + except UnicodeEncodeError: + funcode(self) + result = empty.join(result_list) + taint_pyobject_with_ranges(result, new_ranges) + return result + + +def decode_aspect(orig_function, flag_added_args, *args, **kwargs): + if orig_function and (not flag_added_args or not args): + # This patch is unexpected, so we fallback + # to executing the original function + return orig_function(*args, **kwargs) + + self = args[0] + args = args[(flag_added_args or 1) :] + # Assume we call decode method of the first argument + result = self.decode(*args, **kwargs) + + if not is_pyobject_tainted(self) or not isinstance(self, bytes): + return result + + try: + codec = args[0] if args else "utf-8" + inc_dec = codecs.getincrementaldecoder(codec)(**kwargs) + return incremental_translation(self, inc_dec, inc_dec.decode, "") + except Exception as e: + _set_iast_error_metric("IAST propagation error. decode_aspect. 
{}".format(e)) + return result + + +def encode_aspect(orig_function, flag_added_args, *args, **kwargs): + if orig_function and (not flag_added_args or not args): + # This patch is unexpected, so we fallback + # to executing the original function + return orig_function(*args, **kwargs) + + self = args[0] + args = args[(flag_added_args or 1) :] + # Assume we call encode method of the first argument + result = self.encode(*args, **kwargs) + + if not is_pyobject_tainted(self) or not isinstance(self, str): + return result + + try: + codec = args[0] if args else "utf-8" + inc_enc = codecs.getincrementalencoder(codec)(**kwargs) + return incremental_translation(self, inc_enc, inc_enc.encode, b"") + except Exception as e: + _set_iast_error_metric("IAST propagation error. encode_aspect. {}".format(e)) + return result + + +def upper_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.upper(*args, **kwargs) + + try: + return common_replace("upper", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. upper_aspect. {}".format(e)) + return candidate_text.upper(*args, **kwargs) + + +def lower_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.lower(*args, **kwargs) + + try: + return common_replace("lower", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. lower_aspect. {}".format(e)) + return candidate_text.lower(*args, **kwargs) + + +def _distribute_ranges_and_escape( + split_elements, # type: List[Optional[TEXT_TYPE]] + len_separator, # type: int + ranges, # type: Tuple[TaintRange, ...] +): # type: (...) -> List[Optional[TEXT_TYPE]] + # FIXME: converts to set, and then to list again, probably to remove + # duplicates. This should be removed once the ranges values on the + # taint dictionary are stored in a set. 
+ range_set = set(ranges) + range_set_remove = range_set.remove + formatted_elements = [] # type: List[Optional[TEXT_TYPE]] + formatted_elements_append = formatted_elements.append + element_start = 0 + extra = 0 + + for element in split_elements: + if element is None: + extra += len_separator + continue + # DEV: If this if is True, it means that the element is part of bytes/bytearray + if isinstance(element, int): + len_element = 1 + else: + len_element = len(element) + element_end = element_start + len_element + new_ranges = {} # type: Dict[TaintRange, TaintRange] + + for taint_range in ranges: + if (taint_range.start + taint_range.length) <= (element_start + extra): + try: + range_set_remove(taint_range) + except KeyError: + # If the range appears twice in ranges, it will be + # iterated twice, but it's only once in range_set, + # raising KeyError at remove, so it can be safely ignored + pass + continue + + if taint_range.start > element_end: + continue + + start = max(taint_range.start, element_start) + end = min((taint_range.start + taint_range.length), element_end) + if end <= start: + continue + + if end - element_start < 1: + continue + + new_range = TaintRange( + start=start - element_start, + length=end - element_start, + source=taint_range.source, + ) + new_ranges[new_range] = taint_range + + element_ranges = tuple(new_ranges.keys()) + # DEV: If this if is True, it means that the element is part of bytes/bytearray + if isinstance(element, int): + element_new_id = new_pyobject_id(bytes([element])) + else: + element_new_id = new_pyobject_id(element) + set_ranges(element_new_id, element_ranges) + + formatted_elements_append( + as_formatted_evidence( + element_new_id, + element_ranges, + TagMappingMode.Mapper_Replace, + new_ranges, + ) + ) + + element_start = element_end + len_separator + return formatted_elements + + +def aspect_replace_api( + candidate_text, old_value, new_value, count, orig_result +): # type: (Any, Any, Any, int, Any) -> str + ranges_orig, candidate_text_ranges = are_all_text_all_ranges(candidate_text, (old_value, new_value)) + if not ranges_orig: # Ranges in args/kwargs are checked + return orig_result + + empty = b"" if isinstance(candidate_text, (bytes, bytearray)) else "" # type: TEXT_TYPE + + if old_value: + elements = candidate_text.split(old_value, count) # type: Sequence[TEXT_TYPE] + else: + if count == -1: + elements = ( + [ + empty, + ] + + ( + list(candidate_text) if isinstance(candidate_text, str) else [bytes([x]) for x in candidate_text] # type: ignore + ) + + [ + empty, + ] + ) + else: + if isinstance(candidate_text, str): + elements = ( + [ + empty, + ] + + list(candidate_text[: count - 1]) + + [candidate_text[count - 1 :]] + ) + if len(elements) == count and elements[-1] != "": + elements.append(empty) + else: + elements = ( + [ + empty, + ] + + [bytes([x]) for x in candidate_text[: count - 1]] + + [bytes([x for x in candidate_text[count - 1 :]])] + ) + if len(elements) == count and elements[-1] != b"": + elements.append(empty) + i = 0 + new_elements = [] # type: List[Optional[TEXT_TYPE]] + new_elements_append = new_elements.append + + # if new value is blank, _distribute_ranges_and_escape function doesn't + # understand what is the replacement to move the ranges. + # In the other hand, Split function splits a string and the occurrence is + # in the first or last position, split adds ''. 
IE: + # 'XabcX'.split('X') -> ['', 'abc', ''] + # We add "None" in the old position and _distribute_ranges_and_escape + # knows that this is the position of a old value and move len(old_value) + # positions of the range + if new_value in ("", b""): + len_elements = len(elements) + for element in elements: + if i == 0 and element in ("", b""): + new_elements_append(None) + i += 1 + continue + if i + 1 == len_elements and element in ("", b""): + new_elements_append(None) + continue + + new_elements_append(element) + + if count < 0 and i + 1 < len(elements): + new_elements_append(None) + elif i >= count and i + 1 < len(elements): + new_elements_append(old_value) + i += 1 + else: + new_elements = elements # type: ignore + + if candidate_text_ranges: + new_elements = _distribute_ranges_and_escape( + new_elements, + len(old_value), + candidate_text_ranges, + ) + + result_formatted = as_formatted_evidence(new_value, tag_mapping_function=TagMappingMode.Mapper).join(new_elements) + + result = _convert_escaped_text_to_tainted_text( + result_formatted, + ranges_orig=ranges_orig, + ) + + return result + + +def replace_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + orig_result = candidate_text.replace(*args, **kwargs) + if not isinstance(candidate_text, TEXT_TYPES): + return orig_result + + ### + # Optimization: if we're not going to replace, just return the original string + count = parse_params(2, "count", -1, *args, **kwargs) + if count == 0: + return candidate_text + ### + try: + old_value = parse_params(0, "old_value", None, *args, **kwargs) + new_value = parse_params(1, "new_value", None, *args, **kwargs) + + if old_value is None or new_value is None: + return orig_result + + if old_value not in candidate_text or old_value == new_value: + return candidate_text + + if orig_result in ("", b"", bytearray(b"")): + return orig_result + + if count < -1: + count = -1 + + aspect_result = aspect_replace_api(candidate_text, old_value, new_value, count, orig_result) + + if aspect_result != orig_result: + return orig_result + + return aspect_result + except Exception as e: + _set_iast_error_metric("IAST propagation error. replace_aspect. {}".format(e)) + return orig_result + + +def swapcase_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.swapcase(*args, **kwargs) + try: + return common_replace("swapcase", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. swapcase_aspect. 
{}".format(e)) + return candidate_text.swapcase(*args, **kwargs) + + +def title_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.title(*args, **kwargs) + try: + return common_replace("title", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. title_aspect. {}".format(e)) + return candidate_text.title(*args, **kwargs) + + +def capitalize_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.capitalize(*args, **kwargs) + + try: + return common_replace("capitalize", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. capitalize_aspect. {}".format(e)) + return candidate_text.capitalize(*args, **kwargs) + + +def casefold_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function: + if not isinstance(orig_function, BuiltinFunctionType) or not args: + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + else: + orig_function = getattr(args[0], "casefold", None) + + if orig_function and orig_function.__qualname__ not in ("str.casefold", "bytes.casefold", "bytearray.casefold"): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + if flag_added_args > 0: + args = args[flag_added_args:] + return candidate_text.casefold(*args, **kwargs) + try: + return common_replace("casefold", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. casefold_aspect. {}".format(e)) + return candidate_text.casefold(*args, **kwargs) # type: ignore[union-attr] + + +def translate_aspect( + orig_function, flag_added_args, *args, **kwargs +): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE + if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args): + if flag_added_args > 0: + args = args[flag_added_args:] + return orig_function(*args, **kwargs) + + candidate_text = args[0] + args = args[flag_added_args:] + if not isinstance(candidate_text, TEXT_TYPES): + return candidate_text.translate(*args, **kwargs) + try: + return common_replace("translate", candidate_text, *args, **kwargs) + except Exception as e: + _set_iast_error_metric("IAST propagation error. translate_aspect. 
{}".format(e)) + return candidate_text.translate(*args, **kwargs) + + +def empty_func(*args, **kwargs): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh new file mode 100644 index 0000000..c86b825 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -exu +cd -- "$(dirname -- "${BASH_SOURCE[0]}")" || exit + +rm -rf CMakeFiles/ CMakeCache.txt Makefile cmake_install.cmake __pycache__/ .cmake *.cbp Testing +rm -rf cmake-build-debug cmake-build-default cmake-build-tests \ No newline at end of file diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py new file mode 100644 index 0000000..5beb9c4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py @@ -0,0 +1,548 @@ +#!/usr/bin/env python3 +from collections import abc +from typing import Any +from typing import List +from typing import Optional +from typing import Union + +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +DBAPI_INTEGRATIONS = ("sqlite", "psycopg", "mysql", "mariadb") +DBAPI_PREFIXES = ("django-",) + +log = get_logger(__name__) + + +# Non Lazy Tainting + + +# don't use dataclass that can create circular import problems here +# @dataclasses.dataclass +class _DeepTaintCommand: + def __init__( + self, + pre: bool, + source_key: str, + obj: Any, + store_struct: Union[list, dict], + key: Optional[List[str]] = None, + struct: Optional[Union[list, dict]] = None, + is_key: bool = False, + ): + self.pre = pre + self.source_key = source_key + self.obj = obj + self.store_struct = store_struct + self.key = key + self.struct = struct + self.is_key = is_key + + def store(self, value): + if isinstance(self.store_struct, list): + self.store_struct.append(value) + elif isinstance(self.store_struct, dict): + key = self.key[0] if self.key else None + self.store_struct[key] = value + else: + raise ValueError(f"store_struct of type {type(self.store_struct)}") + + def post(self, struct): + return self.__class__(False, self.source_key, self.obj, self.store_struct, self.key, struct) + + +def build_new_tainted_object_from_generic_object(initial_object, wanted_object): + if initial_object.__class__ is wanted_object.__class__: + return wanted_object + #### custom tailor actions + wanted_type = initial_object.__class__.__module__, initial_object.__class__.__name__ + if wanted_type == ("builtins", "tuple"): + return tuple(wanted_object) + # Django + if wanted_type == ("django.http.request", "HttpHeaders"): + res = initial_object.__class__({}) + res._store = {k.lower(): (k, v) for k, v in wanted_object.items()} + return res + if wanted_type == ("django.http.request", "QueryDict"): + res = initial_object.__class__() + for k, v in wanted_object.items(): + dict.__setitem__(res, k, v) + return res + # Flask 2+ + if wanted_type == ("werkzeug.datastructures.structures", "ImmutableMultiDict"): + return initial_object.__class__(wanted_object) + # Flask 1 + if wanted_type == ("werkzeug.datastructures", "ImmutableMultiDict"): + return initial_object.__class__(wanted_object) + + # if the class is unknown, return the initial object + # this may prevent interned string to be tainted but ensure + # that normal behavior of the code is not changed. 
+ return initial_object + + +def taint_structure(main_obj, source_key, source_value, override_pyobject_tainted=False): + """taint any structured object + use a queue like mechanism to avoid recursion + Best effort: mutate mutable structures and rebuild immutable ones if possible + """ + from ._taint_tracking import is_pyobject_tainted + from ._taint_tracking import taint_pyobject + + if not main_obj: + return main_obj + + main_res = [] + try: + # fifo contains tuple (pre/post:bool, source key, object to taint, + # key to use, struct to store result, struct to ) + stack = [_DeepTaintCommand(True, source_key, main_obj, main_res)] + while stack: + command = stack.pop() + if command.pre: # first processing of the object + if not command.obj: + command.store(command.obj) + elif isinstance(command.obj, (str, bytes, bytearray)): + if override_pyobject_tainted or not is_pyobject_tainted(command.obj): + new_obj = taint_pyobject( + pyobject=command.obj, + source_name=command.source_key, + source_value=command.obj, + source_origin=source_key if command.is_key else source_value, + ) + command.store(new_obj) + else: + command.store(command.obj) + elif isinstance(command.obj, abc.Mapping): + res = {} + stack.append(command.post(res)) + # use dict fondamental enumeration if possible to bypass any override of custom classes + iterable = dict.items(command.obj) if isinstance(command.obj, dict) else command.obj.items() + todo = [] + for k, v in list(iterable): + key_store = [] + todo.append(_DeepTaintCommand(True, k, k, key_store, is_key=True)) + todo.append(_DeepTaintCommand(True, k, v, res, key_store)) + stack.extend(reversed(todo)) + elif isinstance(command.obj, abc.Sequence): + res = [] + stack.append(command.post(res)) + todo = [_DeepTaintCommand(True, command.source_key, v, res) for v in command.obj] + stack.extend(reversed(todo)) + else: + command.store(command.obj) + else: + command.store(build_new_tainted_object_from_generic_object(command.obj, command.struct)) + except BaseException: + log.debug("taint_structure error", exc_info=True) + pass + finally: + return main_res[0] if main_res else main_obj + + +# Lazy Tainting + + +def _is_tainted_struct(obj): + return hasattr(obj, "_origins") + + +class LazyTaintList: + """ + Encapsulate a list to lazily taint all content on any depth + It will appear and act as the original list except for some additional private fields + """ + + def __init__(self, original_list, origins=(0, 0), override_pyobject_tainted=False, source_name="[]"): + self._obj = original_list._obj if _is_tainted_struct(original_list) else original_list + self._origins = origins + self._origin_value = origins[1] + self._override_pyobject_tainted = override_pyobject_tainted + self._source_name = source_name + + def _taint(self, value): + if value: + if isinstance(value, (str, bytes, bytearray)): + from ._taint_tracking import is_pyobject_tainted + from ._taint_tracking import taint_pyobject + + if not is_pyobject_tainted(value) or self._override_pyobject_tainted: + try: + # TODO: migrate this part to shift ranges instead of creating a new one + value = taint_pyobject( + pyobject=value, + source_name=self._source_name, + source_value=value, + source_origin=self._origin_value, + ) + except SystemError: + # TODO: Find the root cause for + # SystemError: NULL object passed to Py_BuildValue + log.debug("IAST SystemError while tainting value: %s", value, exc_info=True) + except Exception: + log.debug("IAST Unexpected exception while tainting value", exc_info=True) + elif isinstance(value, 
abc.Mapping) and not _is_tainted_struct(value): + value = LazyTaintDict( + value, origins=self._origins, override_pyobject_tainted=self._override_pyobject_tainted + ) + elif isinstance(value, abc.Sequence) and not _is_tainted_struct(value): + value = LazyTaintList( + value, + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + source_name=self._source_name, + ) + return value + + def __add__(self, other): + if _is_tainted_struct(other): + other = other._obj + return LazyTaintList( + self._obj + other, + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + source_name=self._source_name, + ) + + @property # type: ignore + def __class__(self): + return list + + def __contains__(self, item): + return item in self._obj + + def __delitem__(self, key): + del self._obj[key] + + def __eq__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj == other + + def __ge__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj >= other + + def __getitem__(self, key): + return self._taint(self._obj[key]) + + def __gt__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj > other + + def __iadd__(self, other): + if _is_tainted_struct(other): + other = other._obj + self._obj += other + + def __imul__(self, other): + self._obj *= other + + def __iter__(self): + return (self[i] for i in range(len(self._obj))) + + def __le__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj <= other + + def __len__(self): + return len(self._obj) + + def __lt__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj < other + + def __mul__(self, other): + return LazyTaintList( + self._obj * other, + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + source_name=self._source_name, + ) + + def __ne__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj != other + + def __repr__(self): + return repr(self._obj) + + def __reversed__(self): + return (self[i] for i in reversed(range(len(self._obj)))) + + def __setitem__(self, key, value): + self._obj[key] = value + + def __str__(self): + return str(self._obj) + + def append(self, item): + self._obj.append(item) + + def clear(self): + # TODO: stop tainting in this case + self._obj.clear() + + def copy(self): + return LazyTaintList( + self._obj.copy(), + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + source_name=self._source_name, + ) + + def count(self, *args): + return self._obj.count(*args) + + def extend(self, *args): + return self._obj.extend(*args) + + def index(self, *args): + return self._obj.index(*args) + + def insert(self, *args): + return self._obj.insert(*args) + + def pop(self, *args): + return self._taint(self._obj.pop(*args)) + + def remove(self, *args): + return self._obj.remove(*args) + + def reverse(self, *args): + return self._obj.reverse(*args) + + def sort(self, *args): + return self._obj.sort(*args) + + # psycopg2 support + def __conform__(self, proto): + return self + + def getquoted(self) -> bytes: + import psycopg2.extensions as ext + + value = ext.adapt(self._obj).getquoted() + value = self._taint(value) + return value + + +class LazyTaintDict: + def __init__(self, original_dict, origins=(0, 0), override_pyobject_tainted=False): + self._obj = original_dict + self._origins = origins + self._origin_key = origins[0] + 
self._origin_value = origins[1] + self._override_pyobject_tainted = override_pyobject_tainted + + def _taint(self, value, key, origin=None): + if origin is None: + origin = self._origin_value + if value: + if isinstance(value, (str, bytes, bytearray)): + from ._taint_tracking import is_pyobject_tainted + from ._taint_tracking import taint_pyobject + + if not is_pyobject_tainted(value) or self._override_pyobject_tainted: + try: + # TODO: migrate this part to shift ranges instead of creating a new one + value = taint_pyobject( + pyobject=value, + source_name=key, + source_value=value, + source_origin=origin, + ) + except SystemError: + # TODO: Find the root cause for + # SystemError: NULL object passed to Py_BuildValue + log.debug("IAST SystemError while tainting value: %s", value, exc_info=True) + except Exception: + log.debug("IAST Unexpected exception while tainting value", exc_info=True) + elif isinstance(value, abc.Mapping) and not _is_tainted_struct(value): + value = LazyTaintDict( + value, origins=self._origins, override_pyobject_tainted=self._override_pyobject_tainted + ) + elif isinstance(value, abc.Sequence) and not _is_tainted_struct(value): + value = LazyTaintList( + value, + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + source_name=key, + ) + return value + + @property # type: ignore + def __class__(self): + return dict + + def __contains__(self, item): + return item in self._obj + + def __delitem__(self, key): + del self._obj[key] + + def __eq__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj == other + + def __ge__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj >= other + + def __getitem__(self, key): + return self._taint(self._obj[key], key) + + def __gt__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj > other + + def __ior__(self, other): + if _is_tainted_struct(other): + other = other._obj + self._obj |= other + + def __iter__(self): + return iter(self.keys()) + + def __le__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj <= other + + def __len__(self): + return len(self._obj) + + def __lt__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj < other + + def __ne__(self, other): + if _is_tainted_struct(other): + other = other._obj + return self._obj != other + + def __or__(self, other): + if _is_tainted_struct(other): + other = other._obj + return LazyTaintDict( + self._obj | other, + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + ) + + def __repr__(self): + return repr(self._obj) + + def __reversed__(self): + return reversed(self.keys()) + + def __setitem__(self, key, value): + self._obj[key] = value + + def __str__(self): + return str(self._obj) + + def clear(self): + # TODO: stop tainting in this case + self._obj.clear() + + def copy(self): + return LazyTaintDict( + self._obj.copy(), + origins=self._origins, + override_pyobject_tainted=self._override_pyobject_tainted, + ) + + @classmethod + def fromkeys(cls, *args): + return dict.fromkeys(*args) + + def get(self, key, default=None): + observer = object() + res = self._obj.get(key, observer) + if res is observer: + return default + return self._taint(res, key) + + def items(self): + for k in self.keys(): + yield (k, self[k]) + + def keys(self): + for k in self._obj.keys(): + yield self._taint(k, k, self._origin_key) + + def pop(self, *args): + return 
self._taint(self._obj.pop(*args), "pop") + + def popitem(self): + k, v = self._obj.popitem() + return self._taint(k, k), self._taint(v, k) + + def remove(self, *args): + return self._obj.remove(*args) + + def setdefault(self, *args): + return self._taint(self._obj.setdefault(*args), args[0]) + + def update(self, *args, **kargs): + self._obj.update(*args, **kargs) + + def values(self): + for _, v in self.items(): + yield v + + # Django Query Dict support + def getlist(self, key, default=None): + return self._taint(self._obj.getlist(key, default=default), key) + + def setlist(self, key, list_): + self._obj.setlist(key, list_) + + def appendlist(self, key, item): + self._obj.appendlist(key, item) + + def setlistdefault(self, key, default_list=None): + return self._taint(self._obj.setlistdefault(key, default_list=default_list), key) + + def lists(self): + return self._taint(self._obj.lists(), self._origin_value) + + def dict(self): + return self + + def urlencode(self, safe=None): + return self._taint(self._obj.urlencode(safe=safe), self._origin_value) + + +def supported_dbapi_integration(integration_name): + return integration_name in DBAPI_INTEGRATIONS or integration_name.startswith(DBAPI_PREFIXES) + + +def check_tainted_args(args, kwargs, tracer, integration_name, method): + if supported_dbapi_integration(integration_name) and method.__name__ == "execute": + from ._taint_tracking import is_pyobject_tainted + + return len(args) and args[0] and is_pyobject_tainted(args[0]) + + return False + + +if asm_config._iast_lazy_taint: + # redefining taint_structure to use lazy object if required + + def taint_structure(main_obj, source_key, source_value, override_pyobject_tainted=False): # noqa: F811 + if isinstance(main_obj, abc.Mapping): + return LazyTaintDict(main_obj, source_key, source_value, override_pyobject_tainted) + elif isinstance(main_obj, abc.Sequence): + return LazyTaintList(main_obj, source_key, source_value, override_pyobject_tainted) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py new file mode 100644 index 0000000..3f994b8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py @@ -0,0 +1,124 @@ +import json +import re +import string +import sys +from typing import TYPE_CHECKING # noqa:F401 + +import attr + +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +if TYPE_CHECKING: + from typing import Any # noqa:F401 + from typing import List # noqa:F401 + from typing import Set # noqa:F401 + from typing import Tuple # noqa:F401 + + +def _is_python_version_supported(): # type: () -> bool + # IAST supports Python versions 3.6 to 3.12 + return (3, 6, 0) <= sys.version_info < (3, 13, 0) + + +def _is_iast_enabled(): + if not asm_config._iast_enabled: + return False + + if not _is_python_version_supported(): + log = get_logger(__name__) + log.info("IAST is not compatible with the current Python version") + return False + + return True + + +# Used to cache the compiled regular expression +_SOURCE_NAME_SCRUB = None +_SOURCE_VALUE_SCRUB = None + + +def _has_to_scrub(s): # type: (str) -> bool + global _SOURCE_NAME_SCRUB + global _SOURCE_VALUE_SCRUB + + if _SOURCE_NAME_SCRUB is None: + _SOURCE_NAME_SCRUB = re.compile(asm_config._iast_redaction_name_pattern) + _SOURCE_VALUE_SCRUB = re.compile(asm_config._iast_redaction_value_pattern) + + return _SOURCE_NAME_SCRUB.match(s) is not None or _SOURCE_VALUE_SCRUB.match(s) is not 
None + + +_REPLACEMENTS = string.ascii_letters +_LEN_REPLACEMENTS = len(_REPLACEMENTS) + + +def _scrub(s, has_range=False): # type: (str, bool) -> str + if has_range: + return "".join([_REPLACEMENTS[i % _LEN_REPLACEMENTS] for i in range(len(s))]) + return "*" * len(s) + + +def _is_evidence_value_parts(value): # type: (Any) -> bool + return isinstance(value, (set, list)) + + +def _scrub_get_tokens_positions(text, tokens): + # type: (str, Set[str]) -> List[Tuple[int, int]] + token_positions = [] + + for token in tokens: + position = text.find(token) + if position != -1: + token_positions.append((position, position + len(token))) + + token_positions.sort() + return token_positions + + +def _iast_report_to_str(data): + from ._taint_tracking import OriginType + from ._taint_tracking import origin_to_str + + class OriginTypeEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, OriginType): + # if the obj is uuid, we simply return the value of uuid + return origin_to_str(obj) + return json.JSONEncoder.default(self, obj) + + return json.dumps(attr.asdict(data, filter=lambda attr, x: x is not None), cls=OriginTypeEncoder) + + +def _get_patched_code(module_path, module_name): # type: (str, str) -> str + """ + Print the patched code to stdout, for debugging purposes. + """ + import astunparse + + from ddtrace.appsec._iast._ast.ast_patching import get_encoding + from ddtrace.appsec._iast._ast.ast_patching import visit_ast + + with open(module_path, "r", encoding=get_encoding(module_path)) as source_file: + source_text = source_file.read() + + new_source = visit_ast( + source_text, + module_path, + module_name=module_name, + ) + + # If no modifications are done, + # visit_ast returns None + if not new_source: + return "" + + new_code = astunparse.unparse(new_source) + return new_code + + +if __name__ == "__main__": + MODULE_PATH = sys.argv[1] + MODULE_NAME = sys.argv[2] + print(_get_patched_code(MODULE_PATH, MODULE_NAME)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py new file mode 100644 index 0000000..bd9e739 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py @@ -0,0 +1,89 @@ +from typing import Any +from typing import Dict + + +VULN_INSECURE_HASHING_TYPE = "WEAK_HASH" +VULN_WEAK_CIPHER_TYPE = "WEAK_CIPHER" +VULN_SQL_INJECTION = "SQL_INJECTION" +VULN_PATH_TRAVERSAL = "PATH_TRAVERSAL" +VULN_WEAK_RANDOMNESS = "WEAK_RANDOMNESS" +VULN_INSECURE_COOKIE = "INSECURE_COOKIE" +VULN_NO_HTTPONLY_COOKIE = "NO_HTTPONLY_COOKIE" +VULN_NO_SAMESITE_COOKIE = "NO_SAMESITE_COOKIE" +VULN_CMDI = "COMMAND_INJECTION" +VULN_SSRF = "SSRF" + +VULNERABILITY_TOKEN_TYPE = Dict[int, Dict[str, Any]] + +EVIDENCE_ALGORITHM_TYPE = "ALGORITHM" +EVIDENCE_SQL_INJECTION = "SQL_INJECTION" +EVIDENCE_PATH_TRAVERSAL = "PATH_TRAVERSAL" +EVIDENCE_WEAK_RANDOMNESS = "WEAK_RANDOMNESS" +EVIDENCE_COOKIE = "COOKIE" +EVIDENCE_CMDI = "COMMAND" +EVIDENCE_SSRF = "SSRF" + +MD5_DEF = "md5" +SHA1_DEF = "sha1" + +DES_DEF = "des" +BLOWFISH_DEF = "blowfish" +RC2_DEF = "rc2" +RC4_DEF = "rc4" +IDEA_DEF = "idea" + +DD_IAST_TELEMETRY_VERBOSITY = "DD_IAST_TELEMETRY_VERBOSITY" + +DEFAULT_WEAK_HASH_ALGORITHMS = {MD5_DEF, SHA1_DEF} + +DEFAULT_WEAK_CIPHER_ALGORITHMS = {DES_DEF, BLOWFISH_DEF, RC2_DEF, RC4_DEF, IDEA_DEF} + +DEFAULT_WEAK_RANDOMNESS_FUNCTIONS = { + "random", + "randint", + "randrange", + "choice", + "shuffle", + "betavariate", + "gammavariate", + "expovariate", + "choices", + "gauss", + "uniform", + 
"lognormvariate", + "normalvariate", + "paretovariate", + "sample", + "triangular", + "vonmisesvariate", + "weibullvariate", + "randbytes", +} + +DEFAULT_PATH_TRAVERSAL_FUNCTIONS = { + "glob": {"glob"}, + "os": { + "mkdir", + "remove", + "rename", + "rmdir", + "listdir", + }, + "pickle": {"load"}, + "_pickle": {"load"}, + "posix": { + "mkdir", + "remove", + "rename", + "rmdir", + "listdir", + }, + "shutil": { + "copy", + "copytree", + "move", + "rmtree", + }, + "tarfile": {"open"}, + "zipfile": {"ZipFile"}, +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py new file mode 100644 index 0000000..4109b8e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py @@ -0,0 +1,95 @@ +from typing import TYPE_CHECKING + +import attr + +from ddtrace.appsec._constants import APPSEC +from ddtrace.appsec._constants import IAST +from ddtrace.constants import ORIGIN_KEY +from ddtrace.ext import SpanTypes +from ddtrace.internal import core +from ddtrace.internal.logger import get_logger +from ddtrace.internal.processor import SpanProcessor + +from .._trace_utils import _asm_manual_keep +from . import oce +from ._metrics import _set_metric_iast_request_tainted +from ._metrics import _set_span_tag_iast_executed_sink +from ._metrics import _set_span_tag_iast_request_tainted +from ._utils import _iast_report_to_str +from ._utils import _is_iast_enabled + + +if TYPE_CHECKING: # pragma: no cover + from typing import Optional # noqa:F401 + + from ddtrace.span import Span # noqa:F401 + +log = get_logger(__name__) + + +@attr.s(eq=False) +class AppSecIastSpanProcessor(SpanProcessor): + @staticmethod + def is_span_analyzed(span=None): + # type: (Optional[Span]) -> bool + if span is None: + from ddtrace import tracer + + span = tracer.current_root_span() + + if span and span.span_type == SpanTypes.WEB and core.get_item(IAST.REQUEST_IAST_ENABLED, span=span): + return True + return False + + def on_span_start(self, span): + # type: (Span) -> None + if span.span_type != SpanTypes.WEB: + return + + if not _is_iast_enabled(): + return + + request_iast_enabled = False + if oce.acquire_request(span): + from ._taint_tracking import create_context + + request_iast_enabled = True + create_context() + + core.set_item(IAST.REQUEST_IAST_ENABLED, request_iast_enabled, span=span) + + def on_span_finish(self, span): + # type: (Span) -> None + """Report reported vulnerabilities. + + Span Tags: + - `_dd.iast.json`: Only when one or more vulnerabilities have been detected will we include the custom tag. + - `_dd.iast.enabled`: Set to 1 when IAST is enabled in a request. If a request is disabled + (e.g. by sampling), then it is not set. 
+ """ + if span.span_type != SpanTypes.WEB: + return + + if not core.get_item(IAST.REQUEST_IAST_ENABLED, span=span): + span.set_metric(IAST.ENABLED, 0.0) + return + + from ._taint_tracking import reset_context # noqa: F401 + + span.set_metric(IAST.ENABLED, 1.0) + + data = core.get_item(IAST.CONTEXT_KEY, span=span) + + if data: + span.set_tag_str(IAST.JSON, _iast_report_to_str(data)) + _asm_manual_keep(span) + + _set_metric_iast_request_tainted() + _set_span_tag_iast_request_tainted(span) + _set_span_tag_iast_executed_sink(span) + reset_context() + + if span.get_tag(ORIGIN_KEY) is None: + span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) + + oce.release_request() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py new file mode 100644 index 0000000..5a95aa1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py @@ -0,0 +1,87 @@ +from functools import reduce +import json +import operator +import os +from typing import TYPE_CHECKING +from typing import List +from typing import Set +import zlib + +import attr + + +if TYPE_CHECKING: + import Any # noqa:F401 + import Dict # noqa:F401 + import Optional # noqa:F401 + + +def _only_if_true(value): + return value if value else None + + +@attr.s(eq=False, hash=False) +class Evidence(object): + value = attr.ib(type=str, default=None) # type: Optional[str] + pattern = attr.ib(type=str, default=None) # type: Optional[str] + valueParts = attr.ib(type=list, default=None) # type: Optional[List[Dict[str, Any]]] + redacted = attr.ib(type=bool, default=False, converter=_only_if_true) # type: bool + + def _valueParts_hash(self): + if not self.valueParts: + return + + _hash = 0 + for part in self.valueParts: + json_str = json.dumps(part, sort_keys=True) + part_hash = zlib.crc32(json_str.encode()) + _hash ^= part_hash + + return _hash + + def __hash__(self): + return hash((self.value, self.pattern, self._valueParts_hash(), self.redacted)) + + def __eq__(self, other): + return ( + self.value == other.value + and self.pattern == other.pattern + and self._valueParts_hash() == other._valueParts_hash() + and self.redacted == other.redacted + ) + + +@attr.s(eq=True, hash=True) +class Location(object): + spanId = attr.ib(type=int, eq=False, hash=False, repr=False) # type: int + path = attr.ib(type=str, default=None) # type: Optional[str] + line = attr.ib(type=int, default=None) # type: Optional[int] + + +@attr.s(eq=True, hash=True) +class Vulnerability(object): + type = attr.ib(type=str) # type: str + evidence = attr.ib(type=Evidence, repr=False) # type: Evidence + location = attr.ib(type=Location, hash="PYTEST_CURRENT_TEST" in os.environ) # type: Location + hash = attr.ib(init=False, eq=False, hash=False, repr=False) # type: int + + def __attrs_post_init__(self): + self.hash = zlib.crc32(repr(self).encode()) + + +@attr.s(eq=True, hash=True) +class Source(object): + origin = attr.ib(type=str) # type: str + name = attr.ib(type=str) # type: str + redacted = attr.ib(type=bool, default=False, converter=_only_if_true) # type: bool + value = attr.ib(type=str, default=None) # type: Optional[str] + pattern = attr.ib(type=str, default=None) # type: Optional[str] + + +@attr.s(eq=False, hash=False) +class IastSpanReporter(object): + sources = attr.ib(type=List[Source], factory=list) # type: List[Source] + vulnerabilities = attr.ib(type=Set[Vulnerability], factory=set) # type: Set[Vulnerability] + + def __hash__(self): + return reduce(operator.xor, 
(hash(obj) for obj in set(self.sources) | self.vulnerabilities)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py new file mode 100644 index 0000000..e7c8787 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py @@ -0,0 +1,8 @@ +from .ast_taint import ast_function +from .path_traversal import open_path_traversal + + +__all__ = [ + "open_path_traversal", + "ast_function", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py new file mode 100644 index 0000000..8327bd8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py @@ -0,0 +1,314 @@ +import os +import time +from typing import TYPE_CHECKING # noqa:F401 +from typing import cast # noqa:F401 + +from ddtrace import tracer +from ddtrace.appsec._constants import IAST +from ddtrace.internal import core +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.cache import LFUCache +from ddtrace.settings.asm import config as asm_config + +from ..._deduplications import deduplication +from .._overhead_control_engine import Operation +from .._stacktrace import get_info_frame +from .._utils import _has_to_scrub +from .._utils import _is_evidence_value_parts +from .._utils import _scrub +from ..processor import AppSecIastSpanProcessor +from ..reporter import Evidence +from ..reporter import IastSpanReporter +from ..reporter import Location +from ..reporter import Source +from ..reporter import Vulnerability + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Set # noqa:F401 + from typing import Text # noqa:F401 + from typing import Union # noqa:F401 + +log = get_logger(__name__) + +CWD = os.path.abspath(os.getcwd()) + + +class taint_sink_deduplication(deduplication): + def __call__(self, *args, **kwargs): + # we skip 0, 1 and last position because its the cls, span and sources respectively + result = None + if self.is_deduplication_enabled() is False: + result = self.func(*args, **kwargs) + else: + raw_log_hash = hash("".join([str(arg) for arg in args[2:-1]])) + last_reported_timestamp = self.get_last_time_reported(raw_log_hash) + if time.time() > last_reported_timestamp: + result = self.func(*args, **kwargs) + self.reported_logs[raw_log_hash] = time.time() + self._time_lapse + return result + + +def _check_positions_contained(needle, container): + needle_start, needle_end = needle + container_start, container_end = container + + return ( + (container_start <= needle_start < container_end) + or (container_start < needle_end <= container_end) + or (needle_start <= container_start < needle_end) + or (needle_start < container_end <= needle_end) + ) + + +class VulnerabilityBase(Operation): + vulnerability_type = "" + evidence_type = "" + _redacted_report_cache = LFUCache() + + @classmethod + def _reset_cache(cls): + cls._redacted_report_cache.clear() + + @classmethod + def wrap(cls, func): + # type: (Callable) -> Callable + def wrapper(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + """Get the current root Span and attach it to the wrapped function. 
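_check_positions_contained, shown in this chunk, is the overlap test used during redaction to decide whether a sensitive token's (start, end) position falls inside an evidence value part. The same predicate is reproduced standalone below with two worked examples (the `positions_overlap` name is ours; the boolean expression is copied from the code above).

```python
# Standalone copy of the overlap test used by _check_positions_contained.
# Two half-open (start, end) spans are considered "contained" for redaction
# purposes as soon as they overlap at all.
def positions_overlap(needle, container):
    needle_start, needle_end = needle
    container_start, container_end = container
    return (
        (container_start <= needle_start < container_end)
        or (container_start < needle_end <= container_end)
        or (needle_start <= container_start < needle_end)
        or (needle_start < container_end <= needle_end)
    )

print(positions_overlap((2, 5), (4, 9)))   # True  - partial overlap
print(positions_overlap((0, 3), (3, 6)))   # False - only touching
```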
We need the span to report the + vulnerability and update the context with the report information. + """ + if AppSecIastSpanProcessor.is_span_analyzed() and cls.has_quota(): + return func(wrapped, instance, args, kwargs) + else: + log.debug("IAST: no vulnerability quota to analyze more sink points") + return wrapped(*args, **kwargs) + + return wrapper + + @classmethod + @taint_sink_deduplication + def _prepare_report(cls, span, vulnerability_type, evidence, file_name, line_number, sources): + report = core.get_item(IAST.CONTEXT_KEY, span=span) + if report: + report.vulnerabilities.add( + Vulnerability( + type=vulnerability_type, + evidence=evidence, + location=Location(path=file_name, line=line_number, spanId=span.span_id), + ) + ) + + else: + report = IastSpanReporter( + vulnerabilities={ + Vulnerability( + type=vulnerability_type, + evidence=evidence, + location=Location(path=file_name, line=line_number, spanId=span.span_id), + ) + } + ) + if sources: + + def cast_value(value): + if isinstance(value, (bytes, bytearray)): + value_decoded = value.decode("utf-8") + else: + value_decoded = value + return value_decoded + + report.sources = [Source(origin=x.origin, name=x.name, value=cast_value(x.value)) for x in sources] + + redacted_report = cls._redacted_report_cache.get( + hash(report), lambda x: cls._redact_report(cast(IastSpanReporter, report)) + ) + core.set_item(IAST.CONTEXT_KEY, redacted_report, span=span) + + return True + + @classmethod + def report(cls, evidence_value="", sources=None): + # type: (Union[Text|List[Dict[str, Any]]], Optional[List[Source]]) -> None + """Build a IastSpanReporter instance to report it in the `AppSecIastSpanProcessor` as a string JSON""" + + if cls.acquire_quota(): + if not tracer or not hasattr(tracer, "current_root_span"): + log.debug( + "[IAST] VulnerabilityReporter is trying to report an evidence, " + "but not tracer or tracer has no root span" + ) + return None + + span = tracer.current_root_span() + if not span: + log.debug( + "[IAST] VulnerabilityReporter. No root span in the current execution. Skipping IAST taint sink." 
+ ) + return None + + file_name = None + line_number = None + + skip_location = getattr(cls, "skip_location", False) + if not skip_location: + frame_info = get_info_frame(CWD) + if not frame_info: + return None + + file_name, line_number = frame_info + + # Remove CWD prefix + if file_name.startswith(CWD): + file_name = os.path.relpath(file_name, start=CWD) + + if not cls.is_not_reported(file_name, line_number): + return + + if _is_evidence_value_parts(evidence_value): + evidence = Evidence(valueParts=evidence_value) + # Evidence is a string in weak cipher, weak hash and weak randomness + elif isinstance(evidence_value, (str, bytes, bytearray)): + evidence = Evidence(value=evidence_value) + else: + log.debug("Unexpected evidence_value type: %s", type(evidence_value)) + evidence = Evidence(value="") + + result = cls._prepare_report(span, cls.vulnerability_type, evidence, file_name, line_number, sources) + # If result is None that's mean deduplication raises and no vulnerability wasn't reported, with that, + # we need to restore the quota + if not result: + cls.increment_quota() + + @classmethod + def _extract_sensitive_tokens(cls, report): + # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]] + log.debug("Base class VulnerabilityBase._extract_sensitive_tokens called") + return {} + + @classmethod + def _get_vulnerability_text(cls, vulnerability): + if vulnerability and vulnerability.evidence.value is not None: + return vulnerability.evidence.value + + if vulnerability.evidence.valueParts is not None: + return "".join( + [ + (part.get("value", "") if type(part) is not str else part) + for part in vulnerability.evidence.valueParts + ] + ) + + return "" + + @classmethod + def replace_tokens( + cls, + vuln, + vulns_to_tokens, + has_range=False, + ): + ret = vuln.evidence.value + replaced = False + + for token in vulns_to_tokens[hash(vuln)]["tokens"]: + ret = ret.replace(token, _scrub(token, has_range)) + replaced = True + + return ret, replaced + + @classmethod + def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter + if not asm_config._iast_redaction_enabled: + return report + + # See if there is a match on either any of the sources or value parts of the report + found = False + + for source in report.sources: + # Join them so we only run the regexps once for each source + joined_fields = "%s%s" % (source.name, source.value) + if _has_to_scrub(joined_fields): + found = True + break + + vulns_to_text = {} + + if not found: + # Check the evidence's value/s + for vuln in report.vulnerabilities: + vulnerability_text = cls._get_vulnerability_text(vuln) + if _has_to_scrub(vulnerability_text): + vulns_to_text[vuln] = vulnerability_text + found = True + break + + if not found: + return report + + if not vulns_to_text: + vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities} + + # If we're here, some potentially sensitive information was found, we delegate on + # the specific subclass the task of extracting the variable tokens (e.g. literals inside + # quotes for SQL Injection). 
Note that by just having one potentially sensitive match + # we need to then scrub all the tokens, thus why we do it in two steps instead of one + vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text) + + if not vulns_to_tokens: + return report + + all_tokens = set() # type: Set[str] + for _, value_dict in vulns_to_tokens.items(): + all_tokens.update(value_dict["tokens"]) + + # Iterate over all the sources, if one of the tokens match it, redact it + for source in report.sources: + if source.name in all_tokens or source.value in all_tokens: + source.pattern = _scrub(source.value, has_range=True) + source.redacted = True + source.value = None + + # Same for all the evidence values + for vuln in report.vulnerabilities: + # Use the initial hash directly as iteration key since the vuln itself will change + vuln_hash = hash(vuln) + if vuln.evidence.value is not None: + pattern, replaced = cls.replace_tokens(vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source")) + if replaced: + vuln.evidence.pattern = pattern + vuln.evidence.redacted = True + vuln.evidence.value = None + elif vuln.evidence.valueParts is not None: + idx = 0 + for part in vuln.evidence.valueParts: + value = part["value"] + part_len = len(value) + part_start = idx + part_end = idx + part_len + pattern_list = [] + + for positions in vulns_to_tokens[vuln_hash]["token_positions"]: + if _check_positions_contained(positions, (part_start, part_end)): + part_scrub_start = max(positions[0] - idx, 0) + part_scrub_end = positions[1] - idx + to_scrub = value[part_scrub_start:part_scrub_end] + scrubbed = _scrub(to_scrub, "source" in part) + pattern_list.append(value[:part_scrub_start] + scrubbed + value[part_scrub_end:]) + part["redacted"] = True + else: + pattern_list.append(value[part_start:part_end]) + continue + + if "redacted" in part: + part["pattern"] = "".join(pattern_list) + del part["value"] + + idx += part_len + + return report diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py new file mode 100644 index 0000000..af8f59b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py @@ -0,0 +1,47 @@ +from typing import TYPE_CHECKING # noqa:F401 + +from ..._constants import IAST_SPAN_TAGS +from .._metrics import _set_metric_iast_executed_sink +from .._metrics import increment_iast_span_metric +from ..constants import DEFAULT_PATH_TRAVERSAL_FUNCTIONS +from ..constants import DEFAULT_WEAK_RANDOMNESS_FUNCTIONS +from .path_traversal import check_and_report_path_traversal +from .weak_randomness import WeakRandomness + + +if TYPE_CHECKING: + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + + +def ast_function( + func, # type: Callable + flag_added_args, # type: Any + *args, # type: Any + **kwargs, # type: Any +): # type: (...) 
-> Any + instance = getattr(func, "__self__", None) + func_name = getattr(func, "__name__", None) + cls_name = "" + if instance is not None and func_name: + try: + cls_name = instance.__class__.__name__ + except AttributeError: + pass + + if flag_added_args > 0: + args = args[flag_added_args:] + + if ( + instance.__class__.__module__ == "random" + and cls_name == "Random" + and func_name in DEFAULT_WEAK_RANDOMNESS_FUNCTIONS + ): + # Weak, run the analyzer + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakRandomness.vulnerability_type) + _set_metric_iast_executed_sink(WeakRandomness.vulnerability_type) + WeakRandomness.report(evidence_value=cls_name + "." + func_name) + elif hasattr(func, "__module__") and DEFAULT_PATH_TRAVERSAL_FUNCTIONS.get(func.__module__): + if func_name in DEFAULT_PATH_TRAVERSAL_FUNCTIONS[func.__module__]: + check_and_report_path_traversal(*args, **kwargs) + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py new file mode 100644 index 0000000..b792adc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py @@ -0,0 +1,254 @@ +import os +import re +import subprocess # nosec +from typing import TYPE_CHECKING # noqa:F401 +from typing import List # noqa:F401 +from typing import Set # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.contrib import trace_utils +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + +from ..._constants import IAST_SPAN_TAGS +from .. import oce +from .._metrics import increment_iast_span_metric +from .._utils import _has_to_scrub +from .._utils import _scrub +from .._utils import _scrub_get_tokens_positions +from ..constants import EVIDENCE_CMDI +from ..constants import VULN_CMDI +from ._base import VulnerabilityBase +from ._base import _check_positions_contained + + +if TYPE_CHECKING: + from typing import Any # noqa:F401 + from typing import Dict # noqa:F401 + + from ..reporter import IastSpanReporter # noqa:F401 + from ..reporter import Vulnerability # noqa:F401 + + +log = get_logger(__name__) + +_INSIDE_QUOTES_REGEXP = re.compile(r"^(?:\s*(?:sudo|doas)\s+)?\b\S+\b\s*(.*)") + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + if not asm_config._iast_enabled: + return + + if not getattr(os, "_datadog_cmdi_patch", False): + trace_utils.wrap(os, "system", _iast_cmdi_ossystem) + + # all os.spawn* variants eventually use this one: + trace_utils.wrap(os, "_spawnvef", _iast_cmdi_osspawn) + + if not getattr(subprocess, "_datadog_cmdi_patch", False): + trace_utils.wrap(subprocess, "Popen.__init__", _iast_cmdi_subprocess_init) + + os._datadog_cmdi_patch = True + subprocess._datadog_cmdi_patch = True + + +def unpatch(): + # type: () -> None + trace_utils.unwrap(os, "system") + trace_utils.unwrap(os, "_spawnvef") + trace_utils.unwrap(subprocess.Popen, "__init__") + + os._datadog_cmdi_patch = False # type: ignore[attr-defined] + subprocess._datadog_cmdi_patch = False # type: ignore[attr-defined] + + +def _iast_cmdi_ossystem(wrapped, instance, args, kwargs): + _iast_report_cmdi(args[0]) + return wrapped(*args, **kwargs) + + +def _iast_cmdi_osspawn(wrapped, instance, args, kwargs): + mode, file, func_args, _, _ = args + _iast_report_cmdi(func_args) + + return wrapped(*args, **kwargs) + + +def _iast_cmdi_subprocess_init(wrapped, 
instance, args, kwargs): + cmd_args = args[0] if len(args) else kwargs["args"] + _iast_report_cmdi(cmd_args) + + return wrapped(*args, **kwargs) + + +@oce.register +class CommandInjection(VulnerabilityBase): + vulnerability_type = VULN_CMDI + evidence_type = EVIDENCE_CMDI + + @classmethod + def report(cls, evidence_value=None, sources=None): + if isinstance(evidence_value, (str, bytes, bytearray)): + from .._taint_tracking import taint_ranges_as_evidence_info + + evidence_value, sources = taint_ranges_as_evidence_info(evidence_value) + super(CommandInjection, cls).report(evidence_value=evidence_value, sources=sources) + + @classmethod + def _extract_sensitive_tokens(cls, vulns_to_text): + # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]] + ret = {} # type: Dict[int, Dict[str, Any]] + for vuln, text in vulns_to_text.items(): + vuln_hash = hash(vuln) + ret[vuln_hash] = { + "tokens": set(_INSIDE_QUOTES_REGEXP.findall(text)), + } + ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"]) + + return ret + + @classmethod + def replace_tokens( + cls, + vuln, + vulns_to_tokens, + has_range=False, + ): + ret = vuln.evidence.value + replaced = False + + for token in vulns_to_tokens[hash(vuln)]["tokens"]: + ret = ret.replace(token, "") + replaced = True + + return ret, replaced + + @classmethod + def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter + if not asm_config._iast_redaction_enabled: + return report + + # See if there is a match on either any of the sources or value parts of the report + found = False + + for source in report.sources: + # Join them so we only run the regexps once for each source + joined_fields = "%s%s" % (source.name, source.value) + if _has_to_scrub(joined_fields): + found = True + break + + vulns_to_text = {} + + if not found: + # Check the evidence's value/s + for vuln in report.vulnerabilities: + vulnerability_text = cls._get_vulnerability_text(vuln) + if _has_to_scrub(vulnerability_text) or _INSIDE_QUOTES_REGEXP.match(vulnerability_text): + vulns_to_text[vuln] = vulnerability_text + found = True + break + + if not found: + return report + + if not vulns_to_text: + vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities} + + # If we're here, some potentially sensitive information was found, we delegate on + # the specific subclass the task of extracting the variable tokens (e.g. literals inside + # quotes for SQL Injection). 
Note that by just having one potentially sensitive match + # we need to then scrub all the tokens, thus why we do it in two steps instead of one + vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text) + + if not vulns_to_tokens: + return report + + all_tokens = set() # type: Set[str] + for _, value_dict in vulns_to_tokens.items(): + all_tokens.update(value_dict["tokens"]) + + # Iterate over all the sources, if one of the tokens match it, redact it + for source in report.sources: + if source.name in "".join(all_tokens) or source.value in "".join(all_tokens): + source.pattern = _scrub(source.value, has_range=True) + source.redacted = True + source.value = None + + # Same for all the evidence values + try: + for vuln in report.vulnerabilities: + # Use the initial hash directly as iteration key since the vuln itself will change + vuln_hash = hash(vuln) + if vuln.evidence.value is not None: + pattern, replaced = cls.replace_tokens( + vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source") + ) + if replaced: + vuln.evidence.pattern = pattern + vuln.evidence.redacted = True + vuln.evidence.value = None + elif vuln.evidence.valueParts is not None: + idx = 0 + new_value_parts = [] + for part in vuln.evidence.valueParts: + value = part["value"] + part_len = len(value) + part_start = idx + part_end = idx + part_len + pattern_list = [] + + for positions in vulns_to_tokens[vuln_hash]["token_positions"]: + if _check_positions_contained(positions, (part_start, part_end)): + part_scrub_start = max(positions[0] - idx, 0) + part_scrub_end = positions[1] - idx + pattern_list.append(value[:part_scrub_start] + "" + value[part_scrub_end:]) + if part.get("source", False) is not False: + source = report.sources[part["source"]] + if source.redacted: + part["redacted"] = source.redacted + part["pattern"] = source.pattern + del part["value"] + new_value_parts.append(part) + break + else: + part["value"] = "".join(pattern_list) + new_value_parts.append(part) + new_value_parts.append({"redacted": True}) + break + else: + new_value_parts.append(part) + pattern_list.append(value[part_start:part_end]) + break + + idx += part_len + vuln.evidence.valueParts = new_value_parts + except (ValueError, KeyError): + log.debug("an error occurred while redacting cmdi", exc_info=True) + return report + + +def _iast_report_cmdi(shell_args): + # type: (Union[str, List[str]]) -> None + report_cmdi = "" + from .._metrics import _set_metric_iast_executed_sink + from .._taint_tracking import is_pyobject_tainted + from .._taint_tracking.aspects import join_aspect + + if isinstance(shell_args, (list, tuple)): + for arg in shell_args: + if is_pyobject_tainted(arg): + report_cmdi = join_aspect(" ".join, 1, " ", shell_args) + break + elif is_pyobject_tainted(shell_args): + report_cmdi = shell_args + + if report_cmdi: + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, CommandInjection.vulnerability_type) + _set_metric_iast_executed_sink(CommandInjection.vulnerability_type) + CommandInjection.report(evidence_value=report_cmdi) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py new file mode 100644 index 0000000..bb81477 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py @@ -0,0 +1,72 @@ +from typing import TYPE_CHECKING # noqa:F401 + +from ..._constants import IAST_SPAN_TAGS +from .. 
import oce +from .._metrics import _set_metric_iast_executed_sink +from .._metrics import increment_iast_span_metric +from ..constants import EVIDENCE_COOKIE +from ..constants import VULN_INSECURE_COOKIE +from ..constants import VULN_NO_HTTPONLY_COOKIE +from ..constants import VULN_NO_SAMESITE_COOKIE +from ..taint_sinks._base import VulnerabilityBase + + +if TYPE_CHECKING: + from typing import Dict # noqa:F401 + from typing import Optional # noqa:F401 + + +@oce.register +class InsecureCookie(VulnerabilityBase): + vulnerability_type = VULN_INSECURE_COOKIE + evidence_type = EVIDENCE_COOKIE + scrub_evidence = False + skip_location = True + + +@oce.register +class NoHttpOnlyCookie(VulnerabilityBase): + vulnerability_type = VULN_NO_HTTPONLY_COOKIE + evidence_type = EVIDENCE_COOKIE + skip_location = True + + +@oce.register +class NoSameSite(VulnerabilityBase): + vulnerability_type = VULN_NO_SAMESITE_COOKIE + evidence_type = EVIDENCE_COOKIE + skip_location = True + + +def asm_check_cookies(cookies): # type: (Optional[Dict[str, str]]) -> None + if not cookies: + return + + for cookie_key, cookie_value in cookies.items(): + lvalue = cookie_value.lower().replace(" ", "") + + if ";secure" not in lvalue: + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, InsecureCookie.vulnerability_type) + _set_metric_iast_executed_sink(InsecureCookie.vulnerability_type) + InsecureCookie.report(evidence_value=cookie_key) + + if ";httponly" not in lvalue: + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, NoHttpOnlyCookie.vulnerability_type) + _set_metric_iast_executed_sink(NoHttpOnlyCookie.vulnerability_type) + NoHttpOnlyCookie.report(evidence_value=cookie_key) + + if ";samesite=" in lvalue: + ss_tokens = lvalue.split(";samesite=") + if len(ss_tokens) == 0: + report_samesite = True + elif ss_tokens[1].startswith("strict") or ss_tokens[1].startswith("lax"): + report_samesite = False + else: + report_samesite = True + else: + report_samesite = True + + if report_samesite: + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, NoSameSite.vulnerability_type) + _set_metric_iast_executed_sink(NoSameSite.vulnerability_type) + NoSameSite.report(evidence_value=cookie_key) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py new file mode 100644 index 0000000..c761800 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py @@ -0,0 +1,70 @@ +from typing import Any + +from ddtrace.internal.logger import get_logger + +from ..._constants import IAST_SPAN_TAGS +from .. 
import oce +from .._metrics import _set_metric_iast_instrumented_sink +from .._metrics import increment_iast_span_metric +from .._patch import set_and_check_module_is_patched +from .._patch import set_module_unpatched +from ..constants import EVIDENCE_PATH_TRAVERSAL +from ..constants import VULN_PATH_TRAVERSAL +from ..processor import AppSecIastSpanProcessor +from ._base import VulnerabilityBase + + +log = get_logger(__name__) + + +@oce.register +class PathTraversal(VulnerabilityBase): + vulnerability_type = VULN_PATH_TRAVERSAL + evidence_type = EVIDENCE_PATH_TRAVERSAL + + @classmethod + def report(cls, evidence_value=None, sources=None): + if isinstance(evidence_value, (str, bytes, bytearray)): + from .._taint_tracking import taint_ranges_as_evidence_info + + evidence_value, sources = taint_ranges_as_evidence_info(evidence_value) + super(PathTraversal, cls).report(evidence_value=evidence_value, sources=sources) + + +def get_version(): + # type: () -> str + return "" + + +def unpatch_iast(): + # type: () -> None + set_module_unpatched("builtins", default_attr="_datadog_path_traversal_patch") + + +def patch(): + # type: () -> None + """Wrap functions which interact with file system.""" + if not set_and_check_module_is_patched("builtins", default_attr="_datadog_path_traversal_patch"): + return + _set_metric_iast_instrumented_sink(VULN_PATH_TRAVERSAL) + + +def check_and_report_path_traversal(*args: Any, **kwargs: Any) -> None: + if AppSecIastSpanProcessor.is_span_analyzed() and PathTraversal.has_quota(): + try: + from .._metrics import _set_metric_iast_executed_sink + from .._taint_tracking import is_pyobject_tainted + + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, PathTraversal.vulnerability_type) + _set_metric_iast_executed_sink(PathTraversal.vulnerability_type) + if is_pyobject_tainted(args[0]): + PathTraversal.report(evidence_value=args[0]) + except Exception: + log.debug("Unexpected exception while reporting vulnerability", exc_info=True) + else: + log.debug("IAST: no vulnerability quota to analyze more sink points") + + +def open_path_traversal(*args, **kwargs): + check_and_report_path_traversal(*args, **kwargs) + return open(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py new file mode 100644 index 0000000..314bcd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py @@ -0,0 +1,44 @@ +import re +from typing import TYPE_CHECKING # noqa:F401 + +from .. 
import oce +from .._taint_tracking import taint_ranges_as_evidence_info +from .._utils import _scrub_get_tokens_positions +from ..constants import EVIDENCE_SQL_INJECTION +from ..constants import VULN_SQL_INJECTION +from ._base import VulnerabilityBase + + +if TYPE_CHECKING: + from typing import Any # noqa:F401 + from typing import Dict # noqa:F401 + + from .reporter import Vulnerability # noqa:F401 + + +_INSIDE_QUOTES_REGEXP = re.compile(r'["\']([^"\']*?)["\']') + + +@oce.register +class SqlInjection(VulnerabilityBase): + vulnerability_type = VULN_SQL_INJECTION + evidence_type = EVIDENCE_SQL_INJECTION + + @classmethod + def report(cls, evidence_value=None, sources=None): + if isinstance(evidence_value, (str, bytes, bytearray)): + evidence_value, sources = taint_ranges_as_evidence_info(evidence_value) + super(SqlInjection, cls).report(evidence_value=evidence_value, sources=sources) + + @classmethod + def _extract_sensitive_tokens(cls, vulns_to_text): + # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]] + ret = {} # type: Dict[int, Dict[str, Any]] + for vuln, text in vulns_to_text.items(): + vuln_hash = hash(vuln) + ret[vuln_hash] = { + "tokens": set(_INSIDE_QUOTES_REGEXP.findall(text)), + } + ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"]) + + return ret diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py new file mode 100644 index 0000000..5e8a502 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py @@ -0,0 +1,175 @@ +import re +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Set # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + +from ..._constants import IAST_SPAN_TAGS +from .. 
import oce +from .._metrics import increment_iast_span_metric +from .._taint_tracking import taint_ranges_as_evidence_info +from .._utils import _has_to_scrub +from .._utils import _scrub +from .._utils import _scrub_get_tokens_positions +from ..constants import EVIDENCE_SSRF +from ..constants import VULN_SSRF +from ..constants import VULNERABILITY_TOKEN_TYPE +from ..processor import AppSecIastSpanProcessor +from ..reporter import IastSpanReporter # noqa:F401 +from ..reporter import Vulnerability +from ._base import VulnerabilityBase +from ._base import _check_positions_contained + + +log = get_logger(__name__) + + +_AUTHORITY_REGEXP = re.compile(r"(?:\/\/([^:@\/]+)(?::([^@\/]+))?@).*") +_QUERY_FRAGMENT_REGEXP = re.compile(r"[?#&]([^=&;]+)=(?P[^?#&]+)") + + +@oce.register +class SSRF(VulnerabilityBase): + vulnerability_type = VULN_SSRF + evidence_type = EVIDENCE_SSRF + + @classmethod + def report(cls, evidence_value=None, sources=None): + if isinstance(evidence_value, (str, bytes, bytearray)): + evidence_value, sources = taint_ranges_as_evidence_info(evidence_value) + super(SSRF, cls).report(evidence_value=evidence_value, sources=sources) + + @classmethod + def _extract_sensitive_tokens(cls, vulns_to_text: Dict[Vulnerability, str]) -> VULNERABILITY_TOKEN_TYPE: + ret = {} # type: VULNERABILITY_TOKEN_TYPE + for vuln, text in vulns_to_text.items(): + vuln_hash = hash(vuln) + authority = [] + authority_found = _AUTHORITY_REGEXP.findall(text) + if authority_found: + authority = list(authority_found[0]) + query = [value for param, value in _QUERY_FRAGMENT_REGEXP.findall(text)] + ret[vuln_hash] = { + "tokens": set(authority + query), + } + ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"]) + + return ret + + @classmethod + def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter + if not asm_config._iast_redaction_enabled: + return report + + # See if there is a match on either any of the sources or value parts of the report + found = False + + for source in report.sources: + # Join them so we only run the regexps once for each source + joined_fields = "%s%s" % (source.name, source.value) + if _has_to_scrub(joined_fields): + found = True + break + + vulns_to_text = {} + + if not found: + # Check the evidence's value/s + for vuln in report.vulnerabilities: + vulnerability_text = cls._get_vulnerability_text(vuln) + if _has_to_scrub(vulnerability_text) or _AUTHORITY_REGEXP.match(vulnerability_text): + vulns_to_text[vuln] = vulnerability_text + found = True + break + + if not found: + return report + + if not vulns_to_text: + vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities} + + # If we're here, some potentially sensitive information was found, we delegate on + # the specific subclass the task of extracting the variable tokens (e.g. literals inside + # quotes for SQL Injection). 
Note that by just having one potentially sensitive match + # we need to then scrub all the tokens, thus why we do it in two steps instead of one + vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text) + + if not vulns_to_tokens: + return report + + all_tokens = set() # type: Set[str] + for _, value_dict in vulns_to_tokens.items(): + all_tokens.update(value_dict["tokens"]) + + # Iterate over all the sources, if one of the tokens match it, redact it + for source in report.sources: + if source.name in "".join(all_tokens) or source.value in "".join(all_tokens): + source.pattern = _scrub(source.value, has_range=True) + source.redacted = True + source.value = None + + # Same for all the evidence values + for vuln in report.vulnerabilities: + # Use the initial hash directly as iteration key since the vuln itself will change + vuln_hash = hash(vuln) + if vuln.evidence.value is not None: + pattern, replaced = cls.replace_tokens(vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source")) + if replaced: + vuln.evidence.pattern = pattern + vuln.evidence.redacted = True + vuln.evidence.value = None + elif vuln.evidence.valueParts is not None: + idx = 0 + new_value_parts = [] + for part in vuln.evidence.valueParts: + value = part["value"] + part_len = len(value) + part_start = idx + part_end = idx + part_len + pattern_list = [] + + for positions in vulns_to_tokens[vuln_hash]["token_positions"]: + if _check_positions_contained(positions, (part_start, part_end)): + part_scrub_start = max(positions[0] - idx, 0) + part_scrub_end = positions[1] - idx + pattern_list.append(value[:part_scrub_start] + "" + value[part_scrub_end:]) + if part.get("source", False) is not False: + source = report.sources[part["source"]] + if source.redacted: + part["redacted"] = source.redacted + part["pattern"] = source.pattern + del part["value"] + new_value_parts.append(part) + break + else: + part["value"] = "".join(pattern_list) + new_value_parts.append(part) + new_value_parts.append({"redacted": True}) + break + else: + new_value_parts.append(part) + pattern_list.append(value[part_start:part_end]) + break + + idx += part_len + vuln.evidence.valueParts = new_value_parts + return report + + +def _iast_report_ssrf(func: Callable, *args, **kwargs): + from .._metrics import _set_metric_iast_executed_sink + + report_ssrf = kwargs.get("url", False) + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, SSRF.vulnerability_type) + _set_metric_iast_executed_sink(SSRF.vulnerability_type) + if report_ssrf: + if AppSecIastSpanProcessor.is_span_analyzed() and SSRF.has_quota(): + try: + from .._taint_tracking import is_pyobject_tainted + + if is_pyobject_tainted(report_ssrf): + SSRF.report(evidence_value=report_ssrf) + except Exception: + log.debug("Unexpected exception while reporting vulnerability", exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py new file mode 100644 index 0000000..3199528 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py @@ -0,0 +1,166 @@ +import os +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger + +from ..._constants import IAST_SPAN_TAGS +from .. 
import oce +from .._metrics import _set_metric_iast_executed_sink +from .._metrics import _set_metric_iast_instrumented_sink +from .._metrics import increment_iast_span_metric +from .._patch import set_and_check_module_is_patched +from .._patch import set_module_unpatched +from .._patch import try_unwrap +from .._patch import try_wrap_function_wrapper +from ..constants import BLOWFISH_DEF +from ..constants import DEFAULT_WEAK_CIPHER_ALGORITHMS +from ..constants import DES_DEF +from ..constants import EVIDENCE_ALGORITHM_TYPE +from ..constants import RC2_DEF +from ..constants import RC4_DEF +from ..constants import VULN_WEAK_CIPHER_TYPE +from ._base import VulnerabilityBase + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import Set # noqa:F401 + +log = get_logger(__name__) + + +def get_weak_cipher_algorithms(): + # type: () -> Set + CONFIGURED_WEAK_CIPHER_ALGORITHMS = None + DD_IAST_WEAK_CIPHER_ALGORITHMS = os.getenv("DD_IAST_WEAK_CIPHER_ALGORITHMS") + if DD_IAST_WEAK_CIPHER_ALGORITHMS: + CONFIGURED_WEAK_CIPHER_ALGORITHMS = set( + algo.strip() for algo in DD_IAST_WEAK_CIPHER_ALGORITHMS.lower().split(",") + ) + return CONFIGURED_WEAK_CIPHER_ALGORITHMS or DEFAULT_WEAK_CIPHER_ALGORITHMS + + +@oce.register +class WeakCipher(VulnerabilityBase): + vulnerability_type = VULN_WEAK_CIPHER_TYPE + evidence_type = EVIDENCE_ALGORITHM_TYPE + + +def unpatch_iast(): + # type: () -> None + set_module_unpatched("Crypto", default_attr="_datadog_weak_cipher_patch") + set_module_unpatched("cryptography", default_attr="_datadog_weak_cipher_patch") + + try_unwrap("Crypto.Cipher.DES", "new") + try_unwrap("Crypto.Cipher.Blowfish", "new") + try_unwrap("Crypto.Cipher.ARC2", "new") + try_unwrap("Crypto.Cipher.ARC4", "ARC4Cipher.encrypt") + try_unwrap("Crypto.Cipher._mode_cbc", "CbcMode.encrypt") + try_unwrap("Crypto.Cipher._mode_cfb", "CfbMode.encrypt") + try_unwrap("Crypto.Cipher._mode_ofb", "OfbMode.encrypt") + try_unwrap("cryptography.hazmat.primitives.ciphers", "Cipher.encryptor") + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + # type: () -> None + """Wrap hashing functions. + Weak hashing algorithms are those that have been proven to be of high risk, or even completely broken, + and thus are not fit for use. 
+ """ + if not set_and_check_module_is_patched("Crypto", default_attr="_datadog_weak_cipher_patch"): + return + if not set_and_check_module_is_patched("cryptography", default_attr="_datadog_weak_cipher_patch"): + return + + weak_cipher_algorithms = get_weak_cipher_algorithms() + num_instrumented_sinks = 0 + # pycryptodome methods + if DES_DEF in weak_cipher_algorithms: + try_wrap_function_wrapper("Crypto.Cipher.DES", "new", wrapped_aux_des_function) + num_instrumented_sinks += 1 + if BLOWFISH_DEF in weak_cipher_algorithms: + try_wrap_function_wrapper("Crypto.Cipher.Blowfish", "new", wrapped_aux_blowfish_function) + num_instrumented_sinks += 1 + if RC2_DEF in weak_cipher_algorithms: + try_wrap_function_wrapper("Crypto.Cipher.ARC2", "new", wrapped_aux_rc2_function) + num_instrumented_sinks += 1 + if RC4_DEF in weak_cipher_algorithms: + try_wrap_function_wrapper("Crypto.Cipher.ARC4", "ARC4Cipher.encrypt", wrapped_rc4_function) + num_instrumented_sinks += 1 + + if weak_cipher_algorithms: + try_wrap_function_wrapper("Crypto.Cipher._mode_cbc", "CbcMode.encrypt", wrapped_function) + try_wrap_function_wrapper("Crypto.Cipher._mode_cfb", "CfbMode.encrypt", wrapped_function) + try_wrap_function_wrapper("Crypto.Cipher._mode_ecb", "EcbMode.encrypt", wrapped_function) + try_wrap_function_wrapper("Crypto.Cipher._mode_ofb", "OfbMode.encrypt", wrapped_function) + num_instrumented_sinks += 4 + + # cryptography methods + try_wrap_function_wrapper( + "cryptography.hazmat.primitives.ciphers", "Cipher.encryptor", wrapped_cryptography_function + ) + num_instrumented_sinks += 1 + + _set_metric_iast_instrumented_sink(VULN_WEAK_CIPHER_TYPE, num_instrumented_sinks) + + +def wrapped_aux_rc2_function(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + result._dd_weakcipher_algorithm = "RC2" + return result + + +def wrapped_aux_des_function(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + result._dd_weakcipher_algorithm = "DES" + return result + + +def wrapped_aux_blowfish_function(wrapped, instance, args, kwargs): + result = wrapped(*args, **kwargs) + result._dd_weakcipher_algorithm = "Blowfish" + return result + + +@WeakCipher.wrap +def wrapped_rc4_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type) + _set_metric_iast_executed_sink(WeakCipher.vulnerability_type) + WeakCipher.report( + evidence_value="RC4", + ) + return wrapped(*args, **kwargs) + + +@WeakCipher.wrap +def wrapped_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + if hasattr(instance, "_dd_weakcipher_algorithm"): + evidence = instance._dd_weakcipher_algorithm + "_" + str(instance.__class__.__name__) + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type) + _set_metric_iast_executed_sink(WeakCipher.vulnerability_type) + WeakCipher.report( + evidence_value=evidence, + ) + + return wrapped(*args, **kwargs) + + +@WeakCipher.wrap +def wrapped_cryptography_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + algorithm_name = instance.algorithm.name.lower() + if algorithm_name in get_weak_cipher_algorithms(): + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type) + _set_metric_iast_executed_sink(WeakCipher.vulnerability_type) + WeakCipher.report( + evidence_value=algorithm_name, + ) + return wrapped(*args, **kwargs) diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py new file mode 100644 index 0000000..9bebaf8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py @@ -0,0 +1,171 @@ +import os +import sys +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger + +from ..._constants import IAST_SPAN_TAGS +from .. import oce +from .._metrics import _set_metric_iast_executed_sink +from .._metrics import _set_metric_iast_instrumented_sink +from .._metrics import increment_iast_span_metric +from .._patch import set_and_check_module_is_patched +from .._patch import set_module_unpatched +from .._patch import try_unwrap +from .._patch import try_wrap_function_wrapper +from ..constants import DEFAULT_WEAK_HASH_ALGORITHMS +from ..constants import EVIDENCE_ALGORITHM_TYPE +from ..constants import MD5_DEF +from ..constants import SHA1_DEF +from ..constants import VULN_INSECURE_HASHING_TYPE +from ._base import VulnerabilityBase + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import Set # noqa:F401 + +log = get_logger(__name__) + + +def get_weak_hash_algorithms(): + # type: () -> Set + CONFIGURED_WEAK_HASH_ALGORITHMS = None + DD_IAST_WEAK_HASH_ALGORITHMS = os.getenv("DD_IAST_WEAK_HASH_ALGORITHMS") + if DD_IAST_WEAK_HASH_ALGORITHMS: + CONFIGURED_WEAK_HASH_ALGORITHMS = set(algo.strip() for algo in DD_IAST_WEAK_HASH_ALGORITHMS.lower().split(",")) + + return CONFIGURED_WEAK_HASH_ALGORITHMS or DEFAULT_WEAK_HASH_ALGORITHMS + + +@oce.register +class WeakHash(VulnerabilityBase): + vulnerability_type = VULN_INSECURE_HASHING_TYPE + evidence_type = EVIDENCE_ALGORITHM_TYPE + + +def unpatch_iast(): + # type: () -> None + set_module_unpatched("hashlib", default_attr="_datadog_weak_hash_patch") + set_module_unpatched("Crypto", default_attr="_datadog_weak_hash_patch") + + if sys.version_info >= (3, 0, 0): + try_unwrap("_hashlib", "HASH.digest") + try_unwrap("_hashlib", "HASH.hexdigest") + try_unwrap(("_%s" % MD5_DEF), "MD5Type.digest") + try_unwrap(("_%s" % MD5_DEF), "MD5Type.hexdigest") + try_unwrap(("_%s" % SHA1_DEF), "SHA1Type.digest") + try_unwrap(("_%s" % SHA1_DEF), "SHA1Type.hexdigest") + else: + try_unwrap("hashlib", MD5_DEF) + try_unwrap("hashlib", SHA1_DEF) + try_unwrap("hashlib", "new") + + # pycryptodome methods + try_unwrap("Crypto.Hash.MD5", "MD5Hash.digest") + try_unwrap("Crypto.Hash.MD5", "MD5Hash.hexdigest") + try_unwrap("Crypto.Hash.SHA1", "SHA1Hash.digest") + try_unwrap("Crypto.Hash.SHA1", "SHA1Hash.hexdigest") + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + # type: () -> None + """Wrap hashing functions. + Weak hashing algorithms are those that have been proven to be of high risk, or even completely broken, + and thus are not fit for use. 
+ """ + + if not set_and_check_module_is_patched("hashlib", default_attr="_datadog_weak_hash_patch"): + return + + if not set_and_check_module_is_patched("Crypto", default_attr="_datadog_weak_hash_patch"): + return + + weak_hash_algorithms = get_weak_hash_algorithms() + num_instrumented_sinks = 0 + if sys.version_info >= (3, 0, 0): + try_wrap_function_wrapper("_hashlib", "HASH.digest", wrapped_digest_function) + try_wrap_function_wrapper("_hashlib", "HASH.hexdigest", wrapped_digest_function) + num_instrumented_sinks += 2 + if MD5_DEF in weak_hash_algorithms: + try_wrap_function_wrapper(("_%s" % MD5_DEF), "MD5Type.digest", wrapped_md5_function) + try_wrap_function_wrapper(("_%s" % MD5_DEF), "MD5Type.hexdigest", wrapped_md5_function) + num_instrumented_sinks += 2 + if SHA1_DEF in weak_hash_algorithms: + try_wrap_function_wrapper(("_%s" % SHA1_DEF), "SHA1Type.digest", wrapped_sha1_function) + try_wrap_function_wrapper(("_%s" % SHA1_DEF), "SHA1Type.hexdigest", wrapped_sha1_function) + num_instrumented_sinks += 2 + else: + if MD5_DEF in weak_hash_algorithms: + try_wrap_function_wrapper("hashlib", MD5_DEF, wrapped_md5_function) + num_instrumented_sinks += 1 + if SHA1_DEF in weak_hash_algorithms: + try_wrap_function_wrapper("hashlib", SHA1_DEF, wrapped_sha1_function) + num_instrumented_sinks += 1 + try_wrap_function_wrapper("hashlib", "new", wrapped_new_function) + num_instrumented_sinks += 1 + + # pycryptodome methods + if MD5_DEF in weak_hash_algorithms: + try_wrap_function_wrapper("Crypto.Hash.MD5", "MD5Hash.digest", wrapped_md5_function) + try_wrap_function_wrapper("Crypto.Hash.MD5", "MD5Hash.hexdigest", wrapped_md5_function) + num_instrumented_sinks += 2 + if SHA1_DEF in weak_hash_algorithms: + try_wrap_function_wrapper("Crypto.Hash.SHA1", "SHA1Hash.digest", wrapped_sha1_function) + try_wrap_function_wrapper("Crypto.Hash.SHA1", "SHA1Hash.hexdigest", wrapped_sha1_function) + num_instrumented_sinks += 2 + + if num_instrumented_sinks > 0: + _set_metric_iast_instrumented_sink(VULN_INSECURE_HASHING_TYPE, num_instrumented_sinks) + + +@WeakHash.wrap +def wrapped_digest_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + if instance.name.lower() in get_weak_hash_algorithms(): + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type) + _set_metric_iast_executed_sink(WeakHash.vulnerability_type) + WeakHash.report( + evidence_value=instance.name, + ) + return wrapped(*args, **kwargs) + + +@WeakHash.wrap +def wrapped_md5_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + return wrapped_function(wrapped, MD5_DEF, instance, args, kwargs) + + +@WeakHash.wrap +def wrapped_sha1_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + return wrapped_function(wrapped, SHA1_DEF, instance, args, kwargs) + + +@WeakHash.wrap +def wrapped_new_function(wrapped, instance, args, kwargs): + # type: (Callable, Any, Any, Any) -> Any + if args[0].lower() in get_weak_hash_algorithms(): + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type) + _set_metric_iast_executed_sink(WeakHash.vulnerability_type) + WeakHash.report( + evidence_value=args[0].lower(), + ) + return wrapped(*args, **kwargs) + + +def wrapped_function(wrapped, evidence, instance, args, kwargs): + # type: (Callable, str, Any, Any, Any) -> Any + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type) + 
_set_metric_iast_executed_sink(WeakHash.vulnerability_type) + WeakHash.report( + evidence_value=evidence, + ) + return wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py new file mode 100644 index 0000000..bd7fc6e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py @@ -0,0 +1,14 @@ +from .. import oce +from ..constants import EVIDENCE_WEAK_RANDOMNESS +from ..constants import VULN_WEAK_RANDOMNESS +from ._base import VulnerabilityBase + + +@oce.register +class WeakRandomness(VulnerabilityBase): + vulnerability_type = VULN_WEAK_RANDOMNESS + evidence_type = EVIDENCE_WEAK_RANDOMNESS + + @classmethod + def report(cls, evidence_value=None, sources=None): + super(WeakRandomness, cls).report(evidence_value=evidence_value) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py new file mode 100644 index 0000000..e4a1a20 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py @@ -0,0 +1,124 @@ +from ddtrace.appsec import _asm_request_context +from ddtrace.appsec._ddwaf import DDWaf_info +from ddtrace.appsec._ddwaf import version +from ddtrace.appsec._deduplications import deduplication +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +@deduplication +def _set_waf_error_metric(msg: str, stack_trace: str, info: DDWaf_info) -> None: + # perf - avoid importing telemetry until needed + from ddtrace.internal import telemetry + + try: + tags = { + "waf_version": version(), + "lib_language": "python", + } + if info and info.version: + tags["event_rules_version"] = info.version + telemetry.telemetry_writer.add_log("ERROR", msg, stack_trace=stack_trace, tags=tags) + except Exception: + log.warning("Error reporting ASM WAF logs metrics", exc_info=True) + + +def _set_waf_updates_metric(info): + # perf - avoid importing telemetry until needed + from ddtrace.internal import telemetry + from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC + + try: + if info and info.version: + tags = ( + ("event_rules_version", info.version), + ("waf_version", version()), + ) + else: + tags = (("waf_version", version()),) + + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_APPSEC, + "waf.updates", + 1.0, + tags=tags, + ) + except Exception: + log.warning("Error reporting ASM WAF updates metrics", exc_info=True) + + +def _set_waf_init_metric(info): + # perf - avoid importing telemetry until needed + from ddtrace.internal import telemetry + from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC + + try: + if info and info.version: + tags = ( + ("event_rules_version", info.version), + ("waf_version", version()), + ) + else: + tags = ( + ( + "waf_version", + version(), + ), + ) + + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_APPSEC, + "waf.init", + 1.0, + tags=tags, + ) + except Exception: + log.warning("Error reporting ASM WAF init metrics", exc_info=True) + + +def _set_waf_request_metrics(*args): + # perf - avoid importing telemetry until needed + from ddtrace.internal import telemetry + from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC + + try: + list_results, list_result_info, list_is_blocked = _asm_request_context.get_waf_results() or ([], [], []) + if 
any((list_results, list_result_info, list_is_blocked)): + is_blocked = any(list_is_blocked) + is_triggered = any((result.data for result in list_results)) + is_timeout = any((result.timeout for result in list_results)) + # TODO: enable it when Telemetry intake accepts this tag + # is_truncation = any((result.truncation for result in list_results)) + has_info = any(list_result_info) + + if has_info and list_result_info[0].version: + tags_request = ( + ( + "event_rules_version", + list_result_info[0].version, + ), + ("waf_version", version()), + ("rule_triggered", str(is_triggered).lower()), + ("request_blocked", str(is_blocked).lower()), + ("waf_timeout", str(is_timeout).lower()), + ) + else: + tags_request = ( + ("waf_version", version()), + ("rule_triggered", str(is_triggered).lower()), + ("request_blocked", str(is_blocked).lower()), + ("waf_timeout", str(is_timeout).lower()), + ) + + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_APPSEC, + "waf.requests", + 1.0, + tags=tags_request, + ) + except Exception: + log.warning("Error reporting ASM WAF requests metrics", exc_info=True) + finally: + _asm_request_context.reset_waf_results() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py new file mode 100644 index 0000000..c691486 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py @@ -0,0 +1,409 @@ +import dataclasses +import errno +import json +from json.decoder import JSONDecodeError +import os +import os.path +import traceback +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import Union + +from ddtrace.appsec import _asm_request_context +from ddtrace.appsec._capabilities import _appsec_rc_file_is_not_static +from ddtrace.appsec._constants import APPSEC +from ddtrace.appsec._constants import DEFAULT +from ddtrace.appsec._constants import SPAN_DATA_NAMES +from ddtrace.appsec._constants import WAF_ACTIONS +from ddtrace.appsec._constants import WAF_CONTEXT_NAMES +from ddtrace.appsec._constants import WAF_DATA_NAMES +from ddtrace.appsec._ddwaf.ddwaf_types import ddwaf_context_capsule +from ddtrace.appsec._metrics import _set_waf_error_metric +from ddtrace.appsec._metrics import _set_waf_init_metric +from ddtrace.appsec._metrics import _set_waf_request_metrics +from ddtrace.appsec._metrics import _set_waf_updates_metric +from ddtrace.appsec._trace_utils import _asm_manual_keep +from ddtrace.constants import ORIGIN_KEY +from ddtrace.constants import RUNTIME_FAMILY +from ddtrace.ext import SpanTypes +from ddtrace.internal import core +from ddtrace.internal.logger import get_logger +from ddtrace.internal.processor import SpanProcessor +from ddtrace.internal.rate_limiter import RateLimiter +from ddtrace.settings.asm import config as asm_config +from ddtrace.span import Span + + +log = get_logger(__name__) + + +def _transform_headers(data: Union[Dict[str, str], List[Tuple[str, str]]]) -> Dict[str, Union[str, List[str]]]: + normalized: Dict[str, Union[str, List[str]]] = {} + headers = data if isinstance(data, list) else data.items() + for header, value in headers: + header = header.lower() + if header in ("cookie", "set-cookie"): + continue + if header in normalized: # if a header with the same lowercase name already exists, let's make it an array + existing = normalized[header] + if isinstance(existing, list): + existing.append(value) + else: + 
normalized[header] = [existing, value] + else: + normalized[header] = value + return normalized + + +def get_rules() -> str: + return os.getenv("DD_APPSEC_RULES", default=DEFAULT.RULES) + + +def get_appsec_obfuscation_parameter_key_regexp() -> bytes: + return os.getenvb(b"DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP", DEFAULT.APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP) + + +def get_appsec_obfuscation_parameter_value_regexp() -> bytes: + return os.getenvb( + b"DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP", DEFAULT.APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP + ) + + +_COLLECTED_REQUEST_HEADERS = { + "accept", + "accept-encoding", + "accept-language", + "cf-connecting-ip", + "cf-connecting-ipv6", + "content-encoding", + "content-language", + "content-length", + "content-type", + "fastly-client-ip", + "forwarded", + "forwarded-for", + "host", + "true-client-ip", + "user-agent", + "via", + "x-client-ip", + "x-cluster-client-ip", + "x-forwarded", + "x-forwarded-for", + "x-real-ip", +} + + +def _set_headers(span: Span, headers: Any, kind: str) -> None: + from ddtrace.contrib.trace_utils import _normalize_tag_name + + for k in headers: + if isinstance(k, tuple): + key, value = k + else: + key, value = k, headers[k] + if key.lower() in _COLLECTED_REQUEST_HEADERS: + # since the header value can be a list, use `set_tag()` to ensure it is converted to a string + span.set_tag(_normalize_tag_name(kind, key), value) + + +def _get_rate_limiter() -> RateLimiter: + return RateLimiter(int(os.getenv("DD_APPSEC_TRACE_RATE_LIMIT", DEFAULT.TRACE_RATE_LIMIT))) + + +@dataclasses.dataclass(eq=False) +class AppSecSpanProcessor(SpanProcessor): + rules: str = dataclasses.field(default_factory=get_rules) + obfuscation_parameter_key_regexp: bytes = dataclasses.field( + default_factory=get_appsec_obfuscation_parameter_key_regexp + ) + obfuscation_parameter_value_regexp: bytes = dataclasses.field( + default_factory=get_appsec_obfuscation_parameter_value_regexp + ) + _addresses_to_keep: Set[str] = dataclasses.field(default_factory=set) + _rate_limiter: RateLimiter = dataclasses.field(default_factory=_get_rate_limiter) + + @property + def enabled(self): + return self._ddwaf is not None + + def __post_init__(self) -> None: + from ddtrace.appsec._ddwaf import DDWaf + + try: + with open(self.rules, "r") as f: + rules = json.load(f) + self._update_actions(rules) + + except EnvironmentError as err: + if err.errno == errno.ENOENT: + log.error("[DDAS-0001-03] ASM could not read the rule file %s. Reason: file does not exist", self.rules) + else: + # TODO: try to log reasons + log.error("[DDAS-0001-03] ASM could not read the rule file %s.", self.rules) + raise + except JSONDecodeError: + log.error("[DDAS-0001-03] ASM could not read the rule file %s. Reason: invalid JSON file", self.rules) + raise + except Exception: + # TODO: try to log reasons + log.error("[DDAS-0001-03] ASM could not read the rule file %s.", self.rules) + raise + try: + self._ddwaf = DDWaf(rules, self.obfuscation_parameter_key_regexp, self.obfuscation_parameter_value_regexp) + if not self._ddwaf._handle or self._ddwaf.info.failed: + stack_trace = "DDWAF.__init__: invalid rules\n ruleset: %s\nloaded:%s\nerrors:%s\n" % ( + rules, + self._ddwaf.info.loaded, + self._ddwaf.info.errors, + ) + _set_waf_error_metric("WAF init error. 
Invalid rules", stack_trace, self._ddwaf.info) + + _set_waf_init_metric(self._ddwaf.info) + except ValueError: + # Partial of DDAS-0005-00 + log.warning("[DDAS-0005-00] WAF initialization failed") + raise + self._update_required() + + def _update_required(self): + self._addresses_to_keep.clear() + for address in self._ddwaf.required_data: + self._addresses_to_keep.add(address) + # we always need the request headers + self._addresses_to_keep.add(WAF_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES) + # we always need the response headers + self._addresses_to_keep.add(WAF_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES) + + def _update_actions(self, rules: Dict[str, Any]) -> None: + new_actions = rules.get("actions", []) + self._actions: Dict[str, Dict[str, Any]] = WAF_ACTIONS.DEFAULT_ACTIONS + for a in new_actions: + self._actions[a.get(WAF_ACTIONS.ID, None)] = a + if "actions" in rules: + del rules["actions"] + + def _update_rules(self, new_rules: Dict[str, Any]) -> bool: + result = False + if not _appsec_rc_file_is_not_static(): + return result + try: + self._update_actions(new_rules) + result = self._ddwaf.update_rules(new_rules) + _set_waf_updates_metric(self._ddwaf.info) + except TypeError: + error_msg = "Error updating ASM rules. TypeError exception " + log.debug(error_msg, exc_info=True) + _set_waf_error_metric(error_msg, traceback.format_exc(), self._ddwaf.info) + if not result: + error_msg = "Error updating ASM rules. Invalid rules" + log.debug(error_msg) + _set_waf_error_metric(error_msg, "", self._ddwaf.info) + self._update_required() + return result + + def on_span_start(self, span: Span) -> None: + from ddtrace.contrib import trace_utils + + if span.span_type != SpanTypes.WEB: + return + + if _asm_request_context.free_context_available(): + _asm_request_context.register(span) + else: + new_asm_context = _asm_request_context.asm_request_context_manager() + new_asm_context.__enter__() + _asm_request_context.register(span, new_asm_context) + + ctx = self._ddwaf._at_request_start() + + peer_ip = _asm_request_context.get_ip() + headers = _asm_request_context.get_headers() + headers_case_sensitive = _asm_request_context.get_headers_case_sensitive() + + span.set_metric(APPSEC.ENABLED, 1.0) + span.set_tag_str(RUNTIME_FAMILY, "python") + + def waf_callable(custom_data=None): + return self._waf_action(span._local_root or span, ctx, custom_data) + + _asm_request_context.set_waf_callback(waf_callable) + _asm_request_context.add_context_callback(_set_waf_request_metrics) + if headers is not None: + _asm_request_context.set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, headers, span) + _asm_request_context.set_waf_address( + SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, headers_case_sensitive, span + ) + if not peer_ip: + return + + ip = trace_utils._get_request_header_client_ip(headers, peer_ip, headers_case_sensitive) + # Save the IP and headers in the context so the retrieval can be skipped later + _asm_request_context.set_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP, ip, span) + if ip and self._is_needed(WAF_DATA_NAMES.REQUEST_HTTP_IP): + log.debug("[DDAS-001-00] Executing ASM WAF for checking IP block") + # _asm_request_context.call_callback() + _asm_request_context.call_waf_callback({"REQUEST_HTTP_IP": None}) + + def _waf_action( + self, span: Span, ctx: ddwaf_context_capsule, custom_data: Optional[Dict[str, Any]] = None + ) -> Optional[Dict[str, Any]]: + """ + Call the `WAF` with the given parameters. 
If `custom_data_names` is specified as + a list of `(WAF_NAME, WAF_STR)` tuples specifying what values of the `WAF_DATA_NAMES` + constant class will be checked. Else, it will check all the possible values + from `WAF_DATA_NAMES`. + + If `custom_data_values` is specified, it must be a dictionary where the key is the + `WAF_DATA_NAMES` key and the value the custom value. If not used, the values will + be retrieved from the `core`. This can be used when you don't want to store + the value in the `core` before checking the `WAF`. + """ + if span.span_type != SpanTypes.WEB: + return None + + if core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span) or core.get_item(WAF_CONTEXT_NAMES.BLOCKED): + # We still must run the waf if we need to extract schemas for API SECURITY + if not custom_data or not custom_data.get("PROCESSOR_SETTINGS", {}).get("extract-schema", False): + return None + + data = {} + iter_data = [(key, WAF_DATA_NAMES[key]) for key in custom_data] if custom_data is not None else WAF_DATA_NAMES + data_already_sent = _asm_request_context.get_data_sent() + if data_already_sent is None: + data_already_sent = set() + + # type ignore because mypy seems to not detect that both results of the if + # above can iter if not None + force_keys = custom_data.get("PROCESSOR_SETTINGS", {}).get("extract-schema", False) if custom_data else False + for key, waf_name in iter_data: # type: ignore[attr-defined] + if key in data_already_sent: + continue + if self._is_needed(waf_name) or force_keys: + value = None + if custom_data is not None and custom_data.get(key) is not None: + value = custom_data.get(key) + elif key in SPAN_DATA_NAMES: + value = _asm_request_context.get_value("waf_addresses", SPAN_DATA_NAMES[key]) + if value is not None: + data[waf_name] = _transform_headers(value) if key.endswith("HEADERS_NO_COOKIES") else value + data_already_sent.add(key) + log.debug("[action] WAF got value %s", SPAN_DATA_NAMES.get(key, key)) + + waf_results = self._ddwaf.run(ctx, data, asm_config._waf_timeout) + if waf_results and waf_results.data: + log.debug("[DDAS-011-00] ASM In-App WAF returned: %s. Timeout %s", waf_results.data, waf_results.timeout) + + for action in waf_results.actions: + action_type = self._actions.get(action, {}).get(WAF_ACTIONS.TYPE, None) + if action_type == WAF_ACTIONS.BLOCK_ACTION: + blocked = self._actions[action][WAF_ACTIONS.PARAMETERS] + break + elif action_type == WAF_ACTIONS.REDIRECT_ACTION: + blocked = self._actions[action][WAF_ACTIONS.PARAMETERS] + location = blocked.get("location", "") + if not location: + blocked = WAF_ACTIONS.DEFAULT_PARAMETERS + break + status_code = str(blocked.get("status_code", "")) + if not (status_code[:3].isdigit() and status_code.startswith("3")): + blocked["status_code"] = "303" + blocked[WAF_ACTIONS.TYPE] = "none" + break + else: + blocked = {} + _asm_request_context.set_waf_results(waf_results, self._ddwaf.info, bool(blocked)) + if blocked: + core.set_item(WAF_CONTEXT_NAMES.BLOCKED, blocked, span=span) + core.set_item(WAF_CONTEXT_NAMES.BLOCKED, blocked) + + try: + info = self._ddwaf.info + if info.errors: + errors = json.dumps(info.errors) + span.set_tag_str(APPSEC.EVENT_RULE_ERRORS, errors) + _set_waf_error_metric("WAF run. Error", errors, info) + if waf_results.timeout: + _set_waf_error_metric("WAF run. 
Timeout errors", "", info) + span.set_tag_str(APPSEC.EVENT_RULE_VERSION, info.version) + from ddtrace.appsec._ddwaf import version + + span.set_tag_str(APPSEC.WAF_VERSION, version()) + + def update_metric(name, value): + old_value = span.get_metric(name) + if old_value is None: + old_value = 0.0 + span.set_metric(name, value + old_value) + + span.set_metric(APPSEC.EVENT_RULE_LOADED, info.loaded) + span.set_metric(APPSEC.EVENT_RULE_ERROR_COUNT, info.failed) + if waf_results: + update_metric(APPSEC.WAF_DURATION, waf_results.runtime) + update_metric(APPSEC.WAF_DURATION_EXT, waf_results.total_runtime) + except (JSONDecodeError, ValueError): + log.warning("Error parsing data ASM In-App WAF metrics report %s", info.errors) + except Exception: + log.warning("Error executing ASM In-App WAF metrics report: %s", exc_info=True) + + if (waf_results and waf_results.data) or blocked: + # We run the rate limiter only if there is an attack, its goal is to limit the number of collected asm + # events + allowed = self._rate_limiter.is_allowed(span.start_ns) + if not allowed: + # TODO: add metric collection to keep an eye (when it's name is clarified) + return waf_results.derivatives + + for id_tag, kind in [ + (SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, "request"), + (SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, "response"), + ]: + headers_req = _asm_request_context.get_waf_address(id_tag) + if headers_req: + _set_headers(span, headers_req, kind=kind) + + _asm_request_context.store_waf_results_data(waf_results.data) + if blocked: + span.set_tag(APPSEC.BLOCKED, "true") + _set_waf_request_metrics() + + # Partial DDAS-011-00 + span.set_tag_str(APPSEC.EVENT, "true") + + remote_ip = _asm_request_context.get_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP) + if remote_ip: + # Note that if the ip collection is disabled by the env var + # DD_TRACE_CLIENT_IP_HEADER_DISABLED actor.ip won't be sent + span.set_tag_str("actor.ip", remote_ip) + + # Right now, we overwrite any value that could be already there. We need to reconsider when ASM/AppSec's + # specs are updated. 
+ _asm_manual_keep(span) + if span.get_tag(ORIGIN_KEY) is None: + span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE) + return waf_results.derivatives + + def _is_needed(self, address: str) -> bool: + return address in self._addresses_to_keep + + def on_span_finish(self, span: Span) -> None: + try: + if span.span_type == SpanTypes.WEB: + # Force to set respond headers at the end + headers_req = core.get_item(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, span=span) + if headers_req: + _set_headers(span, headers_req, kind="response") + + # this call is only necessary for tests or frameworks that are not using blocking + if span.get_tag(APPSEC.JSON) is None and _asm_request_context.in_context(): + log.debug("metrics waf call") + _asm_request_context.call_waf_callback() + + self._ddwaf._at_request_end() + finally: + # release asm context if it was created by the span + _asm_request_context.unregister(span) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py new file mode 100644 index 0000000..84d0af7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 + +from sys import version_info + + +if version_info < (3, 7, 0): + from .module_names_py36 import STDLIB_MODULE_NAMES +elif version_info < (3, 8, 0): + from .module_names_py37 import STDLIB_MODULE_NAMES +elif version_info < (3, 9, 0): + from .module_names_py38 import STDLIB_MODULE_NAMES +elif version_info < (3, 10, 0): + from .module_names_py39 import STDLIB_MODULE_NAMES +elif version_info < (3, 11, 0): + from .module_names_py310 import STDLIB_MODULE_NAMES +else: + from .module_names_py311 import STDLIB_MODULE_NAMES + + +def _stdlib_for_python_version(): # type: () -> set + return STDLIB_MODULE_NAMES diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py new file mode 100644 index 0000000..338f807 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py @@ -0,0 +1,218 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "binhex", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "contextvars", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "dataclasses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "graphlib", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "idlelib", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + 
"itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + "plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", + "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + "time", + "timeit", + "tkinter", + "token", + "tokenize", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", + "zoneinfo", +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py new file mode 100644 index 0000000..47030a1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py @@ -0,0 +1,218 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "contextvars", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "dataclasses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "graphlib", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "idlelib", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + "itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + 
"plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", + "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + "time", + "timeit", + "tkinter", + "token", + "tokenize", + "tomllib", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", + "zoneinfo", +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py new file mode 100644 index 0000000..c4eb7c6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py @@ -0,0 +1,220 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_dummy_thread", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "binhex", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "dummy_threading", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "formatter", + "fpectl", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + "itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "macpath", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "parser", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + "plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", 
+ "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symbol", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + "time", + "timeit", + "tkinter", + "token", + "tokenize", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py new file mode 100644 index 0000000..0f989b2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py @@ -0,0 +1,221 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_dummy_thread", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "binhex", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "contextvars", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "dataclasses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "dummy_threading", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "formatter", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + "itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "macpath", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "parser", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + "plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", + "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symbol", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + 
"time", + "timeit", + "tkinter", + "token", + "tokenize", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py new file mode 100644 index 0000000..1e5be0b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py @@ -0,0 +1,220 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_dummy_thread", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "binhex", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "contextvars", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "dataclasses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "dummy_threading", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "formatter", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + "itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "parser", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + "plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", + "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symbol", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + "time", + "timeit", + "tkinter", + "token", + "tokenize", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", 
+} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py new file mode 100644 index 0000000..6bdc900 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py @@ -0,0 +1,220 @@ +STDLIB_MODULE_NAMES = { + "__future__", + "_ast", + "_compression", + "_thread", + "abc", + "aifc", + "argparse", + "array", + "ast", + "asynchat", + "asyncio", + "asyncore", + "atexit", + "audioop", + "base64", + "bdb", + "binascii", + "binhex", + "bisect", + "builtins", + "bz2", + "cProfile", + "calendar", + "cgi", + "cgitb", + "chunk", + "cmath", + "cmd", + "code", + "codecs", + "codeop", + "collections", + "colorsys", + "compileall", + "concurrent", + "configparser", + "contextlib", + "contextvars", + "copy", + "copyreg", + "crypt", + "csv", + "ctypes", + "curses", + "dataclasses", + "datetime", + "dbm", + "decimal", + "difflib", + "dis", + "distutils", + "doctest", + "email", + "encodings", + "ensurepip", + "enum", + "errno", + "faulthandler", + "fcntl", + "filecmp", + "fileinput", + "fnmatch", + "formatter", + "fractions", + "ftplib", + "functools", + "gc", + "getopt", + "getpass", + "gettext", + "glob", + "graphlib", + "grp", + "gzip", + "hashlib", + "heapq", + "hmac", + "html", + "http", + "imaplib", + "imghdr", + "imp", + "importlib", + "inspect", + "io", + "ipaddress", + "itertools", + "json", + "keyword", + "lib2to3", + "linecache", + "locale", + "logging", + "lzma", + "mailbox", + "mailcap", + "marshal", + "math", + "mimetypes", + "mmap", + "modulefinder", + "msilib", + "msvcrt", + "multiprocessing", + "netrc", + "nis", + "nntplib", + "ntpath", + "numbers", + "opcode", + "operator", + "optparse", + "os", + "ossaudiodev", + "parser", + "pathlib", + "pdb", + "pickle", + "pickletools", + "pipes", + "pkgutil", + "platform", + "plistlib", + "poplib", + "posix", + "posixpath", + "pprint", + "profile", + "pstats", + "pty", + "pwd", + "py_compile", + "pyclbr", + "pydoc", + "queue", + "quopri", + "random", + "re", + "readline", + "reprlib", + "resource", + "rlcompleter", + "runpy", + "sched", + "secrets", + "select", + "selectors", + "shelve", + "shlex", + "shutil", + "signal", + "site", + "smtpd", + "smtplib", + "sndhdr", + "socket", + "socketserver", + "spwd", + "sqlite3", + "sre", + "sre_compile", + "sre_constants", + "sre_parse", + "ssl", + "stat", + "statistics", + "string", + "stringprep", + "struct", + "subprocess", + "sunau", + "symbol", + "symtable", + "sys", + "sysconfig", + "syslog", + "tabnanny", + "tarfile", + "telnetlib", + "tempfile", + "termios", + "test", + "textwrap", + "threading", + "time", + "timeit", + "tkinter", + "token", + "tokenize", + "trace", + "traceback", + "tracemalloc", + "tty", + "turtle", + "turtledemo", + "types", + "typing", + "unicodedata", + "unittest", + "urllib", + "uu", + "uuid", + "venv", + "warnings", + "wave", + "weakref", + "webbrowser", + "winreg", + "winsound", + "wsgiref", + "xdrlib", + "xml", + "xmlrpc", + "zipapp", + "zipfile", + "zipimport", + "zlib", + "zoneinfo", +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py new file mode 100644 index 0000000..a8a7162 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +import json +import os +from typing import Any +from typing import Dict 
+from typing import Mapping +from typing import Optional + +from ddtrace import Tracer +from ddtrace import config +from ddtrace.appsec._capabilities import _appsec_rc_file_is_not_static +from ddtrace.appsec._capabilities import _asm_feature_is_required +from ddtrace.appsec._constants import PRODUCTS +from ddtrace.appsec._utils import _appsec_rc_features_is_enabled +from ddtrace.constants import APPSEC_ENV +from ddtrace.internal import forksafe +from ddtrace.internal.logger import get_logger +from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector +from ddtrace.internal.remoteconfig._publishers import RemoteConfigPublisherMergeDicts +from ddtrace.internal.remoteconfig._pubsub import PubSub +from ddtrace.internal.remoteconfig._subscribers import RemoteConfigSubscriber +from ddtrace.internal.remoteconfig.worker import remoteconfig_poller +from ddtrace.internal.utils.formats import asbool +from ddtrace.settings.asm import config as asm_config + + +log = get_logger(__name__) + +APPSEC_PRODUCTS = [PRODUCTS.ASM_FEATURES, PRODUCTS.ASM, PRODUCTS.ASM_DATA, PRODUCTS.ASM_DD] + + +class AppSecRC(PubSub): + __subscriber_class__ = RemoteConfigSubscriber + __publisher_class__ = RemoteConfigPublisherMergeDicts + __shared_data__ = PublisherSubscriberConnector() + + def __init__(self, _preprocess_results, callback): + self._publisher = self.__publisher_class__(self.__shared_data__, _preprocess_results) + self._subscriber = self.__subscriber_class__(self.__shared_data__, callback, "ASM") + + +def _forksafe_appsec_rc(): + remoteconfig_poller.start_subscribers_by_product(APPSEC_PRODUCTS) + + +def enable_appsec_rc(test_tracer: Optional[Tracer] = None) -> None: + """Remote config will be used by ASM libraries to receive four different updates from the backend. + Each update has it’s own product: + - ASM_FEATURES product - To allow users enable or disable ASM remotely + - ASM product - To allow clients to activate or deactivate rules + - ASM_DD product - To allow the library to receive rules updates + - ASM_DATA product - To allow the library to receive list of blocked IPs and users + + If environment variable `DD_APPSEC_ENABLED` is not set, registering ASM_FEATURE can enable ASM remotely. If + it's set to true, we will register the rest of the products. 
+ + Parameters `test_tracer` and `start_subscribers` are needed for testing purposes + """ + # Import tracer here to avoid a circular import + if test_tracer is None: + from ddtrace import tracer + else: + tracer = test_tracer + + log.debug("[%s][P: %s] Register ASM Remote Config Callback", os.getpid(), os.getppid()) + asm_callback = ( + remoteconfig_poller.get_registered(PRODUCTS.ASM_FEATURES) + or remoteconfig_poller.get_registered(PRODUCTS.ASM) + or AppSecRC(_preprocess_results_appsec_1click_activation, _appsec_callback) + ) + + if _asm_feature_is_required(): + remoteconfig_poller.register(PRODUCTS.ASM_FEATURES, asm_callback) + + if tracer._asm_enabled and _appsec_rc_file_is_not_static(): + remoteconfig_poller.register(PRODUCTS.ASM_DATA, asm_callback) # IP Blocking + remoteconfig_poller.register(PRODUCTS.ASM, asm_callback) # Exclusion Filters & Custom Rules + remoteconfig_poller.register(PRODUCTS.ASM_DD, asm_callback) # DD Rules + + forksafe.register(_forksafe_appsec_rc) + + +def disable_appsec_rc(): + # only used to avoid data leaks between tests + for product_name in APPSEC_PRODUCTS: + remoteconfig_poller.unregister(product_name) + + +def _add_rules_to_list(features: Mapping[str, Any], feature: str, message: str, ruleset: Dict[str, Any]) -> None: + rules = features.get(feature, None) + if rules is not None: + try: + if ruleset.get(feature) is None: + ruleset[feature] = [] + ruleset[feature] += rules + log.debug("Reloading Appsec %s: %s", message, str(rules)[:20]) + except json.JSONDecodeError: + log.error("ERROR Appsec %s: invalid JSON content from remote configuration", message) + + +def _appsec_callback(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None: + config = features.get("config", {}) + _appsec_1click_activation(config, test_tracer) + _appsec_api_security_settings(config, test_tracer) + _appsec_rules_data(config, test_tracer) + + +def _appsec_rules_data(features: Mapping[str, Any], test_tracer: Optional[Tracer]) -> bool: + # Tracer is a parameter for testing propose + # Import tracer here to avoid a circular import + if test_tracer is None: + from ddtrace import tracer + else: + tracer = test_tracer + + if features and tracer._appsec_processor: + ruleset = {} # type: dict[str, Optional[list[Any]]] + _add_rules_to_list(features, "rules_data", "rules data", ruleset) + _add_rules_to_list(features, "custom_rules", "custom rules", ruleset) + _add_rules_to_list(features, "rules", "Datadog rules", ruleset) + _add_rules_to_list(features, "exclusions", "exclusion filters", ruleset) + _add_rules_to_list(features, "rules_override", "rules override", ruleset) + _add_rules_to_list(features, "scanners", "scanners", ruleset) + _add_rules_to_list(features, "processors", "processors", ruleset) + if ruleset: + return tracer._appsec_processor._update_rules({k: v for k, v in ruleset.items() if v is not None}) + + return False + + +def _preprocess_results_appsec_1click_activation( + features: Dict[str, Any], pubsub_instance: Optional[PubSub] = None +) -> Dict[str, Any]: + """The main process has the responsibility to enable or disable the ASM products. The child processes don't + care about that, the children only need to know about payload content. 
+ """ + if _appsec_rc_features_is_enabled(): + log.debug( + "[%s][P: %s] Receiving ASM Remote Configuration ASM_FEATURES: %s", + os.getpid(), + os.getppid(), + features.get("asm", {}), + ) + + rc_asm_enabled = None + if features is not None: + if APPSEC_ENV in os.environ: + rc_asm_enabled = asbool(os.environ.get(APPSEC_ENV)) + elif features == {}: + rc_asm_enabled = False + else: + asm_features = features.get("asm", {}) + if asm_features is not None: + rc_asm_enabled = asm_features.get("enabled") + log.debug( + "[%s][P: %s] ASM Remote Configuration ASM_FEATURES. Appsec enabled: %s", + os.getpid(), + os.getppid(), + rc_asm_enabled, + ) + if rc_asm_enabled is not None: + from ddtrace.appsec._constants import PRODUCTS + + if pubsub_instance is None: + pubsub_instance = ( + remoteconfig_poller.get_registered(PRODUCTS.ASM_FEATURES) + or remoteconfig_poller.get_registered(PRODUCTS.ASM) + or AppSecRC(_preprocess_results_appsec_1click_activation, _appsec_callback) + ) + + if rc_asm_enabled and _appsec_rc_file_is_not_static(): + remoteconfig_poller.register(PRODUCTS.ASM_DATA, pubsub_instance) # IP Blocking + remoteconfig_poller.register(PRODUCTS.ASM, pubsub_instance) # Exclusion Filters & Custom Rules + remoteconfig_poller.register(PRODUCTS.ASM_DD, pubsub_instance) # DD Rules + else: + remoteconfig_poller.unregister(PRODUCTS.ASM_DATA) + remoteconfig_poller.unregister(PRODUCTS.ASM) + remoteconfig_poller.unregister(PRODUCTS.ASM_DD) + + features["asm"] = {"enabled": rc_asm_enabled} + return features + + +def _appsec_1click_activation(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None: + """This callback updates appsec enabled in tracer and config instances following this logic: + ``` + | DD_APPSEC_ENABLED | RC Enabled | Result | + |-------------------|------------|----------| + | | | Disabled | + | | false | Disabled | + | | true | Enabled | + | false | | Disabled | + | true | | Enabled | + | false | true | Disabled | + | true | true | Enabled | + ``` + """ + if _appsec_rc_features_is_enabled(): + # Tracer is a parameter for testing propose + # Import tracer here to avoid a circular import + if test_tracer is None: + from ddtrace import tracer + else: + tracer = test_tracer + + log.debug("[%s][P: %s] ASM_FEATURES: %s", os.getpid(), os.getppid(), str(features)[:100]) + if APPSEC_ENV in os.environ: + # no one click activation if var env is set + rc_asm_enabled = asbool(os.environ.get(APPSEC_ENV)) + elif features is False: + rc_asm_enabled = False + else: + rc_asm_enabled = features.get("asm", {}).get("enabled", False) + + log.debug("APPSEC_ENABLED: %s", rc_asm_enabled) + if rc_asm_enabled is not None: + log.debug( + "[%s][P: %s] Updating ASM Remote Configuration ASM_FEATURES: %s", + os.getpid(), + os.getppid(), + rc_asm_enabled, + ) + + if rc_asm_enabled: + if not tracer._asm_enabled: + tracer.configure(appsec_enabled=True) + else: + asm_config._asm_enabled = True + else: + if tracer._asm_enabled: + tracer.configure(appsec_enabled=False) + else: + asm_config._asm_enabled = False + + +def _appsec_api_security_settings(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None: + """ + Update API Security settings from remote config + Actually: Update sample rate + """ + if config._remote_config_enabled and asm_config._api_security_enabled: + rc_api_security_sample_rate = features.get("api_security", {}).get("request_sample_rate", None) + if rc_api_security_sample_rate is not None: + try: + sample_rate = max(0.0, min(1.0, float(rc_api_security_sample_rate))) + 
asm_config._api_security_sample_rate = sample_rate + except BaseException: # nosec + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py new file mode 100644 index 0000000..a09b8c8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py @@ -0,0 +1,346 @@ +from typing import Optional + +from ddtrace import Span +from ddtrace import Tracer +from ddtrace import constants +from ddtrace.appsec import _asm_request_context +from ddtrace.appsec._constants import APPSEC +from ddtrace.appsec._constants import LOGIN_EVENTS_MODE +from ddtrace.appsec._constants import WAF_CONTEXT_NAMES +from ddtrace.appsec._utils import _safe_userid +from ddtrace.contrib.trace_utils import set_user +from ddtrace.ext import SpanTypes +from ddtrace.ext import user +from ddtrace.internal import core +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +log = get_logger(__name__) + + +def _asm_manual_keep(span: Span) -> None: + from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY + from ddtrace.internal.sampling import SamplingMechanism + + span.set_tag(constants.MANUAL_KEEP_KEY) + # set decision maker to ASM = -5 + span.set_tag_str(SAMPLING_DECISION_TRACE_TAG_KEY, "-%d" % SamplingMechanism.APPSEC) + + +def _track_user_login_common( + tracer: Tracer, + success: bool, + metadata: Optional[dict] = None, + login_events_mode: str = LOGIN_EVENTS_MODE.SDK, + login: Optional[str] = None, + name: Optional[str] = None, + email: Optional[str] = None, + span: Optional[Span] = None, +) -> Optional[Span]: + if span is None: + span = tracer.current_root_span() + if span: + success_str = "success" if success else "failure" + tag_prefix = "%s.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX, success_str) + + if success: + span.set_tag_str(APPSEC.USER_LOGIN_EVENT_SUCCESS_TRACK, "true") + else: + span.set_tag_str(APPSEC.USER_LOGIN_EVENT_FAILURE_TRACK, "true") + + # This is used to mark if the call was done from the SDK of the automatic login events + if login_events_mode == LOGIN_EVENTS_MODE.SDK: + span.set_tag_str("%s.sdk" % tag_prefix, "true") + + mode_tag = APPSEC.AUTO_LOGIN_EVENTS_SUCCESS_MODE if success else APPSEC.AUTO_LOGIN_EVENTS_FAILURE_MODE + auto_tag_mode = ( + login_events_mode if login_events_mode != LOGIN_EVENTS_MODE.SDK else asm_config._automatic_login_events_mode + ) + span.set_tag_str(mode_tag, auto_tag_mode) + + tag_metadata_prefix = "%s.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, success_str) + if metadata is not None: + for k, v in metadata.items(): + span.set_tag_str("%s.%s" % (tag_metadata_prefix, k), str(v)) + + if login: + span.set_tag_str("%s.login" % tag_prefix, login) + + if email: + span.set_tag_str("%s.email" % tag_prefix, email) + + if name: + span.set_tag_str("%s.username" % tag_prefix, name) + + _asm_manual_keep(span) + return span + else: + log.warning( + "No root span in the current execution. Skipping track_user_success_login tags. 
" + "See https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/" + "?tab=set_user&code-lang=python for more information.", + ) + return None + + +def track_user_login_success_event( + tracer: Tracer, + user_id: str, + metadata: Optional[dict] = None, + login: Optional[str] = None, + name: Optional[str] = None, + email: Optional[str] = None, + scope: Optional[str] = None, + role: Optional[str] = None, + session_id: Optional[str] = None, + propagate: bool = False, + login_events_mode: str = LOGIN_EVENTS_MODE.SDK, + span: Optional[Span] = None, +) -> None: + """ + Add a new login success tracking event. The parameters after metadata (name, email, + scope, role, session_id, propagate) will be passed to the `set_user` function that will be called + by this one, see: + https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/#user-related-attributes + https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/?tab=set_tag&code-lang=python + + :param tracer: tracer instance to use + :param user_id: a string with the UserId + :param metadata: a dictionary with additional metadata information to be stored with the event + """ + + span = _track_user_login_common(tracer, True, metadata, login_events_mode, login, name, email, span) + if not span: + return + + if ( + user_id + and (login_events_mode not in (LOGIN_EVENTS_MODE.SDK, LOGIN_EVENTS_MODE.EXTENDED)) + and not asm_config._user_model_login_field + ): + user_id = _safe_userid(user_id) + + set_user(tracer, user_id, name, email, scope, role, session_id, propagate, span) + + +def track_user_login_failure_event( + tracer: Tracer, + user_id: Optional[str], + exists: bool, + metadata: Optional[dict] = None, + login_events_mode: str = LOGIN_EVENTS_MODE.SDK, +) -> None: + """ + Add a new login failure tracking event. + :param tracer: tracer instance to use + :param user_id: a string with the UserId if exists=True or the username if not + :param exists: a boolean indicating if the user exists in the system + :param metadata: a dictionary with additional metadata information to be stored with the event + """ + + if ( + user_id + and (login_events_mode not in (LOGIN_EVENTS_MODE.SDK, LOGIN_EVENTS_MODE.EXTENDED)) + and not asm_config._user_model_login_field + ): + user_id = _safe_userid(user_id) + + span = _track_user_login_common(tracer, False, metadata, login_events_mode) + if not span: + return + + if user_id: + span.set_tag_str("%s.failure.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, user.ID), str(user_id)) + exists_str = "true" if exists else "false" + span.set_tag_str("%s.failure.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, user.EXISTS), exists_str) + + +def track_user_signup_event( + tracer: Tracer, user_id: str, success: bool, login_events_mode: str = LOGIN_EVENTS_MODE.SDK +) -> None: + span = tracer.current_root_span() + if span: + success_str = "true" if success else "false" + span.set_tag_str(APPSEC.USER_SIGNUP_EVENT, success_str) + span.set_tag_str(user.ID, user_id) + _asm_manual_keep(span) + + # This is used to mark if the call was done from the SDK of the automatic login events + if login_events_mode == LOGIN_EVENTS_MODE.SDK: + span.set_tag_str("%s.sdk" % APPSEC.USER_SIGNUP_EVENT, "true") + else: + span.set_tag_str("%s.auto.mode" % APPSEC.USER_SIGNUP_EVENT, str(login_events_mode)) + + return + else: + log.warning( + "No root span in the current execution. Skipping track_user_signup tags. 
" + "See https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/" + "?tab=set_user&code-lang=python for more information.", + ) + + +def track_custom_event(tracer: Tracer, event_name: str, metadata: dict) -> None: + """ + Add a new custom tracking event. + + :param tracer: tracer instance to use + :param event_name: the name of the custom event + :param metadata: a dictionary with additional metadata information to be stored with the event + """ + + if not event_name: + log.warning("Empty event name given to track_custom_event. Skipping setting tags.") + return + + if not metadata: + log.warning("Empty metadata given to track_custom_event. Skipping setting tags.") + return + + span = tracer.current_root_span() + if not span: + log.warning( + "No root span in the current execution. Skipping track_custom_event tags. " + "See https://docs.datadoghq.com/security_platform/application_security" + "/setup_and_configure/" + "?tab=set_user&code-lang=python for more information.", + ) + return + + span.set_tag_str("%s.%s.track" % (APPSEC.CUSTOM_EVENT_PREFIX, event_name), "true") + + for k, v in metadata.items(): + if isinstance(v, bool): + str_v = "true" if v else "false" + else: + str_v = str(v) + span.set_tag_str("%s.%s.%s" % (APPSEC.CUSTOM_EVENT_PREFIX, event_name, k), str_v) + _asm_manual_keep(span) + + +def should_block_user(tracer: Tracer, userid: str) -> bool: + """ + Return true if the specified User ID should be blocked. + + :param tracer: tracer instance to use + :param userid: the ID of the user as registered by `set_user` + """ + + if not asm_config._asm_enabled: + log.warning( + "One click blocking of user ids is disabled. To use this feature please enable " + "Application Security Monitoring" + ) + return False + + # Early check to avoid calling the WAF if the request is already blocked + span = tracer.current_root_span() + if not span: + log.warning( + "No root span in the current execution. should_block_user returning False" + "See https://docs.datadoghq.com/security_platform/application_security" + "/setup_and_configure/" + "?tab=set_user&code-lang=python for more information.", + ) + return False + + if core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span): + return True + + _asm_request_context.call_waf_callback(custom_data={"REQUEST_USER_ID": str(userid)}) + return bool(core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span)) + + +def block_request() -> None: + """ + Block the current request and return a 403 Unauthorized response. If the response + has already been started to be sent this could not work. The behaviour of this function + could be different among frameworks, but it usually involves raising some kind of internal Exception, + meaning that if you capture the exception the request blocking could not work. + """ + if not asm_config._asm_enabled: + log.warning("block_request() is disabled. To use this feature please enable" "Application Security Monitoring") + return + + _asm_request_context.block_request() + + +def block_request_if_user_blocked(tracer: Tracer, userid: str) -> None: + """ + Check if the specified User ID should be blocked and if positive + block the current request using `block_request`. 
+ + :param tracer: tracer instance to use + :param userid: the ID of the user as registered by `set_user` + """ + if not asm_config._asm_enabled: + log.warning("should_block_user call requires ASM to be enabled") + return + + if should_block_user(tracer, userid): + span = tracer.current_root_span() + if span: + span.set_tag_str(user.ID, str(userid)) + _asm_request_context.block_request() + + +def _on_django_login( + pin, + request, + user, + mode, + info_retriever, +): + if not asm_config._asm_enabled: + return + + if user: + if str(user) != "AnonymousUser": + user_id, user_extra = info_retriever.get_user_info() + + with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH): + from ddtrace.contrib.django.compat import user_is_authenticated + + if user_is_authenticated(user): + session_key = getattr(request, "session_key", None) + track_user_login_success_event( + pin.tracer, + user_id=user_id, + session_id=session_key, + propagate=True, + login_events_mode=mode, + **user_extra, + ) + else: + # Login failed but the user exists + track_user_login_failure_event(pin.tracer, user_id=user_id, exists=True, login_events_mode=mode) + else: + # Login failed and the user is unknown + user_id = info_retriever.get_userid() + track_user_login_failure_event(pin.tracer, user_id=user_id, exists=False, login_events_mode=mode) + + +def _on_django_auth(result_user, mode, kwargs, pin, info_retriever): + if not asm_config._asm_enabled: + return True, result_user + + extended_userid_fields = info_retriever.possible_user_id_fields + info_retriever.possible_login_fields + userid_list = info_retriever.possible_user_id_fields if mode == "safe" else extended_userid_fields + + for possible_key in userid_list: + if possible_key in kwargs: + user_id = kwargs[possible_key] + break + else: + user_id = None + + if not result_user: + with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH): + track_user_login_failure_event(pin.tracer, user_id=user_id, exists=False, login_events_mode=mode) + + return False, None + + +core.on("django.login", _on_django_login) +core.on("django.auth", _on_django_auth, "user") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py new file mode 100644 index 0000000..8efd4cf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py @@ -0,0 +1,176 @@ +import os +import uuid + +from ddtrace.appsec._constants import API_SECURITY +from ddtrace.constants import APPSEC_ENV +from ddtrace.internal.compat import to_unicode +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.http import _get_blocked_template # noqa:F401 +from ddtrace.internal.utils.http import parse_form_multipart # noqa:F401 +from ddtrace.internal.utils.http import parse_form_params # noqa:F401 +from ddtrace.settings import _config as config +from ddtrace.settings.asm import config as asm_config + + +log = get_logger(__name__) + + +def parse_response_body(raw_body): + import json + + import xmltodict + + from ddtrace.appsec import _asm_request_context + from ddtrace.appsec._constants import SPAN_DATA_NAMES + from ddtrace.contrib.trace_utils import _get_header_value_case_insensitive + + if not raw_body: + return + + if isinstance(raw_body, dict): + return raw_body + + headers = _asm_request_context.get_waf_address(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES) + if not headers: + return + content_type = _get_header_value_case_insensitive( + {to_unicode(k): to_unicode(v) for k, v in 
dict(headers).items()}, + "content-type", + ) + if not content_type: + return + + def access_body(bd): + if isinstance(bd, list) and isinstance(bd[0], (str, bytes)): + bd = bd[0][:0].join(bd) + if getattr(bd, "decode", False): + bd = bd.decode("UTF-8", errors="ignore") + if len(bd) >= API_SECURITY.MAX_PAYLOAD_SIZE: + raise ValueError("response body larger than 16MB") + return bd + + req_body = None + try: + # TODO handle charset + if "json" in content_type: + req_body = json.loads(access_body(raw_body)) + elif "xml" in content_type: + req_body = xmltodict.parse(access_body(raw_body)) + else: + return + except BaseException: + log.debug("Failed to parse response body", exc_info=True) + else: + return req_body + + +def _appsec_rc_features_is_enabled() -> bool: + if config._remote_config_enabled: + return APPSEC_ENV not in os.environ + return False + + +def _appsec_apisec_features_is_active() -> bool: + return asm_config._asm_enabled and asm_config._api_security_enabled and asm_config._api_security_sample_rate > 0.0 + + +def _safe_userid(user_id): + try: + _ = int(user_id) + return user_id + except ValueError: + try: + _ = uuid.UUID(user_id) + return user_id + except ValueError: + pass + + return None + + +class _UserInfoRetriever: + def __init__(self, user): + self.user = user + self.possible_user_id_fields = ["pk", "id", "uid", "userid", "user_id", "PK", "ID", "UID", "USERID"] + self.possible_login_fields = ["username", "user", "login", "USERNAME", "USER", "LOGIN"] + self.possible_email_fields = ["email", "mail", "address", "EMAIL", "MAIL", "ADDRESS"] + self.possible_name_fields = [ + "name", + "fullname", + "full_name", + "first_name", + "NAME", + "FULLNAME", + "FULL_NAME", + "FIRST_NAME", + ] + + def find_in_user_model(self, possible_fields): + for field in possible_fields: + value = getattr(self.user, field, None) + if value: + return value + + return None # explicit to make clear it has a meaning + + def get_userid(self): + user_login = getattr(self.user, asm_config._user_model_login_field, None) + if user_login: + return user_login + + user_login = self.find_in_user_model(self.possible_user_id_fields) + if config._automatic_login_events_mode == "extended": + return user_login + + return _safe_userid(user_login) + + def get_username(self): + username = getattr(self.user, asm_config._user_model_name_field, None) + if username: + return username + + if hasattr(self.user, "get_username"): + try: + return self.user.get_username() + except Exception: + log.debug("User model get_username member produced an exception: ", exc_info=True) + + return self.find_in_user_model(self.possible_login_fields) + + def get_user_email(self): + email = getattr(self.user, asm_config._user_model_email_field, None) + if email: + return email + + return self.find_in_user_model(self.possible_email_fields) + + def get_name(self): + name = getattr(self.user, asm_config._user_model_name_field, None) + if name: + return name + + return self.find_in_user_model(self.possible_name_fields) + + def get_user_info(self): + """ + In safe mode, try to get the user id from the user object. + In extended mode, try to also get the username (which will be the returned user_id), + email and name. 
+ """ + user_extra_info = {} + + user_id = self.get_userid() + if asm_config._automatic_login_events_mode == "extended": + if not user_id: + user_id = self.find_in_user_model(self.possible_user_id_fields) + + user_extra_info = { + "login": self.get_username(), + "email": self.get_user_email(), + "name": self.get_name(), + } + + if not user_id: + return None, {} + + return user_id, user_extra_info diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py new file mode 100644 index 0000000..d109e02 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py @@ -0,0 +1 @@ +from ddtrace.appsec._iast import ddtrace_iast_flask_patch # noqa: F401 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json new file mode 100644 index 0000000..d572c00 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json @@ -0,0 +1,9320 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.10.0" + }, + "rules": [ + { + "id": "blk-001-001", + "name": "Block IP Addresses", + "tags": { + "type": "block_ip", + "category": "security_response" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "http.client_ip" + } + ], + "data": "blocked_ips" + }, + "operator": "ip_match" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + }, + { + "id": "blk-001-002", + "name": "Block User Addresses", + "tags": { + "type": "block_user", + "category": "security_response" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "usr.id" + } + ], + "data": "blocked_users" + }, + "operator": "exact_match" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + }, + { + "id": "crs-913-110", + "name": "Acunetix", + "tags": { + "type": "commercial_scanner", + "crs_id": "913110", + "category": "attack_attempt", + "tool_name": "Acunetix", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "list": [ + "acunetix-product", + "(acunetix web vulnerability scanner", + "acunetix-scanning-agreement", + "acunetix-user-agreement", + "md5(acunetix_wvs_security_test)" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-913-120", + "name": "Known security scanner filename/argument", + "tags": { + "type": "security_scanner", + "crs_id": "913120", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "/.adsensepostnottherenonobook", + "/hello.html", + "/actsensepostnottherenonotive", + "/acunetix-wvs-test-for-some-inexistent-file", + "/antidisestablishmentarianism", + "/appscan_fingerprint/mac_address", + "/arachni-", + "/cybercop", + "/nessus_is_probing_you_", + "/nessustest", + "/netsparker-", + "/rfiinc.txt", + "/thereisnowaythat-you-canbethere", + "/w3af/remotefileinclude.html", + "appscan_fingerprint", + "w00tw00t.at.isc.sans.dfind", + "w00tw00t.at.blackhats.romanian.anti-sec" + ] + }, + 
"operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-920-260", + "name": "Unicode Full/Half Width Abuse Attack Attempt", + "tags": { + "type": "http_protocol_violation", + "crs_id": "920260", + "category": "attack_attempt", + "cwe": "176", + "capec": "1000/255/153/267/71", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "\\%u[fF]{2}[0-9a-fA-F]{2}", + "options": { + "case_sensitive": true, + "min_length": 6 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-921-110", + "name": "HTTP Request Smuggling Attack", + "tags": { + "type": "http_protocol_violation", + "crs_id": "921110", + "category": "attack_attempt", + "cwe": "444", + "capec": "1000/210/272/220/33" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + } + ], + "regex": "(?:get|post|head|options|connect|put|delete|trace|track|patch|propfind|propatch|mkcol|copy|move|lock|unlock)\\s+[^\\s]+\\s+http/\\d", + "options": { + "case_sensitive": true, + "min_length": 12 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-921-160", + "name": "HTTP Header Injection Attack via payload (CR/LF and header-name detected)", + "tags": { + "type": "http_protocol_violation", + "crs_id": "921160", + "category": "attack_attempt", + "cwe": "113", + "capec": "1000/210/272/220/105" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.path_params" + } + ], + "regex": "[\\n\\r]+(?:refresh|(?:set-)?cookie|(?:x-)?(?:forwarded-(?:for|host|server)|via|remote-ip|remote-addr|originating-IP))\\s*:", + "options": { + "case_sensitive": true, + "min_length": 3 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-930-100", + "name": "Obfuscated Path Traversal Attack (/../)", + "tags": { + "type": "lfi", + "crs_id": "930100", + "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + }, + { + "address": "server.request.headers.no_cookies" + } + ], + "regex": "(?:%(?:c(?:0%(?:[2aq]f|5c|9v)|1%(?:[19p]c|8s|af))|2(?:5(?:c(?:0%25af|1%259c)|2f|5c)|%46|f)|(?:(?:f(?:8%8)?0%8|e)0%80%a|bg%q)f|%3(?:2(?:%(?:%6|4)6|F)|5%%63)|u(?:221[56]|002f|EFC8|F025)|1u|5c)|0x(?:2f|5c)|\\/|\\x5c)(?:%(?:(?:f(?:(?:c%80|8)%8)?0%8|e)0%80%ae|2(?:(?:5(?:c0%25a|2))?e|%45)|u(?:(?:002|ff0)e|2024)|%32(?:%(?:%6|4)5|E)|c0(?:%[256aef]e|\\.))|\\.(?:%0[01])?|0x2e){2,3}(?:%(?:c(?:0%(?:[2aq]f|5c|9v)|1%(?:[19p]c|8s|af))|2(?:5(?:c(?:0%25af|1%259c)|2f|5c)|%46|f)|(?:(?:f(?:8%8)?0%8|e)0%80%a|bg%q)f|%3(?:2(?:%(?:%6|4)6|F)|5%%63)|u(?:221[56]|002f|EFC8|F025)|1u|5c)|0x(?:2f|5c)|\\/|\\x5c)", + "options": { + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "normalizePath" + ] + }, + { + "id": "crs-930-110", + "name": "Simple Path Traversal Attack (/../)", + "tags": { + "type": "lfi", + "crs_id": "930110", + "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + }, + { + "address": 
"server.request.headers.no_cookies" + } + ], + "regex": "(?:(?:^|[\\x5c/])\\.{2,3}[\\x5c/]|[\\x5c/]\\.{2,3}(?:[\\x5c/]|$))", + "options": { + "case_sensitive": true, + "min_length": 3 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-930-120", + "name": "OS File Access Attempt", + "tags": { + "type": "lfi", + "crs_id": "930120", + "category": "attack_attempt", + "cwe": "22", + "capec": "1000/255/153/126", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "/.htaccess", + "/.htdigest", + "/.htpasswd", + "/.addressbook", + "/.aptitude/config", + ".aws/config", + ".aws/credentials", + "/.bash_config", + "/.bash_history", + "/.bash_logout", + "/.bash_profile", + "/.bashrc", + ".cache/notify-osd.log", + ".config/odesk/odesk team.conf", + "/.cshrc", + "/.dockerignore", + ".drush/", + "/.eslintignore", + "/.fbcindex", + "/.forward", + "/.git", + ".git/", + "/.gitattributes", + "/.gitconfig", + ".gnupg/", + ".hplip/hplip.conf", + "/.ksh_history", + "/.lesshst", + ".lftp/", + "/.lhistory", + "/.lldb-history", + ".local/share/mc/", + "/.lynx_cookies", + "/.my.cnf", + "/.mysql_history", + "/.nano_history", + "/.node_repl_history", + "/.pearrc", + "/.pgpass", + "/.php_history", + "/.pinerc", + ".pki/", + "/.proclog", + "/.procmailrc", + "/.psql_history", + "/.python_history", + "/.rediscli_history", + "/.rhistory", + "/.rhosts", + "/.sh_history", + "/.sqlite_history", + ".ssh/authorized_keys", + ".ssh/config", + ".ssh/id_dsa", + ".ssh/id_dsa.pub", + ".ssh/id_rsa", + ".ssh/id_rsa.pub", + ".ssh/identity", + ".ssh/identity.pub", + ".ssh/id_ecdsa", + ".ssh/id_ecdsa.pub", + ".ssh/known_hosts", + ".subversion/auth", + ".subversion/config", + ".subversion/servers", + ".tconn/tconn.conf", + "/.tcshrc", + ".vidalia/vidalia.conf", + "/.viminfo", + "/.vimrc", + "/.www_acl", + "/.wwwacl", + "/.xauthority", + "/.zhistory", + "/.zshrc", + "/.zsh_history", + "/.nsconfig", + "data/elasticsearch", + "data/kafka", + "etc/ansible", + "etc/bind", + "etc/centos-release", + "etc/centos-release-upstream", + "etc/clam.d", + "etc/elasticsearch", + "etc/freshclam.conf", + "etc/gshadow", + "etc/gshadow-", + "etc/httpd", + "etc/kafka", + "etc/kibana", + "etc/logstash", + "etc/lvm", + "etc/mongod.conf", + "etc/my.cnf", + "etc/nuxeo.conf", + "etc/pki", + "etc/postfix", + "etc/scw-release", + "etc/subgid", + "etc/subgid-", + "etc/sudoers.d", + "etc/sysconfig", + "etc/system-release-cpe", + "opt/nuxeo", + "opt/tomcat", + "tmp/kafka-logs", + "usr/lib/rpm/rpm.log", + "var/data/elasticsearch", + "var/lib/elasticsearch", + "etc/.java", + "etc/acpi", + "etc/alsa", + "etc/alternatives", + "etc/apache2", + "etc/apm", + "etc/apparmor", + "etc/apparmor.d", + "etc/apport", + "etc/apt", + "etc/asciidoc", + "etc/avahi", + "etc/bash_completion.d", + "etc/binfmt.d", + "etc/bluetooth", + "etc/bonobo-activation", + "etc/brltty", + "etc/ca-certificates", + "etc/calendar", + "etc/chatscripts", + "etc/chromium-browser", + "etc/clamav", + "etc/cni", + "etc/console-setup", + "etc/coraza-waf", + "etc/cracklib", + "etc/cron.d", + "etc/cron.daily", + "etc/cron.hourly", + "etc/cron.monthly", + "etc/cron.weekly", + "etc/cups", + "etc/cups.save", + "etc/cupshelpers", + 
"etc/dbus-1", + "etc/dconf", + "etc/default", + "etc/depmod.d", + "etc/dhcp", + "etc/dictionaries-common", + "etc/dkms", + "etc/dnsmasq.d", + "etc/dockeretc/dpkg", + "etc/emacs", + "etc/environment.d", + "etc/fail2ban", + "etc/firebird", + "etc/firefox", + "etc/fonts", + "etc/fwupd", + "etc/gconf", + "etc/gdb", + "etc/gdm3", + "etc/geoclue", + "etc/ghostscript", + "etc/gimp", + "etc/glvnd", + "etc/gnome", + "etc/gnome-vfs-2.0", + "etc/gnucash", + "etc/gnustep", + "etc/groff", + "etc/grub.d", + "etc/gss", + "etc/gtk-2.0", + "etc/gtk-3.0", + "etc/hp", + "etc/ifplugd", + "etc/imagemagick-6", + "etc/init", + "etc/init.d", + "etc/initramfs-tools", + "etc/insserv.conf.d", + "etc/iproute2", + "etc/iptables", + "etc/java", + "etc/java-11-openjdk", + "etc/java-17-oracle", + "etc/java-8-openjdk", + "etc/kernel", + "etc/ld.so.conf.d", + "etc/ldap", + "etc/libblockdev", + "etc/libibverbs.d", + "etc/libnl-3", + "etc/libpaper.d", + "etc/libreoffice", + "etc/lighttpd", + "etc/logcheck", + "etc/logrotate.d", + "etc/lynx", + "etc/mail", + "etc/mc", + "etc/menu", + "etc/menu-methods", + "etc/modprobe.d", + "etc/modsecurity", + "etc/modules-load.d", + "etc/monit", + "etc/mono", + "etc/mplayer", + "etc/mpv", + "etc/muttrc.d", + "etc/mysql", + "etc/netplan", + "etc/network", + "etc/networkd-dispatcher", + "etc/networkmanager", + "etc/newt", + "etc/nghttpx", + "etc/nikto", + "etc/odbcdatasources", + "etc/openal", + "etc/openmpi", + "etc/opt", + "etc/osync", + "etc/packagekit", + "etc/pam.d", + "etc/pcmcia", + "etc/perl", + "etc/php", + "etc/pki", + "etc/pm", + "etc/polkit-1", + "etc/postfix", + "etc/ppp", + "etc/profile.d", + "etc/proftpd", + "etc/pulse", + "etc/python", + "etc/rc0.d", + "etc/rc1.d", + "etc/rc2.d", + "etc/rc3.d", + "etc/rc4.d", + "etc/rc5.d", + "etc/rc6.d", + "etc/rcs.d", + "etc/resolvconf", + "etc/rsyslog.d", + "etc/samba", + "etc/sane.d", + "etc/security", + "etc/selinux", + "etc/sensors.d", + "etc/sgml", + "etc/signon-ui", + "etc/skel", + "etc/snmp", + "etc/sound", + "etc/spamassassin", + "etc/speech-dispatcher", + "etc/ssh", + "etc/ssl", + "etc/sudoers.d", + "etc/sysctl.d", + "etc/sysstat", + "etc/systemd", + "etc/terminfo", + "etc/texmf", + "etc/thermald", + "etc/thnuclnt", + "etc/thunderbird", + "etc/timidity", + "etc/tmpfiles.d", + "etc/ubuntu-advantage", + "etc/udev", + "etc/udisks2", + "etc/ufw", + "etc/update-manager", + "etc/update-motd.d", + "etc/update-notifier", + "etc/upower", + "etc/urlview", + "etc/usb_modeswitch.d", + "etc/vim", + "etc/vmware", + "etc/vmware-installer", + "etc/vmware-vix", + "etc/vulkan", + "etc/w3m", + "etc/wireshark", + "etc/wpa_supplicant", + "etc/x11", + "etc/xdg", + "etc/xml", + "etc/redis.conf", + "etc/redis-sentinel.conf", + "etc/php.ini", + "bin/php.ini", + "etc/httpd/php.ini", + "usr/lib/php.ini", + "usr/lib/php/php.ini", + "usr/local/etc/php.ini", + "usr/local/lib/php.ini", + "usr/local/php/lib/php.ini", + "usr/local/php4/lib/php.ini", + "usr/local/php5/lib/php.ini", + "usr/local/apache/conf/php.ini", + "etc/php4.4/fcgi/php.ini", + "etc/php4/apache/php.ini", + "etc/php4/apache2/php.ini", + "etc/php5/apache/php.ini", + "etc/php5/apache2/php.ini", + "etc/php/php.ini", + "etc/php/php4/php.ini", + "etc/php/apache/php.ini", + "etc/php/apache2/php.ini", + "web/conf/php.ini", + "usr/local/zend/etc/php.ini", + "opt/xampp/etc/php.ini", + "var/local/www/conf/php.ini", + "etc/php/cgi/php.ini", + "etc/php4/cgi/php.ini", + "etc/php5/cgi/php.ini", + "home2/bin/stable/apache/php.ini", + "home/bin/stable/apache/php.ini", + "etc/httpd/conf.d/php.conf", + 
"php5/php.ini", + "php4/php.ini", + "php/php.ini", + "windows/php.ini", + "winnt/php.ini", + "apache/php/php.ini", + "xampp/apache/bin/php.ini", + "netserver/bin/stable/apache/php.ini", + "volumes/macintosh_hd1/usr/local/php/lib/php.ini", + "etc/mono/1.0/machine.config", + "etc/mono/2.0/machine.config", + "etc/mono/2.0/web.config", + "etc/mono/config", + "usr/local/cpanel/logs/stats_log", + "usr/local/cpanel/logs/access_log", + "usr/local/cpanel/logs/error_log", + "usr/local/cpanel/logs/license_log", + "usr/local/cpanel/logs/login_log", + "var/cpanel/cpanel.config", + "usr/local/psa/admin/logs/httpsd_access_log", + "usr/local/psa/admin/logs/panel.log", + "usr/local/psa/admin/conf/php.ini", + "etc/sw-cp-server/applications.d/plesk.conf", + "usr/local/psa/admin/conf/site_isolation_settings.ini", + "usr/local/sb/config", + "etc/sw-cp-server/applications.d/00-sso-cpserver.conf", + "etc/sso/sso_config.ini", + "etc/mysql/conf.d/old_passwords.cnf", + "var/mysql.log", + "var/mysql-bin.index", + "var/data/mysql-bin.index", + "program files/mysql/mysql server 5.0/data/{host}.err", + "program files/mysql/mysql server 5.0/data/mysql.log", + "program files/mysql/mysql server 5.0/data/mysql.err", + "program files/mysql/mysql server 5.0/data/mysql-bin.log", + "program files/mysql/mysql server 5.0/data/mysql-bin.index", + "program files/mysql/data/{host}.err", + "program files/mysql/data/mysql.log", + "program files/mysql/data/mysql.err", + "program files/mysql/data/mysql-bin.log", + "program files/mysql/data/mysql-bin.index", + "mysql/data/{host}.err", + "mysql/data/mysql.log", + "mysql/data/mysql.err", + "mysql/data/mysql-bin.log", + "mysql/data/mysql-bin.index", + "usr/local/mysql/data/mysql.log", + "usr/local/mysql/data/mysql.err", + "usr/local/mysql/data/mysql-bin.log", + "usr/local/mysql/data/mysql-slow.log", + "usr/local/mysql/data/mysqlderror.log", + "usr/local/mysql/data/{host}.err", + "usr/local/mysql/data/mysql-bin.index", + "var/lib/mysql/my.cnf", + "etc/mysql/my.cnf", + "etc/my.cnf", + "program files/mysql/mysql server 5.0/my.ini", + "program files/mysql/mysql server 5.0/my.cnf", + "program files/mysql/my.ini", + "program files/mysql/my.cnf", + "mysql/my.ini", + "mysql/my.cnf", + "mysql/bin/my.ini", + "var/postgresql/log/postgresql.log", + "usr/internet/pgsql/data/postmaster.log", + "usr/local/pgsql/data/postgresql.log", + "usr/local/pgsql/data/pg_log", + "postgresql/log/pgadmin.log", + "var/lib/pgsql/data/postgresql.conf", + "var/postgresql/db/postgresql.conf", + "var/nm2/postgresql.conf", + "usr/local/pgsql/data/postgresql.conf", + "usr/local/pgsql/data/pg_hba.conf", + "usr/internet/pgsql/data/pg_hba.conf", + "usr/local/pgsql/data/passwd", + "usr/local/pgsql/bin/pg_passwd", + "etc/postgresql/postgresql.conf", + "etc/postgresql/pg_hba.conf", + "home/postgres/data/postgresql.conf", + "home/postgres/data/pg_version", + "home/postgres/data/pg_ident.conf", + "home/postgres/data/pg_hba.conf", + "program files/postgresql/8.3/data/pg_hba.conf", + "program files/postgresql/8.3/data/pg_ident.conf", + "program files/postgresql/8.3/data/postgresql.conf", + "program files/postgresql/8.4/data/pg_hba.conf", + "program files/postgresql/8.4/data/pg_ident.conf", + "program files/postgresql/8.4/data/postgresql.conf", + "program files/postgresql/9.0/data/pg_hba.conf", + "program files/postgresql/9.0/data/pg_ident.conf", + "program files/postgresql/9.0/data/postgresql.conf", + "program files/postgresql/9.1/data/pg_hba.conf", + "program files/postgresql/9.1/data/pg_ident.conf", + "program 
files/postgresql/9.1/data/postgresql.conf", + "wamp/logs/access.log", + "wamp/logs/apache_error.log", + "wamp/logs/genquery.log", + "wamp/logs/mysql.log", + "wamp/logs/slowquery.log", + "wamp/bin/apache/apache2.2.22/logs/access.log", + "wamp/bin/apache/apache2.2.22/logs/error.log", + "wamp/bin/apache/apache2.2.21/logs/access.log", + "wamp/bin/apache/apache2.2.21/logs/error.log", + "wamp/bin/mysql/mysql5.5.24/data/mysql-bin.index", + "wamp/bin/mysql/mysql5.5.16/data/mysql-bin.index", + "wamp/bin/apache/apache2.2.21/conf/httpd.conf", + "wamp/bin/apache/apache2.2.22/conf/httpd.conf", + "wamp/bin/apache/apache2.2.21/wampserver.conf", + "wamp/bin/apache/apache2.2.22/wampserver.conf", + "wamp/bin/apache/apache2.2.22/conf/wampserver.conf", + "wamp/bin/mysql/mysql5.5.24/my.ini", + "wamp/bin/mysql/mysql5.5.24/wampserver.conf", + "wamp/bin/mysql/mysql5.5.16/my.ini", + "wamp/bin/mysql/mysql5.5.16/wampserver.conf", + "wamp/bin/php/php5.3.8/php.ini", + "wamp/bin/php/php5.4.3/php.ini", + "xampp/apache/logs/access.log", + "xampp/apache/logs/error.log", + "xampp/mysql/data/mysql-bin.index", + "xampp/mysql/data/mysql.err", + "xampp/mysql/data/{host}.err", + "xampp/sendmail/sendmail.log", + "xampp/apache/conf/httpd.conf", + "xampp/filezillaftp/filezilla server.xml", + "xampp/mercurymail/mercury.ini", + "xampp/php/php.ini", + "xampp/phpmyadmin/config.inc.php", + "xampp/sendmail/sendmail.ini", + "xampp/webalizer/webalizer.conf", + "opt/lampp/etc/httpd.conf", + "xampp/htdocs/aca.txt", + "xampp/htdocs/admin.php", + "xampp/htdocs/leer.txt", + "usr/local/apache/logs/audit_log", + "usr/local/apache2/logs/audit_log", + "logs/security_debug_log", + "logs/security_log", + "usr/local/apache/conf/modsec.conf", + "usr/local/apache2/conf/modsec.conf", + "winnt/system32/logfiles/msftpsvc", + "winnt/system32/logfiles/msftpsvc1", + "winnt/system32/logfiles/msftpsvc2", + "windows/system32/logfiles/msftpsvc", + "windows/system32/logfiles/msftpsvc1", + "windows/system32/logfiles/msftpsvc2", + "etc/logrotate.d/proftpd", + "www/logs/proftpd.system.log", + "etc/pam.d/proftpd", + "etc/proftp.conf", + "etc/protpd/proftpd.conf", + "etc/vhcs2/proftpd/proftpd.conf", + "etc/proftpd/modules.conf", + "etc/vsftpd.chroot_list", + "etc/logrotate.d/vsftpd.log", + "etc/vsftpd/vsftpd.conf", + "etc/vsftpd.conf", + "etc/chrootusers", + "var/adm/log/xferlog", + "etc/wu-ftpd/ftpaccess", + "etc/wu-ftpd/ftphosts", + "etc/wu-ftpd/ftpusers", + "logs/pure-ftpd.log", + "usr/sbin/pure-config.pl", + "usr/etc/pure-ftpd.conf", + "etc/pure-ftpd/pure-ftpd.conf", + "usr/local/etc/pure-ftpd.conf", + "usr/local/etc/pureftpd.pdb", + "usr/local/pureftpd/etc/pureftpd.pdb", + "usr/local/pureftpd/sbin/pure-config.pl", + "usr/local/pureftpd/etc/pure-ftpd.conf", + "etc/pure-ftpd.conf", + "etc/pure-ftpd/pure-ftpd.pdb", + "etc/pureftpd.pdb", + "etc/pureftpd.passwd", + "etc/pure-ftpd/pureftpd.pdb", + "usr/ports/ftp/pure-ftpd/pure-ftpd.conf", + "usr/ports/ftp/pure-ftpd/pureftpd.pdb", + "usr/ports/ftp/pure-ftpd/pureftpd.passwd", + "usr/ports/net/pure-ftpd/pure-ftpd.conf", + "usr/ports/net/pure-ftpd/pureftpd.pdb", + "usr/ports/net/pure-ftpd/pureftpd.passwd", + "usr/pkgsrc/net/pureftpd/pure-ftpd.conf", + "usr/pkgsrc/net/pureftpd/pureftpd.pdb", + "usr/pkgsrc/net/pureftpd/pureftpd.passwd", + "usr/ports/contrib/pure-ftpd/pure-ftpd.conf", + "usr/ports/contrib/pure-ftpd/pureftpd.pdb", + "usr/ports/contrib/pure-ftpd/pureftpd.passwd", + "usr/sbin/mudlogd", + "etc/muddleftpd/mudlog", + "etc/muddleftpd.com", + "etc/muddleftpd/mudlogd.conf", + "etc/muddleftpd/muddleftpd.conf", + 
"usr/sbin/mudpasswd", + "etc/muddleftpd/muddleftpd.passwd", + "etc/muddleftpd/passwd", + "etc/logrotate.d/ftp", + "etc/ftpchroot", + "etc/ftphosts", + "etc/ftpusers", + "winnt/system32/logfiles/smtpsvc", + "winnt/system32/logfiles/smtpsvc1", + "winnt/system32/logfiles/smtpsvc2", + "winnt/system32/logfiles/smtpsvc3", + "winnt/system32/logfiles/smtpsvc4", + "winnt/system32/logfiles/smtpsvc5", + "windows/system32/logfiles/smtpsvc", + "windows/system32/logfiles/smtpsvc1", + "windows/system32/logfiles/smtpsvc2", + "windows/system32/logfiles/smtpsvc3", + "windows/system32/logfiles/smtpsvc4", + "windows/system32/logfiles/smtpsvc5", + "etc/osxhttpd/osxhttpd.conf", + "system/library/webobjects/adaptors/apache2.2/apache.conf", + "etc/apache2/sites-available/default", + "etc/apache2/sites-available/default-ssl", + "etc/apache2/sites-enabled/000-default", + "etc/apache2/sites-enabled/default", + "etc/apache2/apache2.conf", + "etc/apache2/ports.conf", + "usr/local/etc/apache/httpd.conf", + "usr/pkg/etc/httpd/httpd.conf", + "usr/pkg/etc/httpd/httpd-default.conf", + "usr/pkg/etc/httpd/httpd-vhosts.conf", + "etc/httpd/mod_php.conf", + "etc/httpd/extra/httpd-ssl.conf", + "etc/rc.d/rc.httpd", + "usr/local/apache/conf/httpd.conf.default", + "usr/local/apache/conf/access.conf", + "usr/local/apache22/conf/httpd.conf", + "usr/local/apache22/httpd.conf", + "usr/local/etc/apache22/conf/httpd.conf", + "usr/local/apps/apache22/conf/httpd.conf", + "etc/apache22/conf/httpd.conf", + "etc/apache22/httpd.conf", + "opt/apache22/conf/httpd.conf", + "usr/local/etc/apache2/vhosts.conf", + "usr/local/apache/conf/vhosts.conf", + "usr/local/apache2/conf/vhosts.conf", + "usr/local/apache/conf/vhosts-custom.conf", + "usr/local/apache2/conf/vhosts-custom.conf", + "etc/apache/default-server.conf", + "etc/apache2/default-server.conf", + "usr/local/apache2/conf/extra/httpd-ssl.conf", + "usr/local/apache2/conf/ssl.conf", + "etc/httpd/conf.d", + "usr/local/etc/apache22/httpd.conf", + "usr/local/etc/apache2/httpd.conf", + "etc/apache2/httpd2.conf", + "etc/apache2/ssl-global.conf", + "etc/apache2/vhosts.d/00_default_vhost.conf", + "apache/conf/httpd.conf", + "etc/apache/httpd.conf", + "etc/httpd/conf", + "http/httpd.conf", + "usr/local/apache1.3/conf/httpd.conf", + "usr/local/etc/httpd/conf", + "var/apache/conf/httpd.conf", + "var/www/conf", + "www/apache/conf/httpd.conf", + "www/conf/httpd.conf", + "etc/init.d", + "etc/apache/access.conf", + "etc/rc.conf", + "www/logs/freebsddiary-error.log", + "www/logs/freebsddiary-access_log", + "library/webserver/documents/index.html", + "library/webserver/documents/index.htm", + "library/webserver/documents/default.html", + "library/webserver/documents/default.htm", + "library/webserver/documents/index.php", + "library/webserver/documents/default.php", + "usr/local/etc/webmin/miniserv.conf", + "etc/webmin/miniserv.conf", + "usr/local/etc/webmin/miniserv.users", + "etc/webmin/miniserv.users", + "winnt/system32/logfiles/w3svc/inetsvn1.log", + "winnt/system32/logfiles/w3svc1/inetsvn1.log", + "winnt/system32/logfiles/w3svc2/inetsvn1.log", + "winnt/system32/logfiles/w3svc3/inetsvn1.log", + "windows/system32/logfiles/w3svc/inetsvn1.log", + "windows/system32/logfiles/w3svc1/inetsvn1.log", + "windows/system32/logfiles/w3svc2/inetsvn1.log", + "windows/system32/logfiles/w3svc3/inetsvn1.log", + "apache/logs/error.log", + "apache/logs/access.log", + "apache2/logs/error.log", + "apache2/logs/access.log", + "logs/error.log", + "logs/access.log", + "etc/httpd/logs/access_log", + "etc/httpd/logs/access.log", + 
"etc/httpd/logs/error_log", + "etc/httpd/logs/error.log", + "usr/local/apache/logs/access_log", + "usr/local/apache/logs/access.log", + "usr/local/apache/logs/error_log", + "usr/local/apache/logs/error.log", + "usr/local/apache2/logs/access_log", + "usr/local/apache2/logs/access.log", + "usr/local/apache2/logs/error_log", + "usr/local/apache2/logs/error.log", + "var/www/logs/access_log", + "var/www/logs/access.log", + "var/www/logs/error_log", + "var/www/logs/error.log", + "opt/lampp/logs/access_log", + "opt/lampp/logs/error_log", + "opt/xampp/logs/access_log", + "opt/xampp/logs/error_log", + "opt/lampp/logs/access.log", + "opt/lampp/logs/error.log", + "opt/xampp/logs/access.log", + "opt/xampp/logs/error.log", + "program files/apache group/apache/logs/access.log", + "program files/apache group/apache/logs/error.log", + "program files/apache software foundation/apache2.2/logs/error.log", + "program files/apache software foundation/apache2.2/logs/access.log", + "opt/apache/apache.conf", + "opt/apache/conf/apache.conf", + "opt/apache2/apache.conf", + "opt/apache2/conf/apache.conf", + "opt/httpd/apache.conf", + "opt/httpd/conf/apache.conf", + "etc/httpd/apache.conf", + "etc/apache2/apache.conf", + "etc/httpd/conf/apache.conf", + "usr/local/apache/apache.conf", + "usr/local/apache/conf/apache.conf", + "usr/local/apache2/apache.conf", + "usr/local/apache2/conf/apache.conf", + "usr/local/php/apache.conf.php", + "usr/local/php4/apache.conf.php", + "usr/local/php5/apache.conf.php", + "usr/local/php/apache.conf", + "usr/local/php4/apache.conf", + "usr/local/php5/apache.conf", + "private/etc/httpd/apache.conf", + "opt/apache/apache2.conf", + "opt/apache/conf/apache2.conf", + "opt/apache2/apache2.conf", + "opt/apache2/conf/apache2.conf", + "opt/httpd/apache2.conf", + "opt/httpd/conf/apache2.conf", + "etc/httpd/apache2.conf", + "etc/httpd/conf/apache2.conf", + "usr/local/apache/apache2.conf", + "usr/local/apache/conf/apache2.conf", + "usr/local/apache2/apache2.conf", + "usr/local/apache2/conf/apache2.conf", + "usr/local/php/apache2.conf.php", + "usr/local/php4/apache2.conf.php", + "usr/local/php5/apache2.conf.php", + "usr/local/php/apache2.conf", + "usr/local/php4/apache2.conf", + "usr/local/php5/apache2.conf", + "private/etc/httpd/apache2.conf", + "usr/local/apache/conf/httpd.conf", + "usr/local/apache2/conf/httpd.conf", + "etc/httpd/conf/httpd.conf", + "etc/apache/apache.conf", + "etc/apache/conf/httpd.conf", + "etc/apache2/httpd.conf", + "usr/apache2/conf/httpd.conf", + "usr/apache/conf/httpd.conf", + "usr/local/etc/apache/conf/httpd.conf", + "usr/local/apache/httpd.conf", + "usr/local/apache2/httpd.conf", + "usr/local/httpd/conf/httpd.conf", + "usr/local/etc/apache2/conf/httpd.conf", + "usr/local/etc/httpd/conf/httpd.conf", + "usr/local/apps/apache2/conf/httpd.conf", + "usr/local/apps/apache/conf/httpd.conf", + "usr/local/php/httpd.conf.php", + "usr/local/php4/httpd.conf.php", + "usr/local/php5/httpd.conf.php", + "usr/local/php/httpd.conf", + "usr/local/php4/httpd.conf", + "usr/local/php5/httpd.conf", + "etc/apache2/conf/httpd.conf", + "etc/http/conf/httpd.conf", + "etc/httpd/httpd.conf", + "etc/http/httpd.conf", + "etc/httpd.conf", + "opt/apache/conf/httpd.conf", + "opt/apache2/conf/httpd.conf", + "var/www/conf/httpd.conf", + "private/etc/httpd/httpd.conf", + "private/etc/httpd/httpd.conf.default", + "etc/apache2/vhosts.d/default_vhost.include", + "etc/apache2/conf.d/charset", + "etc/apache2/conf.d/security", + "etc/apache2/envvars", + "etc/apache2/mods-available/autoindex.conf", + 
"etc/apache2/mods-available/deflate.conf", + "etc/apache2/mods-available/dir.conf", + "etc/apache2/mods-available/mem_cache.conf", + "etc/apache2/mods-available/mime.conf", + "etc/apache2/mods-available/proxy.conf", + "etc/apache2/mods-available/setenvif.conf", + "etc/apache2/mods-available/ssl.conf", + "etc/apache2/mods-enabled/alias.conf", + "etc/apache2/mods-enabled/deflate.conf", + "etc/apache2/mods-enabled/dir.conf", + "etc/apache2/mods-enabled/mime.conf", + "etc/apache2/mods-enabled/negotiation.conf", + "etc/apache2/mods-enabled/php5.conf", + "etc/apache2/mods-enabled/status.conf", + "program files/apache group/apache/conf/httpd.conf", + "program files/apache group/apache2/conf/httpd.conf", + "program files/xampp/apache/conf/apache.conf", + "program files/xampp/apache/conf/apache2.conf", + "program files/xampp/apache/conf/httpd.conf", + "program files/apache group/apache/apache.conf", + "program files/apache group/apache/conf/apache.conf", + "program files/apache group/apache2/conf/apache.conf", + "program files/apache group/apache/apache2.conf", + "program files/apache group/apache/conf/apache2.conf", + "program files/apache group/apache2/conf/apache2.conf", + "program files/apache software foundation/apache2.2/conf/httpd.conf", + "volumes/macintosh_hd1/opt/httpd/conf/httpd.conf", + "volumes/macintosh_hd1/opt/apache/conf/httpd.conf", + "volumes/macintosh_hd1/opt/apache2/conf/httpd.conf", + "volumes/macintosh_hd1/usr/local/php/httpd.conf.php", + "volumes/macintosh_hd1/usr/local/php4/httpd.conf.php", + "volumes/macintosh_hd1/usr/local/php5/httpd.conf.php", + "volumes/webbackup/opt/apache2/conf/httpd.conf", + "volumes/webbackup/private/etc/httpd/httpd.conf", + "volumes/webbackup/private/etc/httpd/httpd.conf.default", + "usr/local/etc/apache/vhosts.conf", + "usr/local/jakarta/tomcat/conf/jakarta.conf", + "usr/local/jakarta/tomcat/conf/server.xml", + "usr/local/jakarta/tomcat/conf/context.xml", + "usr/local/jakarta/tomcat/conf/workers.properties", + "usr/local/jakarta/tomcat/conf/logging.properties", + "usr/local/jakarta/dist/tomcat/conf/jakarta.conf", + "usr/local/jakarta/dist/tomcat/conf/server.xml", + "usr/local/jakarta/dist/tomcat/conf/context.xml", + "usr/local/jakarta/dist/tomcat/conf/workers.properties", + "usr/local/jakarta/dist/tomcat/conf/logging.properties", + "usr/share/tomcat6/conf/server.xml", + "usr/share/tomcat6/conf/context.xml", + "usr/share/tomcat6/conf/workers.properties", + "usr/share/tomcat6/conf/logging.properties", + "var/cpanel/tomcat.options", + "usr/local/jakarta/tomcat/logs/catalina.out", + "usr/local/jakarta/tomcat/logs/catalina.err", + "opt/tomcat/logs/catalina.out", + "opt/tomcat/logs/catalina.err", + "usr/share/logs/catalina.out", + "usr/share/logs/catalina.err", + "usr/share/tomcat/logs/catalina.out", + "usr/share/tomcat/logs/catalina.err", + "usr/share/tomcat6/logs/catalina.out", + "usr/share/tomcat6/logs/catalina.err", + "usr/local/apache/logs/mod_jk.log", + "usr/local/jakarta/tomcat/logs/mod_jk.log", + "usr/local/jakarta/dist/tomcat/logs/mod_jk.log", + "opt/[jboss]/server/default/conf/jboss-minimal.xml", + "opt/[jboss]/server/default/conf/jboss-service.xml", + "opt/[jboss]/server/default/conf/jndi.properties", + "opt/[jboss]/server/default/conf/log4j.xml", + "opt/[jboss]/server/default/conf/login-config.xml", + "opt/[jboss]/server/default/conf/standardjaws.xml", + "opt/[jboss]/server/default/conf/standardjboss.xml", + "opt/[jboss]/server/default/conf/server.log.properties", + "opt/[jboss]/server/default/deploy/jboss-logging.xml", + 
"usr/local/[jboss]/server/default/conf/jboss-minimal.xml", + "usr/local/[jboss]/server/default/conf/jboss-service.xml", + "usr/local/[jboss]/server/default/conf/jndi.properties", + "usr/local/[jboss]/server/default/conf/log4j.xml", + "usr/local/[jboss]/server/default/conf/login-config.xml", + "usr/local/[jboss]/server/default/conf/standardjaws.xml", + "usr/local/[jboss]/server/default/conf/standardjboss.xml", + "usr/local/[jboss]/server/default/conf/server.log.properties", + "usr/local/[jboss]/server/default/deploy/jboss-logging.xml", + "private/tmp/[jboss]/server/default/conf/jboss-minimal.xml", + "private/tmp/[jboss]/server/default/conf/jboss-service.xml", + "private/tmp/[jboss]/server/default/conf/jndi.properties", + "private/tmp/[jboss]/server/default/conf/log4j.xml", + "private/tmp/[jboss]/server/default/conf/login-config.xml", + "private/tmp/[jboss]/server/default/conf/standardjaws.xml", + "private/tmp/[jboss]/server/default/conf/standardjboss.xml", + "private/tmp/[jboss]/server/default/conf/server.log.properties", + "private/tmp/[jboss]/server/default/deploy/jboss-logging.xml", + "tmp/[jboss]/server/default/conf/jboss-minimal.xml", + "tmp/[jboss]/server/default/conf/jboss-service.xml", + "tmp/[jboss]/server/default/conf/jndi.properties", + "tmp/[jboss]/server/default/conf/log4j.xml", + "tmp/[jboss]/server/default/conf/login-config.xml", + "tmp/[jboss]/server/default/conf/standardjaws.xml", + "tmp/[jboss]/server/default/conf/standardjboss.xml", + "tmp/[jboss]/server/default/conf/server.log.properties", + "tmp/[jboss]/server/default/deploy/jboss-logging.xml", + "program files/[jboss]/server/default/conf/jboss-minimal.xml", + "program files/[jboss]/server/default/conf/jboss-service.xml", + "program files/[jboss]/server/default/conf/jndi.properties", + "program files/[jboss]/server/default/conf/log4j.xml", + "program files/[jboss]/server/default/conf/login-config.xml", + "program files/[jboss]/server/default/conf/standardjaws.xml", + "program files/[jboss]/server/default/conf/standardjboss.xml", + "program files/[jboss]/server/default/conf/server.log.properties", + "program files/[jboss]/server/default/deploy/jboss-logging.xml", + "[jboss]/server/default/conf/jboss-minimal.xml", + "[jboss]/server/default/conf/jboss-service.xml", + "[jboss]/server/default/conf/jndi.properties", + "[jboss]/server/default/conf/log4j.xml", + "[jboss]/server/default/conf/login-config.xml", + "[jboss]/server/default/conf/standardjaws.xml", + "[jboss]/server/default/conf/standardjboss.xml", + "[jboss]/server/default/conf/server.log.properties", + "[jboss]/server/default/deploy/jboss-logging.xml", + "opt/[jboss]/server/default/log/server.log", + "opt/[jboss]/server/default/log/boot.log", + "usr/local/[jboss]/server/default/log/server.log", + "usr/local/[jboss]/server/default/log/boot.log", + "private/tmp/[jboss]/server/default/log/server.log", + "private/tmp/[jboss]/server/default/log/boot.log", + "tmp/[jboss]/server/default/log/server.log", + "tmp/[jboss]/server/default/log/boot.log", + "program files/[jboss]/server/default/log/server.log", + "program files/[jboss]/server/default/log/boot.log", + "[jboss]/server/default/log/server.log", + "[jboss]/server/default/log/boot.log", + "var/lighttpd.log", + "var/logs/access.log", + "usr/local/apache2/logs/lighttpd.error.log", + "usr/local/apache2/logs/lighttpd.log", + "usr/local/apache/logs/lighttpd.error.log", + "usr/local/apache/logs/lighttpd.log", + "usr/local/lighttpd/log/lighttpd.error.log", + "usr/local/lighttpd/log/access.log", + 
"usr/home/user/var/log/lighttpd.error.log", + "usr/home/user/var/log/apache.log", + "home/user/lighttpd/lighttpd.conf", + "usr/home/user/lighttpd/lighttpd.conf", + "etc/lighttpd/lighthttpd.conf", + "usr/local/etc/lighttpd.conf", + "usr/local/lighttpd/conf/lighttpd.conf", + "usr/local/etc/lighttpd.conf.new", + "var/www/.lighttpdpassword", + "logs/access_log", + "logs/error_log", + "etc/nginx/nginx.conf", + "usr/local/etc/nginx/nginx.conf", + "usr/local/nginx/conf/nginx.conf", + "usr/local/zeus/web/global.cfg", + "usr/local/zeus/web/log/errors", + "opt/lsws/conf/httpd_conf.xml", + "usr/local/lsws/conf/httpd_conf.xml", + "opt/lsws/logs/error.log", + "opt/lsws/logs/access.log", + "usr/local/lsws/logs/error.log", + "usr/local/logs/access.log", + "usr/local/samba/lib/log.user", + "usr/local/logs/samba.log", + "etc/samba/netlogon", + "etc/smbpasswd", + "etc/smb.conf", + "etc/samba/dhcp.conf", + "etc/samba/smb.conf", + "etc/samba/samba.conf", + "etc/samba/smb.conf.user", + "etc/samba/smbpasswd", + "etc/samba/smbusers", + "etc/samba/private/smbpasswd", + "usr/local/etc/smb.conf", + "usr/local/samba/lib/smb.conf.user", + "etc/dhcp3/dhclient.conf", + "etc/dhcp3/dhcpd.conf", + "etc/dhcp/dhclient.conf", + "program files/vidalia bundle/polipo/polipo.conf", + "etc/tor/tor-tsocks.conf", + "etc/stunnel/stunnel.conf", + "etc/tsocks.conf", + "etc/tinyproxy/tinyproxy.conf", + "etc/miredo-server.conf", + "etc/miredo.conf", + "etc/miredo/miredo-server.conf", + "etc/miredo/miredo.conf", + "etc/wicd/dhclient.conf.template.default", + "etc/wicd/manager-settings.conf", + "etc/wicd/wired-settings.conf", + "etc/wicd/wireless-settings.conf", + "etc/ipfw.rules", + "etc/ipfw.conf", + "etc/firewall.rules", + "winnt/system32/logfiles/firewall/pfirewall.log", + "winnt/system32/logfiles/firewall/pfirewall.log.old", + "windows/system32/logfiles/firewall/pfirewall.log", + "windows/system32/logfiles/firewall/pfirewall.log.old", + "etc/clamav/clamd.conf", + "etc/clamav/freshclam.conf", + "etc/x11/xorg.conf", + "etc/x11/xorg.conf-vesa", + "etc/x11/xorg.conf-vmware", + "etc/x11/xorg.conf.beforevmwaretoolsinstall", + "etc/x11/xorg.conf.orig", + "etc/bluetooth/input.conf", + "etc/bluetooth/main.conf", + "etc/bluetooth/network.conf", + "etc/bluetooth/rfcomm.conf", + "etc/bash_completion.d/debconf", + "root/.bash_logout", + "root/.bash_history", + "root/.bash_config", + "root/.bashrc", + "etc/bash.bashrc", + "var/adm/syslog", + "var/adm/sulog", + "var/adm/utmp", + "var/adm/utmpx", + "var/adm/wtmp", + "var/adm/wtmpx", + "var/adm/lastlog/username", + "usr/spool/lp/log", + "var/adm/lp/lpd-errs", + "usr/lib/cron/log", + "var/adm/loginlog", + "var/adm/pacct", + "var/adm/dtmp", + "var/adm/acct/sum/loginlog", + "var/adm/x0msgs", + "var/adm/crash/vmcore", + "var/adm/crash/unix", + "etc/newsyslog.conf", + "var/adm/qacct", + "var/adm/ras/errlog", + "var/adm/ras/bootlog", + "var/adm/cron/log", + "etc/utmp", + "etc/security/lastlog", + "etc/security/failedlogin", + "usr/spool/mqueue/syslog", + "var/adm/messages", + "var/adm/aculogs", + "var/adm/aculog", + "var/adm/vold.log", + "var/adm/log/asppp.log", + "var/lp/logs/lpsched", + "var/lp/logs/lpnet", + "var/lp/logs/requests", + "var/cron/log", + "var/saf/_log", + "var/saf/port/log", + "tmp/access.log", + "etc/sensors.conf", + "etc/sensors3.conf", + "etc/host.conf", + "etc/pam.conf", + "etc/resolv.conf", + "etc/apt/apt.conf", + "etc/inetd.conf", + "etc/syslog.conf", + "etc/sysctl.conf", + "etc/sysctl.d/10-console-messages.conf", + "etc/sysctl.d/10-network-security.conf", + 
"etc/sysctl.d/10-process-security.conf", + "etc/sysctl.d/wine.sysctl.conf", + "etc/security/access.conf", + "etc/security/group.conf", + "etc/security/limits.conf", + "etc/security/namespace.conf", + "etc/security/pam_env.conf", + "etc/security/sepermit.conf", + "etc/security/time.conf", + "etc/ssh/sshd_config", + "etc/adduser.conf", + "etc/deluser.conf", + "etc/avahi/avahi-daemon.conf", + "etc/ca-certificates.conf", + "etc/ca-certificates.conf.dpkg-old", + "etc/casper.conf", + "etc/chkrootkit.conf", + "etc/debconf.conf", + "etc/dns2tcpd.conf", + "etc/e2fsck.conf", + "etc/esound/esd.conf", + "etc/etter.conf", + "etc/fuse.conf", + "etc/foremost.conf", + "etc/hdparm.conf", + "etc/kernel-img.conf", + "etc/kernel-pkg.conf", + "etc/ld.so.conf", + "etc/ltrace.conf", + "etc/mail/sendmail.conf", + "etc/manpath.config", + "etc/kbd/config", + "etc/ldap/ldap.conf", + "etc/logrotate.conf", + "etc/mtools.conf", + "etc/smi.conf", + "etc/updatedb.conf", + "etc/pulse/client.conf", + "usr/share/adduser/adduser.conf", + "etc/hostname", + "etc/networks", + "etc/timezone", + "etc/modules", + "etc/passwd", + "etc/shadow", + "etc/fstab", + "etc/motd", + "etc/hosts", + "etc/group", + "etc/alias", + "etc/crontab", + "etc/crypttab", + "etc/exports", + "etc/mtab", + "etc/hosts.allow", + "etc/hosts.deny", + "etc/os-release", + "etc/password.master", + "etc/profile", + "etc/default/grub", + "etc/resolvconf/update-libc.d/sendmail", + "etc/inittab", + "etc/issue", + "etc/issue.net", + "etc/login.defs", + "etc/sudoers", + "etc/sysconfig/network-scripts/ifcfg-eth0", + "etc/redhat-release", + "etc/scw-release", + "etc/system-release-cpe", + "etc/debian_version", + "etc/fedora-release", + "etc/mandrake-release", + "etc/slackware-release", + "etc/suse-release", + "etc/security/group", + "etc/security/passwd", + "etc/security/user", + "etc/security/environ", + "etc/security/limits", + "etc/security/opasswd", + "boot/grub/grub.cfg", + "boot/grub/menu.lst", + "root/.ksh_history", + "root/.xauthority", + "usr/lib/security/mkuser.default", + "var/lib/squirrelmail/prefs/squirrelmail.log", + "etc/squirrelmail/apache.conf", + "etc/squirrelmail/config_local.php", + "etc/squirrelmail/default_pref", + "etc/squirrelmail/index.php", + "etc/squirrelmail/config_default.php", + "etc/squirrelmail/config.php", + "etc/squirrelmail/filters_setup.php", + "etc/squirrelmail/sqspell_config.php", + "etc/squirrelmail/config/config.php", + "etc/httpd/conf.d/squirrelmail.conf", + "usr/share/squirrelmail/config/config.php", + "private/etc/squirrelmail/config/config.php", + "srv/www/htdos/squirrelmail/config/config.php", + "var/www/squirrelmail/config/config.php", + "var/www/html/squirrelmail/config/config.php", + "var/www/html/squirrelmail-1.2.9/config/config.php", + "usr/share/squirrelmail/plugins/squirrel_logger/setup.php", + "usr/local/squirrelmail/www/readme", + "windows/system32/drivers/etc/hosts", + "windows/system32/drivers/etc/lmhosts.sam", + "windows/system32/drivers/etc/networks", + "windows/system32/drivers/etc/protocol", + "windows/system32/drivers/etc/services", + "/boot.ini", + "windows/debug/netsetup.log", + "windows/comsetup.log", + "windows/repair/setup.log", + "windows/setupact.log", + "windows/setupapi.log", + "windows/setuperr.log", + "windows/updspapi.log", + "windows/wmsetup.log", + "windows/windowsupdate.log", + "windows/odbc.ini", + "usr/local/psa/admin/htdocs/domains/databases/phpmyadmin/libraries/config.default.php", + "etc/apache2/conf.d/phpmyadmin.conf", + "etc/phpmyadmin/config.inc.php", + "etc/openldap/ldap.conf", + 
"etc/cups/acroread.conf", + "etc/cups/cupsd.conf", + "etc/cups/cupsd.conf.default", + "etc/cups/pdftops.conf", + "etc/cups/printers.conf", + "windows/system32/macromed/flash/flashinstall.log", + "windows/system32/macromed/flash/install.log", + "etc/cvs-cron.conf", + "etc/cvs-pserver.conf", + "etc/subversion/config", + "etc/modprobe.d/vmware-tools.conf", + "etc/updatedb.conf.beforevmwaretoolsinstall", + "etc/vmware-tools/config", + "etc/vmware-tools/tpvmlp.conf", + "etc/vmware-tools/vmware-tools-libraries.conf", + "var/log", + "var/log/sw-cp-server/error_log", + "var/log/sso/sso.log", + "var/log/dpkg.log", + "var/log/btmp", + "var/log/utmp", + "var/log/wtmp", + "var/log/mysql/mysql-bin.log", + "var/log/mysql/mysql-bin.index", + "var/log/mysql/data/mysql-bin.index", + "var/log/mysql.log", + "var/log/mysql.err", + "var/log/mysqlderror.log", + "var/log/mysql/mysql.log", + "var/log/mysql/mysql-slow.log", + "var/log/mysql-bin.index", + "var/log/data/mysql-bin.index", + "var/log/postgresql/postgresql.log", + "var/log/postgres/pg_backup.log", + "var/log/postgres/postgres.log", + "var/log/postgresql.log", + "var/log/pgsql/pgsql.log", + "var/log/postgresql/postgresql-8.1-main.log", + "var/log/postgresql/postgresql-8.3-main.log", + "var/log/postgresql/postgresql-8.4-main.log", + "var/log/postgresql/postgresql-9.0-main.log", + "var/log/postgresql/postgresql-9.1-main.log", + "var/log/pgsql8.log", + "var/log/postgresql/postgres.log", + "var/log/pgsql_log", + "var/log/postgresql/main.log", + "var/log/cron", + "var/log/postgres.log", + "var/log/proftpd", + "var/log/proftpd/xferlog.legacy", + "var/log/proftpd.access_log", + "var/log/proftpd.xferlog", + "var/log/vsftpd.log", + "var/log/xferlog", + "var/log/pure-ftpd/pure-ftpd.log", + "var/log/pureftpd.log", + "var/log/muddleftpd", + "var/log/muddleftpd.conf", + "var/log/ftp-proxy/ftp-proxy.log", + "var/log/ftp-proxy", + "var/log/ftplog", + "var/log/exim_mainlog", + "var/log/exim/mainlog", + "var/log/maillog", + "var/log/exim_paniclog", + "var/log/exim/paniclog", + "var/log/exim/rejectlog", + "var/log/exim_rejectlog", + "var/log/webmin/miniserv.log", + "var/log/httpd/access_log", + "var/log/httpd/error_log", + "var/log/httpd/access.log", + "var/log/httpd/error.log", + "var/log/apache/access_log", + "var/log/apache/access.log", + "var/log/apache/error_log", + "var/log/apache/error.log", + "var/log/apache2/access_log", + "var/log/apache2/access.log", + "var/log/apache2/error_log", + "var/log/apache2/error.log", + "var/log/access_log", + "var/log/access.log", + "var/log/error_log", + "var/log/error.log", + "var/log/tomcat6/catalina.out", + "var/log/lighttpd.error.log", + "var/log/lighttpd.access.log", + "var/logs/access.log", + "var/log/lighttpd/", + "var/log/lighttpd/error.log", + "var/log/lighttpd/access.www.log", + "var/log/lighttpd/error.www.log", + "var/log/lighttpd/access.log", + "var/log/lighttpd/{domain}/access.log", + "var/log/lighttpd/{domain}/error.log", + "var/log/nginx/access_log", + "var/log/nginx/error_log", + "var/log/nginx/access.log", + "var/log/nginx/error.log", + "var/log/nginx.access_log", + "var/log/nginx.error_log", + "var/log/samba/log.smbd", + "var/log/samba/log.nmbd", + "var/log/samba.log", + "var/log/samba.log1", + "var/log/samba.log2", + "var/log/log.smb", + "var/log/ipfw.log", + "var/log/ipfw", + "var/log/ipfw/ipfw.log", + "var/log/ipfw.today", + "var/log/poplog", + "var/log/authlog", + "var/log/news.all", + "var/log/news/news.all", + "var/log/news/news.crit", + "var/log/news/news.err", + "var/log/news/news.notice", + 
"var/log/news/suck.err", + "var/log/news/suck.notice", + "var/log/messages", + "var/log/messages.1", + "var/log/user.log", + "var/log/user.log.1", + "var/log/auth.log", + "var/log/pm-powersave.log", + "var/log/xorg.0.log", + "var/log/daemon.log", + "var/log/daemon.log.1", + "var/log/kern.log", + "var/log/kern.log.1", + "var/log/mail.err", + "var/log/mail.info", + "var/log/mail.warn", + "var/log/ufw.log", + "var/log/boot.log", + "var/log/syslog", + "var/log/syslog.1", + "var/log/squirrelmail.log", + "var/log/apache2/squirrelmail.log", + "var/log/apache2/squirrelmail.err.log", + "var/log/mail.log", + "var/log/vmware/hostd.log", + "var/log/vmware/hostd-1.log", + "/wp-config.php", + "/wp-config.bak", + "/wp-config.old", + "/wp-config.temp", + "/wp-config.tmp", + "/wp-config.txt", + "/config.yml", + "/config_dev.yml", + "/config_prod.yml", + "/config_test.yml", + "/parameters.yml", + "/routing.yml", + "/security.yml", + "/services.yml", + "sites/default/default.settings.php", + "sites/default/settings.php", + "sites/default/settings.local.php", + "app/etc/local.xml", + "/sftp-config.json", + "/web.config", + "includes/config.php", + "includes/configure.php", + "/config.inc.php", + "/localsettings.php", + "inc/config.php", + "typo3conf/localconf.php", + "config/app.php", + "config/custom.php", + "config/database.php", + "/configuration.php", + "/config.php", + "var/mail/www-data", + "etc/network/", + "etc/init/", + "inetpub/wwwroot/global.asa", + "system32/inetsrv/config/applicationhost.config", + "system32/inetsrv/config/administration.config", + "system32/inetsrv/config/redirection.config", + "system32/config/default", + "system32/config/sam", + "system32/config/system", + "system32/config/software", + "winnt/repair/sam._", + "/package.json", + "/package-lock.json", + "/gruntfile.js", + "/npm-debug.log", + "/ormconfig.json", + "/tsconfig.json", + "/webpack.config.js", + "/yarn.lock", + "proc/0", + "proc/1", + "proc/2", + "proc/3", + "proc/4", + "proc/5", + "proc/6", + "proc/7", + "proc/8", + "proc/9", + "proc/acpi", + "proc/asound", + "proc/bootconfig", + "proc/buddyinfo", + "proc/bus", + "proc/cgroups", + "proc/cmdline", + "proc/config.gz", + "proc/consoles", + "proc/cpuinfo", + "proc/crypto", + "proc/devices", + "proc/diskstats", + "proc/dma", + "proc/docker", + "proc/driver", + "proc/dynamic_debug", + "proc/execdomains", + "proc/fb", + "proc/filesystems", + "proc/fs", + "proc/interrupts", + "proc/iomem", + "proc/ioports", + "proc/ipmi", + "proc/irq", + "proc/kallsyms", + "proc/kcore", + "proc/keys", + "proc/keys", + "proc/key-users", + "proc/kmsg", + "proc/kpagecgroup", + "proc/kpagecount", + "proc/kpageflags", + "proc/latency_stats", + "proc/loadavg", + "proc/locks", + "proc/mdstat", + "proc/meminfo", + "proc/misc", + "proc/modules", + "proc/mounts", + "proc/mpt", + "proc/mtd", + "proc/mtrr", + "proc/net", + "proc/net/tcp", + "proc/net/udp", + "proc/pagetypeinfo", + "proc/partitions", + "proc/pressure", + "proc/sched_debug", + "proc/schedstat", + "proc/scsi", + "proc/self", + "proc/self/cmdline", + "proc/self/environ", + "proc/self/fd/0", + "proc/self/fd/1", + "proc/self/fd/10", + "proc/self/fd/11", + "proc/self/fd/12", + "proc/self/fd/13", + "proc/self/fd/14", + "proc/self/fd/15", + "proc/self/fd/2", + "proc/self/fd/3", + "proc/self/fd/4", + "proc/self/fd/5", + "proc/self/fd/6", + "proc/self/fd/7", + "proc/self/fd/8", + "proc/self/fd/9", + "proc/self/mounts", + "proc/self/stat", + "proc/self/status", + "proc/slabinfo", + "proc/softirqs", + "proc/stat", + "proc/swaps", + "proc/sys", + 
"proc/sysrq-trigger", + "proc/sysvipc", + "proc/thread-self", + "proc/timer_list", + "proc/timer_stats", + "proc/tty", + "proc/uptime", + "proc/version", + "proc/version_signature", + "proc/vmallocinfo", + "proc/vmstat", + "proc/zoneinfo", + "sys/block", + "sys/bus", + "sys/class", + "sys/dev", + "sys/devices", + "sys/firmware", + "sys/fs", + "sys/hypervisor", + "sys/kernel", + "sys/module", + "sys/power", + "windows\\win.ini", + "default\\ntuser.dat", + "/var/run/secrets/kubernetes.io/serviceaccount" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase", + "normalizePath" + ] + }, + { + "id": "crs-931-110", + "name": "RFI: Common RFI Vulnerable Parameter Name used w/ URL Payload", + "tags": { + "type": "rfi", + "crs_id": "931110", + "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + } + ], + "regex": "(?:\\binclude\\s*\\([^)]*|mosConfig_absolute_path|_CONF\\[path\\]|_SERVER\\[DOCUMENT_ROOT\\]|GALLERY_BASEDIR|path\\[docroot\\]|appserv_root|config\\[root_dir\\])=(?:file|ftps?|https?)://", + "options": { + "min_length": 15 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-931-120", + "name": "RFI: URL Payload Used w/Trailing Question Mark Character (?)", + "tags": { + "type": "rfi", + "crs_id": "931120", + "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "^(?i:file|ftps?)://.*?\\?+$", + "options": { + "case_sensitive": true, + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-932-160", + "name": "Remote Command Execution: Unix Shell Code Found", + "tags": { + "type": "command_injection", + "crs_id": "932160", + "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "${cdpath}", + "${dirstack}", + "${home}", + "${hostname}", + "${ifs}", + "${oldpwd}", + "${ostype}", + "${path}", + "${pwd}", + "$cdpath", + "$dirstack", + "$home", + "$hostname", + "$ifs", + "$oldpwd", + "$ostype", + "$path", + "$pwd", + "dev/fd/", + "dev/null", + "dev/stderr", + "dev/stdin", + "dev/stdout", + "dev/tcp/", + "dev/udp/", + "dev/zero", + "etc/master.passwd", + "etc/pwd.db", + "etc/shells", + "etc/spwd.db", + "proc/self/", + "bin/7z", + "bin/7za", + "bin/7zr", + "bin/ab", + "bin/agetty", + "bin/ansible-playbook", + "bin/apt", + "bin/apt-get", + "bin/ar", + "bin/aria2c", + "bin/arj", + "bin/arp", + "bin/as", + "bin/ascii-xfr", + "bin/ascii85", + "bin/ash", + "bin/aspell", + "bin/at", + "bin/atobm", + "bin/awk", + "bin/base32", + "bin/base64", + "bin/basenc", + "bin/bash", + "bin/bpftrace", + "bin/bridge", + "bin/bundler", + "bin/bunzip2", + "bin/busctl", + 
"bin/busybox", + "bin/byebug", + "bin/bzcat", + "bin/bzcmp", + "bin/bzdiff", + "bin/bzegrep", + "bin/bzexe", + "bin/bzfgrep", + "bin/bzgrep", + "bin/bzip2", + "bin/bzip2recover", + "bin/bzless", + "bin/bzmore", + "bin/bzz", + "bin/c89", + "bin/c99", + "bin/cancel", + "bin/capsh", + "bin/cat", + "bin/cc", + "bin/certbot", + "bin/check_by_ssh", + "bin/check_cups", + "bin/check_log", + "bin/check_memory", + "bin/check_raid", + "bin/check_ssl_cert", + "bin/check_statusfile", + "bin/chmod", + "bin/choom", + "bin/chown", + "bin/chroot", + "bin/clang", + "bin/clang++", + "bin/cmp", + "bin/cobc", + "bin/column", + "bin/comm", + "bin/composer", + "bin/core_perl/zipdetails", + "bin/cowsay", + "bin/cowthink", + "bin/cp", + "bin/cpan", + "bin/cpio", + "bin/cpulimit", + "bin/crash", + "bin/crontab", + "bin/csh", + "bin/csplit", + "bin/csvtool", + "bin/cupsfilter", + "bin/curl", + "bin/cut", + "bin/dash", + "bin/date", + "bin/dd", + "bin/dev/fd/", + "bin/dev/null", + "bin/dev/stderr", + "bin/dev/stdin", + "bin/dev/stdout", + "bin/dev/tcp/", + "bin/dev/udp/", + "bin/dev/zero", + "bin/dialog", + "bin/diff", + "bin/dig", + "bin/dmesg", + "bin/dmidecode", + "bin/dmsetup", + "bin/dnf", + "bin/docker", + "bin/dosbox", + "bin/dpkg", + "bin/du", + "bin/dvips", + "bin/easy_install", + "bin/eb", + "bin/echo", + "bin/ed", + "bin/efax", + "bin/emacs", + "bin/env", + "bin/eqn", + "bin/es", + "bin/esh", + "bin/etc/group", + "bin/etc/master.passwd", + "bin/etc/passwd", + "bin/etc/pwd.db", + "bin/etc/shadow", + "bin/etc/shells", + "bin/etc/spwd.db", + "bin/ex", + "bin/exiftool", + "bin/expand", + "bin/expect", + "bin/expr", + "bin/facter", + "bin/fetch", + "bin/file", + "bin/find", + "bin/finger", + "bin/fish", + "bin/flock", + "bin/fmt", + "bin/fold", + "bin/fping", + "bin/ftp", + "bin/gawk", + "bin/gcc", + "bin/gcore", + "bin/gdb", + "bin/gem", + "bin/genie", + "bin/genisoimage", + "bin/ghc", + "bin/ghci", + "bin/gimp", + "bin/ginsh", + "bin/git", + "bin/grc", + "bin/grep", + "bin/gtester", + "bin/gunzip", + "bin/gzexe", + "bin/gzip", + "bin/hd", + "bin/head", + "bin/hexdump", + "bin/highlight", + "bin/hping3", + "bin/iconv", + "bin/id", + "bin/iftop", + "bin/install", + "bin/ionice", + "bin/ip", + "bin/irb", + "bin/ispell", + "bin/jjs", + "bin/join", + "bin/journalctl", + "bin/jq", + "bin/jrunscript", + "bin/knife", + "bin/ksh", + "bin/ksshell", + "bin/latex", + "bin/ld", + "bin/ldconfig", + "bin/less", + "bin/lftp", + "bin/ln", + "bin/loginctl", + "bin/logsave", + "bin/look", + "bin/lp", + "bin/ls", + "bin/ltrace", + "bin/lua", + "bin/lualatex", + "bin/luatex", + "bin/lwp-download", + "bin/lwp-request", + "bin/lz", + "bin/lz4", + "bin/lz4c", + "bin/lz4cat", + "bin/lzcat", + "bin/lzcmp", + "bin/lzdiff", + "bin/lzegrep", + "bin/lzfgrep", + "bin/lzgrep", + "bin/lzless", + "bin/lzma", + "bin/lzmadec", + "bin/lzmainfo", + "bin/lzmore", + "bin/mail", + "bin/make", + "bin/man", + "bin/mawk", + "bin/mkfifo", + "bin/mknod", + "bin/more", + "bin/mosquitto", + "bin/mount", + "bin/msgattrib", + "bin/msgcat", + "bin/msgconv", + "bin/msgfilter", + "bin/msgmerge", + "bin/msguniq", + "bin/mtr", + "bin/mv", + "bin/mysql", + "bin/nano", + "bin/nasm", + "bin/nawk", + "bin/nc", + "bin/ncat", + "bin/neofetch", + "bin/nice", + "bin/nl", + "bin/nm", + "bin/nmap", + "bin/node", + "bin/nohup", + "bin/npm", + "bin/nroff", + "bin/nsenter", + "bin/octave", + "bin/od", + "bin/openssl", + "bin/openvpn", + "bin/openvt", + "bin/opkg", + "bin/paste", + "bin/pax", + "bin/pdb", + "bin/pdflatex", + "bin/pdftex", + "bin/pdksh", + "bin/perf", + 
"bin/perl", + "bin/pg", + "bin/php", + "bin/php-cgi", + "bin/php5", + "bin/php7", + "bin/pic", + "bin/pico", + "bin/pidstat", + "bin/pigz", + "bin/pip", + "bin/pkexec", + "bin/pkg", + "bin/pr", + "bin/printf", + "bin/proc/self/", + "bin/pry", + "bin/ps", + "bin/psed", + "bin/psftp", + "bin/psql", + "bin/ptx", + "bin/puppet", + "bin/pxz", + "bin/python", + "bin/python2", + "bin/python3", + "bin/rake", + "bin/rbash", + "bin/rc", + "bin/readelf", + "bin/red", + "bin/redcarpet", + "bin/restic", + "bin/rev", + "bin/rlogin", + "bin/rlwrap", + "bin/rpm", + "bin/rpmquery", + "bin/rsync", + "bin/ruby", + "bin/run-mailcap", + "bin/run-parts", + "bin/rview", + "bin/rvim", + "bin/sash", + "bin/sbin/capsh", + "bin/sbin/logsave", + "bin/sbin/service", + "bin/sbin/start-stop-daemon", + "bin/scp", + "bin/screen", + "bin/script", + "bin/sed", + "bin/service", + "bin/setarch", + "bin/sftp", + "bin/sg", + "bin/sh", + "bin/shuf", + "bin/sleep", + "bin/slsh", + "bin/smbclient", + "bin/snap", + "bin/socat", + "bin/soelim", + "bin/sort", + "bin/split", + "bin/sqlite3", + "bin/ss", + "bin/ssh", + "bin/ssh-keygen", + "bin/ssh-keyscan", + "bin/sshpass", + "bin/start-stop-daemon", + "bin/stdbuf", + "bin/strace", + "bin/strings", + "bin/su", + "bin/sysctl", + "bin/systemctl", + "bin/systemd-resolve", + "bin/tac", + "bin/tail", + "bin/tar", + "bin/task", + "bin/taskset", + "bin/tbl", + "bin/tclsh", + "bin/tcpdump", + "bin/tcsh", + "bin/tee", + "bin/telnet", + "bin/tex", + "bin/tftp", + "bin/tic", + "bin/time", + "bin/timedatectl", + "bin/timeout", + "bin/tmux", + "bin/top", + "bin/troff", + "bin/tshark", + "bin/ul", + "bin/uname", + "bin/uncompress", + "bin/unexpand", + "bin/uniq", + "bin/unlz4", + "bin/unlzma", + "bin/unpigz", + "bin/unrar", + "bin/unshare", + "bin/unxz", + "bin/unzip", + "bin/unzstd", + "bin/update-alternatives", + "bin/uudecode", + "bin/uuencode", + "bin/valgrind", + "bin/vi", + "bin/view", + "bin/vigr", + "bin/vim", + "bin/vimdiff", + "bin/vipw", + "bin/virsh", + "bin/volatility", + "bin/wall", + "bin/watch", + "bin/wc", + "bin/wget", + "bin/whiptail", + "bin/who", + "bin/whoami", + "bin/whois", + "bin/wireshark", + "bin/wish", + "bin/xargs", + "bin/xelatex", + "bin/xetex", + "bin/xmodmap", + "bin/xmore", + "bin/xpad", + "bin/xxd", + "bin/xz", + "bin/xzcat", + "bin/xzcmp", + "bin/xzdec", + "bin/xzdiff", + "bin/xzegrep", + "bin/xzfgrep", + "bin/xzgrep", + "bin/xzless", + "bin/xzmore", + "bin/yarn", + "bin/yelp", + "bin/yes", + "bin/yum", + "bin/zathura", + "bin/zip", + "bin/zipcloak", + "bin/zipcmp", + "bin/zipdetails", + "bin/zipgrep", + "bin/zipinfo", + "bin/zipmerge", + "bin/zipnote", + "bin/zipsplit", + "bin/ziptool", + "bin/zsh", + "bin/zsoelim", + "bin/zstd", + "bin/zstdcat", + "bin/zstdgrep", + "bin/zstdless", + "bin/zstdmt", + "bin/zypper" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase", + "cmdLine" + ] + }, + { + "id": "crs-932-171", + "name": "Remote Command Execution: Shellshock (CVE-2014-6271)", + "tags": { + "type": "command_injection", + "crs_id": "932171", + "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + 
} + ], + "regex": "^\\(\\s*\\)\\s+{", + "options": { + "case_sensitive": true, + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-932-180", + "name": "Restricted File Upload Attempt", + "tags": { + "type": "command_injection", + "crs_id": "932180", + "category": "attack_attempt", + "cwe": "706", + "capec": "1000/225/122/17/177", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x-filename" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x_filename" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x-file-name" + ] + } + ], + "list": [ + ".htaccess", + ".htdigest", + ".htpasswd", + "wp-config.php", + "config.yml", + "config_dev.yml", + "config_prod.yml", + "config_test.yml", + "parameters.yml", + "routing.yml", + "security.yml", + "services.yml", + "default.settings.php", + "settings.php", + "settings.local.php", + "local.xml", + ".env" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-933-111", + "name": "PHP Injection Attack: PHP Script File Upload Found", + "tags": { + "type": "unrestricted_file_upload", + "crs_id": "933111", + "category": "attack_attempt", + "cwe": "434", + "capec": "1000/225/122/17/650", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x-filename" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x_filename" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x.filename" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "x-file-name" + ] + } + ], + "regex": ".*\\.(?:php\\d*|phtml)\\..*$", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-933-130", + "name": "PHP Injection Attack: Global Variables Found", + "tags": { + "type": "php_code_injection", + "crs_id": "933130", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "$globals", + "$_cookie", + "$_env", + "$_files", + "$_get", + "$_post", + "$_request", + "$_server", + "$_session", + "$argc", + "$argv", + "$http_\\u200bresponse_\\u200bheader", + "$php_\\u200berrormsg", + "$http_cookie_vars", + "$http_env_vars", + "$http_get_vars", + "$http_post_files", + "$http_post_vars", + "$http_raw_post_data", + "$http_request_vars", + "$http_server_vars" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-933-131", + "name": "PHP Injection Attack: HTTP Headers Values Found", + "tags": { + "type": "php_code_injection", + "crs_id": "933131", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": 
"server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:HTTP_(?:ACCEPT(?:_(?:ENCODING|LANGUAGE|CHARSET))?|(?:X_FORWARDED_FO|REFERE)R|(?:USER_AGEN|HOS)T|CONNECTION|KEEP_ALIVE)|PATH_(?:TRANSLATED|INFO)|ORIG_PATH_INFO|QUERY_STRING|REQUEST_URI|AUTH_TYPE)", + "options": { + "case_sensitive": true, + "min_length": 9 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-933-140", + "name": "PHP Injection Attack: I/O Stream Found", + "tags": { + "type": "php_code_injection", + "crs_id": "933140", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "php://(?:std(?:in|out|err)|(?:in|out)put|fd|memory|temp|filter)", + "options": { + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-933-150", + "name": "PHP Injection Attack: High-Risk PHP Function Name Found", + "tags": { + "type": "php_code_injection", + "crs_id": "933150", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "__halt_compiler", + "apache_child_terminate", + "base64_decode", + "bzdecompress", + "call_user_func", + "call_user_func_array", + "call_user_method", + "call_user_method_array", + "convert_uudecode", + "file_get_contents", + "file_put_contents", + "fsockopen", + "get_class_methods", + "get_class_vars", + "get_defined_constants", + "get_defined_functions", + "get_defined_vars", + "gzdecode", + "gzinflate", + "gzuncompress", + "include_once", + "invokeargs", + "pcntl_exec", + "pcntl_fork", + "pfsockopen", + "posix_getcwd", + "posix_getpwuid", + "posix_getuid", + "posix_uname", + "reflectionfunction", + "require_once", + "shell_exec", + "str_rot13", + "sys_get_temp_dir", + "wp_remote_fopen", + "wp_remote_get", + "wp_remote_head", + "wp_remote_post", + "wp_remote_request", + "wp_safe_remote_get", + "wp_safe_remote_head", + "wp_safe_remote_post", + "wp_safe_remote_request", + "zlib_decode" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-933-160", + "name": "PHP Injection Attack: High-Risk PHP Function Call Found", + "tags": { + "type": "php_code_injection", + "crs_id": "933160", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/225/122/17/650" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": 
"\\b(?:s(?:e(?:t(?:_(?:e(?:xception|rror)_handler|magic_quotes_runtime|include_path)|defaultstub)|ssion_s(?:et_save_handler|tart))|qlite_(?:(?:(?:unbuffered|single|array)_)?query|create_(?:aggregate|function)|p?open|exec)|tr(?:eam_(?:context_create|socket_client)|ipc?slashes|rev)|implexml_load_(?:string|file)|ocket_c(?:onnect|reate)|h(?:ow_sourc|a1_fil)e|pl_autoload_register|ystem)|p(?:r(?:eg_(?:replace(?:_callback(?:_array)?)?|match(?:_all)?|split)|oc_(?:(?:terminat|clos|nic)e|get_status|open)|int_r)|o(?:six_(?:get(?:(?:e[gu]|g)id|login|pwnam)|mk(?:fifo|nod)|ttyname|kill)|pen)|hp(?:_(?:strip_whitespac|unam)e|version|info)|g_(?:(?:execut|prepar)e|connect|query)|a(?:rse_(?:ini_file|str)|ssthru)|utenv)|r(?:unkit_(?:function_(?:re(?:defin|nam)e|copy|add)|method_(?:re(?:defin|nam)e|copy|add)|constant_(?:redefine|add))|e(?:(?:gister_(?:shutdown|tick)|name)_function|ad(?:(?:gz)?file|_exif_data|dir))|awurl(?:de|en)code)|i(?:mage(?:createfrom(?:(?:jpe|pn)g|x[bp]m|wbmp|gif)|(?:jpe|pn)g|g(?:d2?|if)|2?wbmp|xbm)|s_(?:(?:(?:execut|write?|read)ab|fi)le|dir)|ni_(?:get(?:_all)?|set)|terator_apply|ptcembed)|g(?:et(?:_(?:c(?:urrent_use|fg_va)r|meta_tags)|my(?:[gpu]id|inode)|(?:lastmo|cw)d|imagesize|env)|z(?:(?:(?:defla|wri)t|encod|fil)e|compress|open|read)|lob)|a(?:rray_(?:u(?:intersect(?:_u?assoc)?|diff(?:_u?assoc)?)|intersect_u(?:assoc|key)|diff_u(?:assoc|key)|filter|reduce|map)|ssert(?:_options)?|tob)|h(?:tml(?:specialchars(?:_decode)?|_entity_decode|entities)|(?:ash(?:_(?:update|hmac))?|ighlight)_file|e(?:ader_register_callback|x2bin))|f(?:i(?:le(?:(?:[acm]tim|inod)e|(?:_exist|perm)s|group)?|nfo_open)|tp_(?:nb_(?:ge|pu)|connec|ge|pu)t|(?:unction_exis|pu)ts|write|open)|o(?:b_(?:get_(?:c(?:ontents|lean)|flush)|end_(?:clean|flush)|clean|flush|start)|dbc_(?:result(?:_all)?|exec(?:ute)?|connect)|pendir)|m(?:b_(?:ereg(?:_(?:replace(?:_callback)?|match)|i(?:_replace)?)?|parse_str)|(?:ove_uploaded|d5)_file|ethod_exists|ysql_query|kdir)|e(?:x(?:if_(?:t(?:humbnail|agname)|imagetype|read_data)|ec)|scapeshell(?:arg|cmd)|rror_reporting|val)|c(?:url_(?:file_create|exec|init)|onvert_uuencode|reate_function|hr)|u(?:n(?:serialize|pack)|rl(?:de|en)code|[ak]?sort)|b(?:(?:son_(?:de|en)|ase64_en)code|zopen|toa)|(?:json_(?:de|en)cod|debug_backtrac|tmpfil)e|var_dump)(?:\\s|/\\*.*\\*/|//.*|#.*|\\\"|')*\\((?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?,)*(?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?)?\\)", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-933-170", + "name": "PHP Injection Attack: Serialized Object Injection", + "tags": { + "type": "php_code_injection", + "crs_id": "933170", + "category": "attack_attempt", + "cwe": "502", + "capec": "1000/152/586", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": 
"graphql.server.resolver" + } + ], + "regex": "[oOcC]:\\d+:\\\".+?\\\":\\d+:{[\\W\\w]*}", + "options": { + "case_sensitive": true, + "min_length": 12 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-933-200", + "name": "PHP Injection Attack: Wrapper scheme detected", + "tags": { + "type": "php_code_injection", + "crs_id": "933200", + "category": "attack_attempt", + "cwe": "502", + "capec": "1000/152/586" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:(?:bzip|ssh)2|z(?:lib|ip)|(?:ph|r)ar|expect|glob|ogg)://", + "options": { + "case_sensitive": true, + "min_length": 6 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-934-100", + "name": "Node.js Injection Attack 1/2", + "tags": { + "type": "js_code_injection", + "crs_id": "934100", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)", + "options": { + "case_sensitive": true, + "min_length": 3 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-934-101", + "name": "Node.js Injection Attack 2/2", + "tags": { + "type": "js_code_injection", + "crs_id": "934101", + "category": "attack_attempt", + "confidence": "1", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:w(?:atch|rite)|(?:spaw|ope)n|exists|close|fork|read)\\s*\\(", + "options": { + "case_sensitive": true, + 
"min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-941-110", + "name": "XSS Filter - Category 1: Script Tag Vector", + "tags": { + "type": "xss", + "crs_id": "941110", + "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "]*>[\\s\\S]*?", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, + { + "id": "crs-941-120", + "name": "XSS Filter - Category 2: Event Handler Vector", + "tags": { + "type": "xss", + "crs_id": "941120", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bon(?:d(?:r(?:ag(?:en(?:ter|d)|leave|start|over)?|op)|urationchange|blclick)|s(?:e(?:ek(?:ing|ed)|arch|lect)|u(?:spend|bmit)|talled|croll|how)|m(?:ouse(?:(?:lea|mo)ve|o(?:ver|ut)|enter|down|up)|essage)|p(?:a(?:ge(?:hide|show)|(?:st|us)e)|lay(?:ing)?|rogress|aste|ointer(?:cancel|down|enter|leave|move|out|over|rawupdate|up))|c(?:anplay(?:through)?|o(?:ntextmenu|py)|hange|lick|ut)|a(?:nimation(?:iteration|start|end)|(?:fterprin|bor)t|uxclick|fterscriptexecute)|t(?:o(?:uch(?:cancel|start|move|end)|ggle)|imeupdate)|f(?:ullscreen(?:change|error)|ocus(?:out|in)?|inish)|(?:(?:volume|hash)chang|o(?:ff|n)lin)e|b(?:efore(?:unload|print)|lur)|load(?:ed(?:meta)?data|start|end)?|r(?:es(?:ize|et)|atechange)|key(?:press|down|up)|w(?:aiting|heel)|in(?:valid|put)|e(?:nded|rror)|unload)[\\s\\x0B\\x09\\x0C\\x3B\\x2C\\x28\\x3B]*?=[^=]", + "options": { + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, + { + "id": "crs-941-140", + "name": "XSS Filter - Category 4: Javascript URI Vector", + "tags": { + "type": "xss", + "crs_id": "941140", + "category": "attack_attempt", + "cwe": "84", + "capec": "1000/152/242/63/591/244", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" 
+ }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "[a-z]+=(?:[^:=]+:.+;)*?[^:=]+:url\\(javascript", + "options": { + "min_length": 18 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, + { + "id": "crs-941-170", + "name": "NoScript XSS InjectionChecker: Attribute Injection", + "tags": { + "type": "xss", + "crs_id": "941170", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:\\W|^)(?:javascript:(?:[\\s\\S]+[=\\x5c\\(\\[\\.<]|[\\s\\S]*?(?:\\bname\\b|\\x5c[ux]\\d)))|@\\W*?i\\W*?m\\W*?p\\W*?o\\W*?r\\W*?t\\W*?(?:/\\*[\\s\\S]*?)?(?:[\\\"']|\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\()|[^-]*?-\\W*?m\\W*?o\\W*?z\\W*?-\\W*?b\\W*?i\\W*?n\\W*?d\\W*?i\\W*?n\\W*?g[^:]*?:\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\(", + "options": { + "min_length": 6 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, + { + "id": "crs-941-180", + "name": "Node-Validator Deny List Keywords", + "tags": { + "type": "xss", + "crs_id": "941180", + "category": "attack_attempt", + "cwe": "79", + "capec": "1000/152/242/63/591" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "document.cookie", + "document.write", + ".parentnode", + ".innerhtml", + "window.location", + "-moz-binding" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "removeNulls", + "lowercase" + ] + }, + { + "id": "crs-941-200", + "name": "IE XSS Filters - Attack Detected via vmlframe tag", + "tags": { + "type": "xss", + "crs_id": "941200", + "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:<.*[:]?vmlframe.*?[\\s/+]*?src[\\s/+]*=)", + "options": { + "case_sensitive": true, + "min_length": 13 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-941-210", + "name": "IE XSS Filters - Obfuscated Attack Detected via javascript injection", + "tags": { + "type": "xss", + "crs_id": "941210", + "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": 
"server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:(?:j|&#x?0*(?:74|4A|106|6A);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", + "options": { + "case_sensitive": true, + "min_length": 12 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-941-220", + "name": "IE XSS Filters - Obfuscated Attack Detected via vbscript injection", + "tags": { + "type": "xss", + "crs_id": "941220", + "category": "attack_attempt", + "cwe": "80", + "capec": "1000/152/242/63/591", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:b|&#x?0*(?:66|42|98|62);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", + "options": { + "case_sensitive": true, + "min_length": 10 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-941-230", + "name": "IE XSS Filters - Attack Detected via embed tag", + "tags": { + "type": "xss", + "crs_id": "941230", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "]", + "options": { + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-941-300", + "name": "IE XSS Filters - Attack Detected via object 
tag", + "tags": { + "type": "xss", + "crs_id": "941300", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": ")|<.*\\+AD4-", + "options": { + "case_sensitive": true, + "min_length": 6 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-941-360", + "name": "JSFuck / Hieroglyphy obfuscation detected", + "tags": { + "type": "xss", + "crs_id": "941360", + "category": "attack_attempt", + "cwe": "87", + "capec": "1000/152/242/63/591/199" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "![!+ ]\\[\\]", + "options": { + "case_sensitive": true, + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-941-390", + "name": "Javascript method detected", + "tags": { + "type": "xss", + "crs_id": "941390", + "category": "attack_attempt", + "confidence": "1", + "cwe": "79", + "capec": "1000/152/242/63/591" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?i:eval|settimeout|setinterval|new\\s+Function|alert|prompt)[\\s+]*\\([^\\)]", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-100", + "name": "SQL Injection Attack Detected via libinjection", + "tags": { + "type": "sql_injection", + "crs_id": "942100", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ] + }, + "operator": "is_sqli" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "crs-942-160", + "name": "Detects blind sqli tests using sleep() or benchmark()", + "tags": { + "type": "sql_injection", + "crs_id": "942160", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:sleep\\(\\s*?\\d*?\\s*?\\)|benchmark\\(.*?\\,.*?\\))", + 
"options": { + "case_sensitive": true, + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-240", + "name": "Detects MySQL charset switch and MSSQL DoS attempts", + "tags": { + "type": "sql_injection", + "crs_id": "942240", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:[\\\"'`](?:;*?\\s*?waitfor\\s+(?:delay|time)\\s+[\\\"'`]|;.*?:\\s*?goto)|alter\\s*?\\w+.*?cha(?:racte)?r\\s+set\\s+\\w+)", + "options": { + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-250", + "name": "Detects MATCH AGAINST, MERGE and EXECUTE IMMEDIATE injections", + "tags": { + "type": "sql_injection", + "crs_id": "942250", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:merge.*?using\\s*?\\(|execute\\s*?immediate\\s*?[\\\"'`]|match\\s*?[\\w(?:),+-]+\\s*?against\\s*?\\()", + "options": { + "case_sensitive": true, + "min_length": 11 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-270", + "name": "Basic SQL injection", + "tags": { + "type": "sql_injection", + "crs_id": "942270", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "union.*?select.*?from", + "options": { + "min_length": 15 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-280", + "name": "SQL Injection with delay functions", + "tags": { + "type": "sql_injection", + "crs_id": "942280", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/7", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:;\\s*?shutdown\\s*?(?:[#;{]|\\/\\*|--)|waitfor\\s*?delay\\s?[\\\"'`]+\\s?\\d|select\\s*?pg_sleep)", + "options": { + "min_length": 10 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-290", + "name": "Finds basic MongoDB SQL injection attempts", + "tags": { + "type": "nosql_injection", + "crs_id": "942290", + "category": "attack_attempt", + "cwe": "943", + "capec": 
"1000/152/248/676" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:(?:\\[?\\$(?:(?:s(?:lic|iz)|wher)e|e(?:lemMatch|xists|q)|n(?:o[rt]|in?|e)|l(?:ike|te?)|t(?:ext|ype)|a(?:ll|nd)|jsonSchema|between|regex|x?or|div|mod)\\]?)\\b)", + "options": { + "case_sensitive": true, + "min_length": 3 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "keys_only" + ] + }, + { + "id": "crs-942-360", + "name": "Detects concatenated basic SQL injection and SQLLFI attempts", + "tags": { + "type": "sql_injection", + "crs_id": "942360", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66/470" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:^[\\W\\d]+\\s*?(?:alter\\s*(?:a(?:(?:pplication\\s*rol|ggregat)e|s(?:ymmetric\\s*ke|sembl)y|u(?:thorization|dit)|vailability\\s*group)|c(?:r(?:yptographic\\s*provider|edential)|o(?:l(?:latio|um)|nversio)n|ertificate|luster)|s(?:e(?:rv(?:ice|er)|curity|quence|ssion|arch)|y(?:mmetric\\s*key|nonym)|togroup|chema)|m(?:a(?:s(?:ter\\s*key|k)|terialized)|e(?:ssage\\s*type|thod)|odule)|l(?:o(?:g(?:file\\s*group|in)|ckdown)|a(?:ngua|r)ge|ibrary)|t(?:(?:abl(?:espac)?|yp)e|r(?:igger|usted)|hreshold|ext)|p(?:a(?:rtition|ckage)|ro(?:cedur|fil)e|ermission)|d(?:i(?:mension|skgroup)|atabase|efault|omain)|r(?:o(?:l(?:lback|e)|ute)|e(?:sourc|mot)e)|f(?:u(?:lltext|nction)|lashback|oreign)|e(?:xte(?:nsion|rnal)|(?:ndpoi|ve)nt)|in(?:dex(?:type)?|memory|stance)|b(?:roker\\s*priority|ufferpool)|x(?:ml\\s*schema|srobject)|w(?:ork(?:load)?|rapper)|hi(?:erarchy|stogram)|o(?:perator|utline)|(?:nicknam|queu)e|us(?:age|er)|group|java|view)|union\\s*(?:(?:distin|sele)ct|all))\\b|\\b(?:(?:(?:trunc|cre|upd)at|renam)e|(?:inser|selec)t|de(?:lete|sc)|alter|load)\\s+(?:group_concat|load_file|char)\\b\\s*\\(?|[\\s(]load_file\\s*?\\(|[\\\"'`]\\s+regexp\\W)", + "options": { + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-942-500", + "name": "MySQL in-line comment detected", + "tags": { + "type": "sql_injection", + "crs_id": "942500", + "category": "attack_attempt", + "cwe": "89", + "capec": "1000/152/248/66" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:/\\*[!+](?:[\\w\\s=_\\-(?:)]+)?\\*/)", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-943-100", + "name": "Possible Session Fixation Attack: Setting Cookie Values in HTML", + "tags": { + "type": "http_protocol_violation", + "crs_id": "943100", + "category": "attack_attempt", + "cwe": "384", + "capec": 
"1000/225/21/593/61", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i:\\.cookie\\b.*?;\\W*?(?:expires|domain)\\W*?=|\\bhttp-equiv\\W+set-cookie\\b)", + "options": { + "case_sensitive": true, + "min_length": 15 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-944-100", + "name": "Remote Command Execution: Suspicious Java class detected", + "tags": { + "type": "java_code_injection", + "crs_id": "944100", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "java\\.lang\\.(?:runtime|processbuilder)", + "options": { + "case_sensitive": true, + "min_length": 17 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-944-110", + "name": "Remote Command Execution: Java process spawn (CVE-2017-9805)", + "tags": { + "type": "java_code_injection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:unmarshaller|base64data|java\\.).*(?:runtime|processbuilder)", + "options": { + "case_sensitive": false, + "min_length": 13 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "crs-944-130", + "name": "Suspicious Java class detected", + "tags": { + "type": "java_code_injection", + "crs_id": "944130", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "list": [ + "com.opensymphony.xwork2", + "com.sun.org.apache", + "java.io.bufferedinputstream", + "java.io.bufferedreader", + "java.io.bytearrayinputstream", + "java.io.bytearrayoutputstream", + "java.io.chararrayreader", + "java.io.datainputstream", + "java.io.file", + "java.io.fileoutputstream", + "java.io.filepermission", + "java.io.filewriter", + "java.io.filterinputstream", + "java.io.filteroutputstream", + "java.io.filterreader", + "java.io.inputstream", + "java.io.inputstreamreader", + "java.io.linenumberreader", + "java.io.objectoutputstream", + 
"java.io.outputstream", + "java.io.pipedoutputstream", + "java.io.pipedreader", + "java.io.printstream", + "java.io.pushbackinputstream", + "java.io.reader", + "java.io.stringreader", + "java.lang.class", + "java.lang.integer", + "java.lang.number", + "java.lang.object", + "java.lang.process", + "java.lang.reflect", + "java.lang.runtime", + "java.lang.string", + "java.lang.stringbuilder", + "java.lang.system", + "javax.script.scriptenginemanager", + "org.apache.commons", + "org.apache.struts", + "org.apache.struts2", + "org.omg.corba", + "java.beans.xmldecode" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "crs-944-260", + "name": "Remote Command Execution: Malicious class-loading payload", + "tags": { + "type": "java_code_injection", + "crs_id": "944260", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:class\\.module\\.classLoader\\.resources\\.context\\.parent\\.pipeline|springframework\\.context\\.support\\.FileSystemXmlApplicationContext)", + "options": { + "case_sensitive": true, + "min_length": 58 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-000-001", + "name": "Look for Cassandra injections", + "tags": { + "type": "nosql_injection", + "category": "attack_attempt", + "cwe": "943", + "capec": "1000/152/248/676" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + }, + { + "address": "server.request.headers.no_cookies" + } + ], + "regex": "\\ballow\\s+filtering\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeComments" + ] + }, + { + "id": "dog-000-002", + "name": "OGNL - Look for formatting injection patterns", + "tags": { + "type": "java_code_injection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + }, + { + "address": "server.request.headers.no_cookies" + } + ], + "regex": "[#%$]{(?:[^}]+[^\\w\\s}\\-_][^}]+|\\d+-\\d+)}", + "options": { + "case_sensitive": true + } + } + } + ], + "transformers": [] + }, + { + "id": "dog-000-003", + "name": "OGNL - Detect OGNL exploitation primitives", + "tags": { + "type": "java_code_injection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + 
"address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "[@#]ognl", + "options": { + "case_sensitive": true + } + } + } + ], + "transformers": [] + }, + { + "id": "dog-000-004", + "name": "Spring4Shell - Attempts to exploit the Spring4shell vulnerability", + "tags": { + "type": "exploit_detection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.body" + } + ], + "regex": "^class\\.module\\.classLoader\\.", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [ + "keys_only" + ] + }, + { + "id": "dog-000-005", + "name": "Node.js: Prototype pollution through __proto__", + "tags": { + "type": "js_code_injection", + "category": "attack_attempt", + "cwe": "1321", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + } + ], + "regex": "^__proto__$" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "keys_only" + ] + }, + { + "id": "dog-000-006", + "name": "Node.js: Prototype pollution through constructor.prototype", + "tags": { + "type": "js_code_injection", + "category": "attack_attempt", + "cwe": "1321", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + } + ], + "regex": "^constructor$" + }, + "operator": "match_regex" + }, + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + } + ], + "regex": "^prototype$" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "keys_only" + ] + }, + { + "id": "dog-000-007", + "name": "Server side template injection: Velocity & Freemarker", + "tags": { + "type": "java_code_injection", + "category": "attack_attempt", + "cwe": "1336", + "capec": "1000/152/242/19", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "#(?:set|foreach|macro|parse|if)\\(.*\\)|<#assign.*>" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-001", + "name": "BurpCollaborator OOB domain", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "tool_name": "BurpCollaborator", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + 
"regex": "\\b(?:burpcollaborator\\.net|oastify\\.com)\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-002", + "name": "Qualys OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Qualys", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bqualysperiscope\\.com\\b|\\.oscomm\\." + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-003", + "name": "Probely OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Probely", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bprbly\\.win\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-004", + "name": "Known malicious out-of-band interaction domain", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:webhook\\.site|\\.canarytokens\\.com|vii\\.one|act1on3\\.ru|gdsburp\\.com|arcticwolf\\.net|oob\\.li|htbiw\\.com|h4\\.vc|mochan\\.cloud|imshopping\\.com|bootstrapnodejs\\.com|mooo-ng\\.com|securitytrails\\.com|canyouhackit\\.io|7bae\\.xyz)\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-005", + "name": "Known suspicious out-of-band interaction domain", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:\\.ngrok\\.io|requestbin\\.com|requestbin\\.net)\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-006", + "name": "Rapid7 OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Rapid7", + "cwe": "200", + "capec": 
"1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bappspidered\\.rapid7\\." + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-007", + "name": "Interact.sh OOB domain", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "tool_name": "interact.sh", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:interact\\.sh|oast\\.(?:pro|live|site|online|fun|me)|indusfacefinder\\.in|where\\.land|syhunt\\.net|tssrt\\.de|boardofcyber\\.io|assetnote-callback\\.com|praetorianlabs\\.dev|netspi\\.sh)\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-008", + "name": "Netsparker OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Netsparker", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)?r87(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)(?:me|com)\\b", + "options": { + "case_sensitive": false, + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-009", + "name": "WhiteHat Security OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "WhiteHatSecurity", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bwhsec(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)us\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-010", + "name": "Nessus OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Nessus", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": 
"server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\b\\.nessus\\.org\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-011", + "name": "Watchtowr OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Watchtowr", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bwatchtowr\\.com\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-012", + "name": "AppCheck NG OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "AppCheckNG", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\bptst\\.io\\b", + "options": { + "case_sensitive": false, + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-931-001", + "name": "RFI: URL Payload to well known RFI target", + "tags": { + "type": "rfi", + "category": "attack_attempt", + "cwe": "98", + "capec": "1000/152/175/253/193", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "^(?i:file|ftps?|https?).*/rfiinc\\.txt\\?+$", + "options": { + "case_sensitive": true, + "min_length": 17 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-932-100", + "name": "Shell spawn executing network command", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": 
"(?:(?:['\"\\x60({|;&]|(?:^|['\"\\x60({|;&])(?:cmd(?:\\.exe)?\\s+(?:/\\w(?::\\w+)?\\s+)*))(?:ping|curl|wget|telnet)|\\bnslookup)[\\s,]", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-934-001", + "name": "XXE - XML file loads external entity", + "tags": { + "type": "xxe", + "category": "attack_attempt", + "cwe": "91", + "capec": "1000/152/248/250", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.body" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?:<\\?xml[^>]*>.*)]+SYSTEM\\s+[^>]+>", + "options": { + "case_sensitive": false, + "min_length": 24 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-941-001", + "name": "XSS in source property", + "tags": { + "type": "xss", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "referer" + ] + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "<(?:iframe|esi:include)(?:(?:\\s|/)*\\w+=[\"'\\w]+)*(?:\\s|/)*src(?:doc)?=[\"']?(?:data:|javascript:|http:|dns:|//)[^\\s'\"]+['\"]?", + "options": { + "min_length": 14 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls", + "urlDecodeUni" + ] + }, + { + "id": "dog-942-001", + "name": "Blind XSS callback domains", + "tags": { + "type": "xss", + "category": "attack_attempt", + "cwe": "83", + "capec": "1000/152/242/63/591/243", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "https?:\\/\\/(?:.*\\.)?(?:bxss\\.(?:in|me)|xss\\.ht|js\\.rip)", + "options": { + "case_sensitive": false + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "nfd-000-001", + "name": "Detect common directory discovery scans", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "phrase_match", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "list": [ + "/wordpress/", + "/etc/", + "/login.php", + "/install.php", + "/administrator", + "/admin.php", + "/wp-config", + "/phpmyadmin", + "/fckeditor", + "/mysql", + "/manager/html", + ".htaccess", + "/config.php", + "/configuration", + "/cgi-bin/php", 
+ "/search.php", + "/tinymce", + "/tiny_mce", + "/settings.php", + "../../..", + "/install/", + "/download.php", + "/webdav", + "/forum.php", + "/user.php", + "/style.php", + "/jmx-console", + "/modules.php", + "/include.php", + "/default.asp", + "/help.php", + "/database.yml", + "/database.yml.pgsql", + "/database.yml.sqlite3", + "/database.yml.sqlite", + "/database.yml.mysql", + ".%2e/", + "/view.php", + "/header.php", + "/search.asp", + "%5c%5c", + "/server/php/", + "/invoker/jmxinvokerservlet", + "/phpmyadmin/index.php", + "/data/admin/allowurl.txt", + "/verify.php", + "/misc/ajax.js", + "/.idea", + "/module.php", + "/backup.rar", + "/backup.tar", + "/backup.zip", + "/backup.7z", + "/backup.gz", + "/backup.tgz", + "/backup.tar.gz", + "waitfor%20delay", + "/calendar.php", + "/news.php", + "/dompdf.php", + "))))))))))))))))", + "/web.config", + "tree.php", + "/cgi-bin-sdb/printenv", + "/comments.php", + "/detail.asp", + "/license.txt", + "/admin.asp", + "/auth.php", + "/list.php", + "/content.php", + "/mod.php", + "/mini.php", + "/install.pgsql", + "/install.mysql", + "/install.sqlite", + "/install.sqlite3", + "/install.txt", + "/install.md", + "/doku.php", + "/main.asp", + "/myadmin", + "/force-download.php", + "/iisprotect/admin", + "/.gitignore", + "/print.php", + "/common.php", + "/mainfile.php", + "/functions.php", + "/scripts/setup.php", + "/faq.php", + "/op/op.login.php", + "/home.php", + "/includes/hnmain.inc.php3", + "/preview.php", + "/dump.rar", + "/dump.tar", + "/dump.zip", + "/dump.7z", + "/dump.gz", + "/dump.tgz", + "/dump.tar.gz", + "/thumbnail.php", + "/sendcard.php", + "/global.asax", + "/directory.php", + "/footer.php", + "/error.asp", + "/forum.asp", + "/save.php", + "/htmlsax3.php", + "/adm/krgourl.php", + "/includes/converter.inc.php", + "/nucleus/libs/pluginadmin.php", + "/base_qry_common.php", + "/fileadmin", + "/bitrix/admin/", + "/adm.php", + "/util/barcode.php", + "/action.php", + "/rss.asp", + "/downloads.php", + "/page.php", + "/snarf_ajax.php", + "/fck/editor", + "/sendmail.php", + "/detail.php", + "/iframe.php", + "/swfupload.swf", + "/jenkins/login", + "/phpmyadmin/main.php", + "/phpmyadmin/scripts/setup.php", + "/user/index.php", + "/checkout.php", + "/process.php", + "/ks_inc/ajax.js", + "/export.php", + "/register.php", + "/cart.php", + "/console.php", + "/friend.php", + "/readmsg.php", + "/install.asp", + "/dagent/downloadreport.asp", + "/system/index.php", + "/core/changelog.txt", + "/js/util.js", + "/interna.php", + "/gallery.php", + "/links.php", + "/data/admin/ver.txt", + "/language/zh-cn.xml", + "/productdetails.asp", + "/admin/template/article_more/config.htm", + "/components/com_moofaq/includes/file_includer.php", + "/licence.txt", + "/rss.xsl", + "/vtigerservice.php", + "/mysql/main.php", + "/passwiki.php", + "/scr/soustab.php", + "/global.php", + "/email.php", + "/user.asp", + "/msd", + "/products.php", + "/cultbooking.php", + "/cron.php", + "/static/js/admincp.js", + "/comment.php", + "/maintainers", + "/modules/plain/adminpart/addplain.php", + "/wp-content/plugins/ungallery/source_vuln.php", + "/upgrade.txt", + "/category.php", + "/index_logged.php", + "/members.asp", + "/script/html.js", + "/images/ad.js", + "/awstats/awstats.pl", + "/includes/esqueletos/skel_null.php", + "/modules/profile/user.php", + "/window_top.php", + "/openbrowser.php", + "/thread.php", + "tinfoil_xss", + "/includes/include.php", + "/urheber.php", + "/header.inc.php", + "/mysqldumper", + "/display.php", + "/website.php", + "/stats.php", + 
"/assets/plugins/mp3_id/mp3_id.php", + "/siteminderagent/forms/smpwservices.fcc" + ] + } + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "nfd-000-002", + "name": "Detect failed attempt to fetch readme files", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "readme\\.[\\.a-z0-9]+$", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-003", + "name": "Detect failed attempt to fetch Java EE resource files", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "^(?:.*web\\-inf)(?:.*web\\.xml).*$", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-004", + "name": "Detect failed attempt to fetch code files", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "\\.(java|pyc?|rb|class)\\b", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-005", + "name": "Detect failed attempt to fetch source code archives", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "\\.(sql|log|ndb|gz|zip|tar\\.gz|tar|regVV|reg|conf|bz2|ini|db|war|bat|inc|btr|server|ds|conf|config|admin|master|sln|bak)\\b(?:[^.]|$)", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-006", + "name": "Detect failed attempt to fetch sensitive files", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": 
"\\.(cgi|bat|dll|exe|key|cert|crt|pem|der|pkcs|pkcs|pkcs[0-9]*|nsf|jsa|war|java|class|vb|vba|so|git|svn|hg|cvs)([^a-zA-Z0-9_]|$)", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-007", + "name": "Detect failed attempt to fetch archives", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "/[\\d\\-_]*\\.(rar|tar|zip|7z|gz|tgz|tar.gz)", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-008", + "name": "Detect failed attempt to trigger incorrect application behavior", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "(/(administrator/components/com.*\\.php|response\\.write\\(.+\\))|select\\(.+\\)from|\\(.*sleep\\(.+\\)|(%[a-zA-Z0-9]{2}[a-zA-Z]{0,1})+\\))", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-009", + "name": "Detect failed attempt to leak the structure of the application", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "/(login\\.rol|LICENSE|[\\w-]+\\.(plx|pwd))$", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "nfd-000-010", + "name": "Detect failed attempts to find API documentation", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "(?:/swagger\\b|/api[-/]docs?\\b)", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, + { + "id": "sqr-000-001", + "name": "SSRF: Try to access the credential manager of the main cloud services", + "tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + 
"address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i)^\\W*((http|ftp)s?://)?\\W*((::f{4}:)?(169|(0x)?0*a9|0+251)\\.?(254|(0x)?0*fe|0+376)[0-9a-fx\\.:]+|metadata\\.google\\.internal|metadata\\.goog)\\W*/", + "options": { + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "sqr-000-002", + "name": "Server-side Javascript injection: Try to detect obvious JS injection", + "tags": { + "type": "js_code_injection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "require\\(['\"][\\w\\.]+['\"]\\)|process\\.\\w+\\([\\w\\.]*\\)|\\.toString\\(\\)", + "options": { + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [ + "removeNulls" + ] + }, + { + "id": "sqr-000-008", + "name": "Windows: Detect attempts to exfiltrate .ini files", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i)[&|]\\s*type\\s+%\\w+%\\\\+\\w+\\.ini\\s*[&|]" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "sqr-000-009", + "name": "Linux: Detect attempts to exfiltrate passwd files", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i)[&|]\\s*cat\\s*\\/etc\\/[\\w\\.\\/]*passwd\\s*[&|]" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "cmdLine" + ] + }, + { + "id": "sqr-000-010", + "name": "Windows: Detect attempts to timeout a shell", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "cwe": "78", + "capec": "1000/152/248/88", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(?i)[&|]\\s*timeout\\s+/t\\s+\\d+\\s*[&|]" + }, + "operator": "match_regex" + } + ], + 
"transformers": [] + }, + { + "id": "sqr-000-011", + "name": "SSRF: Try to access internal OMI service (CVE-2021-38647)", + "tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "http(s?):\\/\\/([A-Za-z0-9\\.\\-\\_]+|\\[[A-Fa-f0-9\\:]+\\]|):5986\\/wsman", + "options": { + "min_length": 4 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "sqr-000-012", + "name": "SSRF: Detect SSRF attempt on internal service", + "tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "^(jar:)?(http|https):\\/\\/([0-9oq]{1,5}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|[0-9]{1,10})(:[0-9]{1,5})?(\\/[^:@]*)?$" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "sqr-000-013", + "name": "SSRF: Detect SSRF attempts using IPv6 or octal/hexdecimal obfuscation", + "tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "^(jar:)?(http|https):\\/\\/((\\[)?[:0-9a-f\\.x]{2,}(\\])?)(:[0-9]{1,5})?(\\/[^:@]*)?$" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "sqr-000-014", + "name": "SSRF: Detect SSRF domain redirection bypass", + "tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "(http|https):\\/\\/(?:.*\\.)?(?:burpcollaborator\\.net|localtest\\.me|mail\\.ebc\\.apple\\.com|bugbounty\\.dod\\.network|.*\\.[nx]ip\\.io|oastify\\.com|oast\\.(?:pro|live|site|online|fun|me)|sslip\\.io|requestbin\\.com|requestbin\\.net|hookbin\\.com|webhook\\.site|canarytokens\\.com|interact\\.sh|ngrok\\.io|bugbounty\\.click|prbly\\.win|qualysperiscope\\.com|vii\\.one|act1on3\\.ru)" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "sqr-000-015", + "name": "SSRF: Detect SSRF attempt using non HTTP protocol", + 
"tags": { + "type": "ssrf", + "category": "attack_attempt", + "cwe": "918", + "capec": "1000/225/115/664", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "^(jar:)?((file|netdoc):\\/\\/[\\\\\\/]+|(dict|gopher|ldap|sftp|tftp):\\/\\/.*:[0-9]{1,5})" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "lowercase" + ] + }, + { + "id": "sqr-000-017", + "name": "Log4shell: Attempt to exploit log4j CVE-2021-44228", + "tags": { + "type": "exploit_detection", + "category": "attack_attempt", + "cwe": "94", + "capec": "1000/152/242", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + }, + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "regex": "\\${[^j]*j[^n]*n[^d]*d[^i]*i[^:]*:[^}]*}" + }, + "operator": "match_regex" + } + ], + "transformers": [ + "unicode_normalize" + ] + }, + { + "id": "ua0-600-0xx", + "name": "Joomla exploitation tool", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Joomla exploitation tool", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "JDatabaseDriverMysqli" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-10x", + "name": "Nessus", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nessus", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)^Nessus(/|([ :]+SOAP))" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-12x", + "name": "Arachni", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Arachni", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^Arachni\\/v" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-13x", + "name": "Jorgee", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Jorgee", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bJorgee\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-14x", + "name": "Probely", + "tags": { + "type": "commercial_scanner", + 
"category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Probely", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bProbely\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-15x", + "name": "Metis", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Metis", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bmetis\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-16x", + "name": "SQL power injector", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "SQLPowerInjector", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "sql power injector" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-18x", + "name": "N-Stealth", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "N-Stealth", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bn-stealth\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-19x", + "name": "Brutus", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Brutus", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bbrutus\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-1xx", + "name": "Shellshock exploitation tool", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\(\\) \\{ :; *\\}" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-20x", + "name": "Netsparker", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Netsparker", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\bnetsparker\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-22x", + "name": "JAASCois", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "JAASCois", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": 
"(?i)\\bjaascois\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-26x", + "name": "Nsauditor", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nsauditor", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bnsauditor\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-27x", + "name": "Paros", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Paros", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)Mozilla/.* Paros/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-28x", + "name": "DirBuster", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "DirBuster", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bdirbuster\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-29x", + "name": "Pangolin", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Pangolin", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bpangolin\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-2xx", + "name": "Qualys", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Qualys", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bqualys\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-30x", + "name": "SQLNinja", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "SQLNinja", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bsqlninja\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-31x", + "name": "Nikto", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nikto", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\(Nikto/[\\d\\.]+\\)" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-33x", + "name": "BlackWidow", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "BlackWidow", + "confidence": 
"1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bblack\\s?widow\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-34x", + "name": "Grendel-Scan", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Grendel-Scan", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bgrendel-scan\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-35x", + "name": "Havij", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Havij", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bhavij\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-36x", + "name": "w3af", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "w3af", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bw3af\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-37x", + "name": "Nmap", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nmap", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "nmap (nse|scripting engine)" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-39x", + "name": "Nessus Scripted", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nessus", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)^'?[a-z0-9_]+\\.nasl'?$" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-3xx", + "name": "Evil Scanner", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "EvilScanner", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bevilScanner\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-40x", + "name": "WebFuck", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "WebFuck", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bWebFuck\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": 
"ua0-600-42x", + "name": "OpenVAS", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "OpenVAS", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)OpenVAS\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-43x", + "name": "Spider-Pig", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Spider-Pig", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "Powered by Spider-Pig by tinfoilsecurity\\.com" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-44x", + "name": "Zgrab", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Zgrab", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "Mozilla/\\d+.\\d+ zgrab" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-45x", + "name": "Zmeu", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Zmeu", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bZmEu\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-47x", + "name": "GoogleSecurityScanner", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "GoogleSecurityScanner", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bGoogleSecurityScanner\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-48x", + "name": "Commix", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Commix", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^commix\\/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-49x", + "name": "Gobuster", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Gobuster", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^gobuster\\/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-4xx", + "name": "CGIchk", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "CGIchk", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + 
"address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bcgichk\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-51x", + "name": "FFUF", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "FFUF", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)^Fuzz Faster U Fool\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-52x", + "name": "Nuclei", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nuclei", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)^Nuclei\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-53x", + "name": "Tsunami", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Tsunami", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bTsunamiSecurityScanner\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-54x", + "name": "Nimbostratus", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Nimbostratus", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bnimbostratus-bot\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-55x", + "name": "Datadog test scanner: user-agent", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Datadog Canary Test", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "grpc.server.request.metadata", + "key_path": [ + "dd-canary" + ] + } + ], + "regex": "^dd-test-scanner-log(?:$|/|\\s)" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-56x", + "name": "Datadog test scanner - blocking version: user-agent", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Datadog Canary Test", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + }, + { + "address": "grpc.server.request.metadata", + "key_path": [ + "dd-canary" + ] + } + ], + "regex": "^dd-test-scanner-log-block(?:$|/|\\s)" + }, + "operator": "match_regex" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + }, + { + "id": "ua0-600-57x", + "name": "AlertLogic", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "AlertLogic", + "confidence": "0" + 
}, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\bAlertLogic-MDR-" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-58x", + "name": "wfuzz", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "wfuzz", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\bwfuzz\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-59x", + "name": "Detectify", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Detectify", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "\\bdetectify\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-5xx", + "name": "Blind SQL Injection Brute Forcer", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "BSQLBF", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)\\bbsqlbf\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-60x", + "name": "masscan", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "masscan", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^masscan/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-61x", + "name": "WPScan", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "WPScan", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^wpscan\\b" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-62x", + "name": "Aon pentesting services", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Aon", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^Aon/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-63x", + "name": "FeroxBuster", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "feroxbuster", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^feroxbuster/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-6xx", + "name": "Stealthy 
scanner", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "mozilla/4\\.0 \\(compatible(; msie (?:6\\.0; (?:win32|Windows NT 5\\.0)|4\\.0; Windows NT))?\\)", + "options": { + "case_sensitive": false + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-7xx", + "name": "SQLmap", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "SQLmap", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^sqlmap/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "ua0-600-9xx", + "name": "Skipfish", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "Skipfish", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "(?i)mozilla/5\\.0 sf/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + } + ], + "processors": [ + { + "id": "extract-content", + "generator": "extract_schema", + "conditions": [ + { + "operator": "equals", + "parameters": { + "inputs": [ + { + "address": "waf.context.processor", + "key_path": [ + "extract-schema" + ] + } + ], + "type": "boolean", + "value": true + } + } + ], + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.body" + } + ], + "output": "_dd.appsec.s.req.body" + }, + { + "inputs": [ + { + "address": "server.request.cookies" + } + ], + "output": "_dd.appsec.s.req.cookies" + }, + { + "inputs": [ + { + "address": "server.request.query" + } + ], + "output": "_dd.appsec.s.req.query" + }, + { + "inputs": [ + { + "address": "server.request.path_params" + } + ], + "output": "_dd.appsec.s.req.params" + }, + { + "inputs": [ + { + "address": "server.response.body" + } + ], + "output": "_dd.appsec.s.res.body" + }, + { + "inputs": [ + { + "address": "graphql.server.all_resolvers" + } + ], + "output": "_dd.appsec.s.graphql.all_resolvers" + }, + { + "inputs": [ + { + "address": "graphql.server.resolver" + } + ], + "output": "_dd.appsec.s.graphql.resolver" + } + ], + "scanners": [ + { + "tags": { + "category": "payment" + } + }, + { + "tags": { + "category": "pii" + } + } + ] + }, + "evaluate": false, + "output": true + }, + { + "id": "extract-headers", + "generator": "extract_schema", + "conditions": [ + { + "operator": "equals", + "parameters": { + "inputs": [ + { + "address": "waf.context.processor", + "key_path": [ + "extract-schema" + ] + } + ], + "type": "boolean", + "value": true + } + } + ], + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.req.headers" + }, + { + "inputs": [ + { + "address": "server.response.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.res.headers" + } + ], + "scanners": [ + { + "tags": { + "category": "credentials" + } + }, + { + "tags": { + "category": "pii" + } + } + ] + }, + "evaluate": false, + "output": true + } + ], + "scanners": [ + { + "id": "JU1sRk3mSzqSUJn6GrVn7g", + "name": 
"American Express Card Scanner (4+4+4+3 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{2}(?:(?:\\s\\d{4}\\s\\d{4}\\s\\d{3})|(?:\\,\\d{4}\\,\\d{4}\\,\\d{3})|(?:-\\d{4}-\\d{4}-\\d{3})|(?:\\.\\d{4}\\.\\d{4}\\.\\d{3}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "edmH513UTQWcRiQ9UnzHlw-mod", + "name": "American Express Card Scanner (4+6|5+5|6 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{2}(?:(?:\\s\\d{5,6}\\s\\d{5,6})|(?:\\.\\d{5,6}\\.\\d{5,6})|(?:-\\d{5,6}-\\d{5,6})|(?:,\\d{5,6},\\d{5,6}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "e6K4h_7qTLaMiAbaNXoSZA", + "name": "American Express Card Scanner (8+7 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{6}(?:(?:\\s\\d{7})|(?:\\,\\d{7})|(?:-\\d{7})|(?:\\.\\d{7}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "K2rZflWzRhGM9HiTc6whyQ", + "name": "American Express Card Scanner (1x15 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{13}\\b", + "options": { + "case_sensitive": false, + "min_length": 15 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "9d7756e343cefa22a5c098e1092590f806eb5446", + "name": "Basic Authentication Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^basic\\s+[A-Za-z0-9+/=]+", + "options": { + "case_sensitive": false, + "min_length": 7 + } + } + }, + "tags": { + "type": "basic_auth", + "category": "credentials" + } + }, + { + "id": "mZy8XjZLReC9smpERXWnnw", + "name": "Bearer Authentication Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^bearer\\s+[-a-z0-9._~+/]{4,}", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + 
"tags": { + "type": "bearer_token", + "category": "credentials" + } + }, + { + "id": "450239afc250a19799b6c03dc0e16fd6a4b2a1af", + "name": "Canadian Social Insurance Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:social[\\s_]?(?:insurance(?:\\s+number)?)?|SIN|Canadian[\\s_]?(?:social[\\s_]?(?:insurance)?|insurance[\\s_]?number)?)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b\\d{3}-\\d{3}-\\d{3}\\b", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + "tags": { + "type": "canadian_sin", + "category": "pii" + } + }, + { + "id": "87a879ff33693b46c8a614d8211f5a2c289beca0", + "name": "Digest Authentication Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^digest\\s+", + "options": { + "case_sensitive": false, + "min_length": 7 + } + } + }, + "tags": { + "type": "digest_auth", + "category": "credentials" + } + }, + { + "id": "qWumeP1GQUa_E4ffAnT-Yg", + "name": "American Express Card Scanner (1x14 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "(?:30[0-59]\\d|3[689]\\d{2})(?:\\d{10})", + "options": { + "case_sensitive": false, + "min_length": 14 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "NlTWWM5LS6W0GSqBLuvtRw", + "name": "Diners Card Scanner (4+4+4+2 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:30[0-59]\\d|3[689]\\d{2})(?:(?:\\s\\d{4}\\s\\d{4}\\s\\d{2})|(?:\\,\\d{4}\\,\\d{4}\\,\\d{2})|(?:-\\d{4}-\\d{4}-\\d{2})|(?:\\.\\d{4}\\.\\d{4}\\.\\d{2}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "Xr5VdbQSTXitYGGiTfxBpw", + "name": "Diners Card Scanner (4+6+4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:30[0-59]\\d|3[689]\\d{2})(?:(?:\\s\\d{6}\\s\\d{4})|(?:\\.\\d{6}\\.\\d{4})|(?:-\\d{6}-\\d{4})|(?:,\\d{6},\\d{4}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "gAbunN_WQNytxu54DjcbAA-mod", + "name": "Diners Card Scanner (8+6 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + 
"parameters": { + "regex": "\\b(?:30[0-59]\\d{5}|3[689]\\d{6})\\s?(?:(?:\\s\\d{6})|(?:\\,\\d{6})|(?:-\\d{6})|(?:\\.\\d{6}))\\b", + "options": { + "case_sensitive": false, + "min_length": 14 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "9cs4qCfEQBeX17U7AepOvQ", + "name": "MasterCard Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:6221(?:2[6-9]|[3-9][0-9])\\d{2}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8})|6229(?:[01][0-9]|2[0-5])\\d{2}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8})|(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])\\d{4}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "YBIDWJIvQWW_TFOyU0CGJg", + "name": "Discover Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:(?:6221(?:2[6-9]|[3-9][0-9])\\d{2}(?:,\\d{4}){2})|(?:6221\\s(?:2[6-9]|[3-9][0-9])\\d{2}(?:\\s\\d{4}){2})|(?:6221\\.(?:2[6-9]|[3-9][0-9])\\d{2}(?:\\.\\d{4}){2})|(?:6221-(?:2[6-9]|[3-9][0-9])\\d{2}(?:-\\d{4}){2}))|(?:(?:6229(?:[01][0-9]|2[0-5])\\d{2}(?:,\\d{4}){2})|(?:6229\\s(?:[01][0-9]|2[0-5])\\d{2}(?:\\s\\d{4}){2})|(?:6229\\.(?:[01][0-9]|2[0-5])\\d{2}(?:\\.\\d{4}){2})|(?:6229-(?:[01][0-9]|2[0-5])\\d{2}(?:-\\d{4}){2}))|(?:(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "12cpbjtVTMaMutFhh9sojQ", + "name": "Discover Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:6221(?:2[6-9]|[3-9][0-9])\\d{10}|6229(?:[01][0-9]|2[0-5])\\d{10}|(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "PuXiVTCkTHOtj0Yad1ppsw", + "name": "Standard E-mail Address", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:e[-\\s]?)?mail|address|sender|\\bto\\b|from|recipient)\\b", + "options": { + "case_sensitive": false, + "min_length": 2 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[\\w!#$%&'*+/=?`{|}~^-]+(?:\\.[\\w!#$%&'*+/=?`{|}~^-]+)*(%40|@)(?:[a-zA-Z0-9-]+\\.)+[a-zA-Z]{2,6}\\b", + "options": { + "case_sensitive": false, + "min_length": 5 + } + } + }, + "tags": { + "type": "email", + "category": "pii" + } + }, + { + "id": "8VS2RKxzR8a_95L5fuwaXQ", + "name": "IBAN", + "key": { + "operator": "match_regex", + 
"parameters": { + "regex": "\\b(?:iban|account|sender|receiver)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:NO\\d{2}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{3}|BE\\d{2}(?:[ \\-]?\\d{4}){3}|(?:DK|FO|FI|GL|SD)\\d{2}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|NL\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{2}|MK\\d{2}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]\\d{2}|SI\\d{17}|(?:AT|BA|EE|LT|XK)\\d{18}|(?:LU|KZ|EE|LT)\\d{5}[A-Z0-9]{13}|LV\\d{2}[A-Z]{4}[A-Z0-9]{13}|(?:LI|CH)\\d{2}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]|HR\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d|GE\\d{2}[ \\-]?[A-Z0-9]{2}\\d{2}\\d{14}|VA\\d{20}|BG\\d{2}[A-Z]{4}\\d{6}[A-Z0-9]{8}|BH\\d{2}[A-Z]{4}[A-Z0-9]{14}|GB\\d{2}[A-Z]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|IE\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|(?:CR|DE|ME|RS)\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d{2}|(?:AE|TL|IL)\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d{3}|GI\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){3}[ \\-]?[A-Z0-9]{3}|IQ\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{3}|MD\\d{2}(?:[ \\-]?[A-Z0-9]{4}){5}|SA\\d{2}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){4}|RO\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){4}|(?:PK|VG)\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){4}|AD\\d{2}(?:[ \\-]?\\d{4}){2}(?:[ \\-]?[A-Z0-9]{4}){3}|(?:CZ|SK|ES|SE|TN)\\d{2}(?:[ \\-]?\\d{4}){5}|(?:LY|PT|ST)\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d|TR\\d{2}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{4}){3}[ \\-]?[A-Z0-9]{2}|IS\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{2}|(?:IT|SM)\\d{2}[ \\-]?[A-Z]\\d{3}[ \\-]?\\d{4}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]{3}|GR\\d{2}[ \\-]?\\d{4}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){3}[A-Z0-9]{3}|(?:FR|MC)\\d{2}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]\\d{2}|MR\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{3}|(?:SV|DO)\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){5}|BY\\d{2}[ \\-]?[A-Z]{4}[ \\-]?\\d{4}(?:[ \\-]?[A-Z0-9]{4}){4}|GT\\d{2}(?:[ \\-]?[A-Z0-9]{4}){6}|AZ\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{5}){4}|LB\\d{2}[ \\-]?\\d{4}(?:[ \\-]?[A-Z0-9]{5}){4}|(?:AL|CY)\\d{2}(?:[ \\-]?\\d{4}){2}(?:[ \\-]?[A-Z0-9]{4}){4}|(?:HU|PL)\\d{2}(?:[ \\-]?\\d{4}){6}|QA\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){5}[ \\-]?[A-Z0-9]|PS\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d|UA\\d{2}[ \\-]?\\d{4}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){4}[ \\-]?[A-Z0-9]|BR\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{3}[A-Z0-9][ \\-]?[A-Z0-9]|EG\\d{2}(?:[ \\-]?\\d{4}){6}\\d|MU\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){4}\\d{3}[A-Z][ \\-]?[A-Z]{2}|(?:KW|JO)\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){5}[ \\-]?[A-Z0-9]{2}|MT\\d{2}[ \\-]?[A-Z]{4}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{3}){4}[ \\-]?[A-Z0-9]{3}|SC\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){5}[ \\-]?[A-Z]{3}|LC\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){6})\\b", + "options": { + "case_sensitive": false, + "min_length": 15 + } + } + }, + "tags": { + "type": "iban", + "category": "payment" + } + }, + { + "id": "h6WJcecQTwqvN9KeEtwDvg", + "name": "JCB Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])(?:\\d{12})\\b", + "options": { 
+ "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "gcEaMu_VSJ2-bGCEkgyC0w", + "name": "JCB Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "imTliuhXT5GAeRNhqChXQQ", + "name": "JCB Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "9osY3xc9Q7ONAV0zw9Uz4A", + "name": "JSON Web Token", + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\bey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(\\.[\\w.+\\/=-]+)?\\b", + "options": { + "case_sensitive": false, + "min_length": 20 + } + } + }, + "tags": { + "type": "json_web_token", + "category": "credentials" + } + }, + { + "id": "d1Q9D3YMRxuVKf6CZInJPw", + "name": "Maestro Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{2}|6\\d{3})(?:\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "M3YIQKKjRVmoeQuM3pjzrw", + "name": "Maestro Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{6}|6\\d{7})(?:\\s\\d{8}|\\.\\d{8}|-\\d{8}|,\\d{8})\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "hRxiQBlSSVKcjh5U7LZYLA", + "name": "Maestro Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{2}|6\\d{3})(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } 
+ }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "NwhIYNS4STqZys37WlaIKA", + "name": "MasterCard Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:(?:\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "axxJkyjhRTOuhjwlsA35Vw", + "name": "MasterCard Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "76EhmoK3TPqJcpM-fK0pLw", + "name": "MasterCard Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "de0899e0cbaaa812bb624cf04c912071012f616d-mod", + "name": "UK National Insurance Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "^nin$|\\binsurance\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[A-Z]{2}[\\s-]?\\d{6}[\\s-]?[A-Z]?\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "tags": { + "type": "uk_nin", + "category": "pii" + } + }, + { + "id": "d962f7ddb3f55041e39195a60ff79d4814a7c331", + "name": "US Passport Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bpassport\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[0-9A-Z]{9}\\b|\\b[0-9]{6}[A-Z][0-9]{2}\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "tags": { + "type": "passport_number", + "category": "pii" + } + }, + { + "id": "7771fc3b-b205-4b93-bcef-28608c5c1b54", + "name": "United States Social Security Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:SSN|(?:(?:social)?[\\s_]?(?:security)?[\\s_]?(?:number)?)?)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + 
}, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b\\d{3}[-\\s\\.]{1}\\d{2}[-\\s\\.]{1}\\d{4}\\b", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + "tags": { + "type": "us_ssn", + "category": "pii" + } + }, + { + "id": "ac6d683cbac77f6e399a14990793dd8fd0fca333", + "name": "US Vehicle Identification Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:vehicle[_\\s-]*identification[_\\s-]*number|vin)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[A-HJ-NPR-Z0-9]{17}\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "vin", + "category": "pii" + } + }, + { + "id": "wJIgOygRQhKkR69b_9XbRQ", + "name": "Visa Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b4\\d{3}(?:(?:\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + }, + { + "id": "0o71SJxXQNK7Q6gMbBesFQ", + "name": "Visa Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b4\\d{3}(?:(?:,\\d{4}){3}|(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + }, + { + "id": "QrHD6AfgQm6z-j0wStxTvA", + "name": "Visa Card Scanner (1x15 & 1x16 & 1x19 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "4[0-9]{12}(?:[0-9]{3})?", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + } + ] +} \ No newline at end of file diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/trace_utils/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/trace_utils/__init__.py new file mode 100644 index 0000000..25559d7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/trace_utils/__init__.py @@ -0,0 +1,11 @@ +from ddtrace.appsec._trace_utils import block_request # noqa: F401 +from ddtrace.appsec._trace_utils import block_request_if_user_blocked # noqa: F401 +from ddtrace.appsec._trace_utils import should_block_user # noqa: F401 +from ddtrace.appsec._trace_utils import track_custom_event # noqa: F401 +from ddtrace.appsec._trace_utils import track_user_login_failure_event # noqa: F401 +from ddtrace.appsec._trace_utils import track_user_login_success_event # noqa: F401 +from ddtrace.appsec._trace_utils import track_user_signup_event # noqa: F401 +import 
ddtrace.internal.core + + +ddtrace.internal.core.on("set_user_for_asm", block_request_if_user_blocked, "block_user") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/auto.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/auto.py new file mode 100644 index 0000000..27f8086 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/auto.py @@ -0,0 +1,21 @@ +""" +.. _ddtraceauto: + +Importing ``ddtrace.auto`` installs Datadog instrumentation in the runtime. It should be used +when :ref:`ddtrace-run` is not an option. Using it with :ref:`ddtrace-run` +is unsupported and may lead to undefined behavior:: + + # myapp.py + + import ddtrace.auto # install instrumentation as early as possible + import mystuff + + def main(): + print("It's my app!") + + main() + +If you'd like more granular control over instrumentation setup, you can call the `patch*` functions +directly. +""" +import ddtrace.bootstrap.sitecustomize # noqa:F401 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/preload.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/preload.py new file mode 100644 index 0000000..aecab4e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/preload.py @@ -0,0 +1,114 @@ +""" +Bootstrapping code that is run when using the `ddtrace-run` Python entrypoint +Add all monkey-patching that needs to run by default here +""" +import os # noqa:I001 + +from ddtrace import config # noqa:F401 +from ddtrace.debugging._config import di_config # noqa:F401 +from ddtrace.debugging._config import ed_config # noqa:F401 +from ddtrace.settings.profiling import config as profiling_config # noqa:F401 +from ddtrace.internal.logger import get_logger # noqa:F401 +from ddtrace.internal.module import ModuleWatchdog # noqa:F401 +from ddtrace.internal.runtime.runtime_metrics import RuntimeWorker # noqa:F401 +from ddtrace.internal.tracemethods import _install_trace_methods # noqa:F401 +from ddtrace.internal.utils.formats import asbool # noqa:F401 +from ddtrace.internal.utils.formats import parse_tags_str # noqa:F401 +from ddtrace.settings.asm import config as asm_config # noqa:F401 +from ddtrace import tracer + + +import typing as t + +# Register operations to be performned after the preload is complete. In +# general, we might need to perform some cleanup operations after the +# initialisation of the library, while also execute some more code after that. +# _____ ___ _________ _____ ______ _____ ___ _ _ _____ +# |_ _|| \/ || ___ \| _ || ___ \|_ _| / _ \ | \ | ||_ _| +# | | | . . || |_/ /| | | || |_/ / | | / /_\ \| \| | | | +# | | | |\/| || __/ | | | || / | | | _ || . ` | | | +# _| |_ | | | || | \ \_/ /| |\ \ | | | | | || |\ | | | +# \___/ \_| |_/\_| \___/ \_| \_| \_/ \_| |_/\_| \_/ \_/ +# Do not register any functions that import ddtrace modules that have not been +# imported yet. 
+post_preload = [] + + +def register_post_preload(func: t.Callable) -> None: + post_preload.append(func) + + +log = get_logger(__name__) + + +if profiling_config.enabled: + log.debug("profiler enabled via environment variable") + try: + import ddtrace.profiling.auto # noqa: F401 + except Exception: + log.error("failed to enable profiling", exc_info=True) + +if di_config.enabled or ed_config.enabled: + from ddtrace.debugging import DynamicInstrumentation + + DynamicInstrumentation.enable() + +if config._runtime_metrics_enabled: + RuntimeWorker.enable() + +if asbool(os.getenv("DD_IAST_ENABLED", False)): + from ddtrace.appsec._iast._utils import _is_python_version_supported + + if _is_python_version_supported(): + from ddtrace.appsec._iast._ast.ast_patching import _should_iast_patch + from ddtrace.appsec._iast._loader import _exec_iast_patched_module + + log.debug("IAST enabled") + ModuleWatchdog.register_pre_exec_module_hook(_should_iast_patch, _exec_iast_patched_module) + +if config._remote_config_enabled: + from ddtrace.internal.remoteconfig.worker import remoteconfig_poller + + remoteconfig_poller.enable() + config.enable_remote_configuration() + +if asm_config._asm_enabled or config._remote_config_enabled: + from ddtrace.appsec._remoteconfiguration import enable_appsec_rc + + enable_appsec_rc() + +if config._otel_enabled: + + @ModuleWatchdog.after_module_imported("opentelemetry.trace") + def _(_): + from opentelemetry.trace import set_tracer_provider + + from ddtrace.opentelemetry import TracerProvider + + set_tracer_provider(TracerProvider()) + + +if asbool(os.getenv("DD_TRACE_ENABLED", default=True)): + from ddtrace import patch_all + + @register_post_preload + def _(): + # We need to clean up after we have imported everything we need from + # ddtrace, but before we register the patch-on-import hooks for the + # integrations. + modules_to_patch = os.getenv("DD_PATCH_MODULES") + modules_to_str = parse_tags_str(modules_to_patch) + modules_to_bool = {k: asbool(v) for k, v in modules_to_str.items()} + patch_all(**modules_to_bool) + + if config.trace_methods: + _install_trace_methods(config.trace_methods) + +if "DD_TRACE_GLOBAL_TAGS" in os.environ: + env_tags = os.getenv("DD_TRACE_GLOBAL_TAGS") + tracer.set_tags(parse_tags_str(env_tags)) + + +@register_post_preload +def _(): + tracer._generate_diagnostic_logs() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/sitecustomize.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/sitecustomize.py new file mode 100644 index 0000000..b7e1d59 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/bootstrap/sitecustomize.py @@ -0,0 +1,171 @@ +""" +Bootstrapping code that is run when using the `ddtrace-run` Python entrypoint +Add all monkey-patching that needs to run by default here +""" +# _____ ___ _________ _____ ______ _____ ___ _ _ _____ +# |_ _|| \/ || ___ \| _ || ___ \|_ _| / _ \ | \ | ||_ _| +# | | | . . || |_/ /| | | || |_/ / | | / /_\ \| \| | | | +# | | | |\/| || __/ | | | || / | | | _ || . ` | | | +# _| |_ | | | || | \ \_/ /| |\ \ | | | | | || |\ | | | +# \___/ \_| |_/\_| \___/ \_| \_| \_/ \_| |_/\_| \_/ \_/ +# DO NOT MODIFY THIS FILE! +# Only do so if you know what you're doing. This file contains boilerplate code +# to allow injecting a custom sitecustomize.py file into the Python process to +# perform the correct initialisation for the library. All the actual +# initialisation logic should be placed in preload.py. 
+from ddtrace import LOADED_MODULES # isort:skip + +import logging # noqa:I001 +import os # noqa:F401 +import sys +import warnings # noqa:F401 + +from ddtrace import config # noqa:F401 +from ddtrace._logger import _configure_log_injection +from ddtrace.internal.logger import get_logger # noqa:F401 +from ddtrace.internal.module import ModuleWatchdog # noqa:F401 +from ddtrace.internal.module import is_module_installed +from ddtrace.internal.utils.formats import asbool # noqa:F401 + +# Debug mode from the tracer will do the same here, so only need to do this otherwise. +if config.logs_injection: + _configure_log_injection() + + +log = get_logger(__name__) + + +if "gevent" in sys.modules or "gevent.monkey" in sys.modules: + import gevent.monkey # noqa:F401 + + if gevent.monkey.is_module_patched("threading"): + warnings.warn( # noqa: B028 + "Loading ddtrace after gevent.monkey.patch_all() is not supported and is " + "likely to break the application. Use ddtrace-run to fix this, or " + "import ddtrace.auto before calling gevent.monkey.patch_all().", + RuntimeWarning, + ) + + +def cleanup_loaded_modules(): + def drop(module_name): + # type: (str) -> None + del sys.modules[module_name] + + MODULES_REQUIRING_CLEANUP = ("gevent",) + do_cleanup = os.getenv("DD_UNLOAD_MODULES_FROM_SITECUSTOMIZE", default="auto").lower() + if do_cleanup == "auto": + do_cleanup = any(is_module_installed(m) for m in MODULES_REQUIRING_CLEANUP) + + if not asbool(do_cleanup): + return + + # Unload all the modules that we have imported, except for the ddtrace one. + # NB: this means that every `import threading` anywhere in `ddtrace/` code + # uses a copy of that module that is distinct from the copy that user code + # gets when it does `import threading`. The same applies to every module + # not in `KEEP_MODULES`. + KEEP_MODULES = frozenset( + [ + "atexit", + "copyreg", # pickling issues for tracebacks with gevent + "ddtrace", + "concurrent", + "typing", + "re", # referenced by the typing module + "sre_constants", # imported by re at runtime + "logging", + "attr", + "google", + "google.protobuf", # the upb backend in >= 4.21 does not like being unloaded + ] + ) + for m in list(_ for _ in sys.modules if _ not in LOADED_MODULES): + if any(m == _ or m.startswith(_ + ".") for _ in KEEP_MODULES): + continue + + drop(m) + + # TODO: The better strategy is to identify the core modues in LOADED_MODULES + # that should not be unloaded, and then unload as much as possible. + UNLOAD_MODULES = frozenset( + [ + # imported in Python >= 3.10 and patched by gevent + "time", + # we cannot unload the whole concurrent hierarchy, but this + # submodule makes use of threading so it is critical to unload when + # gevent is used. + "concurrent.futures", + ] + ) + for u in UNLOAD_MODULES: + for m in list(sys.modules): + if m == u or m.startswith(u + "."): + drop(m) + + # Because we are not unloading it, the logging module requires a reference + # to the newly imported threading module to allow it to retrieve the correct + # thread object information, like the thread name. We register a post-import + # hook on the threading module to perform this update. + @ModuleWatchdog.after_module_imported("threading") + def _(threading): + logging.threading = threading + + +try: + import ddtrace.bootstrap.preload as preload # Perform the actual initialisation + + cleanup_loaded_modules() + + # Check for and import any sitecustomize that would have normally been used + # had ddtrace-run not been used. 
+ bootstrap_dir = os.path.dirname(__file__) + if bootstrap_dir in sys.path: + index = sys.path.index(bootstrap_dir) + del sys.path[index] + + # NOTE: this reference to the module is crucial in Python 2. + # Without it the current module gets gc'd and all subsequent references + # will be `None`. + ddtrace_sitecustomize = sys.modules["sitecustomize"] + del sys.modules["sitecustomize"] + + # Cache this module under it's fully qualified package name + if "ddtrace.bootstrap.sitecustomize" not in sys.modules: + sys.modules["ddtrace.bootstrap.sitecustomize"] = ddtrace_sitecustomize + + try: + import sitecustomize # noqa:F401 + except ImportError: + # If an additional sitecustomize is not found then put the ddtrace + # sitecustomize back. + log.debug("additional sitecustomize not found") + sys.modules["sitecustomize"] = ddtrace_sitecustomize + else: + log.debug("additional sitecustomize found in: %s", sys.path) + finally: + # Always reinsert the ddtrace bootstrap directory to the path so + # that introspection and debugging the application makes sense. + # Note that this does not interfere with imports since a user + # sitecustomize, if it exists, will be imported. + sys.path.insert(index, bootstrap_dir) + else: + try: + import sitecustomize # noqa:F401 + except ImportError: + log.debug("additional sitecustomize not found") + else: + log.debug("additional sitecustomize found in: %s", sys.path) + + config._ddtrace_bootstrapped = True + # Loading status used in tests to detect if the `sitecustomize` has been + # properly loaded without exceptions. This must be the last action in the module + # when the execution ends with a success. + loaded = True + + for f in preload.post_preload: + f() + +except Exception: + loaded = False + log.warning("error configuring Datadog tracing", exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/commands/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/constants.py new file mode 100644 index 0000000..ee1a024 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/constants.py @@ -0,0 +1,46 @@ +SAMPLE_RATE_METRIC_KEY = "_sample_rate" +SAMPLING_PRIORITY_KEY = "_sampling_priority_v1" +ANALYTICS_SAMPLE_RATE_KEY = "_dd1.sr.eausr" +SAMPLING_AGENT_DECISION = "_dd.agent_psr" +SAMPLING_RULE_DECISION = "_dd.rule_psr" +SAMPLING_LIMIT_DECISION = "_dd.limit_psr" +_SINGLE_SPAN_SAMPLING_MECHANISM = "_dd.span_sampling.mechanism" +_SINGLE_SPAN_SAMPLING_RATE = "_dd.span_sampling.rule_rate" +_SINGLE_SPAN_SAMPLING_MAX_PER_SEC = "_dd.span_sampling.max_per_second" +_SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT = -1 + +ORIGIN_KEY = "_dd.origin" +USER_ID_KEY = "_dd.p.usr.id" +HOSTNAME_KEY = "_dd.hostname" +RUNTIME_FAMILY = "_dd.runtime_family" +ENV_KEY = "env" +VERSION_KEY = "version" +SERVICE_KEY = "service.name" +BASE_SERVICE_KEY = "_dd.base_service" +SERVICE_VERSION_KEY = "service.version" +SPAN_KIND = "span.kind" +SPAN_MEASURED_KEY = "_dd.measured" +KEEP_SPANS_RATE_KEY = "_dd.tracer_kr" +MULTIPLE_IP_HEADERS = "_dd.multiple-ip-headers" + +APPSEC_ENV = "DD_APPSEC_ENABLED" + +IAST_ENV = "DD_IAST_ENABLED" + +MANUAL_DROP_KEY = "manual.drop" +MANUAL_KEEP_KEY = "manual.keep" + +ERROR_MSG = "error.message" # a string representing the error message +ERROR_TYPE = "error.type" # a string representing the type of the error +ERROR_STACK = "error.stack" # a human readable version of the stack. 
+ +PID = "process_id" + +# Use this to explicitly inform the backend that a trace should be rejected and not stored. +USER_REJECT = -1 +# Used by the builtin sampler to inform the backend that a trace should be rejected and not stored. +AUTO_REJECT = 0 +# Used by the builtin sampler to inform the backend that a trace should be kept and stored. +AUTO_KEEP = 1 +# Use this to explicitly inform the backend that a trace should be kept and stored. +USER_KEEP = 2 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/context.py new file mode 100644 index 0000000..c67f329 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/context.py @@ -0,0 +1,269 @@ +import base64 +import re +import threading +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Optional +from typing import Text # noqa:F401 + +from ddtrace.tracing._span_link import SpanLink # noqa:F401 + +from .constants import ORIGIN_KEY +from .constants import SAMPLING_PRIORITY_KEY +from .constants import USER_ID_KEY +from .internal.compat import NumericType +from .internal.constants import W3C_TRACEPARENT_KEY +from .internal.constants import W3C_TRACESTATE_KEY +from .internal.logger import get_logger +from .internal.utils.http import w3c_get_dd_list_member as _w3c_get_dd_list_member + + +if TYPE_CHECKING: # pragma: no cover + from typing import Tuple # noqa:F401,I001 + + from .span import Span # noqa:F401 + + from .span import _MetaDictType + from .span import _MetricDictType + + _ContextState = Tuple[ + Optional[int], # trace_id + Optional[int], # span_id + _MetaDictType, # _meta + _MetricDictType, # _metrics + list[SpanLink], + dict[str, Any], + ] + + +_DD_ORIGIN_INVALID_CHARS_REGEX = re.compile(r"[^\x20-\x7E]+") + +log = get_logger(__name__) + + +class Context(object): + """Represents the state required to propagate a trace across execution + boundaries. 
+ """ + + __slots__ = ["trace_id", "span_id", "_lock", "_meta", "_metrics", "_span_links", "_baggage"] + + def __init__( + self, + trace_id=None, # type: Optional[int] + span_id=None, # type: Optional[int] + dd_origin=None, # type: Optional[str] + sampling_priority=None, # type: Optional[float] + meta=None, # type: Optional[_MetaDictType] + metrics=None, # type: Optional[_MetricDictType] + lock=None, # type: Optional[threading.RLock] + span_links=None, # type: Optional[list[SpanLink]] + baggage=None, # type: Optional[dict[str, Any]] + ): + self._meta = meta if meta is not None else {} # type: _MetaDictType + self._metrics = metrics if metrics is not None else {} # type: _MetricDictType + self._baggage = baggage if baggage is not None else {} # type: dict[str, Any] + + self.trace_id = trace_id # type: Optional[int] + self.span_id = span_id # type: Optional[int] + + if dd_origin is not None and _DD_ORIGIN_INVALID_CHARS_REGEX.search(dd_origin) is None: + self._meta[ORIGIN_KEY] = dd_origin + if sampling_priority is not None: + self._metrics[SAMPLING_PRIORITY_KEY] = sampling_priority + if span_links is not None: + self._span_links = span_links + else: + self._span_links = [] + + if lock is not None: + self._lock = lock + else: + # DEV: A `forksafe.RLock` is not necessary here since Contexts + # are recreated by the tracer after fork + # https://github.com/DataDog/dd-trace-py/blob/a1932e8ddb704d259ea8a3188d30bf542f59fd8d/ddtrace/tracer.py#L489-L508 + self._lock = threading.RLock() + + def __getstate__(self): + # type: () -> _ContextState + return ( + self.trace_id, + self.span_id, + self._meta, + self._metrics, + self._span_links, + self._baggage + # Note: self._lock is not serializable + ) + + def __setstate__(self, state): + # type: (_ContextState) -> None + self.trace_id, self.span_id, self._meta, self._metrics, self._span_links, self._baggage = state + # We cannot serialize and lock, so we must recreate it unless we already have one + self._lock = threading.RLock() + + def _with_span(self, span): + # type: (Span) -> Context + """Return a shallow copy of the context with the given span.""" + return self.__class__( + trace_id=span.trace_id, + span_id=span.span_id, + meta=self._meta, + metrics=self._metrics, + lock=self._lock, + baggage=self._baggage, + ) + + def _update_tags(self, span): + # type: (Span) -> None + with self._lock: + for tag in self._meta: + span._meta.setdefault(tag, self._meta[tag]) + for metric in self._metrics: + span._metrics.setdefault(metric, self._metrics[metric]) + + @property + def sampling_priority(self) -> Optional[NumericType]: + """Return the context sampling priority for the trace.""" + return self._metrics.get(SAMPLING_PRIORITY_KEY) + + @sampling_priority.setter + def sampling_priority(self, value: Optional[NumericType]) -> None: + with self._lock: + if value is None: + if SAMPLING_PRIORITY_KEY in self._metrics: + del self._metrics[SAMPLING_PRIORITY_KEY] + return + self._metrics[SAMPLING_PRIORITY_KEY] = value + + @property + def _traceparent(self): + # type: () -> str + tp = self._meta.get(W3C_TRACEPARENT_KEY) + if self.span_id is None or self.trace_id is None: + # if we only have a traceparent then we'll forward it + # if we don't have a span id or trace id value we can't build a valid traceparent + return tp or "" + + # determine the trace_id value + if tp: + # grab the original traceparent trace id, not the converted value + trace_id = tp.split("-")[1] + else: + trace_id = "{:032x}".format(self.trace_id) + + return "00-{}-{:016x}-{}".format(trace_id, 
self.span_id, self._traceflags) + + @property + def _traceflags(self): + # type: () -> str + return "01" if self.sampling_priority and self.sampling_priority > 0 else "00" + + @property + def _tracestate(self): + # type: () -> str + dd_list_member = _w3c_get_dd_list_member(self) + + # if there's a preexisting tracestate we need to update it to preserve other vendor data + ts = self._meta.get(W3C_TRACESTATE_KEY, "") + if ts and dd_list_member: + # cut out the original dd list member from tracestate so we can replace it with the new one we created + ts_w_out_dd = re.sub("dd=(.+?)(?:,|$)", "", ts) + if ts_w_out_dd: + ts = "dd={},{}".format(dd_list_member, ts_w_out_dd) + else: + ts = "dd={}".format(dd_list_member) + # if there is no original tracestate value then tracestate is just the dd list member we created + elif dd_list_member: + ts = "dd={}".format(dd_list_member) + return ts + + @property + def dd_origin(self): + # type: () -> Optional[Text] + """Get the origin of the trace.""" + return self._meta.get(ORIGIN_KEY) + + @dd_origin.setter + def dd_origin(self, value): + # type: (Optional[Text]) -> None + """Set the origin of the trace.""" + with self._lock: + if value is None: + if ORIGIN_KEY in self._meta: + del self._meta[ORIGIN_KEY] + return + self._meta[ORIGIN_KEY] = value + + @property + def dd_user_id(self): + # type: () -> Optional[Text] + """Get the user ID of the trace.""" + user_id = self._meta.get(USER_ID_KEY) + if user_id: + return str(base64.b64decode(user_id), encoding="utf-8") + return None + + @dd_user_id.setter + def dd_user_id(self, value): + # type: (Optional[Text]) -> None + """Set the user ID of the trace.""" + with self._lock: + if value is None: + if USER_ID_KEY in self._meta: + del self._meta[USER_ID_KEY] + return + self._meta[USER_ID_KEY] = str(base64.b64encode(bytes(value, encoding="utf-8")), encoding="utf-8") + + def _set_baggage_item(self, key, value): + # type: (str, Any) -> None + """Sets a baggage item in this span context. + Note that this operation mutates the baggage of this span context + """ + self._baggage[key] = value + + def _with_baggage_item(self, key, value): + # type: (str, Any) -> Context + """Returns a copy of this span with a new baggage item. + Useful for instantiating new child span contexts. 
+ """ + new_baggage = dict(self._baggage) + new_baggage[key] = value + + ctx = self.__class__(trace_id=self.trace_id, span_id=self.span_id) + ctx._meta = self._meta + ctx._metrics = self._metrics + ctx._baggage = new_baggage + return ctx + + def _get_baggage_item(self, key): + # type: (str) -> Optional[Any] + """Gets a baggage item in this span context.""" + return self._baggage.get(key, None) + + def __eq__(self, other): + # type: (Any) -> bool + if isinstance(other, Context): + with self._lock: + return ( + self.trace_id == other.trace_id + and self.span_id == other.span_id + and self._meta == other._meta + and self._metrics == other._metrics + and self._span_links == other._span_links + and self._baggage == other._baggage + ) + return False + + def __repr__(self): + # type: () -> str + return "Context(trace_id=%s, span_id=%s, _meta=%s, _metrics=%s, _span_links=%s, _baggage=%s)" % ( + self.trace_id, + self.span_id, + self._meta, + self._metrics, + self._span_links, + self._baggage, + ) + + __str__ = __repr__ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/__init__.py new file mode 100644 index 0000000..8d86099 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/__init__.py @@ -0,0 +1,4 @@ +from ..internal.utils.importlib import func_name # noqa:F401 +from ..internal.utils.importlib import module_name # noqa:F401 +from ..internal.utils.importlib import require_modules # noqa:F401 +from ..tracing import trace_handlers # noqa:F401 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/__init__.py new file mode 100644 index 0000000..5930f54 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/__init__.py @@ -0,0 +1,38 @@ +""" +The aiobotocore integration will trace all AWS calls made with the ``aiobotocore`` +library. This integration is not enabled by default. + +Enabling +~~~~~~~~ + +The aiobotocore integration is not enabled by default. Use +:func:`patch()` to enable the integration:: + + from ddtrace import patch + patch(aiobotocore=True) + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.aiobotocore['tag_no_params'] + + This opts out of the default behavior of adding span tags for a narrow set of API parameters. + + To not collect any API parameters, ``ddtrace.config.aiobotocore.tag_no_params = True`` or by setting the environment + variable ``DD_AWS_TAG_NO_PARAMS=true``. 
+ + + Default: ``False`` + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["aiobotocore.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/patch.py new file mode 100644 index 0000000..6b08d60 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiobotocore/patch.py @@ -0,0 +1,180 @@ +import os + +import aiobotocore.client + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.utils.version import parse_version +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import aws +from ...ext import http +from ...internal.schema import schematize_cloud_api_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...internal.utils.formats import asbool +from ...internal.utils.formats import deep_getattr +from ...pin import Pin +from ..trace_utils import unwrap + + +aiobotocore_version_str = getattr(aiobotocore, "__version__", "") +AIOBOTOCORE_VERSION = parse_version(aiobotocore_version_str) + +if AIOBOTOCORE_VERSION <= (0, 10, 0): + # aiobotocore>=0.11.0 + from aiobotocore.endpoint import ClientResponseContentProxy +elif AIOBOTOCORE_VERSION >= (0, 11, 0) and AIOBOTOCORE_VERSION < (2, 3, 0): + from aiobotocore._endpoint_helpers import ClientResponseContentProxy + + +ARGS_NAME = ("action", "params", "path", "verb") +TRACED_ARGS = {"params", "path", "verb"} + + +config._add( + "aiobotocore", + { + "tag_no_params": asbool(os.getenv("DD_AWS_TAG_NO_PARAMS", default=False)), + }, +) + + +def get_version(): + # type: () -> str + return aiobotocore_version_str + + +def patch(): + if getattr(aiobotocore.client, "_datadog_patch", False): + return + aiobotocore.client._datadog_patch = True + + wrapt.wrap_function_wrapper("aiobotocore.client", "AioBaseClient._make_api_call", _wrapped_api_call) + Pin(service=config.service or "aws").onto(aiobotocore.client.AioBaseClient) + + +def unpatch(): + if getattr(aiobotocore.client, "_datadog_patch", False): + aiobotocore.client._datadog_patch = False + unwrap(aiobotocore.client.AioBaseClient, "_make_api_call") + + +class WrappedClientResponseContentProxy(wrapt.ObjectProxy): + def __init__(self, body, pin, parent_span): + super(WrappedClientResponseContentProxy, self).__init__(body) + self._self_pin = pin + self._self_parent_span = parent_span + + async def read(self, *args, **kwargs): + # async read that must be child of the parent span operation + operation_name = "{}.read".format(self._self_parent_span.name) + + with self._self_pin.tracer.start_span(operation_name, child_of=self._self_parent_span) as span: + span.set_tag_str(COMPONENT, config.aiobotocore.integration_name) + + # set span.kind tag equal to type of request + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + # inherit parent attributes + span.resource = self._self_parent_span.resource + span.span_type = self._self_parent_span.span_type + span._meta = dict(self._self_parent_span._meta) + span._metrics = 
dict(self._self_parent_span.metrics) + + result = await self.__wrapped__.read(*args, **kwargs) + span.set_tag("Length", len(result)) + + return result + + # wrapt doesn't proxy `async with` context managers + async def __aenter__(self): + # call the wrapped method but return the object proxy + await self.__wrapped__.__aenter__() + return self + + async def __aexit__(self, *args, **kwargs): + response = await self.__wrapped__.__aexit__(*args, **kwargs) + return response + + +async def _wrapped_api_call(original_func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + result = await original_func(*args, **kwargs) + return result + + endpoint_name = deep_getattr(instance, "_endpoint._endpoint_prefix") + + service = pin.service if pin.service != "aws" else "{}.{}".format(pin.service, endpoint_name) + with pin.tracer.trace( + schematize_cloud_api_operation( + "{}.command".format(endpoint_name), cloud_provider="aws", cloud_service=endpoint_name + ), + service=schematize_service_name(service), + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.aiobotocore.integration_name) + + # set span.kind tag equal to type of request + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + try: + operation = get_argument_value(args, kwargs, 0, "operation_name") + params = get_argument_value(args, kwargs, 1, "params") + + span.resource = "{}.{}".format(endpoint_name, operation.lower()) + + if params and not config.aiobotocore["tag_no_params"]: + aws._add_api_param_span_tags(span, endpoint_name, params) + except ArgumentError: + operation = None + span.resource = endpoint_name + + region_name = deep_getattr(instance, "meta.region_name") + + meta = { + "aws.agent": "aiobotocore", + "aws.operation": operation, + "aws.region": region_name, + "region": region_name, + } + span.set_tags(meta) + + result = await original_func(*args, **kwargs) + + body = result.get("Body") + + # ClientResponseContentProxy removed in aiobotocore 2.3.x: https://github.com/aio-libs/aiobotocore/pull/934/ + if hasattr(body, "ClientResponseContentProxy") and isinstance(body, ClientResponseContentProxy): + result["Body"] = WrappedClientResponseContentProxy(body, pin, span) + + response_meta = result["ResponseMetadata"] + response_headers = response_meta["HTTPHeaders"] + + span.set_tag(http.STATUS_CODE, response_meta["HTTPStatusCode"]) + if 500 <= response_meta["HTTPStatusCode"] < 600: + span.error = 1 + + span.set_tag("retry_attempts", response_meta["RetryAttempts"]) + + request_id = response_meta.get("RequestId") + if request_id: + span.set_tag_str("aws.requestid", request_id) + + request_id2 = response_headers.get("x-amz-id-2") + if request_id2: + span.set_tag_str("aws.requestid2", request_id2) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.aiobotocore.get_analytics_sample_rate()) + + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/__init__.py new file mode 100644 index 0000000..ae5a71e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/__init__.py @@ -0,0 +1,99 @@ +""" +The ``aiohttp`` integration traces requests made with the client or to the server. + +The client is automatically instrumented while the server must be manually instrumented using middleware. 
+ +Client +****** + +Enabling +~~~~~~~~ + +The client integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(aiohttp=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.aiohttp_client['distributed_tracing'] + + Include distributed tracing headers in requests sent from the aiohttp client. + + This option can also be set with the ``DD_AIOHTTP_CLIENT_DISTRIBUTED_TRACING`` + environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.aiohttp_client['split_by_domain'] + + Whether or not to use the domain name of requests as the service name. + + Default: ``False`` + + +Server +****** + +Enabling +~~~~~~~~ + +Automatic instrumentation is not available for the server, instead +the provided ``trace_app`` function must be used:: + + from aiohttp import web + from ddtrace import tracer, patch + from ddtrace.contrib.aiohttp import trace_app + + # create your application + app = web.Application() + app.router.add_get('/', home_handler) + + # trace your application handlers + trace_app(app, tracer, service='async-api') + web.run_app(app, port=8000) + +Integration settings are attached to your application under the ``datadog_trace`` +namespace. You can read or update them as follows:: + + # disables distributed tracing for all received requests + app['datadog_trace']['distributed_tracing_enabled'] = False + +Available settings are: + +* ``tracer`` (default: ``ddtrace.tracer``): set the default tracer instance that is used to + trace `aiohttp` internals. By default the `ddtrace` tracer is used. +* ``service`` (default: ``aiohttp-web``): set the service name used by the tracer. Usually + this configuration must be updated with a meaningful name. +* ``distributed_tracing_enabled`` (default: ``True``): enable distributed tracing during + the middleware execution, so that a new span is created with the given ``trace_id`` and + ``parent_id`` injected via request headers. + +When a request span is created, a new ``Context`` for this logical execution is attached +to the ``request`` object, so that it can be used in the application code:: + + async def home_handler(request): + ctx = request['datadog_context'] + # do something with the tracing Context + +:ref:`All HTTP tags ` are supported for this integration. 
+ +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["aiohttp"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .middlewares import trace_app + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "trace_app", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/middlewares.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/middlewares.py new file mode 100644 index 0000000..87f6a10 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/middlewares.py @@ -0,0 +1,182 @@ +from aiohttp import web +from aiohttp.web_urldispatcher import SystemRoute + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import http +from ...internal.schema import schematize_url_operation +from .. import trace_utils +from ..asyncio import context_provider + + +CONFIG_KEY = "datadog_trace" +REQUEST_CONTEXT_KEY = "datadog_context" +REQUEST_CONFIG_KEY = "__datadog_trace_config" +REQUEST_SPAN_KEY = "__datadog_request_span" + + +async def trace_middleware(app, handler): + """ + ``aiohttp`` middleware that traces the handler execution. + Because handlers are run in different tasks for each request, we attach the Context + instance both to the Task and to the Request objects. In this way: + + * the Task is used by the internal automatic instrumentation + * the ``Context`` attached to the request can be freely used in the application code + """ + + async def attach_context(request): + # application configs + tracer = app[CONFIG_KEY]["tracer"] + service = app[CONFIG_KEY]["service"] + distributed_tracing = app[CONFIG_KEY]["distributed_tracing_enabled"] + # Create a new context based on the propagated information. 
+ trace_utils.activate_distributed_headers( + tracer, + int_config=config.aiohttp, + request_headers=request.headers, + override=distributed_tracing, + ) + + # trace the handler + request_span = tracer.trace( + schematize_url_operation("aiohttp.request", protocol="http", direction=SpanDirection.INBOUND), + service=service, + span_type=SpanTypes.WEB, + ) + request_span.set_tag(SPAN_MEASURED_KEY) + + request_span.set_tag_str(COMPONENT, config.aiohttp.integration_name) + + # set span.kind tag equal to type of request + request_span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + # Configure trace search sample rate + # DEV: aiohttp is special case maintains separate configuration from config api + analytics_enabled = app[CONFIG_KEY]["analytics_enabled"] + if (config.analytics_enabled and analytics_enabled is not False) or analytics_enabled is True: + request_span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, app[CONFIG_KEY].get("analytics_sample_rate", True)) + + # attach the context and the root span to the request; the Context + # may be freely used by the application code + request[REQUEST_CONTEXT_KEY] = request_span.context + request[REQUEST_SPAN_KEY] = request_span + request[REQUEST_CONFIG_KEY] = app[CONFIG_KEY] + try: + response = await handler(request) + if isinstance(response, web.StreamResponse): + request.task.add_done_callback(lambda _: finish_request_span(request, response)) + return response + except Exception: + request_span.set_traceback() + raise + + return attach_context + + +def finish_request_span(request, response): + # safe-guard: discard if we don't have a request span + request_span = request.get(REQUEST_SPAN_KEY, None) + if not request_span: + return + + # default resource name + resource = str(response.status) + + if request.match_info.route.resource: + # collect the resource name based on http resource type + res_info = request.match_info.route.resource.get_info() + + if res_info.get("path"): + resource = res_info.get("path") + elif res_info.get("formatter"): + resource = res_info.get("formatter") + elif res_info.get("prefix"): + resource = res_info.get("prefix") + + # prefix the resource name by the http method + resource = "{} {}".format(request.method, resource) + + request_span.resource = resource + + # DEV: aiohttp is special case maintains separate configuration from config api + trace_query_string = request[REQUEST_CONFIG_KEY].get("trace_query_string") + if trace_query_string is None: + trace_query_string = config.http.trace_query_string + if trace_query_string: + request_span.set_tag_str(http.QUERY_STRING, request.query_string) + + # The match info object provided by aiohttp's default (and only) router + # has a `route` attribute, but routers are susceptible to being replaced/hand-rolled + # so we can only support this case. + route = None + if hasattr(request.match_info, "route"): + aiohttp_route = request.match_info.route + if not isinstance(aiohttp_route, SystemRoute): + # SystemRoute objects exist to throw HTTP errors and have no path + route = aiohttp_route.resource.canonical + + trace_utils.set_http_meta( + request_span, + config.aiohttp, + method=request.method, + url=str(request.url), # DEV: request.url is a yarl's URL object + status_code=response.status, + request_headers=request.headers, + response_headers=response.headers, + route=route, + ) + + request_span.finish() + + +async def on_prepare(request, response): + """ + The on_prepare signal is used to close the request span that is created during + the trace middleware execution. 
+ """ + # NB isinstance is not appropriate here because StreamResponse is a parent of the other + # aiohttp response types + if type(response) is web.StreamResponse and not response.task.done(): + return + finish_request_span(request, response) + + +def trace_app(app, tracer, service="aiohttp-web"): + """ + Tracing function that patches the ``aiohttp`` application so that it will be + traced using the given ``tracer``. + + :param app: aiohttp application to trace + :param tracer: tracer instance to use + :param service: service name of tracer + """ + + # safe-guard: don't trace an application twice + if getattr(app, "__datadog_trace", False): + return + app.__datadog_trace = True + + # configure datadog settings + app[CONFIG_KEY] = { + "tracer": tracer, + "service": config._get_service(default=service), + "distributed_tracing_enabled": None, + "analytics_enabled": None, + "analytics_sample_rate": 1.0, + } + + # the tracer must work with asynchronous Context propagation + tracer.configure(context_provider=context_provider) + + # add the async tracer middleware as a first middleware + # and be sure that the on_prepare signal is the last one + app.middlewares.insert(0, trace_middleware) + app.on_response_prepare.append(on_prepare) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/patch.py new file mode 100644 index 0000000..3830ee4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp/patch.py @@ -0,0 +1,158 @@ +import os + +import aiohttp +from yarl import URL + +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils.formats import asbool +from ddtrace.vendor import wrapt + +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.schema import schematize_url_operation +from ...pin import Pin +from ...propagation.http import HTTPPropagator +from ..trace_utils import ext_service +from ..trace_utils import extract_netloc_and_query_info_from_url +from ..trace_utils import set_http_meta +from ..trace_utils import unwrap +from ..trace_utils import with_traced_module as with_traced_module_sync +from ..trace_utils import wrap +from ..trace_utils_async import with_traced_module + + +log = get_logger(__name__) + + +# Server config +config._add( + "aiohttp", + dict(distributed_tracing=True), +) + +config._add( + "aiohttp_client", + dict( + distributed_tracing=asbool(os.getenv("DD_AIOHTTP_CLIENT_DISTRIBUTED_TRACING", True)), + default_http_tag_query_string=os.getenv("DD_HTTP_CLIENT_TAG_QUERY_STRING", "true"), + split_by_domain=asbool(os.getenv("DD_AIOHTTP_CLIENT_SPLIT_BY_DOMAIN", default=False)), + ), +) + + +def get_version(): + # type: () -> str + return aiohttp.__version__ + + +class _WrappedConnectorClass(wrapt.ObjectProxy): + def __init__(self, obj, pin): + super().__init__(obj) + pin.onto(self) + + async def connect(self, req, *args, **kwargs): + pin = Pin.get_from(self) + with pin.tracer.trace("%s.connect" % self.__class__.__name__) as span: + # set component tag equal to name of integration + span.set_tag(COMPONENT, config.aiohttp.integration_name) + result = await self.__wrapped__.connect(req, *args, **kwargs) + return result + + async def _create_connection(self, req, *args, **kwargs): + pin = 
Pin.get_from(self) + with pin.tracer.trace("%s._create_connection" % self.__class__.__name__) as span: + # set component tag equal to name of integration + span.set_tag(COMPONENT, config.aiohttp.integration_name) + result = await self.__wrapped__._create_connection(req, *args, **kwargs) + return result + + +@with_traced_module +async def _traced_clientsession_request(aiohttp, pin, func, instance, args, kwargs): + method = get_argument_value(args, kwargs, 0, "method") # type: str + url = URL(get_argument_value(args, kwargs, 1, "url")) # type: URL + params = kwargs.get("params") + headers = kwargs.get("headers") or {} + + with pin.tracer.trace( + schematize_url_operation("aiohttp.request", protocol="http", direction=SpanDirection.OUTBOUND), + span_type=SpanTypes.HTTP, + service=ext_service(pin, config.aiohttp_client), + ) as span: + if config.aiohttp_client.split_by_domain: + span.service = url.host + + if pin._config["distributed_tracing"]: + HTTPPropagator.inject(span.context, headers) + kwargs["headers"] = headers + + span.set_tag_str(COMPONENT, config.aiohttp_client.integration_name) + + # set span.kind tag equal to type of request + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + # Params can be included separate of the URL so the URL has to be constructed + # with the passed params. + url_str = str(url.update_query(params) if params else url) + host, query = extract_netloc_and_query_info_from_url(url_str) + set_http_meta( + span, + config.aiohttp_client, + method=method, + url=str(url), + target_host=host, + query=query, + request_headers=headers, + ) + resp = await func(*args, **kwargs) # type: aiohttp.ClientResponse + set_http_meta( + span, config.aiohttp_client, response_headers=resp.headers, status_code=resp.status, status_msg=resp.reason + ) + return resp + + +@with_traced_module_sync +def _traced_clientsession_init(aiohttp, pin, func, instance, args, kwargs): + func(*args, **kwargs) + instance._connector = _WrappedConnectorClass(instance._connector, pin) + + +def _patch_client(aiohttp): + Pin().onto(aiohttp) + pin = Pin(_config=config.aiohttp_client.copy()) + pin.onto(aiohttp.ClientSession) + + wrap("aiohttp", "ClientSession.__init__", _traced_clientsession_init(aiohttp)) + wrap("aiohttp", "ClientSession._request", _traced_clientsession_request(aiohttp)) + + +def patch(): + import aiohttp + + if getattr(aiohttp, "_datadog_patch", False): + return + + _patch_client(aiohttp) + + aiohttp._datadog_patch = True + + +def _unpatch_client(aiohttp): + unwrap(aiohttp.ClientSession, "__init__") + unwrap(aiohttp.ClientSession, "_request") + + +def unpatch(): + import aiohttp + + if not getattr(aiohttp, "_datadog_patch", False): + return + + _unpatch_client(aiohttp) + + aiohttp._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/__init__.py new file mode 100644 index 0000000..96b626a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/__init__.py @@ -0,0 +1,27 @@ +""" +The ``aiohttp_jinja2`` integration adds tracing of template rendering. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. 
+ +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(aiohttp_jinja2=True) +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["aiohttp_jinja2"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/patch.py new file mode 100644 index 0000000..e774362 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiohttp_jinja2/patch.py @@ -0,0 +1,75 @@ +import aiohttp_jinja2 + +from ddtrace import Pin +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from ...ext import SpanTypes +from ...internal.utils import get_argument_value +from ..trace_utils import unwrap +from ..trace_utils import with_traced_module +from ..trace_utils import wrap + + +config._add( + "aiohttp_jinja2", + dict(), +) + + +def get_version(): + # type: () -> str + return getattr(aiohttp_jinja2, "__version__", "") + + +@with_traced_module +def traced_render_template(aiohttp_jinja2, pin, func, instance, args, kwargs): + # original signature: + # render_template(template_name, request, context, *, app_key=APP_KEY, encoding='utf-8') + template_name = get_argument_value(args, kwargs, 0, "template_name") + request = get_argument_value(args, kwargs, 1, "request") + get_env_kwargs = {} + if "app_key" in kwargs: + get_env_kwargs["app_key"] = kwargs["app_key"] + env = aiohttp_jinja2.get_env(request.app, **get_env_kwargs) + + # the prefix is available only on PackageLoader + template_prefix = getattr(env.loader, "package_path", "") + template_meta = "%s/%s" % (template_prefix, template_name) + + with pin.tracer.trace("aiohttp.template", span_type=SpanTypes.TEMPLATE) as span: + span.set_tag_str(COMPONENT, config.aiohttp_jinja2.integration_name) + + span.set_tag_str("aiohttp.template", template_meta) + return func(*args, **kwargs) + + +def _patch(aiohttp_jinja2): + Pin().onto(aiohttp_jinja2) + wrap("aiohttp_jinja2", "render_template", traced_render_template(aiohttp_jinja2)) + + +def patch(): + import aiohttp_jinja2 + + if getattr(aiohttp_jinja2, "_datadog_patch", False): + return + + _patch(aiohttp_jinja2) + + aiohttp_jinja2._datadog_patch = True + + +def _unpatch(aiohttp_jinja2): + unwrap(aiohttp_jinja2, "render_template") + + +def unpatch(): + import aiohttp_jinja2 + + if not getattr(aiohttp_jinja2, "_datadog_patch", False): + return + + _unpatch(aiohttp_jinja2) + + aiohttp_jinja2._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/__init__.py new file mode 100644 index 0000000..a104996 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/__init__.py @@ -0,0 +1,50 @@ +""" +The aiomysql integration instruments the aiomysql library to trace MySQL queries. + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. 
+ +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(aiomysql=True) + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + import asyncio + import aiomysql + + # This will report a span with the default settings + conn = await aiomysql.connect(host="127.0.0.1", port=3306, + user="root", password="", db="mysql", + loop=loop) + + # Use a pin to override the service name for this connection. + Pin.override(conn, service="mysql-users") + + + cur = await conn.cursor() + await cur.execute("SELECT 6*7 AS the_answer;") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["aiomysql"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/patch.py new file mode 100644 index 0000000..56a9636 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiomysql/patch.py @@ -0,0 +1,161 @@ +import aiomysql + +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import dbapi +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_database_operation +from ddtrace.internal.utils.wrappers import unwrap +from ddtrace.vendor import wrapt + +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net +from ...internal.schema import schematize_service_name + + +config._add( + "aiomysql", + dict(_default_service=schematize_service_name("mysql")), +) + + +def get_version(): + # type: () -> str + return getattr(aiomysql, "__version__", "") + + +CONN_ATTR_BY_TAG = { + net.TARGET_HOST: "host", + net.TARGET_PORT: "port", + db.USER: "user", + db.NAME: "db", +} + + +async def patched_connect(connect_func, _, args, kwargs): + conn = await connect_func(*args, **kwargs) + tags = {} + for tag, attr in CONN_ATTR_BY_TAG.items(): + if hasattr(conn, attr): + tags[tag] = getattr(conn, attr) + tags[db.SYSTEM] = "mysql" + + c = AIOTracedConnection(conn) + Pin(tags=tags).onto(c) + return c + + +class AIOTracedCursor(wrapt.ObjectProxy): + """TracedCursor wraps a aiomysql cursor and traces its queries.""" + + def __init__(self, cursor, pin): + super(AIOTracedCursor, self).__init__(cursor) + pin.onto(self) + self._self_datadog_name = schematize_database_operation("mysql.query", database_provider="mysql") + + async def _trace_method(self, method, resource, extra_tags, *args, **kwargs): + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + result = await method(*args, **kwargs) + return result + service = pin.service + + with pin.tracer.trace( + self._self_datadog_name, service=service, resource=resource, span_type=SpanTypes.SQL + ) as s: + s.set_tag_str(COMPONENT, config.aiomysql.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tag(SPAN_MEASURED_KEY) + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, 
config.aiomysql.get_analytics_sample_rate()) + + try: + result = await method(*args, **kwargs) + return result + finally: + s.set_metric(db.ROWCOUNT, self.rowcount) + s.set_metric("db.rownumber", self.rownumber) + + async def executemany(self, query, *args, **kwargs): + result = await self._trace_method( + self.__wrapped__.executemany, query, {"sql.executemany": "true"}, query, *args, **kwargs + ) + return result + + async def execute(self, query, *args, **kwargs): + result = await self._trace_method(self.__wrapped__.execute, query, {}, query, *args, **kwargs) + return result + + # Explicitly define `__aenter__` and `__aexit__` since they do not get proxied properly + async def __aenter__(self): + # The base class just returns `self`, but we want the wrapped cursor so we return ourselves + return self + + async def __aexit__(self, *args, **kwargs): + return await self.__wrapped__.__aexit__(*args, **kwargs) + + +class AIOTracedConnection(wrapt.ObjectProxy): + def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor): + super(AIOTracedConnection, self).__init__(conn) + name = dbapi._get_vendor(conn) + db_pin = pin or Pin(service=name) + db_pin.onto(self) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls + + def cursor(self, *args, **kwargs): + ctx_manager = self.__wrapped__.cursor(*args, **kwargs) + pin = Pin.get_from(self) + if not pin: + return ctx_manager + + # The result of `cursor()` is an `aiomysql.utils._ContextManager` + # which wraps a coroutine (a future) and adds async context manager + # helper functions to it. + # https://github.com/aio-libs/aiomysql/blob/8a32f052a16dc3886af54b98f4d91d95862bfb8e/aiomysql/connection.py#L461 + # https://github.com/aio-libs/aiomysql/blob/7fa5078da31bbc95f5e32a934a4b2b4207c67ede/aiomysql/utils.py#L30-L79 + # We cannot swap out the result on the future/context manager so + # instead we have to create a new coroutine that returns our + # wrapped cursor + # We also cannot turn `def cursor` into `async def cursor` because + # otherwise we will change the result to be a coroutine instead of + # an `aiomysql.utils._ContextManager` which wraps a coroutine. This + # will cause issues with `async with conn.cursor() as cur:` usage. 
+ async def _wrap_cursor(): + cursor = await ctx_manager + return self._self_cursor_cls(cursor, pin) + + return type(ctx_manager)(_wrap_cursor()) + + # Explicitly define `__aenter__` and `__aexit__` since they do not get proxied properly + async def __aenter__(self): + return await self.__wrapped__.__aenter__() + + async def __aexit__(self, *args, **kwargs): + return await self.__wrapped__.__aexit__(*args, **kwargs) + + +def patch(): + if getattr(aiomysql, "__datadog_patch", False): + return + aiomysql.__datadog_patch = True + wrapt.wrap_function_wrapper(aiomysql.connection, "_connect", patched_connect) + + +def unpatch(): + if getattr(aiomysql, "__datadog_patch", False): + aiomysql.__datadog_patch = False + unwrap(aiomysql.connection, "_connect") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/__init__.py new file mode 100644 index 0000000..df4fbb2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/__init__.py @@ -0,0 +1,28 @@ +""" +Instrument aiopg to report a span for each executed Postgres queries:: + + from ddtrace import Pin, patch + import aiopg + + # If not patched yet, you can patch aiopg specifically + patch(aiopg=True) + + # This will report a span with the default settings + async with aiopg.connect(DSN) as db: + with (await db.cursor()) as cursor: + await cursor.execute("SELECT * FROM users WHERE id = 1") + + # Use a pin to specify metadata related to this connection + Pin.override(db, service='postgres-users') +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["aiopg"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/connection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/connection.py new file mode 100644 index 0000000..3373405 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/connection.py @@ -0,0 +1,121 @@ +import asyncio + +from aiopg import __version__ +from aiopg.utils import _ContextManager + +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import dbapi +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_database_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.version import parse_version +from ddtrace.pin import Pin +from ddtrace.vendor import wrapt + + +AIOPG_VERSION = parse_version(__version__) + + +class AIOTracedCursor(wrapt.ObjectProxy): + """TracedCursor wraps a psql cursor and traces its queries.""" + + def __init__(self, cursor, pin): + super(AIOTracedCursor, self).__init__(cursor) + pin.onto(self) + self._datadog_name = schematize_database_operation("postgres.query", database_provider="postgresql") + + @asyncio.coroutine + def _trace_method(self, method, resource, extra_tags, *args, **kwargs): + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + result = yield from method(*args, **kwargs) + return result + service = pin.service + + with pin.tracer.trace(self._datadog_name, service=service, resource=resource, 
span_type=SpanTypes.SQL) as s: + s.set_tag_str(COMPONENT, config.aiopg.integration_name) + s.set_tag_str(db.SYSTEM, "postgresql") + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tag(SPAN_MEASURED_KEY) + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.aiopg.get_analytics_sample_rate()) + + try: + result = yield from method(*args, **kwargs) + return result + finally: + s.set_metric(db.ROWCOUNT, self.rowcount) + + @asyncio.coroutine + def executemany(self, query, *args, **kwargs): + # FIXME[matt] properly handle kwargs here. arg names can be different + # with different libs. + result = yield from self._trace_method( + self.__wrapped__.executemany, query, {"sql.executemany": "true"}, query, *args, **kwargs + ) + return result + + @asyncio.coroutine + def execute(self, query, *args, **kwargs): + result = yield from self._trace_method(self.__wrapped__.execute, query, {}, query, *args, **kwargs) + return result + + @asyncio.coroutine + def callproc(self, proc, args): + result = yield from self._trace_method(self.__wrapped__.callproc, proc, {}, proc, args) + return result + + def __aiter__(self): + return self.__wrapped__.__aiter__() + + +class AIOTracedConnection(wrapt.ObjectProxy): + """TracedConnection wraps a Connection with tracing code.""" + + def __init__(self, conn, pin=None, cursor_cls=AIOTracedCursor): + super(AIOTracedConnection, self).__init__(conn) + vendor = dbapi._get_vendor(conn) + name = schematize_service_name(vendor) + db_pin = pin or Pin(service=name) + db_pin.onto(self) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls + + # unfortunately we also need to patch this method as otherwise "self" + # ends up being the aiopg connection object + if AIOPG_VERSION >= (0, 16, 0): + + def cursor(self, *args, **kwargs): + # Only one cursor per connection is allowed, as per DB API spec + self.close_cursor() + self._last_usage = self._loop.time() + + coro = self._cursor(*args, **kwargs) + return _ContextManager(coro) + + else: + + def cursor(self, *args, **kwargs): + coro = self._cursor(*args, **kwargs) + return _ContextManager(coro) + + @asyncio.coroutine + def _cursor(self, *args, **kwargs): + cursor = yield from self.__wrapped__._cursor(*args, **kwargs) + pin = Pin.get_from(self) + if not pin: + return cursor + return self._self_cursor_cls(cursor, pin) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/patch.py new file mode 100644 index 0000000..1e15d54 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aiopg/patch.py @@ -0,0 +1,62 @@ +# 3p +import asyncio + +import aiopg.connection +import psycopg2.extensions + +from ddtrace.contrib.aiopg.connection import AIOTracedConnection +from ddtrace.contrib.psycopg.connection import patch_conn as psycopg_patch_conn +from ddtrace.contrib.psycopg.extensions import _patch_extensions +from ddtrace.contrib.psycopg.extensions import _unpatch_extensions +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.vendor import wrapt + + +def get_version(): + # type: () -> str + return getattr(aiopg, "__version__", "") + + +def patch(): + """Patch monkey patches psycopg's connection function + so that the connection's functions are traced. 
+ """ + if getattr(aiopg, "_datadog_patch", False): + return + aiopg._datadog_patch = True + + wrapt.wrap_function_wrapper(aiopg.connection, "_connect", patched_connect) + _patch_extensions(_aiopg_extensions) # do this early just in case + + +def unpatch(): + if getattr(aiopg, "_datadog_patch", False): + aiopg._datadog_patch = False + _u(aiopg.connection, "_connect") + _unpatch_extensions(_aiopg_extensions) + + +@asyncio.coroutine +def patched_connect(connect_func, _, args, kwargs): + conn = yield from connect_func(*args, **kwargs) + return psycopg_patch_conn(conn, traced_conn_cls=AIOTracedConnection) + + +def _extensions_register_type(func, _, args, kwargs): + def _unroll_args(obj, scope=None): + return obj, scope + + obj, scope = _unroll_args(*args, **kwargs) + + # register_type performs a c-level check of the object + # type so we must be sure to pass in the actual db connection + if scope and isinstance(scope, wrapt.ObjectProxy): + scope = scope.__wrapped__._conn + + return func(obj, scope) if scope else func(obj) + + +# extension hooks +_aiopg_extensions = [ + (psycopg2.extensions.register_type, psycopg2.extensions, "register_type", _extensions_register_type), +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/__init__.py new file mode 100644 index 0000000..46d5e71 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/__init__.py @@ -0,0 +1,83 @@ +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate + + +deprecate( + "The aioredis integration is deprecated.", + message="Please use the redis integration with redis>=4.2.0 instead.", + category=DDTraceDeprecationWarning, +) +""" +The aioredis integration instruments aioredis requests. Version 1.3 and above are fully +supported. + + +Enabling +~~~~~~~~ + +The aioredis integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch() ` to manually enable the integration:: + + from ddtrace import patch + patch(aioredis=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.aioredis["service"] + + The service name reported by default for aioredis instances. + + This option can also be set with the ``DD_AIOREDIS_SERVICE`` environment + variable. + + Default: ``"redis"`` + +.. py:data:: ddtrace.config.aioredis["cmd_max_length"] + + Max allowable size for the aioredis command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_AIOREDIS_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + +.. py:data:: ddtrace.config.aioedis["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_REDIS_RESOURCE_ONLY_COMMAND`` environment + variable. 
+ + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the aioredis integration on a per-instance basis use the +``Pin`` API:: + + import aioredis + from ddtrace import Pin + + myaioredis = aioredis.Aioredis() + Pin.override(myaioredis, service="myaioredis") +""" +from ...internal.utils.importlib import require_modules # noqa:E402 + + +required_modules = ["aioredis"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/patch.py new file mode 100644 index 0000000..9941a90 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aioredis/patch.py @@ -0,0 +1,233 @@ +import asyncio +import os +import sys + +import aioredis + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.pin import Pin +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net +from ...ext import redis as redisx +from ...internal.schema import schematize_cache_operation +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import CMD_MAX_LEN +from ...internal.utils.formats import asbool +from ...internal.utils.formats import stringify_cache_args +from .. import trace_utils +from ..trace_utils_redis import ROW_RETURNING_COMMANDS +from ..trace_utils_redis import _run_redis_command_async +from ..trace_utils_redis import _trace_redis_cmd +from ..trace_utils_redis import _trace_redis_execute_pipeline +from ..trace_utils_redis import determine_row_count + + +try: + from aioredis.commands.transaction import _RedisBuffer +except ImportError: + _RedisBuffer = None + +config._add( + "aioredis", + dict( + _default_service=schematize_service_name("redis"), + cmd_max_length=int(os.getenv("DD_AIOREDIS_CMD_MAX_LENGTH", CMD_MAX_LEN)), + resource_only_command=asbool(os.getenv("DD_REDIS_RESOURCE_ONLY_COMMAND", True)), + ), +) + +aioredis_version_str = getattr(aioredis, "__version__", "") +aioredis_version = tuple([int(i) for i in aioredis_version_str.split(".")]) + + +def get_version(): + # type: () -> str + return aioredis_version_str + + +def patch(): + if getattr(aioredis, "_datadog_patch", False): + return + aioredis._datadog_patch = True + pin = Pin() + if aioredis_version >= (2, 0): + _w("aioredis.client", "Redis.execute_command", traced_execute_command) + _w("aioredis.client", "Redis.pipeline", traced_pipeline) + _w("aioredis.client", "Pipeline.execute", traced_execute_pipeline) + pin.onto(aioredis.client.Redis) + else: + _w("aioredis", "Redis.execute", traced_13_execute_command) + _w("aioredis", "Redis.pipeline", traced_13_pipeline) + _w("aioredis.commands.transaction", "Pipeline.execute", traced_13_execute_pipeline) + pin.onto(aioredis.Redis) + + +def unpatch(): + if not getattr(aioredis, "_datadog_patch", False): + return + + aioredis._datadog_patch = False + if aioredis_version >= (2, 0): + _u(aioredis.client.Redis, "execute_command") + _u(aioredis.client.Redis, "pipeline") + _u(aioredis.client.Pipeline, 
"execute") + else: + _u(aioredis.Redis, "execute") + _u(aioredis.Redis, "pipeline") + _u(aioredis.commands.transaction.Pipeline, "execute") + + +async def traced_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + with _trace_redis_cmd(pin, config.aioredis, instance, args) as span: + return await _run_redis_command_async(span=span, func=func, args=args, kwargs=kwargs) + + +def traced_pipeline(func, instance, args, kwargs): + pipeline = func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +async def traced_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c, cmd_max_len=config.aioredis.cmd_max_length) for c, _ in instance.command_stack] + with _trace_redis_execute_pipeline(pin, config.aioredis, cmds, instance): + return await func(*args, **kwargs) + + +def traced_13_pipeline(func, instance, args, kwargs): + pipeline = func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +def traced_13_execute_command(func, instance, args, kwargs): + # If we have a _RedisBuffer then we are in a pipeline + if isinstance(instance.connection, _RedisBuffer): + return func(*args, **kwargs) + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # Don't activate the span since this operation is performed as a future which concludes sometime later on in + # execution so subsequent operations in the stack are not necessarily semantically related + # (we don't want this span to be the parent of all other spans created before the future is resolved) + parent = pin.tracer.current_span() + query = stringify_cache_args(args, cmd_max_len=config.aioredis.cmd_max_length) + span = pin.tracer.start_span( + schematize_cache_operation(redisx.CMD, cache_provider="redis"), + service=trace_utils.ext_service(pin, config.aioredis), + resource=query.split(" ")[0] if config.aioredis.resource_only_command else query, + span_type=SpanTypes.REDIS, + activate=False, + child_of=parent, + ) + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag_str(COMPONENT, config.aioredis.integration_name) + span.set_tag_str(db.SYSTEM, redisx.APP) + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag_str(redisx.RAWCMD, query) + if pin.tags: + span.set_tags(pin.tags) + + span.set_tags( + { + net.TARGET_HOST: instance.address[0], + net.TARGET_PORT: instance.address[1], + redisx.DB: instance.db or 0, + } + ) + span.set_metric(redisx.ARGS_LEN, len(args)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.aioredis.get_analytics_sample_rate()) + + def _finish_span(future): + try: + # Accessing the result will raise an exception if: + # - The future was cancelled (CancelledError) + # - There was an error executing the future (`future.exception()`) + # - The future is in an invalid state + redis_command = span.resource.split(" ")[0] + future.result() + if redis_command in ROW_RETURNING_COMMANDS: + determine_row_count(redis_command=redis_command, span=span, result=future.result()) + # CancelledError exceptions extend from BaseException as of Python 3.8, instead of usual Exception + except BaseException: + span.set_exc_info(*sys.exc_info()) + if redis_command in 
ROW_RETURNING_COMMANDS: + span.set_metric(db.ROWCOUNT, 0) + finally: + span.finish() + + task = func(*args, **kwargs) + # Execute command returns a coroutine when no free connections are available + # https://github.com/aio-libs/aioredis-py/blob/v1.3.1/aioredis/pool.py#L191 + task = asyncio.ensure_future(task) + task.add_done_callback(_finish_span) + return task + + +async def traced_13_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [] + for _, cmd, cmd_args, _ in instance._pipeline: + parts = [cmd] + parts.extend(cmd_args) + cmds.append(stringify_cache_args(parts, cmd_max_len=config.aioredis.cmd_max_length)) + + resource = cmds_string = "\n".join(cmds) + if config.aioredis.resource_only_command: + resource = "\n".join([cmd.split(" ")[0] for cmd in cmds]) + + with pin.tracer.trace( + schematize_cache_operation(redisx.CMD, cache_provider="redis"), + resource=resource, + service=trace_utils.ext_service(pin, config.aioredis), + span_type=SpanTypes.REDIS, + ) as span: + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag_str(COMPONENT, config.aioredis.integration_name) + span.set_tag_str(db.SYSTEM, redisx.APP) + span.set_tags( + { + net.TARGET_HOST: instance._pool_or_conn.address[0], + net.TARGET_PORT: instance._pool_or_conn.address[1], + redisx.DB: instance._pool_or_conn.db or 0, + } + ) + + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag_str(redisx.RAWCMD, cmds_string) + span.set_metric(redisx.PIPELINE_LEN, len(instance._pipeline)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.aioredis.get_analytics_sample_rate()) + + return await func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/__init__.py new file mode 100644 index 0000000..8b4c96d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/__init__.py @@ -0,0 +1,36 @@ +""" +The Algoliasearch__ integration will add tracing to your Algolia searches. + +:: + + import ddtrace.auto + + from algoliasearch import algoliasearch + client = alogliasearch.Client(, ) + index = client.init_index() + index.search("your query", args={"attributesToRetrieve": "attribute1,attribute1"}) + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.algoliasearch['collect_query_text'] + + Whether to pass the text of your query onto Datadog. Since this may contain sensitive data it's off by default + + Default: ``False`` + +.. 
__: https://www.algolia.com +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["algoliasearch", "algoliasearch.version"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/patch.py new file mode 100644 index 0000000..b576e4d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/algoliasearch/patch.py @@ -0,0 +1,168 @@ +from ddtrace import config +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_cloud_api_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.pin import Pin +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from .. import trace_utils + + +DD_PATCH_ATTR = "_datadog_patch" + +SERVICE_NAME = schematize_service_name("algoliasearch") +APP_NAME = "algoliasearch" + +try: + VERSION = "0.0.0" + import algoliasearch + from algoliasearch.version import VERSION + + algoliasearch_version = tuple([int(i) for i in VERSION.split(".")]) + + # Default configuration + config._add("algoliasearch", dict(_default_service=SERVICE_NAME, collect_query_text=False)) +except ImportError: + algoliasearch_version = (0, 0) + + +def get_version(): + # type: () -> str + return VERSION + + +def patch(): + if algoliasearch_version == (0, 0): + return + + if getattr(algoliasearch, DD_PATCH_ATTR, False): + return + + algoliasearch._datadog_patch = True + + pin = Pin() + + if algoliasearch_version < (2, 0) and algoliasearch_version >= (1, 0): + _w(algoliasearch.index, "Index.search", _patched_search) + pin.onto(algoliasearch.index.Index) + elif algoliasearch_version >= (2, 0) and algoliasearch_version < (3, 0): + from algoliasearch import search_index + + _w(algoliasearch, "search_index.SearchIndex.search", _patched_search) + pin.onto(search_index.SearchIndex) + else: + return + + +def unpatch(): + if algoliasearch_version == (0, 0): + return + + if getattr(algoliasearch, DD_PATCH_ATTR, False): + setattr(algoliasearch, DD_PATCH_ATTR, False) + + if algoliasearch_version < (2, 0) and algoliasearch_version >= (1, 0): + _u(algoliasearch.index.Index, "search") + elif algoliasearch_version >= (2, 0) and algoliasearch_version < (3, 0): + from algoliasearch import search_index + + _u(search_index.SearchIndex, "search") + else: + return + + +# DEV: this maps serves the dual purpose of enumerating the algoliasearch.search() query_args that +# will be sent along as tags, as well as converting arguments names into tag names compliant with +# tag naming recommendations set out here: https://docs.datadoghq.com/tagging/ +QUERY_ARGS_DD_TAG_MAP = { + "page": "page", + "hitsPerPage": "hits_per_page", + "attributesToRetrieve": "attributes_to_retrieve", + "attributesToHighlight": "attributes_to_highlight", + "attributesToSnippet": "attributes_to_snippet", + "minWordSizefor1Typo": "min_word_size_for_1_typo", + "minWordSizefor2Typos": "min_word_size_for_2_typos", + "getRankingInfo": "get_ranking_info", + "aroundLatLng": "around_lat_lng", + "numericFilters": 
"numeric_filters", + "tagFilters": "tag_filters", + "queryType": "query_type", + "optionalWords": "optional_words", + "distinct": "distinct", +} + + +def _patched_search(func, instance, wrapt_args, wrapt_kwargs): + """ + wrapt_args is called the way it is to distinguish it from the 'args' + argument to the algoliasearch.index.Index.search() method. + """ + + if algoliasearch_version < (2, 0) and algoliasearch_version >= (1, 0): + function_query_arg_name = "args" + elif algoliasearch_version >= (2, 0) and algoliasearch_version < (3, 0): + function_query_arg_name = "request_options" + else: + return func(*wrapt_args, **wrapt_kwargs) + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*wrapt_args, **wrapt_kwargs) + + with pin.tracer.trace( + schematize_cloud_api_operation("algoliasearch.search", cloud_provider="algoliasearch", cloud_service="search"), + service=trace_utils.ext_service(pin, config.algoliasearch), + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.algoliasearch.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + if not span.sampled: + return func(*wrapt_args, **wrapt_kwargs) + + if config.algoliasearch.collect_query_text: + span.set_tag_str("query.text", wrapt_kwargs.get("query", wrapt_args[0])) + + query_args = wrapt_kwargs.get(function_query_arg_name, wrapt_args[1] if len(wrapt_args) > 1 else None) + + if query_args and isinstance(query_args, dict): + for query_arg, tag_name in QUERY_ARGS_DD_TAG_MAP.items(): + value = query_args.get(query_arg) + if value is not None: + span.set_tag("query.args.{}".format(tag_name), value) + + # Result would look like this + # { + # 'hits': [ + # { + # .... your search results ... + # } + # ], + # 'processingTimeMS': 1, + # 'nbHits': 1, + # 'hitsPerPage': 20, + # 'exhaustiveNbHits': true, + # 'params': 'query=xxx', + # 'nbPages': 1, + # 'query': 'xxx', + # 'page': 0 + # } + result = func(*wrapt_args, **wrapt_kwargs) + + if isinstance(result, dict): + if result.get("processingTimeMS", None) is not None: + span.set_metric("processing_time_ms", int(result["processingTimeMS"])) + + if result.get("nbHits", None) is not None: + span.set_metric("number_of_hits", int(result["nbHits"])) + + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/__init__.py new file mode 100644 index 0000000..b00475c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/__init__.py @@ -0,0 +1,79 @@ +""" +The aredis integration traces aredis requests. + + +Enabling +~~~~~~~~ + +The aredis integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(aredis=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.aredis["service"] + + The service name reported by default for aredis traces. + + This option can also be set with the ``DD_AREDIS_SERVICE`` environment + variable. + + Default: ``"redis"`` + +.. py:data:: ddtrace.config.aredis["cmd_max_length"] + + Max allowable size for the aredis command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_AREDIS_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + +.. 
py:data:: ddtrace.config.aredis["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_REDIS_RESOURCE_ONLY_COMMAND`` environment + variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular aredis instances use the :class:`Pin ` API:: + + import aredis + from ddtrace import Pin + + client = aredis.StrictRedis(host="localhost", port=6379) + + # Override service name for this instance + Pin.override(client, service="my-custom-queue") + + # Traces reported for this client will now have "my-custom-queue" + # as the service name. + async def example(): + await client.get("my-key") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["aredis", "aredis.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/patch.py new file mode 100644 index 0000000..1c0dc8c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aredis/patch.py @@ -0,0 +1,86 @@ +import os + +import aredis + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import CMD_MAX_LEN +from ...internal.utils.formats import asbool +from ...internal.utils.formats import stringify_cache_args +from ...internal.utils.wrappers import unwrap +from ...pin import Pin +from ..trace_utils_redis import _run_redis_command_async +from ..trace_utils_redis import _trace_redis_cmd +from ..trace_utils_redis import _trace_redis_execute_pipeline + + +config._add( + "aredis", + dict( + _default_service=schematize_service_name("redis"), + cmd_max_length=int(os.getenv("DD_AREDIS_CMD_MAX_LENGTH", CMD_MAX_LEN)), + resource_only_command=asbool(os.getenv("DD_REDIS_RESOURCE_ONLY_COMMAND", True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(aredis, "__version__", "") + + +def patch(): + """Patch the instrumented methods""" + if getattr(aredis, "_datadog_patch", False): + return + aredis._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + _w("aredis.client", "StrictRedis.execute_command", traced_execute_command) + _w("aredis.client", "StrictRedis.pipeline", traced_pipeline) + _w("aredis.pipeline", "StrictPipeline.execute", traced_execute_pipeline) + _w("aredis.pipeline", "StrictPipeline.immediate_execute_command", traced_execute_command) + Pin(service=None).onto(aredis.StrictRedis) + + +def unpatch(): + if getattr(aredis, "_datadog_patch", False): + aredis._datadog_patch = False + + unwrap(aredis.client.StrictRedis, "execute_command") + unwrap(aredis.client.StrictRedis, "pipeline") + unwrap(aredis.pipeline.StrictPipeline, "execute") + unwrap(aredis.pipeline.StrictPipeline, "immediate_execute_command") + + +# +# tracing functions +# +async def traced_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + with _trace_redis_cmd(pin, config.aredis, instance, args) as span: + return await _run_redis_command_async(span=span, func=func, args=args, kwargs=kwargs) + + +async def traced_pipeline(func, instance, args, 
kwargs): + pipeline = await func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +async def traced_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c, cmd_max_len=config.aredis.cmd_max_length) for c, _ in instance.command_stack] + with _trace_redis_execute_pipeline(pin, config.aredis, cmds, instance): + return await func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/__init__.py new file mode 100644 index 0000000..59902f5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/__init__.py @@ -0,0 +1,75 @@ +""" +The asgi__ middleware for tracing all requests to an ASGI-compliant application. + +To configure tracing manually:: + + from ddtrace.contrib.asgi import TraceMiddleware + + # app = <your asgi app> + app = TraceMiddleware(app) + +Then use ddtrace-run when serving your application. For example, if serving with Uvicorn:: + + ddtrace-run uvicorn app:app + +On Python 3.6 and below, you must enable the legacy ``AsyncioContextProvider`` before using the middleware:: + + from ddtrace.contrib.asyncio.provider import AsyncioContextProvider + from ddtrace import tracer # Or whichever tracer instance you plan to use + tracer.configure(context_provider=AsyncioContextProvider()) + +The middleware also supports using a custom function for handling exceptions for a trace:: + + from ddtrace.contrib.asgi import TraceMiddleware + + def custom_handle_exception_span(exc, span): + span.set_tag("http.status_code", 501) + + # app = <your asgi app> + app = TraceMiddleware(app, handle_exception_span=custom_handle_exception_span) + + +To retrieve the request span from the scope of an ASGI request use the ``span_from_scope`` +function:: + + from ddtrace.contrib.asgi import span_from_scope + + def handle_request(scope, send): + span = span_from_scope(scope) + if span: + span.set_tag(...) + ... + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.asgi['distributed_tracing'] + + Whether to use distributed tracing headers from requests received by your Asgi app. + + Default: ``True`` + +.. py:data:: ddtrace.config.asgi['service_name'] + + The service name reported for your ASGI app. + + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'asgi'`` + +.. 
__: https://asgi.readthedocs.io/ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = [] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .middleware import TraceMiddleware + from .middleware import get_version + from .middleware import span_from_scope + + __all__ = ["TraceMiddleware", "span_from_scope", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/middleware.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/middleware.py new file mode 100644 index 0000000..e87944e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/middleware.py @@ -0,0 +1,286 @@ +import sys +from typing import Any +from typing import Mapping +from typing import Optional +from urllib import parse + +import ddtrace +from ddtrace import Span +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import http +from ddtrace.internal.compat import is_valid_ip +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...internal import core +from ...internal.logger import get_logger +from .. import trace_utils +from .utils import guarantee_single_callable + + +log = get_logger(__name__) + +config._add( + "asgi", + dict(service_name=config._get_service(default="asgi"), request_span_name="asgi.request", distributed_tracing=True), +) + +ASGI_VERSION = "asgi.version" +ASGI_SPEC_VERSION = "asgi.spec_version" + + +def get_version() -> str: + return "" + + +def bytes_to_str(str_or_bytes): + return str_or_bytes.decode(errors="ignore") if isinstance(str_or_bytes, bytes) else str_or_bytes + + +def _extract_versions_from_scope(scope, integration_config): + tags = {} + + http_version = scope.get("http_version") + if http_version: + tags[http.VERSION] = http_version + + scope_asgi = scope.get("asgi") + + if scope_asgi and "version" in scope_asgi: + tags[ASGI_VERSION] = scope_asgi["version"] + + if scope_asgi and "spec_version" in scope_asgi: + tags[ASGI_SPEC_VERSION] = scope_asgi["spec_version"] + + return tags + + +def _extract_headers(scope): + headers = scope.get("headers") + if headers: + # headers: (Iterable[[byte string, byte string]]) + return dict((bytes_to_str(k), bytes_to_str(v)) for (k, v) in headers) + return {} + + +def _default_handle_exception_span(exc, span): + """Default handler for exception for span""" + span.set_tag(http.STATUS_CODE, 500) + + +def span_from_scope(scope: Mapping[str, Any]) -> Optional[Span]: + """Retrieve the top-level ASGI span from the scope.""" + return scope.get("datadog", {}).get("request_spans", [None])[0] + + +async def _blocked_asgi_app(scope, receive, send): + await send({"type": "http.response.start", "status": 403, "headers": []}) + await send({"type": "http.response.body", "body": b""}) + + +class TraceMiddleware: + """ + ASGI application middleware that traces the requests. + Args: + app: The ASGI application. + tracer: Custom tracer. Defaults to the global tracer. 
+ """ + + default_ports = {"http": 80, "https": 443, "ws": 80, "wss": 443} + + def __init__( + self, + app, + tracer=None, + integration_config=config.asgi, + handle_exception_span=_default_handle_exception_span, + span_modifier=None, + ): + self.app = guarantee_single_callable(app) + self.tracer = tracer or ddtrace.tracer + self.integration_config = integration_config + self.handle_exception_span = handle_exception_span + self.span_modifier = span_modifier + + async def __call__(self, scope, receive, send): + if scope["type"] != "http": + return await self.app(scope, receive, send) + try: + headers = _extract_headers(scope) + except Exception: + log.warning("failed to decode headers for distributed tracing", exc_info=True) + headers = {} + else: + trace_utils.activate_distributed_headers( + self.tracer, int_config=self.integration_config, request_headers=headers + ) + resource = " ".join((scope["method"], scope["path"])) + operation_name = self.integration_config.get("request_span_name", "asgi.request") + operation_name = schematize_url_operation(operation_name, direction=SpanDirection.INBOUND, protocol="http") + pin = ddtrace.pin.Pin(service="asgi", tracer=self.tracer) + with pin.tracer.trace( + name=operation_name, + service=trace_utils.int_service(None, self.integration_config), + resource=resource, + span_type=SpanTypes.WEB, + ) as span, core.context_with_data( + "asgi.__call__", + remote_addr=scope.get("REMOTE_ADDR"), + headers=headers, + headers_case_sensitive=True, + environ=scope, + middleware=self, + span=span, + ) as ctx: + span.set_tag_str(COMPONENT, self.integration_config.integration_name) + ctx.set_item("req_span", span) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + if "datadog" not in scope: + scope["datadog"] = {"request_spans": [span]} + else: + scope["datadog"]["request_spans"].append(span) + + if self.span_modifier: + self.span_modifier(span, scope) + + sample_rate = self.integration_config.get_analytics_sample_rate(use_global_config=True) + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + host_header = None + for key, value in scope["headers"]: + if key == b"host": + try: + host_header = value.decode("ascii") + except UnicodeDecodeError: + log.warning( + "failed to decode host header, host from http headers will not be considered", exc_info=True + ) + break + method = scope.get("method") + server = scope.get("server") + scheme = scope.get("scheme", "http") + parsed_query = parse.parse_qs(bytes_to_str(scope.get("query_string", b""))) + full_path = scope.get("root_path", "") + scope.get("path", "") + if host_header: + url = "{}://{}{}".format(scheme, host_header, full_path) + elif server and len(server) == 2: + port = server[1] + default_port = self.default_ports.get(scheme, None) + server_host = server[0] + (":" + str(port) if port is not None and port != default_port else "") + url = "{}://{}{}".format(scheme, server_host, full_path) + else: + url = None + query_string = scope.get("query_string") + if query_string: + query_string = bytes_to_str(query_string) + if url: + url = f"{url}?{query_string}" + if not self.integration_config.trace_query_string: + query_string = None + body = None + result = core.dispatch_with_results("asgi.request.parse.body", (receive, headers)).await_receive_and_body + if result: + receive, body = await result.value + + client = scope.get("client") + if isinstance(client, list) and len(client) and is_valid_ip(client[0]): + peer_ip = client[0] 
+ else: + peer_ip = None + + trace_utils.set_http_meta( + span, + self.integration_config, + method=method, + url=url, + query=query_string, + request_headers=headers, + raw_uri=url, + parsed_query=parsed_query, + request_body=body, + peer_ip=peer_ip, + headers_are_case_sensitive=True, + ) + tags = _extract_versions_from_scope(scope, self.integration_config) + span.set_tags(tags) + + async def wrapped_send(message): + try: + response_headers = _extract_headers(message) + except Exception: + log.warning("failed to extract response headers", exc_info=True) + response_headers = None + + if span and message.get("type") == "http.response.start" and "status" in message: + status_code = message["status"] + trace_utils.set_http_meta( + span, self.integration_config, status_code=status_code, response_headers=response_headers + ) + core.dispatch("asgi.start_response", ("asgi",)) + core.dispatch("asgi.finalize_response", (message.get("body"), response_headers)) + + if core.get_item(HTTP_REQUEST_BLOCKED): + raise trace_utils.InterruptException("wrapped_send") + try: + return await send(message) + finally: + # Per asgi spec, "more_body" is used if there is still data to send + # Close the span if "http.response.body" has no more data left to send in the + # response. + if ( + message.get("type") == "http.response.body" + and not message.get("more_body", False) + # If the span has an error status code delay finishing the span until the + # traceback and exception message is available + and span.error == 0 + ): + span.finish() + + async def wrapped_blocked_send(message): + result = core.dispatch_with_results("asgi.block.started", (ctx, url)).status_headers_content + if result: + status, headers, content = result.value + else: + status, headers, content = 403, [], b"" + if span and message.get("type") == "http.response.start": + message["headers"] = headers + message["status"] = int(status) + core.dispatch("asgi.finalize_response", (None, headers)) + elif message.get("type") == "http.response.body": + message["body"] = ( + content if isinstance(content, bytes) else content.encode("utf-8", errors="ignore") + ) + message["more_body"] = False + core.dispatch("asgi.finalize_response", (content, None)) + try: + return await send(message) + finally: + trace_utils.set_http_meta( + span, self.integration_config, status_code=status, response_headers=headers + ) + if message.get("type") == "http.response.body" and span.error == 0: + span.finish() + + try: + core.dispatch("asgi.start_request", ("asgi",)) + return await self.app(scope, receive, wrapped_send) + except trace_utils.InterruptException: + return await _blocked_asgi_app(scope, receive, wrapped_blocked_send) + except Exception as exc: + (exc_type, exc_val, exc_tb) = sys.exc_info() + span.set_exc_info(exc_type, exc_val, exc_tb) + self.handle_exception_span(exc, span) + raise + finally: + if span in scope["datadog"]["request_spans"]: + scope["datadog"]["request_spans"].remove(span) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/utils.py new file mode 100644 index 0000000..73f5d17 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asgi/utils.py @@ -0,0 +1,82 @@ +""" +Compatibility functions vendored from asgiref + +Source: https://github.com/django/asgiref +Version: 3.2.10 +License: + +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +""" +import asyncio +import inspect + + +def is_double_callable(application): + """ + Tests to see if an application is a legacy-style (double-callable) application. + """ + # Look for a hint on the object first + if getattr(application, "_asgi_single_callable", False): + return False + if getattr(application, "_asgi_double_callable", False): + return True + # Uninstanted classes are double-callable + if inspect.isclass(application): + return True + # Instanted classes depend on their __call__ + if hasattr(application, "__call__"): # noqa: B004 + # We only check to see if its __call__ is a coroutine function - + # if it's not, it still might be a coroutine function itself. + if asyncio.iscoroutinefunction(application.__call__): + return False + # Non-classes we just check directly + return not asyncio.iscoroutinefunction(application) + + +def double_to_single_callable(application): + """ + Transforms a double-callable ASGI application into a single-callable one. + """ + + async def new_application(scope, receive, send): + instance = application(scope) + return await instance(receive, send) + + return new_application + + +def guarantee_single_callable(application): + """ + Takes either a single- or double-callable application and always returns it + in single-callable style. Use this to add backwards compatibility for ASGI + 2.0 applications to your server/test harness/etc. + """ + if is_double_callable(application): + application = double_to_single_callable(application) + return application diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/__init__.py new file mode 100644 index 0000000..0e515e2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/__init__.py @@ -0,0 +1,65 @@ +""" +This integration provides context management for tracing the execution flow +of concurrent execution of ``asyncio.Task``. + +This integration is only necessary in Python < 3.7 (where contextvars is not supported). +For Python > 3.7 this works automatically without configuration. 
+ +For asynchronous execution tracing to work properly the tracer must +be configured as follows:: + + import asyncio + from ddtrace import tracer + from ddtrace.contrib.asyncio import context_provider + + # enable asyncio support + tracer.configure(context_provider=context_provider) + + async def some_work(): + with tracer.trace('asyncio.some_work'): + # do something + + # launch your coroutines as usual + loop = asyncio.get_event_loop() + loop.run_until_complete(some_work()) + loop.close() + +In addition, helpers are provided to simplify how the tracing ``Context`` is +handled between scheduled coroutines and ``Future`` invoked in separated +threads: + + * ``set_call_context(task, ctx)``: attach the context to the given ``Task`` + so that it will be available from the ``tracer.current_trace_context()`` + * ``ensure_future(coro_or_future, *, loop=None)``: wrapper for the + ``asyncio.ensure_future`` that attaches the current context to a new + ``Task`` instance + * ``run_in_executor(loop, executor, func, *args)``: wrapper for the + ``loop.run_in_executor`` that attaches the current context to the new + thread so that the trace can be resumed regardless when it's executed + * ``create_task(coro)``: creates a new asyncio ``Task`` that inherits the + current active ``Context`` so that generated traces in the new task are + attached to the main trace +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["asyncio"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from ...internal.compat import CONTEXTVARS_IS_AVAILABLE + from ...provider import DefaultContextProvider + from .provider import AsyncioContextProvider + + if CONTEXTVARS_IS_AVAILABLE: + context_provider = DefaultContextProvider() + else: + context_provider = AsyncioContextProvider() + + from .helpers import ensure_future + from .helpers import run_in_executor + from .helpers import set_call_context + from .patch import get_version + from .patch import patch + + __all__ = ["context_provider", "set_call_context", "ensure_future", "run_in_executor", "patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/compat.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/compat.py new file mode 100644 index 0000000..4389a36 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/compat.py @@ -0,0 +1,15 @@ +import asyncio + + +if hasattr(asyncio, "current_task"): + + def asyncio_current_task(): + try: + return asyncio.current_task() + except RuntimeError: + return None + +else: + + def asyncio_current_task(): + return asyncio.Task.current_task() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/helpers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/helpers.py new file mode 100644 index 0000000..f1803ee --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/helpers.py @@ -0,0 +1,81 @@ +""" +This module includes a list of convenience methods that +can be used to simplify some operations while handling +Context and Spans in instrumented ``asyncio`` code. +""" +import asyncio + +import ddtrace + +from .provider import AsyncioContextProvider +from .wrappers import wrapped_create_task + + +def set_call_context(task, ctx): + """ + Updates the ``Context`` for the given Task. Useful when you need to + pass the context among different tasks. + + This method is available for backward-compatibility. 
Use the + ``AsyncioContextProvider`` API to set the current active ``Context``. + """ + setattr(task, AsyncioContextProvider._CONTEXT_ATTR, ctx) + + +def ensure_future(coro_or_future, *, loop=None, tracer=None): + """Wrapper that sets a context to the newly created Task. + + If the current task already has a Context, it will be attached to the new Task so the Trace list will be preserved. + """ + tracer = tracer or ddtrace.tracer + current_ctx = tracer.current_trace_context() + task = asyncio.ensure_future(coro_or_future, loop=loop) + set_call_context(task, current_ctx) + return task + + +def run_in_executor(loop, executor, func, *args, tracer=None): + """Wrapper function that sets a context to the newly created Thread. + + If the current task has a Context, it will be attached as an empty Context with the current_span activated to + inherit the ``trace_id`` and the ``parent_id``. + + Because the Executor can run the Thread immediately or after the + coroutine is executed, we may have two different scenarios: + * the Context is copied in the new Thread and the trace is sent twice + * the coroutine flushes the Context and when the Thread copies the + Context it is already empty (so it will be a root Span) + + To support both situations, we create a new Context that knows only what was + the latest active Span when the new thread was created. In this new thread, + we fallback to the thread-local ``Context`` storage. + + """ + tracer = tracer or ddtrace.tracer + current_ctx = tracer.current_trace_context() + + # prepare the future using an executor wrapper + future = loop.run_in_executor(executor, _wrap_executor, func, args, tracer, current_ctx) + return future + + +def _wrap_executor(fn, args, tracer, ctx): + """ + This function is executed in the newly created Thread so the right + ``Context`` can be set in the thread-local storage. This operation + is safe because the ``Context`` class is thread-safe and can be + updated concurrently. + """ + # the AsyncioContextProvider knows that this is a new thread + # so it is legit to pass the Context in the thread-local storage; + # fn() will be executed outside the asyncio loop as a synchronous code + tracer.context_provider.activate(ctx) + return fn(*args) + + +def create_task(*args, **kwargs): + """This function spawns a task with a Context that inherits the + `trace_id` and the `parent_id` from the current active one if available. + """ + loop = asyncio.get_event_loop() + return wrapped_create_task(loop.create_task, None, args, kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/patch.py new file mode 100644 index 0000000..24620a7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/patch.py @@ -0,0 +1,22 @@ +import asyncio + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + """Patches current loop `create_task()` method to enable spawned tasks to + parent to the base task context. 
+ """ + if getattr(asyncio, "_datadog_patch", False): + return + asyncio._datadog_patch = True + + +def unpatch(): + """Remove tracing from patched modules.""" + + if getattr(asyncio, "_datadog_patch", False): + asyncio._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/provider.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/provider.py new file mode 100644 index 0000000..356753c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/provider.py @@ -0,0 +1,74 @@ +import asyncio + +from ...provider import BaseContextProvider +from ...provider import DatadogContextMixin +from ...span import Span + + +class AsyncioContextProvider(BaseContextProvider, DatadogContextMixin): + """Manages the active context for asyncio execution. Framework + instrumentation that is built on top of the ``asyncio`` library, should + use this provider when contextvars are not available (Python versions + less than 3.7). + + This Context Provider inherits from ``DefaultContextProvider`` because + it uses a thread-local storage when the ``Context`` is propagated to + a different thread, than the one that is running the async loop. + """ + + # Task attribute used to set/get the context + _CONTEXT_ATTR = "__datadog_context" + + def activate(self, context, loop=None): + """Sets the scoped ``Context`` for the current running ``Task``.""" + loop = self._get_loop(loop) + if not loop: + super(AsyncioContextProvider, self).activate(context) + return context + + # the current unit of work (if tasks are used) + task = asyncio.Task.current_task(loop=loop) + if task: + setattr(task, self._CONTEXT_ATTR, context) + return context + + def _get_loop(self, loop=None): + """Helper to try and resolve the current loop""" + try: + return loop or asyncio.get_event_loop() + except RuntimeError: + # Detects if a loop is available in the current thread; + # DEV: This happens when a new thread is created from the out that is running the async loop + # DEV: It's possible that a different Executor is handling a different Thread that + # works with blocking code. In that case, we fallback to a thread-local Context. 
+ pass + return None + + def _has_active_context(self, loop=None): + """Helper to determine if we have a currently active context""" + loop = self._get_loop(loop=loop) + if loop is None: + return super(AsyncioContextProvider, self)._has_active_context() + + # the current unit of work (if tasks are used) + task = asyncio.Task.current_task(loop=loop) + if task is None: + return False + + ctx = getattr(task, self._CONTEXT_ATTR, None) + return ctx is not None + + def active(self, loop=None): + """Returns the active context for the execution.""" + loop = self._get_loop(loop=loop) + if not loop: + return super(AsyncioContextProvider, self).active() + + # the current unit of work (if tasks are used) + task = asyncio.Task.current_task(loop=loop) + if task is None: + return None + ctx = getattr(task, self._CONTEXT_ATTR, None) + if isinstance(ctx, Span): + return self._update_active(ctx) + return ctx diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/wrappers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/wrappers.py new file mode 100644 index 0000000..ddbabc4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncio/wrappers.py @@ -0,0 +1,25 @@ +from .compat import asyncio_current_task +from .provider import AsyncioContextProvider + + +def wrapped_create_task(wrapped, instance, args, kwargs): + """Wrapper for ``create_task(coro)`` that propagates the current active + ``Context`` to the new ``Task``. This function is useful to connect traces + of detached executions. + + Note: we can't just link the task contexts due to the following scenario: + * begin task A + * task A starts task B1..B10 + * finish task B1-B9 (B10 still on trace stack) + * task A starts task C + * now task C gets parented to task B10 since it's still on the stack, + however was not actually triggered by B10 + """ + new_task = wrapped(*args, **kwargs) + current_task = asyncio_current_task() + + ctx = getattr(current_task, AsyncioContextProvider._CONTEXT_ATTR, None) + if ctx: + setattr(new_task, AsyncioContextProvider._CONTEXT_ATTR, ctx) + + return new_task diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/__init__.py new file mode 100644 index 0000000..d67edee --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/__init__.py @@ -0,0 +1,57 @@ +""" +The ``asyncpg`` integration traces database requests made using connection +and cursor objects. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(asyncpg=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.asyncpg['service'] + + The service name reported by default for asyncpg connections. + + This option can also be set with the ``DD_ASYNCPG_SERVICE`` + environment variable. 
+ + Default: ``postgres`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +Service +^^^^^^^ + +To configure the service name used by the asyncpg integration on a per-instance +basis use the ``Pin`` API:: + + import asyncpg + from ddtrace import Pin + + conn = asyncpg.connect("postgres://localhost:5432") + Pin.override(conn, service="custom-service") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["asyncpg"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/patch.py new file mode 100644 index 0000000..c7686a0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/asyncpg/patch.py @@ -0,0 +1,163 @@ +from typing import TYPE_CHECKING # noqa:I001 +from types import ModuleType +import asyncpg + +from ddtrace import Pin +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor import wrapt + +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net +from ...internal.logger import get_logger +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ..trace_utils import ext_service +from ..trace_utils import unwrap +from ..trace_utils import wrap +from ..trace_utils_async import with_traced_module + + +if TYPE_CHECKING: # pragma: no cover + from typing import Dict # noqa:F401 + from typing import Union # noqa:F401 + + from asyncpg.prepared_stmt import PreparedStatement # noqa:F401 + + +DBMS_NAME = "postgresql" + + +config._add( + "asyncpg", + dict( + _default_service=schematize_service_name("postgres"), + ), +) + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return getattr(asyncpg, "__version__", "") + + +def _get_connection_tags(conn): + # type: (asyncpg.Connection) -> Dict[str, str] + addr = conn._addr + params = conn._params + host = port = "" + if isinstance(addr, tuple) and len(addr) == 2: + host, port = addr + return { + net.TARGET_HOST: host, + net.TARGET_PORT: port, + db.USER: params.user, + db.NAME: params.database, + } + + +class _TracedConnection(wrapt.ObjectProxy): + def __init__(self, conn, pin): + super(_TracedConnection, self).__init__(conn) + tags = _get_connection_tags(conn) + tags[db.SYSTEM] = DBMS_NAME + conn_pin = pin.clone(tags=tags) + # Keep the pin on the protocol + conn_pin.onto(self._protocol) + + def __setddpin__(self, pin): + pin.onto(self._protocol) + + def __getddpin__(self): + return Pin.get_from(self._protocol) + + +@with_traced_module +async def _traced_connect(asyncpg, pin, func, instance, args, kwargs): + """Traced asyncpg.connect(). + + connect() is instrumented and patched to return a connection proxy. 
+ """ + with pin.tracer.trace( + "postgres.connect", span_type=SpanTypes.SQL, service=ext_service(pin, config.asyncpg) + ) as span: + span.set_tag_str(COMPONENT, config.asyncpg.integration_name) + span.set_tag_str(db.SYSTEM, DBMS_NAME) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + # Need an ObjectProxy since Connection uses slots + conn = _TracedConnection(await func(*args, **kwargs), pin) + span.set_tags(_get_connection_tags(conn)) + return conn + + +async def _traced_query(pin, method, query, args, kwargs): + with pin.tracer.trace( + schematize_database_operation("postgres.query", database_provider="postgresql"), + resource=query, + service=ext_service(pin, config.asyncpg), + span_type=SpanTypes.SQL, + ) as span: + span.set_tag_str(COMPONENT, config.asyncpg.integration_name) + span.set_tag_str(db.SYSTEM, DBMS_NAME) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + span.set_tags(pin.tags) + return await method(*args, **kwargs) + + +@with_traced_module +async def _traced_protocol_execute(asyncpg, pin, func, instance, args, kwargs): + state = get_argument_value(args, kwargs, 0, "state") # type: Union[str, PreparedStatement] + query = state if isinstance(state, str) else state.query + return await _traced_query(pin, func, query, args, kwargs) + + +def _patch(asyncpg: ModuleType) -> None: + wrap(asyncpg, "connect", _traced_connect(asyncpg)) + for method in ("execute", "bind_execute", "query", "bind_execute_many"): + wrap(asyncpg.protocol, "Protocol.%s" % method, _traced_protocol_execute(asyncpg)) + + +def patch(): + # type: () -> None + import asyncpg + + if getattr(asyncpg, "_datadog_patch", False): + return + + Pin().onto(asyncpg) + _patch(asyncpg) + + asyncpg._datadog_patch = True + + +def _unpatch(asyncpg: ModuleType) -> None: + unwrap(asyncpg, "connect") + for method in ("execute", "bind_execute", "query", "bind_execute_many"): + unwrap(asyncpg.protocol.Protocol, method) + + +def unpatch(): + # type: () -> None + import asyncpg + + if not getattr(asyncpg, "_datadog_patch", False): + return + + _unpatch(asyncpg) + + asyncpg._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/__init__.py new file mode 100644 index 0000000..6b43cd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/__init__.py @@ -0,0 +1,46 @@ +""" +The aws_lambda integration currently enables traces to be sent +before an impending timeout in an AWS Lambda function instrumented with the +`Datadog Lambda Python `_ package. + +Enabling +~~~~~~~~ + +The aws_lambda integration is enabled automatically for AWS Lambda +functions which have been instrumented with Datadog. + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +This integration is configured automatically. The `datadog_lambda` package +calls ``patch_all`` when ``DD_TRACE_ENABLED`` is set to ``true``. +It's not recommended to call ``patch`` for it manually. Since it would not do +anything for other environments that do not meet the criteria above. + + +Configuration +~~~~~~~~~~~~~ + +.. important:: + + You can configure some features with environment variables. + +.. py:data:: DD_APM_FLUSH_DEADLINE_MILLISECONDS + + Used to determine when to submit spans before a timeout occurs. 
+ When the remaining time in an AWS Lambda invocation is less than `DD_APM_FLUSH_DEADLINE_MILLISECONDS`, + the tracer will attempt to submit the current active spans and all finished spans. + + Default: 100 + + +For additional configuration refer to +`Instrumenting Python Serverless Applications by Datadog `_. +""" +from .patch import get_version +from .patch import patch +from .patch import unpatch + + +__all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/_cold_start.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/_cold_start.py new file mode 100644 index 0000000..75aeda4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/_cold_start.py @@ -0,0 +1,18 @@ +__cold_start = True +__lambda_container_initialized = False + + +def set_cold_start(): + """Set the value of the cold start global. + + This should be executed once per AWS Lambda execution before the execution. + """ + global __cold_start + global __lambda_container_initialized + __cold_start = not __lambda_container_initialized + __lambda_container_initialized = True + + +def is_cold_start(): + """Returns the value of the global cold_start.""" + return __cold_start diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/patch.py new file mode 100644 index 0000000..53c2fcf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/aws_lambda/patch.py @@ -0,0 +1,272 @@ +from importlib import import_module +import os +import signal + +from ddtrace import tracer +from ddtrace.constants import ERROR_MSG +from ddtrace.constants import ERROR_TYPE +from ddtrace.internal.logger import get_logger +from ddtrace.internal.serverless import in_aws_lambda +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.wrapping import unwrap +from ddtrace.internal.wrapping import wrap + +from ._cold_start import is_cold_start +from ._cold_start import set_cold_start + + +def get_version(): + # type: () -> str + return "" + + +class DDLambdaLogger: + """Uses `DDLogger` to log only on cold start invocations.""" + + def __init__(self): + self.logger = get_logger(__name__) + self.is_cold_start = is_cold_start() + + def exception(self, msg, *args, exc_info=True, **kwargs): + if self.is_cold_start: + self.logger.error(msg, *args, exc_info=exc_info, **kwargs) + + def warning(self, msg, *args, **kwargs): + if self.is_cold_start: + self.logger.warning(msg, *args, **kwargs) + + +log = DDLambdaLogger() + + +class TimeoutChannel: + def __init__(self, context): + self.crashed = False + self.context = context + + def _handle_signal(self, sig, f): + """ + Returns a signal of type `sig` with function `f`, if there are + no previously defined signals. + + Else, wraps the given signal with the previously defined one, + so no signals are overridden. + """ + old_signal = signal.getsignal(sig) + + def wrap_signals(*args, **kwargs): + if old_signal is not None: + old_signal(*args, **kwargs) + f(*args, **kwargs) + + # Return the incoming signal if any of the following cases happens: + # - old signal does not exist, + # - old signal is the same as the incoming, or + # - old signal is our wrapper. + # This avoids multiple signal calling and infinite wrapping. 
+ if not callable(old_signal) or old_signal == f or old_signal == wrap_signals: + return signal.signal(sig, f) + + return signal.signal(sig, wrap_signals) + + def _start(self): + self._handle_signal(signal.SIGALRM, self._crash_flush) + + remaining_time_in_millis = self.context.get_remaining_time_in_millis() + apm_flush_deadline = int(os.environ.get("DD_APM_FLUSH_DEADLINE_MILLISECONDS", 100)) + apm_flush_deadline = 100 if apm_flush_deadline < 0 else apm_flush_deadline + + # TODO: Update logic to calculate an approximate of how long it will + # take us to flush the spans on the queue. + remaining_time_in_seconds = max(((remaining_time_in_millis - apm_flush_deadline) / 1000), 0) + signal.setitimer(signal.ITIMER_REAL, remaining_time_in_seconds) + + def _crash_flush(self, _, __): + """ + Tags the current root span with an Impending Timeout error. + Finishes spans with ancestors from the current span. + """ + self._remove_alarm_signal() + self.crashed = True + + root_span = tracer.current_root_span() + if root_span is not None: + root_span.error = 1 + root_span.set_tag_str(ERROR_MSG, "Datadog detected an Impending Timeout") + root_span.set_tag_str(ERROR_TYPE, "Impending Timeout") + else: + log.warning("An impending timeout was reached, but no root span was found. No error will be tagged.") + + current_span = tracer.current_span() + if current_span is not None: + current_span.finish_with_ancestors() + + def _remove_alarm_signal(self): + """Removes the handler set for the signal `SIGALRM`.""" + signal.alarm(0) + signal.signal(signal.SIGALRM, signal.SIG_DFL) + + def stop(self): + self._remove_alarm_signal() + + +class DatadogInstrumentation(object): + """Patches an AWS Lambda handler function for Datadog instrumentation.""" + + def __call__(self, func, args, kwargs): + self.func = func + self._before(args, kwargs) + try: + self.response = self.func(*args, **kwargs) + return self.response + except Exception: + raise + finally: + self._after() + + def _set_context(self, args, kwargs): + """Sets the context attribute.""" + # The context is the second argument in a handler + # signature and it is always sent. + # + # note: AWS Lambda context is an object, the event is a dict. + # `get_remaining_time_in_millis` is guaranteed to be + # present in the context. + _context = get_argument_value(args, kwargs, 1, "context") + if hasattr(_context, "get_remaining_time_in_millis"): + self.context = _context + else: + # Handler was possibly manually wrapped, and the first + # argument is the `datadog-lambda` decorator object. 
+ self.context = get_argument_value(args, kwargs, 2, "context") + + def _before(self, args, kwargs): + set_cold_start() + self._set_context(args, kwargs) + self.timeoutChannel = TimeoutChannel(self.context) + + self.timeoutChannel._start() + + def _after(self): + if not self.timeoutChannel.crashed: + self.timeoutChannel.stop() + + +def _modify_module_name(module_name): + """Returns a valid modified module to get imported.""" + return ".".join(module_name.split("/")) + + +def _get_handler_and_module(): + """Returns the user AWS Lambda handler and module.""" + path = os.environ.get("DD_LAMBDA_HANDLER", None) + _datadog_instrumentation = DatadogInstrumentation() + + if path is None: + from datadog_lambda.wrapper import datadog_lambda_wrapper + + wrapper_module = datadog_lambda_wrapper + wrapper_handler = datadog_lambda_wrapper.__call__ + + return wrapper_handler, wrapper_module, _datadog_instrumentation + else: + parts = path.rsplit(".", 1) + (mod_name, handler_name) = parts + modified_mod_name = _modify_module_name(mod_name) + handler_module = import_module(modified_mod_name) + handler = getattr(handler_module, handler_name) + + if callable(handler): + class_name = type(handler).__name__ + is_function = not isinstance(handler, type) and hasattr(handler, "__code__") and class_name == "function" + # handler is a function + # + # note: this is a best effort to identify function based handlers + # this will not cover all cases + if is_function: + return handler, handler_module, _datadog_instrumentation + + # handler must be either a class or an instance of a class + # + # note: if handler is a class instance with `__code__` defined, + # we will prioritize the `__call__` method, ignoring `__code__`. + class_module = getattr(handler_module, class_name) + class_handler = class_module.__call__ + + if isinstance(handler, type): + # class handler is a metaclass + if hasattr(class_handler, "__func__"): + class_handler = class_handler.__func__ + + return class_handler, class_module, _datadog_instrumentation + else: + raise TypeError("Handler type is not supported to patch.") + + +def _has_patch_module(): + """ + Ensures that the `aws_lambda` integration can be patched. + + It checks either the user has the DD_LAMBDA_HANDLER set correctly. + Or if the `datadog_lambda` package is installed. + """ + path = os.environ.get("DD_LAMBDA_HANDLER", None) + if path is None: + try: + import_module("datadog_lambda.wrapper") + except Exception: + return False + else: + parts = path.rsplit(".", 1) + if len(parts) != 2: + return False + return True + + +def patch(): + """Patches an AWS Lambda using the `datadog-lambda-py` Lambda layer.""" + + # It's expected to only patch only in AWS Lambda environments. + # The need to check if a patch module exists is to avoid patching + # when `ddtrace` is present but not `datadog-lambda`. + if not in_aws_lambda() and not _has_patch_module(): + return + + try: + handler, handler_module, wrapper = _get_handler_and_module() + + if getattr(handler_module, "_datadog_patch", False): + return + + wrap(handler, wrapper) + + handler_module._datadog_patch = True + except AttributeError: + # User code might contain `ddtrace.patch_all()` or `ddtrace.patch(aws_lambda=True)` + # which might cause a circular dependency. Skipping. + return + except Exception: + log.exception("Error patching handler. 
Timeout spans will not be generated.") + + return + + +def unpatch(): + if not in_aws_lambda() and not _has_patch_module(): + return + + try: + handler, handler_module, wrapper = _get_handler_and_module() + + if not getattr(handler_module, "_datadog_patch", False): + return + + unwrap(handler, wrapper) + + handler_module._datadog_patch = False + except AttributeError: + return + except Exception: + log.exception("Error unpatching handler.") + + return diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/__init__.py new file mode 100644 index 0000000..0478bc7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/__init__.py @@ -0,0 +1,41 @@ +""" +Boto integration will trace all AWS calls made via boto2. + +Enabling +~~~~~~~~ + +The boto integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(boto=True) + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.boto['tag_no_params'] + + This opts out of the default behavior of collecting a narrow set of API + parameters as span tags. + + To not collect any API parameters, ``ddtrace.config.boto.tag_no_params = + True`` or by setting the environment variable ``DD_AWS_TAG_NO_PARAMS=true``. + + + Default: ``False`` + +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["boto.connection"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/patch.py new file mode 100644 index 0000000..4db66fb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/boto/patch.py @@ -0,0 +1,212 @@ +import inspect +import os + +from boto import __version__ +import boto.connection + +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import aws +from ddtrace.ext import http +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.utils.wrappers import unwrap +from ddtrace.pin import Pin +from ddtrace.vendor import wrapt + +from ...internal.schema import schematize_cloud_api_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ...internal.utils.formats import asbool + + +# Original boto client class +_Boto_client = boto.connection.AWSQueryConnection + +AWS_QUERY_ARGS_NAME = ("operation_name", "params", "path", "verb") +AWS_AUTH_ARGS_NAME = ( + "method", + "path", + "headers", + "data", + "host", + "auth_path", + "sender", +) +AWS_QUERY_TRACED_ARGS = {"operation_name", "params", "path"} +AWS_AUTH_TRACED_ARGS = {"path", "data", "host"} + + +config._add( + "boto", + { + "tag_no_params": asbool(os.getenv("DD_AWS_TAG_NO_PARAMS", default=False)), + }, +) + + +def get_version(): + # type: () -> str + return __version__ + + +def patch(): + if getattr(boto.connection, "_datadog_patch", False): + return + boto.connection._datadog_patch = True + + # AWSQueryConnection and AWSAuthConnection are two different classes called by + # 
different services for connection. + # For example EC2 uses AWSQueryConnection and S3 uses AWSAuthConnection + wrapt.wrap_function_wrapper("boto.connection", "AWSQueryConnection.make_request", patched_query_request) + wrapt.wrap_function_wrapper("boto.connection", "AWSAuthConnection.make_request", patched_auth_request) + Pin(service="aws").onto(boto.connection.AWSQueryConnection) + Pin(service="aws").onto(boto.connection.AWSAuthConnection) + + +def unpatch(): + if getattr(boto.connection, "_datadog_patch", False): + boto.connection._datadog_patch = False + unwrap(boto.connection.AWSQueryConnection, "make_request") + unwrap(boto.connection.AWSAuthConnection, "make_request") + + +# ec2, sqs, kinesis +def patched_query_request(original_func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return original_func(*args, **kwargs) + + endpoint_name = instance.host.split(".")[0] + + with pin.tracer.trace( + schematize_cloud_api_operation( + "{}.command".format(endpoint_name), cloud_provider="aws", cloud_service=endpoint_name + ), + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.boto.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + operation_name = None + if args: + operation_name = get_argument_value(args, kwargs, 0, "action") + params = get_argument_value(args, kwargs, 1, "params") + + span.resource = "%s.%s" % (endpoint_name, operation_name.lower()) + + if params and not config.boto["tag_no_params"]: + aws._add_api_param_span_tags(span, endpoint_name, params) + else: + span.resource = endpoint_name + + # Obtaining region name + region_name = _get_instance_region_name(instance) + + meta = { + aws.AGENT: "boto", + aws.OPERATION: operation_name, + } + if region_name: + meta[aws.REGION] = region_name + meta[aws.AWSREGION] = region_name + + span.set_tags(meta) + + # Original func returns a boto.connection.HTTPResponse object + result = original_func(*args, **kwargs) + span.set_tag(http.STATUS_CODE, result.status) + span.set_tag_str(http.METHOD, result._method) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.boto.get_analytics_sample_rate()) + + return result + + +# s3, lambda +def patched_auth_request(original_func, instance, args, kwargs): + # Catching the name of the operation that called make_request() + operation_name = None + + # Go up the stack until we get the first non-ddtrace module + # DEV: For `lambda.list_functions()` this should be: + # - ddtrace.contrib.boto.patch + # - ddtrace.vendor.wrapt.wrappers + # - boto.awslambda.layer1 (make_request) + # - boto.awslambda.layer1 (list_functions) + # But can vary depending on Python versions; that's why we use an heuristic + frame = inspect.currentframe().f_back + operation_name = None + while frame: + if frame.f_code.co_name == "make_request": + operation_name = frame.f_back.f_code.co_name + break + frame = frame.f_back + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return original_func(*args, **kwargs) + + endpoint_name = instance.host.split(".")[0] + + with pin.tracer.trace( + schematize_cloud_api_operation( + "{}.command".format(endpoint_name), cloud_provider="aws", cloud_service=endpoint_name + ), + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + ) as span: + 
span.set_tag(SPAN_MEASURED_KEY) + if args: + http_method = get_argument_value(args, kwargs, 0, "method") + span.resource = "%s.%s" % (endpoint_name, http_method.lower()) + else: + span.resource = endpoint_name + + # Obtaining region name + region_name = _get_instance_region_name(instance) + + meta = { + aws.AGENT: "boto", + aws.OPERATION: operation_name, + } + if region_name: + meta[aws.REGION] = region_name + meta[aws.AWSREGION] = region_name + + span.set_tags(meta) + + # Original func returns a boto.connection.HTTPResponse object + result = original_func(*args, **kwargs) + span.set_tag(http.STATUS_CODE, result.status) + span.set_tag_str(http.METHOD, result._method) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.boto.get_analytics_sample_rate()) + + span.set_tag_str(COMPONENT, config.boto.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + return result + + +def _get_instance_region_name(instance): + region = getattr(instance, "region", None) + + if not region: + return None + if isinstance(region, str): + return region.split(":")[1] + else: + return region.name diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/__init__.py new file mode 100644 index 0000000..561ca4e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/__init__.py @@ -0,0 +1,125 @@ +""" +The Botocore integration will trace all AWS calls made with the botocore +library. Libraries like Boto3 that use Botocore will also be patched. + +Enabling +~~~~~~~~ + +The botocore integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(botocore=True) + +To patch only specific botocore modules, pass a list of the module names instead:: + + from ddtrace import patch + patch(botocore=['s3', 'sns']) + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.botocore['distributed_tracing'] + + Whether to inject distributed tracing data to requests in SQS, SNS, EventBridge, Kinesis Streams and Lambda. + + Can also be enabled with the ``DD_BOTOCORE_DISTRIBUTED_TRACING`` environment variable. + + Example:: + + from ddtrace import config + + # Enable distributed tracing + config.botocore['distributed_tracing'] = True + + + Default: ``True`` + + +.. py:data:: ddtrace.config.botocore['invoke_with_legacy_context'] + + This preserves legacy behavior when tracing directly invoked Python and Node Lambda + functions instrumented with datadog-lambda-python < v41 or datadog-lambda-js < v3.58.0. + + Legacy support for older libraries is available with + ``ddtrace.config.botocore.invoke_with_legacy_context = True`` or by setting the environment + variable ``DD_BOTOCORE_INVOKE_WITH_LEGACY_CONTEXT=true``. + + + Default: ``False`` + + +.. py:data:: ddtrace.config.botocore['operations'][].error_statuses = "" + + Definition of which HTTP status codes to consider for making a span as an error span. + + By default response status codes of ``'500-599'`` are considered as errors for all endpoints. + + Example marking 404, and 5xx as errors for ``s3.headobject`` API calls:: + + from ddtrace import config + + config.botocore['operations']['s3.headobject'].error_statuses = '404,500-599' + + + See :ref:`HTTP - Custom Error Codes` documentation for more examples. + +.. 
py:data:: ddtrace.config.botocore['tag_no_params'] + + This opts out of the default behavior of collecting a narrow set of API parameters as span tags. + + To not collect any API parameters, ``ddtrace.config.botocore.tag_no_params = True`` or by setting the environment + variable ``DD_AWS_TAG_NO_PARAMS=true``. + + + Default: ``False`` + + +.. py:data:: ddtrace.config.botocore['instrument_internals'] + + This opts into collecting spans for some internal functions, including ``parsers.ResponseParser.parse``. + + Can also be enabled with the ``DD_BOTOCORE_INSTRUMENT_INTERNALS`` environment variable. + + Default: ``False`` + + +.. py:data:: ddtrace.config.botocore['span_prompt_completion_sample_rate'] + + Configure the sample rate for the collection of bedrock prompts and completions as span tags. + + Alternatively, you can set this option with the ``DD_BEDROCK_SPAN_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``1.0`` + + +.. py:data:: (beta) ddtrace.config.botocore["span_char_limit"] + + Configure the maximum number of characters for bedrock span tags for prompt/response text. + + Text exceeding the maximum number of characters is truncated to the character limit + and has ``...`` appended to the end. + + Alternatively, you can set this option with the ``DD_BEDROCK_SPAN_CHAR_LIMIT`` environment + variable. + + Default: ``128`` + +""" + + +from ...internal.utils.importlib import require_modules + + +required_modules = ["botocore.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import patch_submodules + + __all__ = ["patch", "patch_submodules", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/patch.py new file mode 100644 index 0000000..043ae5e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/patch.py @@ -0,0 +1,273 @@ +""" +Trace queries to aws api done via botocore client +""" +import collections +import os +from typing import List # noqa:F401 +from typing import Set # noqa:F401 +from typing import Union # noqa:F401 + +from botocore import __version__ +import botocore.client +import botocore.exceptions + +from ddtrace import config +from ddtrace.contrib.trace_utils import with_traced_module +from ddtrace.internal.llmobs.integrations import BedrockIntegration +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.settings.config import Config +from ddtrace.vendor import wrapt + +from ...constants import SPAN_KIND +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import http +from ...internal.constants import COMPONENT +from ...internal.logger import get_logger +from ...internal.schema import schematize_cloud_api_operation +from ...internal.schema import schematize_cloud_faas_operation +from ...internal.schema import schematize_cloud_messaging_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ...internal.utils.formats import asbool +from ...internal.utils.formats import deep_getattr +from ...pin import Pin +from ..trace_utils import unwrap +from .services.bedrock import patched_bedrock_api_call +from .services.kinesis import patched_kinesis_api_call +from .services.sqs import inject_trace_to_sqs_or_sns_batch_message +from .services.sqs import inject_trace_to_sqs_or_sns_message +from 
.services.sqs import patched_sqs_api_call +from .services.stepfunctions import inject_trace_to_stepfunction_input +from .services.stepfunctions import patched_stepfunction_api_call +from .utils import inject_trace_to_client_context +from .utils import inject_trace_to_eventbridge_detail +from .utils import set_patched_api_call_span_tags +from .utils import set_response_metadata_tags + + +_PATCHED_SUBMODULES = set() # type: Set[str] + +# Original botocore client class +_Botocore_client = botocore.client.BaseClient + +ARGS_NAME = ("action", "params", "path", "verb") +TRACED_ARGS = {"params", "path", "verb"} + +log = get_logger(__name__) + + +# Botocore default settings +config._add( + "botocore", + { + "distributed_tracing": asbool(os.getenv("DD_BOTOCORE_DISTRIBUTED_TRACING", default=True)), + "invoke_with_legacy_context": asbool(os.getenv("DD_BOTOCORE_INVOKE_WITH_LEGACY_CONTEXT", default=False)), + "llmobs_enabled": asbool(os.getenv("DD_BEDROCK_LLMOBS_ENABLED", False)), + "operations": collections.defaultdict(Config._HTTPServerConfig), + "span_prompt_completion_sample_rate": float(os.getenv("DD_BEDROCK_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), + "llmobs_prompt_completion_sample_rate": float( + os.getenv("DD_LANGCHAIN_LLMOBS_PROMPT_COMPLETION_SAMPLE_RATE", 1.0) + ), + "span_char_limit": int(os.getenv("DD_BEDROCK_SPAN_CHAR_LIMIT", 128)), + "tag_no_params": asbool(os.getenv("DD_AWS_TAG_NO_PARAMS", default=False)), + "instrument_internals": asbool(os.getenv("DD_BOTOCORE_INSTRUMENT_INTERNALS", default=False)), + "propagation_enabled": asbool(os.getenv("DD_BOTOCORE_PROPAGATION_ENABLED", default=False)), + "empty_poll_enabled": asbool(os.getenv("DD_BOTOCORE_EMPTY_POLL_ENABLED", default=True)), + }, +) + + +def get_version(): + # type: () -> str + return __version__ + + +def patch(): + if getattr(botocore.client, "_datadog_patch", False): + return + botocore.client._datadog_patch = True + + botocore._datadog_integration = BedrockIntegration(integration_config=config.botocore) + wrapt.wrap_function_wrapper("botocore.client", "BaseClient._make_api_call", patched_api_call(botocore)) + Pin(service="aws").onto(botocore.client.BaseClient) + wrapt.wrap_function_wrapper("botocore.parsers", "ResponseParser.parse", patched_lib_fn) + Pin(service="aws").onto(botocore.parsers.ResponseParser) + _PATCHED_SUBMODULES.clear() + + +def unpatch(): + _PATCHED_SUBMODULES.clear() + if getattr(botocore.client, "_datadog_patch", False): + botocore.client._datadog_patch = False + unwrap(botocore.parsers.ResponseParser, "parse") + unwrap(botocore.client.BaseClient, "_make_api_call") + + +def patch_submodules(submodules): + # type: (Union[List[str], bool]) -> None + if isinstance(submodules, bool) and submodules: + _PATCHED_SUBMODULES.clear() + elif isinstance(submodules, list): + submodules = [sub_module.lower() for sub_module in submodules] + _PATCHED_SUBMODULES.update(submodules) + + +def patched_lib_fn(original_func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled() or not config.botocore["instrument_internals"]: + return original_func(*args, **kwargs) + with pin.tracer.trace("{}.{}".format(original_func.__module__, original_func.__name__)) as span: + span.set_tag_str(COMPONENT, config.botocore.integration_name) + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + return original_func(*args, **kwargs) + + +@with_traced_module +def patched_api_call(botocore, pin, original_func, instance, args, kwargs): + if not pin or not pin.enabled(): + return original_func(*args, **kwargs) + + 
endpoint_name = deep_getattr(instance, "_endpoint._endpoint_prefix") + + if _PATCHED_SUBMODULES and endpoint_name not in _PATCHED_SUBMODULES: + return original_func(*args, **kwargs) + + trace_operation = schematize_cloud_api_operation( + "{}.command".format(endpoint_name), cloud_provider="aws", cloud_service=endpoint_name + ) + + operation = get_argument_value(args, kwargs, 0, "operation_name", True) + params = get_argument_value(args, kwargs, 1, "api_params", True) + + function_vars = { + "endpoint_name": endpoint_name, + "operation": operation, + "params": params, + "pin": pin, + "trace_operation": trace_operation, + "integration": botocore._datadog_integration, + } + + if endpoint_name == "kinesis": + return patched_kinesis_api_call( + original_func=original_func, + instance=instance, + args=args, + kwargs=kwargs, + function_vars=function_vars, + ) + elif endpoint_name == "sqs": + return patched_sqs_api_call( + original_func=original_func, + instance=instance, + args=args, + kwargs=kwargs, + function_vars=function_vars, + ) + elif endpoint_name == "bedrock-runtime" and operation.startswith("InvokeModel"): + return patched_bedrock_api_call( + original_func=original_func, + instance=instance, + args=args, + kwargs=kwargs, + function_vars=function_vars, + ) + elif endpoint_name == "states": + return patched_stepfunction_api_call( + original_func=original_func, instance=instance, args=args, kwargs=kwargs, function_vars=function_vars + ) + else: + # this is the default patched api call + return patched_api_call_fallback( + original_func=original_func, + instance=instance, + args=args, + kwargs=kwargs, + function_vars=function_vars, + ) + + +def patched_api_call_fallback(original_func, instance, args, kwargs, function_vars): + # default patched api call that is used generally for several services / operations + params = function_vars.get("params") + trace_operation = function_vars.get("trace_operation") + pin = function_vars.get("pin") + endpoint_name = function_vars.get("endpoint_name") + operation = function_vars.get("operation") + + with pin.tracer.trace( + trace_operation, + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + ) as span: + set_patched_api_call_span_tags(span, instance, args, params, endpoint_name, operation) + + if args: + if config.botocore["distributed_tracing"]: + try: + if endpoint_name == "lambda" and operation == "Invoke": + inject_trace_to_client_context(params, span) + span.name = schematize_cloud_faas_operation( + trace_operation, cloud_provider="aws", cloud_service="lambda" + ) + if endpoint_name == "events" and operation == "PutEvents": + inject_trace_to_eventbridge_detail(params, span) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="events", + direction=SpanDirection.OUTBOUND, + ) + if endpoint_name == "sns" and operation == "Publish": + inject_trace_to_sqs_or_sns_message( + params, + span, + endpoint_service=endpoint_name, + ) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="sns", + direction=SpanDirection.OUTBOUND, + ) + if endpoint_name == "sns" and operation == "PublishBatch": + inject_trace_to_sqs_or_sns_batch_message( + params, + span, + endpoint_service=endpoint_name, + ) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="sns", + direction=SpanDirection.OUTBOUND, + ) + if endpoint_name == "states" and ( + operation 
== "StartExecution" or operation == "StartSyncExecution" + ): + inject_trace_to_stepfunction_input(params, span) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="stepfunctions", + direction=SpanDirection.OUTBOUND, + ) + except Exception: + log.warning("Unable to inject trace context", exc_info=True) + + try: + result = original_func(*args, **kwargs) + set_response_metadata_tags(span, result) + return result + + except botocore.exceptions.ClientError as e: + # `ClientError.response` contains the result, so we can still grab response metadata + set_response_metadata_tags(span, e.response) + + # If we have a status code, and the status code is not an error, + # then ignore the exception being raised + status_code = span.get_tag(http.STATUS_CODE) + if status_code and not config.botocore.operations[span.resource].is_error_code(int(status_code)): + span._ignore_exception(botocore.exceptions.ClientError) + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/bedrock.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/bedrock.py new file mode 100644 index 0000000..4009400 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/bedrock.py @@ -0,0 +1,341 @@ +import json +import sys +from typing import Any +from typing import Dict +from typing import List +from typing import Optional + +from ddtrace import Span +from ddtrace.internal.llmobs.integrations import BedrockIntegration +from ddtrace.internal.logger import get_logger +from ddtrace.vendor import wrapt + +from ....internal.schema import schematize_service_name + + +log = get_logger(__name__) + + +_AI21 = "ai21" +_AMAZON = "amazon" +_ANTHROPIC = "anthropic" +_COHERE = "cohere" +_META = "meta" +_STABILITY = "stability" + + +class TracedBotocoreStreamingBody(wrapt.ObjectProxy): + """ + This class wraps the StreamingBody object returned by botocore api calls, specifically for Bedrock invocations. + Since the response body is in the form of a stream object, we need to wrap it in order to tag the response data + and finish the span as the user consumes the streamed response. + Currently, the corresponding span finishes only if: + 1) the user fully consumes the stream body + 2) error during reading + This means that if the stream is not consumed, there is a small risk of memory leak due to unfinished spans. + """ + + def __init__(self, wrapped, span, integration, prompt=None): + """ + The TracedBotocoreStreamingBody wrapper stores a reference to the + underlying Span object, BedrockIntegration object, and the response body that will saved and tagged. 
+ """ + super().__init__(wrapped) + self._datadog_span = span + self._datadog_integration = integration + self._body = [] + self._prompt = prompt + + def read(self, amt=None): + """Wraps around method to tags the response data and finish the span as the user consumes the stream.""" + try: + body = self.__wrapped__.read(amt=amt) + self._body.append(json.loads(body)) + if self.__wrapped__.tell() == int(self.__wrapped__._content_length): + formatted_response = _extract_response(self._datadog_span, self._body[0]) + self._process_response(formatted_response) + self._datadog_span.finish() + return body + except Exception: + _handle_exception(self._datadog_span, self._datadog_integration, self._prompt, sys.exc_info()) + raise + + def readlines(self): + """Wraps around method to tags the response data and finish the span as the user consumes the stream.""" + try: + lines = self.__wrapped__.readlines() + for line in lines: + self._body.append(json.loads(line)) + formatted_response = _extract_response(self._datadog_span, self._body[0]) + self._process_response(formatted_response) + self._datadog_span.finish() + return lines + except Exception: + _handle_exception(self._datadog_span, self._datadog_integration, self._prompt, sys.exc_info()) + raise + + def __iter__(self): + """Wraps around method to tags the response data and finish the span as the user consumes the stream.""" + try: + for line in self.__wrapped__: + self._body.append(json.loads(line["chunk"]["bytes"])) + yield line + metadata = _extract_streamed_response_metadata(self._datadog_span, self._body) + formatted_response = _extract_streamed_response(self._datadog_span, self._body) + self._process_response(formatted_response, metadata=metadata) + self._datadog_span.finish() + except Exception: + _handle_exception(self._datadog_span, self._datadog_integration, self._prompt, sys.exc_info()) + raise + + def _process_response(self, formatted_response: Dict[str, Any], metadata: Dict[str, Any] = None) -> None: + """ + Sets the response tags on the span given the formatted response body and any metadata. + Also generates an LLM record if enabled. + """ + if metadata is not None: + for k, v in metadata.items(): + self._datadog_span.set_tag_str("bedrock.{}".format(k), str(v)) + for i in range(len(formatted_response["text"])): + if self._datadog_integration.is_pc_sampled_span(self._datadog_span): + self._datadog_span.set_tag_str( + "bedrock.response.choices.{}.text".format(i), + self._datadog_integration.trunc(str(formatted_response["text"][i])), + ) + self._datadog_span.set_tag_str( + "bedrock.response.choices.{}.finish_reason".format(i), str(formatted_response["finish_reason"][i]) + ) + if self._datadog_integration.is_pc_sampled_llmobs(self._datadog_span): + self._datadog_integration.generate_llm_record( + self._datadog_span, formatted_response=formatted_response, prompt=self._prompt + ) + + +def _handle_exception(span, integration, prompt, exc_info): + """Helper method to finish the span on stream read error.""" + span.set_exc_info(*exc_info) + span.finish() + if integration.is_pc_sampled_llmobs(span): + integration.generate_llm_record(span, formatted_response=None, prompt=prompt, err=1) + + +def _extract_request_params(params: Dict[str, Any], provider: str) -> Dict[str, Any]: + """ + Extracts request parameters including prompt, temperature, top_p, max_tokens, and stop_sequences. 
+ """ + request_body = json.loads(params.get("body")) + if provider == _AI21: + return { + "prompt": request_body.get("prompt"), + "temperature": request_body.get("temperature", None), + "top_p": request_body.get("topP", None), + "max_tokens": request_body.get("maxTokens", None), + "stop_sequences": request_body.get("stopSequences", []), + } + elif provider == _AMAZON: + text_generation_config = request_body.get("textGenerationConfig", {}) + return { + "prompt": request_body.get("inputText"), + "temperature": text_generation_config.get("temperature", None), + "top_p": text_generation_config.get("topP", None), + "max_tokens": text_generation_config.get("maxTokenCount", None), + "stop_sequences": text_generation_config.get("stopSequences", []), + } + elif provider == _ANTHROPIC: + return { + "prompt": request_body.get("prompt"), + "temperature": request_body.get("temperature", None), + "top_p": request_body.get("top_p", None), + "top_k": request_body.get("top_k", None), + "max_tokens": request_body.get("max_tokens_to_sample", None), + "stop_sequences": request_body.get("stop_sequences", []), + } + elif provider == _COHERE: + return { + "prompt": request_body.get("prompt"), + "temperature": request_body.get("temperature", None), + "top_p": request_body.get("p", None), + "top_k": request_body.get("k", None), + "max_tokens": request_body.get("max_tokens", None), + "stop_sequences": request_body.get("stop_sequences", []), + "stream": request_body.get("stream", None), + "n": request_body.get("num_generations", None), + } + elif provider == _META: + return { + "prompt": request_body.get("prompt"), + "temperature": request_body.get("temperature", None), + "top_p": request_body.get("top_p", None), + "max_tokens": request_body.get("max_gen_len", None), + } + elif provider == _STABILITY: + # TODO: request/response formats are different for image-based models. Defer for now + return {} + return {} + + +def _extract_response(span: Span, body: Dict[str, Any]) -> Dict[str, List[str]]: + """ + Extracts text and finish_reason from the response body, which has different formats for different providers. + """ + text, finish_reason = "", "" + provider = span.get_tag("bedrock.request.model_provider") + try: + if provider == _AI21: + text = body.get("completions")[0].get("data").get("text") + finish_reason = body.get("completions")[0].get("finishReason") + elif provider == _AMAZON: + text = body.get("results")[0].get("outputText") + finish_reason = body.get("results")[0].get("completionReason") + elif provider == _ANTHROPIC: + text = body.get("completion") + finish_reason = body.get("stop_reason") + elif provider == _COHERE: + text = [generation["text"] for generation in body.get("generations")] + finish_reason = [generation["finish_reason"] for generation in body.get("generations")] + for i in range(len(text)): + span.set_tag_str("bedrock.response.choices.{}.id".format(i), str(body.get("generations")[i]["id"])) + elif provider == _META: + text = body.get("generation") + finish_reason = body.get("stop_reason") + elif provider == _STABILITY: + # TODO: request/response formats are different for image-based models. Defer for now + pass + except (IndexError, AttributeError): + log.warning("Unable to extract text/finish_reason from response body. 
Defaulting to empty text/finish_reason.") + + if not isinstance(text, list): + text = [text] + if not isinstance(finish_reason, list): + finish_reason = [finish_reason] + + return {"text": text, "finish_reason": finish_reason} + + +def _extract_streamed_response(span: Span, streamed_body: List[Dict[str, Any]]) -> Dict[str, List[str]]: + """ + Extracts text,finish_reason from the streamed response body, which has different formats for different providers. + """ + text, finish_reason = "", "" + provider = span.get_tag("bedrock.request.model_provider") + try: + if provider == _AI21: + pass # note: ai21 does not support streamed responses + elif provider == _AMAZON: + text = "".join([chunk["outputText"] for chunk in streamed_body]) + finish_reason = streamed_body[-1]["completionReason"] + elif provider == _ANTHROPIC: + text = "".join([chunk["completion"] for chunk in streamed_body]) + finish_reason = streamed_body[-1]["stop_reason"] + elif provider == _COHERE and streamed_body: + if "is_finished" in streamed_body[0]: # streamed response + if "index" in streamed_body[0]: # n >= 2 + n = int(span.get_tag("bedrock.request.n")) + text = [ + "".join([chunk["text"] for chunk in streamed_body[:-1] if chunk["index"] == i]) + for i in range(n) + ] + finish_reason = [streamed_body[-1]["finish_reason"] for _ in range(n)] + else: + text = "".join([chunk["text"] for chunk in streamed_body[:-1]]) + finish_reason = streamed_body[-1]["finish_reason"] + else: + text = [chunk["text"] for chunk in streamed_body[0]["generations"]] + finish_reason = [chunk["finish_reason"] for chunk in streamed_body[0]["generations"]] + for i in range(len(text)): + span.set_tag_str( + "bedrock.response.choices.{}.id".format(i), + str(streamed_body[0]["generations"][i].get("id", None)), + ) + elif provider == _META: + text = "".join([chunk["generation"] for chunk in streamed_body]) + finish_reason = streamed_body[-1]["stop_reason"] + elif provider == _STABILITY: + # TODO: figure out extraction for image-based models + pass + except (IndexError, AttributeError): + log.warning("Unable to extract text/finish_reason from response body. 
Defaulting to empty text/finish_reason.") + + if not isinstance(text, list): + text = [text] + if not isinstance(finish_reason, list): + finish_reason = [finish_reason] + + return {"text": text, "finish_reason": finish_reason} + + +def _extract_streamed_response_metadata(span: Span, streamed_body: List[Dict[str, Any]]) -> Dict[str, Any]: + """Extracts metadata from the streamed response body.""" + provider = span.get_tag("bedrock.request.model_provider") + metadata = {} + if provider == _AI21: + pass # ai21 does not support streamed responses + elif provider in [_AMAZON, _ANTHROPIC, _COHERE, _META] and streamed_body: + metadata = streamed_body[-1].get("amazon-bedrock-invocationMetrics", {}) + elif provider == _STABILITY: + # TODO: figure out extraction for image-based models + pass + return { + "response.duration": metadata.get("invocationLatency", None), + "usage.prompt_tokens": metadata.get("inputTokenCount", None), + "usage.completion_tokens": metadata.get("outputTokenCount", None), + } + + +def handle_bedrock_request(span: Span, integration: BedrockIntegration, params: Dict[str, Any]) -> None: + """Perform request param extraction and tagging.""" + model_provider, model_name = params.get("modelId").split(".") + request_params = _extract_request_params(params, model_provider) + + span.set_tag_str("bedrock.request.model_provider", model_provider) + span.set_tag_str("bedrock.request.model", model_name) + prompt = None + for k, v in request_params.items(): + if k == "prompt" and integration.is_pc_sampled_span(span): + v = integration.trunc(str(v)) + if k == "prompt" and integration.is_pc_sampled_llmobs(span): + prompt = v + span.set_tag_str("bedrock.request.{}".format(k), str(v)) + return prompt + + +def handle_bedrock_response( + span: Span, integration: BedrockIntegration, result: Dict[str, Any], prompt: Optional[str] = None +) -> Dict[str, Any]: + """Perform response param extraction and tagging.""" + metadata = result["ResponseMetadata"] + http_headers = metadata["HTTPHeaders"] + span.set_tag_str("bedrock.response.id", str(metadata.get("RequestId", ""))) + span.set_tag_str("bedrock.response.duration", str(http_headers.get("x-amzn-bedrock-invocation-latency", ""))) + span.set_tag_str("bedrock.usage.prompt_tokens", str(http_headers.get("x-amzn-bedrock-input-token-count", ""))) + span.set_tag_str("bedrock.usage.completion_tokens", str(http_headers.get("x-amzn-bedrock-output-token-count", ""))) + + # Wrap the StreamingResponse in a traced object so that we can tag response data as the user consumes it. + body = result["body"] + result["body"] = TracedBotocoreStreamingBody(body, span, integration, prompt=prompt) + return result + + +def patched_bedrock_api_call(original_func, instance, args, kwargs, function_vars): + params = function_vars.get("params") + trace_operation = function_vars.get("trace_operation") + operation = function_vars.get("operation") + pin = function_vars.get("pin") + endpoint_name = function_vars.get("endpoint_name") + integration = function_vars.get("integration") + # This span will be finished separately as the user fully consumes the stream body, or on error. 
+ bedrock_span = pin.tracer.start_span( + trace_operation, + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + resource=operation, + activate=False, + ) + prompt = None + try: + prompt = handle_bedrock_request(bedrock_span, integration, params) + result = original_func(*args, **kwargs) + result = handle_bedrock_response(bedrock_span, integration, result, prompt=prompt) + return result + except Exception: + _handle_exception(bedrock_span, integration, prompt, sys.exc_info()) + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/kinesis.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/kinesis.py new file mode 100644 index 0000000..2785545 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/kinesis.py @@ -0,0 +1,182 @@ +import json +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 + +import botocore.client +import botocore.exceptions + +from ddtrace import Span # noqa:F401 +from ddtrace import config +from ddtrace.internal import core +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ....ext import SpanTypes +from ....ext import http +from ....internal.compat import time_ns +from ....internal.logger import get_logger +from ....internal.schema import schematize_cloud_messaging_operation +from ....internal.schema import schematize_service_name +from ....pin import Pin # noqa:F401 +from ....propagation.http import HTTPPropagator +from ..utils import extract_DD_context +from ..utils import get_kinesis_data_object +from ..utils import set_patched_api_call_span_tags +from ..utils import set_response_metadata_tags + + +log = get_logger(__name__) + + +MAX_KINESIS_DATA_SIZE = 1 << 20 # 1MB + + +class TraceInjectionSizeExceed(Exception): + pass + + +def inject_trace_to_kinesis_stream_data(record, span): + # type: (Dict[str, Any], Span) -> None + """ + :record: contains args for the current botocore action, Kinesis record is at index 1 + :span: the span which provides the trace context to be propagated + Inject trace headers into the Kinesis record's Data field in addition to the existing + data. Only possible if the existing data is JSON string or base64 encoded JSON string + Max data size per record is 1MB (https://aws.amazon.com/kinesis/data-streams/faqs/) + """ + if "Data" not in record: + log.warning("Unable to inject context. 
The kinesis stream has no data") + return + + data = record["Data"] + line_break, data_obj = get_kinesis_data_object(data) + if data_obj is not None: + data_obj["_datadog"] = {} + HTTPPropagator.inject(span.context, data_obj["_datadog"]) + data_json = json.dumps(data_obj) + + # if original string had a line break, add it back + if line_break is not None: + data_json += line_break + + # check if data size will exceed max size with headers + data_size = len(data_json) + if data_size >= MAX_KINESIS_DATA_SIZE: + raise TraceInjectionSizeExceed( + "Data including trace injection ({}) exceeds ({})".format(data_size, MAX_KINESIS_DATA_SIZE) + ) + + record["Data"] = data_json + + +def inject_trace_to_kinesis_stream(params, span): + # type: (List[Any], Span) -> None + """ + :params: contains the params for the current botocore action + :span: the span which provides the trace context to be propagated + Max data size per record is 1MB (https://aws.amazon.com/kinesis/data-streams/faqs/) + """ + core.dispatch("botocore.kinesis.start", [params]) + if "Records" in params: + records = params["Records"] + + if records: + record = records[0] + inject_trace_to_kinesis_stream_data(record, span) + elif "Data" in params: + inject_trace_to_kinesis_stream_data(params, span) + + +def patched_kinesis_api_call(original_func, instance, args, kwargs, function_vars): + params = function_vars.get("params") + trace_operation = function_vars.get("trace_operation") + pin = function_vars.get("pin") + endpoint_name = function_vars.get("endpoint_name") + operation = function_vars.get("operation") + + message_received = False + func_run = False + func_run_err = None + child_of = None + start_ns = None + result = None + + if operation == "GetRecords": + try: + start_ns = time_ns() + func_run = True + core.dispatch(f"botocore.{endpoint_name}.{operation}.pre", [params]) + result = original_func(*args, **kwargs) + core.dispatch(f"botocore.{endpoint_name}.{operation}.post", [params, result]) + except Exception as e: + func_run_err = e + if result is not None and "Records" in result and len(result["Records"]) >= 1: + message_received = True + if config.botocore.propagation_enabled: + child_of = extract_DD_context(result["Records"]) + + """ + We only want to create a span for the following cases: + - not func_run: The function is not `getRecords` and we need to run it + - func_run and message_received: Received a message when polling + - config.empty_poll_enabled: We want to trace empty poll operations + - func_run_err: There was an error when calling the `getRecords` function + """ + if (func_run and message_received) or config.botocore.empty_poll_enabled or not func_run or func_run_err: + with pin.tracer.start_span( + trace_operation, + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + child_of=child_of if child_of is not None else pin.tracer.context_provider.active(), + activate=True, + ) as span: + set_patched_api_call_span_tags(span, instance, args, params, endpoint_name, operation) + + # we need this since we may have ran the wrapped operation before starting the span + # we need to ensure the span start time is correct + if start_ns is not None and func_run: + span.start_ns = start_ns + + if args and config.botocore["distributed_tracing"]: + try: + if endpoint_name == "kinesis" and operation in {"PutRecord", "PutRecords"}: + inject_trace_to_kinesis_stream(params, span) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + 
cloud_service="kinesis", + direction=SpanDirection.OUTBOUND, + ) + except Exception: + log.warning("Unable to inject trace context", exc_info=True) + + try: + if not func_run: + core.dispatch(f"botocore.{endpoint_name}.{operation}.pre", [params]) + result = original_func(*args, **kwargs) + core.dispatch(f"botocore.{endpoint_name}.{operation}.post", [params, result]) + + # raise error if it was encountered before the span was started + if func_run_err: + raise func_run_err + + set_response_metadata_tags(span, result) + return result + + except botocore.exceptions.ClientError as e: + # `ClientError.response` contains the result, so we can still grab response metadata + set_response_metadata_tags(span, e.response) + + # If we have a status code, and the status code is not an error, + # then ignore the exception being raised + status_code = span.get_tag(http.STATUS_CODE) + if status_code and not config.botocore.operations[span.resource].is_error_code(int(status_code)): + span._ignore_exception(botocore.exceptions.ClientError) + raise + # return results in the case that we ran the function, but no records were returned and empty + # poll spans are disabled + elif func_run: + if func_run_err: + raise func_run_err + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/sqs.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/sqs.py new file mode 100644 index 0000000..21f9636 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/sqs.py @@ -0,0 +1,227 @@ +import json +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 + +import botocore.client +import botocore.exceptions + +from ddtrace import Span # noqa:F401 +from ddtrace import config +from ddtrace.internal import core +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ....ext import SpanTypes +from ....ext import http +from ....internal.compat import time_ns +from ....internal.logger import get_logger +from ....internal.schema import schematize_cloud_messaging_operation +from ....internal.schema import schematize_service_name +from ....pin import Pin # noqa:F401 +from ....propagation.http import HTTPPropagator +from ..utils import extract_DD_context +from ..utils import set_patched_api_call_span_tags +from ..utils import set_response_metadata_tags + + +log = get_logger(__name__) + + +def _encode_data(trace_data): + """ + This method exists solely to enable us to patch the value in tests, since + moto doesn't support auto-encoded SNS -> SQS as binary with RawDelivery enabled + """ + return json.dumps(trace_data) + + +def inject_trace_data_to_message_attributes(trace_data, entry, endpoint_service=None): + # type: (Dict[str, str], Dict[str, Any], Optional[str]) -> None + """ + :trace_data: trace headers and DSM pathway to be stored in the entry's MessageAttributes + :entry: an SQS or SNS record + :endpoint_service: endpoint of message, "sqs" or "sns" + Inject trace headers and DSM info into the SQS or SNS record's MessageAttributes + """ + if "MessageAttributes" not in entry: + entry["MessageAttributes"] = {} + # Max of 10 message attributes. + if len(entry["MessageAttributes"]) < 10: + if endpoint_service == "sqs": + # Use String since changing this to Binary would be a breaking + # change as other tracers expect this to be a String. 
+ entry["MessageAttributes"]["_datadog"] = {"DataType": "String", "StringValue": _encode_data(trace_data)} + elif endpoint_service == "sns": + # Use Binary since SNS subscription filter policies fail silently + # with JSON strings https://github.com/DataDog/datadog-lambda-js/pull/269 + # AWS will encode our value if it sees "Binary" + entry["MessageAttributes"]["_datadog"] = {"DataType": "Binary", "BinaryValue": _encode_data(trace_data)} + else: + log.debug( + "skipping trace injection, endpoint service is not SNS or SQS.", + extra=dict(endpoint_service=endpoint_service), + ) + else: + # In the event a record has 10 or more msg attributes we cannot add our _datadog msg attribute + log.warning("skipping trace injection, max number (10) of MessageAttributes exceeded") + + +def inject_trace_to_sqs_or_sns_batch_message(params, span, endpoint_service=None): + # type: (Any, Span, Optional[str]) -> None + """ + :params: contains the params for the current botocore action + :span: the span which provides the trace context to be propagated + :endpoint_service: endpoint of message, "sqs" or "sns" + Inject trace headers info into MessageAttributes for all SQS or SNS records inside a batch + """ + + trace_data = {} + HTTPPropagator.inject(span.context, trace_data) + + # An entry here is an SNS or SQS record, and depending on how it was published, + # it could either show up under Entries (in case of PutRecords), + # or PublishBatchRequestEntries (in case of PublishBatch). + entries = params.get("Entries", params.get("PublishBatchRequestEntries", [])) + if len(entries) != 0: + for entry in entries: + core.dispatch("botocore.sqs_sns.start", [endpoint_service, trace_data, params]) + inject_trace_data_to_message_attributes(trace_data, entry, endpoint_service) + else: + log.warning("Skipping injecting Datadog attributes to records, no records available") + + +def inject_trace_to_sqs_or_sns_message(params, span, endpoint_service=None): + # type: (Any, Span, Optional[str]) -> None + """ + :params: contains the params for the current botocore action + :span: the span which provides the trace context to be propagated + :endpoint_service: endpoint of message, "sqs" or "sns" + Inject trace headers info into MessageAttributes for the SQS or SNS record + """ + trace_data = {} + HTTPPropagator.inject(span.context, trace_data) + + core.dispatch("botocore.sqs_sns.start", [endpoint_service, trace_data, params]) + inject_trace_data_to_message_attributes(trace_data, params, endpoint_service) + + +def patched_sqs_api_call(original_func, instance, args, kwargs, function_vars): + params = function_vars.get("params") + trace_operation = function_vars.get("trace_operation") + pin = function_vars.get("pin") + endpoint_name = function_vars.get("endpoint_name") + operation = function_vars.get("operation") + + message_received = False + func_run = False + func_run_err = None + child_of = None + start_ns = None + result = None + + if operation == "ReceiveMessage": + # Ensure we have Datadog MessageAttribute enabled + if "MessageAttributeNames" not in params: + params.update({"MessageAttributeNames": ["_datadog"]}) + elif "_datadog" not in params["MessageAttributeNames"]: + params.update({"MessageAttributeNames": list(params["MessageAttributeNames"]) + ["_datadog"]}) + + try: + start_ns = time_ns() + func_run = True + # run the function before in order to extract possible parent context before starting span + + core.dispatch(f"botocore.{endpoint_name}.{operation}.pre", [params]) + result = original_func(*args, **kwargs) + 
core.dispatch(f"botocore.{endpoint_name}.{operation}.post", [params, result]) + except Exception as e: + func_run_err = e + if result is not None and "Messages" in result and len(result["Messages"]) >= 1: + message_received = True + if config.botocore.propagation_enabled: + child_of = extract_DD_context(result["Messages"]) + + """ + We only want to create a span for the following cases: + - not func_run: The function is not `ReceiveMessage` and we need to run it + - func_run and message_received: Received a message when polling + - config.empty_poll_enabled: We want to trace empty poll operations + - func_run_err: There was an error when calling the `ReceiveMessage` function + """ + if (func_run and message_received) or config.botocore.empty_poll_enabled or not func_run or func_run_err: + with pin.tracer.start_span( + trace_operation, + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + child_of=child_of if child_of is not None else pin.tracer.context_provider.active(), + activate=True, + ) as span: + set_patched_api_call_span_tags(span, instance, args, params, endpoint_name, operation) + + # we need this since we may have ran the wrapped operation before starting the span + # we need to ensure the span start time is correct + if start_ns is not None and func_run: + span.start_ns = start_ns + + if args and config.botocore["distributed_tracing"]: + try: + if endpoint_name == "sqs" and operation == "SendMessage": + inject_trace_to_sqs_or_sns_message( + params, + span, + endpoint_service=endpoint_name, + ) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="sqs", + direction=SpanDirection.OUTBOUND, + ) + if endpoint_name == "sqs" and operation == "SendMessageBatch": + inject_trace_to_sqs_or_sns_batch_message( + params, + span, + endpoint_service=endpoint_name, + ) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="sqs", + direction=SpanDirection.OUTBOUND, + ) + if endpoint_name == "sqs" and operation == "ReceiveMessage": + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="sqs", + direction=SpanDirection.INBOUND, + ) + except Exception: + log.warning("Unable to inject trace context", exc_info=True) + try: + if not func_run: + core.dispatch(f"botocore.{endpoint_name}.{operation}.pre", [params]) + result = original_func(*args, **kwargs) + core.dispatch(f"botocore.{endpoint_name}.{operation}.post", [params, result]) + + set_response_metadata_tags(span, result) + # raise error if it was encountered before the span was started + if func_run_err: + raise func_run_err + return result + except botocore.exceptions.ClientError as e: + # `ClientError.response` contains the result, so we can still grab response metadata + set_response_metadata_tags(span, e.response) + + # If we have a status code, and the status code is not an error, + # then ignore the exception being raised + status_code = span.get_tag(http.STATUS_CODE) + if status_code and not config.botocore.operations[span.resource].is_error_code(int(status_code)): + span._ignore_exception(botocore.exceptions.ClientError) + raise + # return results in the case that we ran the function, but no records were returned and empty + # poll spans are disabled + elif func_run: + if func_run_err: + raise func_run_err + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/stepfunctions.py 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/stepfunctions.py new file mode 100644 index 0000000..7aa32a5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/services/stepfunctions.py @@ -0,0 +1,108 @@ +import json +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 + +import botocore.exceptions + +from ddtrace import Span # noqa:F401 +from ddtrace import config +from ddtrace.ext import http +from ddtrace.propagation.http import HTTPPropagator + +from ....ext import SpanTypes +from ....internal.logger import get_logger +from ....internal.schema import SpanDirection +from ....internal.schema import schematize_cloud_messaging_operation +from ....internal.schema import schematize_service_name +from ..utils import set_patched_api_call_span_tags +from ..utils import set_response_metadata_tags + + +log = get_logger(__name__) + + +def inject_trace_to_stepfunction_input(params, span): + # type: (Any, Span) -> None + """ + :params: contains the params for the current botocore action + :span: the span which provides the trace context to be propagated + + Inject the trace headers into the StepFunction input if the input is a JSON string + """ + if "input" not in params: + log.warning("Unable to inject context. The StepFunction input had no input.") + return + + if params["input"] is None: + log.warning("Unable to inject context. The StepFunction input was None.") + return + + elif isinstance(params["input"], dict): + if "_datadog" in params["input"]: + log.warning("Input already has trace context.") + return + params["input"]["_datadog"] = {} + HTTPPropagator.inject(span.context, params["input"]["_datadog"]) + return + + elif isinstance(params["input"], str): + try: + input_obj = json.loads(params["input"]) + except ValueError: + log.warning("Input is not a valid JSON string") + return + + if isinstance(input_obj, dict): + input_obj["_datadog"] = {} + HTTPPropagator.inject(span.context, input_obj["_datadog"]) + input_json = json.dumps(input_obj) + + params["input"] = input_json + return + else: + log.warning("Unable to inject context. The StepFunction input was not a dict.") + return + + else: + log.warning("Unable to inject context. 
The StepFunction input was not a dict or a JSON string.") + + +def patched_stepfunction_api_call(original_func, instance, args, kwargs: Dict, function_vars: Dict): + params = function_vars.get("params") + trace_operation = function_vars.get("trace_operation") + pin = function_vars.get("pin") + endpoint_name = function_vars.get("endpoint_name") + operation = function_vars.get("operation") + + with pin.tracer.trace( + trace_operation, + service=schematize_service_name("{}.{}".format(pin.service, endpoint_name)), + span_type=SpanTypes.HTTP, + ) as span: + set_patched_api_call_span_tags(span, instance, args, params, endpoint_name, operation) + + if args: + if config.botocore["distributed_tracing"]: + try: + if endpoint_name == "states" and operation in {"StartExecution", "StartSyncExecution"}: + inject_trace_to_stepfunction_input(params, span) + span.name = schematize_cloud_messaging_operation( + trace_operation, + cloud_provider="aws", + cloud_service="stepfunctions", + direction=SpanDirection.OUTBOUND, + ) + except Exception: + log.warning("Unable to inject trace context", exc_info=True) + + try: + return original_func(*args, **kwargs) + except botocore.exceptions.ClientError as e: + set_response_metadata_tags(span, e.response) + + # If we have a status code, and the status code is not an error, + # then ignore the exception being raised + status_code = span.get_tag(http.STATUS_CODE) + if status_code and not config.botocore.operations[span.resource].is_error_code(int(status_code)): + span._ignore_exception(botocore.exceptions.ClientError) + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/utils.py new file mode 100644 index 0000000..46a5e4b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/botocore/utils.py @@ -0,0 +1,248 @@ +""" +Trace queries monitoring to aws api done via botocore client +""" +import base64 +import json +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + +from ddtrace import Span # noqa:F401 +from ddtrace import config + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import aws +from ...ext import http +from ...internal.constants import COMPONENT +from ...internal.logger import get_logger +from ...internal.utils.formats import deep_getattr +from ...propagation.http import HTTPPropagator + + +log = get_logger(__name__) + +MAX_EVENTBRIDGE_DETAIL_SIZE = 1 << 18 # 256KB + +LINE_BREAK = "\n" + + +def get_json_from_str(data_str): + # type: (str) -> Tuple[str, Optional[Dict[str, Any]]] + data_obj = json.loads(data_str) + + if data_str.endswith(LINE_BREAK): + return LINE_BREAK, data_obj + return None, data_obj + + +def get_kinesis_data_object(data): + # type: (str) -> Tuple[str, Optional[Dict[str, Any]]] + """ + :data: the data from a kinesis stream + The data from a kinesis stream comes as a string (could be json, base64 encoded, etc.) + We support injecting our trace context in the following three cases: + - json string + - byte encoded json string + - base64 encoded json string + If it's none of these, then we leave the message as it is. + """ + + # check if data is a json string + try: + return get_json_from_str(data) + except Exception: + log.debug("Kinesis data is not a JSON string. 
Trying Byte encoded JSON string.") + + # check if data is an encoded json string + try: + data_str = data.decode("ascii") + return get_json_from_str(data_str) + except Exception: + log.debug("Kinesis data is not a JSON string encoded. Trying Base64 encoded JSON string.") + + # check if data is a base64 encoded json string + try: + data_str = base64.b64decode(data).decode("ascii") + return get_json_from_str(data_str) + except Exception: + log.debug("Unable to parse payload, unable to inject trace context.") + + return None, None + + +def inject_trace_to_eventbridge_detail(params, span): + # type: (Any, Span) -> None + """ + :params: contains the params for the current botocore action + :span: the span which provides the trace context to be propagated + Inject trace headers into the EventBridge record if the record's Detail object contains a JSON string + Max size per event is 256KB (https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-putevent-size.html) + """ + if "Entries" not in params: + log.warning("Unable to inject context. The Event Bridge event had no Entries.") + return + + for entry in params["Entries"]: + detail = {} + if "Detail" in entry: + try: + detail = json.loads(entry["Detail"]) + except ValueError: + log.warning("Detail is not a valid JSON string") + continue + + detail["_datadog"] = {} + HTTPPropagator.inject(span.context, detail["_datadog"]) + detail_json = json.dumps(detail) + + # check if detail size will exceed max size with headers + detail_size = len(detail_json) + if detail_size >= MAX_EVENTBRIDGE_DETAIL_SIZE: + log.warning("Detail with trace injection (%s) exceeds limit (%s)", detail_size, MAX_EVENTBRIDGE_DETAIL_SIZE) + continue + + entry["Detail"] = detail_json + + +def modify_client_context(client_context_object, trace_headers): + if config.botocore["invoke_with_legacy_context"]: + trace_headers = {"_datadog": trace_headers} + + if "custom" in client_context_object: + client_context_object["custom"].update(trace_headers) + else: + client_context_object["custom"] = trace_headers + + +def inject_trace_to_client_context(params, span): + trace_headers = {} + HTTPPropagator.inject(span.context, trace_headers) + client_context_object = {} + if "ClientContext" in params: + try: + client_context_json = base64.b64decode(params["ClientContext"]).decode("utf-8") + client_context_object = json.loads(client_context_json) + except Exception: + log.warning("malformed client_context=%s", params["ClientContext"], exc_info=True) + return + modify_client_context(client_context_object, trace_headers) + try: + json_context = json.dumps(client_context_object).encode("utf-8") + except Exception: + log.warning("unable to encode modified client context as json: %s", client_context_object, exc_info=True) + return + params["ClientContext"] = base64.b64encode(json_context).decode("utf-8") + + +def set_patched_api_call_span_tags(span, instance, args, params, endpoint_name, operation): + span.set_tag_str(COMPONENT, config.botocore.integration_name) + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag(SPAN_MEASURED_KEY) + + if args: + # DEV: join is the fastest way of concatenating strings that is compatible + # across Python versions (see + # https://stackoverflow.com/questions/1316887/what-is-the-most-efficient-string-concatenation-method-in-python) + span.resource = ".".join((endpoint_name, operation.lower())) + span.set_tag("aws_service", endpoint_name) + + if params and not config.botocore["tag_no_params"]: + 
aws._add_api_param_span_tags(span, endpoint_name, params) + + else: + span.resource = endpoint_name + + region_name = deep_getattr(instance, "meta.region_name") + + span.set_tag_str("aws.agent", "botocore") + if operation is not None: + span.set_tag_str("aws.operation", operation) + if region_name is not None: + span.set_tag_str("aws.region", region_name) + span.set_tag_str("region", region_name) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.botocore.get_analytics_sample_rate()) + + +def set_response_metadata_tags(span, result): + # type: (Span, Dict[str, Any]) -> None + if not result or not result.get("ResponseMetadata"): + return + response_meta = result["ResponseMetadata"] + + if "HTTPStatusCode" in response_meta: + status_code = response_meta["HTTPStatusCode"] + span.set_tag(http.STATUS_CODE, status_code) + + # Mark this span as an error if requested + if config.botocore.operations[span.resource].is_error_code(int(status_code)): + span.error = 1 + + if "RetryAttempts" in response_meta: + span.set_tag("retry_attempts", response_meta["RetryAttempts"]) + + if "RequestId" in response_meta: + span.set_tag_str("aws.requestid", response_meta["RequestId"]) + + +def extract_DD_context(messages): + ctx = None + if len(messages) >= 1: + message = messages[0] + context_json = extract_trace_context_json(message) + if context_json is not None: + child_of = HTTPPropagator.extract(context_json) + if child_of.trace_id is not None: + ctx = child_of + return ctx + + +def extract_trace_context_json(message): + context_json = None + try: + if message and message.get("Type") == "Notification": + # This is potentially a DSM SNS notification + if ( + "MessageAttributes" in message + and "_datadog" in message["MessageAttributes"] + and message["MessageAttributes"]["_datadog"]["Type"] == "Binary" + ): + context_json = json.loads(base64.b64decode(message["MessageAttributes"]["_datadog"]["Value"]).decode()) + elif ( + "MessageAttributes" in message + and "_datadog" in message["MessageAttributes"] + and "StringValue" in message["MessageAttributes"]["_datadog"] + ): + # The message originated from SQS + context_json = json.loads(message["MessageAttributes"]["_datadog"]["StringValue"]) + elif ( + "MessageAttributes" in message + and "_datadog" in message["MessageAttributes"] + and "BinaryValue" in message["MessageAttributes"]["_datadog"] + ): + # Raw message delivery + context_json = json.loads(message["MessageAttributes"]["_datadog"]["BinaryValue"].decode()) + # this is a kinesis message + elif "Data" in message: + # Raw message delivery + _, data = get_kinesis_data_object(message["Data"]) + if "_datadog" in data: + context_json = data["_datadog"] + + if context_json is None: + # AWS SNS holds attributes within message body + if "Body" in message: + try: + body = json.loads(message["Body"]) + return extract_trace_context_json(body) + except ValueError: + log.debug("Unable to parse AWS message body.") + except Exception: + log.debug("Unable to parse AWS message attributes for Datadog Context.") + return context_json diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/__init__.py new file mode 100644 index 0000000..0e8f8d4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/__init__.py @@ -0,0 +1,47 @@ +""" +The bottle integration traces the Bottle web framework. 
Add the following +plugin to your app:: + + import bottle + from ddtrace import tracer + from ddtrace.contrib.bottle import TracePlugin + + app = bottle.Bottle() + plugin = TracePlugin(service="my-web-app") + app.install(plugin) + +:ref:`All HTTP tags ` are supported for this integration. + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.bottle['distributed_tracing'] + + Whether to parse distributed tracing headers from requests received by your bottle app. + + Can also be enabled with the ``DD_BOTTLE_DISTRIBUTED_TRACING`` environment variable. + + Default: ``True`` + + +Example:: + + from ddtrace import config + + # Enable distributed tracing + config.bottle['distributed_tracing'] = True + +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["bottle"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .trace import TracePlugin + + __all__ = ["TracePlugin", "patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/patch.py new file mode 100644 index 0000000..d22f076 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/patch.py @@ -0,0 +1,41 @@ +import os + +import bottle + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...internal.utils.formats import asbool +from .trace import TracePlugin + + +# Configure default configuration +config._add( + "bottle", + dict( + distributed_tracing=asbool(os.getenv("DD_BOTTLE_DISTRIBUTED_TRACING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(bottle, "__version__", "") + + +def patch(): + """Patch the bottle.Bottle class""" + if getattr(bottle, "_datadog_patch", False): + return + + bottle._datadog_patch = True + wrapt.wrap_function_wrapper("bottle", "Bottle.__init__", traced_init) + + +def traced_init(wrapped, instance, args, kwargs): + wrapped(*args, **kwargs) + + service = config._get_service(default="bottle") + + plugin = TracePlugin(service=service) + instance.install(plugin) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/trace.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/trace.py new file mode 100644 index 0000000..fd41c59 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/bottle/trace.py @@ -0,0 +1,107 @@ +from bottle import HTTPError +from bottle import HTTPResponse +from bottle import request +from bottle import response + +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.schema import schematize_url_operation +from ...internal.utils.formats import asbool +from .. 
import trace_utils + + +class TracePlugin(object): + name = "trace" + api = 2 + + def __init__(self, service="bottle", tracer=None, distributed_tracing=None): + self.service = config.service or service + self.tracer = tracer or ddtrace.tracer + if distributed_tracing is not None: + config.bottle.distributed_tracing = distributed_tracing + + @property + def distributed_tracing(self): + return config.bottle.distributed_tracing + + @distributed_tracing.setter + def distributed_tracing(self, distributed_tracing): + config.bottle["distributed_tracing"] = asbool(distributed_tracing) + + def apply(self, callback, route): + def wrapped(*args, **kwargs): + if not self.tracer or not self.tracer.enabled: + return callback(*args, **kwargs) + + resource = "{} {}".format(request.method, route.rule) + + trace_utils.activate_distributed_headers( + self.tracer, int_config=config.bottle, request_headers=request.headers + ) + + with self.tracer.trace( + schematize_url_operation("bottle.request", protocol="http", direction=SpanDirection.INBOUND), + service=self.service, + resource=resource, + span_type=SpanTypes.WEB, + ) as s: + s.set_tag_str(COMPONENT, config.bottle.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + s.set_tag(SPAN_MEASURED_KEY) + # set analytics sample rate with global config enabled + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.bottle.get_analytics_sample_rate(use_global_config=True)) + + code = None + result = None + try: + result = callback(*args, **kwargs) + return result + except (HTTPError, HTTPResponse) as e: + # you can interrupt flows using abort(status_code, 'message')... + # we need to respect the defined status_code. + # we also need to handle when response is raised as is the + # case with a 4xx status + code = e.status_code + raise + except Exception: + # bottle doesn't always translate unhandled exceptions, so + # we mark it here. + code = 500 + raise + finally: + if isinstance(result, HTTPResponse): + response_code = result.status_code + elif code: + response_code = code + else: + # bottle local response has not yet been updated so this + # will be default + response_code = response.status_code + + method = request.method + url = request.urlparts._replace(query="").geturl() + full_route = "/".join([request.script_name.rstrip("/"), route.rule.lstrip("/")]) + trace_utils.set_http_meta( + s, + config.bottle, + method=method, + url=url, + status_code=response_code, + query=request.query_string, + request_headers=request.headers, + response_headers=response.headers, + route=full_route, + ) + + return wrapped diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/__init__.py new file mode 100644 index 0000000..ea9376c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/__init__.py @@ -0,0 +1,34 @@ +"""Instrument Cassandra to report Cassandra queries. + +``import ddtrace.auto`` will automatically patch your Cluster instance to make it work. 
+:: + + from ddtrace import Pin, patch + from cassandra.cluster import Cluster + + # If not patched yet, you can patch cassandra specifically + patch(cassandra=True) + + # This will report spans with the default instrumentation + cluster = Cluster(contact_points=["127.0.0.1"], port=9042) + session = cluster.connect("my_keyspace") + # Example of instrumented query + session.execute("select id from my_table limit 10;") + + # Use a pin to specify metadata related to this cluster + cluster = Cluster(contact_points=['10.1.1.3', '10.1.1.4', '10.1.1.5'], port=9042) + Pin.override(cluster, service='cassandra-backend') + session = cluster.connect("my_keyspace") + session.execute("select id from my_table limit 10;") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["cassandra.cluster"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import patch + from .session import get_version + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/patch.py new file mode 100644 index 0000000..d82e620 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/patch.py @@ -0,0 +1,5 @@ +from .session import patch +from .session import unpatch + + +__all__ = ["patch", "unpatch"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/session.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/session.py new file mode 100644 index 0000000..c8f821c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cassandra/session.py @@ -0,0 +1,294 @@ +""" +Trace queries along a session to a cassandra cluster +""" +import sys + +from cassandra import __version__ + + +try: + import cassandra.cluster as cassandra_cluster +except AttributeError: + from cassandra import cluster as cassandra_cluster + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import ERROR_MSG +from ...constants import ERROR_TYPE +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import cassandra as cassx +from ...ext import db +from ...ext import net +from ...internal.compat import maybe_stringify +from ...internal.logger import get_logger +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ...internal.utils.formats import deep_getattr +from ...pin import Pin +from ...vendor import wrapt + + +log = get_logger(__name__) + +RESOURCE_MAX_LENGTH = 5000 +SERVICE = schematize_service_name("cassandra") +CURRENT_SPAN = "_ddtrace_current_span" +PAGE_NUMBER = "_ddtrace_page_number" + + +# Original connect connect function +_connect = cassandra_cluster.Cluster.connect + + +def get_version(): + # type: () -> str + return __version__ + + +def patch(): + """patch will add tracing to the cassandra library.""" + cassandra_cluster.Cluster.connect = wrapt.FunctionWrapper(_connect, traced_connect) + Pin(service=SERVICE).onto(cassandra_cluster.Cluster) + + +def unpatch(): + cassandra_cluster.Cluster.connect = _connect + + +def traced_connect(func, instance, args, kwargs): + session = func(*args, **kwargs) + if not isinstance(session.execute, wrapt.FunctionWrapper): + # FIXME[matt] this 
should probably be private. + session.execute_async = wrapt.FunctionWrapper(session.execute_async, traced_execute_async) + return session + + +def _close_span_on_success(result, future): + span = getattr(future, CURRENT_SPAN, None) + if not span: + log.debug("traced_set_final_result was not able to get the current span from the ResponseFuture") + return + try: + span.set_tags(_extract_result_metas(cassandra_cluster.ResultSet(future, result))) + except Exception: + log.debug("an exception occurred while setting tags", exc_info=True) + finally: + span.finish() + delattr(future, CURRENT_SPAN) + + +def traced_set_final_result(func, instance, args, kwargs): + result = get_argument_value(args, kwargs, 0, "response") + _close_span_on_success(result, instance) + return func(*args, **kwargs) + + +def _close_span_on_error(exc, future): + span = getattr(future, CURRENT_SPAN, None) + if not span: + log.debug("traced_set_final_exception was not able to get the current span from the ResponseFuture") + return + try: + # handling the exception manually because we + # don't have an ongoing exception here + span.error = 1 + span.set_tag_str(ERROR_MSG, exc.args[0]) + span.set_tag_str(ERROR_TYPE, exc.__class__.__name__) + except Exception: + log.debug("traced_set_final_exception was not able to set the error, failed with error", exc_info=True) + finally: + span.finish() + delattr(future, CURRENT_SPAN) + + +def traced_set_final_exception(func, instance, args, kwargs): + exc = get_argument_value(args, kwargs, 0, "response") + _close_span_on_error(exc, instance) + return func(*args, **kwargs) + + +def traced_start_fetching_next_page(func, instance, args, kwargs): + has_more_pages = getattr(instance, "has_more_pages", True) + if not has_more_pages: + return func(*args, **kwargs) + session = getattr(instance, "session", None) + cluster = getattr(session, "cluster", None) + pin = Pin.get_from(cluster) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # In case the current span is not finished we make sure to finish it + old_span = getattr(instance, CURRENT_SPAN, None) + if old_span: + log.debug("previous span was not finished before fetching next page") + old_span.finish() + + query = getattr(instance, "query", None) + + span = _start_span_and_set_tags(pin, query, session, cluster) + + page_number = getattr(instance, PAGE_NUMBER, 1) + 1 + setattr(instance, PAGE_NUMBER, page_number) + setattr(instance, CURRENT_SPAN, span) + try: + return func(*args, **kwargs) + except Exception: + with span: + span.set_exc_info(*sys.exc_info()) + raise + + +def traced_execute_async(func, instance, args, kwargs): + cluster = getattr(instance, "cluster", None) + pin = Pin.get_from(cluster) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + query = get_argument_value(args, kwargs, 0, "query") + + span = _start_span_and_set_tags(pin, query, instance, cluster) + + try: + result = func(*args, **kwargs) + setattr(result, CURRENT_SPAN, span) + setattr(result, PAGE_NUMBER, 1) + result._set_final_result = wrapt.FunctionWrapper(result._set_final_result, traced_set_final_result) + result._set_final_exception = wrapt.FunctionWrapper(result._set_final_exception, traced_set_final_exception) + result.start_fetching_next_page = wrapt.FunctionWrapper( + result.start_fetching_next_page, traced_start_fetching_next_page + ) + + # Since we cannot be sure that the previous methods were overwritten + # before the call ended, we add callbacks that will be run + # synchronously if the call already returned and we remove 
them right + # after. + result.add_callbacks( + _close_span_on_success, _close_span_on_error, callback_args=(result,), errback_args=(result,) + ) + result.clear_callbacks() + return result + except Exception: + with span: + span.set_exc_info(*sys.exc_info()) + raise + + +def _start_span_and_set_tags(pin, query, session, cluster): + service = pin.service + tracer = pin.tracer + span_name = schematize_database_operation("cassandra.query", database_provider="cassandra") + span = tracer.trace(span_name, service=service, span_type=SpanTypes.CASSANDRA) + + span.set_tag_str(COMPONENT, config.cassandra.integration_name) + span.set_tag_str(db.SYSTEM, "cassandra") + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + _sanitize_query(span, query) + span.set_tags(_extract_session_metas(session)) # FIXME[matt] do once? + span.set_tags(_extract_cluster_metas(cluster)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.cassandra.get_analytics_sample_rate()) + return span + + +def _extract_session_metas(session): + metas = {} + + if getattr(session, "keyspace", None): + # FIXME the keyspace can be overridden explicitly in the query itself + # e.g. 'select * from trace.hash_to_resource' + metas[cassx.KEYSPACE] = session.keyspace.lower() + + return metas + + +def _extract_cluster_metas(cluster): + metas = {} + if deep_getattr(cluster, "metadata.cluster_name"): + metas[cassx.CLUSTER] = cluster.metadata.cluster_name + if getattr(cluster, "port", None): + metas[net.TARGET_PORT] = cluster.port + + return metas + + +def _extract_result_metas(result): + metas = {} + if result is None: + return metas + + future = getattr(result, "response_future", None) + + if future: + # get the host + host = maybe_stringify(getattr(future, "coordinator_host", None)) + if host: + host, _, port = host.partition(":") + metas[net.TARGET_HOST] = host + if port: + metas[net.TARGET_PORT] = int(port) + elif hasattr(future, "_current_host"): + address = deep_getattr(future, "_current_host.address") + if address: + metas[net.TARGET_HOST] = address + + query = getattr(future, "query", None) + if getattr(query, "consistency_level", None): + metas[cassx.CONSISTENCY_LEVEL] = query.consistency_level + if getattr(query, "keyspace", None): + metas[cassx.KEYSPACE] = query.keyspace.lower() + + page_number = getattr(future, PAGE_NUMBER, 1) + has_more_pages = future.has_more_pages + is_paginated = has_more_pages or page_number > 1 + metas[cassx.PAGINATED] = is_paginated + if is_paginated: + metas[cassx.PAGE_NUMBER] = page_number + + if hasattr(result, "current_rows"): + result_rows = result.current_rows or [] + metas[db.ROWCOUNT] = len(result_rows) + + return metas + + +def _sanitize_query(span, query): + # TODO (aaditya): fix this hacky type check. 
we need it to avoid circular imports + t = type(query).__name__ + + resource = None + if t in ("SimpleStatement", "PreparedStatement"): + # reset query if a string is available + resource = getattr(query, "query_string", query) + elif t == "BatchStatement": + resource = "BatchStatement" + # Each element in `_statements_and_parameters` is: + # (is_prepared, statement, parameters) + # ref:https://github.com/datastax/python-driver/blob/13d6d72be74f40fcef5ec0f2b3e98538b3b87459/cassandra/query.py#L844 + # + # For prepared statements, the `statement` value is just the query_id + # which is not a statement and when trying to join with other strings + # raises an error in python3 around joining bytes to unicode, so this + # just filters out prepared statements from this tag value + q = "; ".join(q[1] for q in query._statements_and_parameters[:2] if not q[0]) + span.set_tag_str("cassandra.query", q) + span.set_metric("cassandra.batch_size", len(query._statements_and_parameters)) + elif t == "BoundStatement": + ps = getattr(query, "prepared_statement", None) + if ps: + resource = getattr(ps, "query_string", None) + elif t == "str": + resource = query + else: + resource = "unknown-query-type" # FIXME[matt] what else do to here? + + span.resource = str(resource)[:RESOURCE_MAX_LENGTH] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/__init__.py new file mode 100644 index 0000000..a17b483 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/__init__.py @@ -0,0 +1,58 @@ +""" +The Celery integration will trace all tasks that are executed in the +background. Functions and class based tasks are traced only if the Celery API +is used, so calling the function directly or via the ``run()`` method will not +generate traces. However, calling ``apply()``, ``apply_async()`` and ``delay()`` +will produce tracing data. To trace your Celery application, call the patch method:: + + import celery + from ddtrace import patch + + patch(celery=True) + app = celery.Celery() + + @app.task + def my_task(): + pass + + class MyTask(app.Task): + def run(self): + pass + +Configuration +~~~~~~~~~~~~~ +.. py:data:: ddtrace.config.celery['distributed_tracing'] + + Whether or not to pass distributed tracing headers to Celery workers. + + Can also be enabled with the ``DD_CELERY_DISTRIBUTED_TRACING`` environment variable. + + Default: ``False`` + +.. py:data:: ddtrace.config.celery['producer_service_name'] + + Sets service name for producer + + Default: ``'celery-producer'`` + +.. 
py:data:: ddtrace.config.celery['worker_service_name'] + + Sets service name for worker + + Default: ``'celery-worker'`` + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["celery"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .app import patch_app + from .app import unpatch_app + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "patch_app", "unpatch", "unpatch_app", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/app.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/app.py new file mode 100644 index 0000000..f132ee4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/app.py @@ -0,0 +1,99 @@ +import celery +from celery import signals + +from ddtrace import Pin +from ddtrace import config +from ddtrace.pin import _DD_PIN_NAME + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from .. import trace_utils +from .signals import trace_after_publish +from .signals import trace_before_publish +from .signals import trace_failure +from .signals import trace_postrun +from .signals import trace_prerun +from .signals import trace_retry + + +def patch_app(app, pin=None): + """Attach the Pin class to the application and connect + our handlers to Celery signals. + """ + if getattr(app, "__datadog_patch", False): + return + app.__datadog_patch = True + + # attach the PIN object + pin = pin or Pin( + service=config.celery["worker_service_name"], + _config=config.celery, + ) + pin.onto(app) + + trace_utils.wrap( + "celery.beat", + "Scheduler.apply_entry", + _traced_beat_function(config.celery, "apply_entry", lambda args: args[0].name), + ) + trace_utils.wrap("celery.beat", "Scheduler.tick", _traced_beat_function(config.celery, "tick")) + pin.onto(celery.beat.Scheduler) + + # connect to the Signal framework + signals.task_prerun.connect(trace_prerun, weak=False) + signals.task_postrun.connect(trace_postrun, weak=False) + signals.before_task_publish.connect(trace_before_publish, weak=False) + signals.after_task_publish.connect(trace_after_publish, weak=False) + signals.task_failure.connect(trace_failure, weak=False) + signals.task_retry.connect(trace_retry, weak=False) + return app + + +def unpatch_app(app): + """Remove the Pin instance from the application and disconnect + our handlers from Celery signal framework. 
+ """ + if not getattr(app, "__datadog_patch", False): + return + app.__datadog_patch = False + + pin = Pin.get_from(app) + if pin is not None: + delattr(app, _DD_PIN_NAME) + + trace_utils.unwrap(celery.beat.Scheduler, "apply_entry") + trace_utils.unwrap(celery.beat.Scheduler, "tick") + + signals.task_prerun.disconnect(trace_prerun) + signals.task_postrun.disconnect(trace_postrun) + signals.before_task_publish.disconnect(trace_before_publish) + signals.after_task_publish.disconnect(trace_after_publish) + signals.task_failure.disconnect(trace_failure) + signals.task_retry.disconnect(trace_retry) + + +def _traced_beat_function(integration_config, fn_name, resource_fn=None): + def _traced_beat_inner(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + with pin.tracer.trace( + "celery.beat.{}".format(fn_name), + span_type=SpanTypes.WORKER, + service=trace_utils.ext_service(pin, integration_config), + ) as span: + if resource_fn: + span.resource = resource_fn(args) + span.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) + rate = config.celery.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + span.set_tag(SPAN_MEASURED_KEY) + + return func(*args, **kwargs) + + return _traced_beat_inner diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/constants.py new file mode 100644 index 0000000..39923a0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/constants.py @@ -0,0 +1,20 @@ +from ddtrace import config + + +# Celery Context key +CTX_KEY = "__dd_task_span" + +# Span names +PRODUCER_ROOT_SPAN = "celery.apply" +WORKER_ROOT_SPAN = "celery.run" + +# Task operations +TASK_TAG_KEY = "celery.action" +TASK_APPLY = "apply" +TASK_APPLY_ASYNC = "apply_async" +TASK_RUN = "run" +TASK_RETRY_REASON_KEY = "celery.retry.reason" + +# Service info +PRODUCER_SERVICE = config._get_service(default="celery-producer") +WORKER_SERVICE = config._get_service(default="celery-worker") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/patch.py new file mode 100644 index 0000000..f604e53 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/patch.py @@ -0,0 +1,41 @@ +import os + +import celery + +from ddtrace import config +from ddtrace.internal.utils.formats import asbool + +from .app import patch_app +from .app import unpatch_app +from .constants import PRODUCER_SERVICE +from .constants import WORKER_SERVICE + + +# Celery default settings +config._add( + "celery", + { + "distributed_tracing": asbool(os.getenv("DD_CELERY_DISTRIBUTED_TRACING", default=False)), + "producer_service_name": os.getenv("DD_CELERY_PRODUCER_SERVICE_NAME", default=PRODUCER_SERVICE), + "worker_service_name": os.getenv("DD_CELERY_WORKER_SERVICE_NAME", default=WORKER_SERVICE), + }, +) + + +def get_version(): + # type: () -> str + return str(celery.__version__) + + +def patch(): + """Instrument Celery base application and the `TaskRegistry` so + that any new registered task is automatically instrumented. In the + case of Django-Celery integration, also the `@shared_task` decorator + must be instrumented because Django doesn't use the Celery registry. 
+ """ + patch_app(celery.Celery) + + +def unpatch(): + """Disconnect all signals and remove Tracing capabilities""" + unpatch_app(celery.Celery) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/signals.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/signals.py new file mode 100644 index 0000000..e16f5ec --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/signals.py @@ -0,0 +1,225 @@ +from celery import registry +from celery.utils import nodenames + +from ddtrace import Pin +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import net +from ...internal.logger import get_logger +from ...propagation.http import HTTPPropagator +from .. import trace_utils +from . import constants as c +from .utils import attach_span +from .utils import detach_span +from .utils import retrieve_span +from .utils import retrieve_task_id +from .utils import set_tags_from_context + + +log = get_logger(__name__) +propagator = HTTPPropagator + + +def trace_prerun(*args, **kwargs): + # safe-guard to avoid crashes in case the signals API + # changes in Celery + task = kwargs.get("sender") + task_id = kwargs.get("task_id") + log.debug("prerun signal start task_id=%s", task_id) + if task is None or task_id is None: + log.debug("unable to extract the Task and the task_id. This version of Celery may not be supported.") + return + + # retrieve the task Pin or fallback to the global one + pin = Pin.get_from(task) or Pin.get_from(task.app) + if pin is None: + log.debug("no pin found on task or task.app task_id=%s", task_id) + return + + request_headers = task.request.get("headers", {}) + trace_utils.activate_distributed_headers(pin.tracer, int_config=config.celery, request_headers=request_headers) + + # propagate the `Span` in the current task Context + service = config.celery["worker_service_name"] + span = pin.tracer.trace(c.WORKER_ROOT_SPAN, service=service, resource=task.name, span_type=SpanTypes.WORKER) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) + + # set component tag equal to name of integration + span.set_tag_str(COMPONENT, config.celery.integration_name) + + # set analytics sample rate + rate = config.celery.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + + span.set_tag(SPAN_MEASURED_KEY) + attach_span(task, task_id, span) + + +def trace_postrun(*args, **kwargs): + # safe-guard to avoid crashes in case the signals API + # changes in Celery + task = kwargs.get("sender") + task_id = kwargs.get("task_id") + log.debug("postrun signal task_id=%s", task_id) + if task is None or task_id is None: + log.debug("unable to extract the Task and the task_id. 
This version of Celery may not be supported.") + return + + # retrieve and finish the Span + span = retrieve_span(task, task_id) + if span is None: + log.warning("no existing span found for task_id=%s", task_id) + return + else: + # request context tags + span.set_tag_str(c.TASK_TAG_KEY, c.TASK_RUN) + set_tags_from_context(span, kwargs) + set_tags_from_context(span, task.request.__dict__) + span.finish() + detach_span(task, task_id) + + +def trace_before_publish(*args, **kwargs): + # `before_task_publish` signal doesn't propagate the task instance so + # we need to retrieve it from the Celery Registry to access the `Pin`. The + # `Task` instance **does not** include any information about the current + # execution, so it **must not** be used to retrieve `request` data. + task_name = kwargs.get("sender") + task = registry.tasks.get(task_name) + task_id = retrieve_task_id(kwargs) + # safe-guard to avoid crashes in case the signals API + # changes in Celery + if task is None or task_id is None: + log.debug("unable to extract the Task and the task_id. This version of Celery may not be supported.") + return + + # propagate the `Span` in the current task Context + pin = Pin.get_from(task) or Pin.get_from(task.app) + if pin is None: + return + + # apply some tags here because most of the data is not available + # in the task_after_publish signal + service = config.celery["producer_service_name"] + span = pin.tracer.trace(c.PRODUCER_ROOT_SPAN, service=service, resource=task_name) + + span.set_tag_str(COMPONENT, config.celery.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) + + # set analytics sample rate + rate = config.celery.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag_str(c.TASK_TAG_KEY, c.TASK_APPLY_ASYNC) + span.set_tag_str("celery.id", task_id) + set_tags_from_context(span, kwargs) + if kwargs.get("headers") is not None: + # required to extract hostname from origin header on `celery>=4.0` + set_tags_from_context(span, kwargs["headers"]) + + # Note: adding tags from `traceback` or `state` calls will make an + # API call to the backend for the properties so we should rely + # only on the given `Context` + attach_span(task, task_id, span, is_publish=True) + + if config.celery["distributed_tracing"]: + trace_headers = {} + propagator.inject(span.context, trace_headers) + + # put distributed trace headers where celery will propagate them + task_headers = kwargs.get("headers") or {} + task_headers.setdefault("headers", {}) + task_headers["headers"].update(trace_headers) + kwargs["headers"] = task_headers + + +def trace_after_publish(*args, **kwargs): + task_name = kwargs.get("sender") + task = registry.tasks.get(task_name) + task_id = retrieve_task_id(kwargs) + # safe-guard to avoid crashes in case the signals API + # changes in Celery + if task is None or task_id is None: + log.debug("unable to extract the Task and the task_id. 
This version of Celery may not be supported.") + return + + # retrieve and finish the Span + span = retrieve_span(task, task_id, is_publish=True) + if span is None: + return + else: + nodename = span.get_tag("celery.hostname") + if nodename is not None: + _, host = nodenames.nodesplit(nodename) + span.set_tag_str(net.TARGET_HOST, host) + + span.finish() + detach_span(task, task_id, is_publish=True) + + +def trace_failure(*args, **kwargs): + # safe-guard to avoid crashes in case the signals API + # changes in Celery + task = kwargs.get("sender") + task_id = kwargs.get("task_id") + if task is None or task_id is None: + log.debug("unable to extract the Task and the task_id. This version of Celery may not be supported.") + return + + # retrieve and finish the Span + span = retrieve_span(task, task_id) + if span is None: + return + else: + # add Exception tags; post signals are still called + # so we don't need to attach other tags here + ex = kwargs.get("einfo") + if ex is None: + return + + if hasattr(task, "throws"): + original_exception = ex.exception + if hasattr(original_exception, "exc"): + # Python 3.11+ support: The original exception is wrapped in an `exc` attribute + original_exception = original_exception.exc + + if isinstance(original_exception, task.throws): + return + + span.set_exc_info(ex.type, ex.exception, ex.tb) + + +def trace_retry(*args, **kwargs): + # safe-guard to avoid crashes in case the signals API + # changes in Celery + task = kwargs.get("sender") + context = kwargs.get("request") + if task is None or context is None: + log.debug("unable to extract the Task or the Context. This version of Celery may not be supported.") + return + + reason = kwargs.get("reason") + if not reason: + log.debug("unable to extract the retry reason. 
This version of Celery may not be supported.") + return + + span = retrieve_span(task, context.id) + if span is None: + return + + # Add retry reason metadata to span + # DEV: Use `str(reason)` instead of `reason.message` in case we get something that isn't an `Exception` + span.set_tag_str(c.TASK_RETRY_REASON_KEY, str(reason)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/utils.py new file mode 100644 index 0000000..0893c04 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/celery/utils.py @@ -0,0 +1,136 @@ +from typing import Any +from typing import Dict +from weakref import WeakValueDictionary + +from ddtrace.contrib.trace_utils import set_flattened_tags +from ddtrace.span import Span + +from .constants import CTX_KEY + + +TAG_KEYS = frozenset( + [ + ("compression", "celery.compression"), + ("correlation_id", "celery.correlation_id"), + ("countdown", "celery.countdown"), + ("delivery_info", "celery.delivery_info"), + ("eta", "celery.eta"), + ("exchange", "celery.exchange"), + ("expires", "celery.expires"), + ("hostname", "celery.hostname"), + ("id", "celery.id"), + ("priority", "celery.priority"), + ("queue", "celery.queue"), + ("reply_to", "celery.reply_to"), + ("retries", "celery.retries"), + ("routing_key", "celery.routing_key"), + ("serializer", "celery.serializer"), + ("timelimit", "celery.timelimit"), + # Celery 4.0 uses `origin` instead of `hostname`; this change preserves + # the same name for the tag despite Celery version + ("origin", "celery.hostname"), + ("state", "celery.state"), + ] +) + + +def should_skip_context_value(key, value): + # type: (str, Any) -> bool + # Skip this key if it is not set + if value is None or value == "": + return True + + # Skip `timelimit` if it is not set (its default/unset value is a + # tuple or a list of `None` values + if key == "timelimit" and all(_ is None for _ in value): + return True + + # Skip `retries` if its value is `0` + if key == "retries" and value == 0: + return True + + return False + + +def set_tags_from_context(span: Span, context: Dict[str, Any]) -> None: + """Helper to extract meta values from a Celery Context""" + + context_tags = [] + for key, tag_name in TAG_KEYS: + value = context.get(key) + if should_skip_context_value(key, value): + continue + + context_tags.append((tag_name, value)) + + set_flattened_tags(span, context_tags) + + +def attach_span(task, task_id, span, is_publish=False): + """Helper to propagate a `Span` for the given `Task` instance. This + function uses a `WeakValueDictionary` that stores a Datadog Span using + the `(task_id, is_publish)` as a key. This is useful when information must be + propagated from one Celery signal to another. + + DEV: We use (task_id, is_publish) for the key to ensure that publishing a + task from within another task does not cause any conflicts. + + This mostly happens when either a task fails and a retry policy is in place, + or when a task is manually retried (e.g. `task.retry()`), we end up trying + to publish a task with the same id as the task currently running. + + Previously publishing the new task would overwrite the existing `celery.run` span + in the `weak_dict` causing that span to be forgotten and never finished. 
+ + NOTE: We cannot test for this well yet, because we do not run a celery worker, + and cannot run `task.apply_async()` + """ + weak_dict = getattr(task, CTX_KEY, None) + if weak_dict is None: + weak_dict = WeakValueDictionary() + setattr(task, CTX_KEY, weak_dict) + + weak_dict[(task_id, is_publish)] = span + + +def detach_span(task, task_id, is_publish=False): + """Helper to remove a `Span` in a Celery task when it's propagated. + This function handles tasks where the `Span` is not attached. + """ + weak_dict = getattr(task, CTX_KEY, None) + if weak_dict is None: + return + + # DEV: See note in `attach_span` for key info + try: + del weak_dict[(task_id, is_publish)] + except KeyError: + pass + + +def retrieve_span(task, task_id, is_publish=False): + """Helper to retrieve an active `Span` stored in a `Task` + instance + """ + weak_dict = getattr(task, CTX_KEY, None) + if weak_dict is None: + return + else: + # DEV: See note in `attach_span` for key info + return weak_dict.get((task_id, is_publish)) + + +def retrieve_task_id(context): + """Helper to retrieve the `Task` identifier from the message `body`. + This helper supports Protocol Version 1 and 2. The Protocol is well + detailed in the official documentation: + http://docs.celeryproject.org/en/latest/internals/protocol.html + """ + headers = context.get("headers") + body = context.get("body") + if headers: + # Protocol Version 2 (default from Celery 4.0) + return headers.get("id") + else: + # Protocol Version 1 + return body.get("id") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/__init__.py new file mode 100644 index 0000000..67c1e59 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/__init__.py @@ -0,0 +1,66 @@ +""" +The Cherrypy trace middleware will track request timings. +It uses the cherrypy hooks and creates a tool to track requests and errors + + +Usage +~~~~~ +To install the middleware, add:: + + from ddtrace import tracer + from ddtrace.contrib.cherrypy import TraceMiddleware + +and create a `TraceMiddleware` object:: + + traced_app = TraceMiddleware(cherrypy, tracer, service="my-cherrypy-app") + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.cherrypy['distributed_tracing'] + + Whether to parse distributed tracing headers from requests received by your CherryPy app. + + Can also be enabled with the ``DD_CHERRYPY_DISTRIBUTED_TRACING`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.cherrypy['service'] + + The service name reported for your CherryPy app. + + Can also be configured via the ``DD_SERVICE`` environment variable. 
+ + Default: ``'cherrypy'`` + + +Example:: +Here is the end result, in a sample app:: + + import cherrypy + + from ddtrace import tracer, Pin + from ddtrace.contrib.cherrypy import TraceMiddleware + TraceMiddleware(cherrypy, tracer, service="my-cherrypy-app") + + @cherrypy.tools.tracer() + class HelloWorld(object): + def index(self): + return "Hello World" + index.exposed = True + + cherrypy.quickstart(HelloWorld()) +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["cherrypy"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .middleware import TraceMiddleware + from .middleware import get_version + + __all__ = ["TraceMiddleware", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/middleware.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/middleware.py new file mode 100644 index 0000000..6e64fbf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/cherrypy/middleware.py @@ -0,0 +1,159 @@ +""" +Datadog trace code for cherrypy. +""" +import logging +import os + +import cherrypy +from cherrypy.lib.httputil import valid_status + +from ddtrace import config +from ddtrace.constants import ERROR_MSG +from ddtrace.constants import ERROR_STACK +from ddtrace.constants import ERROR_TYPE +from ddtrace.constants import SPAN_KIND +from ddtrace.internal.constants import COMPONENT + +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal import compat +from ...internal.schema import SpanDirection +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from ...internal.utils.formats import asbool +from .. import trace_utils + + +log = logging.getLogger(__name__) + + +# Configure default configuration +config._add( + "cherrypy", + dict( + distributed_tracing=asbool(os.getenv("DD_CHERRYPY_DISTRIBUTED_TRACING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(cherrypy, "__version__", "") + + +SPAN_NAME = schematize_url_operation("cherrypy.request", protocol="http", direction=SpanDirection.INBOUND) + + +class TraceTool(cherrypy.Tool): + def __init__(self, app, tracer, service, use_distributed_tracing=None): + self.app = app + self._tracer = tracer + self.service = service + if use_distributed_tracing is not None: + self.use_distributed_tracing = use_distributed_tracing + + # CherryPy uses priority to determine which tools act first on each event. The lower the number, the higher + # the priority. 
See: https://docs.cherrypy.org/en/latest/extend.html#tools-ordering + cherrypy.Tool.__init__(self, "on_start_resource", self._on_start_resource, priority=95) + + @property + def use_distributed_tracing(self): + return config.cherrypy.distributed_tracing + + @use_distributed_tracing.setter + def use_distributed_tracing(self, use_distributed_tracing): + config.cherrypy["distributed_tracing"] = asbool(use_distributed_tracing) + + @property + def service(self): + return config.cherrypy.get("service", "cherrypy") + + @service.setter + def service(self, service): + config.cherrypy["service"] = schematize_service_name(service) + + def _setup(self): + cherrypy.Tool._setup(self) + cherrypy.request.hooks.attach("on_end_request", self._on_end_request, priority=5) + cherrypy.request.hooks.attach("after_error_response", self._after_error_response, priority=5) + + def _on_start_resource(self): + trace_utils.activate_distributed_headers( + self._tracer, int_config=config.cherrypy, request_headers=cherrypy.request.headers + ) + + cherrypy.request._datadog_span = self._tracer.trace( + SPAN_NAME, + service=trace_utils.int_service(None, config.cherrypy, default="cherrypy"), + span_type=SpanTypes.WEB, + ) + + cherrypy.request._datadog_span.set_tag_str(COMPONENT, config.cherrypy.integration_name) + + # set span.kind to the type of request being performed + cherrypy.request._datadog_span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + def _after_error_response(self): + span = getattr(cherrypy.request, "_datadog_span", None) + + if not span: + log.warning("cherrypy: tracing tool after_error_response hook called, but no active span found") + return + + span.error = 1 + span.set_tag_str(ERROR_TYPE, str(cherrypy._cperror._exc_info()[0])) + span.set_tag_str(ERROR_MSG, str(cherrypy._cperror._exc_info()[1])) + span.set_tag_str(ERROR_STACK, cherrypy._cperror.format_exc()) + + self._close_span(span) + + def _on_end_request(self): + span = getattr(cherrypy.request, "_datadog_span", None) + + if not span: + log.warning("cherrypy: tracing tool on_end_request hook called, but no active span found") + return + + self._close_span(span) + + def _close_span(self, span): + # Let users specify their own resource in middleware if they so desire. + # See case https://github.com/DataDog/dd-trace-py/issues/353 + if span.resource == SPAN_NAME: + # In the future, mask virtual path components in a + # URL e.g. /dispatch/abc123 becomes /dispatch/{{test_value}}/ + # Following investigation, this should be possible using + # [find_handler](https://docs.cherrypy.org/en/latest/_modules/cherrypy/_cpdispatch.html#Dispatcher.find_handler) + # but this may not be as easy as `cherrypy.request.dispatch.find_handler(cherrypy.request.path_info)` as + # this function only ever seems to return an empty list for the virtual path components. + + # For now, default resource is method and path: + # GET / + # POST /save + resource = "{} {}".format(cherrypy.request.method, cherrypy.request.path_info) + span.resource = compat.to_unicode(resource) + + url = compat.to_unicode(cherrypy.request.base + cherrypy.request.path_info) + status_code, _, _ = valid_status(cherrypy.response.status) + + trace_utils.set_http_meta( + span, + config.cherrypy, + method=cherrypy.request.method, + url=url, + status_code=status_code, + request_headers=cherrypy.request.headers, + response_headers=cherrypy.response.headers, + ) + + span.finish() + + # Clear our span just in case. 
+ cherrypy.request._datadog_span = None + + +class TraceMiddleware(object): + def __init__(self, app, tracer, service="cherrypy", distributed_tracing=None): + self.app = app + + self.app.tools.tracer = TraceTool(app, tracer, service, distributed_tracing) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/__init__.py new file mode 100644 index 0000000..e56a261 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/__init__.py @@ -0,0 +1,33 @@ +"""Instrument Consul to trace KV queries. + +Only supports tracing for the synchronous client. + +``import ddtrace.auto`` will automatically patch your Consul client to make it work. +:: + + from ddtrace import Pin, patch + import consul + + # If not patched yet, you can patch consul specifically + patch(consul=True) + + # This will report a span with the default settings + client = consul.Consul(host="127.0.0.1", port=8500) + client.get("my-key") + + # Use a pin to specify metadata related to this client + Pin.override(client, service='consul-kv') +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["consul"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/patch.py new file mode 100644 index 0000000..4187340 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/consul/patch.py @@ -0,0 +1,85 @@ +import consul + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import consul as consulx +from ...ext import net +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from ...internal.utils import get_argument_value +from ...internal.utils.wrappers import unwrap as _u +from ...pin import Pin + + +_KV_FUNCS = ["put", "get", "delete"] + + +def get_version(): + # type: () -> str + return getattr(consul, "__version__", "") + + +def patch(): + if getattr(consul, "__datadog_patch", False): + return + consul.__datadog_patch = True + + pin = Pin(service=schematize_service_name(consulx.SERVICE)) + pin.onto(consul.Consul.KV) + + for f_name in _KV_FUNCS: + _w("consul", "Consul.KV.%s" % f_name, wrap_function(f_name)) + + +def unpatch(): + if not getattr(consul, "__datadog_patch", False): + return + consul.__datadog_patch = False + + for f_name in _KV_FUNCS: + _u(consul.Consul.KV, f_name) + + +def wrap_function(name): + def trace_func(wrapped, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + # Only patch the synchronous implementation + if not isinstance(instance.agent.http, consul.std.HTTPClient): + return wrapped(*args, **kwargs) + + path = get_argument_value(args, kwargs, 0, "key") + resource = name.upper() + + with pin.tracer.trace( + schematize_url_operation(consulx.CMD, protocol="http", 
direction=SpanDirection.OUTBOUND), + service=pin.service, + resource=resource, + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.consul.integration_name) + + span.set_tag_str(net.TARGET_HOST, instance.agent.http.host) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + rate = config.consul.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + span.set_tag_str(consulx.KEY, path) + span.set_tag_str(consulx.CMD, resource) + return wrapped(*args, **kwargs) + + return trace_func diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/__init__.py new file mode 100644 index 0000000..214c471 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/__init__.py @@ -0,0 +1,32 @@ +""" +The Coverage.py integration traces test code coverage when using `pytest` or `unittest`. + + +Enabling +~~~~~~~~ + +The Coverage.py integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Alternately, use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(coverage=True) + +Note: Coverage.py instrumentation is only enabled if `pytest` or `unittest` instrumentation is enabled. +""" +from ...internal.logger import get_logger +from ...internal.utils.importlib import require_modules + + +required_modules = ["coverage"] +log = get_logger(__name__) + + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/constants.py new file mode 100644 index 0000000..b945c4e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/constants.py @@ -0,0 +1 @@ +PCT_COVERED_KEY = "pct_coverage" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/data.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/data.py new file mode 100644 index 0000000..ebeb0e0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/data.py @@ -0,0 +1,7 @@ +from copy import copy +import sys + + +_coverage_data = {} + +_original_sys_argv_command = copy(sys.argv) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/patch.py new file mode 100644 index 0000000..55471eb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/patch.py @@ -0,0 +1,62 @@ +from ddtrace.contrib.coverage.constants import PCT_COVERED_KEY +from ddtrace.contrib.coverage.data import _coverage_data +from ddtrace.contrib.coverage.utils import is_coverage_loaded +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.vendor import wrapt + + +try: + import coverage +except ImportError: + coverage = None # type: ignore[misc,assignment] + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + """ + Patch the instrumented methods from Coverage.py + """ + if getattr(coverage, "_datadog_patch", False) or not is_coverage_loaded(): + return + + 
coverage._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + _w(coverage, "Coverage.report", report_total_pct_covered_wrapper) + + +def unpatch(): + """ + Undo patched instrumented methods from Coverage.py + """ + if not getattr(coverage, "_datadog_patch", False) or not is_coverage_loaded(): + return + + _u(coverage.Coverage, "report") + + coverage._datadog_patch = False + + +def report_total_pct_covered_wrapper(func, instance, args: tuple, kwargs: dict): + pct_covered = func(*args, **kwargs) + _coverage_data[PCT_COVERED_KEY] = pct_covered + return pct_covered + + +def run_coverage_report(): + if not is_coverage_loaded(): + return + try: + current_coverage_object = coverage.Coverage.current() + _coverage_data[PCT_COVERED_KEY] = current_coverage_object.report() + except Exception: + log.warning("An exception occurred when running a coverage report") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/utils.py new file mode 100644 index 0000000..018940f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/coverage/utils.py @@ -0,0 +1,26 @@ +import os +import sys +from typing import List + +from ddtrace.contrib.coverage.data import _original_sys_argv_command + + +def is_coverage_loaded() -> bool: + return "coverage" in sys.modules + + +def _is_coverage_patched(): + if not is_coverage_loaded(): + return False + + return getattr(sys.modules["coverage"], "_datadog_patch", False) + + +def _command_invokes_coverage_run(sys_argv_command: List[str]) -> bool: + return "coverage run -m" in " ".join(sys_argv_command) + + +def _is_coverage_invoked_by_coverage_run() -> bool: + if os.environ.get("COVERAGE_RUN", False): + return True + return _command_invokes_coverage_run(_original_sys_argv_command) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi/__init__.py new file mode 100644 index 0000000..92d5efe --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi/__init__.py @@ -0,0 +1,344 @@ +""" +Generic dbapi tracing code. +""" +from ddtrace import config +from ddtrace.appsec._iast._utils import _is_iast_enabled +from ddtrace.internal.constants import COMPONENT + +from ...appsec._constants import IAST_SPAN_TAGS +from ...appsec._iast._metrics import increment_iast_span_metric +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import sql +from ...internal.logger import get_logger +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...pin import Pin +from ...vendor import wrapt +from ..trace_utils import ext_service +from ..trace_utils import iswrapped + + +log = get_logger(__name__) + + +config._add( + "dbapi2", + dict( + _default_service="db", + _dbapi_span_name_prefix="sql", + trace_fetch_methods=None, # Part of the API. Should be implemented at the integration level. 
+ ), +) + + +def get_version(): + # type: () -> str + return "" + + +class TracedCursor(wrapt.ObjectProxy): + """TracedCursor wraps a psql cursor and traces its queries.""" + + def __init__(self, cursor, pin, cfg): + super(TracedCursor, self).__init__(cursor) + pin.onto(self) + # Allow dbapi-based integrations to override default span name prefix + span_name_prefix = ( + cfg["_dbapi_span_name_prefix"] + if cfg and "_dbapi_span_name_prefix" in cfg + else config.dbapi2["_dbapi_span_name_prefix"] + ) + span_name = ( + cfg["_dbapi_span_operation_name"] + if cfg and "_dbapi_span_operation_name" in cfg + else "{}.query".format(span_name_prefix) + ) + self._self_datadog_name = span_name + self._self_last_execute_operation = None + self._self_config = cfg or config.dbapi2 + self._self_dbm_propagator = getattr(self._self_config, "_dbm_propagator", None) + + def __iter__(self): + return self.__wrapped__.__iter__() + + def __next__(self): + return self.__wrapped__.__next__() + + def _trace_method(self, method, name, resource, extra_tags, dbm_propagator, *args, **kwargs): + """ + Internal function to trace the call to the underlying cursor method + :param method: The callable to be wrapped + :param name: The name of the resulting span. + :param resource: The sql query. Sql queries are obfuscated on the agent side. + :param extra_tags: A dict of tags to store into the span's meta + :param dbm_propagator: _DBM_Propagator, prepends dbm comments to sql statements + :param args: The args that will be passed as positional args to the wrapped method + :param kwargs: The args that will be passed as kwargs to the wrapped method + :return: The result of the wrapped method invocation + """ + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + return method(*args, **kwargs) + measured = name == self._self_datadog_name + + with pin.tracer.trace( + name, service=ext_service(pin, self._self_config), resource=resource, span_type=SpanTypes.SQL + ) as s: + if measured: + s.set_tag(SPAN_MEASURED_KEY) + # No reason to tag the query since it is set as the resource by the agent. 
See: + # https://github.com/DataDog/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + s.set_tag_str(COMPONENT, self._self_config.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + if _is_iast_enabled(): + try: + from ddtrace.appsec._iast._metrics import _set_metric_iast_executed_sink + from ddtrace.appsec._iast._taint_utils import check_tainted_args + from ddtrace.appsec._iast.taint_sinks.sql_injection import SqlInjection + + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, SqlInjection.vulnerability_type) + _set_metric_iast_executed_sink(SqlInjection.vulnerability_type) + if check_tainted_args(args, kwargs, pin.tracer, self._self_config.integration_name, method): + SqlInjection.report(evidence_value=args[0]) + except Exception: + log.debug("Unexpected exception while reporting vulnerability", exc_info=True) + + # set analytics sample rate if enabled but only for non-FetchTracedCursor + if not isinstance(self, FetchTracedCursor): + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, self._self_config.get_analytics_sample_rate()) + + if dbm_propagator: + args, kwargs = dbm_propagator.inject(s, args, kwargs) + + try: + return method(*args, **kwargs) + finally: + # Try to fetch custom properties that were passed by the specific Database implementation + self._set_post_execute_tags(s) + + def executemany(self, query, *args, **kwargs): + """Wraps the cursor.executemany method""" + self._self_last_execute_operation = query + # Always return the result as-is + # DEV: Some libraries return `None`, others `int`, and others the cursor objects + # These differences should be overridden at the integration specific layer (e.g. in `sqlite3/patch.py`) + # FIXME[matt] properly handle kwargs here. arg names can be different + # with different libs. + return self._trace_method( + self.__wrapped__.executemany, + self._self_datadog_name, + query, + {"sql.executemany": "true"}, + self._self_dbm_propagator, + query, + *args, + **kwargs, + ) + + def execute(self, query, *args, **kwargs): + """Wraps the cursor.execute method""" + self._self_last_execute_operation = query + + # Always return the result as-is + # DEV: Some libraries return `None`, others `int`, and others the cursor objects + # These differences should be overridden at the integration specific layer (e.g. in `sqlite3/patch.py`) + return self._trace_method( + self.__wrapped__.execute, + self._self_datadog_name, + query, + {}, + self._self_dbm_propagator, + query, + *args, + **kwargs, + ) + + def callproc(self, proc, *args): + """Wraps the cursor.callproc method""" + self._self_last_execute_operation = proc + return self._trace_method(self.__wrapped__.callproc, self._self_datadog_name, proc, {}, None, proc, *args) + + def _set_post_execute_tags(self, span): + # rowcount is in the dbapi specification (https://peps.python.org/pep-0249/#rowcount) + # but some database drivers (cassandra-driver specifically) don't implement it. + row_count = getattr(self.__wrapped__, "rowcount", None) + if row_count is None: + return + span.set_metric(db.ROWCOUNT, row_count) + # Necessary for django integration backward compatibility. Django integration used to provide its own + # implementation of the TracedCursor, which used to store the row count into a tag instead of + # as a metric. Such custom implementation has been replaced by this generic dbapi implementation and + # this tag has been added since. 
+ # Check row count is an integer type to avoid comparison type error + if isinstance(row_count, int) and row_count >= 0: + span.set_tag(db.ROWCOUNT, row_count) + + def __enter__(self): + # previous versions of the dbapi didn't support context managers. let's + # reference the func that would be called to ensure that errors + # messages will be the same. + self.__wrapped__.__enter__ + + # and finally, yield the traced cursor. + return self + + +class FetchTracedCursor(TracedCursor): + """ + Sub-class of :class:`TracedCursor` that also instruments `fetchone`, `fetchall`, and `fetchmany` methods. + + We do not trace these functions by default since they can get very noisy (e.g. `fetchone` with 100k rows). + """ + + def fetchone(self, *args, **kwargs): + """Wraps the cursor.fetchone method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchone") + return self._trace_method( + self.__wrapped__.fetchone, span_name, self._self_last_execute_operation, {}, None, *args, **kwargs + ) + + def fetchall(self, *args, **kwargs): + """Wraps the cursor.fetchall method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchall") + return self._trace_method( + self.__wrapped__.fetchall, span_name, self._self_last_execute_operation, {}, None, *args, **kwargs + ) + + def fetchmany(self, *args, **kwargs): + """Wraps the cursor.fetchmany method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchmany") + # We want to trace the information about how many rows were requested. Note that this number may be larger + # the number of rows actually returned if less then requested are available from the query. + size_tag_key = "db.fetch.size" + + try: + extra_tags = {size_tag_key: get_argument_value(args, kwargs, 0, "size")} + except ArgumentError: + default_array_size = getattr(self.__wrapped__, "arraysize", None) + extra_tags = {size_tag_key: default_array_size} if default_array_size else {} + + return self._trace_method( + self.__wrapped__.fetchmany, span_name, self._self_last_execute_operation, extra_tags, None, *args, **kwargs + ) + + +class TracedConnection(wrapt.ObjectProxy): + """TracedConnection wraps a Connection with tracing code.""" + + def __init__(self, conn, pin=None, cfg=None, cursor_cls=None): + if not cfg: + cfg = config.dbapi2 + # Set default cursor class if one was not provided + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = FetchTracedCursor if cfg.trace_fetch_methods else TracedCursor + + super(TracedConnection, self).__init__(conn) + name = _get_vendor(conn) + self._self_datadog_name = "{}.connection".format(name) + db_pin = pin or Pin(service=name) + db_pin.onto(self) + # wrapt requires prefix of `_self` for attributes that are only in the + # proxy (since some of our source objects will use `__slots__`) + self._self_cursor_cls = cursor_cls + self._self_config = cfg + + def __enter__(self): + """Context management is not defined by the dbapi spec. + + This means unfortunately that the database clients each define their own + implementations. + + The ones we know about are: + + - mysqlclient<2.0 which returns a cursor instance. >=2.0 returns a + connection instance. + - psycopg returns a connection. + - pyodbc returns a connection. + - pymysql doesn't implement it. + - sqlite3 returns the connection. + """ + r = self.__wrapped__.__enter__() + + if hasattr(r, "cursor"): + # r is Connection-like. + if r is self.__wrapped__: + # Return the reference to this proxy object. Returning r would + # return the untraced reference. 
+ return self + else: + # r is a different connection object. + # This should not happen in practice but play it safe so that + # the original functionality is maintained. + return r + elif hasattr(r, "execute"): + # r is Cursor-like. + if iswrapped(r): + return r + else: + pin = Pin.get_from(self) + if not pin: + return r + return self._self_cursor_cls(r, pin, self._self_config) + else: + # Otherwise r is some other object, so maintain the functionality + # of the original. + return r + + def _trace_method(self, method, name, extra_tags, *args, **kwargs): + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + return method(*args, **kwargs) + + with pin.tracer.trace(name, service=ext_service(pin, self._self_config)) as s: + s.set_tag_str(COMPONENT, self._self_config.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + return method(*args, **kwargs) + + def cursor(self, *args, **kwargs): + cursor = self.__wrapped__.cursor(*args, **kwargs) + pin = Pin.get_from(self) + if not pin: + return cursor + return self._self_cursor_cls(cursor=cursor, pin=pin, cfg=self._self_config) + + def commit(self, *args, **kwargs): + span_name = "{}.{}".format(self._self_datadog_name, "commit") + return self._trace_method(self.__wrapped__.commit, span_name, {}, *args, **kwargs) + + def rollback(self, *args, **kwargs): + span_name = "{}.{}".format(self._self_datadog_name, "rollback") + return self._trace_method(self.__wrapped__.rollback, span_name, {}, *args, **kwargs) + + +def _get_vendor(conn): + """Return the vendor (e.g postgres, mysql) of the given + database. + """ + try: + name = _get_module_name(conn) + except Exception: + log.debug("couldn't parse module name", exc_info=True) + name = "sql" + return sql.normalize_vendor(name) + + +def _get_module_name(conn): + return conn.__class__.__module__.split(".")[0] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi_async/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi_async/__init__.py new file mode 100644 index 0000000..af20463 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dbapi_async/__init__.py @@ -0,0 +1,258 @@ +from ddtrace import config +from ddtrace.appsec._iast._utils import _is_iast_enabled +from ddtrace.internal.constants import COMPONENT + +from ...appsec._constants import IAST_SPAN_TAGS +from ...appsec._iast._metrics import increment_iast_span_metric +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.logger import get_logger +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...pin import Pin +from ..dbapi import TracedConnection +from ..dbapi import TracedCursor +from ..trace_utils import ext_service +from ..trace_utils import iswrapped + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return "" + + +class TracedAsyncCursor(TracedCursor): + async def __aenter__(self): + # previous versions of the dbapi didn't support context managers. let's + # reference the func that would be called to ensure that error + # messages will be the same. + await self.__wrapped__.__aenter__() + + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # previous versions of the dbapi didn't support context managers. 
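As a usage sketch of the synchronous wrappers above (the async variants that follow mirror them), assuming a throwaway sqlite3 connection and an arbitrary service name; real driver integrations normally do this wiring automatically in their own patch modules::

    import sqlite3

    from ddtrace import Pin, config
    from ddtrace.contrib.dbapi import TracedConnection

    conn = sqlite3.connect(":memory:")
    traced = TracedConnection(conn, pin=Pin(service="example-db"), cfg=config.dbapi2)

    cur = traced.cursor()                         # returns a TracedCursor
    cur.execute("CREATE TABLE t (x INTEGER)")     # traced as an "sql.query" span
    cur.execute("INSERT INTO t VALUES (1)")
    traced.commit()                               # traced as "<vendor>.connection.commit"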
let's + # reference the func that would be called to ensure that error + # messages will be the same. + return await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) + + async def _trace_method(self, method, name, resource, extra_tags, dbm_propagator, *args, **kwargs): + """ + Internal function to trace the call to the underlying cursor method + :param method: The callable to be wrapped + :param name: The name of the resulting span. + :param resource: The sql query. Sql queries are obfuscated on the agent side. + :param extra_tags: A dict of tags to store into the span's meta + :param dbm_propagator: _DBM_Propagator, prepends dbm comments to sql statements + :param args: The args that will be passed as positional args to the wrapped method + :param kwargs: The args that will be passed as kwargs to the wrapped method + :return: The result of the wrapped method invocation + """ + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + return await method(*args, **kwargs) + measured = name == self._self_datadog_name + + with pin.tracer.trace( + name, service=ext_service(pin, self._self_config), resource=resource, span_type=SpanTypes.SQL + ) as s: + if measured: + s.set_tag(SPAN_MEASURED_KEY) + # No reason to tag the query since it is set as the resource by the agent. See: + # https://github.com/DataDog/datadog-trace-agent/blob/bda1ebbf170dd8c5879be993bdd4dbae70d10fda/obfuscate/sql.go#L232 + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + s.set_tag_str(COMPONENT, self._self_config.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + if _is_iast_enabled(): + from ddtrace.appsec._iast._metrics import _set_metric_iast_executed_sink + from ddtrace.appsec._iast._taint_utils import check_tainted_args + from ddtrace.appsec._iast.taint_sinks.sql_injection import SqlInjection + + increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, SqlInjection.vulnerability_type) + _set_metric_iast_executed_sink(SqlInjection.vulnerability_type) + if check_tainted_args(args, kwargs, pin.tracer, self._self_config.integration_name, method): + SqlInjection.report(evidence_value=args[0]) + + # set analytics sample rate if enabled but only for non-FetchTracedCursor + if not isinstance(self, FetchTracedAsyncCursor): + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, self._self_config.get_analytics_sample_rate()) + + if dbm_propagator: + args, kwargs = dbm_propagator.inject(s, args, kwargs) + + try: + return await method(*args, **kwargs) + finally: + # Try to fetch custom properties that were passed by the specific Database implementation + self._set_post_execute_tags(s) + + async def executemany(self, query, *args, **kwargs): + """Wraps the cursor.executemany method""" + self._self_last_execute_operation = query + # Always return the result as-is + # DEV: Some libraries return `None`, others `int`, and others the cursor objects + # These differences should be overridden at the integration specific layer (e.g. in `sqlite3/patch.py`) + # FIXME[matt] properly handle kwargs here. arg names can be different + # with different libs. 
+ return await self._trace_method( + self.__wrapped__.executemany, + self._self_datadog_name, + query, + {"sql.executemany": "true"}, + self._self_dbm_propagator, + query, + *args, + **kwargs, + ) + + async def execute(self, query, *args, **kwargs): + """Wraps the cursor.execute method""" + self._self_last_execute_operation = query + + # Always return the result as-is + # DEV: Some libraries return `None`, others `int`, and others the cursor objects + # These differences should be overridden at the integration specific layer (e.g. in `sqlite3/patch.py`) + return await self._trace_method( + self.__wrapped__.execute, + self._self_datadog_name, + query, + {}, + self._self_dbm_propagator, + query, + *args, + **kwargs, + ) + + +class FetchTracedAsyncCursor(TracedAsyncCursor): + """FetchTracedAsyncCursor for psycopg""" + + async def fetchone(self, *args, **kwargs): + """Wraps the cursor.fetchone method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchone") + return await self._trace_method( + self.__wrapped__.fetchone, span_name, self._self_last_execute_operation, {}, None, *args, **kwargs + ) + + async def fetchall(self, *args, **kwargs): + """Wraps the cursor.fetchall method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchall") + return await self._trace_method( + self.__wrapped__.fetchall, span_name, self._self_last_execute_operation, {}, None, *args, **kwargs + ) + + async def fetchmany(self, *args, **kwargs): + """Wraps the cursor.fetchmany method""" + span_name = "{}.{}".format(self._self_datadog_name, "fetchmany") + # We want to trace the information about how many rows were requested. Note that this number may be larger + # the number of rows actually returned if less then requested are available from the query. + size_tag_key = "db.fetch.size" + + try: + extra_tags = {size_tag_key: get_argument_value(args, kwargs, 0, "size")} + except ArgumentError: + default_array_size = getattr(self.__wrapped__, "arraysize", None) + extra_tags = {size_tag_key: default_array_size} if default_array_size else {} + + return await self._trace_method( + self.__wrapped__.fetchmany, span_name, self._self_last_execute_operation, extra_tags, None, *args, **kwargs + ) + + +class TracedAsyncConnection(TracedConnection): + def __init__(self, conn, pin=None, cfg=config.dbapi2, cursor_cls=None): + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = FetchTracedAsyncCursor if cfg.trace_fetch_methods else TracedAsyncCursor + super(TracedAsyncConnection, self).__init__(conn, pin, cfg, cursor_cls) + + async def __aenter__(self): + """Context management is not defined by the dbapi spec. + + This means unfortunately that the database clients each define their own + implementations. + + The ones we know about are: + + - mysqlclient<2.0 which returns a cursor instance. >=2.0 returns a + connection instance. + - psycopg returns a connection. + - pyodbc returns a connection. + - pymysql doesn't implement it. + - sqlite3 returns the connection. + """ + r = await self.__wrapped__.__aenter__() + + if hasattr(r, "cursor"): + # r is Connection-like. + if r is self.__wrapped__: + # Return the reference to this proxy object. Returning r would + # return the untraced reference. + return self + else: + # r is a different connection object. + # This should not happen in practice but play it safe so that + # the original functionality is maintained. + log.warning( + "Unexpected object type returned from __wrapped__.__aenter__()." 
+ "Expected a wrapped instance, but received a different object." + ) + return r + elif hasattr(r, "execute"): + # r is Cursor-like. + if iswrapped(r): + return r + else: + pin = Pin.get_from(self) + if not pin: + return r + return self._self_cursor_cls(r, pin, self._self_config) + else: + # Otherwise r is some other object, so maintain the functionality + # of the original. + log.warning( + "Unexpected object type returned from __wrapped__.__aenter__()." + "Expected a wrapped instance, but received a different object." + ) + return r + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # previous versions of the dbapi didn't support context managers. let's + # reference the func that would be called to ensure that errors + # messages will be the same. + return await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) + + async def _trace_method(self, method, name, extra_tags, *args, **kwargs): + pin = Pin.get_from(self) + if not pin or not pin.enabled(): + return await method(*args, **kwargs) + + with pin.tracer.trace(name, service=ext_service(pin, self._self_config)) as s: + s.set_tag_str(COMPONENT, self._self_config.integration_name) + + # set span.kind to the type of request being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tags(pin.tags) + s.set_tags(extra_tags) + + return await method(*args, **kwargs) + + async def commit(self, *args, **kwargs): + span_name = "{}.{}".format(self._self_datadog_name, "commit") + return await self._trace_method(self.__wrapped__.commit, span_name, {}, *args, **kwargs) + + async def rollback(self, *args, **kwargs): + span_name = "{}.{}".format(self._self_datadog_name, "rollback") + return await self._trace_method(self.__wrapped__.rollback, span_name, {}, *args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/__init__.py new file mode 100644 index 0000000..10f52a3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/__init__.py @@ -0,0 +1,192 @@ +""" +The Django__ integration traces requests, views, template renderers, database +and cache calls in a Django application. + + +Enable Django tracing automatically via ``ddtrace-run``:: + + ddtrace-run python manage.py runserver + + +Django tracing can also be enabled manually:: + + import ddtrace.auto + + +To have Django capture the tracer logs, ensure the ``LOGGING`` variable in +``settings.py`` looks similar to:: + + LOGGING = { + 'loggers': { + 'ddtrace': { + 'handlers': ['console'], + 'level': 'WARNING', + }, + }, + } + + +Configuration +~~~~~~~~~~~~~ + +.. important:: + + Note that the in-code configuration must be run before Django is instrumented. This means that in-code configuration + will not work with ``ddtrace-run`` and before a call to ``patch`` or ``import ddtrace.auto``. + + +.. py:data:: ddtrace.config.django['distributed_tracing_enabled'] + + Whether or not to parse distributed tracing headers from requests received by your Django app. + + Default: ``True`` + +.. py:data:: ddtrace.config.django['service_name'] + + The service name reported for your Django app. + + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'django'`` + +.. py:data:: ddtrace.config.django['cache_service_name'] + + The service name reported for your Django app cache layer. + + Can also be configured via the ``DD_DJANGO_CACHE_SERVICE_NAME`` environment variable. + + Default: ``'django'`` + +.. 
py:data:: ddtrace.config.django['database_service_name'] + + A string reported as the service name of the Django app database layer. + + Can also be configured via the ``DD_DJANGO_DATABASE_SERVICE_NAME`` environment variable. + + Takes precedence over database_service_name_prefix. + + Default: ``''`` + +.. py:data:: ddtrace.config.django['database_service_name_prefix'] + + A string to be prepended to the service name reported for your Django app database layer. + + Can also be configured via the ``DD_DJANGO_DATABASE_SERVICE_NAME_PREFIX`` environment variable. + + The database service name is the name of the database appended with 'db'. Has a lower precedence than database_service_name. + + Default: ``''`` + +.. py:data:: ddtrace.config.django["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also be configured via the ``DD_DJANGO_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + +.. py:data:: ddtrace.config.django['instrument_middleware'] + + Whether or not to instrument middleware. + + Can also be enabled with the ``DD_DJANGO_INSTRUMENT_MIDDLEWARE`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.django['instrument_templates'] + + Whether or not to instrument template rendering. + + Can also be enabled with the ``DD_DJANGO_INSTRUMENT_TEMPLATES`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.django['instrument_databases'] + + Whether or not to instrument databases. + + Can also be enabled with the ``DD_DJANGO_INSTRUMENT_DATABASES`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.django['instrument_caches'] + + Whether or not to instrument caches. + + Can also be enabled with the ``DD_DJANGO_INSTRUMENT_CACHES`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.django.http['trace_query_string'] + + Whether or not to include the query string as a tag. + + Default: ``False`` + +.. py:data:: ddtrace.config.django['include_user_name'] + + Whether or not to include the authenticated user's username as a tag on the root request span. + + Can also be configured via the ``DD_DJANGO_INCLUDE_USER_NAME`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.django['use_handler_resource_format'] + + Whether or not to use the resource format `"{method} {handler}"`. Can also be + enabled with the ``DD_DJANGO_USE_HANDLER_RESOURCE_FORMAT`` environment + variable. + + The default resource format for Django >= 2.2.0 is otherwise `"{method} {urlpattern}"`. + + Default: ``False`` + +.. py:data:: ddtrace.config.django['use_handler_with_url_name_resource_format'] + + Whether or not to use the resource format `"{method} {handler}.{url_name}"`. Can also be + enabled with the ``DD_DJANGO_USE_HANDLER_WITH_URL_NAME_RESOURCE_FORMAT`` environment + variable. + + This configuration applies only for Django <= 2.2.0. + + Default: ``False`` + +.. py:data:: ddtrace.config.django['use_legacy_resource_format'] + + Whether or not to use the legacy resource format `"{handler}"`. Can also be + enabled with the ``DD_DJANGO_USE_LEGACY_RESOURCE_FORMAT`` environment + variable. + + The default resource format for Django >= 2.2.0 is otherwise `"{method} {urlpattern}"`. + + Default: ``False`` + +Example:: + + from ddtrace import config + + # Enable distributed tracing + config.django['distributed_tracing_enabled'] = True + + # Override service name + config.django['service_name'] = 'custom-service-name' + + +:ref:`Headers tracing ` is supported for this integration. 
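A minimal in-code configuration sketch consistent with the options documented above (the service names are hypothetical); because configuration has to happen before Django is instrumented, it is paired with an explicit `patch()` call rather than `ddtrace-run`::

    from ddtrace import config, patch

    config.django["service_name"] = "my-django-app"           # or DD_SERVICE
    config.django["cache_service_name"] = "my-django-cache"   # or DD_DJANGO_CACHE_SERVICE_NAME
    config.django["trace_fetch_methods"] = True                # or DD_DJANGO_TRACE_FETCH_METHODS
    config.django["distributed_tracing_enabled"] = True

    patch(django=True)  # apply the instrumentation only after configuring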
+ +.. __: https://www.djangoproject.com/ +""" # noqa: E501 +from ...internal.utils.importlib import require_modules + + +required_modules = ["django"] + + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . import patch as _patch + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "_patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/_asgi.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/_asgi.py new file mode 100644 index 0000000..74f563d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/_asgi.py @@ -0,0 +1,36 @@ +""" +Module providing async hooks. Do not import this module unless using Python >= 3.6. +""" +from ddtrace.contrib.asgi import span_from_scope + +from ...internal.utils import get_argument_value +from .. import trace_utils +from .utils import REQUEST_DEFAULT_RESOURCE +from .utils import _after_request_tags +from .utils import _before_request_tags + + +@trace_utils.with_traced_module +async def traced_get_response_async(django, pin, func, instance, args, kwargs): + """Trace django.core.handlers.base.BaseHandler.get_response() (or other implementations). + + This is the main entry point for requests. + + Django requests are handled by a Handler.get_response method (inherited from base.BaseHandler). + This method invokes the middleware chain and returns the response generated by the chain. + """ + request = get_argument_value(args, kwargs, 0, "request") + span = span_from_scope(request.scope) + if span is None: + return await func(*args, **kwargs) + + # Reset the span resource so we can know if it was modified during the request or not + span.resource = REQUEST_DEFAULT_RESOURCE + _before_request_tags(pin, span, request) + response = None + try: + response = await func(*args, **kwargs) + finally: + # DEV: Always set these tags, this is where `span.resource` is set + _after_request_tags(pin, span, request, response) + return response diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/compat.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/compat.py new file mode 100644 index 0000000..20f0a52 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/compat.py @@ -0,0 +1,31 @@ +import django + + +if django.VERSION >= (1, 10, 1): + from django.urls import get_resolver + + def user_is_authenticated(user): + # Explicit comparison due to the following bug + # https://code.djangoproject.com/ticket/26988 + return user.is_authenticated == True # noqa E712 + +else: + from django.conf import settings + from django.core import urlresolvers + + def user_is_authenticated(user): + return user.is_authenticated() + + if django.VERSION >= (1, 9, 0): + + def get_resolver(urlconf=None): + urlconf = urlconf or settings.ROOT_URLCONF + urlresolvers.set_urlconf(urlconf) + return urlresolvers.get_resolver(urlconf) + + else: + + def get_resolver(urlconf=None): + urlconf = urlconf or settings.ROOT_URLCONF + urlresolvers.set_urlconf(urlconf) + return urlresolvers.RegexURLResolver(r"^/", urlconf) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/patch.py new file mode 100644 index 0000000..b6b76f9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/patch.py @@ -0,0 +1,875 @@ +""" +The Django patching works as follows: + +Django internals are instrumented via normal 
`patch()`. + +`django.apps.registry.Apps.populate` is patched to add instrumentation for any +specific Django apps like Django Rest Framework (DRF). +""" +import functools +from inspect import getmro +from inspect import isclass +from inspect import isfunction +import os + +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.contrib import dbapi +from ddtrace.contrib import func_name +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import http +from ddtrace.ext import sql as sqlx +from ddtrace.internal import core +from ddtrace.internal.compat import Iterable +from ddtrace.internal.compat import maybe_stringify +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.constants import STATUS_403_TYPE_AUTO +from ddtrace.internal.core.event_hub import ResultType +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils import http as http_utils +from ddtrace.internal.utils.formats import asbool +from ddtrace.settings.asm import config as asm_config +from ddtrace.settings.integration import IntegrationConfig +from ddtrace.vendor import wrapt +from ddtrace.vendor.wrapt.importer import when_imported + +from ...appsec._utils import _UserInfoRetriever +from ...internal.utils import get_argument_value +from .. import trace_utils +from ..trace_utils import _get_request_header_user_agent + + +log = get_logger(__name__) + +config._add( + "django", + dict( + _default_service=schematize_service_name("django"), + cache_service_name=os.getenv("DD_DJANGO_CACHE_SERVICE_NAME", default="django"), + database_service_name_prefix=os.getenv("DD_DJANGO_DATABASE_SERVICE_NAME_PREFIX", default=""), + database_service_name=os.getenv("DD_DJANGO_DATABASE_SERVICE_NAME", default=""), + trace_fetch_methods=asbool(os.getenv("DD_DJANGO_TRACE_FETCH_METHODS", default=False)), + distributed_tracing_enabled=True, + instrument_middleware=asbool(os.getenv("DD_DJANGO_INSTRUMENT_MIDDLEWARE", default=True)), + instrument_templates=asbool(os.getenv("DD_DJANGO_INSTRUMENT_TEMPLATES", default=True)), + instrument_databases=asbool(os.getenv("DD_DJANGO_INSTRUMENT_DATABASES", default=True)), + instrument_caches=asbool(os.getenv("DD_DJANGO_INSTRUMENT_CACHES", default=True)), + analytics_enabled=None, # None allows the value to be overridden by the global config + analytics_sample_rate=None, + trace_query_string=None, # Default to global config + include_user_name=asbool(os.getenv("DD_DJANGO_INCLUDE_USER_NAME", default=True)), + use_handler_with_url_name_resource_format=asbool( + os.getenv("DD_DJANGO_USE_HANDLER_WITH_URL_NAME_RESOURCE_FORMAT", default=False) + ), + use_handler_resource_format=asbool(os.getenv("DD_DJANGO_USE_HANDLER_RESOURCE_FORMAT", default=False)), + use_legacy_resource_format=asbool(os.getenv("DD_DJANGO_USE_LEGACY_RESOURCE_FORMAT", default=False)), + ), +) + +_NotSet = object() +psycopg_cursor_cls = Psycopg2TracedCursor = Psycopg3TracedCursor = _NotSet + + +def get_version(): + # type: () -> str + import django + + return django.__version__ + + +def patch_conn(django, conn): + global psycopg_cursor_cls, Psycopg2TracedCursor, Psycopg3TracedCursor + + if psycopg_cursor_cls is _NotSet: + try: + from psycopg.cursor import Cursor as psycopg_cursor_cls 
+ + from ddtrace.contrib.psycopg.cursor import Psycopg3TracedCursor + except ImportError: + Psycopg3TracedCursor = None + try: + from psycopg2._psycopg import cursor as psycopg_cursor_cls + + from ddtrace.contrib.psycopg.cursor import Psycopg2TracedCursor + except ImportError: + psycopg_cursor_cls = None + Psycopg2TracedCursor = None + + def cursor(django, pin, func, instance, args, kwargs): + alias = getattr(conn, "alias", "default") + + if config.django.database_service_name: + service = config.django.database_service_name + else: + database_prefix = config.django.database_service_name_prefix + service = "{}{}{}".format(database_prefix, alias, "db") + service = schematize_service_name(service) + + vendor = getattr(conn, "vendor", "db") + prefix = sqlx.normalize_vendor(vendor) + tags = { + "django.db.vendor": vendor, + "django.db.alias": alias, + } + pin = Pin(service, tags=tags, tracer=pin.tracer) + cursor = func(*args, **kwargs) + traced_cursor_cls = dbapi.TracedCursor + try: + if cursor.cursor.__class__.__module__.startswith("psycopg2."): + # Import lazily to avoid importing psycopg2 if not already imported. + from ddtrace.contrib.psycopg.cursor import Psycopg2TracedCursor + + traced_cursor_cls = Psycopg2TracedCursor + elif type(cursor.cursor).__name__ == "Psycopg3TracedCursor": + # Import lazily to avoid importing psycopg if not already imported. + from ddtrace.contrib.psycopg.cursor import Psycopg3TracedCursor + + traced_cursor_cls = Psycopg3TracedCursor + except AttributeError: + pass + + # Each db alias will need its own config for dbapi + cfg = IntegrationConfig( + config.django.global_config, # global_config needed for analytics sample rate + "{}-{}".format("django", alias), # name not used but set anyway + _default_service=config.django._default_service, + _dbapi_span_name_prefix=prefix, + trace_fetch_methods=config.django.trace_fetch_methods, + analytics_enabled=config.django.analytics_enabled, + analytics_sample_rate=config.django.analytics_sample_rate, + ) + return traced_cursor_cls(cursor, pin, cfg) + + if not isinstance(conn.cursor, wrapt.ObjectProxy): + conn.cursor = wrapt.FunctionWrapper(conn.cursor, trace_utils.with_traced_module(cursor)(django)) + + +def instrument_dbs(django): + def get_connection(wrapped, instance, args, kwargs): + conn = wrapped(*args, **kwargs) + try: + patch_conn(django, conn) + except Exception: + log.debug("Error instrumenting database connection %r", conn, exc_info=True) + return conn + + if not isinstance(django.db.utils.ConnectionHandler.__getitem__, wrapt.ObjectProxy): + django.db.utils.ConnectionHandler.__getitem__ = wrapt.FunctionWrapper( + django.db.utils.ConnectionHandler.__getitem__, get_connection + ) + + +@trace_utils.with_traced_module +def traced_cache(django, pin, func, instance, args, kwargs): + from . 
import utils + + if not config.django.instrument_caches: + return func(*args, **kwargs) + + cache_backend = "{}.{}".format(instance.__module__, instance.__class__.__name__) + tags = {COMPONENT: config.django.integration_name, "django.cache.backend": cache_backend} + if args: + keys = utils.quantize_key_values(args[0]) + tags["django.cache.key"] = keys + + with core.context_with_data( + "django.cache", + span_name="django.cache", + span_type=SpanTypes.CACHE, + service=config.django.cache_service_name, + resource=utils.resource_from_cache_prefix(func_name(func), instance), + tags=tags, + pin=pin, + ) as ctx, ctx["call"]: + result = func(*args, **kwargs) + rowcount = 0 + if func.__name__ == "get_many": + rowcount = sum(1 for doc in result if doc) if result and isinstance(result, Iterable) else 0 + elif func.__name__ == "get": + try: + # check also for special case for Django~3.2 that returns an empty Sentinel + # object for empty results + # also check if result is Iterable first since some iterables return ambiguous + # truth results with ``==`` + if result is None or ( + not isinstance(result, Iterable) and result == getattr(instance, "_missing_key", None) + ): + rowcount = 0 + else: + rowcount = 1 + except (AttributeError, NotImplementedError, ValueError): + pass + core.dispatch("django.cache", (ctx, rowcount)) + return result + + +def instrument_caches(django): + cache_backends = set([cache["BACKEND"] for cache in django.conf.settings.CACHES.values()]) + for cache_path in cache_backends: + split = cache_path.split(".") + cache_module = ".".join(split[:-1]) + cache_cls = split[-1] + for method in ["get", "set", "add", "delete", "incr", "decr", "get_many", "set_many", "delete_many"]: + try: + cls = django.utils.module_loading.import_string(cache_path) + # DEV: this can be removed when we add an idempotent `wrap` + if not trace_utils.iswrapped(cls, method): + trace_utils.wrap(cache_module, "{0}.{1}".format(cache_cls, method), traced_cache(django)) + except Exception: + log.debug("Error instrumenting cache %r", cache_path, exc_info=True) + + +@trace_utils.with_traced_module +def traced_populate(django, pin, func, instance, args, kwargs): + """django.apps.registry.Apps.populate is the method used to populate all the apps. + + It is used as a hook to install instrumentation for 3rd party apps (like DRF). + + `populate()` works in 3 phases: + + - Phase 1: Initializes the app configs and imports the app modules. + - Phase 2: Imports models modules for each app. + - Phase 3: runs ready() of each app config. + + If all 3 phases successfully run then `instance.ready` will be `True`. 
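A worked example of the per-alias database service naming implemented in `patch_conn` above, with hypothetical values::

    database_prefix = "dalmatian-"    # DD_DJANGO_DATABASE_SERVICE_NAME_PREFIX
    alias = "default"                 # the Django DATABASES alias
    service = "{}{}{}".format(database_prefix, alias, "db")
    assert service == "dalmatian-defaultdb"
    # DD_DJANGO_DATABASE_SERVICE_NAME, if set, takes precedence over this derived name.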
+ """ + + # populate() can be called multiple times, we don't want to instrument more than once + if instance.ready: + log.debug("Django instrumentation already installed, skipping.") + return func(*args, **kwargs) + + ret = func(*args, **kwargs) + + if not instance.ready: + log.debug("populate() failed skipping instrumentation.") + return ret + + settings = django.conf.settings + + # Instrument databases + if config.django.instrument_databases: + try: + instrument_dbs(django) + except Exception: + log.debug("Error instrumenting Django database connections", exc_info=True) + + # Instrument caches + if config.django.instrument_caches: + try: + instrument_caches(django) + except Exception: + log.debug("Error instrumenting Django caches", exc_info=True) + + # Instrument Django Rest Framework if it's installed + INSTALLED_APPS = getattr(settings, "INSTALLED_APPS", []) + + if "rest_framework" in INSTALLED_APPS: + try: + from .restframework import patch_restframework + + patch_restframework(django) + except Exception: + log.debug("Error patching rest_framework", exc_info=True) + + return ret + + +def traced_func(django, name, resource=None, ignored_excs=None): + def wrapped(django, pin, func, instance, args, kwargs): + tags = {COMPONENT: config.django.integration_name} + with core.context_with_data( + "django.func.wrapped", span_name=name, resource=resource, tags=tags, pin=pin + ) as ctx, ctx["call"]: + core.dispatch( + "django.func.wrapped", + ( + args, + kwargs, + django.core.handlers.wsgi.WSGIRequest if hasattr(django.core.handlers, "wsgi") else object, + ctx, + ignored_excs, + ), + ) + return func(*args, **kwargs) + + return trace_utils.with_traced_module(wrapped)(django) + + +def traced_process_exception(django, name, resource=None): + def wrapped(django, pin, func, instance, args, kwargs): + tags = {COMPONENT: config.django.integration_name} + with core.context_with_data( + "django.process_exception", span_name=name, resource=resource, tags=tags, pin=pin + ) as ctx, ctx["call"]: + resp = func(*args, **kwargs) + core.dispatch( + "django.process_exception", (ctx, hasattr(resp, "status_code") and 500 <= resp.status_code < 600) + ) + return resp + + return trace_utils.with_traced_module(wrapped)(django) + + +@trace_utils.with_traced_module +def traced_load_middleware(django, pin, func, instance, args, kwargs): + """ + Patches django.core.handlers.base.BaseHandler.load_middleware to instrument all + middlewares. + """ + settings_middleware = [] + # Gather all the middleware + if getattr(django.conf.settings, "MIDDLEWARE", None): + settings_middleware += django.conf.settings.MIDDLEWARE + if getattr(django.conf.settings, "MIDDLEWARE_CLASSES", None): + settings_middleware += django.conf.settings.MIDDLEWARE_CLASSES + + # Iterate over each middleware provided in settings.py + # Each middleware can either be a function or a class + for mw_path in settings_middleware: + mw = django.utils.module_loading.import_string(mw_path) + + # Instrument function-based middleware + if isfunction(mw) and not trace_utils.iswrapped(mw): + split = mw_path.split(".") + if len(split) < 2: + continue + base = ".".join(split[:-1]) + attr = split[-1] + + # DEV: We need to have a closure over `mw_path` for the resource name or else + # all function based middleware will share the same resource name + def _wrapper(resource): + # Function-based middleware is a factory which returns a handler function for + # requests. + # So instead of tracing the factory, we want to trace its returned value. 
+ # We wrap the factory to return a traced version of the handler function. + def wrapped_factory(func, instance, args, kwargs): + # r is the middleware handler function returned from the factory + r = func(*args, **kwargs) + if r: + return wrapt.FunctionWrapper( + r, + traced_func(django, "django.middleware", resource=resource), + ) + # If r is an empty middleware function (i.e. returns None), don't wrap since + # NoneType cannot be called + else: + return r + + return wrapped_factory + + trace_utils.wrap(base, attr, _wrapper(resource=mw_path)) + + # Instrument class-based middleware + elif isclass(mw): + for hook in [ + "process_request", + "process_response", + "process_view", + "process_template_response", + "__call__", + ]: + if hasattr(mw, hook) and not trace_utils.iswrapped(mw, hook): + trace_utils.wrap( + mw, hook, traced_func(django, "django.middleware", resource=mw_path + ".{0}".format(hook)) + ) + # Do a little extra for `process_exception` + if hasattr(mw, "process_exception") and not trace_utils.iswrapped(mw, "process_exception"): + res = mw_path + ".{0}".format("process_exception") + trace_utils.wrap( + mw, "process_exception", traced_process_exception(django, "django.middleware", resource=res) + ) + + return func(*args, **kwargs) + + +def _gather_block_metadata(request, request_headers, ctx: core.ExecutionContext): + from . import utils + + try: + metadata = {http.STATUS_CODE: "403", http.METHOD: request.method} + url = utils.get_request_uri(request) + query = request.META.get("QUERY_STRING", "") + if query and config.django.trace_query_string: + metadata[http.QUERY_STRING] = query + user_agent = _get_request_header_user_agent(request_headers) + if user_agent: + metadata[http.USER_AGENT] = user_agent + except Exception as e: + log.warning("Could not gather some metadata on blocked request: %s", str(e)) # noqa: G200 + core.dispatch("django.block_request_callback", (ctx, metadata, config.django, url, query)) + + +def _block_request_callable(request, request_headers, ctx: core.ExecutionContext): + # This is used by user-id blocking to block responses. It could be called + # at any point so it's a callable stored in the ASM context. + from django.core.exceptions import PermissionDenied + + core.root.set_item(HTTP_REQUEST_BLOCKED, STATUS_403_TYPE_AUTO) + _gather_block_metadata(request, request_headers, ctx) + raise PermissionDenied() + + +@trace_utils.with_traced_module +def traced_get_response(django, pin, func, instance, args, kwargs): + """Trace django.core.handlers.base.BaseHandler.get_response() (or other implementations). + + This is the main entry point for requests. + + Django requests are handled by a Handler.get_response method (inherited from base.BaseHandler). + This method invokes the middleware chain and returns the response generated by the chain. + """ + from ddtrace.contrib.django.compat import get_resolver + + from . 
import utils + + request = get_argument_value(args, kwargs, 0, "request") + if request is None: + return func(*args, **kwargs) + + request_headers = utils._get_request_headers(request) + + with core.context_with_data( + "django.traced_get_response", + remote_addr=request.META.get("REMOTE_ADDR"), + headers=request_headers, + headers_case_sensitive=django.VERSION < (2, 2), + span_name=schematize_url_operation("django.request", protocol="http", direction=SpanDirection.INBOUND), + resource=utils.REQUEST_DEFAULT_RESOURCE, + service=trace_utils.int_service(pin, config.django), + span_type=SpanTypes.WEB, + tags={COMPONENT: config.django.integration_name, SPAN_KIND: SpanKind.SERVER}, + distributed_headers_config=config.django, + distributed_headers=request_headers, + pin=pin, + ) as ctx, ctx.get_item("call"): + core.dispatch( + "django.traced_get_response.pre", + ( + functools.partial(_block_request_callable, request, request_headers, ctx), + ctx, + request, + utils._before_request_tags, + ), + ) + + response = None + + def blocked_response(): + from django.http import HttpResponse + + block_config = core.get_item(HTTP_REQUEST_BLOCKED) + desired_type = block_config.get("type", "auto") + status = block_config.get("status_code", 403) + if desired_type == "none": + response = HttpResponse("", status=status) + location = block_config.get("location", "") + if location: + response["location"] = location + else: + if desired_type == "auto": + ctype = "text/html" if "text/html" in request_headers.get("Accept", "").lower() else "text/json" + else: + ctype = "text/" + desired_type + content = http_utils._get_blocked_template(ctype) + response = HttpResponse(content, content_type=ctype, status=status) + response.content = content + utils._after_request_tags(pin, ctx["call"], request, response) + return response + + try: + if core.get_item(HTTP_REQUEST_BLOCKED): + response = blocked_response() + return response + + query = request.META.get("QUERY_STRING", "") + uri = utils.get_request_uri(request) + if uri is not None and query: + uri += "?" 
+ query + resolver = get_resolver(getattr(request, "urlconf", None)) + if resolver: + try: + path = resolver.resolve(request.path_info).kwargs + log.debug("resolver.pattern %s", path) + except Exception: + path = None + + core.dispatch("django.start_response", (ctx, request, utils._extract_body, query, uri, path)) + core.dispatch("django.start_response.post", ("Django",)) + + if core.get_item(HTTP_REQUEST_BLOCKED): + response = blocked_response() + return response + + response = func(*args, **kwargs) + + if core.get_item(HTTP_REQUEST_BLOCKED): + response = blocked_response() + return response + + return response + finally: + core.dispatch("django.finalize_response.pre", (ctx, utils._after_request_tags, request, response)) + if not core.get_item(HTTP_REQUEST_BLOCKED): + core.dispatch("django.finalize_response", ("Django",)) + if core.get_item(HTTP_REQUEST_BLOCKED): + response = blocked_response() + return response # noqa: B012 + + +@trace_utils.with_traced_module +def traced_template_render(django, pin, wrapped, instance, args, kwargs): + # DEV: Check here in case this setting is configured after a template has been instrumented + if not config.django.instrument_templates: + return wrapped(*args, **kwargs) + + template_name = maybe_stringify(getattr(instance, "name", None)) + if template_name: + resource = template_name + else: + resource = "{0}.{1}".format(func_name(instance), wrapped.__name__) + + tags = {COMPONENT: config.django.integration_name} + if template_name: + tags["django.template.name"] = template_name + engine = getattr(instance, "engine", None) + if engine: + tags["django.template.engine.class"] = func_name(engine) + + with core.context_with_data( + "django.template.render", + span_name="django.template.render", + resource=resource, + span_type=http.TEMPLATE, + tags=tags, + pin=pin, + ) as ctx, ctx["call"]: + return wrapped(*args, **kwargs) + + +def instrument_view(django, view): + """ + Helper to wrap Django views. + + We want to wrap all lifecycle/http method functions for every class in the MRO for this view + """ + if hasattr(view, "__mro__"): + for cls in reversed(getmro(view)): + _instrument_view(django, cls) + + return _instrument_view(django, view) + + +def _instrument_view(django, view): + """Helper to wrap Django views.""" + from . 
import utils + + # All views should be callable, double check before doing anything + if not callable(view): + return view + + # Patch view HTTP methods and lifecycle methods + http_method_names = getattr(view, "http_method_names", ("get", "delete", "post", "options", "head")) + lifecycle_methods = ("setup", "dispatch", "http_method_not_allowed") + for name in list(http_method_names) + list(lifecycle_methods): + try: + func = getattr(view, name, None) + if not func or isinstance(func, wrapt.ObjectProxy): + continue + + resource = "{0}.{1}".format(func_name(view), name) + op_name = "django.view.{0}".format(name) + trace_utils.wrap(view, name, traced_func(django, name=op_name, resource=resource)) + except Exception: + log.debug("Failed to instrument Django view %r function %s", view, name, exc_info=True) + + # Patch response methods + response_cls = getattr(view, "response_class", None) + if response_cls: + methods = ("render",) + for name in methods: + try: + func = getattr(response_cls, name, None) + # Do not wrap if the method does not exist or is already wrapped + if not func or isinstance(func, wrapt.ObjectProxy): + continue + + resource = "{0}.{1}".format(func_name(response_cls), name) + op_name = "django.response.{0}".format(name) + trace_utils.wrap(response_cls, name, traced_func(django, name=op_name, resource=resource)) + except Exception: + log.debug("Failed to instrument Django response %r function %s", response_cls, name, exc_info=True) + + # If the view itself is not wrapped, wrap it + if not isinstance(view, wrapt.ObjectProxy): + view = utils.DjangoViewProxy( + view, traced_func(django, "django.view", resource=func_name(view), ignored_excs=[django.http.Http404]) + ) + return view + + +@trace_utils.with_traced_module +def traced_urls_path(django, pin, wrapped, instance, args, kwargs): + """Wrapper for url path helpers to ensure all views registered as urls are traced.""" + try: + if "view" in kwargs: + kwargs["view"] = instrument_view(django, kwargs["view"]) + elif len(args) >= 2: + args = list(args) + args[1] = instrument_view(django, args[1]) + args = tuple(args) + except Exception: + log.debug("Failed to instrument Django url path %r %r", args, kwargs, exc_info=True) + return wrapped(*args, **kwargs) + + +@trace_utils.with_traced_module +def traced_as_view(django, pin, func, instance, args, kwargs): + """ + Wrapper for django's View.as_view class method + """ + try: + instrument_view(django, instance) + except Exception: + log.debug("Failed to instrument Django view %r", instance, exc_info=True) + view = func(*args, **kwargs) + return wrapt.FunctionWrapper(view, traced_func(django, "django.view", resource=func_name(instance))) + + +@trace_utils.with_traced_module +def traced_get_asgi_application(django, pin, func, instance, args, kwargs): + from ddtrace.contrib.asgi import TraceMiddleware + + def django_asgi_modifier(span, scope): + span.name = schematize_url_operation("django.request", protocol="http", direction=SpanDirection.INBOUND) + + return TraceMiddleware(func(*args, **kwargs), integration_config=config.django, span_modifier=django_asgi_modifier) + + +class _DjangoUserInfoRetriever(_UserInfoRetriever): + def get_username(self): + if hasattr(self.user, "USERNAME_FIELD") and not asm_config._user_model_name_field: + user_type = type(self.user) + return getattr(self.user, user_type.USERNAME_FIELD, None) + + return super(_DjangoUserInfoRetriever, self).get_username() + + def get_name(self): + if not asm_config._user_model_name_field: + if hasattr(self.user, 
"get_full_name"): + try: + return self.user.get_full_name() + except Exception: + log.debug("User model get_full_name member produced an exception: ", exc_info=True) + + if hasattr(self.user, "first_name") and hasattr(self.user, "last_name"): + return "%s %s" % (self.user.first_name, self.user.last_name) + + return super(_DjangoUserInfoRetriever, self).get_name() + + def get_user_email(self): + if hasattr(self.user, "EMAIL_FIELD") and not asm_config._user_model_name_field: + user_type = type(self.user) + return getattr(self.user, user_type.EMAIL_FIELD, None) + + return super(_DjangoUserInfoRetriever, self).get_user_email() + + +@trace_utils.with_traced_module +def traced_login(django, pin, func, instance, args, kwargs): + func(*args, **kwargs) + + try: + mode = asm_config._automatic_login_events_mode + request = get_argument_value(args, kwargs, 0, "request") + user = get_argument_value(args, kwargs, 1, "user") + + if mode == "disabled": + return + + core.dispatch( + "django.login", + ( + pin, + request, + user, + mode, + _DjangoUserInfoRetriever(user), + ), + ) + except Exception: + log.debug("Error while trying to trace Django login", exc_info=True) + + +@trace_utils.with_traced_module +def traced_authenticate(django, pin, func, instance, args, kwargs): + result_user = func(*args, **kwargs) + try: + mode = asm_config._automatic_login_events_mode + if mode == "disabled": + return result_user + + result = core.dispatch_with_results( + "django.auth", + ( + result_user, + mode, + kwargs, + pin, + _DjangoUserInfoRetriever(result_user), + ), + ).user + if result and result.value[0]: + return result.value[1] + + except Exception: + log.debug("Error while trying to trace Django authenticate", exc_info=True) + + return result_user + + +def unwrap_views(func, instance, args, kwargs): + """ + Django channels uses path() and re_path() to route asgi applications. This broke our initial + assumption that + django path/re_path/url functions only accept views. Here we unwrap ddtrace view + instrumentation from asgi + applications. + + Ex. ``channels.routing.URLRouter([path('', get_asgi_application())])`` + On startup ddtrace.contrib.django.path.instrument_view() will wrap get_asgi_application in a + DjangoViewProxy. + Since get_asgi_application is not a django view callback this function will unwrap it. + """ + from . 
import utils + + routes = get_argument_value(args, kwargs, 0, "routes") + for route in routes: + if isinstance(route.callback, utils.DjangoViewProxy): + route.callback = route.callback.__wrapped__ + + return func(*args, **kwargs) + + +def _patch(django): + Pin().onto(django) + + when_imported("django.apps.registry")(lambda m: trace_utils.wrap(m, "Apps.populate", traced_populate(django))) + + if config.django.instrument_middleware: + when_imported("django.core.handlers.base")( + lambda m: trace_utils.wrap(m, "BaseHandler.load_middleware", traced_load_middleware(django)) + ) + + when_imported("django.core.handlers.wsgi")(lambda m: trace_utils.wrap(m, "WSGIRequest.__init__", wrap_wsgi_environ)) + core.dispatch("django.patch", ()) + + @when_imported("django.core.handlers.base") + def _(m): + import django + + trace_utils.wrap(m, "BaseHandler.get_response", traced_get_response(django)) + if django.VERSION >= (3, 1): + # Have to inline this import as the module contains syntax incompatible with Python 3.5 and below + from ._asgi import traced_get_response_async + + trace_utils.wrap(m, "BaseHandler.get_response_async", traced_get_response_async(django)) + + @when_imported("django.contrib.auth") + def _(m): + trace_utils.wrap(m, "login", traced_login(django)) + trace_utils.wrap(m, "authenticate", traced_authenticate(django)) + + # Only wrap get_asgi_application if get_response_async exists. Otherwise we will effectively double-patch + # because get_response and get_asgi_application will be used. We must rely on the version instead of coalescing + # with the previous patching hook because of circular imports within `django.core.asgi`. + if django.VERSION >= (3, 1): + when_imported("django.core.asgi")( + lambda m: trace_utils.wrap(m, "get_asgi_application", traced_get_asgi_application(django)) + ) + + if config.django.instrument_templates: + when_imported("django.template.base")( + lambda m: trace_utils.wrap(m, "Template.render", traced_template_render(django)) + ) + + if django.VERSION < (4, 0, 0): + when_imported("django.conf.urls")(lambda m: trace_utils.wrap(m, "url", traced_urls_path(django))) + + if django.VERSION >= (2, 0, 0): + + @when_imported("django.urls") + def _(m): + trace_utils.wrap(m, "path", traced_urls_path(django)) + trace_utils.wrap(m, "re_path", traced_urls_path(django)) + + when_imported("django.views.generic.base")(lambda m: trace_utils.wrap(m, "View.as_view", traced_as_view(django))) + + @when_imported("channels.routing") + def _(m): + import channels + + channels_version = tuple(int(x) for x in channels.__version__.split(".")) + if channels_version >= (3, 0): + # ASGI3 is only supported in channels v3.0+ + trace_utils.wrap(m, "URLRouter.__init__", unwrap_views) + + +def wrap_wsgi_environ(wrapped, _instance, args, kwargs): + result = core.dispatch_with_results("django.wsgi_environ", (wrapped, _instance, args, kwargs)).wrapped_result + # if the callback is registered and runs, return the result + if result: + return result.value + # if the callback is not registered, return the original result + elif result.response_type == ResultType.RESULT_UNDEFINED: + return wrapped(*args, **kwargs) + # if an exception occurs, raise it. It should never happen. 
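The deferred-patching pattern used by `_patch()` above can be sketched in isolation as follows (the target module here is chosen arbitrarily); the callback fires only when, and if, the named module is first imported, which is why patching can be installed before Django itself has been imported::

    from ddtrace.vendor.wrapt.importer import when_imported

    @when_imported("django.contrib.staticfiles")   # arbitrary example module
    def _on_import(module):
        print("instrumentation hook called for", module.__name__)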
+ elif result.exception: + raise result.exception + + +def patch(): + import django + + if getattr(django, "_datadog_patch", False): + return + _patch(django) + + django._datadog_patch = True + + +def _unpatch(django): + trace_utils.unwrap(django.apps.registry.Apps, "populate") + trace_utils.unwrap(django.core.handlers.base.BaseHandler, "load_middleware") + trace_utils.unwrap(django.core.handlers.base.BaseHandler, "get_response") + trace_utils.unwrap(django.core.handlers.base.BaseHandler, "get_response_async") + trace_utils.unwrap(django.template.base.Template, "render") + trace_utils.unwrap(django.conf.urls.static, "static") + trace_utils.unwrap(django.conf.urls, "url") + trace_utils.unwrap(django.contrib.auth.login, "login") + trace_utils.unwrap(django.contrib.auth.authenticate, "authenticate") + if django.VERSION >= (2, 0, 0): + trace_utils.unwrap(django.urls, "path") + trace_utils.unwrap(django.urls, "re_path") + trace_utils.unwrap(django.views.generic.base.View, "as_view") + for conn in django.db.connections.all(): + trace_utils.unwrap(conn, "cursor") + trace_utils.unwrap(django.db.utils.ConnectionHandler, "__getitem__") + + +def unpatch(): + import django + + if not getattr(django, "_datadog_patch", False): + return + + _unpatch(django) + + django._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/restframework.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/restframework.py new file mode 100644 index 0000000..ed0c789 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/restframework.py @@ -0,0 +1,33 @@ +import rest_framework.views + +from ddtrace.vendor.wrapt import wrap_function_wrapper as wrap + +from ..trace_utils import iswrapped +from ..trace_utils import with_traced_module + + +@with_traced_module +def _traced_handle_exception(django, pin, wrapped, instance, args, kwargs): + """Sets the error message, error type and exception stack trace to the current span + before calling the original exception handler. + """ + span = pin.tracer.current_span() + + if span is not None: + span.set_traceback() + + return wrapped(*args, **kwargs) + + +def patch_restframework(django): + """Patches rest_framework app. + + To trace exceptions occurring during view processing we currently use a TraceExceptionMiddleware. + However the rest_framework handles exceptions before they come to our middleware. + So we need to manually patch the rest_framework exception handler + to set the exception stack trace in the current span. 
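+    For example, an exception raised inside a DRF view is turned into an error response by
+    ``APIView.handle_exception``; wrapping it lets the traceback be recorded on the active
+    request span before that conversion happens.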
+ """ + + # trace the handle_exception method + if not iswrapped(rest_framework.views.APIView, "handle_exception"): + wrap("rest_framework.views", "APIView.handle_exception", _traced_handle_exception(django)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/utils.py new file mode 100644 index 0000000..71571ea --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/django/utils.py @@ -0,0 +1,421 @@ +import json +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Mapping # noqa:F401 +from typing import Text # noqa:F401 +from typing import Union # noqa:F401 + +import django +from django.utils.functional import SimpleLazyObject +import xmltodict + +from ddtrace import Span +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import func_name +from ddtrace.ext import SpanTypes +from ddtrace.ext import user as _user +from ddtrace.internal import compat +from ddtrace.internal.utils.http import parse_form_multipart +from ddtrace.internal.utils.http import parse_form_params +from ddtrace.propagation._utils import from_wsgi_header + +from ...internal import core +from ...internal.logger import get_logger +from ...internal.utils.formats import stringify_cache_args +from ...vendor.wrapt import FunctionWrapper +from .. import trace_utils +from .compat import get_resolver +from .compat import user_is_authenticated + + +try: + from json import JSONDecodeError +except ImportError: + # handling python 2.X import error + JSONDecodeError = ValueError # type: ignore + + +log = get_logger(__name__) + +if django.VERSION < (1, 10, 0): + Resolver404 = django.core.urlresolvers.Resolver404 +else: + Resolver404 = django.urls.exceptions.Resolver404 + + +DJANGO22 = django.VERSION >= (2, 2, 0) + +REQUEST_DEFAULT_RESOURCE = "__django_request" +_BODY_METHODS = {"POST", "PUT", "DELETE", "PATCH"} + +_quantize_text = Union[Text, bytes] +_quantize_param = Union[_quantize_text, List[_quantize_text], Dict[_quantize_text, Any], Any] + + +def resource_from_cache_prefix(resource, cache): + """ + Combine the resource name with the cache prefix (if any) + """ + if getattr(cache, "key_prefix", None): + name = " ".join((resource, cache.key_prefix)) + else: + name = resource + + # enforce lowercase to make the output nicer to read + return name.lower() + + +def quantize_key_values(keys): + # type: (_quantize_param) -> Text + """ + Used for Django cache key normalization. + + If a dict is provided we return a list of keys as text. + + If a list or tuple is provided we convert each element to text. + + If text is provided we convert to text. 
+ """ + args = [] # type: List[Union[Text, bytes, Any]] + + # Normalize input values into a List[Text, bytes] + if isinstance(keys, dict): + args = list(keys.keys()) + elif isinstance(keys, (list, tuple)): + args = keys + else: + args = [keys] + + return stringify_cache_args(args) + + +def get_django_2_route(request, resolver_match): + # Try to use `resolver_match.route` if available + # Otherwise, look for `resolver.pattern.regex.pattern` + route = resolver_match.route + if route: + return route + + resolver = get_resolver(getattr(request, "urlconf", None)) + if resolver: + try: + return resolver.pattern.regex.pattern + except AttributeError: + pass + + return None + + +def set_tag_array(span, prefix, value): + """Helper to set a span tag as a single value or an array""" + if not value: + return + + if len(value) == 1: + if value[0]: + span.set_tag_str(prefix, value[0]) + else: + for i, v in enumerate(value, start=0): + if v: + span.set_tag_str("".join((prefix, ".", str(i))), v) + + +def get_request_uri(request): + """ + Helper to rebuild the original request url + + query string or fragments are not included. + """ + # DEV: Use django.http.request.HttpRequest._get_raw_host() when available + # otherwise back-off to PEP 333 as done in django 1.8.x + if hasattr(request, "_get_raw_host"): + host = request._get_raw_host() + else: + try: + # Try to build host how Django would have + # https://github.com/django/django/blob/e8d0d2a5efc8012dcc8bf1809dec065ebde64c81/django/http/request.py#L85-L102 + if "HTTP_HOST" in request.META: + host = request.META["HTTP_HOST"] + else: + host = request.META["SERVER_NAME"] + port = str(request.META["SERVER_PORT"]) + if port != ("443" if request.is_secure() else "80"): + host = "".join((host, ":", port)) + except Exception: + # This really shouldn't ever happen, but lets guard here just in case + log.debug("Failed to build Django request host", exc_info=True) + host = "unknown" + + # If request scheme is missing, possible in case where wsgi.url_scheme + # environ has not been set, return None and skip providing a uri + if request.scheme is None: + return + + # Build request url from the information available + # DEV: We are explicitly omitting query strings since they may contain sensitive information + urlparts = {"scheme": request.scheme, "netloc": host, "path": request.path} + + # If any url part is a SimpleLazyObject, use its __class__ property to cast + # str/bytes and allow for _setup() to execute + for k, v in urlparts.items(): + if isinstance(v, SimpleLazyObject): + if issubclass(v.__class__, str): + v = str(v) + elif issubclass(v.__class__, bytes): + v = bytes(v) + else: + # lazy object that is not str or bytes should not happen here + # but if it does skip providing a uri + log.debug( + "Skipped building Django request uri, %s is SimpleLazyObject wrapping a %s class", + k, + v.__class__.__name__, + ) + return None + urlparts[k] = compat.ensure_text(v) + + return "".join((urlparts["scheme"], "://", urlparts["netloc"], urlparts["path"])) + + +def _set_resolver_tags(pin, span, request): + # Default to just the HTTP method when we cannot determine a reasonable resource + resource = request.method + + try: + # Get resolver match result and build resource name pieces + resolver_match = request.resolver_match + if not resolver_match: + # The request quite likely failed (e.g. 404) so we do the resolution anyway. 
+ resolver = get_resolver(getattr(request, "urlconf", None)) + resolver_match = resolver.resolve(request.path_info) + + if hasattr(resolver_match[0], "view_class"): + # In django==4.0, view.__name__ defaults to <module>.views.view + # Accessing view.view_class is required for django>4.0 to get the name of underlying view + handler = func_name(resolver_match[0].view_class) + else: + handler = func_name(resolver_match[0]) + + route = None + # In Django >= 2.2.0 we can access the original route or regex pattern + # TODO: Validate if `resolver.pattern.regex.pattern` is available on django<2.2 + if DJANGO22: + # Determine the resolver and resource name for this request + route = get_django_2_route(request, resolver_match) + if route: + span.set_tag_str("http.route", route) + + if config.django.use_handler_resource_format: + resource = " ".join((request.method, handler)) + elif config.django.use_legacy_resource_format: + resource = handler + else: + if route: + resource = " ".join((request.method, route)) + else: + if config.django.use_handler_with_url_name_resource_format: + # Append url name in order to distinguish different routes of the same ViewSet + url_name = resolver_match.url_name + if url_name: + handler = ".".join([handler, url_name]) + + resource = " ".join((request.method, handler)) + + span.set_tag_str("django.view", resolver_match.view_name) + set_tag_array(span, "django.namespace", resolver_match.namespaces) + + # Django >= 2.0.0 + if hasattr(resolver_match, "app_names"): + set_tag_array(span, "django.app", resolver_match.app_names) + + except Resolver404: + # Normalize all 404 requests into a single resource name + # DEV: This is for potential cardinality issues + resource = " ".join((request.method, "404")) + except Exception: + log.debug( + "Failed to resolve request path %r with path info %r", + request, + getattr(request, "path_info", "not-set"), + exc_info=True, + ) + finally: + # Only update the resource name if it was not explicitly set + # by anyone during the request lifetime + if span.resource == REQUEST_DEFAULT_RESOURCE: + span.resource = resource + + +def _before_request_tags(pin, span, request): + # DEV: Do not set `span.resource` here, leave it as `None` + # until `_set_resolver_tags` so we can know if the user + # has explicitly set it during the request lifetime + span.service = trace_utils.int_service(pin, config.django) + span.span_type = SpanTypes.WEB + span._metrics[SPAN_MEASURED_KEY] = 1 + + analytics_sr = config.django.get_analytics_sample_rate(use_global_config=True) + if analytics_sr is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, analytics_sr) + + span.set_tag_str("django.request.class", func_name(request)) + + +def _extract_body(request): + # DEV: Do not use request.POST or request.data, this could prevent a custom parser from being used afterwards + if request.method in _BODY_METHODS: + req_body = None + content_type = request.content_type if hasattr(request, "content_type") else request.META.get("CONTENT_TYPE") + headers = core.dispatch_with_results("django.extract_body").headers.value + try: + if content_type == "application/x-www-form-urlencoded": + req_body = parse_form_params(request.body.decode("UTF-8", errors="ignore")) + elif content_type == "multipart/form-data": + req_body = parse_form_multipart(request.body.decode("UTF-8", errors="ignore"), headers) + elif content_type in ("application/json", "text/json"): + req_body = json.loads(request.body.decode("UTF-8", errors="ignore")) + elif content_type in ("application/xml", "text/xml"): + req_body = 
xmltodict.parse(request.body.decode("UTF-8", errors="ignore")) + else: # text/plain, others: don't use them + req_body = None + except BaseException: + log.debug("Failed to parse request body", exc_info=True) + return req_body + + +def _get_request_headers(request): + # type: (Any) -> Mapping[str, str] + if DJANGO22: + request_headers = request.headers # type: Mapping[str, str] + else: + request_headers = {} # type: Mapping[str, str] + for header, value in request.META.items(): + name = from_wsgi_header(header) + if name: + request_headers[name] = value + + return request_headers + + +def _after_request_tags(pin, span: Span, request, response): + # Response can be None in the event that the request failed + # We still want to set additional request tags that are resolved + # during the request. + + try: + user = getattr(request, "user", None) + if user is not None: + # Note: getattr calls to user / user_is_authenticated may result in ImproperlyConfigured exceptions from + # Django's get_user_model(): + # https://github.com/django/django/blob/a464ead29db8bf6a27a5291cad9eb3f0f3f0472b/django/contrib/auth/__init__.py + # + # FIXME: getattr calls to user fail in async contexts. + # Sample Error: django.core.exceptions.SynchronousOnlyOperation: You cannot call this from an async context + # - use a thread or sync_to_async. + try: + if hasattr(user, "is_authenticated"): + span.set_tag_str("django.user.is_authenticated", str(user_is_authenticated(user))) + + uid = getattr(user, "pk", None) + if uid: + span.set_tag_str("django.user.id", str(uid)) + span.set_tag_str(_user.ID, str(uid)) + if config.django.include_user_name: + username = getattr(user, "username", None) + if username: + span.set_tag_str("django.user.name", username) + except Exception: + log.debug("Error retrieving authentication information for user", exc_info=True) + + # DEV: Resolve the view and resource name at the end of the request in case + # urlconf changes at any point during the request + _set_resolver_tags(pin, span, request) + if response: + status = response.status_code + span.set_tag_str("django.response.class", func_name(response)) + if hasattr(response, "template_name"): + # template_name is a bit of a misnomer, as it could be any of: + # a list of strings, a tuple of strings, a single string, or an instance of Template + # for more detail, see: + # https://docs.djangoproject.com/en/3.0/ref/template-response/#django.template.response.SimpleTemplateResponse.template_name + template = response.template_name + + if isinstance(template, str): + template_names = [template] + elif isinstance( + template, + ( + list, + tuple, + ), + ): + template_names = template + elif hasattr(template, "template"): + # ^ checking by attribute here because + # django backend implementations don't have a common base + # `.template` is also the most consistent across django versions + template_names = [template.template.name] + else: + template_names = None + + set_tag_array(span, "django.response.template", template_names) + + url = get_request_uri(request) + + request_headers = core.dispatch_with_results("django.after_request_headers").headers.value + if not request_headers: + request_headers = _get_request_headers(request) + + response_headers = dict(response.items()) if response else {} + + response_cookies = {} + if response.cookies: + for k, v in response.cookies.items(): + response_cookies[k] = v.OutputString() + + raw_uri = url + if raw_uri and request.META.get("QUERY_STRING"): + raw_uri += "?" 
+ request.META["QUERY_STRING"] + + core.dispatch( + "django.after_request_headers.post", + ( + request_headers, + response_headers, + span, + config.django, + request, + url, + raw_uri, + status, + response_cookies, + ), + ) + content = getattr(response, "content", None) + if content is None: + content = getattr(response, "streaming_content", None) + core.dispatch("django.after_request_headers.finalize", (content, None)) + finally: + if span.resource == REQUEST_DEFAULT_RESOURCE: + span.resource = request.method + + +class DjangoViewProxy(FunctionWrapper): + """ + This custom function wrapper is used to wrap the callback passed to django views handlers (path/re_path/url). + This allows us to distinguish between wrapped django views and wrapped asgi applications in django channels. + """ + + @property + def __module__(self): + """ + DjangoViewProxy.__module__ defaults to ddtrace.contrib.django when a wrapped function does not have + a __module__ attribute. This method ensures that DjangoViewProxy.__module__ always returns the module + attribute of the wrapped function or an empty string if this attribute is not available. + The function Django.urls.path() does not have a __module__ attribute and would require this override + to resolve the correct module name. + """ + return self.__wrapped__.__module__ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/__init__.py new file mode 100644 index 0000000..8666443 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/__init__.py @@ -0,0 +1,50 @@ +""" +Instrument dogpile.cache__ to report all cached lookups. + +This will add spans around the calls to your cache backend (e.g. redis, memory, +etc). The spans will also include the following tags: + +- key/keys: The key(s) dogpile passed to your backend. Note that this will be + the output of the region's ``function_key_generator``, but before any key + mangling is applied (i.e. the region's ``key_mangler``). +- region: Name of the region. +- backend: Name of the backend class. +- hit: If the key was found in the cache. +- expired: If the key is expired. This is only relevant if the key was found. + +While cache tracing will generally already have keys in tags, some caching +setups will not have useful tag values - such as when you're using consistent +hashing with memcached - the key(s) will appear as a mangled hash. +:: + + # Patch before importing dogpile.cache + from ddtrace import patch + patch(dogpile_cache=True) + + from dogpile.cache import make_region + + region = make_region().configure( + "dogpile.cache.pylibmc", + expiration_time=3600, + arguments={"url": ["127.0.0.1"]}, + ) + + @region.cache_on_arguments() + def hello(name): + # Some complicated, slow calculation + return "Hello, {}".format(name) + +.. 
__: https://dogpilecache.sqlalchemy.org/ +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["dogpile.cache"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/lock.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/lock.py new file mode 100644 index 0000000..8bf1c0e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/lock.py @@ -0,0 +1,39 @@ +import dogpile + +from ...internal.utils.formats import asbool +from ...pin import Pin + + +def _wrap_lock_ctor(func, instance, args, kwargs): + """ + This seems rather odd. But to track hits, we need to patch the wrapped function that + dogpile passes to the region and locks. Unfortunately it's a closure defined inside + the get_or_create* methods themselves, so we can't easily patch those. + """ + func(*args, **kwargs) + ori_backend_fetcher = instance.value_and_created_fn + + def wrapped_backend_fetcher(): + pin = Pin.get_from(dogpile.cache) + if not pin or not pin.enabled(): + return ori_backend_fetcher() + + hit = False + expired = True + try: + value, createdtime = ori_backend_fetcher() + hit = value is not dogpile.cache.api.NO_VALUE + # dogpile sometimes returns None, but only checks for truthiness. Coalesce + # to minimize APM users' confusion. + expired = instance._is_expired(createdtime) or False + return value, createdtime + finally: + # Keys are checked in random order so the 'final' answer for partial hits + # should really be false (ie. if any are 'negative', then the tag value + # should be). This means ANDing all hit values and ORing all expired values. 
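+            # For example, with two keys where one is found and one is missing, "hit" ends
+            # up False (True AND False); if either value is expired, "expired" ends up True.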
+ span = pin.tracer.current_span() + if span: + span.set_tag("hit", asbool(span.get_tag("hit") or "True") and hit) + span.set_tag("expired", asbool(span.get_tag("expired") or "False") or expired) + + instance.value_and_created_fn = wrapped_backend_fetcher diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/patch.py new file mode 100644 index 0000000..1621512 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/patch.py @@ -0,0 +1,52 @@ +try: + import dogpile.cache as dogpile_cache + import dogpile.lock as dogpile_lock +except AttributeError: + from dogpile import cache as dogpile_cache + from dogpile import lock as dogpile_lock + +from ddtrace.internal.schema import schematize_service_name +from ddtrace.pin import _DD_PIN_NAME +from ddtrace.pin import _DD_PIN_PROXY_NAME +from ddtrace.pin import Pin +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from .lock import _wrap_lock_ctor +from .region import _wrap_get_create +from .region import _wrap_get_create_multi + + +_get_or_create = dogpile_cache.region.CacheRegion.get_or_create +_get_or_create_multi = dogpile_cache.region.CacheRegion.get_or_create_multi +_lock_ctor = dogpile_lock.Lock.__init__ + + +def get_version(): + # type: () -> str + return getattr(dogpile_cache, "__version__", "") + + +def patch(): + if getattr(dogpile_cache, "_datadog_patch", False): + return + dogpile_cache._datadog_patch = True + + _w("dogpile.cache.region", "CacheRegion.get_or_create", _wrap_get_create) + _w("dogpile.cache.region", "CacheRegion.get_or_create_multi", _wrap_get_create_multi) + _w("dogpile.lock", "Lock.__init__", _wrap_lock_ctor) + + Pin(service=schematize_service_name("dogpile.cache")).onto(dogpile_cache) + + +def unpatch(): + if not getattr(dogpile_cache, "_datadog_patch", False): + return + dogpile_cache._datadog_patch = False + # This looks silly but the unwrap util doesn't support class instance methods, even + # though wrapt does. This was causing the patches to stack on top of each other + # during testing. 
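+    # Restoring the originals captured at import time (_get_or_create, _get_or_create_multi
+    # and _lock_ctor above) removes the wrappers added in patch().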
+ dogpile_cache.region.CacheRegion.get_or_create = _get_or_create + dogpile_cache.region.CacheRegion.get_or_create_multi = _get_or_create_multi + dogpile_lock.Lock.__init__ = _lock_ctor + setattr(dogpile_cache, _DD_PIN_NAME, None) + setattr(dogpile_cache, _DD_PIN_PROXY_NAME, None) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/region.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/region.py new file mode 100644 index 0000000..28e8dbb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/dogpile_cache/region.py @@ -0,0 +1,55 @@ +import dogpile + +from ddtrace.ext import SpanTypes +from ddtrace.internal.constants import COMPONENT + +from ...constants import SPAN_MEASURED_KEY +from ...ext import db +from ...internal.schema import schematize_cache_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ...pin import Pin + + +def _wrap_get_create(func, instance, args, kwargs): + pin = Pin.get_from(dogpile.cache) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + key = get_argument_value(args, kwargs, 0, "key") + with pin.tracer.trace( + schematize_cache_operation("dogpile.cache", cache_provider="dogpile"), + service=schematize_service_name(None), + resource="get_or_create", + span_type=SpanTypes.CACHE, + ) as span: + span.set_tag_str(COMPONENT, "dogpile_cache") + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag("key", key) + span.set_tag("region", instance.name) + span.set_tag("backend", instance.actual_backend.__class__.__name__) + response = func(*args, **kwargs) + span.set_metric(db.ROWCOUNT, 1) + return response + + +def _wrap_get_create_multi(func, instance, args, kwargs): + pin = Pin.get_from(dogpile.cache) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + keys = get_argument_value(args, kwargs, 0, "keys") + with pin.tracer.trace( + schematize_cache_operation("dogpile.cache", cache_provider="dogpile"), + service=schematize_service_name(None), + resource="get_or_create_multi", + span_type="cache", + ) as span: + span.set_tag_str(COMPONENT, "dogpile_cache") + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag("keys", keys) + span.set_tag("region", instance.name) + span.set_tag("backend", instance.actual_backend.__class__.__name__) + response = func(*args, **kwargs) + span.set_metric(db.ROWCOUNT, len(response)) + return response diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/__init__.py new file mode 100644 index 0000000..bd0deb9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/__init__.py @@ -0,0 +1,57 @@ +""" +The Elasticsearch integration will trace Elasticsearch queries. + +Enabling +~~~~~~~~ + +The elasticsearch integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. 
+ +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + from elasticsearch import Elasticsearch + + patch(elasticsearch=True) + # This will report spans with the default instrumentation + es = Elasticsearch(port=ELASTICSEARCH_CONFIG['port']) + # Example of instrumented query + es.indices.create(index='books', ignore=400) + + # Use a pin to specify metadata related to this client + es = Elasticsearch(port=ELASTICSEARCH_CONFIG['port']) + Pin.override(es.transport, service='elasticsearch-videos') + es.indices.create(index='videos', ignore=400) + +OpenSearch is also supported (`opensearch-py`):: + + from ddtrace import patch + from opensearchpy import OpenSearch + + patch(elasticsearch=True) + os = OpenSearch() + # Example of instrumented query + os.indices.create(index='books', ignore=400) + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.elasticsearch['service'] + + The service name reported for your elasticsearch app. + + +Example:: + + from ddtrace import config + + # Override service name + config.elasticsearch['service'] = 'custom-service-name' +""" +from .patch import get_version +from .patch import get_versions +from .patch import patch + + +__all__ = ["patch", "get_version", "get_versions"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/patch.py new file mode 100644 index 0000000..f23a997 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/patch.py @@ -0,0 +1,269 @@ +from importlib import import_module +from typing import List # noqa:F401 + +from ddtrace import config +from ddtrace._trace import _limits +from ddtrace.contrib.trace_utils import ext_service +from ddtrace.contrib.trace_utils import extract_netloc_and_query_info_from_url +from ddtrace.ext import net +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.logger import get_logger +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import elasticsearch as metadata +from ...ext import http +from ...internal.compat import parse +from ...internal.schema import schematize_service_name +from ...internal.utils.wrappers import unwrap as _u +from ...pin import Pin +from .quantize import quantize + + +log = get_logger(__name__) + +config._add( + "elasticsearch", + { + "_default_service": schematize_service_name("elasticsearch"), + }, +) + + +def _es_modules(): + module_names = ( + "elasticsearch", + "elasticsearch1", + "elasticsearch2", + "elasticsearch5", + "elasticsearch6", + "elasticsearch7", + # Starting with version 8, the default transport which is what we + # actually patch is found in the separate elastic_transport package + "elastic_transport", + "opensearchpy", + ) + for module_name in module_names: + try: + module = import_module(module_name) + versions[module_name] = getattr(module, "__versionstr__", "") + yield module + except ImportError: + pass + + +versions = {} + + +def get_version_tuple(elasticsearch): + return getattr(elasticsearch, "__version__", "") + + +def get_version(): + # type: () -> str + return "" + + +def get_versions(): + # type: () -> List[str] + return versions + + +def _get_transport_module(elasticsearch): + try: + # elasticsearch7/opensearch async + return elasticsearch._async.transport 
+ except AttributeError: + try: + # elasticsearch<8/opensearch sync + return elasticsearch.transport + except AttributeError: + # elastic_transport (elasticsearch8) + return elasticsearch + + +# NB: We are patching the default elasticsearch transport module +def patch(): + for elasticsearch in _es_modules(): + _patch(_get_transport_module(elasticsearch)) + + +def _patch(transport): + if getattr(transport, "_datadog_patch", False): + return + if hasattr(transport, "Transport"): + transport._datadog_patch = True + _w(transport.Transport, "perform_request", _get_perform_request(transport)) + Pin().onto(transport.Transport) + if hasattr(transport, "AsyncTransport"): + transport._datadog_patch = True + _w(transport.AsyncTransport, "perform_request", _get_perform_request_async(transport)) + Pin().onto(transport.AsyncTransport) + + +def unpatch(): + for elasticsearch in _es_modules(): + _unpatch(_get_transport_module(elasticsearch)) + + +def _unpatch(transport): + if not getattr(transport, "_datadog_patch", False): + return + for classname in ("Transport", "AsyncTransport"): + try: + cls = getattr(transport, classname) + except AttributeError: + continue + transport._datadog_patch = False + _u(cls, "perform_request") + + +def _get_perform_request_coro(transport): + def _perform_request(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + yield func(*args, **kwargs) + return + + with pin.tracer.trace( + "elasticsearch.query", service=ext_service(pin, config.elasticsearch), span_type=SpanTypes.ELASTICSEARCH + ) as span: + if pin.tags: + span.set_tags(pin.tags) + + span.set_tag_str(COMPONENT, config.elasticsearch.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + # Don't instrument if the trace is not sampled + if not span.sampled: + yield func(*args, **kwargs) + return + + method, target = args + params = kwargs.get("params") + body = kwargs.get("body") + + # elastic_transport gets target url with query params already appended + parsed = parse.urlparse(target) + url = parsed.path + if params: + encoded_params = parse.urlencode(params) + else: + encoded_params = parsed.query + + span.set_tag_str(metadata.METHOD, method) + span.set_tag_str(metadata.URL, url) + span.set_tag_str(metadata.PARAMS, encoded_params) + try: + # elasticsearch<8 + connections = instance.connection_pool.connections + except AttributeError: + # elastic_transport + connections = instance.node_pool.all() + for connection in connections: + hostname, _ = extract_netloc_and_query_info_from_url(connection.host) + if hostname: + span.set_tag_str(net.TARGET_HOST, hostname) + break + + if config.elasticsearch.trace_query_string: + span.set_tag_str(http.QUERY_STRING, encoded_params) + + if method in ["GET", "POST"]: + try: + # elasticsearch<8 + ser_body = instance.serializer.dumps(body) + except AttributeError: + # elastic_transport + ser_body = instance.serializers.dumps(body) + # Elasticsearch request bodies can be very large resulting in traces being too large + # to send. + # When this occurs, drop the value. + # Ideally the body should be truncated, however we cannot truncate as the obfuscation + # logic for the body lives in the agent and truncating would make the body undecodable. 
+ if len(ser_body) <= _limits.MAX_SPAN_META_VALUE_LEN: + span.set_tag_str(metadata.BODY, ser_body) + else: + span.set_tag_str( + metadata.BODY, + "<body size %s exceeds limit of %s>" % (len(ser_body), _limits.MAX_SPAN_META_VALUE_LEN), + ) + status = None + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.elasticsearch.get_analytics_sample_rate()) + + span = quantize(span) + + try: + result = yield func(*args, **kwargs) + except transport.TransportError as e: + span.set_tag(http.STATUS_CODE, getattr(e, "status_code", 500)) + span.error = 1 + raise + + try: + # Optional metadata extraction with soft fail. + if isinstance(result, tuple): + try: + # elastic_transport returns a named tuple + meta, data = result.meta, result.body + status = meta.status + except AttributeError: + # elasticsearch<2.4; it returns both the status and the body + status, data = result + else: + # elasticsearch>=2.4,<8; internal change for ``Transport.perform_request`` + # that just returns the body + data = result + + took = data.get("took") + if took: + span.set_metric(metadata.TOOK, int(took)) + except Exception: + log.debug("Unexpected exception", exc_info=True) + + if status: + span.set_tag(http.STATUS_CODE, status) + + return + + return _perform_request + + +def _get_perform_request(transport): + _perform_request_coro = _get_perform_request_coro(transport) + + def _perform_request(func, instance, args, kwargs): + coro = _perform_request_coro(func, instance, args, kwargs) + result = next(coro) + try: + coro.send(result) + except StopIteration: + pass + return result + + return _perform_request + + +def _get_perform_request_async(transport): + _perform_request_coro = _get_perform_request_coro(transport) + + async def _perform_request(func, instance, args, kwargs): + coro = _perform_request_coro(func, instance, args, kwargs) + result = await next(coro) + try: + coro.send(result) + except StopIteration: + pass + return result + + return _perform_request diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/quantize.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/quantize.py new file mode 100644 index 0000000..5b526a3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/elasticsearch/quantize.py @@ -0,0 +1,35 @@ +import re + +from ...ext import elasticsearch as metadata + + +# Replace any ID +ID_REGEXP = re.compile(r"/([0-9]+)([/\?]|$)") +ID_PLACEHOLDER = r"/?\2" + +# Remove digits from potential timestamped indexes (should be an option). +# For now, let's say 2+ digits +INDEX_REGEXP = re.compile(r"[0-9]{2,}") +INDEX_PLACEHOLDER = r"?" + + +def quantize(span): + """Quantize an elasticsearch span + + We want to extract a meaningful `resource` from the request. + We do it based on the method + url, with some cleanup applied to the URL. + + The URL might contain an ID, but also it is common to have timestamped indexes. + While the first is easy to catch, the second should probably be configurable. + + All of this should probably be done in the Agent. Later. 
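+    For example, ``GET /users/123/_search`` becomes the resource ``GET /users/?/_search``,
+    and a timestamped index such as ``logs-20240101`` collapses to ``logs-?``.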
+ """ + url = span.get_tag(metadata.URL) + method = span.get_tag(metadata.METHOD) + + quantized_url = ID_REGEXP.sub(ID_PLACEHOLDER, url) + quantized_url = INDEX_REGEXP.sub(INDEX_PLACEHOLDER, quantized_url) + + span.resource = "{method} {url}".format(method=method, url=quantized_url) + + return span diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/__init__.py new file mode 100644 index 0000000..41fdf55 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/__init__.py @@ -0,0 +1,58 @@ +""" +To trace the falcon web framework, install the trace middleware:: + + import falcon + from ddtrace import tracer + from ddtrace.contrib.falcon import TraceMiddleware + + mw = TraceMiddleware(tracer, 'my-falcon-app') + falcon.API(middleware=[mw]) + +You can also use the autopatching functionality:: + + import falcon + from ddtrace import tracer, patch + + patch(falcon=True) + + app = falcon.API() + +To disable distributed tracing when using autopatching, set the +``DD_FALCON_DISTRIBUTED_TRACING`` environment variable to ``False``. + +**Supported span hooks** + +The following is a list of available tracer hooks that can be used to intercept +and modify spans created by this integration. + +- ``request`` + - Called before the response has been finished + - ``def on_falcon_request(span, request, response)`` + + +Example:: + + import ddtrace.auto + import falcon + from ddtrace import config + + app = falcon.API() + + @config.falcon.hooks.on('request') + def on_falcon_request(span, request, response): + span.set_tag('my.custom', 'tag') + +:ref:`Headers tracing ` is supported for this integration. +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["falcon"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .middleware import TraceMiddleware + from .patch import get_version + from .patch import patch + + __all__ = ["TraceMiddleware", "patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/middleware.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/middleware.py new file mode 100644 index 0000000..ba398f0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/middleware.py @@ -0,0 +1,123 @@ +import sys + +from ddtrace import config +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import http as httpx +from ddtrace.internal.constants import COMPONENT + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...internal.schema import SpanDirection +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from .. import trace_utils + + +class TraceMiddleware(object): + def __init__(self, tracer, service=None, distributed_tracing=None): + if service is None: + service = schematize_service_name("falcon") + # store tracing references + self.tracer = tracer + self.service = service + if distributed_tracing is not None: + config.falcon["distributed_tracing"] = distributed_tracing + + def process_request(self, req, resp): + # Falcon uppercases all header names. 
+ headers = dict((k.lower(), v) for k, v in req.headers.items()) + trace_utils.activate_distributed_headers(self.tracer, int_config=config.falcon, request_headers=headers) + + span = self.tracer.trace( + schematize_url_operation("falcon.request", protocol="http", direction=SpanDirection.INBOUND), + service=self.service, + span_type=SpanTypes.WEB, + ) + span.set_tag_str(COMPONENT, config.falcon.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + span.set_tag(SPAN_MEASURED_KEY) + + # set analytics sample rate with global config enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.falcon.get_analytics_sample_rate(use_global_config=True)) + + trace_utils.set_http_meta( + span, config.falcon, method=req.method, url=req.url, query=req.query_string, request_headers=req.headers + ) + + def process_resource(self, req, resp, resource, params): + span = self.tracer.current_span() + if not span: + return # unexpected + span.resource = "%s %s" % (req.method, _name(resource)) + + def process_response(self, req, resp, resource, req_succeeded=None): + # req_succeded is not a kwarg in the API, but we need that to support + # Falcon 1.0 that doesn't provide this argument + span = self.tracer.current_span() + if not span: + return # unexpected + + status = resp.status.partition(" ")[0] + + # falcon does not map errors or unmatched routes + # to proper status codes, so we have to try to infer them + # here. + if resource is None: + status = "404" + span.resource = "%s 404" % req.method + span.set_tag(httpx.STATUS_CODE, status) + span.finish() + return + + err_type = sys.exc_info()[0] + if err_type is not None: + if req_succeeded is None: + # backward-compatibility with Falcon 1.0; any version + # greater than 1.0 has req_succeded in [True, False] + # TODO[manu]: drop the support at some point + status = _detect_and_set_status_error(err_type, span) + elif req_succeeded is False: + # Falcon 1.1+ provides that argument that is set to False + # if get an Exception (404 is still an exception) + status = _detect_and_set_status_error(err_type, span) + + route = req.root_path or "" + req.uri_template + + trace_utils.set_http_meta( + span, + config.falcon, + status_code=status, + response_headers=resp._headers, + route=route, + ) + + # Emit span hook for this response + # DEV: Emit before closing so they can overwrite `span.resource` if they want + config.falcon.hooks.emit("request", span, req, resp) + + # Close the span + span.finish() + + +def _is_404(err_type): + return "HTTPNotFound" in err_type.__name__ + + +def _detect_and_set_status_error(err_type, span): + """Detect the HTTP status code from the current stacktrace and + set the traceback to the given Span + """ + if not _is_404(err_type): + span.set_traceback() + return "500" + elif _is_404(err_type): + return "404" + + +def _name(r): + return "%s.%s" % (r.__module__, r.__class__.__name__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/patch.py new file mode 100644 index 0000000..d8deeb6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/falcon/patch.py @@ -0,0 +1,52 @@ +import os + +import falcon + +from ddtrace import config +from ddtrace import tracer +from ddtrace.vendor import wrapt + +from ...internal.utils.formats import asbool +from ...internal.utils.version import parse_version +from .middleware import TraceMiddleware + + +FALCON_VERSION = 
parse_version(falcon.__version__) + + +config._add( + "falcon", + dict( + distributed_tracing=asbool(os.getenv("DD_FALCON_DISTRIBUTED_TRACING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(falcon, "__version__", "") + + +def patch(): + """ + Patch falcon.API to include contrib.falcon.TraceMiddleware + by default + """ + if getattr(falcon, "_datadog_patch", False): + return + + falcon._datadog_patch = True + if FALCON_VERSION >= (3, 0, 0): + wrapt.wrap_function_wrapper("falcon", "App.__init__", traced_init) + if FALCON_VERSION < (4, 0, 0): + wrapt.wrap_function_wrapper("falcon", "API.__init__", traced_init) + + +def traced_init(wrapped, instance, args, kwargs): + mw = kwargs.pop("middleware", []) + service = config._get_service(default="falcon") + + mw.insert(0, TraceMiddleware(tracer, service)) + kwargs["middleware"] = mw + + wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/__init__.py new file mode 100644 index 0000000..015d2cb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/__init__.py @@ -0,0 +1,72 @@ +""" +The fastapi integration will trace requests to and from FastAPI. + +Enabling +~~~~~~~~ + +The fastapi integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + from fastapi import FastAPI + + patch(fastapi=True) + app = FastAPI() + + +On Python 3.6 and below, you must enable the legacy ``AsyncioContextProvider`` before using the middleware:: + + from ddtrace.contrib.asyncio.provider import AsyncioContextProvider + from ddtrace import tracer # Or whichever tracer instance you plan to use + tracer.configure(context_provider=AsyncioContextProvider()) + + +When registering your own ASGI middleware using FastAPI's ``add_middleware()`` function, +keep in mind that Datadog spans close after your middleware's call to ``await self.app()`` returns. +This means that accesses of span data from within the middleware should be performed +prior to this call. + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.fastapi['service_name'] + + The service name reported for your fastapi app. + + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'fastapi'`` + +.. py:data:: ddtrace.config.fastapi['request_span_name'] + + The span name for a fastapi request. 
+ + Default: ``'fastapi.request'`` + + +Example:: + + from ddtrace import config + + # Override service name + config.fastapi['service_name'] = 'custom-service-name' + + # Override request span name + config.fastapi['request_span_name'] = 'custom-request-span-name' + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["fastapi"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/patch.py new file mode 100644 index 0000000..60d67e4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/fastapi/patch.py @@ -0,0 +1,100 @@ +import fastapi +import fastapi.routing + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib.asgi.middleware import TraceMiddleware +from ddtrace.contrib.starlette.patch import _trace_background_tasks +from ddtrace.contrib.starlette.patch import traced_handler +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.vendor.wrapt import ObjectProxy +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + + +log = get_logger(__name__) + +config._add( + "fastapi", + dict( + _default_service=schematize_service_name("fastapi"), + request_span_name="fastapi.request", + distributed_tracing=True, + trace_query_string=None, # Default to global config + ), +) + + +def get_version(): + # type: () -> str + return getattr(fastapi, "__version__", "") + + +def wrap_middleware_stack(wrapped, instance, args, kwargs): + return TraceMiddleware(app=wrapped(*args, **kwargs), integration_config=config.fastapi) + + +async def traced_serialize_response(wrapped, instance, args, kwargs): + """Wrapper for fastapi.routing.serialize_response function. + + This function is called on all non-Response objects to + convert them to a serializable form. + + This is the wrapper which calls ``jsonable_encoder``. + + This function does not do the actual encoding from + obj -> json string (e.g. json.dumps()). That is handled + by the Response.render function. + + DEV: We do not wrap ``jsonable_encoder`` because it calls + itself recursively, so there is a chance the overhead + added by creating spans will be higher than desired for + the result. 
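+    For example, a Pydantic model returned by a path operation passes through this
+    function (and therefore this wrapper) on its way to becoming a JSON response.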
+ """ + pin = Pin.get_from(fastapi) + if not pin or not pin.enabled(): + return await wrapped(*args, **kwargs) + + with pin.tracer.trace("fastapi.serialize_response"): + return await wrapped(*args, **kwargs) + + +def patch(): + if getattr(fastapi, "_datadog_patch", False): + return + + fastapi._datadog_patch = True + Pin().onto(fastapi) + _w("fastapi.applications", "FastAPI.build_middleware_stack", wrap_middleware_stack) + _w("fastapi.routing", "serialize_response", traced_serialize_response) + + if not isinstance(fastapi.BackgroundTasks.add_task, ObjectProxy): + _w("fastapi", "BackgroundTasks.add_task", _trace_background_tasks(fastapi)) + + # We need to check that Starlette instrumentation hasn't already patched these + if not isinstance(fastapi.routing.APIRoute.handle, ObjectProxy): + _w("fastapi.routing", "APIRoute.handle", traced_handler) + + if not isinstance(fastapi.routing.Mount.handle, ObjectProxy): + _w("starlette.routing", "Mount.handle", traced_handler) + + +def unpatch(): + if not getattr(fastapi, "_datadog_patch", False): + return + + fastapi._datadog_patch = False + + _u(fastapi.applications.FastAPI, "build_middleware_stack") + _u(fastapi.routing, "serialize_response") + + # We need to check that Starlette instrumentation hasn't already unpatched these + if isinstance(fastapi.routing.APIRoute.handle, ObjectProxy): + _u(fastapi.routing.APIRoute, "handle") + + if isinstance(fastapi.routing.Mount.handle, ObjectProxy): + _u(fastapi.routing.Mount, "handle") + + if isinstance(fastapi.BackgroundTasks.add_task, ObjectProxy): + _u(fastapi.BackgroundTasks, "add_task") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/__init__.py new file mode 100644 index 0000000..c3a619a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/__init__.py @@ -0,0 +1,113 @@ +""" +The Flask__ integration will add tracing to all requests to your Flask application. + +This integration will track the entire Flask lifecycle including user-defined endpoints, hooks, +signals, and template rendering. + +To configure tracing manually:: + + import ddtrace.auto + + from flask import Flask + + app = Flask(__name__) + + + @app.route('/') + def index(): + return 'hello world' + + + if __name__ == '__main__': + app.run() + + +You may also enable Flask tracing automatically via ddtrace-run:: + + ddtrace-run python app.py + +Note that if you are using IAST/Custom Code to detect vulnerabilities (`DD_IAST_ENABLED=1`) +and your main `app.py` file contains code outside the `app.run()` call (e.g. routes or +utility functions) you will need to import and call `ddtrace_iast_flask_patch()` before +the `app.run()` to ensure the code inside the main module is patched to propagation works: + + from flask import Flask + from ddtrace.appsec._iast import ddtrace_iast_flask_patch + + app = Flask(__name__) + + if __name__ == '__main__': + ddtrace_iast_flask_patch() + app.run() + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.flask['distributed_tracing_enabled'] + + Whether to parse distributed tracing headers from requests received by your Flask app. + + Default: ``True`` + +.. py:data:: ddtrace.config.flask['service_name'] + + The service name reported for your Flask app. + + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'flask'`` + +.. py:data:: ddtrace.config.flask['collect_view_args'] + + Whether to add request tags for view function argument values. 
+ + Default: ``True`` + +.. py:data:: ddtrace.config.flask['template_default_name'] + + The default template name to use when one does not exist. + + Default: ``<memory>`` + +.. py:data:: ddtrace.config.flask['trace_signals'] + + Whether to trace Flask signals (``before_request``, ``after_request``, etc). + + Default: ``True`` + + +Example:: + + from ddtrace import config + + # Enable distributed tracing + config.flask['distributed_tracing_enabled'] = True + + # Override service name + config.flask['service_name'] = 'custom-service-name' + + # Report 401 and 403 responses as errors + config.http_server.error_statuses = '401,403' + +.. __: http://flask.pocoo.org/ + +:ref:`All HTTP tags <http-tagging>` are supported for this integration. + +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["flask"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + # DEV: We do this so we can `@mock.patch('ddtrace.contrib.flask._patch.<func>')` in tests + from . import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + get_version = _patch.get_version + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/patch.py new file mode 100644 index 0000000..c062674 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/patch.py @@ -0,0 +1,559 @@ +import flask +import werkzeug +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import NotFound +from werkzeug.exceptions import abort + +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanTypes +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.constants import STATUS_403_TYPE_AUTO +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...internal import core +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from ...internal.utils import http as http_utils + + +# Not all versions of flask/werkzeug have this mixin +try: + from werkzeug.wrappers.json import JSONMixin + + _HAS_JSON_MIXIN = True +except ImportError: + _HAS_JSON_MIXIN = False + +from ddtrace import Pin +from ddtrace import config +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...contrib.wsgi.wsgi import _DDWSGIMiddlewareBase +from ...internal.logger import get_logger +from ...internal.utils import get_argument_value +from ...internal.utils.importlib import func_name +from ...internal.utils.version import parse_version +from ..trace_utils import unwrap as _u +from .wrappers import _wrap_call_with_pin_check +from .wrappers import get_current_app +from .wrappers import simple_call_wrapper +from .wrappers import with_instance_pin +from .wrappers import wrap_function +from .wrappers import wrap_view + + +try: + from json import JSONDecodeError +except ImportError: + # handling python 2.X import error + JSONDecodeError = ValueError # type: ignore + + +log = get_logger(__name__) + +FLASK_VERSION = "flask.version" +_BODY_METHODS = {"POST", "PUT", "DELETE", "PATCH"} + +# Configure default configuration +config._add( + "flask", + dict( + # Flask service configuration + _default_service=schematize_service_name("flask"), + collect_view_args=True, + distributed_tracing_enabled=True, + template_default_name="<memory>", + trace_signals=True, + ), +) + + +def get_version(): + # type: () -> 
str + return getattr(flask, "__version__", "") + + +if _HAS_JSON_MIXIN: + + class RequestWithJson(werkzeug.Request, JSONMixin): + pass + + _RequestType = RequestWithJson +else: + _RequestType = werkzeug.Request + +# Extract flask version into a tuple e.g. (0, 12, 1) or (1, 0, 2) +# DEV: This makes it so we can do `if flask_version >= (0, 12, 0):` +# DEV: Example tests: +# (0, 10, 0) > (0, 10) +# (0, 10, 0) >= (0, 10, 0) +# (0, 10, 1) >= (0, 10) +# (0, 11, 1) >= (0, 10) +# (0, 11, 1) >= (0, 10, 2) +# (1, 0, 0) >= (0, 10) +# (0, 9) == (0, 9) +# (0, 9, 0) != (0, 9) +# (0, 8, 5) <= (0, 9) +flask_version_str = getattr(flask, "__version__", "") +flask_version = parse_version(flask_version_str) + + +class _FlaskWSGIMiddleware(_DDWSGIMiddlewareBase): + _request_call_name = schematize_url_operation("flask.request", protocol="http", direction=SpanDirection.INBOUND) + _application_call_name = "flask.application" + _response_call_name = "flask.response" + + def _wrapped_start_response(self, start_response, ctx, status_code, headers, exc_info=None): + core.dispatch("flask.start_response.pre", (flask.request, ctx, config.flask, status_code, headers)) + if not core.get_item(HTTP_REQUEST_BLOCKED): + headers_from_context = "" + result = core.dispatch_with_results("flask.start_response", ("Flask",)).waf + if result: + headers_from_context = result.value + if core.get_item(HTTP_REQUEST_BLOCKED): + # response code must be set here, or it will be too late + block_config = core.get_item(HTTP_REQUEST_BLOCKED) + desired_type = block_config.get("type", "auto") + status = block_config.get("status_code", 403) + if desired_type == "none": + response_headers = [] + else: + if block_config.get("type", "auto") == "auto": + ctype = "text/html" if "text/html" in headers_from_context else "text/json" + else: + ctype = "text/" + block_config["type"] + response_headers = [("content-type", ctype)] + result = start_response(str(status), response_headers) + core.dispatch("flask.start_response.blocked", (ctx, config.flask, response_headers, status)) + else: + result = start_response(status_code, headers) + else: + result = start_response(status_code, headers) + return result + + def _request_call_modifier(self, ctx, parsed_headers=None): + environ = ctx.get_item("environ") + # Create a werkzeug request from the `environ` to make interacting with it easier + # DEV: This executes before a request context is created + request = _RequestType(environ) + + req_body = None + result = core.dispatch_with_results( + "flask.request_call_modifier", + ( + ctx, + config.flask, + request, + environ, + _HAS_JSON_MIXIN, + FLASK_VERSION, + flask_version_str, + BadRequest, + ), + ).request_body + if result: + req_body = result.value + core.dispatch("flask.request_call_modifier.post", (ctx, config.flask, request, req_body)) + + +def patch(): + """ + Patch `flask` module for tracing + """ + # Check to see if we have patched Flask yet or not + if getattr(flask, "_datadog_patch", False): + return + flask._datadog_patch = True + + Pin().onto(flask.Flask) + core.dispatch("flask.patch", (flask_version,)) + # flask.app.Flask methods that have custom tracing (add metadata, wrap functions, etc) + _w("flask", "Flask.wsgi_app", patched_wsgi_app) + _w("flask", "Flask.dispatch_request", request_patcher("dispatch_request")) + _w("flask", "Flask.preprocess_request", request_patcher("preprocess_request")) + _w("flask", "Flask.add_url_rule", patched_add_url_rule) + _w("flask", "Flask.endpoint", patched_endpoint) + + _w("flask", "Flask.finalize_request", 
patched_finalize_request) + + if flask_version >= (2, 0, 0): + _w("flask", "Flask.register_error_handler", patched_register_error_handler) + else: + _w("flask", "Flask._register_error_handler", patched__register_error_handler) + + # flask.blueprints.Blueprint methods that have custom tracing (add metadata, wrap functions, etc) + _w("flask", "Blueprint.register", patched_blueprint_register) + _w("flask", "Blueprint.add_url_rule", patched_blueprint_add_url_rule) + + flask_hooks = [ + "before_request", + "after_request", + "teardown_request", + "teardown_appcontext", + ] + if flask_version < (2, 3, 0): + flask_hooks.append("before_first_request") + + for hook in flask_hooks: + _w("flask", "Flask.{}".format(hook), patched_flask_hook) + _w("flask", "after_this_request", patched_flask_hook) + + flask_app_traces = [ + "process_response", + "handle_exception", + "handle_http_exception", + "handle_user_exception", + "do_teardown_request", + "do_teardown_appcontext", + "send_static_file", + ] + if flask_version < (2, 2, 0): + flask_app_traces.append("try_trigger_before_first_request_functions") + + for name in flask_app_traces: + _w("flask", "Flask.{}".format(name), simple_call_wrapper("flask.{}".format(name))) + # flask static file helpers + _w("flask", "send_file", simple_call_wrapper("flask.send_file")) + + # flask.json.jsonify + _w("flask", "jsonify", patched_jsonify) + + _w("flask.templating", "_render", patched_render) + _w("flask", "render_template", _build_render_template_wrapper("render_template")) + _w("flask", "render_template_string", _build_render_template_wrapper("render_template_string")) + + bp_hooks = [ + "after_app_request", + "after_request", + "before_app_request", + "before_request", + "teardown_request", + "teardown_app_request", + ] + if flask_version < (2, 3, 0): + bp_hooks.append("before_app_first_request") + + for hook in bp_hooks: + _w("flask", "Blueprint.{}".format(hook), patched_flask_hook) + + if config.flask["trace_signals"]: + signals = [ + "template_rendered", + "request_started", + "request_finished", + "request_tearing_down", + "got_request_exception", + "appcontext_tearing_down", + ] + # These were added in 0.11.0 + if flask_version >= (0, 11): + signals.append("before_render_template") + + # These were added in 0.10.0 + if flask_version >= (0, 10): + signals.append("appcontext_pushed") + signals.append("appcontext_popped") + signals.append("message_flashed") + + for signal in signals: + module = "flask" + + # v0.9 missed importing `appcontext_tearing_down` in `flask/__init__.py` + # https://github.com/pallets/flask/blob/0.9/flask/__init__.py#L35-L37 + # https://github.com/pallets/flask/blob/0.9/flask/signals.py#L52 + # DEV: Version 0.9 doesn't have a patch version + if flask_version <= (0, 9) and signal == "appcontext_tearing_down": + module = "flask.signals" + + # DEV: Patch `receivers_for` instead of `connect` to ensure we don't mess with `disconnect` + _w(module, "{}.receivers_for".format(signal), patched_signal_receivers_for(signal)) + + +def unpatch(): + if not getattr(flask, "_datadog_patch", False): + return + flask._datadog_patch = False + + props = [ + # Flask + "Flask.wsgi_app", + "Flask.dispatch_request", + "Flask.add_url_rule", + "Flask.endpoint", + "Flask.preprocess_request", + "Flask.process_response", + "Flask.handle_exception", + "Flask.handle_http_exception", + "Flask.handle_user_exception", + "Flask.do_teardown_request", + "Flask.do_teardown_appcontext", + "Flask.send_static_file", + # Flask Hooks + "Flask.before_request", + 
"Flask.after_request", + "Flask.teardown_request", + "Flask.teardown_appcontext", + # Blueprint + "Blueprint.register", + "Blueprint.add_url_rule", + # Blueprint Hooks + "Blueprint.after_app_request", + "Blueprint.after_request", + "Blueprint.before_app_request", + "Blueprint.before_request", + "Blueprint.teardown_request", + "Blueprint.teardown_app_request", + # Signals + "template_rendered.receivers_for", + "request_started.receivers_for", + "request_finished.receivers_for", + "request_tearing_down.receivers_for", + "got_request_exception.receivers_for", + "appcontext_tearing_down.receivers_for", + # Top level props + "after_this_request", + "send_file", + "jsonify", + "render_template", + "render_template_string", + "templating._render", + ] + + props.append("Flask.finalize_request") + + if flask_version >= (2, 0, 0): + props.append("Flask.register_error_handler") + else: + props.append("Flask._register_error_handler") + + # These were added in 0.11.0 + if flask_version >= (0, 11): + props.append("before_render_template.receivers_for") + + # These were added in 0.10.0 + if flask_version >= (0, 10): + props.append("appcontext_pushed.receivers_for") + props.append("appcontext_popped.receivers_for") + props.append("message_flashed.receivers_for") + + # These were removed in 2.2.0 + if flask_version < (2, 2, 0): + props.append("Flask.try_trigger_before_first_request_functions") + + # These were removed in 2.3.0 + if flask_version < (2, 3, 0): + props.append("Flask.before_first_request") + props.append("Blueprint.before_app_first_request") + + for prop in props: + # Handle 'flask.request_started.receivers_for' + obj = flask + + # v0.9.0 missed importing `appcontext_tearing_down` in `flask/__init__.py` + # https://github.com/pallets/flask/blob/0.9/flask/__init__.py#L35-L37 + # https://github.com/pallets/flask/blob/0.9/flask/signals.py#L52 + # DEV: Version 0.9 doesn't have a patch version + if flask_version <= (0, 9) and prop == "appcontext_tearing_down.receivers_for": + obj = flask.signals + + if "." in prop: + attr, _, prop = prop.partition(".") + obj = getattr(obj, attr, object()) + _u(obj, prop) + + +@with_instance_pin +def patched_wsgi_app(pin, wrapped, instance, args, kwargs): + # This wrapper is the starting point for all requests. 
+ # DEV: This is safe before this is the args for a WSGI handler + # https://www.python.org/dev/peps/pep-3333/ + environ, start_response = args + middleware = _FlaskWSGIMiddleware(wrapped, pin.tracer, config.flask, pin) + return middleware(environ, start_response) + + +def patched_finalize_request(wrapped, instance, args, kwargs): + """ + Wrapper for flask.app.Flask.finalize_request + """ + rv = wrapped(*args, **kwargs) + response = None + headers = None + if getattr(rv, "is_sequence", False): + response = rv.response + headers = rv.headers + core.dispatch("flask.finalize_request.post", (response, headers)) + return rv + + +def patched_blueprint_register(wrapped, instance, args, kwargs): + """ + Wrapper for flask.blueprints.Blueprint.register + + This wrapper just ensures the blueprint has a pin, either set manually on + itself from the user or inherited from the application + """ + app = get_argument_value(args, kwargs, 0, "app") + # Check if this Blueprint has a pin, otherwise clone the one from the app onto it + pin = Pin.get_from(instance) + if not pin: + pin = Pin.get_from(app) + if pin: + pin.clone().onto(instance) + return wrapped(*args, **kwargs) + + +def patched_blueprint_add_url_rule(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance) + if not pin: + return wrapped(*args, **kwargs) + + def _wrap(rule, endpoint=None, view_func=None, **kwargs): + if view_func: + pin.clone().onto(view_func) + return wrapped(rule, endpoint=endpoint, view_func=view_func, **kwargs) + + return _wrap(*args, **kwargs) + + +def patched_add_url_rule(wrapped, instance, args, kwargs): + """Wrapper for flask.app.Flask.add_url_rule to wrap all views attached to this app""" + + def _wrap(rule, endpoint=None, view_func=None, **kwargs): + if view_func: + # TODO: `if hasattr(view_func, 'view_class')` then this was generated from a `flask.views.View` + # should we do something special with these views? Change the name/resource? Add tags? 
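+            # wrap the view so requests dispatched through this rule are traced;
+            # the endpoint (or the view's function name) becomes the span name
+            # and the URL rule becomes the span resource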
+ view_func = wrap_view(instance, view_func, name=endpoint, resource=rule) + + return wrapped(rule, endpoint=endpoint, view_func=view_func, **kwargs) + + return _wrap(*args, **kwargs) + + +def patched_endpoint(wrapped, instance, args, kwargs): + """Wrapper for flask.app.Flask.endpoint to ensure all endpoints are wrapped""" + endpoint = kwargs.get("endpoint", args[0]) + + def _wrapper(func): + return wrapped(endpoint)(wrap_function(instance, func, resource=endpoint)) + + return _wrapper + + +def patched_flask_hook(wrapped, instance, args, kwargs): + func = get_argument_value(args, kwargs, 0, "f") + return wrapped(wrap_function(instance, func)) + + +def traced_render_template(wrapped, instance, args, kwargs): + return _build_render_template_wrapper("render_template")(wrapped, instance, args, kwargs) + + +def traced_render_template_string(wrapped, instance, args, kwargs): + return _build_render_template_wrapper("render_template_string")(wrapped, instance, args, kwargs) + + +def _build_render_template_wrapper(name): + name = "flask.%s" % name + + def traced_render(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance, get_current_app()) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + with core.context_with_data( + "flask.render_template", + span_name=name, + pin=pin, + flask_config=config.flask, + tags={COMPONENT: config.flask.integration_name}, + span_type=SpanTypes.TEMPLATE, + call_key=[name + ".call", "current_span"], + ) as ctx, ctx.get_item(name + ".call"): + return wrapped(*args, **kwargs) + + return traced_render + + +def patched_render(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance, get_current_app()) + + if not pin.enabled: + return wrapped(*args, **kwargs) + + def _wrap(template, context, app): + core.dispatch("flask.render", (template, config.flask)) + return wrapped(*args, **kwargs) + + return _wrap(*args, **kwargs) + + +def patched__register_error_handler(wrapped, instance, args, kwargs): + def _wrap(key, code_or_exception, f): + return wrapped(key, code_or_exception, wrap_function(instance, f)) + + return _wrap(*args, **kwargs) + + +def patched_register_error_handler(wrapped, instance, args, kwargs): + def _wrap(code_or_exception, f): + return wrapped(code_or_exception, wrap_function(instance, f)) + + return _wrap(*args, **kwargs) + + +def _block_request_callable(call): + core.set_item(HTTP_REQUEST_BLOCKED, STATUS_403_TYPE_AUTO) + core.dispatch("flask.blocked_request_callable", (call,)) + ctype = "text/html" if "text/html" in flask.request.headers.get("Accept", "").lower() else "text/json" + abort(flask.Response(http_utils._get_blocked_template(ctype), content_type=ctype, status=403)) + + +def request_patcher(name): + @with_instance_pin + def _patched_request(pin, wrapped, instance, args, kwargs): + with core.context_with_data( + "flask._patched_request", + span_name=".".join(("flask", name)), + pin=pin, + service=trace_utils.int_service(pin, config.flask, pin), + flask_config=config.flask, + flask_request=flask.request, + block_request_callable=_block_request_callable, + ignored_exception_type=NotFound, + call_key="flask_request_call", + tags={COMPONENT: config.flask.integration_name}, + ) as ctx, ctx.get_item("flask_request_call"): + core.dispatch("flask._patched_request", (ctx,)) + return wrapped(*args, **kwargs) + + return _patched_request + + +def patched_signal_receivers_for(signal): + def outer(wrapped, instance, args, kwargs): + sender = get_argument_value(args, kwargs, 0, "sender") + # See if they gave us the 
flask.app.Flask as the sender + app = None + if isinstance(sender, flask.Flask): + app = sender + for receiver in wrapped(*args, **kwargs): + yield _wrap_call_with_pin_check(receiver, app, func_name(receiver), signal=signal) + + return outer + + +def patched_jsonify(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance, get_current_app()) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + with core.context_with_data( + "flask.jsonify", + span_name="flask.jsonify", + flask_config=config.flask, + tags={COMPONENT: config.flask.integration_name}, + pin=pin, + call_key="flask_jsonify_call", + ) as ctx, ctx.get_item("flask_jsonify_call"): + return wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/wrappers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/wrappers.py new file mode 100644 index 0000000..e1a7e47 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask/wrappers.py @@ -0,0 +1,96 @@ +import flask + +from ddtrace import config +from ddtrace.contrib import trace_utils +from ddtrace.internal import core +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor.wrapt import function_wrapper + +from ...internal.logger import get_logger +from ...internal.utils.importlib import func_name +from ...pin import Pin + + +log = get_logger(__name__) + + +def wrap_view(instance, func, name=None, resource=None): + return _wrap_call_with_pin_check(func, instance, name or func_name(func), resource=resource, do_dispatch=True) + + +def get_current_app(): + """Helper to get the flask.app.Flask from the current app context""" + try: + return flask.current_app + except RuntimeError: + # raised if current_app is None: https://github.com/pallets/flask/blob/2.1.3/src/flask/globals.py#L40 + pass + return None + + +def _wrap_call( + wrapped, pin, name, resource=None, signal=None, span_type=None, do_dispatch=False, args=None, kwargs=None +): + args = args or [] + kwargs = kwargs or {} + tags = {COMPONENT: config.flask.integration_name} + if signal: + tags["flask.signal"] = signal + with core.context_with_data( + "flask.call", + span_name=name, + pin=pin, + resource=resource, + service=trace_utils.int_service(pin, config.flask), + span_type=span_type, + tags=tags, + call_key="flask_call", + ) as ctx, ctx.get_item("flask_call"): + if do_dispatch: + result = core.dispatch_with_results("flask.wrapped_view", (kwargs,)).callback_and_args + if result: + callback_block, _kwargs = result.value + if callback_block: + return callback_block() + if _kwargs: + for k in kwargs: + kwargs[k] = _kwargs[k] + return wrapped(*args, **kwargs) + + +def _wrap_call_with_pin_check(func, instance, name, resource=None, signal=None, do_dispatch=False): + @function_wrapper + def patch_func(wrapped, _instance, args, kwargs): + pin = Pin._find(wrapped, _instance, instance, get_current_app()) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + return _wrap_call( + wrapped, pin, name, resource=resource, signal=signal, do_dispatch=do_dispatch, args=args, kwargs=kwargs + ) + + return patch_func(func) + + +def wrap_function(instance, func, name=None, resource=None): + return _wrap_call_with_pin_check(func, instance, name or func_name(func), resource=resource) + + +def simple_call_wrapper(name, span_type=None): + @with_instance_pin + def wrapper(pin, wrapped, instance, args, kwargs): + return _wrap_call(wrapped, pin, name, span_type=span_type, args=args, kwargs=kwargs) + + return wrapper + + +def 
with_instance_pin(func): + """Helper to wrap a function wrapper and ensure an enabled pin is available for the `instance`""" + + def wrapper(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance, get_current_app()) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + return func(pin, wrapped, instance, args, kwargs) + + return wrapper diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/__init__.py new file mode 100644 index 0000000..a859b53 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/__init__.py @@ -0,0 +1,57 @@ +""" +The flask cache tracer will track any access to a cache backend. +You can use this tracer together with the Flask tracer middleware. + +The tracer supports both `Flask-Cache `_ +and `Flask-Caching `_. + +To install the tracer, ``from ddtrace import tracer`` needs to be added:: + + from ddtrace import tracer + from ddtrace.contrib.flask_cache import get_traced_cache + +and the tracer needs to be initialized:: + + Cache = get_traced_cache(tracer, service='my-flask-cache-app') + +Here is the end result, in a sample app:: + + from flask import Flask + + from ddtrace import tracer + from ddtrace.contrib.flask_cache import get_traced_cache + + app = Flask(__name__) + + # get the traced Cache class + Cache = get_traced_cache(tracer, service='my-flask-cache-app') + + # use the Cache as usual with your preferred CACHE_TYPE + cache = Cache(app, config={'CACHE_TYPE': 'simple'}) + + def counter(): + # this access is traced + conn_counter = cache.get("conn_counter") + +Use a specific ``Cache`` implementation with:: + + from ddtrace import tracer + from ddtrace.contrib.flask_cache import get_traced_cache + + from flask_caching import Cache + + Cache = get_traced_cache(tracer, service='my-flask-cache-app', cache_cls=Cache) + +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["flask_cache", "flask_caching"] + +with require_modules(required_modules) as missing_modules: + if len(missing_modules) < len(required_modules): + from .tracers import get_traced_cache + from .tracers import get_version + + __all__ = ["get_traced_cache", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/tracers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/tracers.py new file mode 100644 index 0000000..aba1b95 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/tracers.py @@ -0,0 +1,188 @@ +""" +Datadog trace code for flask_cache +""" + +import logging +import typing + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanTypes +from ...ext import db +from ...internal.schema import schematize_cache_operation +from ...internal.schema import schematize_service_name +from .utils import _extract_client +from .utils import _extract_conn_tags +from .utils import _resource_from_cache_prefix + + +if typing.TYPE_CHECKING: # pragma: no cover + from ddtrace import Span # noqa:F401 + + +log = logging.Logger(__name__) + +DEFAULT_SERVICE = config.service or schematize_service_name("flask-cache") + +# standard tags +COMMAND_KEY = "flask_cache.key" +CACHE_BACKEND = "flask_cache.backend" +CONTACT_POINTS = "flask_cache.contact_points" + + +def get_version(): + # type: () -> str + try: + import 
flask_caching + + return getattr(flask_caching, "__version__", "") + except ImportError: + return "" + + +def get_traced_cache(ddtracer, service=DEFAULT_SERVICE, meta=None, cache_cls=None): + """ + Return a traced Cache object that behaves exactly as ``cache_cls``. + + ``cache_cls`` defaults to ``flask.ext.cache.Cache`` if Flask-Cache is installed + or ``flask_caching.Cache`` if flask-caching is installed. + """ + + if cache_cls is None: + # for compatibility reason, first check if flask_cache is present + try: + from flask.ext.cache import Cache + + cache_cls = Cache + except ImportError: + # use flask_caching if flask_cache is not present + from flask_caching import Cache + + cache_cls = Cache + + class TracedCache(cache_cls): + """ + Traced cache backend that monitors any operations done by flask_cache. Observed actions are: + * get, set, add, delete, clear + * all ``many_`` operations + """ + + _datadog_tracer = ddtracer + _datadog_service = service + _datadog_meta = meta + + def __trace(self, cmd): + # type: (str, bool) -> Span + """ + Start a tracing with default attributes and tags + """ + # create a new span + s = self._datadog_tracer.trace( + schematize_cache_operation(cmd, cache_provider="flask_cache"), + span_type=SpanTypes.CACHE, + service=self._datadog_service, + ) + + s.set_tag_str(COMPONENT, config.flask_cache.integration_name) + + s.set_tag(SPAN_MEASURED_KEY) + # set span tags + s.set_tag_str(CACHE_BACKEND, self.config.get("CACHE_TYPE")) + s.set_tags(self._datadog_meta) + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.flask_cache.get_analytics_sample_rate()) + # add connection meta if there is one + client = _extract_client(self.cache) + if client is not None: + try: + s.set_tags(_extract_conn_tags(client)) + except Exception: + log.debug("error parsing connection tags", exc_info=True) + + return s + + def get(self, *args, **kwargs): + """ + Track ``get`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("GET", self.config) + if len(args) > 0: + span.set_tag_str(COMMAND_KEY, args[0]) + result = super(TracedCache, self).get(*args, **kwargs) + span.set_metric(db.ROWCOUNT, 1 if result else 0) + return result + + def set(self, *args, **kwargs): + """ + Track ``set`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("SET", self.config) + if len(args) > 0: + span.set_tag_str(COMMAND_KEY, args[0]) + return super(TracedCache, self).set(*args, **kwargs) + + def add(self, *args, **kwargs): + """ + Track ``add`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("ADD", self.config) + if len(args) > 0: + span.set_tag_str(COMMAND_KEY, args[0]) + return super(TracedCache, self).add(*args, **kwargs) + + def delete(self, *args, **kwargs): + """ + Track ``delete`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("DELETE", self.config) + if len(args) > 0: + span.set_tag_str(COMMAND_KEY, args[0]) + return super(TracedCache, self).delete(*args, **kwargs) + + def delete_many(self, *args, **kwargs): + """ + Track ``delete_many`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("DELETE_MANY", self.config) + span.set_tag(COMMAND_KEY, list(args)) + return super(TracedCache, self).delete_many(*args, **kwargs) + + def clear(self, *args, **kwargs): + """ + Track ``clear`` 
operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("CLEAR", self.config) + return super(TracedCache, self).clear(*args, **kwargs) + + def get_many(self, *args, **kwargs): + """ + Track ``get_many`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("GET_MANY", self.config) + span.set_tag(COMMAND_KEY, list(args)) + result = super(TracedCache, self).get_many(*args, **kwargs) + # get many returns a list, with either the key value or None if it doesn't exist + span.set_metric(db.ROWCOUNT, sum(1 for val in result if val)) + return result + + def set_many(self, *args, **kwargs): + """ + Track ``set_many`` operation + """ + with self.__trace("flask_cache.cmd") as span: + span.resource = _resource_from_cache_prefix("SET_MANY", self.config) + if len(args) > 0: + span.set_tag(COMMAND_KEY, list(args[0].keys())) + return super(TracedCache, self).set_many(*args, **kwargs) + + return TracedCache diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/utils.py new file mode 100644 index 0000000..d7f33ec --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_cache/utils.py @@ -0,0 +1,62 @@ +# project +from ...ext import net +from ..pylibmc.addrs import parse_addresses +from ..trace_utils_redis import _extract_conn_tags as extract_redis_tags + + +def _resource_from_cache_prefix(resource, cache): + """ + Combine the resource name with the cache prefix (if any) + """ + if getattr(cache, "key_prefix", None): + name = "{} {}".format(resource, cache.key_prefix) + else: + name = resource + + # enforce lowercase to make the output nicer to read + return name.lower() + + +def _extract_client(cache): + """ + Get the client from the cache instance according to the current operation + """ + client = getattr(cache, "_client", None) + if client is None: + # flask-caching has _read_clients & _write_client for the redis backend + # These use the same connection so just try to get a reference to one of them. + # flask-caching < 2.0.0 uses _read_clients so look for that one too. + for attr in ("_write_client", "_read_client", "_read_clients"): + client = getattr(cache, attr, None) + if client is not None: + break + return client + + +def _extract_conn_tags(client): + """ + For the given client extracts connection tags + """ + tags = {} + + if hasattr(client, "servers"): + # Memcached backend supports an address pool + if isinstance(client.servers, list) and len(client.servers) > 0: + # use the first address of the pool as a host because + # the code doesn't expose more information + contact_point = client.servers[0].address + tags[net.TARGET_HOST] = contact_point[0] + tags[net.TARGET_PORT] = contact_point[1] + elif hasattr(client, "connection_pool"): + # Redis main connection + redis_tags = extract_redis_tags(client.connection_pool.connection_kwargs) + tags.update(**redis_tags) + elif hasattr(client, "addresses"): + # pylibmc + # FIXME[matt] should we memoize this? 
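+        # only the first parsed address is reported; the unpacking below assumes
+        # each parsed entry is a tuple whose second and third items are the
+        # host and port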
+ addrs = parse_addresses(client.addresses) + if addrs: + _, host, port, _ = addrs[0] + tags[net.TARGET_PORT] = port + tags[net.TARGET_HOST] = host + return tags diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/__init__.py new file mode 100644 index 0000000..84e92e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/__init__.py @@ -0,0 +1,44 @@ +""" +The ``flask_login`` integration implements appsec automatic user login events +when ``DD_APPSEC_ENABLED=1``. This will automatically fill the following tags +when a user tries to log in using ``flask_login`` as an authentication plugin: + +- ``appsec.events.users.login.success.track`` +- ``appsec.events.users.login.failure.track`` +- ``appsec.events.users.login.success.[email|login|username]`` + +Note that, by default, this will be enabled if ``DD_APPSEC_ENABLED=1`` with +``DD_APPSEC_AUTOMATIC_USER_EVENTS_TRACKING`` set to ``safe`` which will store the user's +``id`` but not the username or email. Check the configuration docs to see how to disable this feature entirely, +or set it to extended mode which would also store the username and email or customize the id, email and name +fields to adapt them to your custom ``User`` model. + +Also, since ``flask_login`` is a "roll your own" kind of authentication system, in your main login function, where you +check the user password (usually with ``check_password_hash``) you must manually call +``track_user_login_failure_event(tracer, user_id, exists)`` to store the correct tags for authentication failure. As +a helper, you can call ``flask_login.login_user`` with a user object with a ``get_id()`` returning ``-1`` to +automatically set the tags for a login failure where the user doesn't exist. + + +Enabling +~~~~~~~~ + +This integration is enabled automatically when using ``DD_APPSEC_ENABLED=1`. Use +``DD_APPSEC_AUTOMATIC_USER_EVENTS_TRACKING=disabled`` to explicitly disable it. +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["flask_login"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = [ + "get_version", + "patch", + "unpatch", + ] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/patch.py new file mode 100644 index 0000000..4eaffb2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/flask_login/patch.py @@ -0,0 +1,108 @@ +import flask +import flask_login + +from ddtrace import Pin +from ddtrace.appsec.trace_utils import track_user_login_failure_event +from ddtrace.appsec.trace_utils import track_user_login_success_event +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...appsec._utils import _UserInfoRetriever +from ...ext import SpanTypes +from ...internal.utils import get_argument_value +from .. 
import trace_utils +from ..flask.wrappers import get_current_app + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return flask_login.__version__ + + +class _FlaskLoginUserInfoRetriever(_UserInfoRetriever): + def get_userid(self): + if hasattr(self.user, "get_id") and not asm_config._user_model_login_field: + return self.user.get_id() + + return super(_FlaskLoginUserInfoRetriever, self).get_userid() + + +def traced_login_user(func, instance, args, kwargs): + pin = Pin._find(func, instance, get_current_app()) + ret = func(*args, **kwargs) + + try: + mode = asm_config._automatic_login_events_mode + if not asm_config._asm_enabled or mode == "disabled": + return ret + + user = get_argument_value(args, kwargs, 0, "user") + if not user: + track_user_login_failure_event(pin.tracer, user_id=None, exists=False, login_events_mode=mode) + return ret + + if hasattr(user, "is_anonymous") and user.is_anonymous: + return ret + + if not isinstance(user, flask_login.UserMixin): + log.debug( + "Automatic Login Events Tracking: flask_login User models not inheriting from UserMixin not supported", + ) + return ret + + info_retriever = _FlaskLoginUserInfoRetriever(user) + user_id, user_extra = info_retriever.get_user_info() + if user_id == -1: + with pin.tracer.trace("flask_login.login_user", span_type=SpanTypes.AUTH): + track_user_login_failure_event(pin.tracer, user_id="missing", exists=False, login_events_mode=mode) + return ret + if not user_id: + track_user_login_failure_event(pin.tracer, user_id=None, exists=False, login_events_mode=mode) + log.debug( + "Automatic Login Events Tracking: Could not determine user id field user for the %s user Model; " + "set DD_USER_MODEL_LOGIN_FIELD to the name of the field used for the user id or implement the " + "get_id method for your model", + type(user), + ) + return ret + + with pin.tracer.trace("flask_login.login_user", span_type=SpanTypes.AUTH): + session_key = flask.session.get("_id", None) + track_user_login_success_event( + pin.tracer, + user_id=user_id, + session_id=session_key, + propagate=True, + login_events_mode=mode, + **user_extra, + ) + except Exception: + log.debug("Error while trying to trace flask_login.login_user", exc_info=True) + + return ret + + +def patch(): + if not asm_config._asm_enabled: + return + + if getattr(flask_login, "_datadog_patch", False): + return + + Pin().onto(flask_login) + _w("flask_login", "login_user", traced_login_user) + flask_login._datadog_patch = True + + +def unpatch(): + import flask_login + + if not getattr(flask_login, "_datadog_patch", False): + return + + trace_utils.unwrap(flask_login, "login_user") + flask_login._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/__init__.py new file mode 100644 index 0000000..c11b61b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/__init__.py @@ -0,0 +1,34 @@ +""" +The ``futures`` integration propagates the current active tracing context +to tasks spawned using a :class:`~concurrent.futures.ThreadPoolExecutor`. +The integration ensures that when operations are executed in another thread, +those operations can continue the previously generated trace. + + +Enabling +~~~~~~~~ + +The futures integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. 
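+
+Once enabled, a task submitted to a :class:`~concurrent.futures.ThreadPoolExecutor`
+continues the trace that was active when ``submit()`` was called. A minimal
+sketch (the span names below are illustrative only)::
+
+    from concurrent.futures import ThreadPoolExecutor
+
+    from ddtrace import tracer
+
+    def task():
+        # this span becomes a child of the "parent.operation" span below
+        with tracer.trace("worker.task"):
+            return 42
+
+    with tracer.trace("parent.operation"):
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            result = executor.submit(task).result()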
+ +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(futures=True) +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["concurrent.futures"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = [ + "get_version", + "patch", + "unpatch", + ] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/patch.py new file mode 100644 index 0000000..86687d7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/patch.py @@ -0,0 +1,43 @@ +import sys + +from ddtrace.internal.wrapping import unwrap as _u +from ddtrace.internal.wrapping import wrap as _w + +from .threading import _wrap_submit + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + """Enables Context Propagation between threads""" + try: + # Ensure that we get hold of the reloaded module if module cleanup was + # performed. + thread = sys.modules["concurrent.futures.thread"] + except KeyError: + import concurrent.futures.thread as thread + + if getattr(thread, "__datadog_patch", False): + return + thread.__datadog_patch = True + + _w(thread.ThreadPoolExecutor.submit, _wrap_submit) + + +def unpatch(): + """Disables Context Propagation between threads""" + try: + # Ensure that we get hold of the reloaded module if module cleanup was + # performed. + thread = sys.modules["concurrent.futures.thread"] + except KeyError: + return + + if not getattr(thread, "__datadog_patch", False): + return + thread.__datadog_patch = False + + _u(thread.ThreadPoolExecutor.submit, _wrap_submit) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/threading.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/threading.py new file mode 100644 index 0000000..ab67e21 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/futures/threading.py @@ -0,0 +1,44 @@ +import ddtrace + + +def _wrap_submit(func, args, kwargs): + """ + Wrap `Executor` method used to submit a work executed in another + thread. This wrapper ensures that a new `Context` is created and + properly propagated using an intermediate function. + """ + # If there isn't a currently active context, then do not create one + # DEV: Calling `.active()` when there isn't an active context will create a new context + # DEV: We need to do this in case they are either: + # - Starting nested futures + # - Starting futures from outside of an existing context + # + # In either of these cases we essentially will propagate the wrong context between futures + # + # The resolution is to not create/propagate a new context if one does not exist, but let the + # future's thread create the context instead. 
+ current_ctx = None + if ddtrace.tracer.context_provider._has_active_context(): + current_ctx = ddtrace.tracer.context_provider.active() + + # The target function can be provided as a kwarg argument "fn" or the first positional argument + self = args[0] + if "fn" in kwargs: + fn = kwargs.pop("fn") + fn_args = args[1:] + else: + fn, fn_args = args[1], args[2:] + return func(self, _wrap_execution, current_ctx, fn, fn_args, kwargs) + + +def _wrap_execution(ctx, fn, args, kwargs): + """ + Intermediate target function that is executed in a new thread; + it receives the original function with arguments and keyword + arguments, including our tracing `Context`. The current context + provider sets the Active context in a thread local storage + variable because it's outside the asynchronous loop. + """ + if ctx is not None: + ddtrace.tracer.context_provider.activate(ctx) + return fn(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/__init__.py new file mode 100644 index 0000000..495a1db --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/__init__.py @@ -0,0 +1,72 @@ +""" +The gevent integration adds support for tracing across greenlets. + + +The integration patches the gevent internals to add context management logic. + +.. note:: + If ``ddtrace-run`` is not being used then be sure to ``import ddtrace.auto`` + before calling ``gevent.monkey.patch_all``. + If ``ddtrace-run`` is being used then no additional configuration is required. + + +The integration also configures the global tracer instance to use a gevent +context provider to utilize the context management logic. + +If custom tracer instances are being used in a gevent application, then +configure it with:: + + from ddtrace.contrib.gevent import context_provider + + tracer.configure(context_provider=context_provider) + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(gevent=True) + + +Example of the context propagation:: + + def my_parent_function(): + with tracer.trace("web.request") as span: + span.service = "web" + gevent.spawn(worker_function) + + + def worker_function(): + # then trace its child + with tracer.trace("greenlet.call") as span: + span.service = "greenlet" + ... + + with tracer.trace("greenlet.child_call") as child: + ... 
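+
+A parent greenlet can also wait for the greenlets it spawns so that the child
+spans finish while the parent span is still open (a sketch; the function and
+span names are illustrative)::
+
+    def my_parent_function_with_join():
+        with tracer.trace("web.request") as span:
+            span.service = "web"
+            jobs = [gevent.spawn(worker_function) for _ in range(3)]
+            gevent.joinall(jobs)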
+""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["gevent"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + from .provider import GeventContextProvider + + context_provider = GeventContextProvider() + + __all__ = [ + "patch", + "unpatch", + "context_provider", + "get_version" + ] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/greenlet.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/greenlet.py new file mode 100644 index 0000000..6140964 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/greenlet.py @@ -0,0 +1,60 @@ +import gevent +import gevent.pool as gpool + +from .provider import GeventContextProvider + + +GEVENT_VERSION = gevent.version_info[0:3] + + +class TracingMixin(object): + def __init__(self, *args, **kwargs): + # get the active context/span if available + current_g = gevent.getcurrent() + ctx = getattr(current_g, GeventContextProvider._CONTEXT_ATTR, None) + + # create the Greenlet as usual + super(TracingMixin, self).__init__(*args, **kwargs) + + # copy the active span/context into the new greenlet + if ctx: + setattr(self, GeventContextProvider._CONTEXT_ATTR, ctx) + + +class TracedGreenlet(TracingMixin, gevent.Greenlet): + """ + ``Greenlet`` class that is used to replace the original ``gevent`` + class. This class is supposed to do ``Context`` replacing operation, so + that any greenlet inherits the context from the parent Greenlet. + When a new greenlet is spawned from the main greenlet, a new instance + of ``Context`` is created. The main greenlet is not affected by this behavior. + + There is no need to inherit this class to create or optimize greenlets + instances, because this class replaces ``gevent.greenlet.Greenlet`` + through the ``patch()`` method. After the patch, extending the gevent + ``Greenlet`` class means extending automatically ``TracedGreenlet``. 
+ """ + + def __init__(self, *args, **kwargs): + super(TracedGreenlet, self).__init__(*args, **kwargs) + + +class TracedIMapUnordered(TracingMixin, gpool.IMapUnordered): + def __init__(self, *args, **kwargs): + super(TracedIMapUnordered, self).__init__(*args, **kwargs) + + +if GEVENT_VERSION >= (1, 3) or GEVENT_VERSION < (1, 1): + # For gevent <1.1 and >=1.3, IMap is its own class, so we derive + # from TracingMixin + class TracedIMap(TracingMixin, gpool.IMap): + def __init__(self, *args, **kwargs): + super(TracedIMap, self).__init__(*args, **kwargs) + + +else: + # For gevent >=1.1 and <1.3, IMap derives from IMapUnordered, so we derive + # from TracedIMapUnordered and get tracing that way + class TracedIMap(gpool.IMap, TracedIMapUnordered): + def __init__(self, *args, **kwargs): + super(TracedIMap, self).__init__(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/patch.py new file mode 100644 index 0000000..5aa7035 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/patch.py @@ -0,0 +1,80 @@ +import gevent +import gevent.pool + +import ddtrace + +from ...provider import DefaultContextProvider +from .greenlet import GEVENT_VERSION +from .greenlet import TracedGreenlet +from .greenlet import TracedIMap +from .greenlet import TracedIMapUnordered +from .provider import GeventContextProvider + + +__Greenlet = gevent.Greenlet +__IMap = gevent.pool.IMap +__IMapUnordered = gevent.pool.IMapUnordered + + +def get_version(): + # type: () -> str + return getattr(gevent, "__version__", "") + + +def patch(): + """ + Patch the gevent module so that all references to the + internal ``Greenlet`` class points to the ``DatadogGreenlet`` + class. + + This action ensures that if a user extends the ``Greenlet`` + class, the ``TracedGreenlet`` is used as a parent class. + """ + if getattr(gevent, "__datadog_patch", False): + return + gevent.__datadog_patch = True + + _replace(TracedGreenlet, TracedIMap, TracedIMapUnordered) + ddtrace.tracer.configure(context_provider=GeventContextProvider()) + + +def unpatch(): + """ + Restore the original ``Greenlet``. This function must be invoked + before executing application code, otherwise the ``DatadogGreenlet`` + class may be used during initialization. + """ + if not getattr(gevent, "__datadog_patch", False): + return + gevent.__datadog_patch = False + + _replace(__Greenlet, __IMap, __IMapUnordered) + ddtrace.tracer.configure(context_provider=DefaultContextProvider()) + + +def _replace(g_class, imap_class, imap_unordered_class): + """ + Utility function that replace the gevent Greenlet class with the given one. 
+ """ + # replace the original Greenlet classes with the new one + gevent.greenlet.Greenlet = g_class + + if GEVENT_VERSION >= (1, 3): + # For gevent >= 1.3.0, IMap and IMapUnordered were pulled out of + # gevent.pool and into gevent._imap + gevent._imap.IMap = imap_class + gevent._imap.IMapUnordered = imap_unordered_class + gevent.pool.IMap = gevent._imap.IMap + gevent.pool.IMapUnordered = gevent._imap.IMapUnordered + gevent.pool.Greenlet = gevent.greenlet.Greenlet + else: + # For gevent < 1.3, only patching of gevent.pool classes necessary + gevent.pool.IMap = imap_class + gevent.pool.IMapUnordered = imap_unordered_class + + gevent.pool.Group.greenlet_class = g_class + + # replace gevent shortcuts + gevent.Greenlet = gevent.greenlet.Greenlet + gevent.spawn = gevent.greenlet.Greenlet.spawn + gevent.spawn_later = gevent.greenlet.Greenlet.spawn_later diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/provider.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/provider.py new file mode 100644 index 0000000..13b05d3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gevent/provider.py @@ -0,0 +1,42 @@ +import gevent + +from ...provider import BaseContextProvider +from ...provider import DatadogContextMixin +from ...span import Span + + +class GeventContextProvider(BaseContextProvider, DatadogContextMixin): + """Manages the active context for gevent execution. + + This provider depends on corresponding monkey patches to copy the active + context from one greenlet to another. + """ + + # Greenlet attribute used to set/get the context + _CONTEXT_ATTR = "__datadog_context" + + def _get_current_context(self): + """Helper to get the active context from the current greenlet.""" + current_g = gevent.getcurrent() + if current_g is not None: + return getattr(current_g, self._CONTEXT_ATTR, None) + return None + + def _has_active_context(self): + """Helper to determine if there is an active context.""" + return self._get_current_context() is not None + + def activate(self, context): + """Sets the active context for the current running ``Greenlet``.""" + current_g = gevent.getcurrent() + if current_g is not None: + setattr(current_g, self._CONTEXT_ATTR, context) + super(GeventContextProvider, self).activate(context) + return context + + def active(self): + """Returns the active context for this execution flow.""" + ctx = self._get_current_context() + if isinstance(ctx, Span): + return self._update_active(ctx) + return ctx diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/__init__.py new file mode 100644 index 0000000..9e3b49d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/__init__.py @@ -0,0 +1,58 @@ +""" +This integration instruments ``graphql-core`` queries. + +Enabling +~~~~~~~~ + +The graphql integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch() ` to manually enable the integration:: + + from ddtrace import patch + patch(graphql=True) + import graphql + ... + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.graphql["service"] + + The service name reported by default for graphql instances. + + This option can also be set with the ``DD_SERVICE`` environment + variable. + + Default: ``"graphql"`` + +.. 
py:data:: ddtrace.config.graphql["resolvers_enabled"] + + To enable ``graphql.resolve`` spans set ``DD_TRACE_GRAPHQL_RESOLVERS_ENABLED`` to True + + Default: ``False`` + + Enabling instrumentation for resolvers will produce a ``graphql.resolve`` span for every graphql field. + For complex graphql queries this could produce large traces. + + +To configure the graphql integration using the +``Pin`` API:: + + from ddtrace import Pin + import graphql + + Pin.override(graphql, service="mygraphql") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["graphql"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/patch.py new file mode 100644 index 0000000..115c83a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/graphql/patch.py @@ -0,0 +1,321 @@ +import os +import re +import sys +from typing import TYPE_CHECKING +from typing import List + +from ddtrace import Span +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable # noqa:F401 + from typing import Dict # noqa:F401 + from typing import Iterable # noqa:F401 + from typing import Tuple # noqa:F401 + from typing import Union # noqa:F401 + + +import graphql +from graphql import MiddlewareManager +from graphql.error import GraphQLError +from graphql.execution import ExecutionResult +from graphql.language.source import Source + +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import ERROR_MSG +from ddtrace.constants import ERROR_TYPE +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.utils import ArgumentError +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils import set_argument_value +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.version import parse_version +from ddtrace.internal.wrapping import unwrap +from ddtrace.internal.wrapping import wrap +from ddtrace.pin import Pin + +from ...ext import SpanTypes +from .. 
import trace_utils + + +_graphql_version_str = graphql.__version__ +_graphql_version = parse_version(_graphql_version_str) + +if _graphql_version < (3, 0): + from graphql.language.ast import Document +else: + from graphql.language.ast import DocumentNode as Document + + +def get_version(): + # type: () -> str + return _graphql_version_str + + +config._add( + "graphql", + dict( + _default_service=schematize_service_name("graphql"), + resolvers_enabled=asbool(os.getenv("DD_TRACE_GRAPHQL_RESOLVERS_ENABLED", default=False)), + ), +) + +_GRAPHQL_SOURCE = "graphql.source" +_GRAPHQL_OPERATION_TYPE = "graphql.operation.type" +_GRAPHQL_OPERATION_NAME = "graphql.operation.name" + + +def patch(): + if getattr(graphql, "_datadog_patch", False): + return + graphql._datadog_patch = True + Pin().onto(graphql) + + for module_str, func_name, wrapper in _get_patching_candidates(): + _update_patching(wrap, module_str, func_name, wrapper) + + +def unpatch(): + if not getattr(graphql, "_datadog_patch", False) or _graphql_version < (2, 0): + return + + for module_str, func_name, wrapper in _get_patching_candidates(): + _update_patching(unwrap, module_str, func_name, wrapper) + + graphql._datadog_patch = False + + +def _get_patching_candidates(): + if _graphql_version < (3, 0): + return [ + ("graphql.graphql", "execute_graphql", _traced_query), + ("graphql.language.parser", "parse", _traced_parse), + ("graphql.validation.validation", "validate", _traced_validate), + ("graphql.execution.executor", "execute", _traced_execute), + ] + return [ + ("graphql.graphql", "graphql_impl", _traced_query), + ("graphql.language.parser", "parse", _traced_parse), + ("graphql.validation.validate", "validate", _traced_validate), + ("graphql.execution.execute", "execute", _traced_execute), + ] + + +def _update_patching(operation, module_str, func_name, wrapper): + module = sys.modules[module_str] + func = getattr(module, func_name) + operation(func, wrapper) + + +def _traced_parse(func, args, kwargs): + pin = Pin.get_from(graphql) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + source = get_argument_value(args, kwargs, 0, "source") + source_str = _get_source_str(source) + # If graphql.parse() is called outside graphql.graphql(), graphql.parse will + # be a top level span. Therefore we must explicitly set the service name. + with pin.tracer.trace( + name="graphql.parse", + service=trace_utils.int_service(pin, config.graphql), + span_type=SpanTypes.GRAPHQL, + ) as span: + span.set_tag_str(COMPONENT, config.graphql.integration_name) + + span.set_tag_str(_GRAPHQL_SOURCE, source_str) + return func(*args, **kwargs) + + +def _traced_validate(func, args, kwargs): + pin = Pin.get_from(graphql) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + document = get_argument_value(args, kwargs, 1, "ast") + source_str = _get_source_str(document) + # If graphql.validate() is called outside graphql.graphql(), graphql.validate will + # be a top level span. Therefore we must explicitly set the service name. 
+ with pin.tracer.trace( + name="graphql.validate", + service=trace_utils.int_service(pin, config.graphql), + span_type=SpanTypes.GRAPHQL, + ) as span: + span.set_tag_str(COMPONENT, config.graphql.integration_name) + + span.set_tag_str(_GRAPHQL_SOURCE, source_str) + errors = func(*args, **kwargs) + _set_span_errors(errors, span) + return errors + + +def _traced_execute(func, args, kwargs): + pin = Pin.get_from(graphql) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + if config.graphql.resolvers_enabled: + # patch resolvers + args, kwargs = _inject_trace_middleware_to_args(_resolver_middleware, args, kwargs) + + # set resource name + if _graphql_version < (3, 0): + document = get_argument_value(args, kwargs, 1, "document_ast") + else: + document = get_argument_value(args, kwargs, 1, "document") + source_str = _get_source_str(document) + + with pin.tracer.trace( + name="graphql.execute", + resource=source_str, + service=trace_utils.int_service(pin, config.graphql), + span_type=SpanTypes.GRAPHQL, + ) as span: + span.set_tag_str(COMPONENT, config.graphql.integration_name) + + span.set_tag(SPAN_MEASURED_KEY) + + _set_span_operation_tags(span, document) + span.set_tag_str(_GRAPHQL_SOURCE, source_str) + + result = func(*args, **kwargs) + if isinstance(result, ExecutionResult): + # set error tags if the result contains a list of GraphqlErrors, skip if it's a promise + _set_span_errors(result.errors, span) + return result + + +def _traced_query(func, args, kwargs): + pin = Pin.get_from(graphql) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # set resource name + source = get_argument_value(args, kwargs, 1, "source") + resource = _get_source_str(source) + + with pin.tracer.trace( + name=schematize_url_operation("graphql.request", protocol="graphql", direction=SpanDirection.INBOUND), + resource=resource, + service=trace_utils.int_service(pin, config.graphql), + span_type=SpanTypes.GRAPHQL, + ) as span: + span.set_tag_str(COMPONENT, config.graphql.integration_name) + + # mark span as measured and set sample rate + span.set_tag(SPAN_MEASURED_KEY) + sample_rate = config.graphql.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + result = func(*args, **kwargs) + if isinstance(result, ExecutionResult): + # set error tags if the result contains a list of GraphqlErrors, skip if it's a promise + # If the wrapped validate and execute functions return a list of errors we will duplicate + # the span errors here. + _set_span_errors(result.errors, span) + return result + + +def _resolver_middleware(next_middleware, root, info, **args): + """ + trace middleware which wraps the resolvers of graphql fields. + Note - graphql middlewares can not be a partial. It must be a class or a function. + """ + pin = Pin.get_from(graphql) + if not pin or not pin.enabled(): + return next_middleware(root, info, **args) + + with pin.tracer.trace( + name="graphql.resolve", + resource=info.field_name, + span_type=SpanTypes.GRAPHQL, + ) as span: + span.set_tag_str(COMPONENT, config.graphql.integration_name) + + return next_middleware(root, info, **args) + + +def _inject_trace_middleware_to_args(trace_middleware, args, kwargs): + # type: (Callable, Tuple, Dict) -> Tuple[Tuple, Dict] + """ + Adds a trace middleware to graphql.execute(..., middleware, ...) + """ + middlewares_arg = 8 + if _graphql_version >= (3, 2): + # middleware is the 10th argument graphql.execute(..) 
version 3.2+ + middlewares_arg = 9 + + # get middlewares from args or kwargs + try: + middlewares = get_argument_value(args, kwargs, middlewares_arg, "middleware") or [] + if isinstance(middlewares, MiddlewareManager): + # First we must get the middlewares iterable from the MiddlewareManager then append + # trace_middleware. For the trace_middleware to be called a new MiddlewareManager will + # need to initialized. This is handled in graphql.execute(): + # https://github.com/graphql-python/graphql-core/blob/v3.2.1/src/graphql/execution/execute.py#L254 + middlewares = middlewares.middlewares # type: Iterable + except ArgumentError: + middlewares = [] + + # Note - graphql middlewares are called in reverse order + # add trace_middleware to the end of the list to wrap the execution of resolver and all middlewares + middlewares = list(middlewares) + [trace_middleware] + + # update args and kwargs to contain trace_middleware + args, kwargs = set_argument_value(args, kwargs, middlewares_arg, "middleware", middlewares) + return args, kwargs + + +def _get_source_str(obj): + # type: (Union[str, Source, Document]) -> str + """ + Parses graphql Documents and Source objects to retrieve + the graphql source input for a request. + """ + if isinstance(obj, str): + source_str = obj + elif isinstance(obj, Source): + source_str = obj.body + elif isinstance(obj, Document) and obj.loc is not None: + source_str = obj.loc.source.body + else: + source_str = "" + # remove new lines, tabs and extra whitespace from source_str + return re.sub(r"\s+", " ", source_str).strip() + + +def _set_span_errors(errors: List[GraphQLError], span: Span) -> None: + if not errors: + # do nothing if the list of graphql errors is empty + return + + span.error = 1 + exc_type_str = "%s.%s" % (GraphQLError.__module__, GraphQLError.__name__) + span.set_tag_str(ERROR_TYPE, exc_type_str) + error_msgs = "\n".join([str(error) for error in errors]) + # Since we do not support adding and visualizing multiple tracebacks to one span + # we will not set the error.stack tag on graphql spans. Setting only one traceback + # could be misleading and might obfuscate errors. + span.set_tag_str(ERROR_MSG, error_msgs) + + +def _set_span_operation_tags(span, document): + operation_def = graphql.get_operation_ast(document) + if not operation_def: + return + + # operation_def.operation should never be None + if _graphql_version < (3, 0): + span.set_tag_str(_GRAPHQL_OPERATION_TYPE, operation_def.operation) + else: + # OperationDefinition.operation is an Enum in graphql-core>=3 + span.set_tag_str(_GRAPHQL_OPERATION_TYPE, operation_def.operation.value) + + if operation_def.name: + span.set_tag_str(_GRAPHQL_OPERATION_NAME, operation_def.name.value) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/__init__.py new file mode 100644 index 0000000..6fbc970 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/__init__.py @@ -0,0 +1,89 @@ +""" +The gRPC integration traces the client and server using the interceptor pattern. + + +Enabling +~~~~~~~~ + +The gRPC integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(grpc=True) + + # use grpc like usual + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.grpc["service"] + + The service name reported by default for gRPC client instances. 
+ + This option can also be set with the ``DD_GRPC_SERVICE`` environment + variable. + + Default: ``"grpc-client"`` + +.. py:data:: ddtrace.config.grpc_server["service"] + + The service name reported by default for gRPC server instances. + + This option can also be set with the ``DD_SERVICE`` or + ``DD_GRPC_SERVER_SERVICE`` environment variables. + + Default: ``"grpc-server"`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the gRPC integration on an per-channel basis use the +``Pin`` API:: + + import grpc + from ddtrace import Pin, patch, Tracer + + patch(grpc=True) + custom_tracer = Tracer() + + # override the pin on the client + Pin.override(grpc.Channel, service='mygrpc', tracer=custom_tracer) + with grpc.insecure_channel('localhost:50051') as channel: + # create stubs and send requests + pass + +To configure the gRPC integration on the server use the ``Pin`` API:: + + import grpc + from grpc.framework.foundation import logging_pool + + from ddtrace import Pin, patch, Tracer + + patch(grpc=True) + custom_tracer = Tracer() + + # override the pin on the server + Pin.override(grpc.Server, service='mygrpc', tracer=custom_tracer) + server = grpc.server(logging_pool.pool(2)) + server.add_insecure_port('localhost:50051') + add_MyServicer_to_server(MyServicer(), server) + server.start() +""" + + +from ...internal.utils.importlib import require_modules + + +required_modules = ["grpc"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_client_interceptor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_client_interceptor.py new file mode 100644 index 0000000..2e08c64 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_client_interceptor.py @@ -0,0 +1,252 @@ +import asyncio +import functools +from typing import Callable # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +import grpc +from grpc import aio +from grpc.aio._typing import RequestIterableType +from grpc.aio._typing import RequestType +from grpc.aio._typing import ResponseIterableType +from grpc.aio._typing import ResponseType + +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ... import Pin +from ... import Span +from ... import config +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import ERROR_MSG +from ...constants import ERROR_TYPE +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import to_unicode +from ...propagation.http import HTTPPropagator +from .. import trace_utils +from ..grpc import constants +from ..grpc import utils + + +def create_aio_client_interceptors(pin, host, port): + # type: (Pin, str, int) -> Tuple[aio.ClientInterceptor, ...] 
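+    # Build one interceptor per RPC kind so unary-unary, unary-stream,
+    # stream-unary and stream-stream client calls are all traced.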
+ return ( + _UnaryUnaryClientInterceptor(pin, host, port), + _UnaryStreamClientInterceptor(pin, host, port), + _StreamUnaryClientInterceptor(pin, host, port), + _StreamStreamClientInterceptor(pin, host, port), + ) + + +def _done_callback(span, code, details): + # type: (Span, grpc.StatusCode, str) -> Callable[[aio.Call], None] + def func(call): + # type: (aio.Call) -> None + try: + span.set_tag_str(constants.GRPC_STATUS_CODE_KEY, to_unicode(code)) + + # Handle server-side error in unary response RPCs + if code != grpc.StatusCode.OK: + _handle_error(span, call, code, details) + finally: + span.finish() + + return func + + +def _handle_error(span, call, code, details): + # type: (Span, aio.Call, grpc.StatusCode, str) -> None + span.error = 1 + span.set_tag_str(ERROR_MSG, details) + span.set_tag_str(ERROR_TYPE, to_unicode(code)) + + +def _handle_rpc_error(span, rpc_error): + # type: (Span, aio.AioRpcError) -> None + code = to_unicode(rpc_error.code()) + span.error = 1 + span.set_tag_str(constants.GRPC_STATUS_CODE_KEY, code) + span.set_tag_str(ERROR_MSG, rpc_error.details()) + span.set_tag_str(ERROR_TYPE, code) + span.finish() + + +async def _handle_cancelled_error(call, span): + # type: (aio.Call, Span) -> None + code = to_unicode(await call.code()) + span.error = 1 + span.set_tag_str(constants.GRPC_STATUS_CODE_KEY, code) + span.set_tag_str(ERROR_MSG, await call.details()) + span.set_tag_str(ERROR_TYPE, code) + span.finish() + + +class _ClientInterceptor: + def __init__(self, pin: Pin, host: str, port: int) -> None: + self._pin = pin + self._host = host + self._port = port + + def _intercept_client_call(self, method_kind, client_call_details): + # type: (str, aio.ClientCallDetails) -> Tuple[Span, aio.ClientCallDetails] + tracer = self._pin.tracer + + method_as_str = client_call_details.method.decode() + span = tracer.trace( + schematize_url_operation("grpc", protocol="grpc", direction=SpanDirection.OUTBOUND), + span_type=SpanTypes.GRPC, + service=trace_utils.ext_service(self._pin, config.grpc_aio_client), + resource=method_as_str, + ) + + span.set_tag_str(COMPONENT, config.grpc_aio_client.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + utils.set_grpc_method_meta(span, method_as_str, method_kind) + utils.set_grpc_client_meta(span, self._host, self._port) + span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_CLIENT) + + sample_rate = config.grpc_aio_client.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + # inject tags from pin + if self._pin.tags: + span.set_tags(self._pin.tags) + + # propagate distributed tracing headers if available + headers = {} + if config.grpc_aio_client.distributed_tracing_enabled: + HTTPPropagator.inject(span.context, headers) + + metadata = [] + if client_call_details.metadata is not None: + metadata = list(client_call_details.metadata) + metadata.extend(headers.items()) + + client_call_details = aio.ClientCallDetails( + client_call_details.method, + client_call_details.timeout, + metadata, + client_call_details.credentials, + client_call_details.wait_for_ready, + ) + + return span, client_call_details + + # NOTE: Since this function is executed as an async generator when the RPC is called, + # `continuation` must be called before the RPC. 
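+    # The stream interceptors defined below therefore await `continuation`
+    # first and pass the resulting call object into this wrapper.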
+ async def _wrap_stream_response( + self, + call: Union[aio.StreamStreamCall, aio.UnaryStreamCall], + span: Span, + ) -> ResponseIterableType: + try: + async for response in call: + yield response + code = await call.code() + details = await call.details() + # NOTE: The callback is registered after the iteration is done, + # otherwise `call.code()` and `call.details()` block indefinitely. + call.add_done_callback(_done_callback(span, code, details)) + except aio.AioRpcError as rpc_error: + # NOTE: We can also handle the error in done callbacks, + # but reuse this error handling function used in unary response RPCs. + _handle_rpc_error(span, rpc_error) + raise + except asyncio.CancelledError: + # NOTE: We can't handle the cancelled error in done callbacks + # because they cannot handle awaitable functions. + await _handle_cancelled_error(call, span) + raise + + # NOTE: `continuation` must be called inside of this function to catch exceptions. + async def _wrap_unary_response( + self, + continuation: Callable[[], Union[aio.StreamUnaryCall, aio.UnaryUnaryCall]], + span: Span, + ): + # type: (...) -> Union[aio.StreamUnaryCall, aio.UnaryUnaryCall] + try: + call = await continuation() + code = await call.code() + details = await call.details() + # NOTE: As both `code` and `details` are available after the RPC is done (= we get `call` object), + # and we can't call awaitable functions inside the non-async callback, + # there is no other way but to register the callback here. + call.add_done_callback(_done_callback(span, code, details)) + return call + except aio.AioRpcError as rpc_error: + # NOTE: `AioRpcError` is raised in `await continuation(...)` + # and `call` object is not assigned yet in that case. + # So we can't handle the error in done callbacks. 
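+            # Instead, tag the span with the error details and finish it here.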
+ _handle_rpc_error(span, rpc_error) + raise + + +class _UnaryUnaryClientInterceptor(aio.UnaryUnaryClientInterceptor, _ClientInterceptor): + async def intercept_unary_unary( + self, + continuation: Callable[[aio.ClientCallDetails, RequestType], aio.UnaryUnaryCall], + client_call_details: aio.ClientCallDetails, + request: RequestType, + ) -> Union[aio.UnaryUnaryCall, ResponseType]: + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_UNARY, + client_call_details, + ) + continuation_with_args = functools.partial(continuation, client_call_details, request) + return await self._wrap_unary_response(continuation_with_args, span) + + +class _UnaryStreamClientInterceptor(aio.UnaryStreamClientInterceptor, _ClientInterceptor): + async def intercept_unary_stream( + self, + continuation: Callable[[aio.ClientCallDetails, RequestType], aio.UnaryStreamCall], + client_call_details: aio.ClientCallDetails, + request: RequestType, + ) -> Union[aio.UnaryStreamCall, ResponseIterableType]: + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_SERVER_STREAMING, + client_call_details, + ) + call = await continuation(client_call_details, request) + return self._wrap_stream_response(call, span) + + +class _StreamUnaryClientInterceptor(aio.StreamUnaryClientInterceptor, _ClientInterceptor): + async def intercept_stream_unary( + self, + continuation: Callable[[aio.ClientCallDetails, RequestType], aio.StreamUnaryCall], + client_call_details: aio.ClientCallDetails, + request_iterator: RequestIterableType, + ) -> aio.StreamUnaryCall: + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_CLIENT_STREAMING, + client_call_details, + ) + continuation_with_args = functools.partial(continuation, client_call_details, request_iterator) + return await self._wrap_unary_response(continuation_with_args, span) + + +class _StreamStreamClientInterceptor(aio.StreamStreamClientInterceptor, _ClientInterceptor): + async def intercept_stream_stream( + self, + continuation: Callable[[aio.ClientCallDetails, RequestType], aio.StreamStreamCall], + client_call_details: aio.ClientCallDetails, + request_iterator: RequestIterableType, + ) -> Union[aio.StreamStreamCall, ResponseIterableType]: + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_BIDI_STREAMING, + client_call_details, + ) + call = await continuation(client_call_details, request_iterator) + return self._wrap_stream_response(call, span) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_server_interceptor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_server_interceptor.py new file mode 100644 index 0000000..11e0a27 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/aio_server_interceptor.py @@ -0,0 +1,301 @@ +import inspect +from typing import Any # noqa:F401 +from typing import Awaitable # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Iterable # noqa:F401 +from typing import Union # noqa:F401 + +import grpc +from grpc import aio +from grpc.aio._typing import RequestIterableType +from grpc.aio._typing import RequestType +from grpc.aio._typing import ResponseIterableType +from grpc.aio._typing import ResponseType + +from ddtrace import Pin # noqa:F401 +from ddtrace import Span # noqa:F401 +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_url_operation +from 
ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import ERROR_MSG +from ...constants import ERROR_TYPE +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import to_unicode +from .. import trace_utils +from ..grpc import constants +from ..grpc.utils import set_grpc_method_meta + + +Continuation = Callable[[grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]] + + +# Used to get a status code from integer +# as `grpc._cython.cygrpc._ServicerContext.code()` returns an integer. +_INT2CODE = {s.value[0]: s for s in grpc.StatusCode} + + +def _is_coroutine_handler(handler): + # type: (grpc.RpcMethodHandler) -> bool + if not handler.request_streaming and not handler.response_streaming: + return inspect.iscoroutinefunction(handler.unary_unary) + elif not handler.request_streaming and handler.response_streaming: + return inspect.iscoroutinefunction(handler.unary_stream) + elif handler.request_streaming and not handler.response_streaming: + return inspect.iscoroutinefunction(handler.stream_unary) + else: + return inspect.iscoroutinefunction(handler.stream_stream) + + +def _is_async_gen_handler(handler): + # type: (grpc.RpcMethodHandler) -> bool + if not handler.response_streaming: + return False + if handler.request_streaming: + return inspect.isasyncgenfunction(handler.stream_stream) + else: + return inspect.isasyncgenfunction(handler.unary_stream) + + +def create_aio_server_interceptor(pin): + # type: (Pin) -> _ServerInterceptor + async def interceptor_function( + continuation, # type: Continuation + handler_call_details, # type: grpc.HandlerCallDetails + ): + # type: (...) -> Union[TracedRpcMethodHandlerType, None] + rpc_method_handler = await continuation(handler_call_details) + + # continuation returns an RpcMethodHandler instance if the RPC is + # considered serviced, or None otherwise + # https://grpc.github.io/grpc/python/grpc.html#grpc.ServerInterceptor.intercept_service + + if rpc_method_handler is None: + return None + + # Since streaming response RPC can be either a coroutine or an async generator, we're checking either here. + if _is_coroutine_handler(rpc_method_handler): + return _TracedCoroRpcMethodHandler(pin, handler_call_details, rpc_method_handler) + elif _is_async_gen_handler(rpc_method_handler): + return _TracedAsyncGenRpcMethodHandler(pin, handler_call_details, rpc_method_handler) + else: + return _TracedRpcMethodHandler(pin, handler_call_details, rpc_method_handler) + + return _ServerInterceptor(interceptor_function) + + +def _handle_server_exception( + servicer_context, # type: Union[None, grpc.ServicerContext] + span, # type: Span +): + # type: (...) 
-> None + span.error = 1 + if servicer_context is None: + return + if hasattr(servicer_context, "details"): + span.set_tag_str(ERROR_MSG, to_unicode(servicer_context.details())) + if hasattr(servicer_context, "code") and servicer_context.code() != 0 and servicer_context.code() in _INT2CODE: + span.set_tag_str(ERROR_TYPE, to_unicode(_INT2CODE[servicer_context.code()])) + + +async def _wrap_aio_stream_response( + behavior: Callable[[Union[RequestIterableType, RequestType], aio.ServicerContext], ResponseIterableType], + request_or_iterator: Union[RequestIterableType, RequestType], + servicer_context: aio.ServicerContext, + span: Span, +) -> ResponseIterableType: + try: + call = behavior(request_or_iterator, servicer_context) + async for response in call: + yield response + except Exception: + span.set_traceback() + _handle_server_exception(servicer_context, span) + raise + finally: + span.finish() + + +async def _wrap_aio_unary_response( + behavior: Callable[[Union[RequestIterableType, RequestType], aio.ServicerContext], Awaitable[ResponseType]], + request_or_iterator: Union[RequestIterableType, RequestType], + servicer_context: aio.ServicerContext, + span: Span, +) -> ResponseType: + try: + return await behavior(request_or_iterator, servicer_context) + except Exception: + span.set_traceback() + _handle_server_exception(servicer_context, span) + raise + finally: + span.finish() + + +def _wrap_stream_response( + behavior, # type: Callable[[Any, grpc.ServicerContext], Iterable[Any]] + request_or_iterator, # type: Any + servicer_context, # type: grpc.ServicerContext + span, # type: Span +): + # type: (...) -> Iterable[Any] + try: + for response in behavior(request_or_iterator, servicer_context): + yield response + except Exception: + span.set_traceback() + _handle_server_exception(servicer_context, span) + raise + finally: + span.finish() + + +def _wrap_unary_response( + behavior, # type: Callable[[Any, grpc.ServicerContext], Any] + request_or_iterator, # type: Any + servicer_context, # type: grpc.ServicerContext + span, # type: Span +): + # type: (...) 
-> Any + try: + return behavior(request_or_iterator, servicer_context) + except Exception: + span.set_traceback() + _handle_server_exception(servicer_context, span) + raise + finally: + span.finish() + + +def _create_span(pin, handler_call_details, method_kind): + # type: (Pin, grpc.HandlerCallDetails, str) -> Span + tracer = pin.tracer + headers = dict(handler_call_details.invocation_metadata) + + trace_utils.activate_distributed_headers(tracer, int_config=config.grpc_aio_server, request_headers=headers) + + span = tracer.trace( + schematize_url_operation("grpc", protocol="grpc", direction=SpanDirection.INBOUND), + span_type=SpanTypes.GRPC, + service=trace_utils.int_service(pin, config.grpc_aio_server), + resource=handler_call_details.method, + ) + + span.set_tag_str(COMPONENT, config.grpc_aio_server.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + span.set_tag(SPAN_MEASURED_KEY) + + set_grpc_method_meta(span, handler_call_details.method, method_kind) + span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_SERVER) + + sample_rate = config.grpc_aio_server.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + if pin.tags: + span.set_tags(pin.tags) + + return span + + +class _TracedCoroRpcMethodHandler(wrapt.ObjectProxy): + def __init__(self, pin, handler_call_details, wrapped): + # type: (Pin, grpc.HandlerCallDetails, grpc.RpcMethodHandler) -> None + super(_TracedCoroRpcMethodHandler, self).__init__(wrapped) + self._pin = pin + self._handler_call_details = handler_call_details + + async def unary_unary(self, request: RequestType, context: aio.ServicerContext) -> ResponseType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_UNARY) + return await _wrap_aio_unary_response(self.__wrapped__.unary_unary, request, context, span) + + async def unary_stream(self, request: RequestType, context: aio.ServicerContext) -> ResponseType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_SERVER_STREAMING) + return await _wrap_aio_unary_response(self.__wrapped__.unary_stream, request, context, span) + + async def stream_unary(self, request_iterator: RequestIterableType, context: aio.ServicerContext) -> ResponseType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_CLIENT_STREAMING) + return await _wrap_aio_unary_response(self.__wrapped__.stream_unary, request_iterator, context, span) + + async def stream_stream(self, request_iterator: RequestIterableType, context: aio.ServicerContext) -> ResponseType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_BIDI_STREAMING) + return await _wrap_aio_unary_response(self.__wrapped__.stream_stream, request_iterator, context, span) + + +class _TracedAsyncGenRpcMethodHandler(wrapt.ObjectProxy): + def __init__(self, pin, handler_call_details, wrapped): + # type: (Pin, grpc.HandlerCallDetails, grpc.RpcMethodHandler) -> None + super(_TracedAsyncGenRpcMethodHandler, self).__init__(wrapped) + self._pin = pin + self._handler_call_details = handler_call_details + + async def unary_stream(self, request: RequestType, context: aio.ServicerContext) -> ResponseIterableType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_SERVER_STREAMING) + async for response in _wrap_aio_stream_response(self.__wrapped__.unary_stream, request, 
context, span): + yield response + + async def stream_stream( + self, request_iterator: RequestIterableType, context: aio.ServicerContext + ) -> ResponseIterableType: + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_BIDI_STREAMING) + async for response in _wrap_aio_stream_response( + self.__wrapped__.stream_stream, request_iterator, context, span + ): + yield response + + +class _TracedRpcMethodHandler(wrapt.ObjectProxy): + def __init__(self, pin, handler_call_details, wrapped): + # type: (Pin, grpc.HandlerCallDetails, grpc.RpcMethodHandler) -> None + super(_TracedRpcMethodHandler, self).__init__(wrapped) + self._pin = pin + self._handler_call_details = handler_call_details + + def unary_unary(self, request, context): + # type: (Any, grpc.ServicerContext) -> Any + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_UNARY) + return _wrap_unary_response(self.__wrapped__.unary_unary, request, context, span) + + def unary_stream(self, request, context): + # type: (Any, grpc.ServicerContext) -> Iterable[Any] + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_SERVER_STREAMING) + for response in _wrap_stream_response(self.__wrapped__.unary_stream, request, context, span): + yield response + + def stream_unary(self, request_iterator, context): + # type: (Iterable[Any], grpc.ServicerContext) -> Any + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_CLIENT_STREAMING) + return _wrap_unary_response(self.__wrapped__.stream_unary, request_iterator, context, span) + + def stream_stream(self, request_iterator, context): + # type: (Iterable[Any], grpc.ServicerContext) -> Iterable[Any] + span = _create_span(self._pin, self._handler_call_details, constants.GRPC_METHOD_KIND_BIDI_STREAMING) + for response in _wrap_stream_response(self.__wrapped__.stream_stream, request_iterator, context, span): + yield response + + +TracedRpcMethodHandlerType = Union[ + _TracedAsyncGenRpcMethodHandler, _TracedCoroRpcMethodHandler, _TracedRpcMethodHandler +] + + +class _ServerInterceptor(aio.ServerInterceptor): + def __init__(self, interceptor_function): + self._fn = interceptor_function + + async def intercept_service( + self, + continuation, # type: Continuation + handler_call_details, # type: grpc.HandlerCallDetails + ): + # type: (...) 
-> Union[TracedRpcMethodHandlerType, None] + return await self._fn(continuation, handler_call_details) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/client_interceptor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/client_interceptor.py new file mode 100644 index 0000000..59dc4ab --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/client_interceptor.py @@ -0,0 +1,275 @@ +import collections + +import grpc + +from ddtrace import config +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.internal.compat import to_unicode +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import ERROR_MSG +from ...constants import ERROR_STACK +from ...constants import ERROR_TYPE +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...internal.logger import get_logger +from ...internal.schema import schematize_url_operation +from ...propagation.http import HTTPPropagator +from .. import trace_utils +from . import constants +from . import utils + + +log = get_logger(__name__) + +# DEV: Follows Python interceptors RFC laid out in +# https://github.com/grpc/proposal/blob/master/L13-python-interceptors.md + +# DEV: __version__ added in v1.21.4 +# https://github.com/grpc/grpc/commit/dd4830eae80143f5b0a9a3a1a024af4cf60e7d02 + + +def create_client_interceptor(pin, host, port): + return _ClientInterceptor(pin, host, port) + + +def intercept_channel(wrapped, instance, args, kwargs): + channel = args[0] + interceptors = args[1:] + if isinstance(getattr(channel, "_interceptor", None), _ClientInterceptor): + dd_interceptor = channel._interceptor + base_channel = getattr(channel, "_channel", None) + if base_channel: + new_channel = wrapped(channel._channel, *interceptors) + return grpc.intercept_channel(new_channel, dd_interceptor) + + return wrapped(*args, **kwargs) + + +class _ClientCallDetails( + collections.namedtuple("_ClientCallDetails", ("method", "timeout", "metadata", "credentials")), + grpc.ClientCallDetails, +): + pass + + +def _future_done_callback(span): + def func(response): + try: + # pull out response code from gRPC response to use both for `grpc.status.code` + # tag and the error type tag if the response is an exception + response_code = response.code() + # cast code to unicode for tags + status_code = to_unicode(response_code) + span.set_tag_str(constants.GRPC_STATUS_CODE_KEY, status_code) + + if response_code != grpc.StatusCode.OK: + _handle_error(span, response, status_code) + finally: + span.finish() + + return func + + +def _handle_response(span, response): + # use duck-typing to support future-like response as in the case of + # google-api-core which has its own future base class + # https://github.com/googleapis/python-api-core/blob/49c6755a21215bbb457b60db91bab098185b77da/google/api_core/future/base.py#L23 + if hasattr(response, "add_done_callback"): + response.add_done_callback(_future_done_callback(span)) + + +def _handle_error(span, response_error, status_code): + # response_error should be a grpc.Future and so we expect to have cancelled(), + # exception() and traceback() methods if a computation has resulted in an + # exception being raised + if ( + not callable(getattr(response_error, "cancelled", None)) + and not callable(getattr(response_error, "exception", None)) + and not 
callable(getattr(response_error, "traceback", None)) + ): + return + + if response_error.cancelled(): + # handle cancelled futures separately to avoid raising grpc.FutureCancelledError + span.error = 1 + exc_val = to_unicode(response_error.details()) + span.set_tag_str(ERROR_MSG, exc_val) + span.set_tag_str(ERROR_TYPE, status_code) + return + + exception = response_error.exception() + traceback = response_error.traceback() + + if exception is not None and traceback is not None: + span.error = 1 + if isinstance(exception, grpc.RpcError): + # handle internal gRPC exceptions separately to get status code and + # details as tags properly + exc_val = to_unicode(response_error.details()) + span.set_tag_str(ERROR_MSG, exc_val) + span.set_tag_str(ERROR_TYPE, status_code) + span.set_tag_str(ERROR_STACK, str(traceback)) + else: + exc_type = type(exception) + span.set_exc_info(exc_type, exception, traceback) + status_code = to_unicode(response_error.code()) + + +class _WrappedResponseCallFuture(wrapt.ObjectProxy): + def __init__(self, wrapped, span): + super(_WrappedResponseCallFuture, self).__init__(wrapped) + self._span = span + # Registers callback on the _MultiThreadedRendezvous future to finish + # span in case StopIteration is never raised but RPC is terminated + _handle_response(self._span, self.__wrapped__) + + def __iter__(self): + return self + + def _next(self): + # While an iterator ObjectProxy requires only __iter__ and __next__, we + # make sure to also proxy the grpc._channel._Rendezvous._next method in + # case it is being called directly as in google.api_core.grpc_helpers + # and grpc_gcp._channel. + # https://github.com/grpc/grpc/blob/5195a06ddea8da6603c6672e0ed09fec9b5c16ac/src/python/grpcio/grpc/_channel.py#L418-L419 + # https://github.com/googleapis/python-api-core/blob/35e87e0aca52167029784379ca84e979098e1d6c/google/api_core/grpc_helpers.py#L84 + # https://github.com/GoogleCloudPlatform/grpc-gcp-python/blob/5a2cd9807bbaf1b85402a2a364775e5b65853df6/src/grpc_gcp/_channel.py#L102 + try: + return next(self.__wrapped__) + except StopIteration: + # Callback will handle span finishing + raise + except grpc.RpcError as rpc_error: + # DEV: grpcio<1.18.0 grpc.RpcError is raised rather than returned as response + # https://github.com/grpc/grpc/commit/8199aff7a66460fbc4e9a82ade2e95ef076fd8f9 + # handle as a response + _handle_response(self._span, rpc_error) + raise + except Exception: + # DEV: added for safety though should not be reached since wrapped response + log.debug("unexpected non-grpc exception raised, closing open span", exc_info=True) + self._span.set_traceback() + self._span.finish() + raise + + def __next__(self): + return self._next() + + next = __next__ + + +class _ClientInterceptor( + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor, +): + def __init__(self, pin, host, port): + self._pin = pin + self._host = host + self._port = port + + def _intercept_client_call(self, method_kind, client_call_details): + tracer = self._pin.tracer + + span = tracer.trace( + schematize_url_operation("grpc", protocol="grpc", direction=SpanDirection.OUTBOUND), + span_type=SpanTypes.GRPC, + service=trace_utils.ext_service(self._pin, config.grpc), + resource=client_call_details.method, + ) + + span.set_tag_str(COMPONENT, config.grpc.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + 
utils.set_grpc_method_meta(span, client_call_details.method, method_kind) + utils.set_grpc_client_meta(span, self._host, self._port) + span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_CLIENT) + + sample_rate = config.grpc.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + # inject tags from pin + if self._pin.tags: + span.set_tags(self._pin.tags) + + # propagate distributed tracing headers if available + headers = {} + if config.grpc.distributed_tracing_enabled: + HTTPPropagator.inject(span.context, headers) + + metadata = [] + if client_call_details.metadata is not None: + metadata = list(client_call_details.metadata) + metadata.extend(headers.items()) + + client_call_details = _ClientCallDetails( + client_call_details.method, + client_call_details.timeout, + metadata, + client_call_details.credentials, + ) + + return span, client_call_details + + def intercept_unary_unary(self, continuation, client_call_details, request): + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_UNARY, + client_call_details, + ) + try: + response = continuation(client_call_details, request) + _handle_response(span, response) + except grpc.RpcError as rpc_error: + # DEV: grpcio<1.18.0 grpc.RpcError is raised rather than returned as response + # https://github.com/grpc/grpc/commit/8199aff7a66460fbc4e9a82ade2e95ef076fd8f9 + # handle as a response + _handle_response(span, rpc_error) + raise + + return response + + def intercept_unary_stream(self, continuation, client_call_details, request): + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_SERVER_STREAMING, + client_call_details, + ) + response_iterator = continuation(client_call_details, request) + response_iterator = _WrappedResponseCallFuture(response_iterator, span) + return response_iterator + + def intercept_stream_unary(self, continuation, client_call_details, request_iterator): + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_CLIENT_STREAMING, + client_call_details, + ) + try: + response = continuation(client_call_details, request_iterator) + _handle_response(span, response) + except grpc.RpcError as rpc_error: + # DEV: grpcio<1.18.0 grpc.RpcError is raised rather than returned as response + # https://github.com/grpc/grpc/commit/8199aff7a66460fbc4e9a82ade2e95ef076fd8f9 + # handle as a response + _handle_response(span, rpc_error) + raise + + return response + + def intercept_stream_stream(self, continuation, client_call_details, request_iterator): + span, client_call_details = self._intercept_client_call( + constants.GRPC_METHOD_KIND_BIDI_STREAMING, + client_call_details, + ) + response_iterator = continuation(client_call_details, request_iterator) + response_iterator = _WrappedResponseCallFuture(response_iterator, span) + return response_iterator diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/constants.py new file mode 100644 index 0000000..232c709 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/constants.py @@ -0,0 +1,26 @@ +import grpc + + +GRPC_PIN_MODULE_SERVER = grpc.Server +GRPC_PIN_MODULE_CLIENT = grpc.Channel +GRPC_METHOD_PATH_KEY = "grpc.method.path" +GRPC_METHOD_PACKAGE_SERVICE_KEY = "rpc.service" +GRPC_METHOD_PACKAGE_KEY = "grpc.method.package" +GRPC_METHOD_SERVICE_KEY = "grpc.method.service" +GRPC_METHOD_NAME_KEY = 
"grpc.method.name" +GRPC_METHOD_KIND_KEY = "grpc.method.kind" +GRPC_STATUS_CODE_KEY = "grpc.status.code" +GRPC_REQUEST_METADATA_PREFIX_KEY = "grpc.request.metadata." +GRPC_RESPONSE_METADATA_PREFIX_KEY = "grpc.response.metadata." +GRPC_HOST_KEY = "grpc.host" +GRPC_SPAN_KIND_KEY = "span.kind" +GRPC_SPAN_KIND_VALUE_CLIENT = "client" +GRPC_SPAN_KIND_VALUE_SERVER = "server" +GRPC_METHOD_KIND_UNARY = "unary" +GRPC_METHOD_KIND_CLIENT_STREAMING = "client_streaming" +GRPC_METHOD_KIND_SERVER_STREAMING = "server_streaming" +GRPC_METHOD_KIND_BIDI_STREAMING = "bidi_streaming" +GRPC_SERVICE_SERVER = "grpc-server" +GRPC_AIO_SERVICE_SERVER = "grpc-aio-server" +GRPC_SERVICE_CLIENT = "grpc-client" +GRPC_AIO_SERVICE_CLIENT = "grpc-aio-client" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/patch.py new file mode 100644 index 0000000..25bf4df --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/patch.py @@ -0,0 +1,260 @@ +import grpc + +from ddtrace import Pin +from ddtrace import config +from ddtrace.internal.schema import schematize_service_name +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ..trace_utils import unwrap as _u +from . import constants +from . import utils +from .client_interceptor import create_client_interceptor +from .client_interceptor import intercept_channel +from .server_interceptor import create_server_interceptor + + +def get_version(): + # type: () -> str + return getattr(grpc, "__version__", "") + + +try: + # `grpc.aio` is only available with `grpcio>=1.32`. + import grpc.aio + + from .aio_client_interceptor import create_aio_client_interceptors + from .aio_server_interceptor import create_aio_server_interceptor + + HAS_GRPC_AIO = True + # NOTE: These are not defined in constants.py because we would end up having + # try-except in both files. + GRPC_AIO_PIN_MODULE_SERVER = grpc.aio.Server + GRPC_AIO_PIN_MODULE_CLIENT = grpc.aio.Channel +except ImportError: + HAS_GRPC_AIO = False + # NOTE: These are defined just to prevent a 'not defined' error. + # Be sure not to use them when `HAS_GRPC_AIO` is False. 
+ GRPC_AIO_PIN_MODULE_SERVER = None + GRPC_AIO_PIN_MODULE_CLIENT = None + +config._add( + "grpc_server", + dict( + _default_service=schematize_service_name(constants.GRPC_SERVICE_SERVER), + distributed_tracing_enabled=True, + ), +) + +config._add( + "grpc_client", + dict( + _default_service=schematize_service_name(constants.GRPC_SERVICE_CLIENT), + distributed_tracing_enabled=True, + ), +) + + +# TODO[tbutt]: keeping name for client config unchanged to maintain backwards +# compatibility but should change in future +config._add( + "grpc", + dict( + _default_service=schematize_service_name(constants.GRPC_SERVICE_CLIENT), + distributed_tracing_enabled=True, + ), +) + + +if HAS_GRPC_AIO: + config._add( + "grpc_aio_server", + dict( + _default_service=schematize_service_name(constants.GRPC_AIO_SERVICE_SERVER), + distributed_tracing_enabled=True, + ), + ) + + config._add( + "grpc_aio_client", + dict( + _default_service=schematize_service_name(constants.GRPC_AIO_SERVICE_CLIENT), + distributed_tracing_enabled=True, + ), + ) + + +def patch(): + _patch_client() + _patch_server() + if HAS_GRPC_AIO: + _patch_aio_client() + _patch_aio_server() + + +def unpatch(): + _unpatch_client() + _unpatch_server() + if HAS_GRPC_AIO: + _unpatch_aio_client() + _unpatch_aio_server() + + +def _patch_client(): + if getattr(constants.GRPC_PIN_MODULE_CLIENT, "__datadog_patch", False): + return + constants.GRPC_PIN_MODULE_CLIENT.__datadog_patch = True + + Pin().onto(constants.GRPC_PIN_MODULE_CLIENT) + + _w("grpc", "insecure_channel", _client_channel_interceptor) + _w("grpc", "secure_channel", _client_channel_interceptor) + _w("grpc", "intercept_channel", intercept_channel) + + +def _patch_aio_client(): + if getattr(GRPC_AIO_PIN_MODULE_CLIENT, "__datadog_patch", False): + return + GRPC_AIO_PIN_MODULE_CLIENT.__datadog_patch = True + + Pin().onto(GRPC_AIO_PIN_MODULE_CLIENT) + + _w("grpc.aio", "insecure_channel", _aio_client_channel_interceptor) + _w("grpc.aio", "secure_channel", _aio_client_channel_interceptor) + + +def _unpatch_client(): + if not getattr(constants.GRPC_PIN_MODULE_CLIENT, "__datadog_patch", False): + return + constants.GRPC_PIN_MODULE_CLIENT.__datadog_patch = False + + pin = Pin.get_from(constants.GRPC_PIN_MODULE_CLIENT) + if pin: + pin.remove_from(constants.GRPC_PIN_MODULE_CLIENT) + + _u(grpc, "secure_channel") + _u(grpc, "insecure_channel") + _u(grpc, "intercept_channel") + + +def _unpatch_aio_client(): + if not getattr(GRPC_AIO_PIN_MODULE_CLIENT, "__datadog_patch", False): + return + GRPC_AIO_PIN_MODULE_CLIENT.__datadog_patch = False + + pin = Pin.get_from(GRPC_AIO_PIN_MODULE_CLIENT) + if pin: + pin.remove_from(GRPC_AIO_PIN_MODULE_CLIENT) + + _u(grpc.aio, "insecure_channel") + _u(grpc.aio, "secure_channel") + + +def _patch_server(): + if getattr(constants.GRPC_PIN_MODULE_SERVER, "__datadog_patch", False): + return + constants.GRPC_PIN_MODULE_SERVER.__datadog_patch = True + + Pin().onto(constants.GRPC_PIN_MODULE_SERVER) + + _w("grpc", "server", _server_constructor_interceptor) + + +def _patch_aio_server(): + if getattr(GRPC_AIO_PIN_MODULE_SERVER, "__datadog_patch", False): + return + GRPC_AIO_PIN_MODULE_SERVER.__datadog_patch = True + + Pin().onto(GRPC_AIO_PIN_MODULE_SERVER) + + _w("grpc.aio", "server", _aio_server_constructor_interceptor) + + +def _unpatch_server(): + if not getattr(constants.GRPC_PIN_MODULE_SERVER, "__datadog_patch", False): + return + constants.GRPC_PIN_MODULE_SERVER.__datadog_patch = False + + pin = Pin.get_from(constants.GRPC_PIN_MODULE_SERVER) + if pin: + 
pin.remove_from(constants.GRPC_PIN_MODULE_SERVER) + + _u(grpc, "server") + + +def _unpatch_aio_server(): + if not getattr(GRPC_AIO_PIN_MODULE_SERVER, "__datadog_patch", False): + return + GRPC_AIO_PIN_MODULE_SERVER.__datadog_patch = False + + pin = Pin.get_from(GRPC_AIO_PIN_MODULE_SERVER) + if pin: + pin.remove_from(GRPC_AIO_PIN_MODULE_SERVER) + + _u(grpc.aio, "server") + + +def _client_channel_interceptor(wrapped, instance, args, kwargs): + channel = wrapped(*args, **kwargs) + + pin = Pin.get_from(channel) + if not pin or not pin.enabled(): + return channel + + (host, port) = utils._parse_target_from_args(args, kwargs) + + interceptor_function = create_client_interceptor(pin, host, port) + return grpc.intercept_channel(channel, interceptor_function) + + +def _aio_client_channel_interceptor(wrapped, instance, args, kwargs): + channel = wrapped(*args, **kwargs) + + pin = Pin.get_from(channel) + if not pin or not pin.enabled(): + return channel + + (host, port) = utils._parse_target_from_args(args, kwargs) + + interceptors = create_aio_client_interceptors(pin, host, port) + # DEV: Inject our tracing interceptor first in the list of interceptors + if "interceptors" in kwargs: + kwargs["interceptors"] = interceptors + tuple(kwargs["interceptors"]) + else: + kwargs["interceptors"] = interceptors + + return wrapped(*args, **kwargs) + + +def _server_constructor_interceptor(wrapped, instance, args, kwargs): + # DEV: we clone the pin on the grpc module and configure it for the server + # interceptor + + pin = Pin.get_from(constants.GRPC_PIN_MODULE_SERVER) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + interceptor = create_server_interceptor(pin) + + # DEV: Inject our tracing interceptor first in the list of interceptors + if "interceptors" in kwargs: + kwargs["interceptors"] = (interceptor,) + tuple(kwargs["interceptors"]) + else: + kwargs["interceptors"] = (interceptor,) + + return wrapped(*args, **kwargs) + + +def _aio_server_constructor_interceptor(wrapped, instance, args, kwargs): + pin = Pin.get_from(GRPC_AIO_PIN_MODULE_SERVER) + + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + interceptor = create_aio_server_interceptor(pin) + # DEV: Inject our tracing interceptor first in the list of interceptors + if "interceptors" in kwargs: + kwargs["interceptors"] = (interceptor,) + tuple(kwargs["interceptors"]) + else: + kwargs["interceptors"] = (interceptor,) + + return wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/server_interceptor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/server_interceptor.py new file mode 100644 index 0000000..0691cd9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/server_interceptor.py @@ -0,0 +1,135 @@ +import grpc + +from ddtrace import config +from ddtrace.internal.compat import to_unicode +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import ERROR_MSG +from ...constants import ERROR_TYPE +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from .. import trace_utils +from . 
import constants +from .utils import set_grpc_method_meta + + +def create_server_interceptor(pin): + def interceptor_function(continuation, handler_call_details): + if not pin.enabled: + return continuation(handler_call_details) + + rpc_method_handler = continuation(handler_call_details) + + # continuation returns an RpcMethodHandler instance if the RPC is + # considered serviced, or None otherwise + # https://grpc.github.io/grpc/python/grpc.html#grpc.ServerInterceptor.intercept_service + + if rpc_method_handler: + return _TracedRpcMethodHandler(pin, handler_call_details, rpc_method_handler) + + return rpc_method_handler + + return _ServerInterceptor(interceptor_function) + + +def _handle_server_exception(server_context, span): + if server_context is not None and hasattr(server_context, "_state") and server_context._state is not None: + code = to_unicode(server_context._state.code) + details = to_unicode(server_context._state.details) + span.error = 1 + span.set_tag_str(ERROR_MSG, details) + span.set_tag_str(ERROR_TYPE, code) + + +def _wrap_response_iterator(response_iterator, server_context, span): + try: + for response in response_iterator: + yield response + except Exception: + span.set_traceback() + _handle_server_exception(server_context, span) + raise + finally: + span.finish() + + +class _TracedRpcMethodHandler(wrapt.ObjectProxy): + def __init__(self, pin, handler_call_details, wrapped): + super(_TracedRpcMethodHandler, self).__init__(wrapped) + self._pin = pin + self._handler_call_details = handler_call_details + + def _fn(self, method_kind, behavior, args, kwargs): + tracer = self._pin.tracer + headers = dict(self._handler_call_details.invocation_metadata) + + trace_utils.activate_distributed_headers(tracer, int_config=config.grpc_server, request_headers=headers) + + span = tracer.trace( + schematize_url_operation("grpc", protocol="grpc", direction=SpanDirection.INBOUND), + span_type=SpanTypes.GRPC, + service=trace_utils.int_service(self._pin, config.grpc_server), + resource=self._handler_call_details.method, + ) + + span.set_tag_str(COMPONENT, config.grpc_server.integration_name) + + # set span.kind tag equal to type of span + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + span.set_tag(SPAN_MEASURED_KEY) + + set_grpc_method_meta(span, self._handler_call_details.method, method_kind) + span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_SERVER) + + sample_rate = config.grpc_server.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + # access server context by taking second argument as server context + # if not found, skip using context to tag span with server state information + server_context = args[1] if isinstance(args[1], grpc.ServicerContext) else None + + if self._pin.tags: + span.set_tags(self._pin.tags) + + try: + response_or_iterator = behavior(*args, **kwargs) + + if self.__wrapped__.response_streaming: + response_or_iterator = _wrap_response_iterator(response_or_iterator, server_context, span) + except Exception: + span.set_traceback() + _handle_server_exception(server_context, span) + raise + finally: + if not self.__wrapped__.response_streaming: + span.finish() + + return response_or_iterator + + def unary_unary(self, *args, **kwargs): + return self._fn(constants.GRPC_METHOD_KIND_UNARY, self.__wrapped__.unary_unary, args, kwargs) + + def unary_stream(self, *args, **kwargs): + return self._fn(constants.GRPC_METHOD_KIND_SERVER_STREAMING, self.__wrapped__.unary_stream, args, 
kwargs) + + def stream_unary(self, *args, **kwargs): + return self._fn(constants.GRPC_METHOD_KIND_CLIENT_STREAMING, self.__wrapped__.stream_unary, args, kwargs) + + def stream_stream(self, *args, **kwargs): + return self._fn(constants.GRPC_METHOD_KIND_BIDI_STREAMING, self.__wrapped__.stream_stream, args, kwargs) + + +class _ServerInterceptor(grpc.ServerInterceptor): + def __init__(self, interceptor_function): + self._fn = interceptor_function + + def intercept_service(self, continuation, handler_call_details): + return self._fn(continuation, handler_call_details) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/utils.py new file mode 100644 index 0000000..4589dcf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/grpc/utils.py @@ -0,0 +1,83 @@ +import ipaddress +import logging + +from ddtrace.internal.compat import parse + +from ...ext import net +from . import constants + + +log = logging.getLogger(__name__) + + +def parse_method_path(method_path): + """Returns (package_service, package, service, method) tuple from parsing method path""" + # unpack method path based on "/{package}.{service}/{method}" + # first remove leading "/" as unnecessary + package_service, method_name = method_path.lstrip("/").rsplit("/", 1) + + package_service_split = package_service.rsplit(".", 1) + # {package} is optional + if len(package_service_split) == 2: + return package_service, package_service_split[0], package_service_split[1], method_name + + return package_service, None, package_service_split[0], method_name + + +def set_grpc_method_meta(span, method, method_kind): + method_path = method + method_package_service, method_package, method_service, method_name = parse_method_path(method_path) + if method_package_service is not None: + span.set_tag_str(constants.GRPC_METHOD_PACKAGE_SERVICE_KEY, method_package_service) + if method_path is not None: + span.set_tag_str(constants.GRPC_METHOD_PATH_KEY, method_path) + if method_package is not None: + span.set_tag_str(constants.GRPC_METHOD_PACKAGE_KEY, method_package) + if method_service is not None: + span.set_tag_str(constants.GRPC_METHOD_SERVICE_KEY, method_service) + if method_name is not None: + span.set_tag_str(constants.GRPC_METHOD_NAME_KEY, method_name) + if method_kind is not None: + span.set_tag_str(constants.GRPC_METHOD_KIND_KEY, method_kind) + + +def set_grpc_client_meta(span, host, port): + if host: + span.set_tag_str(constants.GRPC_HOST_KEY, host) + try: + ipaddress.ip_address(host) + except ValueError: + span.set_tag_str(net.PEER_HOSTNAME, host) + else: + span.set_tag_str(net.TARGET_IP, host) + if port: + span.set_tag_str(net.TARGET_PORT, str(port)) + span.set_tag_str(constants.GRPC_SPAN_KIND_KEY, constants.GRPC_SPAN_KIND_VALUE_CLIENT) + + +def _parse_target_from_args(args, kwargs): + if "target" in kwargs: + target = kwargs["target"] + else: + target = args[0] + + try: + if target is None: + return + + # ensure URI follows RFC 3986 and is preceded by double slash + # https://tools.ietf.org/html/rfc3986#section-3.2 + parsed = parse.urlsplit("//" + target if not target.startswith("//") else target) + port = None + try: + port = parsed.port + except ValueError: + log.warning("Non-integer port in target '%s'", target) + + # an empty hostname in Python 2.7 will be an empty string rather than + # None + hostname = parsed.hostname if parsed.hostname is not None and len(parsed.hostname) > 0 else None + + return hostname, port + except ValueError: + 
log.warning("Malformed target '%s'.", target) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gunicorn/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gunicorn/__init__.py new file mode 100644 index 0000000..d098a3f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/gunicorn/__init__.py @@ -0,0 +1,22 @@ +""" +ddtrace works with Gunicorn. + +.. note:: + If you cannot wrap your Gunicorn server with the ``ddtrace-run`` command and + it uses ``gevent`` workers be sure to ``import ddtrace.auto`` as early as + possible in your application's lifecycle. + Do not use ``ddtrace-run`` with ``import ddtrace.auto``. +""" + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + pass + + +def unpatch(): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/__init__.py new file mode 100644 index 0000000..b3cdaf9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/__init__.py @@ -0,0 +1,66 @@ +""" +Trace the standard library ``httplib``/``http.client`` libraries to trace +HTTP requests. + + +Enabling +~~~~~~~~ + +The httplib integration is disabled by default. It can be enabled when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto` +using the ``DD_TRACE_HTTPLIB_ENABLED`` environment variable:: + + DD_TRACE_HTTPLIB_ENABLED=true ddtrace-run .... + +The integration can also be enabled manually in code with +:func:`patch_all()`:: + + from ddtrace import patch_all + patch_all(httplib=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.httplib['distributed_tracing'] + + Include distributed tracing headers in requests sent from httplib. + + This option can also be set with the ``DD_HTTPLIB_DISTRIBUTED_TRACING`` + environment variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + + +The integration can be configured per instance:: + + from ddtrace import config + + # Disable distributed tracing globally. + config.httplib['distributed_tracing'] = False + + # Change the service distributed tracing option only for this HTTP + # connection. + + # Python 2 + connection = urllib.HTTPConnection('www.datadog.com') + + # Python 3 + connection = http.client.HTTPConnection('www.datadog.com') + + cfg = config.get_from(connection) + cfg['distributed_tracing'] = True + + +:ref:`Headers tracing ` is supported for this integration. 
+""" +from .patch import get_version +from .patch import patch +from .patch import unpatch + + +__all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/patch.py new file mode 100644 index 0000000..47aa60b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httplib/patch.py @@ -0,0 +1,229 @@ +import os +import sys + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import httplib +from ...internal.compat import parse +from ...internal.constants import _HTTPLIB_NO_TRACE_REQUEST +from ...internal.logger import get_logger +from ...internal.schema import schematize_url_operation +from ...internal.utils.formats import asbool +from ...pin import Pin +from ...propagation.http import HTTPPropagator +from .. import trace_utils +from ..trace_utils import unwrap as _u + + +span_name = "http.client.request" +span_name = schematize_url_operation(span_name, protocol="http", direction=SpanDirection.OUTBOUND) + +log = get_logger(__name__) + + +config._add( + "httplib", + { + "distributed_tracing": asbool(os.getenv("DD_HTTPLIB_DISTRIBUTED_TRACING", default=True)), + "default_http_tag_query_string": os.getenv("DD_HTTP_CLIENT_TAG_QUERY_STRING", "true"), + }, +) + + +def get_version(): + # type: () -> str + return "" + + +def _wrap_init(func, instance, args, kwargs): + Pin(service=None, _config=config.httplib).onto(instance) + return func(*args, **kwargs) + + +def _wrap_getresponse(func, instance, args, kwargs): + # Use any attached tracer if available, otherwise use the global tracer + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + resp = None + try: + resp = func(*args, **kwargs) + return resp + finally: + try: + # Get the span attached to this instance, if available + span = getattr(instance, "_datadog_span", None) + if span: + if resp: + trace_utils.set_http_meta( + span, config.httplib, status_code=resp.status, response_headers=resp.getheaders() + ) + + span.finish() + delattr(instance, "_datadog_span") + except Exception: + log.debug("error applying request tags", exc_info=True) + + +def _wrap_request(func, instance, args, kwargs): + # Use any attached tracer if available, otherwise use the global tracer + pin = Pin.get_from(instance) + if should_skip_request(pin, instance): + return func(*args, **kwargs) + + cfg = config.get_from(instance) + + try: + # Create a new span and attach to this instance (so we can retrieve/update/close later on the response) + span = pin.tracer.trace(span_name, span_type=SpanTypes.HTTP) + + span.set_tag_str(COMPONENT, config.httplib.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + instance._datadog_span = span + + # propagate distributed tracing headers + if cfg.get("distributed_tracing"): + if len(args) > 3: + headers = args[3] + else: + headers = kwargs.setdefault("headers", {}) + HTTPPropagator.inject(span.context, headers) + except Exception: + log.debug("error configuring request", exc_info=True) + span = getattr(instance, "_datadog_span", None) + if span: + span.finish() + + try: + return 
func(*args, **kwargs) + except Exception: + span = getattr(instance, "_datadog_span", None) + exc_info = sys.exc_info() + if span: + span.set_exc_info(*exc_info) + span.finish() + raise + + +def _wrap_putrequest(func, instance, args, kwargs): + # Use any attached tracer if available, otherwise use the global tracer + pin = Pin.get_from(instance) + if should_skip_request(pin, instance): + return func(*args, **kwargs) + + try: + if hasattr(instance, "_datadog_span"): + # Reuse an existing span set in _wrap_request + span = instance._datadog_span + else: + # Create a new span and attach to this instance (so we can retrieve/update/close later on the response) + span = pin.tracer.trace(span_name, span_type=SpanTypes.HTTP) + + span.set_tag_str(COMPONENT, config.httplib.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + instance._datadog_span = span + + method, path = args[:2] + scheme = "https" if isinstance(instance, httplib.HTTPSConnection) else "http" + port = ":{port}".format(port=instance.port) + + if (scheme == "http" and instance.port == 80) or (scheme == "https" and instance.port == 443): + port = "" + url = "{scheme}://{host}{port}{path}".format(scheme=scheme, host=instance.host, port=port, path=path) + + # sanitize url + parsed = parse.urlparse(url) + sanitized_url = parse.urlunparse( + (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, parsed.fragment) # drop query + ) + trace_utils.set_http_meta( + span, config.httplib, method=method, url=sanitized_url, target_host=instance.host, query=parsed.query + ) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.httplib.get_analytics_sample_rate()) + except Exception: + log.debug("error applying request tags", exc_info=True) + + # Close the span to prevent memory leaks. + span = getattr(instance, "_datadog_span", None) + if span: + span.finish() + + try: + return func(*args, **kwargs) + except Exception: + span = getattr(instance, "_datadog_span", None) + exc_info = sys.exc_info() + if span: + span.set_exc_info(*exc_info) + span.finish() + raise + + +def _wrap_putheader(func, instance, args, kwargs): + span = getattr(instance, "_datadog_span", None) + if span: + request_headers = {args[0]: args[1]} + trace_utils.set_http_meta(span, config.httplib, request_headers=request_headers) + + return func(*args, **kwargs) + + +def should_skip_request(pin, request): + """Helper to determine if the provided request should be traced""" + if getattr(request, _HTTPLIB_NO_TRACE_REQUEST, False): + return True + + if not pin or not pin.enabled(): + return True + + # httplib is used to send apm events (profiling,di, tracing, etc.) to the datadog agent + # Tracing these requests introduces a significant noise and instability in ddtrace tests. + # TO DO: Avoid tracing requests to APM internal services (ie: extend this functionality to agentless products). 
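+    # Skip the request when its host and port match the configured trace agent URL.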
+ agent_url = pin.tracer.agent_trace_url + if agent_url: + parsed = parse.urlparse(agent_url) + return request.host == parsed.hostname and request.port == parsed.port + return False + + +def patch(): + """patch the built-in urllib/httplib/httplib.client methods for tracing""" + if getattr(httplib, "__datadog_patch", False): + return + httplib.__datadog_patch = True + + # Patch the desired methods + httplib.HTTPConnection.__init__ = wrapt.FunctionWrapper(httplib.HTTPConnection.__init__, _wrap_init) + httplib.HTTPConnection.getresponse = wrapt.FunctionWrapper(httplib.HTTPConnection.getresponse, _wrap_getresponse) + httplib.HTTPConnection.request = wrapt.FunctionWrapper(httplib.HTTPConnection.request, _wrap_request) + httplib.HTTPConnection.putrequest = wrapt.FunctionWrapper(httplib.HTTPConnection.putrequest, _wrap_putrequest) + httplib.HTTPConnection.putheader = wrapt.FunctionWrapper(httplib.HTTPConnection.putheader, _wrap_putheader) + + +def unpatch(): + """unpatch any previously patched modules""" + if not getattr(httplib, "__datadog_patch", False): + return + httplib.__datadog_patch = False + + _u(httplib.HTTPConnection, "__init__") + _u(httplib.HTTPConnection, "getresponse") + _u(httplib.HTTPConnection, "request") + _u(httplib.HTTPConnection, "putrequest") + _u(httplib.HTTPConnection, "putheader") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/__init__.py new file mode 100644 index 0000000..509847c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/__init__.py @@ -0,0 +1,91 @@ +""" +The httpx__ integration traces all HTTP requests made with the ``httpx`` +library. + +Enabling +~~~~~~~~ + +The ``httpx`` integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Alternatively, use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(httpx=True) + + # use httpx like usual + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.httpx['service'] + + The default service name for ``httpx`` requests. + By default the ``httpx`` integration will not define a service name and inherit + its service name from its parent span. + + If you are making calls to uninstrumented third party applications you can + set this setting, use the ``ddtrace.config.httpx['split_by_domain']`` setting, + or use a ``Pin`` to override an individual connection's settings (see example + below for ``Pin`` usage). + + This option can also be set with the ``DD_HTTPX_SERVICE`` environment + variable. + + Default: ``None`` + + +.. py:data:: ddtrace.config.httpx['distributed_tracing'] + + Whether or not to inject distributed tracing headers into requests. + + Default: ``True`` + + +.. py:data:: ddtrace.config.httpx['split_by_domain'] + + Whether or not to use the domain name of requests as the service name. This + setting can be overridden with session overrides (described in the Instance + Configuration section). 
+ + This setting takes precedence over ``ddtrace.config.httpx['service']`` + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular ``httpx`` client instances use the :class:`Pin ` API:: + + import httpx + from ddtrace import Pin + + client = httpx.Client() + # Override service name for this instance + Pin.override(client, service="custom-http-service") + + async_client = httpx.AsyncClient( + # Override service name for this instance + Pin.override(async_client, service="custom-async-http-service") + + +:ref:`Headers tracing ` is supported for this integration. + +:ref:`HTTP Tagging ` is supported for this integration. + +.. __: https://www.python-httpx.org/ +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["httpx"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/patch.py new file mode 100644 index 0000000..11647da --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/httpx/patch.py @@ -0,0 +1,204 @@ +import os + +import httpx + +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib.trace_utils import distributed_tracing_enabled +from ddtrace.contrib.trace_utils import ext_service +from ddtrace.contrib.trace_utils import set_http_meta +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.internal.compat import ensure_binary +from ddtrace.internal.compat import ensure_text +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.version import parse_version +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.pin import Pin +from ddtrace.propagation.http import HTTPPropagator +from ddtrace.vendor.wrapt import BoundFunctionWrapper +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + + +HTTPX_VERSION = parse_version(httpx.__version__) + + +def get_version(): + # type: () -> str + return getattr(httpx, "__version__", "") + + +config._add( + "httpx", + { + "distributed_tracing": asbool(os.getenv("DD_HTTPX_DISTRIBUTED_TRACING", default=True)), + "split_by_domain": asbool(os.getenv("DD_HTTPX_SPLIT_BY_DOMAIN", default=False)), + "default_http_tag_query_string": os.getenv("DD_HTTP_CLIENT_TAG_QUERY_STRING", "true"), + }, +) + + +def _url_to_str(url): + # type: (httpx.URL) -> str + """ + Helper to convert the httpx.URL parts from bytes to a str + """ + # httpx==0.13.0 added URL.raw, removed in httpx==0.23.1. 
Otherwise, must construct manually + if HTTPX_VERSION < (0, 13, 0): + # Manually construct the same way httpx==0.13 does it: + # https://github.com/encode/httpx/blob/2c2c6a71a9ff520d237f8283a586df2753f01f5e/httpx/_models.py#L161 + scheme = url.scheme.encode("ascii") + host = url.host.encode("ascii") + port = url.port + raw_path = url.full_path.encode("ascii") + elif HTTPX_VERSION < (0, 23, 1): + scheme, host, port, raw_path = url.raw + else: + scheme = url.raw_scheme + host = url.raw_host + port = url.port + raw_path = url.raw_path + url = scheme + b"://" + host + if port is not None: + url += b":" + ensure_binary(str(port)) + url += raw_path + return ensure_text(url) + + +def _get_service_name(pin, request): + # type: (Pin, httpx.Request) -> typing.Text + if config.httpx.split_by_domain: + if hasattr(request.url, "netloc"): + return ensure_text(request.url.netloc, errors="backslashreplace") + else: + service = ensure_binary(request.url.host) + if request.url.port: + service += b":" + ensure_binary(str(request.url.port)) + return ensure_text(service, errors="backslashreplace") + return ext_service(pin, config.httpx) + + +def _init_span(span, request): + # type: (Span, httpx.Request) -> None + span.set_tag(SPAN_MEASURED_KEY) + + if distributed_tracing_enabled(config.httpx): + HTTPPropagator.inject(span.context, request.headers) + + sample_rate = config.httpx.get_analytics_sample_rate(use_global_config=True) + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + +def _set_span_meta(span, request, response): + # type: (Span, httpx.Request, httpx.Response) -> None + set_http_meta( + span, + config.httpx, + method=request.method, + url=_url_to_str(request.url), + target_host=request.url.host, + status_code=response.status_code if response else None, + query=request.url.query, + request_headers=request.headers, + response_headers=response.headers if response else None, + ) + + +async def _wrapped_async_send( + wrapped: BoundFunctionWrapper, + instance, # type: httpx.AsyncClient + args, # type: typing.Tuple[httpx.Request] + kwargs, # type: typing.Dict[typing.Str, typing.Any] +): + # type: (...) -> typing.Coroutine[None, None, httpx.Response] + req = get_argument_value(args, kwargs, 0, "request") + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await wrapped(*args, **kwargs) + + operation_name = schematize_url_operation("http.request", protocol="http", direction=SpanDirection.OUTBOUND) + with pin.tracer.trace(operation_name, service=_get_service_name(pin, req), span_type=SpanTypes.HTTP) as span: + span.set_tag_str(COMPONENT, config.httpx.integration_name) + + # set span.kind to the operation type being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + _init_span(span, req) + resp = None + try: + resp = await wrapped(*args, **kwargs) + return resp + finally: + _set_span_meta(span, req, resp) + + +def _wrapped_sync_send( + wrapped: BoundFunctionWrapper, + instance, # type: httpx.AsyncClient + args, # type: typing.Tuple[httpx.Request] + kwargs, # type: typing.Dict[typing.Str, typing.Any] +): + # type: (...) 
-> httpx.Response + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + req = get_argument_value(args, kwargs, 0, "request") + + operation_name = schematize_url_operation("http.request", protocol="http", direction=SpanDirection.OUTBOUND) + with pin.tracer.trace(operation_name, service=_get_service_name(pin, req), span_type=SpanTypes.HTTP) as span: + span.set_tag_str(COMPONENT, config.httpx.integration_name) + + # set span.kind to the operation type being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + _init_span(span, req) + resp = None + try: + resp = wrapped(*args, **kwargs) + return resp + finally: + _set_span_meta(span, req, resp) + + +def patch(): + # type: () -> None + if getattr(httpx, "_datadog_patch", False): + return + + httpx._datadog_patch = True + + pin = Pin() + + if HTTPX_VERSION >= (0, 11): + # httpx==0.11 created synchronous Client class separate from AsyncClient + _w(httpx.Client, "send", _wrapped_sync_send) + _w(httpx.AsyncClient, "send", _wrapped_async_send) + pin.onto(httpx.AsyncClient) + else: + # httpx==0.9 Client class was asynchronous, httpx==0.10 made Client synonymous with AsyncClient + _w(httpx.Client, "send", _wrapped_async_send) + + pin.onto(httpx.Client) + + +def unpatch(): + # type: () -> None + if not getattr(httpx, "_datadog_patch", False): + return + + httpx._datadog_patch = False + + if HTTPX_VERSION >= (0, 11): + # See above patching code for when this patching occurred + _u(httpx.AsyncClient, "send") + + _u(httpx.Client, "send") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/__init__.py new file mode 100644 index 0000000..5e2214f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/__init__.py @@ -0,0 +1,41 @@ +""" +The ``jinja2`` integration traces templates loading, compilation and rendering. +Auto instrumentation is available using the ``patch``. The following is an example:: + + from ddtrace import patch + from jinja2 import Environment, FileSystemLoader + + patch(jinja2=True) + + env = Environment( + loader=FileSystemLoader("templates") + ) + template = env.get_template('mytemplate.html') + + +The library can be configured globally and per instance, using the Configuration API:: + + from ddtrace import config + + # Change service name globally + config.jinja2['service_name'] = 'jinja-templates' + + # change the service name only for this environment + cfg = config.get_from(env) + cfg['service_name'] = 'jinja-templates' + +By default, the service name is set to None, so it is inherited from the parent span. +If there is no parent span and the service name is not overridden the agent will drop the traces. 
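+
+For example (a minimal sketch; the service name below is only illustrative), a
+standalone script with no parent span can set a service name explicitly, before
+patching, so its template-rendering traces are not dropped::
+
+    from ddtrace import config, patch
+
+    config.jinja2['service_name'] = 'template-renderer'
+    patch(jinja2=True)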
+""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["jinja2"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/constants.py new file mode 100644 index 0000000..1bda6e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/constants.py @@ -0,0 +1 @@ +DEFAULT_TEMPLATE_NAME = "" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/patch.py new file mode 100644 index 0000000..fede2c6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/jinja2/patch.py @@ -0,0 +1,110 @@ +import os + +import jinja2 + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanTypes +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...pin import Pin +from ..trace_utils import unwrap as _u +from .constants import DEFAULT_TEMPLATE_NAME + + +# default settings +config._add( + "jinja2", + { + "service_name": os.getenv("DD_JINJA2_SERVICE_NAME"), + }, +) + + +def get_version(): + # type: () -> str + return getattr(jinja2, "__version__", "") + + +def patch(): + if getattr(jinja2, "__datadog_patch", False): + # already patched + return + jinja2.__datadog_patch = True + Pin( + service=config.jinja2["service_name"], + _config=config.jinja2, + ).onto(jinja2.environment.Environment) + _w(jinja2, "environment.Template.render", _wrap_render) + _w(jinja2, "environment.Template.generate", _wrap_render) + _w(jinja2, "environment.Environment.compile", _wrap_compile) + _w(jinja2, "environment.Environment._load_template", _wrap_load_template) + + +def unpatch(): + if not getattr(jinja2, "__datadog_patch", False): + return + jinja2.__datadog_patch = False + _u(jinja2.Template, "render") + _u(jinja2.Template, "generate") + _u(jinja2.Environment, "compile") + _u(jinja2.Environment, "_load_template") + + +def _wrap_render(wrapped, instance, args, kwargs): + """Wrap `Template.render()` or `Template.generate()`""" + pin = Pin.get_from(instance.environment) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + template_name = str(instance.name or DEFAULT_TEMPLATE_NAME) + with pin.tracer.trace("jinja2.render", pin.service, span_type=SpanTypes.TEMPLATE) as span: + span.set_tag_str(COMPONENT, config.jinja2.integration_name) + + span.set_tag(SPAN_MEASURED_KEY) + try: + return wrapped(*args, **kwargs) + finally: + span.resource = template_name + span.set_tag_str("jinja2.template_name", template_name) + + +def _wrap_compile(wrapped, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + try: + template_name = get_argument_value(args, kwargs, 1, "name") + except ArgumentError: + template_name = DEFAULT_TEMPLATE_NAME + + with pin.tracer.trace("jinja2.compile", pin.service, span_type=SpanTypes.TEMPLATE) as span: + try: + return wrapped(*args, **kwargs) + finally: + span.set_tag_str(COMPONENT, config.jinja2.integration_name) + + span.resource = template_name + 
span.set_tag_str("jinja2.template_name", template_name) + + +def _wrap_load_template(wrapped, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + template_name = get_argument_value(args, kwargs, 0, "name") + with pin.tracer.trace("jinja2.load", pin.service, span_type=SpanTypes.TEMPLATE) as span: + template = None + try: + template = wrapped(*args, **kwargs) + return template + finally: + span.resource = template_name + span.set_tag_str("jinja2.template_name", template_name) + if template: + span.set_tag_str("jinja2.template_path", template.filename) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/__init__.py new file mode 100644 index 0000000..f49a5cc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/__init__.py @@ -0,0 +1,55 @@ +""" +This integration instruments the ``confluent-kafka`` +library to trace event streaming. + +Enabling +~~~~~~~~ + +The kafka integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch() ` to manually enable the integration:: + + from ddtrace import patch + patch(kafka=True) + import confluent_kafka + ... + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.kafka["service"] + + The service name reported by default for your kafka spans. + + This option can also be set with the ``DD_KAFKA_SERVICE`` environment + variable. + + Default: ``"kafka"`` + + +To configure the kafka integration using the +``Pin`` API:: + + from ddtrace import Pin + from ddtrace import patch + + # Make sure to patch before importing confluent_kafka + patch(kafka=True) + + import confluent_kafka + + Pin.override(confluent_kafka, service="custom-service-name") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["confluent_kafka"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/patch.py new file mode 100644 index 0000000..4218b11 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kafka/patch.py @@ -0,0 +1,287 @@ +import os + +import confluent_kafka + +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import kafka as kafkax +from ddtrace.internal import core +from ddtrace.internal.compat import time_ns +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.constants import MESSAGING_SYSTEM +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_messaging_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils import ArgumentError +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils import set_argument_value +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.version import parse_version 
+from ddtrace.pin import Pin +from ddtrace.propagation.http import HTTPPropagator as Propagator + + +_Producer = confluent_kafka.Producer +_Consumer = confluent_kafka.Consumer +_SerializingProducer = confluent_kafka.SerializingProducer if hasattr(confluent_kafka, "SerializingProducer") else None +_DeserializingConsumer = ( + confluent_kafka.DeserializingConsumer if hasattr(confluent_kafka, "DeserializingConsumer") else None +) + + +log = get_logger(__name__) + + +config._add( + "kafka", + dict( + _default_service=schematize_service_name("kafka"), + distributed_tracing_enabled=asbool(os.getenv("DD_KAFKA_PROPAGATION_ENABLED", default=False)), + trace_empty_poll_enabled=asbool(os.getenv("DD_KAFKA_EMPTY_POLL_ENABLED", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(confluent_kafka, "__version__", "") + + +KAFKA_VERSION_TUPLE = parse_version(get_version()) + + +_SerializationContext = confluent_kafka.serialization.SerializationContext if KAFKA_VERSION_TUPLE >= (1, 4, 0) else None +_MessageField = confluent_kafka.serialization.MessageField if KAFKA_VERSION_TUPLE >= (1, 4, 0) else None + + +class TracedProducerMixin: + def __init__(self, config, *args, **kwargs): + super(TracedProducerMixin, self).__init__(config, *args, **kwargs) + self._dd_bootstrap_servers = ( + config.get("bootstrap.servers") + if config.get("bootstrap.servers") is not None + else config.get("metadata.broker.list") + ) + + # in older versions of confluent_kafka, bool(Producer()) evaluates to False, + # which makes the Pin functionality ignore it. + def __bool__(self): + return True + + __nonzero__ = __bool__ + + +class TracedConsumerMixin: + def __init__(self, config, *args, **kwargs): + super(TracedConsumerMixin, self).__init__(config, *args, **kwargs) + self._group_id = config.get("group.id", "") + self._auto_commit = asbool(config.get("enable.auto.commit", True)) + + +class TracedConsumer(TracedConsumerMixin, confluent_kafka.Consumer): + pass + + +class TracedProducer(TracedProducerMixin, confluent_kafka.Producer): + pass + + +class TracedDeserializingConsumer(TracedConsumerMixin, confluent_kafka.DeserializingConsumer): + pass + + +class TracedSerializingProducer(TracedProducerMixin, confluent_kafka.SerializingProducer): + pass + + +def patch(): + if getattr(confluent_kafka, "_datadog_patch", False): + return + confluent_kafka._datadog_patch = True + + confluent_kafka.Producer = TracedProducer + confluent_kafka.Consumer = TracedConsumer + if _SerializingProducer is not None: + confluent_kafka.SerializingProducer = TracedSerializingProducer + if _DeserializingConsumer is not None: + confluent_kafka.DeserializingConsumer = TracedDeserializingConsumer + + for producer in (TracedProducer, TracedSerializingProducer): + trace_utils.wrap(producer, "produce", traced_produce) + for consumer in (TracedConsumer, TracedDeserializingConsumer): + trace_utils.wrap(consumer, "poll", traced_poll) + trace_utils.wrap(consumer, "commit", traced_commit) + Pin().onto(confluent_kafka.Producer) + Pin().onto(confluent_kafka.Consumer) + Pin().onto(confluent_kafka.SerializingProducer) + Pin().onto(confluent_kafka.DeserializingConsumer) + + +def unpatch(): + if getattr(confluent_kafka, "_datadog_patch", False): + confluent_kafka._datadog_patch = False + + for producer in (TracedProducer, TracedSerializingProducer): + if trace_utils.iswrapped(producer.produce): + trace_utils.unwrap(producer, "produce") + for consumer in (TracedConsumer, TracedDeserializingConsumer): + if trace_utils.iswrapped(consumer.poll): + 
trace_utils.unwrap(consumer, "poll") + if trace_utils.iswrapped(consumer.commit): + trace_utils.unwrap(consumer, "commit") + + confluent_kafka.Producer = _Producer + confluent_kafka.Consumer = _Consumer + if _SerializingProducer is not None: + confluent_kafka.SerializingProducer = _SerializingProducer + if _DeserializingConsumer is not None: + confluent_kafka.DeserializingConsumer = _DeserializingConsumer + + +def traced_produce(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + topic = get_argument_value(args, kwargs, 0, "topic") or "" + core.set_item("kafka_topic", topic) + try: + value = get_argument_value(args, kwargs, 1, "value") + except ArgumentError: + value = None + message_key = kwargs.get("key", "") or "" + partition = kwargs.get("partition", -1) + headers = get_argument_value(args, kwargs, 6, "headers", optional=True) or {} + with pin.tracer.trace( + schematize_messaging_operation(kafkax.PRODUCE, provider="kafka", direction=SpanDirection.OUTBOUND), + service=trace_utils.ext_service(pin, config.kafka), + span_type=SpanTypes.WORKER, + ) as span: + core.dispatch("kafka.produce.start", (instance, args, kwargs, isinstance(instance, _SerializingProducer), span)) + span.set_tag_str(MESSAGING_SYSTEM, kafkax.SERVICE) + span.set_tag_str(COMPONENT, config.kafka.integration_name) + span.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) + span.set_tag_str(kafkax.TOPIC, topic) + + if _SerializingProducer is not None and isinstance(instance, _SerializingProducer): + serialized_key = serialize_key(instance, topic, message_key, headers) + if serialized_key is not None: + span.set_tag_str(kafkax.MESSAGE_KEY, serialized_key) + else: + span.set_tag_str(kafkax.MESSAGE_KEY, message_key) + + span.set_tag(kafkax.PARTITION, partition) + span.set_tag_str(kafkax.TOMBSTONE, str(value is None)) + span.set_tag(SPAN_MEASURED_KEY) + if instance._dd_bootstrap_servers is not None: + span.set_tag_str(kafkax.HOST_LIST, instance._dd_bootstrap_servers) + rate = config.kafka.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + + # inject headers with Datadog tags if trace propagation is enabled + if config.kafka.distributed_tracing_enabled: + # inject headers with Datadog tags: + headers = get_argument_value(args, kwargs, 6, "headers", True) or {} + Propagator.inject(span.context, headers) + args, kwargs = set_argument_value(args, kwargs, 6, "headers", headers) + return func(*args, **kwargs) + + +def traced_poll(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # we must get start time now since execute before starting a span in order to get distributed context + # if it exists + start_ns = time_ns() + # wrap in a try catch and raise exception after span is started + err = None + try: + message = func(*args, **kwargs) + except Exception as e: + err = e + ctx = None + if message and config.kafka.distributed_tracing_enabled and message.headers(): + ctx = Propagator.extract(dict(message.headers())) + if message or config.kafka.trace_empty_poll_enabled: + with pin.tracer.start_span( + name=schematize_messaging_operation(kafkax.CONSUME, provider="kafka", direction=SpanDirection.PROCESSING), + service=trace_utils.ext_service(pin, config.kafka), + span_type=SpanTypes.WORKER, + child_of=ctx if ctx is not None else pin.tracer.context_provider.active(), + activate=True, + ) as span: + # reset span start time to before function 
call + span.start_ns = start_ns + + span.set_tag_str(MESSAGING_SYSTEM, kafkax.SERVICE) + span.set_tag_str(COMPONENT, config.kafka.integration_name) + span.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) + span.set_tag_str(kafkax.RECEIVED_MESSAGE, str(message is not None)) + span.set_tag_str(kafkax.GROUP_ID, instance._group_id) + if message is not None: + core.set_item("kafka_topic", message.topic()) + core.dispatch("kafka.consume.start", (instance, message, span)) + + message_key = message.key() or "" + message_offset = message.offset() or -1 + span.set_tag_str(kafkax.TOPIC, message.topic()) + + # If this is a deserializing consumer, do not set the key as a tag since we + # do not have the serialization function + if ( + (_DeserializingConsumer is not None and not isinstance(instance, _DeserializingConsumer)) + or isinstance(message_key, str) + or isinstance(message_key, bytes) + ): + span.set_tag_str(kafkax.MESSAGE_KEY, message_key) + span.set_tag(kafkax.PARTITION, message.partition()) + span.set_tag_str(kafkax.TOMBSTONE, str(len(message) == 0)) + span.set_tag(kafkax.MESSAGE_OFFSET, message_offset) + span.set_tag(SPAN_MEASURED_KEY) + rate = config.kafka.get_analytics_sample_rate() + if rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, rate) + + # raise exception if one was encountered + if err is not None: + raise err + return message + else: + if err is not None: + raise err + else: + return message + + +def traced_commit(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + core.dispatch("kafka.commit.start", (instance, args, kwargs)) + + return func(*args, **kwargs) + + +def serialize_key(instance, topic, key, headers): + if _SerializationContext is not None and _MessageField is not None: + ctx = _SerializationContext(topic, _MessageField.KEY, headers) + if hasattr(instance, "_key_serializer") and instance._key_serializer is not None: + try: + key = instance._key_serializer(key, ctx) + return key + except Exception: + log.debug("Failed to set Kafka Consumer key tag: %s", str(key)) + return None + else: + log.warning("Failed to set Kafka Consumer key tag, no method available to serialize key: %s", str(key)) + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/__init__.py new file mode 100644 index 0000000..009a4fe --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/__init__.py @@ -0,0 +1,45 @@ +"""Instrument kombu to report AMQP messaging. + +``patch_all`` will not automatically patch your Kombu client to make it work, as this would conflict with the +Celery integration. You must specifically request kombu be patched, as in the example below. + +Note: To permit distributed tracing for the kombu integration you must enable the tracer with priority +sampling. Refer to the documentation here: +https://ddtrace.readthedocs.io/en/stable/advanced_usage.html#priority-sampling + +Without enabling distributed tracing, spans within a trace generated by the kombu integration might be dropped +without the whole trace being dropped. 
+:: + + from ddtrace import Pin, patch + import kombu + + # If not patched yet, you can patch kombu specifically + patch(kombu=True) + + # This will report a span with the default settings + conn = kombu.Connection("amqp://guest:guest@127.0.0.1:5672//") + conn.connect() + task_queue = kombu.Queue('tasks', kombu.Exchange('tasks'), routing_key='tasks') + to_publish = {'hello': 'world'} + producer = conn.Producer() + producer.publish(to_publish, + exchange=task_queue.exchange, + routing_key=task_queue.routing_key, + declare=[task_queue]) + + # Use a pin to specify metadata related to this client + Pin.override(producer, service='kombu-consumer') +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["kombu", "kombu.messaging"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/constants.py new file mode 100644 index 0000000..bcada46 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/constants.py @@ -0,0 +1 @@ +DEFAULT_SERVICE = "kombu" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/patch.py new file mode 100644 index 0000000..e1297c8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/patch.py @@ -0,0 +1,167 @@ +import os + +# 3p +import kombu + +from ddtrace import config +from ddtrace.internal import core +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_messaging_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils.formats import asbool +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import kombu as kombux +from ...internal.utils import get_argument_value +from ...internal.utils.wrappers import unwrap +from ...pin import Pin +from ...propagation.http import HTTPPropagator + +# project +from .. import trace_utils +from .constants import DEFAULT_SERVICE +from .utils import HEADER_POS +from .utils import extract_conn_tags +from .utils import get_body_length_from_args +from .utils import get_exchange_from_args +from .utils import get_routing_key_from_args + + +def get_version(): + # type: () -> str + return str(kombu.__version__) + + +# kombu default settings + +config._add( + "kombu", + { + "distributed_tracing_enabled": asbool(os.getenv("DD_KOMBU_DISTRIBUTED_TRACING", default=True)), + "service_name": config.service or os.getenv("DD_KOMBU_SERVICE_NAME", default=DEFAULT_SERVICE), + }, +) + +propagator = HTTPPropagator + + +def patch(): + """Patch the instrumented methods + + This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top + of Kombu. However, it means that any "import kombu.Connection" won't be instrumented. 
+ """ + if getattr(kombu, "_datadog_patch", False): + return + kombu._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + # We wrap the _publish method because the publish method: + # * defines defaults in its kwargs + # * potentially overrides kwargs with values from self + # * extracts/normalizes things like exchange + _w("kombu", "Producer._publish", traced_publish) + _w("kombu", "Consumer.receive", traced_receive) + + # We do not provide a service for producer spans since they represent + # external calls to another service. + # Instead the service should be inherited from the parent. + if config.service: + prod_service = None + # DEV: backwards-compatibility for users who set a kombu service + else: + prod_service = os.getenv("DD_KOMBU_SERVICE_NAME", default=DEFAULT_SERVICE) + + Pin( + service=schematize_service_name(prod_service), + ).onto(kombu.messaging.Producer) + + Pin(service=schematize_service_name(config.kombu["service_name"])).onto(kombu.messaging.Consumer) + + +def unpatch(): + if getattr(kombu, "_datadog_patch", False): + kombu._datadog_patch = False + unwrap(kombu.Producer, "_publish") + unwrap(kombu.Consumer, "receive") + + +# +# tracing functions +# + + +def traced_receive(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # Signature only takes 2 args: (body, message) + message = get_argument_value(args, kwargs, 1, "message") + + trace_utils.activate_distributed_headers(pin.tracer, request_headers=message.headers, int_config=config.kombu) + + with pin.tracer.trace( + schematize_messaging_operation(kombux.RECEIVE_NAME, provider="kombu", direction=SpanDirection.PROCESSING), + service=pin.service, + span_type=SpanTypes.WORKER, + ) as s: + s.set_tag_str(COMPONENT, config.kombu.integration_name) + + # set span.kind to the type of operation being performed + s.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) + + s.set_tag(SPAN_MEASURED_KEY) + # run the command + exchange = message.delivery_info["exchange"] + s.resource = exchange + s.set_tag_str(kombux.EXCHANGE, exchange) + + s.set_tags(extract_conn_tags(message.channel.connection)) + s.set_tag_str(kombux.ROUTING_KEY, message.delivery_info["routing_key"]) + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.kombu.get_analytics_sample_rate()) + result = func(*args, **kwargs) + core.dispatch("kombu.amqp.receive.post", [instance, message, s]) + return result + + +def traced_publish(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + with pin.tracer.trace( + schematize_messaging_operation(kombux.PUBLISH_NAME, provider="kombu", direction=SpanDirection.OUTBOUND), + service=pin.service, + span_type=SpanTypes.WORKER, + ) as s: + s.set_tag_str(COMPONENT, config.kombu.integration_name) + + # set span.kind to the type of operation being performed + s.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) + + s.set_tag(SPAN_MEASURED_KEY) + exchange_name = get_exchange_from_args(args) + s.resource = exchange_name + s.set_tag_str(kombux.EXCHANGE, exchange_name) + if pin.tags: + s.set_tags(pin.tags) + s.set_tag_str(kombux.ROUTING_KEY, get_routing_key_from_args(args)) + s.set_tags(extract_conn_tags(instance.channel.connection)) + s.set_metric(kombux.BODY_LEN, get_body_length_from_args(args)) + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.kombu.get_analytics_sample_rate()) + # run the command + if config.kombu.distributed_tracing_enabled: + 
propagator.inject(s.context, args[HEADER_POS]) + core.dispatch( + "kombu.amqp.publish.pre", [args, kwargs, s] + ) # Has to happen after trace injection for actual payload size + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/utils.py new file mode 100644 index 0000000..fcce88a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/kombu/utils.py @@ -0,0 +1,49 @@ +""" +Some utils used by the dogtrace kombu integration +""" +from ...ext import kombu as kombux +from ...ext import net + + +PUBLISH_BODY_IDX = 0 +PUBLISH_ROUTING_KEY = 6 +PUBLISH_EXCHANGE_IDX = 9 + +HEADER_POS = 4 + + +def extract_conn_tags(connection): + """Transform kombu conn info into dogtrace metas""" + try: + host, port = connection.host.split(":") + return { + net.TARGET_HOST: host, + net.TARGET_PORT: port, + kombux.VHOST: connection.virtual_host, + } + except AttributeError: + # Unlikely that we don't have .host or .virtual_host but let's not die over it + return {} + + +def get_exchange_from_args(args): + """Extract the exchange + + The publish method extracts the name and hands that off to _publish (what we patch) + """ + + return args[PUBLISH_EXCHANGE_IDX] + + +def get_routing_key_from_args(args): + """Extract the routing key""" + + name = args[PUBLISH_ROUTING_KEY] + return name + + +def get_body_length_from_args(args): + """Extract the length of the body""" + + length = len(args[PUBLISH_BODY_IDX]) + return length diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/__init__.py new file mode 100644 index 0000000..4557e9d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/__init__.py @@ -0,0 +1,207 @@ +""" +The LangChain integration instruments the LangChain Python library to emit metrics, +traces, and logs (logs are disabled by default) for requests made to the LLMs, +chat models, embeddings, chains, and vector store interfaces. + +All metrics, logs, and traces submitted from the LangChain integration are tagged by: + +- ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. +- ``langchain.request.provider``: LLM provider used in the request. +- ``langchain.request.model``: LLM/Chat/Embeddings model used in the request. +- ``langchain.request.api_key``: LLM provider API key used to make the request (obfuscated into the format ``...XXXX`` where ``XXXX`` is the last 4 digits of the key). + +Metrics +~~~~~~~ + +The following metrics are collected by default by the LangChain integration. + +.. important:: + If the Agent is configured to use a non-default Statsd hostname or port, use ``DD_DOGSTATSD_URL`` to configure + ``ddtrace`` to use it. + + +.. py:data:: langchain.request.duration + + The duration of the LangChain request in seconds. + + Type: ``distribution`` + + +.. py:data:: langchain.request.error + + The number of errors from requests made with LangChain. + + Type: ``count`` + + +.. py:data:: langchain.tokens.prompt + + The number of tokens used in the prompt of a LangChain request. + + Type: ``distribution`` + + +.. py:data:: langchain.tokens.completion + + The number of tokens used in the completion of a LangChain response. + + Type: ``distribution`` + + +.. py:data:: langchain.tokens.total + + The total number of tokens used in the prompt and completion of a LangChain request/response. + + Type: ``distribution`` + + +.. 
py:data:: langchain.tokens.total_cost + + The estimated cost in USD based on token usage. + + Type: ``count`` + + +(beta) Prompt and Completion Sampling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following data is collected in span tags with a default sampling rate of ``1.0``: + +- Prompt inputs and completions for the ``LLM`` interface. +- Message inputs and completions for the ``ChatModel`` interface. +- Embedding inputs for the ``Embeddings`` interface. +- Prompt inputs, chain inputs, and outputs for the ``Chain`` interface. +- Query inputs and document outputs for the ``VectorStore`` interface. + +Prompt and message inputs and completions can also be emitted as log data. +Logs are **not** emitted by default. When logs are enabled they are sampled at ``0.1``. + +Read the **Global Configuration** section for information about enabling logs and configuring sampling +rates. + +.. important:: + + To submit logs, you must set the ``DD_API_KEY`` environment variable. + + Set ``DD_SITE`` to send logs to a Datadog site such as ``datadoghq.eu``. The default is ``datadoghq.com``. + + +Enabling +~~~~~~~~ + +The LangChain integration is enabled automatically when you use +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Note that these commands also enable the ``requests`` and ``aiohttp`` +integrations which trace HTTP requests to LLM providers, as well as the +``openai`` integration which traces requests to the OpenAI library. + +Alternatively, use :func:`patch() ` to manually enable the LangChain integration:: + + from ddtrace import config, patch + + # Note: be sure to configure the integration before calling ``patch()``! + # eg. config.langchain["logs_enabled"] = True + + patch(langchain=True) + + # to trace synchronous HTTP requests + # patch(langchain=True, requests=True) + + # to trace asynchronous HTTP requests (to the OpenAI library) + # patch(langchain=True, aiohttp=True) + + # to include underlying OpenAI spans from the OpenAI integration + # patch(langchain=True, openai=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.langchain["service"] + + The service name reported by default for LangChain requests. + + Alternatively, you can set this option with the ``DD_SERVICE`` or ``DD_LANGCHAIN_SERVICE`` environment + variables. + + Default: ``DD_SERVICE`` + + +.. py:data:: ddtrace.config.langchain["logs_enabled"] + + Enable collection of prompts and completions as logs. You can adjust the rate of prompts and completions collected + using the sample rate configuration described below. + + Alternatively, you can set this option with the ``DD_LANGCHAIN_LOGS_ENABLED`` environment + variable. + + Note that you must set the ``DD_API_KEY`` environment variable to enable sending logs. + + Default: ``False`` + + +.. py:data:: ddtrace.config.langchain["metrics_enabled"] + + Enable collection of LangChain metrics. + + If the Datadog Agent is configured to use a non-default Statsd hostname + or port, use ``DD_DOGSTATSD_URL`` to configure ``ddtrace`` to use it. + + Alternatively, you can set this option with the ``DD_LANGCHAIN_METRICS_ENABLED`` environment + variable. + + Default: ``True`` + + +.. py:data:: (beta) ddtrace.config.langchain["span_char_limit"] + + Configure the maximum number of characters for the following data within span tags: + + - Prompt inputs and completions + - Message inputs and completions + - Embedding inputs + + Text exceeding the maximum number of characters is truncated to the character limit + and has ``...`` appended to the end. 
+ + Alternatively, you can set this option with the ``DD_LANGCHAIN_SPAN_CHAR_LIMIT`` environment + variable. + + Default: ``128`` + + +.. py:data:: (beta) ddtrace.config.langchain["span_prompt_completion_sample_rate"] + + Configure the sample rate for the collection of prompts and completions as span tags. + + Alternatively, you can set this option with the ``DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``1.0`` + + +.. py:data:: (beta) ddtrace.config.langchain["log_prompt_completion_sample_rate"] + + Configure the sample rate for the collection of prompts and completions as logs. + + Alternatively, you can set this option with the ``DD_LANGCHAIN_LOG_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``0.1`` + +""" # noqa: E501 +from ...internal.utils.importlib import require_modules + + +required_modules = ["langchain"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + get_version = _patch.get_version + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/constants.py new file mode 100644 index 0000000..e160453 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/constants.py @@ -0,0 +1,87 @@ +text_embedding_models = ( + "OpenAIEmbeddings", + "HuggingFaceEmbeddings", + "CohereEmbeddings", + "ElasticsearchEmbeddings", + "JinaEmbeddings", + "LlamaCppEmbeddings", + "HuggingFaceHubEmbeddings", + "ModelScopeEmbeddings", + "TensorflowHubEmbeddings", + "SagemakerEndpointEmbeddings", + "HuggingFaceInstructEmbeddings", + "MosaicMLInstructorEmbeddings", + "SelfHostedEmbeddings", + "SelfHostedHuggingFaceEmbeddings", + "SelfHostedHuggingFaceInstructEmbeddings", + "FakeEmbeddings", + "AlephAlphaAsymmetricSemanticEmbedding", + "AlephAlphaSymmetricSemanticEmbedding", + "SentenceTransformerEmbeddings", + "GooglePalmEmbeddings", + "MiniMaxEmbeddings", + "VertexAIEmbeddings", + "BedrockEmbeddings", + "DeepInfraEmbeddings", + "DashScopeEmbeddings", + "EmbaasEmbeddings", +) + +vectorstore_classes = ( + "AzureSearch", + "Redis", + "ElasticVectorSearch", + "FAISS", + "VectorStore", + "Pinecone", + "Weaviate", + "Qdrant", + "Milvus", + "Zilliz", + "SingleStoreDB", + "Chroma", + "OpenSearchVectorSearch", + "AtlasDB", + "DeepLake", + "Annoy", + "MongoDBAtlasVectorSearch", + "MyScale", + "SKLearnVectorStore", + "SupabaseVectorStore", + "AnalyticDB", + "Vectara", + "Tair", + "LanceDB", + "DocArrayHnswSearch", + "DocArrayInMemorySearch", + "Typesense", + "Hologres", + "Clickhouse", + "Tigris", + "MatchingEngine", + "AwaDB", +) + +agent_output_parser_classes = { + "chat": {"output_parser": "ChatOutputParser"}, + "conversational": {"output_parser": "ConvoOutputParser"}, + "conversational_chat": {"output_parser": "ConvoOutputParser"}, + "mrkl": {"output_parser": "MRKLOutputParser"}, + "output_parsers": { + "json": "JSONAgentOutputParser", + "openai_functions": "OpenAIFunctionsAgentOutputParser", + "react_json_single_input": "ReActJsonSingleInputOutputParser", + "react_single_input": "ReActSingleInputOutputParser", + "self_ask": "SelfAskOutputParser", + "xml": "XMLAgentOutputParser", + }, + "react": {"output_parser": "ReActOutputParser"}, + "self_ask_with_search": {"output_parser": "SelfAskOutputParser"}, + "structured_chat": {"output_parser": "StructuredChatOutputParser"}, +} 
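+# The tuples and mapping above list class names only; at patch time the
+# integration looks each name up on the installed langchain package and wraps
+# just the classes that are actually present in that version.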
+ +API_KEY = "langchain.request.api_key" +MODEL = "langchain.request.model" +COMPLETION_TOKENS = "langchain.tokens.completion_tokens" +PROMPT_TOKENS = "langchain.tokens.prompt_tokens" +TOTAL_COST = "langchain.tokens.total_cost" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/patch.py new file mode 100644 index 0000000..a49d8a0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/langchain/patch.py @@ -0,0 +1,825 @@ +import os +import sys +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union + +import langchain + +from ddtrace.appsec._iast import _is_iast_enabled + + +try: + from langchain.callbacks.openai_info import get_openai_token_cost_for_model +except ImportError: + from langchain_community.callbacks.openai_info import get_openai_token_cost_for_model +from pydantic import SecretStr + +from ddtrace import config +from ddtrace.contrib.langchain.constants import API_KEY +from ddtrace.contrib.langchain.constants import COMPLETION_TOKENS +from ddtrace.contrib.langchain.constants import MODEL +from ddtrace.contrib.langchain.constants import PROMPT_TOKENS +from ddtrace.contrib.langchain.constants import TOTAL_COST +from ddtrace.contrib.langchain.constants import agent_output_parser_classes +from ddtrace.contrib.langchain.constants import text_embedding_models +from ddtrace.contrib.langchain.constants import vectorstore_classes +from ddtrace.contrib.trace_utils import unwrap +from ddtrace.contrib.trace_utils import with_traced_module +from ddtrace.contrib.trace_utils import wrap +from ddtrace.internal.llmobs.integrations import LangChainIntegration +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import ArgumentError +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.formats import deep_getattr +from ddtrace.pin import Pin +from ddtrace.vendor import wrapt + + +if TYPE_CHECKING: + from ddtrace import Span # noqa:F401 + + +log = get_logger(__name__) + + +def get_version(): + # type: () -> str + return getattr(langchain, "__version__", "") + + +config._add( + "langchain", + { + "logs_enabled": asbool(os.getenv("DD_LANGCHAIN_LOGS_ENABLED", False)), + "metrics_enabled": asbool(os.getenv("DD_LANGCHAIN_METRICS_ENABLED", True)), + "span_prompt_completion_sample_rate": float(os.getenv("DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), + "log_prompt_completion_sample_rate": float(os.getenv("DD_LANGCHAIN_LOG_PROMPT_COMPLETION_SAMPLE_RATE", 0.1)), + "span_char_limit": int(os.getenv("DD_LANGCHAIN_SPAN_CHAR_LIMIT", 128)), + }, +) + + +def _extract_model_name(instance): + # type: (langchain.llm.BaseLLM) -> Optional[str] + """Extract model name or ID from llm instance.""" + for attr in ("model", "model_name", "model_id", "model_key", "repo_id"): + if hasattr(instance, attr): + return getattr(instance, attr) + return None + + +def _format_api_key(api_key: Union[str, SecretStr]) -> str: + """Obfuscate a given LLM provider API key by returning the last four characters.""" + if hasattr(api_key, "get_secret_value"): + api_key = api_key.get_secret_value() + + if not api_key or len(api_key) < 4: + return "" + return "...%s" % api_key[-4:] + + +def _extract_api_key(instance): + # type: (Any) -> str + """ + Extract and format LLM-provider API key from instance. 
+ Note that langchain's LLM/ChatModel/Embeddings interfaces do not have a + standard attribute name for storing the provider-specific API key, so make a + best effort here by checking for attributes that end with `api_key/api_token`. + """ + api_key_attrs = [a for a in dir(instance) if a.endswith(("api_token", "api_key"))] + if api_key_attrs and hasattr(instance, str(api_key_attrs[0])): + api_key = getattr(instance, api_key_attrs[0], None) + if api_key: + return _format_api_key(api_key) + return "" + + +def _tag_openai_token_usage(span, llm_output, propagated_cost=0, propagate=False): + # type: (Span, Dict[str, Any], int, bool) -> None + """ + Extract token usage from llm_output, tag on span. + Calculate the total cost for each LLM/chat_model, then propagate those values up the trace so that + the root span will store the total token_usage/cost of all of its descendants. + """ + for token_type in ("prompt", "completion", "total"): + current_metric_value = span.get_metric("langchain.tokens.%s_tokens" % token_type) or 0 + metric_value = llm_output["token_usage"].get("%s_tokens" % token_type, 0) + span.set_metric("langchain.tokens.%s_tokens" % token_type, current_metric_value + metric_value) + total_cost = span.get_metric(TOTAL_COST) or 0 + if not propagate: + try: + completion_cost = get_openai_token_cost_for_model( + span.get_tag(MODEL), + span.get_metric(COMPLETION_TOKENS), + is_completion=True, + ) + prompt_cost = get_openai_token_cost_for_model(span.get_tag(MODEL), span.get_metric(PROMPT_TOKENS)) + total_cost = completion_cost + prompt_cost + except ValueError: + # If not in langchain's openai model catalog, the above helpers will raise a ValueError. + log.debug("Cannot calculate token/cost as the model is not in LangChain's OpenAI model catalog.") + span.set_metric(TOTAL_COST, propagated_cost + total_cost) + if span._parent is not None: + _tag_openai_token_usage(span._parent, llm_output, propagated_cost=propagated_cost + total_cost, propagate=True) + + +@with_traced_module +def traced_llm_generate(langchain, pin, func, instance, args, kwargs): + llm_provider = instance._llm_type + prompts = get_argument_value(args, kwargs, 0, "prompts") + integration = langchain._datadog_integration + model = _extract_model_name(instance) + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="llm", + provider=llm_provider, + model=model, + api_key=_extract_api_key(instance), + ) + completions = None + try: + if integration.is_pc_sampled_span(span): + for idx, prompt in enumerate(prompts): + span.set_tag_str("langchain.request.prompts.%d" % idx, integration.trunc(str(prompt))) + for param, val in getattr(instance, "_identifying_params", {}).items(): + if isinstance(val, dict): + for k, v in val.items(): + span.set_tag_str("langchain.request.%s.parameters.%s.%s" % (llm_provider, param, k), str(v)) + else: + span.set_tag_str("langchain.request.%s.parameters.%s" % (llm_provider, param), str(val)) + + completions = func(*args, **kwargs) + if isinstance(instance, langchain.llms.OpenAI): + _tag_openai_token_usage(span, completions.llm_output) + integration.record_usage(span, completions.llm_output) + + for idx, completion in enumerate(completions.generations): + if integration.is_pc_sampled_span(span): + span.set_tag_str("langchain.response.completions.%d.text" % idx, integration.trunc(completion[0].text)) + if completion and completion[0].generation_info is not None: + span.set_tag_str( + "langchain.response.completions.%d.finish_reason" % idx, + 
str(completion[0].generation_info.get("finish_reason")), + ) + span.set_tag_str( + "langchain.response.completions.%d.logprobs" % idx, + str(completion[0].generation_info.get("logprobs")), + ) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + if completions is None: + log_completions = [] + else: + log_completions = [ + [{"text": completion.text} for completion in completions] for completions in completions.generations + ] + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "prompts": prompts, + "choices": log_completions, + }, + ) + return completions + + +@with_traced_module +async def traced_llm_agenerate(langchain, pin, func, instance, args, kwargs): + llm_provider = instance._llm_type + prompts = get_argument_value(args, kwargs, 0, "prompts") + integration = langchain._datadog_integration + model = _extract_model_name(instance) + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="llm", + provider=llm_provider, + model=model, + api_key=_extract_api_key(instance), + ) + completions = None + try: + if integration.is_pc_sampled_span(span): + for idx, prompt in enumerate(prompts): + span.set_tag_str("langchain.request.prompts.%d" % idx, integration.trunc(str(prompt))) + for param, val in getattr(instance, "_identifying_params", {}).items(): + if isinstance(val, dict): + for k, v in val.items(): + span.set_tag_str("langchain.request.%s.parameters.%s.%s" % (llm_provider, param, k), str(v)) + else: + span.set_tag_str("langchain.request.%s.parameters.%s" % (llm_provider, param), str(val)) + + completions = await func(*args, **kwargs) + if isinstance(instance, langchain.llms.OpenAI): + _tag_openai_token_usage(span, completions.llm_output) + integration.record_usage(span, completions.llm_output) + + for idx, completion in enumerate(completions.generations): + if integration.is_pc_sampled_span(span): + span.set_tag_str("langchain.response.completions.%d.text" % idx, integration.trunc(completion[0].text)) + if completion and completion[0].generation_info is not None: + span.set_tag_str( + "langchain.response.completions.%d.finish_reason" % idx, + str(completion[0].generation_info.get("finish_reason")), + ) + span.set_tag_str( + "langchain.response.completions.%d.logprobs" % idx, + str(completion[0].generation_info.get("logprobs")), + ) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + if completions is None: + log_completions = [] + else: + log_completions = [ + [{"text": completion.text} for completion in completions] for completions in completions.generations + ] + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "prompts": prompts, + "choices": log_completions, + }, + ) + return completions + + +@with_traced_module +def traced_chat_model_generate(langchain, pin, func, instance, args, kwargs): + llm_provider = instance._llm_type.split("-")[0] + chat_messages = get_argument_value(args, kwargs, 0, "messages") + 
integration = langchain._datadog_integration + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="chat_model", + provider=llm_provider, + model=_extract_model_name(instance), + api_key=_extract_api_key(instance), + ) + chat_completions = None + try: + for message_set_idx, message_set in enumerate(chat_messages): + for message_idx, message in enumerate(message_set): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "langchain.request.messages.%d.%d.content" % (message_set_idx, message_idx), + integration.trunc(message.content), + ) + span.set_tag_str( + "langchain.request.messages.%d.%d.message_type" % (message_set_idx, message_idx), + message.__class__.__name__, + ) + for param, val in getattr(instance, "_identifying_params", {}).items(): + if isinstance(val, dict): + for k, v in val.items(): + span.set_tag_str("langchain.request.%s.parameters.%s.%s" % (llm_provider, param, k), str(v)) + else: + span.set_tag_str("langchain.request.%s.parameters.%s" % (llm_provider, param), str(val)) + + chat_completions = func(*args, **kwargs) + if isinstance(instance, langchain.chat_models.ChatOpenAI): + _tag_openai_token_usage(span, chat_completions.llm_output) + integration.record_usage(span, chat_completions.llm_output) + + for message_set_idx, message_set in enumerate(chat_completions.generations): + for idx, chat_completion in enumerate(message_set): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "langchain.response.completions.%d.%d.content" % (message_set_idx, idx), + integration.trunc(chat_completion.text), + ) + span.set_tag_str( + "langchain.response.completions.%d.%d.message_type" % (message_set_idx, idx), + chat_completion.message.__class__.__name__, + ) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + if chat_completions is None: + log_chat_completions = [] + else: + log_chat_completions = [ + [ + { + "content": message.text, + "message_type": message.message.__class__.__name__, + } + for message in messages + ] + for messages in chat_completions.generations + ] + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "messages": [ + [ + { + "content": message.content, + "message_type": message.__class__.__name__, + } + for message in messages + ] + for messages in chat_messages + ], + "choices": log_chat_completions, + }, + ) + return chat_completions + + +@with_traced_module +async def traced_chat_model_agenerate(langchain, pin, func, instance, args, kwargs): + llm_provider = instance._llm_type.split("-")[0] + chat_messages = get_argument_value(args, kwargs, 0, "messages") + integration = langchain._datadog_integration + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="chat_model", + provider=llm_provider, + model=_extract_model_name(instance), + api_key=_extract_api_key(instance), + ) + chat_completions = None + try: + for message_set_idx, message_set in enumerate(chat_messages): + for message_idx, message in enumerate(message_set): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "langchain.request.messages.%d.%d.content" % (message_set_idx, message_idx), + integration.trunc(message.content), + ) + 
span.set_tag_str( + "langchain.request.messages.%d.%d.message_type" % (message_set_idx, message_idx), + message.__class__.__name__, + ) + for param, val in getattr(instance, "_identifying_params", {}).items(): + if isinstance(val, dict): + for k, v in val.items(): + span.set_tag_str("langchain.request.%s.parameters.%s.%s" % (llm_provider, param, k), str(v)) + else: + span.set_tag_str("langchain.request.%s.parameters.%s" % (llm_provider, param), str(val)) + + chat_completions = await func(*args, **kwargs) + if isinstance(instance, langchain.chat_models.ChatOpenAI): + _tag_openai_token_usage(span, chat_completions.llm_output) + integration.record_usage(span, chat_completions.llm_output) + + for message_set_idx, message_set in enumerate(chat_completions.generations): + for idx, chat_completion in enumerate(message_set): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "langchain.response.completions.%d.%d.content" % (message_set_idx, idx), + integration.trunc(chat_completion.text), + ) + span.set_tag_str( + "langchain.response.completions.%d.%d.message_type" % (message_set_idx, idx), + chat_completion.message.__class__.__name__, + ) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + if chat_completions is None: + log_chat_completions = [] + else: + log_chat_completions = [ + [ + { + "content": message.text, + "message_type": message.message.__class__.__name__, + } + for message in messages + ] + for messages in chat_completions.generations + ] + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "messages": [ + [ + { + "content": message.content, + "message_type": message.__class__.__name__, + } + for message in messages + ] + for messages in chat_messages + ], + "choices": log_chat_completions, + }, + ) + return chat_completions + + +@with_traced_module +def traced_embedding(langchain, pin, func, instance, args, kwargs): + """ + This traces both embed_query(text) and embed_documents(texts), so we need to make sure + we get the right arg/kwarg. 
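+    ``embed_query`` receives a single string (``text``) while ``embed_documents``
+    receives a list of strings (``texts``), hence the argument-name fallback below.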
+ """ + try: + input_texts = get_argument_value(args, kwargs, 0, "texts") + except ArgumentError: + input_texts = get_argument_value(args, kwargs, 0, "text") + + provider = instance.__class__.__name__.split("Embeddings")[0].lower() + integration = langchain._datadog_integration + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="embedding", + provider=provider, + model=_extract_model_name(instance), + api_key=_extract_api_key(instance), + ) + try: + if isinstance(input_texts, str): + if integration.is_pc_sampled_span(span): + span.set_tag_str("langchain.request.inputs.0.text", integration.trunc(input_texts)) + span.set_metric("langchain.request.input_count", 1) + else: + if integration.is_pc_sampled_span(span): + for idx, text in enumerate(input_texts): + span.set_tag_str("langchain.request.inputs.%d.text" % idx, integration.trunc(text)) + span.set_metric("langchain.request.input_count", len(input_texts)) + # langchain currently does not support token tracking for OpenAI embeddings: + # https://github.com/hwchase17/langchain/issues/945 + embeddings = func(*args, **kwargs) + if isinstance(embeddings, list) and embeddings and isinstance(embeddings[0], list): + for idx, embedding in enumerate(embeddings): + span.set_metric("langchain.response.outputs.%d.embedding_length" % idx, len(embedding)) + else: + span.set_metric("langchain.response.outputs.embedding_length", len(embeddings)) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={"inputs": [input_texts] if isinstance(input_texts, str) else input_texts}, + ) + return embeddings + + +@with_traced_module +def traced_chain_call(langchain, pin, func, instance, args, kwargs): + integration = langchain._datadog_integration + span = integration.trace(pin, "%s.%s" % (instance.__module__, instance.__class__.__name__), interface_type="chain") + final_outputs = {} + try: + inputs = get_argument_value(args, kwargs, 0, "inputs") + if not isinstance(inputs, dict): + inputs = {instance.input_keys[0]: inputs} + if integration.is_pc_sampled_span(span): + for k, v in inputs.items(): + span.set_tag_str("langchain.request.inputs.%s" % k, integration.trunc(str(v))) + template = deep_getattr(instance, "prompt.template", default="") + if template: + span.set_tag_str("langchain.request.prompt", integration.trunc(str(template))) + final_outputs = func(*args, **kwargs) + if integration.is_pc_sampled_span(span): + for k, v in final_outputs.items(): + span.set_tag_str("langchain.response.outputs.%s" % k, integration.trunc(str(v))) + if _is_iast_enabled(): + taint_outputs(instance, inputs, final_outputs) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + log_inputs = {} + log_outputs = {} + for k, v in inputs.items(): + log_inputs[k] = str(v) + for k, v in final_outputs.items(): + log_outputs[k] = str(v) + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), 
+ attrs={ + "inputs": log_inputs, + "prompt": str(deep_getattr(instance, "prompt.template", default="")), + "outputs": log_outputs, + }, + ) + return final_outputs + + +@with_traced_module +async def traced_chain_acall(langchain, pin, func, instance, args, kwargs): + integration = langchain._datadog_integration + span = integration.trace(pin, "%s.%s" % (instance.__module__, instance.__class__.__name__), interface_type="chain") + final_outputs = {} + try: + inputs = get_argument_value(args, kwargs, 0, "inputs") + if not isinstance(inputs, dict): + inputs = {instance.input_keys[0]: inputs} + if integration.is_pc_sampled_span(span): + for k, v in inputs.items(): + span.set_tag_str("langchain.request.inputs.%s" % k, integration.trunc(str(v))) + template = deep_getattr(instance, "prompt.template", default="") + if template: + span.set_tag_str("langchain.request.prompt", integration.trunc(str(template))) + final_outputs = await func(*args, **kwargs) + if integration.is_pc_sampled_span(span): + for k, v in final_outputs.items(): + span.set_tag_str("langchain.response.outputs.%s" % k, integration.trunc(str(v))) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + log_inputs = {} + log_outputs = {} + for k, v in inputs.items(): + log_inputs[k] = str(v) + for k, v in final_outputs.items(): + log_outputs[k] = str(v) + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "inputs": log_inputs, + "prompt": str(deep_getattr(instance, "prompt.template", default="")), + "outputs": log_outputs, + }, + ) + return final_outputs + + +@with_traced_module +def traced_similarity_search(langchain, pin, func, instance, args, kwargs): + integration = langchain._datadog_integration + query = get_argument_value(args, kwargs, 0, "query") + k = kwargs.get("k", args[1] if len(args) >= 2 else None) + provider = instance.__class__.__name__.lower() + span = integration.trace( + pin, + "%s.%s" % (instance.__module__, instance.__class__.__name__), + interface_type="similarity_search", + provider=provider, + api_key=_extract_api_key(instance), + ) + documents = [] + try: + if integration.is_pc_sampled_span(span): + span.set_tag_str("langchain.request.query", integration.trunc(query)) + if k is not None: + span.set_tag_str("langchain.request.k", str(k)) + for kwarg_key, v in kwargs.items(): + span.set_tag_str("langchain.request.%s" % kwarg_key, str(v)) + if isinstance(instance, langchain.vectorstores.Pinecone): + span.set_tag_str( + "langchain.request.pinecone.environment", + instance._index.configuration.server_variables.get("environment", ""), + ) + span.set_tag_str( + "langchain.request.pinecone.index_name", + instance._index.configuration.server_variables.get("index_name", ""), + ) + span.set_tag_str( + "langchain.request.pinecone.project_name", + instance._index.configuration.server_variables.get("project_name", ""), + ) + api_key = instance._index.configuration.api_key.get("ApiKeyAuth", "") + span.set_tag_str(API_KEY, _format_api_key(api_key)) # override api_key for Pinecone + documents = func(*args, **kwargs) + span.set_metric("langchain.response.document_count", len(documents)) + for idx, document in enumerate(documents): + span.set_tag_str( + "langchain.response.document.%d.page_content" % idx, 
integration.trunc(str(document.page_content)) + ) + for kwarg_key, v in document.metadata.items(): + span.set_tag_str( + "langchain.response.document.%d.metadata.%s" % (idx, kwarg_key), integration.trunc(str(v)) + ) + except Exception: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + raise + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + if integration.is_pc_sampled_log(span): + integration.log( + span, + "info" if span.error == 0 else "error", + "sampled %s.%s" % (instance.__module__, instance.__class__.__name__), + attrs={ + "query": query, + "k": k or "", + "documents": [ + {"page_content": document.page_content, "metadata": document.metadata} for document in documents + ], + }, + ) + return documents + + +def patch(): + if getattr(langchain, "_datadog_patch", False): + return + langchain._datadog_patch = True + + Pin().onto(langchain) + integration = LangChainIntegration(integration_config=config.langchain) + langchain._datadog_integration = integration + + # Langchain doesn't allow wrapping directly from root, so we have to import the base classes first before wrapping. + # ref: https://github.com/DataDog/dd-trace-py/issues/7123 + from langchain import embeddings # noqa:F401 + from langchain import vectorstores # noqa:F401 + from langchain.chains.base import Chain # noqa:F401 + from langchain.chat_models.base import BaseChatModel # noqa:F401 + from langchain.llms.base import BaseLLM # noqa:F401 + + wrap("langchain", "llms.base.BaseLLM.generate", traced_llm_generate(langchain)) + wrap("langchain", "llms.base.BaseLLM.agenerate", traced_llm_agenerate(langchain)) + wrap("langchain", "chat_models.base.BaseChatModel.generate", traced_chat_model_generate(langchain)) + wrap("langchain", "chat_models.base.BaseChatModel.agenerate", traced_chat_model_agenerate(langchain)) + wrap("langchain", "chains.base.Chain.__call__", traced_chain_call(langchain)) + wrap("langchain", "chains.base.Chain.acall", traced_chain_acall(langchain)) + # Text embedding models override two abstract base methods instead of super calls, so we need to + # wrap each langchain-provided text embedding model. + for text_embedding_model in text_embedding_models: + if hasattr(langchain.embeddings, text_embedding_model): + # Ensure not double patched, as some Embeddings interfaces are pointers to other Embeddings. + if not isinstance( + deep_getattr(langchain.embeddings, "%s.embed_query" % text_embedding_model), wrapt.ObjectProxy + ): + wrap("langchain", "embeddings.%s.embed_query" % text_embedding_model, traced_embedding(langchain)) + if not isinstance( + deep_getattr(langchain.embeddings, "%s.embed_documents" % text_embedding_model), wrapt.ObjectProxy + ): + wrap("langchain", "embeddings.%s.embed_documents" % text_embedding_model, traced_embedding(langchain)) + # TODO: langchain >= 0.0.209 includes async embedding implementation (only for OpenAI) + # We need to do the same with Vectorstores. + for vectorstore in vectorstore_classes: + if hasattr(langchain.vectorstores, vectorstore): + # Ensure not double patched, as some Embeddings interfaces are pointers to other Embeddings. 
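+            # (the same double-patch guard as for embeddings, applied here to the vectorstore classes)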
+ if not isinstance( + deep_getattr(langchain.vectorstores, "%s.similarity_search" % vectorstore), wrapt.ObjectProxy + ): + wrap( + "langchain", "vectorstores.%s.similarity_search" % vectorstore, traced_similarity_search(langchain) + ) + + if _is_iast_enabled(): + from ddtrace.appsec._iast._metrics import _set_iast_error_metric + + def wrap_output_parser(module, parser): + # Ensure not double patched + if not isinstance(deep_getattr(module, "%s.parse" % parser), wrapt.ObjectProxy): + wrap(module, "%s.parse" % parser, taint_parser_output) + + try: + with_agent_output_parser(wrap_output_parser) + except Exception as e: + _set_iast_error_metric("IAST propagation error. langchain wrap_output_parser. {}".format(e)) + + +def unpatch(): + if not getattr(langchain, "_datadog_patch", False): + return + langchain._datadog_patch = False + + unwrap(langchain.llms.base.BaseLLM, "generate") + unwrap(langchain.llms.base.BaseLLM, "agenerate") + unwrap(langchain.chat_models.base.BaseChatModel, "generate") + unwrap(langchain.chat_models.base.BaseChatModel, "agenerate") + unwrap(langchain.chains.base.Chain, "__call__") + unwrap(langchain.chains.base.Chain, "acall") + for text_embedding_model in text_embedding_models: + if hasattr(langchain.embeddings, text_embedding_model): + if isinstance( + deep_getattr(langchain.embeddings, "%s.embed_query" % text_embedding_model), wrapt.ObjectProxy + ): + unwrap(getattr(langchain.embeddings, text_embedding_model), "embed_query") + if isinstance( + deep_getattr(langchain.embeddings, "%s.embed_documents" % text_embedding_model), wrapt.ObjectProxy + ): + unwrap(getattr(langchain.embeddings, text_embedding_model), "embed_documents") + for vectorstore in vectorstore_classes: + if hasattr(langchain.vectorstores, vectorstore): + if isinstance( + deep_getattr(langchain.vectorstores, "%s.similarity_search" % vectorstore), wrapt.ObjectProxy + ): + unwrap(getattr(langchain.vectorstores, vectorstore), "similarity_search") + + delattr(langchain, "_datadog_integration") + + +def taint_outputs(instance, inputs, outputs): + from ddtrace.appsec._iast._metrics import _set_iast_error_metric + from ddtrace.appsec._iast._taint_tracking import get_tainted_ranges + from ddtrace.appsec._iast._taint_tracking import taint_pyobject + + try: + ranges = None + for key in filter(lambda x: x in inputs, instance.input_keys): + input_val = inputs.get(key) + if input_val: + ranges = get_tainted_ranges(input_val) + if ranges: + break + + if ranges: + source = ranges[0].source + for key in filter(lambda x: x in outputs, instance.output_keys): + output_value = outputs[key] + outputs[key] = taint_pyobject(output_value, source.name, source.value, source.origin) + except Exception as e: + _set_iast_error_metric("IAST propagation error. langchain taint_outputs. 
{}".format(e)) + + +def taint_parser_output(func, instance, args, kwargs): + from ddtrace.appsec._iast._metrics import _set_iast_error_metric + from ddtrace.appsec._iast._taint_tracking import get_tainted_ranges + from ddtrace.appsec._iast._taint_tracking import taint_pyobject + + result = func(*args, **kwargs) + try: + try: + from langchain_core.agents import AgentAction + from langchain_core.agents import AgentFinish + except ImportError: + from langchain.agents import AgentAction + from langchain.agents import AgentFinish + ranges = get_tainted_ranges(args[0]) + if ranges: + source = ranges[0].source + if isinstance(result, AgentAction): + result.tool_input = taint_pyobject(result.tool_input, source.name, source.value, source.origin) + elif isinstance(result, AgentFinish) and "output" in result.return_values: + values = result.return_values + values["output"] = taint_pyobject(values["output"], source.name, source.value, source.origin) + except Exception as e: + _set_iast_error_metric("IAST propagation error. langchain taint_parser_output. {}".format(e)) + + return result + + +def with_agent_output_parser(f): + import langchain.agents + + queue = [(langchain.agents, agent_output_parser_classes)] + + while len(queue) > 0: + module, current = queue.pop(0) + if isinstance(current, str): + if hasattr(module, current): + f(module, current) + elif isinstance(current, dict): + for name, value in current.items(): + if hasattr(module, name): + queue.append((getattr(module, name), value)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/__init__.py new file mode 100644 index 0000000..72dab43 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/__init__.py @@ -0,0 +1,62 @@ +r""" +Datadog APM traces can be integrated with the logs produced by ```logbook`` by: + +1. Having ``ddtrace`` patch the ``logbook`` module. This will configure a +patcher which appends trace related values to the log. + +2. Ensuring the logger has a format which emits new values from the log record + +3. For log correlation between APM and logs, the easiest format is via JSON +so that no further configuration needs to be done in the Datadog UI assuming +that the Datadog trace values are at the top level of the JSON + +Enabling +-------- + +Patch ``logbook`` +~~~~~~~~~~~~~~~~~~~ + +If using :ref:`ddtrace-run` then set the environment variable ``DD_LOGS_INJECTION=true``. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(logbook=True) + +Proper Formatting +~~~~~~~~~~~~~~~~~ + +The trace values are patched to every log at the top level of the record. In order to correlate +logs, it is highly recommended to use JSON logs which can be achieved by using a handler with +a proper formatting:: + + handler = FileHandler('output.log', format_string='{{\"message\": "{record.message}",' + '\"dd.trace_id\": "{record.extra[dd.trace_id]}",' + '\"dd.span_id\": "{record.extra[dd.span_id]}",' + '\"dd.env\": "{record.extra[dd.env]}",' + '\"dd.service\": "{record.extra[dd.service]}",' + '\"dd.version\": "{record.extra[dd.version]}"}}') + handler.push_application() + +Note that the ``extra`` field does not have a ``dd`` object but rather only a ``dd.trace_id``, ``dd.span_id``, etc. +To access the trace values inside extra, please use the ``[]`` operator. 
+ +This will create a handler for the application that formats the logs in a way that is JSON with all the +Datadog trace values in a JSON format that can be automatically parsed by the Datadog backend. + +For more information, please see the attached guide for the Datadog Logging Product: +https://docs.datadoghq.com/logs/log_collection/python/ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["logbook"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/patch.py new file mode 100644 index 0000000..2fc37e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logbook/patch.py @@ -0,0 +1,74 @@ +import logbook + +import ddtrace +from ddtrace import config + +from ...internal.utils import get_argument_value +from ...vendor.wrapt import wrap_function_wrapper as _w +from ..logging.constants import RECORD_ATTR_ENV +from ..logging.constants import RECORD_ATTR_SERVICE +from ..logging.constants import RECORD_ATTR_SPAN_ID +from ..logging.constants import RECORD_ATTR_TRACE_ID +from ..logging.constants import RECORD_ATTR_VALUE_EMPTY +from ..logging.constants import RECORD_ATTR_VALUE_ZERO +from ..logging.constants import RECORD_ATTR_VERSION +from ..trace_utils import unwrap as _u + + +config._add( + "logbook", + dict(), +) + + +def get_version(): + # type: () -> str + return getattr(logbook, "__version__", "") + + +def _tracer_injection(event_dict): + span = ddtrace.tracer.current_span() + + trace_id = None + span_id = None + if span: + span_id = span.span_id + trace_id = span.trace_id + if config._128_bit_trace_id_enabled and not config._128_bit_trace_id_logging_enabled: + trace_id = span._trace_id_64bits + + # add ids to logbook event dictionary + event_dict[RECORD_ATTR_TRACE_ID] = str(trace_id or RECORD_ATTR_VALUE_ZERO) + event_dict[RECORD_ATTR_SPAN_ID] = str(span_id or RECORD_ATTR_VALUE_ZERO) + # add the env, service, and version configured for the tracer + event_dict[RECORD_ATTR_ENV] = config.env or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_SERVICE] = config.service or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_VERSION] = config.version or RECORD_ATTR_VALUE_EMPTY + + return event_dict + + +def _w_process_record(func, instance, args, kwargs): + # patch logger to include datadog info before logging + record = get_argument_value(args, kwargs, 0, "record") + _tracer_injection(record.extra) + return func(*args, **kwargs) + + +def patch(): + """ + Patch ``logbook`` module for injection of tracer information + by editing a log record created via ``logbook.base.RecordDispatcher.process_record`` + """ + if getattr(logbook, "_datadog_patch", False): + return + logbook._datadog_patch = True + + _w(logbook.base.RecordDispatcher, "process_record", _w_process_record) + + +def unpatch(): + if getattr(logbook, "_datadog_patch", False): + logbook._datadog_patch = False + + _u(logbook.base.RecordDispatcher, "process_record") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/__init__.py new file mode 100644 index 0000000..3dedbaf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/__init__.py @@ -0,0 +1,75 @@ +""" +Datadog APM 
traces can be integrated with the logs product by: + +1. Having ``ddtrace`` patch the ``logging`` module. This will add trace +attributes to the log record. + +2. Updating the log formatter used by the application. In order to inject +tracing information into a log the formatter must be updated to include the +tracing attributes from the log record. + + +Enabling +-------- + +Patch ``logging`` +~~~~~~~~~~~~~~~~~ + +There are a few ways to tell ddtrace to patch the ``logging`` module: + +1. If using :ref:`ddtrace-run`, you can set the environment variable ``DD_LOGS_INJECTION=true``. + +2. Use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(logging=True) + +3. (beta) Set ``log_injection_enabled`` at runtime via the Datadog UI. + + +Update Log Format +~~~~~~~~~~~~~~~~~ + +Make sure that your log format exactly matches the following:: + + import logging + from ddtrace import tracer + + FORMAT = ('%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] ' + '[dd.service=%(dd.service)s dd.env=%(dd.env)s ' + 'dd.version=%(dd.version)s ' + 'dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] ' + '- %(message)s') + logging.basicConfig(format=FORMAT) + log = logging.getLogger() + log.level = logging.INFO + + + @tracer.wrap() + def hello(): + log.info('Hello, World!') + + hello() + +Note that most host based setups log by default to UTC time. +If the log timestamps aren't automatically in UTC, the formatter can be updated to use UTC:: + + import time + logging.Formatter.converter = time.gmtime + +For more information, please see the attached guide on common timestamp issues: +https://docs.datadoghq.com/logs/guide/logs-not-showing-expected-timestamp/ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["logging"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/constants.py new file mode 100644 index 0000000..1e47ddf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/constants.py @@ -0,0 +1,7 @@ +RECORD_ATTR_TRACE_ID = "dd.trace_id" +RECORD_ATTR_SPAN_ID = "dd.span_id" +RECORD_ATTR_ENV = "dd.env" +RECORD_ATTR_VERSION = "dd.version" +RECORD_ATTR_SERVICE = "dd.service" +RECORD_ATTR_VALUE_ZERO = "0" +RECORD_ATTR_VALUE_EMPTY = "" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/patch.py new file mode 100644 index 0000000..c511496 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/logging/patch.py @@ -0,0 +1,145 @@ +import logging + +import attr + +import ddtrace +from ddtrace import config + +from ...internal.utils import get_argument_value +from ...vendor.wrapt import wrap_function_wrapper as _w +from ..trace_utils import unwrap as _u +from .constants import RECORD_ATTR_ENV +from .constants import RECORD_ATTR_SERVICE +from .constants import RECORD_ATTR_SPAN_ID +from .constants import RECORD_ATTR_TRACE_ID +from .constants import RECORD_ATTR_VALUE_EMPTY +from .constants import RECORD_ATTR_VALUE_ZERO +from .constants import RECORD_ATTR_VERSION + + +_LOG_SPAN_KEY = "__datadog_log_span" + +config._add( + "logging", + dict( + tracer=None, + ), +) # by default, override here for 
custom tracer + + +def get_version(): + # type: () -> str + return getattr(logging, "__version__", "") + + +@attr.s(slots=True) +class DDLogRecord(object): + trace_id = attr.ib(type=int) + span_id = attr.ib(type=int) + service = attr.ib(type=str) + version = attr.ib(type=str) + env = attr.ib(type=str) + + +def _get_current_span(tracer=None): + """Helper to get the currently active span""" + + if not tracer: + # With the addition of a custom ddtrace logger in _logger.py, logs that happen on startup + # don't have access to `ddtrace.tracer`. Checking that this exists prevents an error + # if log injection is enabled. + if not getattr(ddtrace, "tracer", False): + return None + + tracer = ddtrace.tracer + + # We might be calling this during library initialization, in which case `ddtrace.tracer` might + # be the `tracer` module and not the global tracer instance. + if not getattr(tracer, "enabled", False): + return None + + return tracer.current_span() + + +def _w_makeRecord(func, instance, args, kwargs): + # Get the LogRecord instance for this log + record = func(*args, **kwargs) + + setattr(record, RECORD_ATTR_VERSION, config.version or RECORD_ATTR_VALUE_EMPTY) + setattr(record, RECORD_ATTR_ENV, config.env or RECORD_ATTR_VALUE_EMPTY) + setattr(record, RECORD_ATTR_SERVICE, config.service or RECORD_ATTR_VALUE_EMPTY) + + # logs from internal logger may explicitly pass the current span to + # avoid deadlocks in getting the current span while already in locked code. + span_from_log = getattr(record, _LOG_SPAN_KEY, None) + if isinstance(span_from_log, ddtrace.Span): + span = span_from_log + else: + span = _get_current_span(tracer=config.logging.tracer) + + if span: + trace_id = span.trace_id + if config._128_bit_trace_id_enabled and not config._128_bit_trace_id_logging_enabled: + trace_id = span._trace_id_64bits + setattr(record, RECORD_ATTR_TRACE_ID, str(trace_id)) + setattr(record, RECORD_ATTR_SPAN_ID, str(span.span_id)) + else: + setattr(record, RECORD_ATTR_TRACE_ID, RECORD_ATTR_VALUE_ZERO) + setattr(record, RECORD_ATTR_SPAN_ID, RECORD_ATTR_VALUE_ZERO) + + return record + + +def _w_StrFormatStyle_format(func, instance, args, kwargs): + # The format string "dd.service={dd.service}" expects + # the record to have a "dd" property which is an object that + # has a "service" property + # PercentStyle, and StringTemplateStyle both look for + # a "dd.service" property on the record + record = get_argument_value(args, kwargs, 0, "record") + + record.dd = DDLogRecord( + trace_id=getattr(record, RECORD_ATTR_TRACE_ID, RECORD_ATTR_VALUE_ZERO), + span_id=getattr(record, RECORD_ATTR_SPAN_ID, RECORD_ATTR_VALUE_ZERO), + service=getattr(record, RECORD_ATTR_SERVICE, RECORD_ATTR_VALUE_EMPTY), + version=getattr(record, RECORD_ATTR_VERSION, RECORD_ATTR_VALUE_EMPTY), + env=getattr(record, RECORD_ATTR_ENV, RECORD_ATTR_VALUE_EMPTY), + ) + + try: + return func(*args, **kwargs) + finally: + # We need to remove this extra attribute so it does not pollute other formatters + # For example: if we format with StrFormatStyle and then a JSON logger + # then the JSON logger will have `dd.{service,version,env,trace_id,span_id}` as + # well as the `record.dd` `DDLogRecord` instance + del record.dd + + +def patch(): + """ + Patch ``logging`` module in the Python Standard Library for injection of + tracer information by wrapping the base factory method ``Logger.makeRecord`` + """ + if getattr(logging, "_datadog_patch", False): + return + logging._datadog_patch = True + + _w(logging.Logger, "makeRecord", _w_makeRecord) + if 
hasattr(logging, "StrFormatStyle"): + if hasattr(logging.StrFormatStyle, "_format"): + _w(logging.StrFormatStyle, "_format", _w_StrFormatStyle_format) + else: + _w(logging.StrFormatStyle, "format", _w_StrFormatStyle_format) + + +def unpatch(): + if getattr(logging, "_datadog_patch", False): + logging._datadog_patch = False + + _u(logging.Logger, "makeRecord") + if hasattr(logging, "StrFormatStyle"): + if hasattr(logging.StrFormatStyle, "_format"): + _u(logging.StrFormatStyle, "_format") + else: + _u(logging.StrFormatStyle, "format") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/__init__.py new file mode 100644 index 0000000..2727b0a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/__init__.py @@ -0,0 +1,77 @@ +r""" +Datadog APM traces can be integrated with the logs produced by ```loguru`` by: + +1. Having ``ddtrace`` patch the ``loguru`` module. This will configure a +patcher which appends trace related values to the log. + +2. Ensuring the logger has a format which emits new values from the log record + +3. For log correlation between APM and logs, the easiest format is via JSON +so that no further configuration needs to be done in the Datadog UI assuming +that the Datadog trace values are at the top level of the JSON + +Enabling +-------- + +Patch ``loguru`` +~~~~~~~~~~~~~~~~~~~ + +If using :ref:`ddtrace-run` then set the environment variable ``DD_LOGS_INJECTION=true``. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(loguru=True) + +Proper Formatting +~~~~~~~~~~~~~~~~~ + +The trace values are patched to every log at the top level of the record. In order to correlate +logs, it is highly recommended to use JSON logs. Here are two ways to do this: + +1. Use the built-in serialize function within the library that emits the entire log record into a JSON log:: + + from loguru import logger + + logger.add("app.log", serialize=True) + +This will emit the entire log record with the trace values into a file "app.log" + +2. Create a custom format that includes the trace values in JSON format:: + + def serialize(record): + subset = { + "message": record["message"], + "dd.trace_id": record["dd.trace_id"], + "dd.span_id": record["dd.span_id"], + "dd.env": record["dd.env"], + "dd.version": record["dd.version"], + "dd.service": record["dd.service"], + } + return json.dumps(subset) + + def log_format(record): + record["extra"]["serialized"] = serialize(record) + return "{extra[serialized]}\n" + logger.add("app.log", format=log_format) + +This will emit the log in a format where the output contains the trace values of the log at the top level of a JSON +along with the message. 
The log will not include all the possible information in the record, but rather only the values +included in the subset object within the ``serialize`` method + +For more information, please see the attached guide for the Datadog Logging Product: +https://docs.datadoghq.com/logs/log_collection/python/ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["loguru"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/patch.py new file mode 100644 index 0000000..a6062cb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/loguru/patch.py @@ -0,0 +1,85 @@ +import loguru + +import ddtrace +from ddtrace import config + +from ...vendor.wrapt import wrap_function_wrapper as _w +from ..logging.constants import RECORD_ATTR_ENV +from ..logging.constants import RECORD_ATTR_SERVICE +from ..logging.constants import RECORD_ATTR_SPAN_ID +from ..logging.constants import RECORD_ATTR_TRACE_ID +from ..logging.constants import RECORD_ATTR_VALUE_EMPTY +from ..logging.constants import RECORD_ATTR_VALUE_ZERO +from ..logging.constants import RECORD_ATTR_VERSION +from ..trace_utils import unwrap as _u + + +config._add( + "loguru", + dict(), +) + + +def get_version(): + # type: () -> str + return getattr(loguru, "__version__", "") + + +def _tracer_injection(event_dict): + span = ddtrace.tracer.current_span() + + trace_id = None + span_id = None + if span: + span_id = span.span_id + trace_id = span.trace_id + if config._128_bit_trace_id_enabled and not config._128_bit_trace_id_logging_enabled: + trace_id = span._trace_id_64bits + + # add ids to loguru event dictionary + event_dict[RECORD_ATTR_TRACE_ID] = str(trace_id or RECORD_ATTR_VALUE_ZERO) + event_dict[RECORD_ATTR_SPAN_ID] = str(span_id or RECORD_ATTR_VALUE_ZERO) + # add the env, service, and version configured for the tracer + event_dict[RECORD_ATTR_ENV] = config.env or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_SERVICE] = config.service or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_VERSION] = config.version or RECORD_ATTR_VALUE_EMPTY + + return event_dict + + +def _w_configure(func, instance, args, kwargs): + original_patcher = kwargs.get("patcher", None) + instance._dd_original_patcher = original_patcher + if not original_patcher: + # no patcher, we do not need to worry about ddtrace fields being overridden + return func(*args, **kwargs) + + def _wrapped_patcher(record): + original_patcher(record) + record.update(_tracer_injection(record["extra"])) + + kwargs["patcher"] = _wrapped_patcher + return func(*args, **kwargs) + + +def patch(): + """ + Patch ``loguru`` module for injection of tracer information + by appending a patcher before the add function ``loguru.add`` + """ + if getattr(loguru, "_datadog_patch", False): + return + loguru._datadog_patch = True + # Adds ddtrace fields to loguru logger + loguru.logger.configure(patcher=lambda record: record.update(_tracer_injection(record["extra"]))) + # Ensures that calling loguru.logger.configure(..) 
does not overwrite ddtrace fields + _w(loguru.logger, "configure", _w_configure) + + +def unpatch(): + if getattr(loguru, "_datadog_patch", False): + loguru._datadog_patch = False + + _u(loguru.logger, "configure") + if hasattr(loguru.logger, "_dd_original_patcher"): + loguru.logger.configure(patcher=loguru.logger._dd_original_patcher) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/__init__.py new file mode 100644 index 0000000..1ca29a3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/__init__.py @@ -0,0 +1,24 @@ +""" +The ``mako`` integration traces templates rendering. +Auto instrumentation is available using the ``patch``. The following is an example:: + + from ddtrace import patch + from mako.template import Template + + patch(mako=True) + + t = Template(filename="index.html") + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["mako"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/constants.py new file mode 100644 index 0000000..1bda6e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/constants.py @@ -0,0 +1 @@ +DEFAULT_TEMPLATE_NAME = "" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/patch.py new file mode 100644 index 0000000..e683309 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mako/patch.py @@ -0,0 +1,68 @@ +import mako +from mako.template import DefTemplate +from mako.template import Template + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_service_name + +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanTypes +from ...internal.utils.importlib import func_name +from ...pin import Pin +from ..trace_utils import unwrap as _u +from ..trace_utils import wrap as _w +from .constants import DEFAULT_TEMPLATE_NAME + + +def get_version(): + # type: () -> str + return getattr(mako, "__version__", "") + + +def patch(): + if getattr(mako, "__datadog_patch", False): + # already patched + return + mako.__datadog_patch = True + + Pin(service=config.service or schematize_service_name("mako")).onto(Template) + + _w(mako, "template.Template.render", _wrap_render) + _w(mako, "template.Template.render_unicode", _wrap_render) + _w(mako, "template.Template.render_context", _wrap_render) + + +def unpatch(): + if not getattr(mako, "__datadog_patch", False): + return + mako.__datadog_patch = False + + _u(mako.template.Template, "render") + _u(mako.template.Template, "render_unicode") + _u(mako.template.Template, "render_context") + + +def _wrap_render(wrapped, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + # Determine the resource and `mako.template_name` tag value + # DefTemplate is a wrapper around a callable from another template, it does not have a filename + # https://github.com/sqlalchemy/mako/blob/c2c690ac9add584f2216dc655cdf8215b24ef03c/mako/template.py#L603-L622 + if isinstance(instance, DefTemplate) and hasattr(instance, 
"callable_"): + template_name = func_name(instance.callable_) + else: + template_name = getattr(instance, "filename", None) + template_name = template_name or DEFAULT_TEMPLATE_NAME + + with pin.tracer.trace(func_name(wrapped), pin.service, span_type=SpanTypes.TEMPLATE) as span: + span.set_tag_str(COMPONENT, "mako") + + span.set_tag(SPAN_MEASURED_KEY) + try: + return wrapped(*args, **kwargs) + finally: + span.resource = template_name + span.set_tag("mako.template_name", template_name) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/__init__.py new file mode 100644 index 0000000..77d9278 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/__init__.py @@ -0,0 +1,66 @@ +""" +The MariaDB integration instruments the +`MariaDB library `_ to trace queries. + + +Enabling +~~~~~~~~ + +The MariaDB integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(mariadb=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.mariadb["service"] + + The service name reported by default for MariaDB spans. + + This option can also be set with the ``DD_MARIADB_SERVICE`` environment + variable. + + Default: ``"mariadb"`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the mariadb integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + from ddtrace import patch + + # Make sure to patch before importing mariadb + patch(mariadb=True) + + import mariadb.connector + + # This will report a span with the default settings + conn = mariadb.connector.connect(user="alice", password="b0b", host="localhost", port=3306, database="test") + + # Use a pin to override the service name for this connection. 
+ Pin.override(conn, service="mariadb-users") + + cursor = conn.cursor() + cursor.execute("SELECT 6*7 AS the_answer;") + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["mariadb"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/patch.py new file mode 100644 index 0000000..771bb05 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mariadb/patch.py @@ -0,0 +1,58 @@ +import os + +import mariadb + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib.dbapi import TracedConnection +from ddtrace.ext import db +from ddtrace.ext import net +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.wrappers import unwrap +from ddtrace.vendor import wrapt + + +config._add( + "mariadb", + dict( + trace_fetch_methods=asbool(os.getenv("DD_MARIADB_TRACE_FETCH_METHODS", default=False)), + _default_service=schematize_service_name("mariadb"), + _dbapi_span_name_prefix="mariadb", + ), +) + + +def get_version(): + # type: () -> str + return getattr(mariadb, "__version__", "") + + +def patch(): + if getattr(mariadb, "_datadog_patch", False): + return + mariadb._datadog_patch = True + wrapt.wrap_function_wrapper("mariadb", "connect", _connect) + + +def unpatch(): + if getattr(mariadb, "_datadog_patch", False): + mariadb._datadog_patch = False + unwrap(mariadb, "connect") + + +def _connect(func, instance, args, kwargs): + conn = func(*args, **kwargs) + tags = { + net.TARGET_HOST: kwargs.get("host", "127.0.0.1"), + net.TARGET_PORT: kwargs.get("port", 3306), + db.USER: kwargs.get("user", "test"), + db.NAME: kwargs.get("database", "test"), + db.SYSTEM: "mariadb", + } + + pin = Pin(tags=tags) + + wrapped = TracedConnection(conn, pin=pin, cfg=config.mariadb) + pin.onto(wrapped) + return wrapped diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/__init__.py new file mode 100644 index 0000000..b8eb519 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/__init__.py @@ -0,0 +1,50 @@ +""" +The molten web framework is automatically traced by ``ddtrace``:: + + import ddtrace.auto + from molten import App, Route + + def hello(name: str, age: int) -> str: + return f'Hello {age} year old named {name}!' + app = App(routes=[Route('/hello/{name}/{age}', hello)]) + + +You may also enable molten tracing automatically via ``ddtrace-run``:: + + ddtrace-run python app.py + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.molten['distributed_tracing'] + + Whether to parse distributed tracing headers from requests received by your Molten app. + + Default: ``True`` + +.. py:data:: ddtrace.config.molten['service_name'] + + The service name reported for your Molten app. + + Can also be configured via the ``DD_SERVICE`` or ``DD_MOLTEN_SERVICE`` environment variables. + + Default: ``'molten'`` + +:ref:`All HTTP tags ` are supported for this integration. + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["molten"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . 
import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + get_version = _patch.get_version + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/patch.py new file mode 100644 index 0000000..cdab5a2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/patch.py @@ -0,0 +1,180 @@ +import os + +import molten + +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ... import Pin +from ... import config +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import urlencode +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from ...internal.utils.formats import asbool +from ...internal.utils.importlib import func_name +from ...internal.utils.version import parse_version +from .. import trace_utils +from ..trace_utils import unwrap as _u +from .wrappers import MOLTEN_ROUTE +from .wrappers import WrapperComponent +from .wrappers import WrapperMiddleware +from .wrappers import WrapperRenderer +from .wrappers import WrapperRouter + + +MOLTEN_VERSION = parse_version(molten.__version__) + +# Configure default configuration +config._add( + "molten", + dict( + _default_service=schematize_service_name("molten"), + distributed_tracing=asbool(os.getenv("DD_MOLTEN_DISTRIBUTED_TRACING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(molten, "__version__", "") + + +def patch(): + """Patch the instrumented methods""" + if getattr(molten, "_datadog_patch", False): + return + molten._datadog_patch = True + + pin = Pin() + + # add pin to module since many classes use __slots__ + pin.onto(molten) + + _w(molten.BaseApp, "__init__", patch_app_init) + _w(molten.App, "__call__", patch_app_call) + + +def unpatch(): + """Remove instrumentation""" + if getattr(molten, "_datadog_patch", False): + molten._datadog_patch = False + + # remove pin + pin = Pin.get_from(molten) + if pin: + pin.remove_from(molten) + + _u(molten.BaseApp, "__init__") + _u(molten.App, "__call__") + + +def patch_app_call(wrapped, instance, args, kwargs): + """Patch wsgi interface for app""" + pin = Pin.get_from(molten) + + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + # DEV: This is safe because this is the args for a WSGI handler + # https://www.python.org/dev/peps/pep-3333/ + environ, start_response = args + + request = molten.http.Request.from_environ(environ) + resource = func_name(wrapped) + + # request.headers is type Iterable[Tuple[str, str]] + trace_utils.activate_distributed_headers( + pin.tracer, int_config=config.molten, request_headers=dict(request.headers) + ) + + with pin.tracer.trace( + schematize_url_operation("molten.request", protocol="http", direction=SpanDirection.INBOUND), + service=trace_utils.int_service(pin, config.molten), + resource=resource, + span_type=SpanTypes.WEB, + ) as span: + span.set_tag_str(COMPONENT, config.molten.integration_name) + + # set span.kind tag equal to type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + span.set_tag(SPAN_MEASURED_KEY) + # set 
analytics sample rate with global config enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.molten.get_analytics_sample_rate(use_global_config=True)) + + @wrapt.function_wrapper + def _w_start_response(wrapped, instance, args, kwargs): + """Patch respond handling to set metadata""" + + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + status, headers, exc_info = args + code, _, _ = status.partition(" ") + + try: + code = int(code) + except ValueError: + pass + + if not span.get_tag(MOLTEN_ROUTE): + # if route never resolve, update root resource + span.resource = "{} {}".format(request.method, code) + + trace_utils.set_http_meta(span, config.molten, status_code=code) + + return wrapped(*args, **kwargs) + + # patching for extracting response code + start_response = _w_start_response(start_response) + + url = "%s://%s:%s%s" % ( + request.scheme, + request.host, + request.port, + request.path, + ) + query = urlencode(dict(request.params)) + trace_utils.set_http_meta( + span, config.molten, method=request.method, url=url, query=query, request_headers=request.headers + ) + + span.set_tag_str("molten.version", molten.__version__) + return wrapped(environ, start_response, **kwargs) + + +def patch_app_init(wrapped, instance, args, kwargs): + """Patch app initialization of middleware, components and renderers""" + # allow instance to be initialized before wrapping them + wrapped(*args, **kwargs) + + # add Pin to instance + pin = Pin.get_from(molten) + + if not pin or not pin.enabled(): + return + + # Wrappers here allow us to trace objects without altering class or instance + # attributes, which presents a problem when classes in molten use + # ``__slots__`` + + instance.router = WrapperRouter(instance.router) + + # wrap middleware functions/callables + instance.middleware = [WrapperMiddleware(mw) for mw in instance.middleware] + + # wrap components objects within injector + # NOTE: the app instance also contains a list of components but it does not + # appear to be used for anything passing along to the dependency injector + instance.injector.components = [WrapperComponent(c) for c in instance.injector.components] + + # but renderers objects + instance.renderers = [WrapperRenderer(r) for r in instance.renderers] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/wrappers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/wrappers.py new file mode 100644 index 0000000..851fccc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/molten/wrappers.py @@ -0,0 +1,124 @@ +import molten + +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor import wrapt + +from ... import Pin +from ...ext import SpanKind +from ...ext import http +from ...internal.utils.importlib import func_name +from .. 
import trace_utils + + +MOLTEN_ROUTE = "molten.route" + + +def trace_wrapped(resource, wrapped, *args, **kwargs): + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + with pin.tracer.trace( + func_name(wrapped), service=trace_utils.int_service(pin, config.molten), resource=resource + ) as span: + span.set_tag_str(COMPONENT, config.molten.integration_name) + + # set span.kind to the operation type being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + return wrapped(*args, **kwargs) + + +def trace_func(resource): + """Trace calls to function using provided resource name""" + + @wrapt.function_wrapper + def _trace_func(wrapped, instance, args, kwargs): + pin = Pin.get_from(molten) + + if not pin or not pin.enabled(): + return wrapped(*args, **kwargs) + + with pin.tracer.trace( + func_name(wrapped), service=trace_utils.int_service(pin, config.molten, pin), resource=resource + ) as span: + span.set_tag_str(COMPONENT, config.molten.integration_name) + + # set span.kind to the operation type being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + return wrapped(*args, **kwargs) + + return _trace_func + + +class WrapperComponent(wrapt.ObjectProxy): + """Tracing of components""" + + def can_handle_parameter(self, *args, **kwargs): + func = self.__wrapped__.can_handle_parameter + cname = func_name(self.__wrapped__) + resource = "{}.{}".format(cname, func.__name__) + return trace_wrapped(resource, func, *args, **kwargs) + + # TODO[tahir]: the signature of a wrapped resolve method causes DIError to + # be thrown since parameter types cannot be determined + + +class WrapperRenderer(wrapt.ObjectProxy): + """Tracing of renderers""" + + def render(self, *args, **kwargs): + func = self.__wrapped__.render + cname = func_name(self.__wrapped__) + resource = "{}.{}".format(cname, func.__name__) + return trace_wrapped(resource, func, *args, **kwargs) + + +class WrapperMiddleware(wrapt.ObjectProxy): + """Tracing of callable functional-middleware""" + + def __call__(self, *args, **kwargs): + func = self.__wrapped__.__call__ + resource = func_name(self.__wrapped__) + return trace_wrapped(resource, func, *args, **kwargs) + + +class WrapperRouter(wrapt.ObjectProxy): + """Tracing of router on the way back from a matched route""" + + def match(self, *args, **kwargs): + # catch matched route and wrap tracer around its handler and set root span resource + func = self.__wrapped__.match + route_and_params = func(*args, **kwargs) + + pin = Pin.get_from(molten) + if not pin or not pin.enabled(): + return route_and_params + + if route_and_params is not None: + route, params = route_and_params + + route.handler = trace_func(func_name(route.handler))(route.handler) + + # update root span resource while we know the matched route + resource = "{} {}".format( + route.method, + route.template, + ) + root_span = pin.tracer.current_root_span() + root_span.resource = resource + + # if no root route set make sure we record it based on this resolved + # route + if root_span: + if not root_span.get_tag(MOLTEN_ROUTE): + root_span.set_tag(MOLTEN_ROUTE, route.name) + if not root_span.get_tag(http.ROUTE): + root_span.set_tag_str(http.ROUTE, route.template) + + return route, params + + return route_and_params diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/__init__.py new file mode 100644 index 0000000..9bfb560 --- /dev/null +++ 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/__init__.py @@ -0,0 +1,30 @@ +"""Instrument mongoengine to report MongoDB queries. + +``import ddtrace.auto`` will automatically patch your mongoengine connect method to make it work. +:: + + from ddtrace import Pin, patch + import mongoengine + + # If not patched yet, you can patch mongoengine specifically + patch(mongoengine=True) + + # At that point, mongoengine is instrumented with the default settings + mongoengine.connect('db', alias='default') + + # Use a pin to specify metadata related to this client + client = mongoengine.connect('db', alias='master') + Pin.override(client, service="mongo-master") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["mongoengine"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/patch.py new file mode 100644 index 0000000..fa7ef8c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/patch.py @@ -0,0 +1,20 @@ +import mongoengine + +from .trace import WrappedConnect + + +# Original connect function +_connect = mongoengine.connect + + +def get_version(): + # type: () -> str + return getattr(mongoengine, "__version__", "") + + +def patch(): + mongoengine.connect = WrappedConnect(_connect) + + +def unpatch(): + mongoengine.connect = _connect diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/trace.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/trace.py new file mode 100644 index 0000000..17add79 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mongoengine/trace.py @@ -0,0 +1,34 @@ +# 3p +# project +import ddtrace +from ddtrace.contrib.pymongo.client import TracedMongoClient +from ddtrace.ext import mongo as mongox +from ddtrace.internal.schema import schematize_service_name +from ddtrace.vendor import wrapt + + +# TODO(Benjamin): we should instrument register_connection instead, because more generic +# We should also extract the "alias" attribute and set it as a meta +_SERVICE = schematize_service_name(mongox.SERVICE) + + +class WrappedConnect(wrapt.ObjectProxy): + """WrappedConnect wraps mongoengines 'connect' function to ensure + that all returned connections are wrapped for tracing. + """ + + def __init__(self, connect): + super(WrappedConnect, self).__init__(connect) + ddtrace.Pin(_SERVICE, tracer=ddtrace.tracer).onto(self) + + def __call__(self, *args, **kwargs): + client = self.__wrapped__(*args, **kwargs) + pin = ddtrace.Pin.get_from(self) + if pin: + # mongoengine uses pymongo internally, so we can just piggyback on the + # existing pymongo integration and make sure that the connections it + # uses internally are traced. + client = TracedMongoClient(client) + ddtrace.Pin(service=pin.service, tracer=pin.tracer).onto(client) + + return client diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/__init__.py new file mode 100644 index 0000000..60d9d7b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/__init__.py @@ -0,0 +1,76 @@ +""" +The mysql integration instruments the mysql library to trace MySQL queries. 
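For reference, the mongoengine trace module added above rests on one idea: proxy the ``connect`` factory with ``wrapt.ObjectProxy`` so that every client it returns is itself wrapped for tracing. Below is a minimal, self-contained sketch of that factory-wrapping pattern. It is not ddtrace code; the names ``WrappedFactory``, ``EchoProxy`` and ``Client`` are illustrative, and only the ``wrapt`` package is assumed to be installed::

    import wrapt


    class EchoProxy(wrapt.ObjectProxy):
        # Stand-in for TracedMongoClient: behaves exactly like the wrapped
        # object, but lets us observe calls made through it.
        def ping(self):
            print("proxied call on", type(self.__wrapped__).__name__)
            return self.__wrapped__.ping()


    class WrappedFactory(wrapt.ObjectProxy):
        # Stand-in for WrappedConnect: call the real factory, then hand back
        # a wrapped object instead of the bare one.
        def __call__(self, *args, **kwargs):
            obj = self.__wrapped__(*args, **kwargs)
            return EchoProxy(obj)


    class Client:
        def ping(self):
            return "pong"


    make_client = WrappedFactory(Client)          # wrap the factory once
    client = make_client()                        # callers are unchanged
    assert client.ping() == "pong"                # but calls go via the proxy

Because ``ObjectProxy`` forwards attribute access to the wrapped object, the proxied client keeps its original interface, which is why the vendored code can layer a Pin onto it without touching mongoengine itself.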
+ + +Enabling +~~~~~~~~ + +The mysql integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(mysql=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.mysql["service"] + + The service name reported by default for mysql spans. + + This option can also be set with the ``DD_MYSQL_SERVICE`` environment + variable. + + Default: ``"mysql"`` + +.. py:data:: ddtrace.config.mysql["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_MYSQL_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the mysql integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + # Make sure to import mysql.connector and not the 'connect' function, + # otherwise you won't have access to the patched version + import mysql.connector + + # This will report a span with the default settings + conn = mysql.connector.connect(user="alice", password="b0b", host="localhost", port=3306, database="test") + + # Use a pin to override the service name for this connection. + Pin.override(conn, service='mysql-users') + + cursor = conn.cursor() + cursor.execute("SELECT 6*7 AS the_answer;") + + +Only the default full-Python integration works. The binary C connector, +provided by _mysql_connector, is not supported. + +Help on mysql.connector can be found on: +https://dev.mysql.com/doc/connector-python/en/ +""" +from ...internal.utils.importlib import require_modules + + +# check `mysql-connector` availability +required_modules = ["mysql.connector"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/patch.py new file mode 100644 index 0000000..03a4f92 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysql/patch.py @@ -0,0 +1,68 @@ +import os + +import mysql.connector + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib.dbapi import TracedConnection +from ddtrace.vendor import wrapt + +from ...ext import db +from ...ext import net +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool + + +config._add( + "mysql", + dict( + _default_service=schematize_service_name("mysql"), + _dbapi_span_name_prefix="mysql", + _dbapi_span_operation_name=schematize_database_operation("mysql.query", database_provider="mysql"), + trace_fetch_methods=asbool(os.getenv("DD_MYSQL_TRACE_FETCH_METHODS", default=False)), + ), +) + + +def get_version(): + # type: () -> str + return mysql.connector.version.VERSION_TEXT + + +CONN_ATTR_BY_TAG = { + net.TARGET_HOST: "server_host", + net.TARGET_PORT: "server_port", + db.USER: "user", + db.NAME: "database", +} + + +def patch(): + wrapt.wrap_function_wrapper("mysql.connector", "connect", _connect) + # `Connect` is an alias for `connect`, patch it too + if hasattr(mysql.connector, "Connect"): + mysql.connector.Connect = mysql.connector.connect + + +def unpatch(): + if isinstance(mysql.connector.connect, wrapt.ObjectProxy): + mysql.connector.connect = 
mysql.connector.connect.__wrapped__ + if hasattr(mysql.connector, "Connect"): + mysql.connector.Connect = mysql.connector.connect + + +def _connect(func, instance, args, kwargs): + conn = func(*args, **kwargs) + return patch_conn(conn) + + +def patch_conn(conn): + tags = {t: getattr(conn, a) for t, a in CONN_ATTR_BY_TAG.items() if getattr(conn, a, "") != ""} + tags[db.SYSTEM] = "mysql" + pin = Pin(tags=tags) + + # grab the metadata from the conn + wrapped = TracedConnection(conn, pin=pin, cfg=config.mysql) + pin.onto(wrapped) + return wrapped diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/__init__.py new file mode 100644 index 0000000..a28d27d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/__init__.py @@ -0,0 +1,88 @@ +"""The mysqldb integration instruments the mysqlclient library to trace MySQL queries. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(mysqldb=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.mysqldb["service"] + + The service name reported by default for spans. + + This option can also be set with the ``DD_MYSQLDB_SERVICE`` environment + variable. + + Default: ``"mysql"`` + +.. py:data:: ddtrace.config.mysqldb["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_MYSQLDB_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + +.. _mysqldb_config_trace_connect: + +.. py:data:: ddtrace.config.mysqldb["trace_connect"] + + Whether or not to trace connecting. + + Can also be configured via the ``DD_MYSQLDB_TRACE_CONNECT`` environment variable. + + Note that if you are overriding the service name via the Pin on an individual cursor, that will not affect + connect traces. The service name must also be overridden on the Pin on the MySQLdb module. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + # Make sure to import MySQLdb and not the 'connect' function, + # otherwise you won't have access to the patched version + from ddtrace import Pin + import MySQLdb + + # This will report a span with the default settings + conn = MySQLdb.connect(user="alice", passwd="b0b", host="localhost", port=3306, db="test") + + # Use a pin to override the service. + Pin.override(conn, service='mysql-users') + + cursor = conn.cursor() + cursor.execute("SELECT 6*7 AS the_answer;") + + +This package works for mysqlclient. Only the default full-Python integration works. The binary C connector provided by +_mysql is not supported. 
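The ``mysql.connector`` patch added just above follows a standard wrapt recipe: ``wrapt.wrap_function_wrapper`` swaps ``connect`` for a wrapper that runs the original call and then post-processes its return value (wrapping the connection and attaching a Pin built from ``CONN_ATTR_BY_TAG``). A minimal sketch of the same recipe, using ``json.loads`` purely as a stand-in target (this is not ddtrace code, and only the ``wrapt`` package is assumed)::

    import json

    import wrapt


    def _loads_wrapper(func, instance, args, kwargs):
        # Same shape as _connect(func, instance, args, kwargs) above: run the
        # original callable, then inspect or wrap whatever it returned.
        result = func(*args, **kwargs)
        print("decoded %d top-level keys" % len(result))
        return result


    wrapt.wrap_function_wrapper("json", "loads", _loads_wrapper)

    json.loads('{"a": 1, "b": 2}')        # prints: decoded 2 top-level keys

    # Unpatching mirrors mysql/patch.py: the original callable is still
    # reachable on the proxy's __wrapped__ attribute.
    json.loads = json.loads.__wrapped__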
+ +Help on mysqlclient can be found on: +https://mysqlclient.readthedocs.io/ + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["MySQLdb"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/patch.py new file mode 100644 index 0000000..878280d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/mysqldb/patch.py @@ -0,0 +1,111 @@ +import os + +import MySQLdb + +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib.dbapi import TracedConnection +from ddtrace.contrib.trace_utils import ext_service +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_database_operation +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool +from ...internal.utils.wrappers import unwrap as _u + + +config._add( + "mysqldb", + dict( + _default_service=schematize_service_name("mysql"), + _dbapi_span_name_prefix="mysql", + _dbapi_span_operation_name=schematize_database_operation("mysql.query", database_provider="mysql"), + trace_fetch_methods=asbool(os.getenv("DD_MYSQLDB_TRACE_FETCH_METHODS", default=False)), + trace_connect=asbool(os.getenv("DD_MYSQLDB_TRACE_CONNECT", default=False)), + ), +) + +KWPOS_BY_TAG = { + net.TARGET_HOST: ("host", 0), + db.USER: ("user", 1), + db.NAME: ("db", 3), +} + + +def get_version(): + # type: () -> str + return ".".join(map(str, MySQLdb.version_info[0:3])) + + +def patch(): + # patch only once + if getattr(MySQLdb, "__datadog_patch", False): + return + MySQLdb.__datadog_patch = True + + Pin().onto(MySQLdb) + + # `Connection` and `connect` are aliases for + # `Connect`; patch them too + _w("MySQLdb", "Connect", _connect) + if hasattr(MySQLdb, "Connection"): + _w("MySQLdb", "Connection", _connect) + if hasattr(MySQLdb, "connect"): + _w("MySQLdb", "connect", _connect) + + +def unpatch(): + if not getattr(MySQLdb, "__datadog_patch", False): + return + MySQLdb.__datadog_patch = False + + pin = Pin.get_from(MySQLdb) + if pin: + pin.remove_from(MySQLdb) + + # unpatch MySQLdb + _u(MySQLdb, "Connect") + if hasattr(MySQLdb, "Connection"): + _u(MySQLdb, "Connection") + if hasattr(MySQLdb, "connect"): + _u(MySQLdb, "connect") + + +def _connect(func, instance, args, kwargs): + pin = Pin.get_from(MySQLdb) + + if not pin or not pin.enabled() or not config.mysqldb.trace_connect: + conn = func(*args, **kwargs) + else: + with pin.tracer.trace( + "MySQLdb.connection.connect", service=ext_service(pin, config.mysqldb), span_type=SpanTypes.SQL + ) as span: + span.set_tag_str(COMPONENT, config.mysqldb.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + conn = func(*args, **kwargs) + return patch_conn(conn, *args, **kwargs) + + +def patch_conn(conn, *args, **kwargs): + tags = { + t: kwargs[k] if k in kwargs else args[p] for t, (k, p) in KWPOS_BY_TAG.items() if k in kwargs or len(args) > p + } + 
tags[db.SYSTEM] = "mysql" + tags[net.TARGET_PORT] = conn.port + pin = Pin(tags=tags) + + # grab the metadata from the conn + wrapped = TracedConnection(conn, pin=pin, cfg=config.mysqldb) + pin.onto(wrapped) + return wrapped diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/__init__.py new file mode 100644 index 0000000..3376546 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/__init__.py @@ -0,0 +1,262 @@ +""" +The OpenAI integration instruments the OpenAI Python library to emit metrics, +traces, and logs (logs are disabled by default) for requests made to the models, +completions, chat completions, edits, images, embeddings, audio, files, fine-tunes, +and moderations endpoints. + +All metrics, logs, and traces submitted from the OpenAI integration are tagged by: + +- ``service``, ``env``, ``version``: see the `Unified Service Tagging docs `_. +- ``openai.request.endpoint``: OpenAI API endpoint used in the request. +- ``openai.request.method``: HTTP method type used in the request. +- ``openai.request.model``: OpenAI model used in the request. +- ``openai.organization.name``: OpenAI organization name used in the request. +- ``openai.organization.id``: OpenAI organization ID used in the request (when available). +- ``openai.user.api_key``: OpenAI API key used to make the request (obfuscated to match the OpenAI UI representation ``sk-...XXXX`` where ``XXXX`` is the last 4 digits of the key). + + +Metrics +~~~~~~~ + +The following metrics are collected by default by the OpenAI integration. + +.. important:: + If the Agent is configured to use a non-default Statsd hostname or port, use ``DD_DOGSTATSD_URL`` to configure + ``ddtrace`` to use it. + + +.. important:: + Ratelimit and token metrics only reflect usage of the supported completions, chat completions, and embedding + endpoints. Usage of other OpenAI endpoints will not be recorded as they are not provided. + + +.. py:data:: openai.request.duration + + The duration of the OpenAI request in seconds. + + Type: ``distribution`` + + +.. py:data:: openai.request.error + + The number of errors from requests made to OpenAI. + + Type: ``count`` + + +.. py:data:: openai.ratelimit.requests + + The maximum number of OpenAI requests permitted before exhausting the rate limit. + + Type: ``gauge`` + + +.. py:data:: openai.ratelimit.tokens + + The maximum number of OpenAI tokens permitted before exhausting the rate limit. + + Type: ``gauge`` + + +.. py:data:: openai.ratelimit.remaining.requests + + The remaining number of OpenAI requests permitted before exhausting the rate limit. + + Type: ``gauge`` + + +.. py:data:: openai.ratelimit.remaining.tokens + + The remaining number of OpenAI tokens permitted before exhausting the rate limit. + + Type: ``gauge`` + + +.. py:data:: openai.tokens.prompt + + The number of tokens used in the prompt of an OpenAI request. + + Type: ``distribution`` + + +.. py:data:: openai.tokens.completion + + The number of tokens used in the completion of a OpenAI response. + + Type: ``distribution`` + + +.. py:data:: openai.tokens.total + + The total number of tokens used in the prompt and completion of a OpenAI request/response. + + Type: ``distribution`` + + +(beta) Prompt and Completion Sampling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The following data is collected in span tags with a default sampling rate of ``1.0``: + +- Prompt inputs and completions for the ``completions`` endpoint. 
+- Message inputs and completions for the ``chat.completions`` endpoint. +- Embedding inputs for the ``embeddings`` endpoint. +- Edit inputs, instructions, and completions for the ``edits`` endpoint. +- Image input filenames and completion URLs for the ``images`` endpoint. +- Audio input filenames and completions for the ``audio`` endpoint. + +Prompt and message inputs and completions can also be emitted as log data. +Logs are **not** emitted by default. When logs are enabled they are sampled at ``0.1``. + +Read the **Global Configuration** section for information about enabling logs and configuring sampling +rates. + +.. important:: + + To submit logs, you must set the ``DD_API_KEY`` environment variable. + + Set ``DD_SITE`` to send logs to a Datadog site such as ``datadoghq.eu``. The default is ``datadoghq.com``. + + +(beta) Streamed Responses Support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The OpenAI integration **estimates** prompt and completion token counts if streaming is turned on. +This is because the ``usage`` field is no longer returned in streamed completions, which is what +the integration relies on for reporting metrics. + +Streaming responses should produce a ``openai.stream`` span. This span is tagged with estimated +completion and total tokens. The integration will make a best effort attempt to tag the original +parent ``openai.request`` span with completion and total usage information, but this parent span +may be flushed before this information is available. + +The ``_est_tokens`` function implements token count estimations. It returns the average of simple +token estimation techniques that do not rely on installing a tokenizer. + + +Enabling +~~~~~~~~ + +The OpenAI integration is enabled automatically when you use +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Note that these commands also enable the ``requests`` and ``aiohttp`` +integrations which trace HTTP requests from the OpenAI library. + +Alternatively, use :func:`patch() ` to manually enable the OpenAI integration:: + + from ddtrace import config, patch + + # Note: be sure to configure the integration before calling ``patch()``! + # eg. config.openai["logs_enabled"] = True + + patch(openai=True) + + # to trace synchronous HTTP requests from the OpenAI library + # patch(openai=True, requests=True) + + # to trace asynchronous HTTP requests from the OpenAI library + # patch(openai=True, aiohttp=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.openai["service"] + + The service name reported by default for OpenAI requests. + + Alternatively, you can set this option with the ``DD_SERVICE`` or ``DD_OPENAI_SERVICE`` environment + variables. + + Default: ``DD_SERVICE`` + + +.. py:data:: ddtrace.config.openai["logs_enabled"] + + Enable collection of prompts and completions as logs. You can adjust the rate of prompts and completions collected + using the sample rate configuration described below. + + Alternatively, you can set this option with the ``DD_OPENAI_LOGS_ENABLED`` environment + variable. + + Note that you must set the ``DD_API_KEY`` environment variable to enable sending logs. + + Default: ``False`` + + +.. py:data:: ddtrace.config.openai["metrics_enabled"] + + Enable collection of OpenAI metrics. + + If the Datadog Agent is configured to use a non-default Statsd hostname + or port, use ``DD_DOGSTATSD_URL`` to configure ``ddtrace`` to use it. + + Alternatively, you can set this option with the ``DD_OPENAI_METRICS_ENABLED`` environment + variable. 
+ + Default: ``True`` + + +.. py:data:: (beta) ddtrace.config.openai["span_char_limit"] + + Configure the maximum number of characters for the following data within span tags: + + - Prompt inputs and completions + - Message inputs and completions + - Embedding inputs + + Text exceeding the maximum number of characters is truncated to the character limit + and has ``...`` appended to the end. + + Alternatively, you can set this option with the ``DD_OPENAI_SPAN_CHAR_LIMIT`` environment + variable. + + Default: ``128`` + + +.. py:data:: (beta) ddtrace.config.openai["span_prompt_completion_sample_rate"] + + Configure the sample rate for the collection of prompts and completions as span tags. + + Alternatively, you can set this option with the ``DD_OPENAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``1.0`` + + +.. py:data:: (beta) ddtrace.config.openai["log_prompt_completion_sample_rate"] + + Configure the sample rate for the collection of prompts and completions as logs. + + Alternatively, you can set this option with the ``DD_OPENAI_LOG_PROMPT_COMPLETION_SAMPLE_RATE`` environment + variable. + + Default: ``0.1`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the OpenAI integration on a per-instance basis use the +``Pin`` API:: + + import openai + from ddtrace import Pin, config + + Pin.override(openai, service="my-openai-service") +""" # noqa: E501 +from ...internal.utils.importlib import require_modules + + +required_modules = ["openai"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from . import patch as _patch + + patch = _patch.patch + unpatch = _patch.unpatch + get_version = _patch.get_version + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/_endpoint_hooks.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/_endpoint_hooks.py new file mode 100644 index 0000000..8565399 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/_endpoint_hooks.py @@ -0,0 +1,908 @@ +from .utils import _compute_prompt_token_count +from .utils import _format_openai_api_key +from .utils import _is_async_generator +from .utils import _is_generator +from .utils import _tag_tool_calls + + +API_VERSION = "v1" + + +class _EndpointHook: + """ + Base class for all OpenAI endpoint hooks. + Each new endpoint hook should declare `_request_arg_params` and `_request_kwarg_params`, + which will be tagged automatically by _EndpointHook._record_request(). + For endpoint-specific request/response parameters that requires special casing, add that logic to + the endpoint hook's `_record_request()` after a super call to the base `_EndpointHook._record_request()`. + """ + + # _request_arg_params must include the names of arg parameters in order. + # If a given arg requires special casing, replace with `None` to avoid automatic tagging. + _request_arg_params = () + # _request_kwarg_params must include the names of kwarg parameters to tag automatically. + # If a given kwarg requires special casing, remove from this tuple to avoid automatic tagging. + _request_kwarg_params = () + # _response_attrs is used to automatically tag specific response attributes. 
+ _response_attrs = () + _base_level_tag_args = ("api_base", "api_type", "api_version") + ENDPOINT_NAME = "openai" + HTTP_METHOD_TYPE = "" + OPERATION_ID = "" # Each endpoint hook must provide an operationID as specified in the OpenAI API specs: + # https://raw.githubusercontent.com/openai/openai-openapi/master/openapi.yaml + + def _record_request(self, pin, integration, span, args, kwargs): + """ + Set base-level openai tags, as well as request params from args and kwargs. + All inherited EndpointHook classes should include a super call to this method before performing + endpoint-specific request tagging logic. + """ + endpoint = self.ENDPOINT_NAME + if endpoint is None: + endpoint = "%s" % args[0].OBJECT_NAME + span.set_tag_str("openai.request.endpoint", "/%s/%s" % (API_VERSION, endpoint)) + span.set_tag_str("openai.request.method", self.HTTP_METHOD_TYPE) + + if self._request_arg_params and len(self._request_arg_params) > 1: + for idx, arg in enumerate(self._request_arg_params, 1): + if idx >= len(args): + break + if arg is None or args[idx] is None: + continue + if arg in self._base_level_tag_args: + span.set_tag_str("openai.%s" % arg, str(args[idx])) + elif arg == "organization": + span.set_tag_str("openai.organization.id", args[idx]) + elif arg == "api_key": + span.set_tag_str("openai.user.api_key", _format_openai_api_key(args[idx])) + else: + span.set_tag_str("openai.request.%s" % arg, str(args[idx])) + for kw_attr in self._request_kwarg_params: + if kw_attr not in kwargs: + continue + if isinstance(kwargs[kw_attr], dict): + for k, v in kwargs[kw_attr].items(): + span.set_tag_str("openai.request.%s.%s" % (kw_attr, k), str(v)) + elif kw_attr == "engine": # Azure OpenAI requires using "engine" instead of "model" + span.set_tag_str("openai.request.model", str(kwargs[kw_attr])) + else: + span.set_tag_str("openai.request.%s" % kw_attr, str(kwargs[kw_attr])) + + def handle_request(self, pin, integration, span, args, kwargs): + self._record_request(pin, integration, span, args, kwargs) + resp, error = yield + if hasattr(resp, "parse"): + # Users can request the raw response, in which case we need to process on the parsed response + # and return the original raw APIResponse. + self._record_response(pin, integration, span, args, kwargs, resp.parse(), error) + return resp + return self._record_response(pin, integration, span, args, kwargs, resp, error) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + for resp_attr in self._response_attrs: + if hasattr(resp, resp_attr): + span.set_tag_str("openai.response.%s" % resp_attr, str(getattr(resp, resp_attr, ""))) + return resp + + +class _BaseCompletionHook(_EndpointHook): + """ + Share streamed response handling logic between Completion and ChatCompletion endpoints. + """ + + _request_arg_params = ("api_key", "api_base", "api_type", "request_id", "api_version", "organization") + + def _handle_streamed_response(self, integration, span, args, kwargs, resp): + """Handle streamed response objects returned from endpoint calls. + + This method helps with streamed responses by wrapping the generator returned with a + generator that traces the reading of the response. 
+ """ + + def shared_gen(): + try: + num_prompt_tokens = span.get_metric("openai.response.usage.prompt_tokens") or 0 + num_completion_tokens = yield + span.set_metric("openai.response.usage.completion_tokens", num_completion_tokens) + total_tokens = num_prompt_tokens + num_completion_tokens + span.set_metric("openai.response.usage.total_tokens", total_tokens) + if span.get_metric("openai.request.prompt_tokens_estimated") == 0: + integration.metric(span, "dist", "tokens.prompt", num_prompt_tokens) + else: + integration.metric(span, "dist", "tokens.prompt", num_prompt_tokens, tags=["openai.estimated:true"]) + integration.metric( + span, "dist", "tokens.completion", num_completion_tokens, tags=["openai.estimated:true"] + ) + integration.metric(span, "dist", "tokens.total", total_tokens, tags=["openai.estimated:true"]) + finally: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + + num_prompt_tokens = 0 + estimated = False + prompt = kwargs.get("prompt", None) + messages = kwargs.get("messages", None) + if prompt is not None: + if isinstance(prompt, str) or isinstance(prompt, list) and isinstance(prompt[0], int): + prompt = [prompt] + for p in prompt: + estimated, prompt_tokens = _compute_prompt_token_count(p, kwargs.get("model")) + num_prompt_tokens += prompt_tokens + if messages is not None: + for m in messages: + estimated, prompt_tokens = _compute_prompt_token_count(m.get("content", ""), kwargs.get("model")) + num_prompt_tokens += prompt_tokens + span.set_metric("openai.request.prompt_tokens_estimated", int(estimated)) + span.set_metric("openai.response.usage.prompt_tokens", num_prompt_tokens) + + # A chunk corresponds to a token: + # https://community.openai.com/t/how-to-get-total-tokens-from-a-stream-of-completioncreaterequests/110700 + # https://community.openai.com/t/openai-api-get-usage-tokens-in-response-when-set-stream-true/141866 + if _is_async_generator(resp): + + async def traced_streamed_response(): + g = shared_gen() + g.send(None) + num_completion_tokens = 0 + try: + async for chunk in resp: + num_completion_tokens += 1 + yield chunk + finally: + try: + g.send(num_completion_tokens) + except StopIteration: + pass + + return traced_streamed_response() + + elif _is_generator(resp): + + def traced_streamed_response(): + g = shared_gen() + g.send(None) + num_completion_tokens = 0 + try: + for chunk in resp: + num_completion_tokens += 1 + yield chunk + finally: + try: + g.send(num_completion_tokens) + except StopIteration: + pass + + return traced_streamed_response() + + return resp + + +class _CompletionHook(_BaseCompletionHook): + _request_kwarg_params = ( + "model", + "engine", + "suffix", + "max_tokens", + "temperature", + "top_p", + "n", + "stream", + "logprobs", + "echo", + "stop", + "presence_penalty", + "frequency_penalty", + "best_of", + "logit_bias", + "user", + ) + _response_attrs = ("created", "id", "model") + ENDPOINT_NAME = "completions" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createCompletion" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if integration.is_pc_sampled_span(span): + prompt = kwargs.get("prompt", "") + if isinstance(prompt, str): + prompt = [prompt] + for idx, p in enumerate(prompt): + span.set_tag_str("openai.request.prompt.%d" % idx, integration.trunc(str(p))) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, 
error) + if kwargs.get("stream") and error is None: + return self._handle_streamed_response(integration, span, args, kwargs, resp) + if integration.is_pc_sampled_log(span): + attrs_dict = {"prompt": kwargs.get("prompt", "")} + if error is None: + log_choices = resp.choices + if hasattr(resp.choices[0], "model_dump"): + log_choices = [choice.model_dump() for choice in resp.choices] + attrs_dict.update({"choices": log_choices}) + integration.log( + span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict + ) + if integration.is_pc_sampled_llmobs(span): + integration.generate_completion_llm_records(resp, error, span, kwargs) + if not resp: + return + for choice in resp.choices: + span.set_tag_str("openai.response.choices.%d.finish_reason" % choice.index, str(choice.finish_reason)) + if integration.is_pc_sampled_span(span): + span.set_tag_str("openai.response.choices.%d.text" % choice.index, integration.trunc(choice.text)) + integration.record_usage(span, resp.usage) + return resp + + +class _ChatCompletionHook(_BaseCompletionHook): + _request_kwarg_params = ( + "model", + "engine", + "temperature", + "top_p", + "n", + "stream", + "stop", + "max_tokens", + "presence_penalty", + "frequency_penalty", + "logit_bias", + "user", + ) + _response_attrs = ("created", "id", "model") + ENDPOINT_NAME = "chat/completions" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createChatCompletion" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + for idx, m in enumerate(kwargs.get("messages", [])): + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "openai.request.messages.%d.content" % idx, integration.trunc(str(m.get("content", ""))) + ) + span.set_tag_str("openai.request.messages.%d.role" % idx, m.get("role", "")) + span.set_tag_str("openai.request.messages.%d.name" % idx, m.get("name", "")) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if kwargs.get("stream") and error is None: + return self._handle_streamed_response(integration, span, args, kwargs, resp) + if integration.is_pc_sampled_log(span): + log_choices = resp.choices + if hasattr(resp.choices[0], "model_dump"): + log_choices = [choice.model_dump() for choice in resp.choices] + attrs_dict = {"messages": kwargs.get("messages", []), "completion": log_choices} + integration.log( + span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict + ) + if integration.is_pc_sampled_llmobs(span): + integration.generate_chat_llm_records(resp, error, span, kwargs) + if not resp: + return + for choice in resp.choices: + idx = choice.index + finish_reason = getattr(choice, "finish_reason", None) + message = choice.message + span.set_tag_str("openai.response.choices.%d.finish_reason" % idx, str(finish_reason)) + span.set_tag_str("openai.response.choices.%d.message.role" % idx, choice.message.role) + if integration.is_pc_sampled_span(span): + span.set_tag_str( + "openai.response.choices.%d.message.content" % idx, integration.trunc(message.content or "") + ) + if getattr(message, "function_call", None): + _tag_tool_calls(integration, span, [message.function_call], idx) + if getattr(message, "tool_calls", None): + _tag_tool_calls(integration, span, message.tool_calls, idx) + integration.record_usage(span, resp.usage) + return resp + + +class _EmbeddingHook(_EndpointHook): + 
_request_arg_params = ("api_key", "api_base", "api_type", "request_id", "api_version", "organization") + _request_kwarg_params = ("model", "engine", "user") + _response_attrs = ("model",) + ENDPOINT_NAME = "embeddings" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createEmbedding" + + def _record_request(self, pin, integration, span, args, kwargs): + """ + Embedding endpoint allows multiple inputs, each of which we specify a request tag for, so have to + manually set them in _pre_response(). + """ + super()._record_request(pin, integration, span, args, kwargs) + embedding_input = kwargs.get("input", "") + if integration.is_pc_sampled_span(span): + if isinstance(embedding_input, str) or isinstance(embedding_input[0], int): + embedding_input = [embedding_input] + for idx, inp in enumerate(embedding_input): + span.set_tag_str("openai.request.input.%d" % idx, integration.trunc(str(inp))) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + span.set_metric("openai.response.embeddings_count", len(resp.data)) + span.set_metric("openai.response.embedding-length", len(resp.data[0].embedding)) + integration.record_usage(span, resp.usage) + return resp + + +class _ListHook(_EndpointHook): + """ + Hook for openai.ListableAPIResource, which is used by Model.list, File.list, and FineTune.list. + """ + + _request_arg_params = ("api_key", "request_id", "api_version", "organization", "api_base", "api_type") + _request_kwarg_params = ("user",) + ENDPOINT_NAME = None + HTTP_METHOD_TYPE = "GET" + OPERATION_ID = "list" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + endpoint = span.get_tag("openai.request.endpoint") + if endpoint.endswith("/models"): + span.resource = "listModels" + elif endpoint.endswith("/files"): + span.resource = "listFiles" + elif endpoint.endswith("/fine-tunes"): + span.resource = "listFineTunes" + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + span.set_metric("openai.response.count", len(resp.data or [])) + return resp + + +class _ModelListHook(_ListHook): + """ + Hook for openai.resources.models.Models.list (v1) + """ + + ENDPOINT_NAME = "models" + OPERATION_ID = "listModels" + + +class _FileListHook(_ListHook): + """ + Hook for openai.resources.files.Files.list (v1) + """ + + ENDPOINT_NAME = "files" + OPERATION_ID = "listFiles" + + +class _FineTuneListHook(_ListHook): + """ + Hook for openai.resources.fine_tunes.FineTunes.list (v1) + """ + + ENDPOINT_NAME = "fine-tunes" + OPERATION_ID = "listFineTunes" + + +class _RetrieveHook(_EndpointHook): + """Hook for openai.APIResource, which is used by Model.retrieve, File.retrieve, and FineTune.retrieve.""" + + _request_arg_params = (None, "api_key", "request_id", "request_timeout") + _request_kwarg_params = ("user",) + _response_attrs = ( + "id", + "owned_by", + "model", + "parent", + "root", + "bytes", + "created", + "created_at", + "purpose", + "filename", + "fine_tuned_model", + "status", + "status_details", + "updated_at", + ) + ENDPOINT_NAME = None + HTTP_METHOD_TYPE = "GET" + OPERATION_ID = "retrieve" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + endpoint = 
span.get_tag("openai.request.endpoint") + if endpoint.endswith("/models"): + span.resource = "retrieveModel" + span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) + elif endpoint.endswith("/files"): + span.resource = "retrieveFile" + span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + elif endpoint.endswith("/fine-tunes"): + span.resource = "retrieveFineTune" + span.set_tag_str( + "openai.request.fine_tune_id", args[1] if len(args) >= 2 else kwargs.get("fine_tune_id", "") + ) + span.set_tag_str("openai.request.endpoint", "%s/*" % endpoint) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + if hasattr(resp, "hyperparams"): + for hyperparam in ("batch_size", "learning_rate_multiplier", "n_epochs", "prompt_loss_weight"): + val = getattr(resp.hyperparams, hyperparam, "") + span.set_tag_str("openai.response.hyperparams.%s" % hyperparam, str(val)) + for resp_attr in ("result_files", "training_files", "validation_files"): + if hasattr(resp, resp_attr): + span.set_metric("openai.response.%s_count" % resp_attr, len(getattr(resp, resp_attr, []))) + if hasattr(resp, "events"): + span.set_metric("openai.response.events_count", len(resp.events)) + return resp + + +class _ModelRetrieveHook(_RetrieveHook): + """ + Hook for openai.resources.models.Models.retrieve + """ + + ENDPOINT_NAME = "models" + OPERATION_ID = "retrieveModel" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) + + +class _FileRetrieveHook(_RetrieveHook): + """ + Hook for openai.resources.files.Files.retrieve + """ + + ENDPOINT_NAME = "files" + OPERATION_ID = "retrieveFile" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + + +class _FineTuneRetrieveHook(_RetrieveHook): + """ + Hook for openai.resources.fine_tunes.FineTunes.retrieve + """ + + ENDPOINT_NAME = "fine-tunes" + OPERATION_ID = "retrieveFineTune" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.fine_tune_id", args[1] if len(args) >= 2 else kwargs.get("fine_tune_id", "")) + + +class _DeleteHook(_EndpointHook): + """Hook for openai.DeletableAPIResource, which is used by File.delete, and Model.delete.""" + + _request_arg_params = (None, "api_type", "api_version") + _request_kwarg_params = ("user",) + ENDPOINT_NAME = None + HTTP_METHOD_TYPE = "DELETE" + OPERATION_ID = "delete" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + endpoint = span.get_tag("openai.request.endpoint") + if endpoint.endswith("/models"): + span.resource = "deleteModel" + span.set_tag_str("openai.request.model", args[1] if len(args) >= 2 else kwargs.get("model", "")) + elif endpoint.endswith("/files"): + span.resource = "deleteFile" + span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + span.set_tag_str("openai.request.endpoint", "%s/*" % endpoint) + + def 
_record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + if hasattr(resp, "data"): + if resp._headers.get("openai-organization"): + span.set_tag_str("openai.organization.name", resp._headers.get("openai-organization")) + span.set_tag_str("openai.response.id", resp.data.get("id", "")) + span.set_tag_str("openai.response.deleted", str(resp.data.get("deleted", ""))) + else: + span.set_tag_str("openai.response.id", str(resp.id)) + span.set_tag_str("openai.response.deleted", str(resp.deleted)) + return resp + + +class _FileDeleteHook(_DeleteHook): + """ + Hook for openai.resources.files.Files.delete + """ + + ENDPOINT_NAME = "files" + + +class _ModelDeleteHook(_DeleteHook): + """ + Hook for openai.resources.models.Models.delete + """ + + ENDPOINT_NAME = "models" + + +class _EditHook(_EndpointHook): + _request_arg_params = ("api_key", "api_base", "api_type", "request_id", "api_version", "organization") + _request_kwarg_params = ("model", "n", "temperature", "top_p", "user") + _response_attrs = ("created",) + ENDPOINT_NAME = "edits" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createEdit" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if integration.is_pc_sampled_span(span): + instruction = kwargs.get("instruction") + input_text = kwargs.get("input", "") + span.set_tag_str("openai.request.instruction", integration.trunc(instruction)) + span.set_tag_str("openai.request.input", integration.trunc(input_text)) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if integration.is_pc_sampled_log(span): + log_choices = resp.choices + if hasattr(resp.choices[0], "model_dump"): + log_choices = [choice.model_dump() for choice in resp.choices] + integration.log( + span, + "info" if error is None else "error", + "sampled %s" % self.OPERATION_ID, + attrs={ + "instruction": kwargs.get("instruction"), + "input": kwargs.get("input", ""), + "choices": log_choices, + }, + ) + if not resp: + return + choices = resp.choices + if integration.is_pc_sampled_span(span): + for choice in choices: + idx = choice.index + span.set_tag_str("openai.response.choices.%d.text" % idx, integration.trunc(str(choice.text))) + integration.record_usage(span, resp.usage) + return resp + + +class _ImageHook(_EndpointHook): + _response_attrs = ("created",) + ENDPOINT_NAME = "images" + HTTP_METHOD_TYPE = "POST" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.model", "dall-e") + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if integration.is_pc_sampled_log(span): + attrs_dict = {} + if kwargs.get("response_format", "") == "b64_json": + attrs_dict.update({"choices": [{"b64_json": "returned"} for _ in resp.data]}) + else: + log_choices = resp.data + if hasattr(resp.data[0], "model_dump"): + log_choices = [choice.model_dump() for choice in resp.data] + attrs_dict.update({"choices": log_choices}) + if "prompt" in self._request_kwarg_params: + attrs_dict.update({"prompt": kwargs.get("prompt", "")}) + if "image" in self._request_kwarg_params: + image = args[1] if len(args) >= 2 
else kwargs.get("image", "") + attrs_dict.update({"image": image.name.split("/")[-1]}) + if "mask" in self._request_kwarg_params: + mask = args[2] if len(args) >= 3 else kwargs.get("mask", "") + attrs_dict.update({"mask": mask.name.split("/")[-1]}) + integration.log( + span, "info" if error is None else "error", "sampled %s" % self.OPERATION_ID, attrs=attrs_dict + ) + if not resp: + return + choices = resp.data + span.set_metric("openai.response.images_count", len(choices)) + if integration.is_pc_sampled_span(span): + for idx, choice in enumerate(choices): + if getattr(choice, "b64_json", None) is not None: + span.set_tag_str("openai.response.images.%d.b64_json" % idx, "returned") + else: + span.set_tag_str("openai.response.images.%d.url" % idx, integration.trunc(choice.url)) + return resp + + +class _ImageCreateHook(_ImageHook): + _request_arg_params = ("api_key", "api_base", "api_type", "api_version", "organization") + _request_kwarg_params = ("prompt", "n", "size", "response_format", "user") + ENDPOINT_NAME = "images/generations" + OPERATION_ID = "createImage" + + +class _ImageEditHook(_ImageHook): + _request_arg_params = (None, None, "api_key", "api_base", "api_type", "api_version", "organization") + _request_kwarg_params = ("prompt", "n", "size", "response_format", "user", "image", "mask") + ENDPOINT_NAME = "images/edits" + OPERATION_ID = "createImageEdit" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if not integration.is_pc_sampled_span: + return + image = args[1] if len(args) >= 2 else kwargs.get("image", "") + mask = args[2] if len(args) >= 3 else kwargs.get("mask", "") + if image: + if hasattr(image, "name"): + span.set_tag_str("openai.request.image", integration.trunc(image.name.split("/")[-1])) + else: + span.set_tag_str("openai.request.image", "") + if mask: + if hasattr(mask, "name"): + span.set_tag_str("openai.request.mask", integration.trunc(mask.name.split("/")[-1])) + else: + span.set_tag_str("openai.request.mask", "") + + +class _ImageVariationHook(_ImageHook): + _request_arg_params = (None, "api_key", "api_base", "api_type", "api_version", "organization") + _request_kwarg_params = ("n", "size", "response_format", "user", "image") + ENDPOINT_NAME = "images/variations" + OPERATION_ID = "createImageVariation" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if not integration.is_pc_sampled_span: + return + image = args[1] if len(args) >= 2 else kwargs.get("image", "") + if image: + if hasattr(image, "name"): + span.set_tag_str("openai.request.image", integration.trunc(image.name.split("/")[-1])) + else: + span.set_tag_str("openai.request.image", "") + + +class _BaseAudioHook(_EndpointHook): + _request_arg_params = ("model", None, "api_key", "api_base", "api_type", "api_version", "organization") + _response_attrs = ("language", "duration") + ENDPOINT_NAME = "audio" + HTTP_METHOD_TYPE = "POST" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if not integration.is_pc_sampled_span: + return + audio_file = args[2] if len(args) >= 3 else kwargs.get("file", "") + if audio_file and hasattr(audio_file, "name"): + span.set_tag_str("openai.request.filename", integration.trunc(audio_file.name.split("/")[-1])) + else: + span.set_tag_str("openai.request.filename", "") + + def _record_response(self, pin, integration, span, args, 
kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + text = "" + if resp: + resp_to_tag = resp.model_dump() if hasattr(resp, "model_dump") else resp + if isinstance(resp_to_tag, str): + text = resp + elif isinstance(resp_to_tag, dict): + text = resp_to_tag.get("text", "") + if "segments" in resp_to_tag: + span.set_metric("openai.response.segments_count", len(resp_to_tag.get("segments"))) + if integration.is_pc_sampled_span(span): + span.set_tag_str("openai.response.text", integration.trunc(text)) + if integration.is_pc_sampled_log(span): + file_input = args[2] if len(args) >= 3 else kwargs.get("file", "") + integration.log( + span, + "info" if error is None else "error", + "sampled %s" % self.OPERATION_ID, + attrs={ + "file": getattr(file_input, "name", "").split("/")[-1], + "prompt": kwargs.get("prompt", ""), + "language": kwargs.get("language", ""), + "text": text, + }, + ) + return resp + + +class _AudioTranscriptionHook(_BaseAudioHook): + _request_kwarg_params = ( + "prompt", + "response_format", + "temperature", + "language", + "user", + ) + ENDPOINT_NAME = "audio/transcriptions" + OPERATION_ID = "createTranscription" + + +class _AudioTranslationHook(_BaseAudioHook): + _request_kwarg_params = ( + "prompt", + "response_format", + "temperature", + "user", + ) + ENDPOINT_NAME = "audio/translations" + OPERATION_ID = "createTranslation" + + +class _ModerationHook(_EndpointHook): + _request_arg_params = ("input", "model", "api_key") + _request_kwarg_params = ("input", "model") + _response_attrs = ("id", "model") + _response_categories = ( + "hate", + "hate/threatening", + "harassment", + "harassment/threatening", + "self-harm", + "self-harm/intent", + "self-harm/instructions", + "sexual", + "sexual/minors", + "violence", + "violence/graphic", + ) + ENDPOINT_NAME = "moderations" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createModeration" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + results = resp.results[0] + categories = results.categories + scores = results.category_scores + for category in self._response_categories: + span.set_metric("openai.response.category_scores.%s" % category, getattr(scores, category, 0)) + span.set_metric("openai.response.categories.%s" % category, int(getattr(categories, category))) + span.set_metric("openai.response.flagged", int(results.flagged)) + return resp + + +class _BaseFileHook(_EndpointHook): + ENDPOINT_NAME = "files" + + +class _FileCreateHook(_BaseFileHook): + _request_arg_params = ( + None, + "purpose", + "model", + "api_key", + "api_base", + "api_type", + "api_version", + "organization", + "user_provided_filename", + ) + _request_kwarg_params = ("purpose",) + _response_attrs = ("id", "bytes", "created_at", "filename", "purpose", "status", "status_details") + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createFile" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + fp = args[1] if len(args) >= 2 else kwargs.get("file", "") + if fp and hasattr(fp, "name"): + span.set_tag_str("openai.request.filename", fp.name.split("/")[-1]) + else: + span.set_tag_str("openai.request.filename", "") + + def _record_response(self, pin, integration, span, 
args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + return resp + + +class _FileDownloadHook(_BaseFileHook): + _request_arg_params = (None, "api_key", "api_base", "api_type", "api_version", "organization") + HTTP_METHOD_TYPE = "GET" + OPERATION_ID = "downloadFile" + ENDPOINT_NAME = "files/*/content" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.file_id", args[1] if len(args) >= 2 else kwargs.get("file_id", "")) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + if isinstance(resp, bytes) or isinstance(resp, str): + span.set_metric("openai.response.total_bytes", len(resp)) + else: + span.set_metric("openai.response.total_bytes", getattr(resp, "total_bytes", 0)) + return resp + + +class _BaseFineTuneHook(_EndpointHook): + _response_attrs = ("id", "model", "fine_tuned_model", "status", "created_at", "updated_at") + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + span.set_metric("openai.response.events_count", len(resp.events)) + span.set_metric("openai.response.result_files_count", len(resp.result_files)) + span.set_metric("openai.response.training_files_count", len(resp.training_files)) + span.set_metric("openai.response.validation_files_count", len(resp.validation_files)) + hyperparams = resp.hyperparams + for hyperparam in ("batch_size", "learning_rate_multiplier", "n_epochs", "prompt_loss_weight"): + span.set_tag_str("openai.response.hyperparams.%s" % hyperparam, str(getattr(hyperparams, hyperparam, ""))) + + return resp + + +class _FineTuneCreateHook(_BaseFineTuneHook): + _request_arg_params = ("api_key", "api_base", "api_type", "request_id", "api_version", "organization") + _request_kwarg_params = ( + "training_file", + "validation_file", + "model", + "n_epochs", + "batch_size", + "learning_rate_multiplier", + "prompt_loss_weight", + "compute_classification_metrics", + "classification_n_classes", + "classification_positive_class", + "suffix", + ) + ENDPOINT_NAME = "fine-tunes" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "createFineTune" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + if "classification_betas" in kwargs: + classification_betas = kwargs.get("classification_betas", []) + if classification_betas: + span.set_metric("openai.request.classification_betas_count", len(classification_betas)) + else: + span.set_metric("openai.request.classification_betas_count", 0) + + +class _FineTuneCancelHook(_BaseFineTuneHook): + _request_arg_params = (None, "api_key", "api_type", "request_id", "api_version") + _request_kwarg_params = ("user",) + ENDPOINT_NAME = "fine-tunes/*/cancel" + HTTP_METHOD_TYPE = "POST" + OPERATION_ID = "cancelFineTune" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.fine_tune_id", args[1] if len(args) >= 2 else kwargs.get("fine_tune_id", "")) + + +class _FineTuneListEventsHook(_EndpointHook): + _request_kwarg_params = ("stream", "user") + ENDPOINT_NAME = "fine-tunes/*/events" + HTTP_METHOD_TYPE = "GET" 
+ OPERATION_ID = "listFineTuneEvents" + + def _record_request(self, pin, integration, span, args, kwargs): + super()._record_request(pin, integration, span, args, kwargs) + span.set_tag_str("openai.request.fine_tune_id", args[1] if len(args) >= 2 else kwargs.get("fine_tune_id", "")) + + def _record_response(self, pin, integration, span, args, kwargs, resp, error): + resp = super()._record_response(pin, integration, span, args, kwargs, resp, error) + if not resp: + return + span.set_metric("openai.response.count", len(resp.data)) + return resp diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/patch.py new file mode 100644 index 0000000..cc25324 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/patch.py @@ -0,0 +1,387 @@ +import os +import sys + +from openai import version + +from ddtrace import config +from ddtrace.internal.llmobs.integrations import OpenAIIntegration +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.formats import deep_getattr +from ddtrace.internal.utils.version import parse_version +from ddtrace.internal.wrapping import wrap + +from ...pin import Pin +from . import _endpoint_hooks +from .utils import _format_openai_api_key + + +log = get_logger(__name__) + + +config._add( + "openai", + { + "logs_enabled": asbool(os.getenv("DD_OPENAI_LOGS_ENABLED", False)), + "llmobs_enabled": asbool(os.getenv("DD_OPENAI_LLMOBS_ENABLED", False)), + "metrics_enabled": asbool(os.getenv("DD_OPENAI_METRICS_ENABLED", True)), + "span_prompt_completion_sample_rate": float(os.getenv("DD_OPENAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), + "llmobs_prompt_completion_sample_rate": float(os.getenv("DD_OPENAI_LLMOBS_PROMPT_COMPLETION_SAMPLE_RATE", 1.0)), + "log_prompt_completion_sample_rate": float(os.getenv("DD_OPENAI_LOG_PROMPT_COMPLETION_SAMPLE_RATE", 0.1)), + "span_char_limit": int(os.getenv("DD_OPENAI_SPAN_CHAR_LIMIT", 128)), + }, +) + + +def get_version(): + # type: () -> str + return version.VERSION + + +OPENAI_VERSION = parse_version(get_version()) + + +if OPENAI_VERSION >= (1, 0, 0): + _RESOURCES = { + "models.Models": { + "list": _endpoint_hooks._ModelListHook, + "retrieve": _endpoint_hooks._ModelRetrieveHook, + "delete": _endpoint_hooks._ModelDeleteHook, + }, + "completions.Completions": { + "create": _endpoint_hooks._CompletionHook, + }, + "chat.Completions": { + "create": _endpoint_hooks._ChatCompletionHook, + }, + "edits.Edits": { + "create": _endpoint_hooks._EditHook, + }, + "images.Images": { + "generate": _endpoint_hooks._ImageCreateHook, + "edit": _endpoint_hooks._ImageEditHook, + "create_variation": _endpoint_hooks._ImageVariationHook, + }, + "audio.Transcriptions": { + "create": _endpoint_hooks._AudioTranscriptionHook, + }, + "audio.Translations": { + "create": _endpoint_hooks._AudioTranslationHook, + }, + "embeddings.Embeddings": { + "create": _endpoint_hooks._EmbeddingHook, + }, + "moderations.Moderations": { + "create": _endpoint_hooks._ModerationHook, + }, + "files.Files": { + "create": _endpoint_hooks._FileCreateHook, + "retrieve": _endpoint_hooks._FileRetrieveHook, + "list": _endpoint_hooks._FileListHook, + "delete": _endpoint_hooks._FileDeleteHook, + "retrieve_content": _endpoint_hooks._FileDownloadHook, + }, + "fine_tunes.FineTunes": { + "create": _endpoint_hooks._FineTuneCreateHook, + "retrieve": 
_endpoint_hooks._FineTuneRetrieveHook, + "list": _endpoint_hooks._FineTuneListHook, + "cancel": _endpoint_hooks._FineTuneCancelHook, + "list_events": _endpoint_hooks._FineTuneListEventsHook, + }, + } +else: + _RESOURCES = { + "model.Model": { + "list": _endpoint_hooks._ListHook, + "retrieve": _endpoint_hooks._RetrieveHook, + }, + "completion.Completion": { + "create": _endpoint_hooks._CompletionHook, + }, + "chat_completion.ChatCompletion": { + "create": _endpoint_hooks._ChatCompletionHook, + }, + "edit.Edit": { + "create": _endpoint_hooks._EditHook, + }, + "image.Image": { + "create": _endpoint_hooks._ImageCreateHook, + "create_edit": _endpoint_hooks._ImageEditHook, + "create_variation": _endpoint_hooks._ImageVariationHook, + }, + "audio.Audio": { + "transcribe": _endpoint_hooks._AudioTranscriptionHook, + "translate": _endpoint_hooks._AudioTranslationHook, + }, + "embedding.Embedding": { + "create": _endpoint_hooks._EmbeddingHook, + }, + "moderation.Moderation": { + "create": _endpoint_hooks._ModerationHook, + }, + "file.File": { + # File.list() and File.retrieve() share the same underlying method as Model.list() and Model.retrieve() + # which means they are already wrapped + "create": _endpoint_hooks._FileCreateHook, + "delete": _endpoint_hooks._DeleteHook, + "download": _endpoint_hooks._FileDownloadHook, + }, + "fine_tune.FineTune": { + # FineTune.list()/retrieve() share the same underlying method as Model.list() and Model.retrieve() + # FineTune.delete() share the same underlying method as File.delete() + # which means they are already wrapped + # FineTune.list_events does not have an async version, so have to wrap it separately + "create": _endpoint_hooks._FineTuneCreateHook, + "cancel": _endpoint_hooks._FineTuneCancelHook, + }, + } + + +def _wrap_classmethod(obj, wrapper): + wrap(obj.__func__, wrapper) + + +def patch(): + # Avoid importing openai at the module level, eventually will be an import hook + import openai + + if getattr(openai, "__datadog_patch", False): + return + + Pin().onto(openai) + integration = OpenAIIntegration(integration_config=config.openai, openai=openai) + + if OPENAI_VERSION >= (1, 0, 0): + if OPENAI_VERSION >= (1, 8, 0): + wrap(openai._base_client.SyncAPIClient._process_response, _patched_convert(openai, integration)) + wrap(openai._base_client.AsyncAPIClient._process_response, _patched_convert(openai, integration)) + else: + wrap(openai._base_client.BaseClient._process_response, _patched_convert(openai, integration)) + wrap(openai.OpenAI.__init__, _patched_client_init(openai, integration)) + wrap(openai.AsyncOpenAI.__init__, _patched_client_init(openai, integration)) + wrap(openai.AzureOpenAI.__init__, _patched_client_init(openai, integration)) + wrap(openai.AsyncAzureOpenAI.__init__, _patched_client_init(openai, integration)) + + for resource, method_hook_dict in _RESOURCES.items(): + if deep_getattr(openai.resources, resource) is None: + continue + for method_name, endpoint_hook in method_hook_dict.items(): + sync_method = deep_getattr(openai.resources, "%s.%s" % (resource, method_name)) + async_method = deep_getattr( + openai.resources, "%s.%s" % (".Async".join(resource.split(".")), method_name) + ) + wrap(sync_method, _patched_endpoint(openai, integration, endpoint_hook)) + wrap(async_method, _patched_endpoint_async(openai, integration, endpoint_hook)) + else: + import openai.api_requestor + + wrap(openai.api_requestor._make_session, _patched_make_session) + wrap(openai.util.convert_to_openai_object, _patched_convert(openai, integration)) + + for 
resource, method_hook_dict in _RESOURCES.items(): + if deep_getattr(openai.api_resources, resource) is None: + continue + for method_name, endpoint_hook in method_hook_dict.items(): + sync_method = deep_getattr(openai.api_resources, "%s.%s" % (resource, method_name)) + async_method = deep_getattr(openai.api_resources, "%s.a%s" % (resource, method_name)) + _wrap_classmethod(sync_method, _patched_endpoint(openai, integration, endpoint_hook)) + _wrap_classmethod(async_method, _patched_endpoint_async(openai, integration, endpoint_hook)) + + # FineTune.list_events is the only traced endpoint that does not have an async version, so have to wrap it here. + _wrap_classmethod( + openai.api_resources.fine_tune.FineTune.list_events, + _patched_endpoint(openai, integration, _endpoint_hooks._FineTuneListEventsHook), + ) + + openai.__datadog_patch = True + + +def unpatch(): + # FIXME: add unpatching. The current wrapping.unwrap method requires + # the wrapper function to be provided which we don't keep a reference to. + pass + + +def _patched_client_init(openai, integration): + """ + Patch for `openai.OpenAI/AsyncOpenAI` client init methods to add the client object to the OpenAIIntegration object. + """ + + def patched_client_init(func, args, kwargs): + func(*args, **kwargs) + client = args[0] + integration._client = client + api_key = kwargs.get("api_key") + if api_key is None: + api_key = client.api_key + if api_key is not None: + integration.user_api_key = api_key + return + + return patched_client_init + + +def _patched_make_session(func, args, kwargs): + """Patch for `openai.api_requestor._make_session` which sets the service name on the + requests session so that spans from the requests integration will use the service name openai. + This is done so that the service break down will include OpenAI time spent querying the OpenAI backend. + + This should technically be a ``peer.service`` but this concept doesn't exist yet. + """ + session = func(*args, **kwargs) + service = schematize_service_name("openai") + Pin.override(session, service=service) + return session + + +def _traced_endpoint(endpoint_hook, integration, pin, args, kwargs): + span = integration.trace(pin, endpoint_hook.OPERATION_ID) + openai_api_key = _format_openai_api_key(kwargs.get("api_key")) + err = None + if openai_api_key: + # API key can either be set on the import or per request + span.set_tag_str("openai.user.api_key", openai_api_key) + try: + # Start the hook + hook = endpoint_hook().handle_request(pin, integration, span, args, kwargs) + hook.send(None) + + resp, err = yield + + # Record any error information + if err is not None: + span.set_exc_info(*sys.exc_info()) + integration.metric(span, "incr", "request.error", 1) + + # Pass the response and the error to the hook + try: + hook.send((resp, err)) + except StopIteration as e: + if err is None: + return e.value + finally: + # Streamed responses will be finished when the generator exits, so finish non-streamed spans here. + # Streamed responses with error will need to be finished manually as well. + if not kwargs.get("stream") or err is not None: + span.finish() + integration.metric(span, "dist", "request.duration", span.duration_ns) + + +def _patched_endpoint(openai, integration, patch_hook): + def patched_endpoint(func, args, kwargs): + # FIXME: this is a temporary workaround for the fact that our bytecode wrapping seems to modify + # a function keyword argument into a cell when it shouldn't. This is only an issue on + # Python 3.11+. 
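`_traced_endpoint` is written as a generator so that the span-setup code and the span-finishing code can share local state while `_patched_endpoint` runs the real OpenAI call in between: the wrapper advances the generator to its `yield`, calls the wrapped function, then sends `(resp, err)` back in. A stripped-down sketch of that handshake, with print statements standing in for span start/finish::

    def traced_call(operation):
        print("start span for", operation)   # pre-call work, e.g. starting the span
        resp, err = yield                    # suspend until the wrapped call is done
        print("finish span:", "error" if err else "ok", resp)


    def call_with_tracing(func, *args, **kwargs):
        g = traced_call(func.__name__)
        g.send(None)                         # run the pre-call half, stop at the yield
        resp, err = None, None
        try:
            resp = func(*args, **kwargs)
            return resp
        except Exception as e:
            err = e
            raise
        finally:
            try:
                g.send((resp, err))          # hand the outcome back; finishes the span
            except StopIteration:
                pass


    call_with_tracing(len, "hello")
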
+ if sys.version_info >= (3, 11) and kwargs.get("encoding_format", None): + kwargs["encoding_format"] = kwargs["encoding_format"].cell_contents + + pin = Pin._find(openai, args[0]) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + g = _traced_endpoint(patch_hook, integration, pin, args, kwargs) + g.send(None) + resp, err = None, None + try: + resp = func(*args, **kwargs) + return resp + except Exception as e: + err = e + raise + finally: + try: + g.send((resp, err)) + except StopIteration as e: + if err is None: + # This return takes priority over `return resp` + return e.value # noqa: B012 + + return patched_endpoint + + +def _patched_endpoint_async(openai, integration, patch_hook): + # Same as _patched_endpoint but async + async def patched_endpoint(func, args, kwargs): + # FIXME: this is a temporary workaround for the fact that our bytecode wrapping seems to modify + # a function keyword argument into a cell when it shouldn't. This is only an issue on + # Python 3.11+. + if sys.version_info >= (3, 11) and kwargs.get("encoding_format", None): + kwargs["encoding_format"] = kwargs["encoding_format"].cell_contents + + pin = Pin._find(openai, args[0]) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + g = _traced_endpoint(patch_hook, integration, pin, args, kwargs) + g.send(None) + resp, err = None, None + try: + resp = await func(*args, **kwargs) + return resp + except Exception as e: + err = e + raise + finally: + try: + g.send((resp, err)) + except StopIteration as e: + if err is None: + # This return takes priority over `return resp` + return e.value # noqa: B012 + + return patched_endpoint + + +def _patched_convert(openai, integration): + def patched_convert(func, args, kwargs): + """Patch convert captures header information in the openai response""" + pin = Pin.get_from(openai) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + span = pin.tracer.current_span() + if not span: + return func(*args, **kwargs) + + if OPENAI_VERSION < (1, 0, 0): + resp = args[0] + if not isinstance(resp, openai.openai_response.OpenAIResponse): + return func(*args, **kwargs) + headers = resp._headers + else: + resp = kwargs.get("response", {}) + headers = resp.headers + # This function is called for each chunk in the stream. + # To prevent needlessly setting the same tags for each chunk, short-circuit here. 
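The block below copies OpenAI's rate-limit response headers onto the span as metrics. Shown in isolation, the mapping looks like this (a sketch only; `ratelimit_metrics` is a hypothetical helper, while the header and metric names are the ones used in `patched_convert`)::

    RATELIMIT_HEADERS = {
        "x-ratelimit-limit-requests": "openai.organization.ratelimit.requests.limit",
        "x-ratelimit-remaining-requests": "openai.organization.ratelimit.requests.remaining",
        "x-ratelimit-limit-tokens": "openai.organization.ratelimit.tokens.limit",
        "x-ratelimit-remaining-tokens": "openai.organization.ratelimit.tokens.remaining",
    }


    def ratelimit_metrics(headers):
        """Return the span metrics that the headers of one response would produce."""
        metrics = {}
        for header, metric_name in RATELIMIT_HEADERS.items():
            value = headers.get(header)
            if value is not None:
                metrics[metric_name] = int(value)
        return metrics


    print(ratelimit_metrics({"x-ratelimit-limit-requests": "3000",
                             "x-ratelimit-remaining-requests": "2999"}))
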
+ if span.get_tag("openai.organization.name") is not None: + return func(*args, **kwargs) + if headers.get("openai-organization"): + org_name = headers.get("openai-organization") + span.set_tag_str("openai.organization.name", org_name) + + # Gauge total rate limit + if headers.get("x-ratelimit-limit-requests"): + v = headers.get("x-ratelimit-limit-requests") + if v is not None: + integration.metric(span, "gauge", "ratelimit.requests", int(v)) + span.set_metric("openai.organization.ratelimit.requests.limit", int(v)) + if headers.get("x-ratelimit-limit-tokens"): + v = headers.get("x-ratelimit-limit-tokens") + if v is not None: + integration.metric(span, "gauge", "ratelimit.tokens", int(v)) + span.set_metric("openai.organization.ratelimit.tokens.limit", int(v)) + # Gauge and set span info for remaining requests and tokens + if headers.get("x-ratelimit-remaining-requests"): + v = headers.get("x-ratelimit-remaining-requests") + if v is not None: + integration.metric(span, "gauge", "ratelimit.remaining.requests", int(v)) + span.set_metric("openai.organization.ratelimit.requests.remaining", int(v)) + if headers.get("x-ratelimit-remaining-tokens"): + v = headers.get("x-ratelimit-remaining-tokens") + if v is not None: + integration.metric(span, "gauge", "ratelimit.remaining.tokens", int(v)) + span.set_metric("openai.organization.ratelimit.tokens.remaining", int(v)) + + return func(*args, **kwargs) + + return patched_convert diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/utils.py new file mode 100644 index 0000000..29bf8d2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/openai/utils.py @@ -0,0 +1,122 @@ +import re +from typing import AsyncGenerator # noqa:F401 +from typing import Generator # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal.logger import get_logger + + +try: + from tiktoken import encoding_for_model + + tiktoken_available = True +except ModuleNotFoundError: + tiktoken_available = False + + +log = get_logger(__name__) + +_punc_regex = re.compile(r"[\w']+|[.,!?;~@#$%^&*()+/-]") + + +def _compute_prompt_token_count(prompt, model): + # type: (Union[str, List[int]], Optional[str]) -> Tuple[bool, int] + """ + Takes in a prompt(s) and model pair, and returns a tuple of whether or not the number of prompt + tokens was estimated, and the estimated/calculated prompt token count. + """ + num_prompt_tokens = 0 + estimated = False + if model is not None and tiktoken_available is True: + try: + enc = encoding_for_model(model) + if isinstance(prompt, str): + num_prompt_tokens += len(enc.encode(prompt)) + elif isinstance(prompt, list) and isinstance(prompt[0], int): + num_prompt_tokens += len(prompt) + return estimated, num_prompt_tokens + except KeyError: + # tiktoken.encoding_for_model() will raise a KeyError if it doesn't have a tokenizer for the model + estimated = True + else: + estimated = True + + # If model is unavailable or tiktoken is not imported, then provide a very rough estimate of the number of tokens + return estimated, _est_tokens(prompt) + + +def _est_tokens(prompt): + # type: (Union[str, List[int]]) -> int + """ + Provide a very rough estimate of the number of tokens in a string prompt. + Note that if the prompt is passed in as a token array (list of ints), the token count + is just the length of the token array. 
+ """ + # If model is unavailable or tiktoken is not imported, then provide a very rough estimate of the number of tokens + # Approximate using the following assumptions: + # * English text + # * 1 token ~= 4 chars + # * 1 token ~= ¾ words + est_tokens = 0 + if isinstance(prompt, str): + est1 = len(prompt) / 4 + est2 = len(_punc_regex.findall(prompt)) * 0.75 + return round((1.5 * est1 + 0.5 * est2) / 2) + elif isinstance(prompt, list) and isinstance(prompt[0], int): + return len(prompt) + return est_tokens + + +def _format_openai_api_key(openai_api_key): + # type: (Optional[str]) -> Optional[str] + """ + Returns `sk-...XXXX`, where XXXX is the last 4 characters of the provided OpenAI API key. + This mimics how OpenAI UI formats the API key. + """ + if not openai_api_key: + return None + return "sk-...%s" % openai_api_key[-4:] + + +def _is_generator(resp): + # type: (...) -> bool + import openai + + # In OpenAI v1, the response is type `openai.Stream` instead of Generator. + if isinstance(resp, Generator): + return True + if hasattr(openai, "Stream") and isinstance(resp, openai.Stream): + return True + return False + + +def _is_async_generator(resp): + # type: (...) -> bool + import openai + + # In OpenAI v1, the response is type `openai.AsyncStream` instead of AsyncGenerator. + if isinstance(resp, AsyncGenerator): + return True + if hasattr(openai, "AsyncStream") and isinstance(resp, openai.AsyncStream): + return True + return False + + +def _tag_tool_calls(integration, span, tool_calls, choice_idx): + # type: (...) -> None + """ + Tagging logic if function_call or tool_calls are provided in the chat response. + Note: since function calls are deprecated and will be replaced with tool calls, apply the same tagging logic/schema. + """ + for idy, tool_call in enumerate(tool_calls): + if hasattr(tool_call, "function"): + # tool_call is further nested in a "function" object + tool_call = tool_call.function + span.set_tag( + "openai.response.choices.%d.message.tool_calls.%d.arguments" % (choice_idx, idy), + integration.trunc(str(tool_call.arguments)), + ) + span.set_tag("openai.response.choices.%d.message.tool_calls.%d.name" % (choice_idx, idy), str(tool_call.name)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/__init__.py new file mode 100644 index 0000000..6e42817 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/__init__.py @@ -0,0 +1,68 @@ +""" +The psycopg integration instruments the psycopg and psycopg2 libraries to trace Postgres queries. + + +Enabling +~~~~~~~~ + +The psycopg integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(psycopg=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.psycopg["service"] + + The service name reported by default for psycopg spans. + + This option can also be set with the ``DD_PSYCOPG_SERVICE`` environment + variable. + + Default: ``"postgres"`` + +.. py:data:: ddtrace.config.psycopg["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_PSYCOPG_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +.. py:data:: ddtrace.config.psycopg["trace_connect"] + + Whether or not to trace ``psycopg.connect`` method. + + Can also configured via the ``DD_PSYCOPG_TRACE_CONNECT`` environment variable. 
+ + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the psycopg integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + import psycopg + + db = psycopg.connect(connection_factory=factory) + # Use a pin to override the service name. + Pin.override(db, service="postgres-users") + + cursor = db.cursor() + cursor.execute("select * from users where id = 1") +""" +from .patch import get_version +from .patch import get_versions +from .patch import patch + + +__all__ = ["patch", "get_version", "get_versions"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_connection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_connection.py new file mode 100644 index 0000000..8ac8989 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_connection.py @@ -0,0 +1,66 @@ +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import dbapi_async +from ddtrace.contrib.psycopg.async_cursor import Psycopg3FetchTracedAsyncCursor +from ddtrace.contrib.psycopg.async_cursor import Psycopg3TracedAsyncCursor +from ddtrace.contrib.psycopg.connection import patch_conn +from ddtrace.contrib.trace_utils import ext_service +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.internal.constants import COMPONENT + + +class Psycopg3TracedAsyncConnection(dbapi_async.TracedAsyncConnection): + def __init__(self, conn, pin=None, cursor_cls=None): + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = ( + Psycopg3FetchTracedAsyncCursor if config.psycopg.trace_fetch_methods else Psycopg3TracedAsyncCursor + ) + + super(Psycopg3TracedAsyncConnection, self).__init__(conn, pin, config.psycopg, cursor_cls=cursor_cls) + + async def execute(self, *args, **kwargs): + """Execute a query and return a cursor to read its results.""" + span_name = "{}.{}".format(self._self_datadog_name, "execute") + + async def patched_execute(*args, **kwargs): + try: + cur = self.cursor() + if kwargs.get("binary", None): + cur.format = 1 # set to 1 for binary or 0 if not + return await cur.execute(*args, **kwargs) + except Exception as ex: + raise ex.with_traceback(None) + + return await self._trace_method(patched_execute, span_name, {}, *args, **kwargs) + + +def patched_connect_async_factory(psycopg_module): + async def patched_connect_async(connect_func, _, args, kwargs): + traced_conn_cls = Psycopg3TracedAsyncConnection + + pin = Pin.get_from(psycopg_module) + + if not pin or not pin.enabled() or not pin._config.trace_connect: + conn = await connect_func(*args, **kwargs) + else: + with pin.tracer.trace( + "{}.{}".format(connect_func.__module__, connect_func.__name__), + service=ext_service(pin, pin._config), + span_type=SpanTypes.SQL, + ) as span: + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag_str(COMPONENT, pin._config.integration_name) + if span.get_tag(db.SYSTEM) is None: + span.set_tag_str(db.SYSTEM, pin._config.dbms_name) + + span.set_tag(SPAN_MEASURED_KEY) + conn = await connect_func(*args, **kwargs) + + return patch_conn(conn, pin=pin, traced_conn_cls=traced_conn_cls) + + return patched_connect_async diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_cursor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_cursor.py new file mode 100644 index 0000000..d0dc21e 
--- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/async_cursor.py @@ -0,0 +1,26 @@ +from ddtrace.contrib import dbapi_async +from ddtrace.contrib.psycopg.cursor import Psycopg3TracedCursor + + +class Psycopg3TracedAsyncCursor(Psycopg3TracedCursor, dbapi_async.TracedAsyncCursor): + def __init__(self, cursor, pin, cfg, *args, **kwargs): + super(Psycopg3TracedAsyncCursor, self).__init__(cursor, pin, cfg) + + async def __aenter__(self): + # previous versions of the dbapi didn't support context managers. let's + # reference the func that would be called to ensure that errors + # messages will be the same. + await self.__wrapped__.__aenter__() + + # and finally, yield the traced cursor. + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + # previous versions of the dbapi didn't support context managers. let's + # reference the func that would be called to ensure that errors + # messages will be the same. + return await self.__wrapped__.__aexit__(exc_type, exc_val, exc_tb) + + +class Psycopg3FetchTracedAsyncCursor(Psycopg3TracedAsyncCursor, dbapi_async.FetchTracedAsyncCursor): + """Psycopg3FetchTracedAsyncCursor for psycopg""" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/connection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/connection.py new file mode 100644 index 0000000..acf19c7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/connection.py @@ -0,0 +1,109 @@ +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import dbapi +from ddtrace.contrib.psycopg.cursor import Psycopg2FetchTracedCursor +from ddtrace.contrib.psycopg.cursor import Psycopg2TracedCursor +from ddtrace.contrib.psycopg.cursor import Psycopg3FetchTracedCursor +from ddtrace.contrib.psycopg.cursor import Psycopg3TracedCursor +from ddtrace.contrib.psycopg.extensions import _patch_extensions +from ddtrace.contrib.trace_utils import ext_service +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.ext import net +from ddtrace.ext import sql +from ddtrace.internal.constants import COMPONENT + + +class Psycopg3TracedConnection(dbapi.TracedConnection): + def __init__(self, conn, pin=None, cursor_cls=None): + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = Psycopg3FetchTracedCursor if config.psycopg.trace_fetch_methods else Psycopg3TracedCursor + + super(Psycopg3TracedConnection, self).__init__(conn, pin, config.psycopg, cursor_cls=cursor_cls) + + def execute(self, *args, **kwargs): + """Execute a query and return a cursor to read its results.""" + + def patched_execute(*args, **kwargs): + try: + cur = self.cursor() + if kwargs.get("binary", None): + cur.format = 1 # set to 1 for binary or 0 if not + return cur.execute(*args, **kwargs) + except Exception as ex: + raise ex.with_traceback(None) + + return patched_execute(*args, **kwargs) + + +class Psycopg2TracedConnection(dbapi.TracedConnection): + """TracedConnection wraps a Connection with tracing code.""" + + def __init__(self, conn, pin=None, cursor_cls=None): + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = Psycopg2FetchTracedCursor if config.psycopg.trace_fetch_methods else Psycopg2TracedCursor + + super(Psycopg2TracedConnection, self).__init__(conn, pin, config.psycopg, cursor_cls=cursor_cls) + + +def patch_conn(conn, 
traced_conn_cls, pin=None): + """Wrap will patch the instance so that its queries are traced.""" + # ensure we've patched extensions (this is idempotent) in + # case we're only tracing some connections. + _config = None + if pin: + extensions_to_patch = pin._config.get("_extensions_to_patch", None) + _config = pin._config + if extensions_to_patch: + _patch_extensions(extensions_to_patch) + + c = traced_conn_cls(conn) + + # if the connection has an info attr, we are using psycopg3 + if hasattr(conn, "dsn"): + dsn = sql.parse_pg_dsn(conn.dsn) + else: + dsn = sql.parse_pg_dsn(conn.info.dsn) + + tags = { + net.TARGET_HOST: dsn.get("host"), + net.TARGET_PORT: dsn.get("port", 5432), + db.NAME: dsn.get("dbname"), + db.USER: dsn.get("user"), + "db.application": dsn.get("application_name"), + db.SYSTEM: "postgresql", + } + Pin(tags=tags, _config=_config).onto(c) + return c + + +def patched_connect_factory(psycopg_module): + def patched_connect(connect_func, _, args, kwargs): + traced_conn_cls = Psycopg3TracedConnection if psycopg_module.__name__ == "psycopg" else Psycopg2TracedConnection + + pin = Pin.get_from(psycopg_module) + + if not pin or not pin.enabled() or not pin._config.trace_connect: + conn = connect_func(*args, **kwargs) + else: + with pin.tracer.trace( + "{}.{}".format(connect_func.__module__, connect_func.__name__), + service=ext_service(pin, pin._config), + span_type=SpanTypes.SQL, + ) as span: + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag_str(COMPONENT, pin._config.integration_name) + if span.get_tag(db.SYSTEM) is None: + span.set_tag_str(db.SYSTEM, pin._config.dbms_name) + + span.set_tag(SPAN_MEASURED_KEY) + conn = connect_func(*args, **kwargs) + + return patch_conn(conn, pin=pin, traced_conn_cls=traced_conn_cls) + + return patched_connect diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/cursor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/cursor.py new file mode 100644 index 0000000..6596b55 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/cursor.py @@ -0,0 +1,28 @@ +from ddtrace.contrib import dbapi + + +class Psycopg3TracedCursor(dbapi.TracedCursor): + """TracedCursor for psycopg instances""" + + def __init__(self, cursor, pin, cfg, *args, **kwargs): + super(Psycopg3TracedCursor, self).__init__(cursor, pin, cfg) + + def _trace_method(self, method, name, resource, extra_tags, dbm_propagator, *args, **kwargs): + # treat Composable resource objects as strings + if resource.__class__.__name__ == "SQL" or resource.__class__.__name__ == "Composed": + resource = resource.as_string(self.__wrapped__) + return super(Psycopg3TracedCursor, self)._trace_method( + method, name, resource, extra_tags, dbm_propagator, *args, **kwargs + ) + + +class Psycopg3FetchTracedCursor(Psycopg3TracedCursor, dbapi.FetchTracedCursor): + """Psycopg3FetchTracedCursor for psycopg""" + + +class Psycopg2TracedCursor(Psycopg3TracedCursor): + """TracedCursor for psycopg2""" + + +class Psycopg2FetchTracedCursor(Psycopg3FetchTracedCursor): + """FetchTracedCursor for psycopg2""" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/extensions.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/extensions.py new file mode 100644 index 0000000..a801114 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/extensions.py @@ -0,0 +1,180 @@ +""" +Tracing utilities for the psycopg2 potgres client library. 
+""" +import functools + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_database_operation +from ddtrace.vendor import wrapt + +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net + + +def get_psycopg2_extensions(psycopg_module): + class TracedCursor(psycopg_module.extensions.cursor): + """Wrapper around cursor creating one span per query""" + + def __init__(self, *args, **kwargs): + self._datadog_tracer = kwargs.pop("datadog_tracer", None) + self._datadog_service = kwargs.pop("datadog_service", None) + self._datadog_tags = kwargs.pop("datadog_tags", None) + super(TracedCursor, self).__init__(*args, **kwargs) + + def execute(self, query, vars=None): # noqa: A002 + """just wrap the cursor execution in a span""" + if not self._datadog_tracer: + return psycopg_module.extensions.cursor.execute(self, query, vars) + + with self._datadog_tracer.trace( + schematize_database_operation("postgres.query", database_provider="postgresql"), + service=self._datadog_service, + span_type=SpanTypes.SQL, + ) as s: + s.set_tag_str(COMPONENT, config.psycopg.integration_name) + s.set_tag_str(db.SYSTEM, config.psycopg.dbms_name) + + # set span.kind to the type of operation being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tag(SPAN_MEASURED_KEY) + if not s.sampled: + return super(TracedCursor, self).execute(query, vars) + + s.resource = query + s.set_tags(self._datadog_tags) + try: + return super(TracedCursor, self).execute(query, vars) + finally: + s.set_metric(db.ROWCOUNT, self.rowcount) + + def callproc(self, procname, vars=None): # noqa: A002 + """just wrap the execution in a span""" + return psycopg_module.extensions.cursor.callproc(self, procname, vars) + + class TracedConnection(psycopg_module.extensions.connection): + """Wrapper around psycopg2 for tracing""" + + def __init__(self, *args, **kwargs): + self._datadog_tracer = kwargs.pop("datadog_tracer", None) + self._datadog_service = kwargs.pop("datadog_service", None) + + super(TracedConnection, self).__init__(*args, **kwargs) + + # add metadata (from the connection, string, etc) + dsn = psycopg_module.extensions.parse_dsn(self.dsn) + self._datadog_tags = { + net.TARGET_HOST: dsn.get("host"), + net.TARGET_PORT: dsn.get("port"), + db.NAME: dsn.get("dbname"), + db.USER: dsn.get("user"), + db.SYSTEM: config.psycopg.dbms_name, + "db.application": dsn.get("application_name"), + } + + self._datadog_cursor_class = functools.partial( + TracedCursor, + datadog_tracer=self._datadog_tracer, + datadog_service=self._datadog_service, + datadog_tags=self._datadog_tags, + ) + + def cursor(self, *args, **kwargs): + """register our custom cursor factory""" + kwargs.setdefault("cursor_factory", self._datadog_cursor_class) + return super(TracedConnection, self).cursor(*args, **kwargs) + + # extension hooks + _extensions = [ + ( + psycopg_module.extensions.register_type, + psycopg_module.extensions, + "register_type", + _extensions_register_type, + ), + (psycopg_module._psycopg.register_type, psycopg_module._psycopg, "register_type", _extensions_register_type), + (psycopg_module.extensions.adapt, psycopg_module.extensions, "adapt", _extensions_adapt), + ] + + # `_json` attribute is only available for psycopg >= 2.5 + if getattr(psycopg_module, "_json", None): + _extensions += [ + (psycopg_module._json.register_type, psycopg_module._json, 
"register_type", _extensions_register_type), + ] + + # `quote_ident` attribute is only available for psycopg >= 2.7 + if getattr(psycopg_module, "extensions", None) and getattr(psycopg_module.extensions, "quote_ident", None): + _extensions += [ + (psycopg_module.extensions.quote_ident, psycopg_module.extensions, "quote_ident", _extensions_quote_ident), + ] + + return _extensions + + +def _extensions_register_type(func, _, args, kwargs): + def _unroll_args(obj, scope=None): + return obj, scope + + obj, scope = _unroll_args(*args, **kwargs) + + # register_type performs a c-level check of the object + # type so we must be sure to pass in the actual db connection + if scope and isinstance(scope, wrapt.ObjectProxy): + scope = scope.__wrapped__ + + return func(obj, scope) if scope else func(obj) + + +def _extensions_quote_ident(func, _, args, kwargs): + def _unroll_args(obj, scope=None): + return obj, scope + + obj, scope = _unroll_args(*args, **kwargs) + + # register_type performs a c-level check of the object + # type so we must be sure to pass in the actual db connection + if scope and isinstance(scope, wrapt.ObjectProxy): + scope = scope.__wrapped__ + + return func(obj, scope) if scope else func(obj) + + +def _extensions_adapt(func, _, args, kwargs): + adapt = func(*args, **kwargs) + if hasattr(adapt, "prepare"): + return AdapterWrapper(adapt) + return adapt + + +class AdapterWrapper(wrapt.ObjectProxy): + def prepare(self, *args, **kwargs): + func = self.__wrapped__.prepare + if not args: + return func(*args, **kwargs) + conn = args[0] + + # prepare performs a c-level check of the object type so + # we must be sure to pass in the actual db connection + if isinstance(conn, wrapt.ObjectProxy): + conn = conn.__wrapped__ + + return func(conn, *args[1:], **kwargs) + + +def _patch_extensions(_extensions): + # we must patch extensions all the time (it's pretty harmless) so split + # from global patching of connections. must be idempotent. 
+ for _, module, func, wrapper in _extensions: + if not hasattr(module, func) or isinstance(getattr(module, func), wrapt.ObjectProxy): + continue + wrapt.wrap_function_wrapper(module, func, wrapper) + + +def _unpatch_extensions(_extensions): + for original, module, func, _ in _extensions: + setattr(module, func, original) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/patch.py new file mode 100644 index 0000000..79abb9b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/psycopg/patch.py @@ -0,0 +1,214 @@ +from importlib import import_module +import inspect +import os +from typing import List # noqa:F401 + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib import dbapi + + +try: + from ddtrace.contrib.psycopg.async_connection import patched_connect_async_factory + from ddtrace.contrib.psycopg.async_cursor import Psycopg3FetchTracedAsyncCursor + from ddtrace.contrib.psycopg.async_cursor import Psycopg3TracedAsyncCursor +# catch async function syntax errors when using Python<3.7 with no async support +except SyntaxError: + pass +from ddtrace.contrib.psycopg.connection import patched_connect_factory +from ddtrace.contrib.psycopg.cursor import Psycopg3FetchTracedCursor +from ddtrace.contrib.psycopg.cursor import Psycopg3TracedCursor +from ddtrace.contrib.psycopg.extensions import _patch_extensions +from ddtrace.contrib.psycopg.extensions import _unpatch_extensions +from ddtrace.contrib.psycopg.extensions import get_psycopg2_extensions +from ddtrace.propagation._database_monitoring import default_sql_injector as _default_sql_injector +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool +from ...internal.utils.wrappers import unwrap as _u +from ...propagation._database_monitoring import _DBM_Propagator + + +try: + psycopg_import = import_module("psycopg") + + # must get the original connect class method from the class __dict__ to use later in unpatch + # Python 3.11 and wrapt result in the class method being rebinded as an instance method when + # using unwrap + _original_connect = psycopg_import.Connection.__dict__["connect"] + _original_async_connect = psycopg_import.AsyncConnection.__dict__["connect"] +# AttributeError can happen due to circular imports under certain integration methods +except (ImportError, AttributeError): + pass + + +def _psycopg_sql_injector(dbm_comment, sql_statement): + for psycopg_module in config.psycopg["_patched_modules"]: + if ( + hasattr(psycopg_module, "sql") + and hasattr(psycopg_module.sql, "Composable") + and isinstance(sql_statement, psycopg_module.sql.Composable) + ): + return psycopg_module.sql.SQL(dbm_comment) + sql_statement + return _default_sql_injector(dbm_comment, sql_statement) + + +config._add( + "psycopg", + dict( + _default_service=schematize_service_name("postgres"), + _dbapi_span_name_prefix="postgres", + _dbapi_span_operation_name=schematize_database_operation("postgres.query", database_provider="postgresql"), + _patched_modules=set(), + trace_fetch_methods=asbool( + os.getenv("DD_PSYCOPG_TRACE_FETCH_METHODS", default=False) + or os.getenv("DD_PSYCOPG2_TRACE_FETCH_METHODS", default=False) + ), + trace_connect=asbool( + os.getenv("DD_PSYCOPG_TRACE_CONNECT", default=False) + or os.getenv("DD_PSYCOPG2_TRACE_CONNECT", default=False) + ), + 
_dbm_propagator=_DBM_Propagator(0, "query", _psycopg_sql_injector), + dbms_name="postgresql", + ), +) + + +def get_version(): + # type: () -> str + return "" + + +PATCHED_VERSIONS = {} + + +def get_versions(): + # type: () -> List[str] + return PATCHED_VERSIONS + + +def _psycopg_modules(): + module_names = ( + "psycopg", + "psycopg2", + ) + for module_name in module_names: + try: + module = import_module(module_name) + PATCHED_VERSIONS[module_name] = getattr(module, "__version__", "") + yield module + except ImportError: + pass + + +def patch(): + for psycopg_module in _psycopg_modules(): + _patch(psycopg_module) + + +def _patch(psycopg_module): + """Patch monkey patches psycopg's connection function + so that the connection's functions are traced. + """ + if getattr(psycopg_module, "_datadog_patch", False): + return + psycopg_module._datadog_patch = True + + Pin(_config=config.psycopg).onto(psycopg_module) + + if psycopg_module.__name__ == "psycopg2": + # patch all psycopg2 extensions + _psycopg2_extensions = get_psycopg2_extensions(psycopg_module) + config.psycopg["_extensions_to_patch"] = _psycopg2_extensions + _patch_extensions(_psycopg2_extensions) + + _w(psycopg_module, "connect", patched_connect_factory(psycopg_module)) + + config.psycopg["_patched_modules"].add(psycopg_module) + else: + _w(psycopg_module, "connect", patched_connect_factory(psycopg_module)) + _w(psycopg_module, "Cursor", init_cursor_from_connection_factory(psycopg_module)) + _w(psycopg_module, "AsyncCursor", init_cursor_from_connection_factory(psycopg_module)) + + _w(psycopg_module.Connection, "connect", patched_connect_factory(psycopg_module)) + _w(psycopg_module.AsyncConnection, "connect", patched_connect_async_factory(psycopg_module)) + + config.psycopg["_patched_modules"].add(psycopg_module) + + +def unpatch(): + for psycopg_module in _psycopg_modules(): + _unpatch(psycopg_module) + + +def _unpatch(psycopg_module): + if getattr(psycopg_module, "_datadog_patch", False): + psycopg_module._datadog_patch = False + + if psycopg_module.__name__ == "psycopg2": + _u(psycopg_module, "connect") + + _psycopg2_extensions = get_psycopg2_extensions(psycopg_module) + _unpatch_extensions(_psycopg2_extensions) + else: + _u(psycopg_module, "connect") + _u(psycopg_module, "Cursor") + _u(psycopg_module, "AsyncCursor") + + # _u throws an attribute error for Python 3.11, no __get__ on the BoundFunctionWrapper + # unlike Python Class Methods which implement __get__ + psycopg_module.Connection.connect = _original_connect + psycopg_module.AsyncConnection.connect = _original_async_connect + + pin = Pin.get_from(psycopg_module) + if pin: + pin.remove_from(psycopg_module) + + +def init_cursor_from_connection_factory(psycopg_module): + def init_cursor_from_connection(wrapped_cursor_cls, _, args, kwargs): + connection = kwargs.pop("connection", None) + if not connection: + args = list(args) + index = next((i for i, x in enumerate(args) if isinstance(x, dbapi.TracedConnection)), None) + if index is not None: + connection = args.pop(index) + + # if we do not have an example of a traced connection, call the original cursor function + if not connection: + return wrapped_cursor_cls(*args, **kwargs) + + pin = Pin.get_from(connection).clone() + cfg = config.psycopg + + if cfg and cfg.trace_fetch_methods: + trace_fetch_methods = True + else: + trace_fetch_methods = False + + if issubclass(wrapped_cursor_cls, psycopg_module.AsyncCursor): + traced_cursor_cls = Psycopg3FetchTracedAsyncCursor if trace_fetch_methods else Psycopg3TracedAsyncCursor + else: 
+ traced_cursor_cls = Psycopg3FetchTracedCursor if trace_fetch_methods else Psycopg3TracedCursor + + args_mapping = inspect.signature(wrapped_cursor_cls.__init__).parameters + # inspect.signature returns ordered dict[argument_name: str, parameter_type: type] + if "row_factory" in args_mapping and "row_factory" not in kwargs: + # check for row_factory in args by checking for functions + row_factory = None + for i in range(len(args)): + if callable(args[i]): + row_factory = args.pop(i) + break + # else just use the connection row factory + if row_factory is None: + row_factory = connection.row_factory + cursor = wrapped_cursor_cls(connection=connection, row_factory=row_factory, *args, **kwargs) # noqa: B026 + else: + cursor = wrapped_cursor_cls(connection, *args, **kwargs) + + return traced_cursor_cls(cursor=cursor, pin=pin, cfg=cfg) + + return init_cursor_from_connection diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/__init__.py new file mode 100644 index 0000000..2480dd1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/__init__.py @@ -0,0 +1,33 @@ +"""Instrument pylibmc to report Memcached queries. + +``import ddtrace.auto`` will automatically patch your pylibmc client to make it work. +:: + + # Be sure to import pylibmc and not pylibmc.Client directly, + # otherwise you won't have access to the patched version + from ddtrace import Pin, patch + import pylibmc + + # If not patched yet, you can patch pylibmc specifically + patch(pylibmc=True) + + # One client instrumented with default configuration + client = pylibmc.Client(["localhost:11211"] + client.set("key1", "value1") + + # Use a pin to specify metadata related to this client + Pin.override(client, service="memcached-sessions") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["pylibmc"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .client import TracedClient + from .patch import get_version + from .patch import patch + + __all__ = ["TracedClient", "patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/addrs.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/addrs.py new file mode 100644 index 0000000..0f11d2a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/addrs.py @@ -0,0 +1,14 @@ +translate_server_specs = None + +try: + # NOTE: we rely on an undocumented method to parse addresses, + # so be a bit defensive and don't assume it exists. 
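`parse_addresses` in addrs.py supplies the pool of server addresses that `TracedClient._tag_span` later samples from: libmemcached hides the actual host selection at the C level, so the traced client picks one address at random for its host/port tags. A rough sketch of that tagging step, using placeholder tag names rather than the real `net.TARGET_HOST`/`net.TARGET_PORT` constants::

    import random

    # _tag_span unpacks each entry as a 4-tuple whose middle items are host and port
    addresses = [(None, "10.0.0.1", 11211, None), (None, "10.0.0.2", 11211, None)]


    def address_tags(addresses):
        if not addresses:
            return {}
        _, host, port, _ = random.choice(addresses)
        return {"memcached.host": host, "memcached.port": port}


    print(address_tags(addresses))
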
+ from pylibmc.client import translate_server_specs +except ImportError: + pass + + +def parse_addresses(addrs): + if not translate_server_specs: + return [] + return translate_server_specs(addrs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/client.py new file mode 100644 index 0000000..af788ab --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/client.py @@ -0,0 +1,189 @@ +from contextlib import contextmanager +import random + +import pylibmc + +# project +import ddtrace +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib.pylibmc.addrs import parse_addresses +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.ext import memcached +from ddtrace.ext import net +from ddtrace.internal.compat import Iterable +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_cache_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.vendor.wrapt import ObjectProxy + + +# Original Client class +_Client = pylibmc.Client + + +log = get_logger(__name__) + + +class TracedClient(ObjectProxy): + """TracedClient is a proxy for a pylibmc.Client that times it's network operations.""" + + def __init__(self, client=None, service=memcached.SERVICE, tracer=None, *args, **kwargs): + """Create a traced client that wraps the given memcached client.""" + # The client instance/service/tracer attributes are kept for compatibility + # with the old interface: TracedClient(client=pylibmc.Client(['localhost:11211'])) + # TODO(Benjamin): Remove these in favor of patching. + if not isinstance(client, _Client): + # We are in the patched situation, just pass down all arguments to the pylibmc.Client + # Note that, in that case, client isn't a real client (just the first argument) + client = _Client(client, *args, **kwargs) + else: + log.warning( + "TracedClient instantiation is deprecated and will be remove " + "in future versions (0.6.0). Use patching instead (see the docs)." + ) + + super(TracedClient, self).__init__(client) + + schematized_service = schematize_service_name(service) + pin = ddtrace.Pin(service=schematized_service, tracer=tracer) + pin.onto(self) + + # attempt to collect the pool of urls this client talks to + try: + self._addresses = parse_addresses(client.addresses) + except Exception: + log.debug("error setting addresses", exc_info=True) + + def clone(self, *args, **kwargs): + # rewrap new connections. 
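`TracedClient` wraps the real pylibmc client in an `ObjectProxy` and routes every memcached command through `_trace_cmd`, which opens a span named after the command, tags the query, and forwards the call to the wrapped client. A minimal stand-alone sketch of that proxy-and-time approach, where a plain dict plays the part of the memcached client and a list of dicts plays the part of spans::

    import time


    class TimedProxy:
        """Toy version of the proxy approach (not ddtrace's ObjectProxy)."""

        def __init__(self, wrapped):
            self._wrapped = wrapped
            self.spans = []                   # the real code opens ddtrace spans instead

        def _trace_cmd(self, method_name, *args, **kwargs):
            start = time.time()
            try:
                return getattr(self._wrapped, method_name)(*args, **kwargs)
            finally:
                self.spans.append({
                    "resource": method_name,
                    "query": "%s %s" % (method_name, args[0] if args else ""),
                    "duration": time.time() - start,
                })

        def get(self, *args, **kwargs):
            return self._trace_cmd("get", *args, **kwargs)


    client = TimedProxy({"key1": "value1"})   # a plain dict plays the memcached client
    assert client.get("key1") == "value1"
    print(client.spans[0]["resource"], client.spans[0]["query"])
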
+ cloned = self.__wrapped__.clone(*args, **kwargs) + traced_client = TracedClient(cloned) + pin = ddtrace.Pin.get_from(self) + if pin: + pin.clone().onto(traced_client) + return traced_client + + def get(self, *args, **kwargs): + return self._trace_cmd("get", *args, **kwargs) + + def set(self, *args, **kwargs): + return self._trace_cmd("set", *args, **kwargs) + + def delete(self, *args, **kwargs): + return self._trace_cmd("delete", *args, **kwargs) + + def gets(self, *args, **kwargs): + return self._trace_cmd("gets", *args, **kwargs) + + def touch(self, *args, **kwargs): + return self._trace_cmd("touch", *args, **kwargs) + + def cas(self, *args, **kwargs): + return self._trace_cmd("cas", *args, **kwargs) + + def incr(self, *args, **kwargs): + return self._trace_cmd("incr", *args, **kwargs) + + def decr(self, *args, **kwargs): + return self._trace_cmd("decr", *args, **kwargs) + + def append(self, *args, **kwargs): + return self._trace_cmd("append", *args, **kwargs) + + def prepend(self, *args, **kwargs): + return self._trace_cmd("prepend", *args, **kwargs) + + def get_multi(self, *args, **kwargs): + return self._trace_multi_cmd("get_multi", *args, **kwargs) + + def set_multi(self, *args, **kwargs): + return self._trace_multi_cmd("set_multi", *args, **kwargs) + + def delete_multi(self, *args, **kwargs): + return self._trace_multi_cmd("delete_multi", *args, **kwargs) + + def _trace_cmd(self, method_name, *args, **kwargs): + """trace the execution of the method with the given name and will + patch the first arg. + """ + method = getattr(self.__wrapped__, method_name) + with self._span(method_name) as span: + result = method(*args, **kwargs) + if span is None: + return result + + if args: + span.set_tag_str(memcached.QUERY, "%s %s" % (method_name, args[0])) + if method_name == "get": + span.set_metric(db.ROWCOUNT, 1 if result else 0) + elif method_name == "gets": + # returns a tuple object that may be (None, None) + span.set_metric(db.ROWCOUNT, 1 if isinstance(result, Iterable) and len(result) > 0 and result[0] else 0) + return result + + def _trace_multi_cmd(self, method_name, *args, **kwargs): + """trace the execution of the multi command with the given name.""" + method = getattr(self.__wrapped__, method_name) + with self._span(method_name) as span: + result = method(*args, **kwargs) + if span is None: + return result + + pre = kwargs.get("key_prefix") + if pre: + span.set_tag_str(memcached.QUERY, "%s %s" % (method_name, pre)) + + if method_name == "get_multi": + # returns mapping of key -> value if key exists, but does not include a missing key. 
Empty result = {} + span.set_metric( + db.ROWCOUNT, sum(1 for doc in result if doc) if result and isinstance(result, Iterable) else 0 + ) + return result + + @contextmanager + def _no_span(self): + yield None + + def _span(self, cmd_name): + """Return a span timing the given command.""" + pin = ddtrace.Pin.get_from(self) + if not pin or not pin.enabled(): + return self._no_span() + + span = pin.tracer.trace( + schematize_cache_operation("memcached.cmd", cache_provider="memcached"), + service=pin.service, + resource=cmd_name, + span_type=SpanTypes.CACHE, + ) + + span.set_tag_str(COMPONENT, config.pylibmc.integration_name) + span.set_tag_str(db.SYSTEM, memcached.DBMS_NAME) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + try: + self._tag_span(span) + except Exception: + log.debug("error tagging span", exc_info=True) + return span + + def _tag_span(self, span): + # FIXME[matt] the host selection is buried in c code. we can't tell what it's actually + # using, so fallback to randomly choosing one. can we do better? + if self._addresses: + _, host, port, _ = random.choice(self._addresses) # nosec + span.set_tag_str(net.TARGET_HOST, host) + span.set_tag(net.TARGET_PORT, port) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.pylibmc.get_analytics_sample_rate()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/patch.py new file mode 100644 index 0000000..9cd2066 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pylibmc/patch.py @@ -0,0 +1,20 @@ +import pylibmc + +from .client import TracedClient + + +# Original Client class +_Client = pylibmc.Client + + +def get_version(): + # type: () -> str + return getattr(pylibmc, "__version__", "") + + +def patch(): + pylibmc.Client = TracedClient + + +def unpatch(): + pylibmc.Client = _Client diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/__init__.py new file mode 100644 index 0000000..25f9354 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/__init__.py @@ -0,0 +1,44 @@ +"""Instrument pymemcache to report memcached queries. + +``import ddtrace.auto`` will automatically patch the pymemcache ``Client``:: + + from ddtrace import Pin, patch + + # If not patched yet, patch pymemcache specifically + patch(pymemcache=True) + + # Import reference to Client AFTER patching + import pymemcache + from pymemcache.client.base import Client + + # Use a pin to specify metadata related all clients + Pin.override(pymemcache, service='my-memcached-service') + + # This will report a span with the default settings + client = Client(('localhost', 11211)) + client.set("my-key", "my-val") + + # Use a pin to specify metadata related to this particular client + Pin.override(client, service='my-memcached-service') + + # If using a HashClient, specify metadata on each of its underlying + # Client instances individually + client = HashClient(('localhost', 11211)) + for _c in client.clients.values(): + Pin.override(_c, service="my-service") + +Pymemcache ``HashClient`` will also be indirectly patched as it uses ``Client`` +under the hood. 
+""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["pymemcache"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/client.py new file mode 100644 index 0000000..f332731 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/client.py @@ -0,0 +1,361 @@ +import os +import sys +from typing import Iterable + +import pymemcache +from pymemcache.client.base import Client +from pymemcache.client.base import PooledClient +from pymemcache.client.hash import HashClient +from pymemcache.exceptions import MemcacheClientError +from pymemcache.exceptions import MemcacheIllegalInputError +from pymemcache.exceptions import MemcacheServerError +from pymemcache.exceptions import MemcacheUnknownCommandError +from pymemcache.exceptions import MemcacheUnknownError + +# 3p +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor import wrapt + +# project +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import memcached as memcachedx +from ...ext import net +from ...internal.logger import get_logger +from ...internal.schema import schematize_cache_operation +from ...internal.utils.formats import asbool +from ...pin import Pin + + +log = get_logger(__name__) + + +config._add( + "pymemcache", + { + "command_enabled": asbool(os.getenv("DD_TRACE_MEMCACHED_COMMAND_ENABLED", default=False)), + }, +) + + +# keep a reference to the original unpatched clients +_Client = Client +_HashClient = HashClient + + +class _WrapperBase(wrapt.ObjectProxy): + def __init__(self, wrapped_class, *args, **kwargs): + c = wrapped_class(*args, **kwargs) + super(_WrapperBase, self).__init__(c) + + # tags to apply to each span generated by this client + tags = _get_address_tags(*args, **kwargs) + + parent_pin = Pin.get_from(pymemcache) + + if parent_pin: + pin = parent_pin.clone(tags=tags) + else: + pin = Pin(tags=tags) + + # attach the pin onto this instance + pin.onto(self) + + def _trace_function_as_command(self, func, cmd, *args, **kwargs): + p = Pin.get_from(self) + + if not p or not p.enabled(): + return func(*args, **kwargs) + + return _trace(func, p, cmd, *args, **kwargs) + + +class WrappedClient(_WrapperBase): + """Wrapper providing patched methods of a pymemcache Client. + + Relevant connection information is obtained during initialization and + attached to each span. + + Keys are tagged in spans for methods that act upon a key. 
+ """ + + def __init__(self, *args, **kwargs): + super(WrappedClient, self).__init__(_Client, *args, **kwargs) + + def set(self, *args, **kwargs): + return self._traced_cmd("set", *args, **kwargs) + + def set_many(self, *args, **kwargs): + return self._traced_cmd("set_many", *args, **kwargs) + + def add(self, *args, **kwargs): + return self._traced_cmd("add", *args, **kwargs) + + def replace(self, *args, **kwargs): + return self._traced_cmd("replace", *args, **kwargs) + + def append(self, *args, **kwargs): + return self._traced_cmd("append", *args, **kwargs) + + def prepend(self, *args, **kwargs): + return self._traced_cmd("prepend", *args, **kwargs) + + def cas(self, *args, **kwargs): + return self._traced_cmd("cas", *args, **kwargs) + + def get(self, *args, **kwargs): + return self._traced_cmd("get", *args, **kwargs) + + def get_many(self, *args, **kwargs): + return self._traced_cmd("get_many", *args, **kwargs) + + def gets(self, *args, **kwargs): + return self._traced_cmd("gets", *args, **kwargs) + + def gets_many(self, *args, **kwargs): + return self._traced_cmd("gets_many", *args, **kwargs) + + def delete(self, *args, **kwargs): + return self._traced_cmd("delete", *args, **kwargs) + + def delete_many(self, *args, **kwargs): + return self._traced_cmd("delete_many", *args, **kwargs) + + def incr(self, *args, **kwargs): + return self._traced_cmd("incr", *args, **kwargs) + + def decr(self, *args, **kwargs): + return self._traced_cmd("decr", *args, **kwargs) + + def touch(self, *args, **kwargs): + return self._traced_cmd("touch", *args, **kwargs) + + def stats(self, *args, **kwargs): + return self._traced_cmd("stats", *args, **kwargs) + + def version(self, *args, **kwargs): + return self._traced_cmd("version", *args, **kwargs) + + def flush_all(self, *args, **kwargs): + return self._traced_cmd("flush_all", *args, **kwargs) + + def quit(self, *args, **kwargs): + return self._traced_cmd("quit", *args, **kwargs) + + def set_multi(self, *args, **kwargs): + """set_multi is an alias for set_many""" + return self._traced_cmd("set_many", *args, **kwargs) + + def get_multi(self, *args, **kwargs): + """set_multi is an alias for set_many""" + return self._traced_cmd("get_many", *args, **kwargs) + + def _traced_cmd(self, command, *args, **kwargs): + return self._trace_function_as_command( + lambda *_args, **_kwargs: getattr(self.__wrapped__, command)(*_args, **_kwargs), command, *args, **kwargs + ) + + +class WrappedHashClient(_WrapperBase): + """Wrapper that traces HashClient commands + + This wrapper proxies its command invocations to the underlying HashClient instance. + When the use_pooling setting is in use, this wrapper starts a span before + doing the proxy call. + + This is necessary because the use_pooling setting causes Client instances to be + created and destroyed dynamically in a manner that isn't affected by the + patch() function. + """ + + def _ensure_traced(self, cmd, key, default_val, *args, **kwargs): + """ + PooledClient creates Client instances dynamically on request, which means + those Client instances aren't affected by the wrappers applied in patch(). + We handle this case here by calling trace() before running the command, + specifically when the client that will be used for the command is a + PooledClient. + + To avoid double-tracing when the key's client is not a PooledClient, we + don't create a span and instead rely on patch(). In this case the + underlying Client instance is long-lived and has been patched already. 
+ """ + client_for_key = self._get_client(key) + if isinstance(client_for_key, PooledClient): + return self._traced_cmd(cmd, client_for_key, key, default_val, *args, **kwargs) + else: + return getattr(self.__wrapped__, cmd)(key, *args, **kwargs) + + def __init__(self, *args, **kwargs): + super(WrappedHashClient, self).__init__(_HashClient, *args, **kwargs) + + def set(self, key, *args, **kwargs): + return self._ensure_traced("set", key, False, *args, **kwargs) + + def add(self, key, *args, **kwargs): + return self._ensure_traced("add", key, False, *args, **kwargs) + + def replace(self, key, *args, **kwargs): + return self._ensure_traced("replace", key, False, *args, **kwargs) + + def append(self, key, *args, **kwargs): + return self._ensure_traced("append", key, False, *args, **kwargs) + + def prepend(self, key, *args, **kwargs): + return self._ensure_traced("prepend", key, False, *args, **kwargs) + + def cas(self, key, *args, **kwargs): + return self._ensure_traced("cas", key, False, *args, **kwargs) + + def get(self, key, *args, **kwargs): + return self._ensure_traced("get", key, None, *args, **kwargs) + + def gets(self, key, *args, **kwargs): + return self._ensure_traced("gets", key, None, *args, **kwargs) + + def delete(self, key, *args, **kwargs): + return self._ensure_traced("delete", key, False, *args, **kwargs) + + def incr(self, key, *args, **kwargs): + return self._ensure_traced("incr", key, False, *args, **kwargs) + + def decr(self, key, *args, **kwargs): + return self._ensure_traced("decr", key, False, *args, **kwargs) + + def touch(self, key, *args, **kwargs): + return self._ensure_traced("touch", key, False, *args, **kwargs) + + def _traced_cmd(self, command, client, key, default_val, *args, **kwargs): + # NB this function mimics the logic of HashClient._run_cmd, tracing the call to _safely_run_func + if client is None: + return default_val + + args = list(args) + args.insert(0, key) + + return self._trace_function_as_command( + lambda *_args, **_kwargs: self._safely_run_func( + client, getattr(client, command), default_val, *_args, **_kwargs + ), + command, + *args, + **kwargs, + ) + + +_HashClient.client_class = WrappedClient + + +def _get_address_tags(*args, **kwargs): + """Attempt to get host and port from args passed to Client initializer.""" + tags = {} + try: + if len(args): + host, port = args[0] + tags[net.TARGET_HOST] = host + tags[net.TARGET_PORT] = port + except Exception: + log.debug("Error collecting client address tags") + + return tags + + +def _get_query_string(args): + """Return the query values given the arguments to a pymemcache command. + + If there are multiple query values, they are joined together + space-separated. + """ + keys = "" + + # shortcut if no args + if not args: + return keys + + # pull out the first arg which will contain any key + arg = args[0] + + # if we get a dict, convert to list of keys + if type(arg) is dict: + arg = list(arg) + + if type(arg) is str: + keys = arg + elif type(arg) is bytes: + keys = arg.decode() + elif type(arg) is list and len(arg): + if type(arg[0]) is str: + keys = " ".join(arg) + elif type(arg[0]) is bytes: + keys = b" ".join(arg).decode() + + return keys + + +def _trace(func, p, method_name, *args, **kwargs): + """Run and trace the given command. + + Any pymemcache exception is caught and span error information is + set. The exception is then reraised for the application to handle + appropriately. + + Relevant tags are set in the span. 
+ """ + with p.tracer.trace( + schematize_cache_operation(memcachedx.CMD, cache_provider="memcached"), + service=p.service, + resource=method_name, + span_type=SpanTypes.CACHE, + ) as span: + span.set_tag_str(COMPONENT, config.pymemcache.integration_name) + span.set_tag_str(db.SYSTEM, memcachedx.DBMS_NAME) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.pymemcache.get_analytics_sample_rate()) + + # try to set relevant tags, catch any exceptions so we don't mess + # with the application + try: + span.set_tags(p.tags) + if config.pymemcache.command_enabled: + vals = _get_query_string(args) + query = "{}{}{}".format(method_name, " " if vals else "", vals) + span.set_tag_str(memcachedx.QUERY, query) + except Exception: + log.debug("Error setting relevant pymemcache tags") + + try: + result = func(*args, **kwargs) + + if method_name == "get_many" or method_name == "gets_many": + # gets_many returns a map of key -> (value, cas), else an empty dict if no matches + # get many returns a map with values, else an empty map if no matches + span.set_metric( + db.ROWCOUNT, sum(1 for doc in result if doc) if result and isinstance(result, Iterable) else 0 + ) + elif method_name == "get": + # get returns key or None + span.set_metric(db.ROWCOUNT, 1 if result else 0) + elif method_name == "gets": + # gets returns a tuple of (None, None) if key not found, else tuple of (key, index) + span.set_metric(db.ROWCOUNT, 1 if result[0] else 0) + return result + except ( + MemcacheClientError, + MemcacheServerError, + MemcacheUnknownCommandError, + MemcacheUnknownError, + MemcacheIllegalInputError, + ): + (typ, val, tb) = sys.exc_info() + span.set_exc_info(typ, val, tb) + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/patch.py new file mode 100644 index 0000000..78856c5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymemcache/patch.py @@ -0,0 +1,49 @@ +import pymemcache +import pymemcache.client.hash + +from ddtrace.ext import memcached as memcachedx +from ddtrace.internal.schema import schematize_service_name +from ddtrace.pin import _DD_PIN_NAME +from ddtrace.pin import _DD_PIN_PROXY_NAME +from ddtrace.pin import Pin + +from .client import WrappedClient +from .client import WrappedHashClient + + +_Client = pymemcache.client.base.Client +_hash_Client = pymemcache.client.hash.Client +_hash_HashClient = pymemcache.client.hash.Client + + +def get_version(): + # type: () -> str + return getattr(pymemcache, "__version__", "") + + +def patch(): + if getattr(pymemcache.client, "_datadog_patch", False): + return + + pymemcache.client._datadog_patch = True + pymemcache.client.base.Client = WrappedClient + pymemcache.client.hash.Client = WrappedClient + pymemcache.client.hash.HashClient = WrappedHashClient + + # Create a global pin with default configuration for our pymemcache clients + service = schematize_service_name(memcachedx.SERVICE) + Pin(service=service).onto(pymemcache) + + +def unpatch(): + """Remove pymemcache tracing""" + if not getattr(pymemcache.client, "_datadog_patch", False): + return + pymemcache.client._datadog_patch = False + pymemcache.client.base.Client = _Client + pymemcache.client.hash.Client = _hash_Client + pymemcache.client.hash.HashClient = _hash_HashClient + + # Remove any pins that may exist on the 
pymemcache reference + setattr(pymemcache, _DD_PIN_NAME, None) + setattr(pymemcache, _DD_PIN_PROXY_NAME, None) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/__init__.py new file mode 100644 index 0000000..c653dce --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/__init__.py @@ -0,0 +1,48 @@ +"""Instrument pymongo to report MongoDB queries. + +The pymongo integration works by wrapping pymongo's MongoClient to trace +network calls. Pymongo 3.0 and greater are the currently supported versions. +``import ddtrace.auto`` will automatically patch your MongoClient instance to make it work. + +:: + + # Be sure to import pymongo and not pymongo.MongoClient directly, + # otherwise you won't have access to the patched version + from ddtrace import Pin, patch + import pymongo + + # If not patched yet, you can patch pymongo specifically + patch(pymongo=True) + + # At that point, pymongo is instrumented with the default settings + client = pymongo.MongoClient() + # Example of instrumented query + db = client["test-db"] + db.teams.find({"name": "Toronto Maple Leafs"}) + + # Use a pin to specify metadata related to this client + client = pymongo.MongoClient() + pin = Pin.override(client, service="mongo-master") + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.pymongo["service"] + The service name reported by default for pymongo spans + + The option can also be set with the ``DD_PYMONGO_SERVICE`` environment variable + + Default: ``"pymongo"`` + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["pymongo"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/client.py new file mode 100644 index 0000000..ab1916b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/client.py @@ -0,0 +1,351 @@ +# stdlib +import contextlib +import json +from typing import Iterable + +# 3p +import pymongo + +# project +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor.wrapt import ObjectProxy + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import mongo as mongox +from ...ext import net as netx +from ...internal.logger import get_logger +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from .parse import parse_msg +from .parse import parse_query +from .parse import parse_spec + + +BATCH_PARTIAL_KEY = "Batch" + +# Original Client class +_MongoClient = pymongo.MongoClient + +VERSION = pymongo.version_tuple + +if VERSION < (3, 6, 0): + from pymongo.helpers import _unpack_response + + +log = get_logger(__name__) + +_DEFAULT_SERVICE = schematize_service_name("pymongo") + + +class TracedMongoClient(ObjectProxy): + def __init__(self, client=None, *args, **kwargs): + # To support the former trace_mongo_client interface, we have to keep this old interface + # TODO(Benjamin): drop it in a later version + if 
not isinstance(client, _MongoClient): + # Patched interface, instantiate the client + + # client is just the first arg which could be the host if it is + # None, then it could be that the caller: + + # if client is None then __init__ was: + # 1) invoked with host=None + # 2) not given a first argument (client defaults to None) + # we cannot tell which case it is, but it should not matter since + # the default value for host is None, in either case we can simply + # not provide it as an argument + if client is None: + client = _MongoClient(*args, **kwargs) + # else client is a value for host so just pass it along + else: + client = _MongoClient(client, *args, **kwargs) + + super(TracedMongoClient, self).__init__(client) + # NOTE[matt] the TracedMongoClient attempts to trace all of the network + # calls in the trace library. This is good because it measures the + # actual network time. It's bad because it uses a private API which + # could change. We'll see how this goes. + if not isinstance(client._topology, TracedTopology): + client._topology = TracedTopology(client._topology) + + # Default Pin + ddtrace.Pin(service=_DEFAULT_SERVICE).onto(self) + + def __setddpin__(self, pin): + pin.onto(self._topology) + + def __getddpin__(self): + return ddtrace.Pin.get_from(self._topology) + + +class TracedTopology(ObjectProxy): + def __init__(self, topology): + super(TracedTopology, self).__init__(topology) + + def select_server(self, *args, **kwargs): + s = self.__wrapped__.select_server(*args, **kwargs) + if not isinstance(s, TracedServer): + s = TracedServer(s) + # Reattach the pin every time in case it changed since the initial patching + ddtrace.Pin.get_from(self).onto(s) + return s + + +class TracedServer(ObjectProxy): + def __init__(self, server): + super(TracedServer, self).__init__(server) + + def _datadog_trace_operation(self, operation): + cmd = None + # Only try to parse something we think is a query. + if self._is_query(operation): + try: + cmd = parse_query(operation) + except Exception: + log.exception("error parsing query") + + pin = ddtrace.Pin.get_from(self) + # if we couldn't parse or shouldn't trace the message, just go. 
+ if not cmd or not pin or not pin.enabled(): + return None + + span = pin.tracer.trace( + schematize_database_operation("pymongo.cmd", database_provider="mongodb"), + span_type=SpanTypes.MONGODB, + service=pin.service, + ) + + span.set_tag_str(COMPONENT, config.pymongo.integration_name) + + # set span.kind to the operation type being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + span.set_tag_str(mongox.DB, cmd.db) + span.set_tag_str(mongox.COLLECTION, cmd.coll) + span.set_tag_str(db.SYSTEM, mongox.SERVICE) + span.set_tags(cmd.tags) + + # set `mongodb.query` tag and resource for span + _set_query_metadata(span, cmd) + + # set analytics sample rate + sample_rate = config.pymongo.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + return span + + if VERSION >= (4, 5, 0): + + @contextlib.contextmanager + def checkout(self, *args, **kwargs): + with self.__wrapped__.checkout(*args, **kwargs) as s: + if not isinstance(s, TracedSocket): + s = TracedSocket(s) + ddtrace.Pin.get_from(self).onto(s) + yield s + + else: + + @contextlib.contextmanager + def get_socket(self, *args, **kwargs): + with self.__wrapped__.get_socket(*args, **kwargs) as s: + if not isinstance(s, TracedSocket): + s = TracedSocket(s) + ddtrace.Pin.get_from(self).onto(s) + yield s + + if VERSION >= (3, 12, 0): + + def run_operation(self, sock_info, operation, *args, **kwargs): + span = self._datadog_trace_operation(operation) + if span is None: + return self.__wrapped__.run_operation(sock_info, operation, *args, **kwargs) + with span: + result = self.__wrapped__.run_operation(sock_info, operation, *args, **kwargs) + if result: + if hasattr(result, "address"): + set_address_tags(span, result.address) + if self._is_query(operation) and hasattr(result, "docs"): + set_query_rowcount(docs=result.docs, span=span) + return result + + elif (3, 9, 0) <= VERSION < (3, 12, 0): + + def run_operation_with_response(self, sock_info, operation, *args, **kwargs): + span = self._datadog_trace_operation(operation) + if span is None: + return self.__wrapped__.run_operation_with_response(sock_info, operation, *args, **kwargs) + with span: + result = self.__wrapped__.run_operation_with_response(sock_info, operation, *args, **kwargs) + if result: + if hasattr(result, "address"): + set_address_tags(span, result.address) + if self._is_query(operation) and hasattr(result, "docs"): + set_query_rowcount(docs=result.docs, span=span) + return result + + else: + + def send_message_with_response(self, operation, *args, **kwargs): + span = self._datadog_trace_operation(operation) + if span is None: + return self.__wrapped__.send_message_with_response(operation, *args, **kwargs) + with span: + result = self.__wrapped__.send_message_with_response(operation, *args, **kwargs) + if result: + if hasattr(result, "address"): + set_address_tags(span, result.address) + if self._is_query(operation): + if hasattr(result, "data"): + if VERSION >= (3, 6, 0) and hasattr(result.data, "unpack_response"): + set_query_rowcount(docs=result.data.unpack_response(), span=span) + else: + data = _unpack_response(response=result.data) + if VERSION < (3, 2, 0) and data.get("number_returned", None): + span.set_metric(db.ROWCOUNT, data.get("number_returned")) + elif (3, 2, 0) <= VERSION < (3, 6, 0): + docs = data.get("data", None) + set_query_rowcount(docs=docs, span=span) + return result + + @staticmethod + def _is_query(op): + # NOTE: _Query should always have a spec field + 
return hasattr(op, "spec") + + +class TracedSocket(ObjectProxy): + def __init__(self, socket): + super(TracedSocket, self).__init__(socket) + + def command(self, dbname, spec, *args, **kwargs): + cmd = None + try: + cmd = parse_spec(spec, dbname) + except Exception: + log.exception("error parsing spec. skipping trace") + + pin = ddtrace.Pin.get_from(self) + # skip tracing if we don't have a piece of data we need + if not dbname or not cmd or not pin or not pin.enabled(): + return self.__wrapped__.command(dbname, spec, *args, **kwargs) + + cmd.db = dbname + with self.__trace(cmd): + return self.__wrapped__.command(dbname, spec, *args, **kwargs) + + def write_command(self, *args, **kwargs): + msg = get_argument_value(args, kwargs, 1, "msg") + cmd = None + try: + cmd = parse_msg(msg) + except Exception: + log.exception("error parsing msg") + + pin = ddtrace.Pin.get_from(self) + # if we couldn't parse it, don't try to trace it. + if not cmd or not pin or not pin.enabled(): + return self.__wrapped__.write_command(*args, **kwargs) + + with self.__trace(cmd) as s: + result = self.__wrapped__.write_command(*args, **kwargs) + if result: + s.set_metric(db.ROWCOUNT, result.get("n", -1)) + return result + + def __trace(self, cmd): + pin = ddtrace.Pin.get_from(self) + s = pin.tracer.trace( + schematize_database_operation("pymongo.cmd", database_provider="mongodb"), + span_type=SpanTypes.MONGODB, + service=pin.service, + ) + + s.set_tag_str(COMPONENT, config.pymongo.integration_name) + s.set_tag_str(db.SYSTEM, mongox.SERVICE) + + # set span.kind to the type of operation being performed + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + s.set_tag(SPAN_MEASURED_KEY) + if cmd.db: + s.set_tag_str(mongox.DB, cmd.db) + if cmd: + s.set_tag(mongox.COLLECTION, cmd.coll) + s.set_tags(cmd.tags) + s.set_metrics(cmd.metrics) + + # set `mongodb.query` tag and resource for span + _set_query_metadata(s, cmd) + + # set analytics sample rate + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.pymongo.get_analytics_sample_rate()) + + if self.address: + set_address_tags(s, self.address) + return s + + +def normalize_filter(f=None): + if f is None: + return {} + elif isinstance(f, list): + # normalize lists of filters + # e.g. {$or: [ { age: { $lt: 30 } }, { type: 1 } ]} + return [normalize_filter(s) for s in f] + elif isinstance(f, dict): + # normalize dicts of filters + # {$or: [ { age: { $lt: 30 } }, { type: 1 } ]}) + out = {} + for k, v in f.items(): + if k == "$in" or k == "$nin": + # special case $in queries so we don't loop over lists. + out[k] = "?" + elif isinstance(v, list) or isinstance(v, dict): + # RECURSION ALERT: needs to move to the agent + out[k] = normalize_filter(v) + else: + # NOTE: this shouldn't happen, but let's have a safeguard. + out[k] = "?" + return out + else: + # FIXME[matt] unexpected type. not sure this should ever happen, but at + # least it won't crash. + return {} + + +def set_address_tags(span, address): + # the address is only set after the cursor is done. 
+ if address: + span.set_tag_str(netx.TARGET_HOST, address[0]) + span.set_tag(netx.TARGET_PORT, address[1]) + + +def _set_query_metadata(span, cmd): + """Sets span `mongodb.query` tag and resource given command query""" + if cmd.query: + nq = normalize_filter(cmd.query) + span.set_tag("mongodb.query", nq) + # needed to dump json so we don't get unicode + # dict keys like {u'foo':'bar'} + q = json.dumps(nq) + span.resource = "{} {} {}".format(cmd.name, cmd.coll, q) + else: + span.resource = "{} {}".format(cmd.name, cmd.coll) + + +def set_query_rowcount(docs, span): + # results returned in batches, get len of each batch + if isinstance(docs, Iterable) and len(docs) > 0: + cursor = docs[0].get("cursor", None) + if cursor: + rowcount = sum([len(documents) for batch_key, documents in cursor.items() if BATCH_PARTIAL_KEY in batch_key]) + span.set_metric(db.ROWCOUNT, rowcount) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/parse.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/parse.py new file mode 100644 index 0000000..1a4330d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/parse.py @@ -0,0 +1,204 @@ +import ctypes +import struct + +# 3p +import bson +from bson.codec_options import CodecOptions +from bson.son import SON + +# project +from ...ext import net as netx +from ...internal.compat import to_unicode +from ...internal.logger import get_logger + + +log = get_logger(__name__) + + +# MongoDB wire protocol commands +# http://docs.mongodb.com/manual/reference/mongodb-wire-protocol +OP_CODES = { + 1: "reply", + 1000: "msg", # DEV: 1000 was deprecated at some point, use 2013 instead + 2001: "update", + 2002: "insert", + 2003: "reserved", + 2004: "query", + 2005: "get_more", + 2006: "delete", + 2007: "kill_cursors", + 2010: "command", + 2011: "command_reply", + 2013: "msg", +} + +# The maximum message length we'll try to parse +MAX_MSG_PARSE_LEN = 1024 * 1024 + +header_struct = struct.Struct("= 3.1 stores the db and coll separately + coll = getattr(query, "coll", None) + db = getattr(query, "db", None) + + # pymongo < 3.1 _Query does not have a name field, so default to 'query' + cmd = Command(getattr(query, "name", "query"), db, coll) + cmd.query = query.spec + return cmd + + +def parse_spec(spec, db=None): + """Return a Command that has parsed the relevant detail for the given + pymongo SON spec. + """ + + # the first element is the command and collection + items = list(spec.items()) + if not items: + return None + name, coll = items[0] + cmd = Command(name, db or spec.get("$db"), coll) + + if "ordered" in spec: # in insert and update + cmd.tags["mongodb.ordered"] = spec["ordered"] + + if cmd.name == "insert": + if "documents" in spec: + cmd.metrics["mongodb.documents"] = len(spec["documents"]) + + elif cmd.name == "update": + updates = spec.get("updates") + if updates: + # FIXME[matt] is there ever more than one here? + cmd.query = updates[0].get("q") + + elif cmd.name == "delete": + dels = spec.get("deletes") + if dels: + # FIXME[matt] is there ever more than one here? 
+ cmd.query = dels[0].get("q") + + return cmd + + +def _cstring(raw): + """Return the first null terminated cstring from the buffer.""" + return ctypes.create_string_buffer(raw).value + + +def _split_namespace(ns): + """Return a tuple of (db, collection) from the 'db.coll' string.""" + if ns: + # NOTE[matt] ns is unicode or bytes depending on the client version + # so force cast to unicode + split = to_unicode(ns).split(".", 1) + if len(split) == 1: + raise Exception("namespace doesn't contain period: %s" % ns) + return split + return (None, None) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/patch.py new file mode 100644 index 0000000..13ee461 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymongo/patch.py @@ -0,0 +1,94 @@ +import contextlib + +import pymongo + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib import trace_utils +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import mongo +from ..trace_utils import unwrap as _u +from .client import TracedMongoClient +from .client import set_address_tags + + +config._add( + "pymongo", + dict(_default_service="pymongo"), +) + + +def get_version(): + # type: () -> str + return getattr(pymongo, "__version__", "") + + +# Original Client class +_MongoClient = pymongo.MongoClient + +_VERSION = pymongo.version_tuple +_CHECKOUT_FN_NAME = "get_socket" if _VERSION < (4, 5) else "checkout" + + +def patch(): + patch_pymongo_module() + # We should progressively get rid of TracedMongoClient. We now try to + # wrap methods individually. 
cf #1501 + pymongo.MongoClient = TracedMongoClient + + +def unpatch(): + unpatch_pymongo_module() + pymongo.MongoClient = _MongoClient + + +def patch_pymongo_module(): + if getattr(pymongo, "_datadog_patch", False): + return + pymongo._datadog_patch = True + Pin().onto(pymongo.server.Server) + + # Whenever a pymongo command is invoked, the lib either: + # - Creates a new socket & performs a TCP handshake + # - Grabs a socket already initialized before + _w("pymongo.server", "Server.%s" % _CHECKOUT_FN_NAME, traced_get_socket) + + +def unpatch_pymongo_module(): + if not getattr(pymongo, "_datadog_patch", False): + return + pymongo._datadog_patch = False + + _u(pymongo.server.Server, _CHECKOUT_FN_NAME) + + +@contextlib.contextmanager +def traced_get_socket(wrapped, instance, args, kwargs): + pin = Pin._find(wrapped, instance) + if not pin or not pin.enabled(): + with wrapped(*args, **kwargs) as sock_info: + yield sock_info + return + + with pin.tracer.trace( + "pymongo.%s" % _CHECKOUT_FN_NAME, + service=trace_utils.int_service(pin, config.pymongo), + span_type=SpanTypes.MONGODB, + ) as span: + span.set_tag_str(COMPONENT, config.pymongo.integration_name) + span.set_tag_str(db.SYSTEM, mongo.SERVICE) + + # set span.kind tag equal to type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + with wrapped(*args, **kwargs) as sock_info: + set_address_tags(span, sock_info.address) + span.set_tag(SPAN_MEASURED_KEY) + yield sock_info diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/__init__.py new file mode 100644 index 0000000..43283e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/__init__.py @@ -0,0 +1,68 @@ +""" +The pymysql integration instruments the pymysql library to trace MySQL queries. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(pymysql=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.pymysql["service"] + + The service name reported by default for pymysql spans. + + This option can also be set with the ``DD_PYMYSQL_SERVICE`` environment + variable. + + Default: ``"mysql"`` + +.. py:data:: ddtrace.config.pymysql["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_PYMYSQL_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + from pymysql import connect + + # This will report a span with the default settings + conn = connect(user="alice", password="b0b", host="localhost", port=3306, database="test") + + # Use a pin to override the service name for this connection. 
+ Pin.override(conn, service="pymysql-users") + + + cursor = conn.cursor() + cursor.execute("SELECT 6*7 AS the_answer;") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["pymysql"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/patch.py new file mode 100644 index 0000000..fe2fdce --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pymysql/patch.py @@ -0,0 +1,63 @@ +import os + +import pymysql + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib.dbapi import TracedConnection +from ddtrace.vendor import wrapt + +from ...ext import db +from ...ext import net +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool + + +config._add( + "pymysql", + dict( + _default_service=schematize_service_name("pymysql"), + _dbapi_span_name_prefix="pymysql", + _dbapi_span_operation_name=schematize_database_operation("pymysql.query", database_provider="mysql"), + trace_fetch_methods=asbool(os.getenv("DD_PYMYSQL_TRACE_FETCH_METHODS", default=False)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(pymysql, "__version__", "") + + +CONN_ATTR_BY_TAG = { + net.TARGET_HOST: "host", + net.TARGET_PORT: "port", + db.USER: "user", + db.NAME: "db", +} + + +def patch(): + wrapt.wrap_function_wrapper("pymysql", "connect", _connect) + + +def unpatch(): + if isinstance(pymysql.connect, wrapt.ObjectProxy): + pymysql.connect = pymysql.connect.__wrapped__ + + +def _connect(func, instance, args, kwargs): + conn = func(*args, **kwargs) + return patch_conn(conn) + + +def patch_conn(conn): + tags = {t: getattr(conn, a, "") for t, a in CONN_ATTR_BY_TAG.items()} + tags[db.SYSTEM] = "mysql" + pin = Pin(tags=tags) + + # grab the metadata from the conn + wrapped = TracedConnection(conn, pin=pin, cfg=config.pymysql) + pin.onto(wrapped) + return wrapped diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/__init__.py new file mode 100644 index 0000000..db9b343 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/__init__.py @@ -0,0 +1,42 @@ +""" +The PynamoDB integration traces all db calls made with the pynamodb +library through the connection API. + +Enabling +~~~~~~~~ + +The PynamoDB integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + import pynamodb + from ddtrace import patch, config + patch(pynamodb=True) + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.pynamodb["service"] + + The service name reported by default for the PynamoDB instance. + + This option can also be set with the ``DD_PYNAMODB_SERVICE`` environment + variable. 
+ + Default: ``"pynamodb"`` + +""" + + +from ...internal.utils.importlib import require_modules + + +required_modules = ["pynamodb.connection.base"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/patch.py new file mode 100644 index 0000000..e5bbcb2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pynamodb/patch.py @@ -0,0 +1,108 @@ +""" +Trace queries to botocore api done via a pynamodb client +""" + +import pynamodb.connection.base + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_cloud_api_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...internal.utils.formats import deep_getattr +from ...pin import Pin +from .. import trace_utils +from ..trace_utils import unwrap + + +# Pynamodb connection class +_PynamoDB_client = pynamodb.connection.base.Connection + +config._add( + "pynamodb", + { + "_default_service": schematize_service_name("pynamodb"), + }, +) + + +def get_version(): + # type: () -> str + return getattr(pynamodb, "__version__", "") + + +def patch(): + if getattr(pynamodb.connection.base, "_datadog_patch", False): + return + pynamodb.connection.base._datadog_patch = True + + wrapt.wrap_function_wrapper("pynamodb.connection.base", "Connection._make_api_call", patched_api_call) + Pin(service=None).onto(pynamodb.connection.base.Connection) + + +def unpatch(): + if getattr(pynamodb.connection.base, "_datadog_patch", False): + pynamodb.connection.base._datadog_patch = False + unwrap(pynamodb.connection.base.Connection, "_make_api_call") + + +def patched_api_call(original_func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return original_func(*args, **kwargs) + + with pin.tracer.trace( + schematize_cloud_api_operation("pynamodb.command", cloud_provider="aws", cloud_service="dynamodb"), + service=trace_utils.ext_service(pin, config.pynamodb, "pynamodb"), + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.pynamodb.integration_name) + span.set_tag_str(db.SYSTEM, "dynamodb") + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + try: + operation = get_argument_value(args, kwargs, 0, "operation_name") + span.resource = operation + + if args[1] and "TableName" in args[1]: + table_name = args[1]["TableName"] + span.set_tag_str("table_name", table_name) + span.set_tag_str("tablename", table_name) + span.resource = span.resource + " " + table_name + + except ArgumentError: + span.resource = "Unknown" + operation = None + + region_name = deep_getattr(instance, "client.meta.region_name") + + meta = { + "aws.agent": "pynamodb", + "aws.operation": operation, + "aws.region": region_name, + "region": region_name, + } + span.set_tags(meta) + + # set analytics sample rate + sample_rate = 
config.pynamodb.get_analytics_sample_rate(use_global_config=True) + + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + result = original_func(*args, **kwargs) + + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/__init__.py new file mode 100644 index 0000000..775c1e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/__init__.py @@ -0,0 +1,66 @@ +""" +The pyodbc integration instruments the pyodbc library to trace pyodbc queries. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(pyodbc=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.pyodbc["service"] + + The service name reported by default for pyodbc spans. + + This option can also be set with the ``DD_PYODBC_SERVICE`` environment + variable. + + Default: ``"pyodbc"`` + +.. py:data:: ddtrace.config.pyodbc["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_PYODBC_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + import pyodbc + + # This will report a span with the default settings + db = pyodbc.connect("") + + # Use a pin to override the service name for the connection. + Pin.override(db, service='pyodbc-users') + + cursor = db.cursor() + cursor.execute("select * from users where id = 1") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["pyodbc"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/patch.py new file mode 100644 index 0000000..4ca1dbb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyodbc/patch.py @@ -0,0 +1,69 @@ +import os + +import pyodbc + +from ddtrace.internal.schema import schematize_service_name + +from ... import Pin +from ... 
import config +from ...ext import db +from ...internal.utils.formats import asbool +from ..dbapi import TracedConnection +from ..dbapi import TracedCursor +from ..trace_utils import unwrap +from ..trace_utils import wrap + + +config._add( + "pyodbc", + dict( + _default_service=schematize_service_name("pyodbc"), + _dbapi_span_name_prefix="pyodbc", + trace_fetch_methods=asbool(os.getenv("DD_PYODBC_TRACE_FETCH_METHODS", default=False)), + ), +) + + +def get_version(): + # type: () -> str + return pyodbc.version + + +def patch(): + if getattr(pyodbc, "_datadog_patch", False): + return + pyodbc._datadog_patch = True + wrap("pyodbc", "connect", _connect) + + +def unpatch(): + if getattr(pyodbc, "_datadog_patch", False): + pyodbc._datadog_patch = False + unwrap(pyodbc, "connect") + + +def _connect(func, instance, args, kwargs): + conn = func(*args, **kwargs) + return patch_conn(conn) + + +def patch_conn(conn): + try: + tags = {db.SYSTEM: conn.getinfo(pyodbc.SQL_DBMS_NAME), db.USER: conn.getinfo(pyodbc.SQL_USER_NAME)} + except pyodbc.Error: + tags = {} + pin = Pin(service=None, tags=tags) + wrapped = PyODBCTracedConnection(conn, pin=pin) + pin.onto(wrapped) + return wrapped + + +class PyODBCTracedCursor(TracedCursor): + pass + + +class PyODBCTracedConnection(TracedConnection): + def __init__(self, conn, pin=None, cursor_cls=None): + if not cursor_cls: + cursor_cls = PyODBCTracedCursor + super(PyODBCTracedConnection, self).__init__(conn, pin, config.pyodbc, cursor_cls=cursor_cls) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/__init__.py new file mode 100644 index 0000000..ff5e5ad --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/__init__.py @@ -0,0 +1,56 @@ +r"""To trace requests from a Pyramid application, trace your application +config:: + + + from pyramid.config import Configurator + from ddtrace.contrib.pyramid import trace_pyramid + + settings = { + 'datadog_trace_service' : 'my-web-app-name', + } + + config = Configurator(settings=settings) + trace_pyramid(config) + + # use your config as normal. + config.add_route('index', '/') + +Available settings are: + +* ``datadog_trace_service``: change the `pyramid` service name +* ``datadog_trace_enabled``: sets if the Tracer is enabled or not +* ``datadog_distributed_tracing``: set it to ``False`` to disable Distributed Tracing + +If you use the ``pyramid.tweens`` settings value to set the tweens for your +application, you need to add ``ddtrace.contrib.pyramid:trace_tween_factory`` +explicitly to the list. For example:: + + settings = { + 'datadog_trace_service' : 'my-web-app-name', + 'pyramid.tweens', 'your_tween_no_1\\nyour_tween_no_2\\nddtrace.contrib.pyramid:trace_tween_factory', + } + + config = Configurator(settings=settings) + trace_pyramid(config) + + # use your config as normal. + config.add_route('index', '/') + +:ref:`All HTTP tags ` are supported for this integration. 
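+
+For example, distributed tracing can be switched off for an application through
+the same settings mechanism (a sketch reusing the configuration shown above)::
+
+    from pyramid.config import Configurator
+    from ddtrace.contrib.pyramid import trace_pyramid
+
+    settings = {
+        'datadog_trace_service' : 'my-web-app-name',
+        'datadog_distributed_tracing' : False,
+    }
+
+    config = Configurator(settings=settings)
+    trace_pyramid(config)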
+ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["pyramid"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .trace import includeme + from .trace import trace_pyramid + from .trace import trace_tween_factory + + __all__ = ["patch", "trace_pyramid", "trace_tween_factory", "includeme", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/constants.py new file mode 100644 index 0000000..4d96877 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/constants.py @@ -0,0 +1,6 @@ +SETTINGS_SERVICE = "datadog_trace_service" +SETTINGS_TRACER = "datadog_tracer" +SETTINGS_TRACE_ENABLED = "datadog_trace_enabled" +SETTINGS_DISTRIBUTED_TRACING = "datadog_distributed_tracing" +SETTINGS_ANALYTICS_ENABLED = "datadog_analytics_enabled" +SETTINGS_ANALYTICS_SAMPLE_RATE = "datadog_analytics_sample_rate" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/patch.py new file mode 100644 index 0000000..2a5f942 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/patch.py @@ -0,0 +1,103 @@ +import os + +import pyramid +import pyramid.config + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...internal.utils.formats import asbool +from .constants import SETTINGS_ANALYTICS_ENABLED +from .constants import SETTINGS_ANALYTICS_SAMPLE_RATE +from .constants import SETTINGS_DISTRIBUTED_TRACING +from .constants import SETTINGS_SERVICE +from .trace import DD_TWEEN_NAME +from .trace import trace_pyramid + + +config._add( + "pyramid", + dict( + distributed_tracing=asbool(os.getenv("DD_PYRAMID_DISTRIBUTED_TRACING", default=True)), + ), +) + +DD_PATCH = "_datadog_patch" + + +def get_version(): + # type: () -> str + try: + import importlib.metadata as importlib_metadata + except ImportError: + import importlib_metadata # type: ignore[no-redef] + + return str(importlib_metadata.version(pyramid.__package__)) + + +def patch(): + """ + Patch pyramid.config.Configurator + """ + if getattr(pyramid.config, DD_PATCH, False): + return + + setattr(pyramid.config, DD_PATCH, True) + _w = wrapt.wrap_function_wrapper + _w("pyramid.config", "Configurator.__init__", traced_init) + + +def traced_init(wrapped, instance, args, kwargs): + settings = kwargs.pop("settings", {}) + service = config._get_service(default="pyramid") + # DEV: integration-specific analytics flag can be not set but still enabled + # globally for web frameworks + old_analytics_enabled = os.getenv("DD_PYRAMID_ANALYTICS_ENABLED") + analytics_enabled = os.environ.get("DD_TRACE_PYRAMID_ANALYTICS_ENABLED", old_analytics_enabled) + if analytics_enabled is not None: + analytics_enabled = asbool(analytics_enabled) + # TODO: why is analytics sample rate a string or a bool here? 
+ old_analytics_sample_rate = os.getenv("DD_PYRAMID_ANALYTICS_SAMPLE_RATE", default=True) + analytics_sample_rate = os.environ.get("DD_TRACE_PYRAMID_ANALYTICS_SAMPLE_RATE", old_analytics_sample_rate) + trace_settings = { + SETTINGS_SERVICE: service, + SETTINGS_DISTRIBUTED_TRACING: config.pyramid.distributed_tracing, + SETTINGS_ANALYTICS_ENABLED: analytics_enabled, + SETTINGS_ANALYTICS_SAMPLE_RATE: analytics_sample_rate, + } + # Update over top of the defaults + # DEV: If we did `settings.update(trace_settings)` then we would only ever + # have the default values. + trace_settings.update(settings) + # If the tweens are explicitly set with 'pyramid.tweens', we need to + # explicitly set our tween too since `add_tween` will be ignored. + insert_tween_if_needed(trace_settings) + + # The original Configurator.__init__ looks up two levels to find the package + # name if it is not provided. This has to be replicated here since this patched + # call will occur at the same level in the call stack. + if not kwargs.get("package", None): + from pyramid.path import caller_package + + kwargs["package"] = caller_package(level=2) + + kwargs["settings"] = trace_settings + wrapped(*args, **kwargs) + trace_pyramid(instance) + + +def insert_tween_if_needed(settings): + tweens = settings.get("pyramid.tweens") + # If the list is empty, pyramid does not consider the tweens have been + # set explicitly. + # And if our tween is already there, nothing to do + if not tweens or not tweens.strip() or DD_TWEEN_NAME in tweens: + return + # pyramid.tweens.EXCVIEW is the name of built-in exception view provided by + # pyramid. We need our tween to be before it, otherwise unhandled + # exceptions will be caught before they reach our tween. + idx = tweens.find(pyramid.tweens.EXCVIEW) + if idx == -1: + settings["pyramid.tweens"] = tweens + "\n" + DD_TWEEN_NAME + else: + settings["pyramid.tweens"] = tweens[:idx] + DD_TWEEN_NAME + "\n" + tweens[idx:] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/trace.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/trace.py new file mode 100644 index 0000000..1cf3b53 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pyramid/trace.py @@ -0,0 +1,138 @@ +from pyramid.httpexceptions import HTTPException +import pyramid.renderers +from pyramid.settings import asbool + +# project +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.logger import get_logger +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from .. 
import trace_utils +from .constants import SETTINGS_ANALYTICS_ENABLED +from .constants import SETTINGS_ANALYTICS_SAMPLE_RATE +from .constants import SETTINGS_DISTRIBUTED_TRACING +from .constants import SETTINGS_SERVICE +from .constants import SETTINGS_TRACE_ENABLED +from .constants import SETTINGS_TRACER + + +log = get_logger(__name__) + +DD_TWEEN_NAME = "ddtrace.contrib.pyramid:trace_tween_factory" +DD_TRACER = "_datadog_tracer" + + +def trace_pyramid(config): + config.include("ddtrace.contrib.pyramid") + + +def includeme(config): + # Add our tween just before the default exception handler + config.add_tween(DD_TWEEN_NAME, over=pyramid.tweens.EXCVIEW) + # ensure we only patch the renderer once. + if not isinstance(pyramid.renderers.RendererHelper.render, wrapt.ObjectProxy): + wrapt.wrap_function_wrapper("pyramid.renderers", "RendererHelper.render", trace_render) + + +def trace_render(func, instance, args, kwargs): + # If the request is not traced, we do not trace + request = kwargs.get("request", {}) + if not request: + log.debug("No request passed to render, will not be traced") + return func(*args, **kwargs) + tracer = getattr(request, DD_TRACER, None) + if not tracer: + log.debug("No tracer found in request, will not be traced") + return func(*args, **kwargs) + + with tracer.trace("pyramid.render", span_type=SpanTypes.TEMPLATE) as span: + span.set_tag_str(COMPONENT, config.pyramid.integration_name) + + return func(*args, **kwargs) + + +def trace_tween_factory(handler, registry): + # configuration + settings = registry.settings + service = settings.get(SETTINGS_SERVICE) or schematize_service_name("pyramid") + tracer = settings.get(SETTINGS_TRACER) or ddtrace.tracer + enabled = asbool(settings.get(SETTINGS_TRACE_ENABLED, tracer.enabled)) + distributed_tracing = asbool(settings.get(SETTINGS_DISTRIBUTED_TRACING, True)) + + if enabled: + # make a request tracing function + def trace_tween(request): + trace_utils.activate_distributed_headers( + tracer, int_config=config.pyramid, request_headers=request.headers, override=distributed_tracing + ) + + span_name = schematize_url_operation("pyramid.request", protocol="http", direction=SpanDirection.INBOUND) + with tracer.trace(span_name, service=service, resource="404", span_type=SpanTypes.WEB) as span: + span.set_tag_str(COMPONENT, config.pyramid.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + span.set_tag(SPAN_MEASURED_KEY) + # Configure trace search sample rate + # DEV: pyramid is special case maintains separate configuration from config api + analytics_enabled = settings.get(SETTINGS_ANALYTICS_ENABLED) + + if (config.analytics_enabled and analytics_enabled is not False) or analytics_enabled is True: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, settings.get(SETTINGS_ANALYTICS_SAMPLE_RATE, True)) + + setattr(request, DD_TRACER, tracer) # used to find the tracer in templates + response = None + status = None + try: + response = handler(request) + except HTTPException as e: + # If the exception is a pyramid HTTPException, + # that's still valuable information that isn't necessarily + # a 500. For instance, HTTPFound is a 302. 
+ # As described in docs, Pyramid exceptions are all valid + # response types + response = e + raise + except BaseException: + status = 500 + raise + finally: + # set request tags + if request.matched_route: + span.resource = "{} {}".format(request.method, request.matched_route.name) + span.set_tag_str("pyramid.route.name", request.matched_route.name) + # set response tags + if response: + status = response.status_code + response_headers = response.headers + else: + response_headers = None + + trace_utils.set_http_meta( + span, + config.pyramid, + method=request.method, + url=request.path_url, + status_code=status, + query=request.query_string, + request_headers=request.headers, + response_headers=response_headers, + route=request.matched_route.pattern if request.matched_route else None, + ) + return response + + return trace_tween + + # if timing support is not enabled, return the original handler + return handler diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/__init__.py new file mode 100644 index 0000000..30be678 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/__init__.py @@ -0,0 +1,86 @@ +""" +The pytest integration traces test executions. + +Enabling +~~~~~~~~ + +Enable traced execution of tests using ``pytest`` runner by +running ``pytest --ddtrace`` or by modifying any configuration +file read by pytest (``pytest.ini``, ``setup.cfg``, ...):: + + [pytest] + ddtrace = 1 + + +If you need to disable it, the option ``--no-ddtrace`` will take +precedence over ``--ddtrace`` and (``pytest.ini``, ``setup.cfg``, ...) + +You can enable all integrations by using the ``--ddtrace-patch-all`` option +alongside ``--ddtrace`` or by adding this to your configuration:: + + [pytest] + ddtrace = 1 + ddtrace-patch-all = 1 + + +.. note:: + The ddtrace plugin for pytest has the side effect of importing the ddtrace + package and starting a global tracer. + + If this is causing issues for your pytest runs where traced execution of + tests is not enabled, you can deactivate the plugin:: + + [pytest] + addopts = -p no:ddtrace + + See the `pytest documentation + `_ + for more details. + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.pytest["service"] + + The service name reported by default for pytest traces. + + This option can also be set with the integration specific ``DD_PYTEST_SERVICE`` environment + variable, or more generally with the `DD_SERVICE` environment variable. + + Default: Name of the repository being tested, otherwise ``"pytest"`` if the repository name cannot be found. + + +.. py:data:: ddtrace.config.pytest["operation_name"] + + The operation name reported by default for pytest traces. + + This option can also be set with the ``DD_PYTEST_OPERATION_NAME`` environment + variable. 
+ + Default: ``"pytest.test"`` +""" + +import os + +from ddtrace import config + + +# pytest default settings +config._add( + "pytest", + dict( + _default_service="pytest", + operation_name=os.getenv("DD_PYTEST_OPERATION_NAME", default="pytest.test"), + ), +) + + +def get_version(): + # type: () -> str + import pytest + + return pytest.__version__ + + +__all__ = ["get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/constants.py new file mode 100644 index 0000000..f4ecab0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/constants.py @@ -0,0 +1,9 @@ +FRAMEWORK = "pytest" +KIND = "test" + +DDTRACE_HELP_MSG = "Enable tracing of pytest functions." +NO_DDTRACE_HELP_MSG = "Disable tracing of pytest functions." +DDTRACE_INCLUDE_CLASS_HELP_MSG = "Prepend 'ClassName.' to names of class-based tests." + +# XFail Reason +XFAIL_REASON = "pytest.xfail.reason" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/newhooks.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/newhooks.py new file mode 100644 index 0000000..c44fd0a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/newhooks.py @@ -0,0 +1,26 @@ +"""pytest-ddtrace hooks. + +These hooks are used to provide extra data used by the Datadog CI Visibility plugin. + +For example: module, suite, and test names for a given item. + +Note that these names will affect th display and reporting of tests in the Datadog UI, as well as information stored +the Intelligent Test Runner. Differing hook implementations may impact the behavior of Datadog CI Visibility products. +""" + +import pytest + + +@pytest.hookspec(firstresult=True) +def pytest_ddtrace_get_item_module_name(item: pytest.Item) -> str: + """Returns the module name to use when reporting CI Visibility results, should be unique""" + + +@pytest.hookspec(firstresult=True) +def pytest_ddtrace_get_item_suite_name(item: pytest.Item) -> str: + """Returns the suite name to use when reporting CI Visibility result, should be unique""" + + +@pytest.hookspec(firstresult=True) +def pytest_ddtrace_get_item_test_name(item: pytest.Item) -> str: + """Returns the test name to use when reporting CI Visibility result, should be unique""" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/plugin.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/plugin.py new file mode 100644 index 0000000..987edd3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest/plugin.py @@ -0,0 +1,927 @@ +""" +This custom pytest plugin implements tracing for pytest by using pytest hooks. The plugin registers tracing code +to be run at specific points during pytest execution. The most important hooks used are: + + * pytest_sessionstart: during pytest session startup, a custom trace filter is configured to the global tracer to + only send test spans, which are generated by the plugin. + * pytest_runtest_protocol: this wraps around the execution of a pytest test function, which we trace. Most span + tags are generated and added in this function. We also store the span on the underlying pytest test item to + retrieve later when we need to report test status/result. + * pytest_runtest_makereport: this hook is used to set the test status/result tag, including skipped tests and + expected failures. 
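+
+The ``pytest_ddtrace_*`` hookspecs declared in ``newhooks.py`` above can be
+implemented from a test suite's own plugins to adjust the names reported to CI
+Visibility. A hypothetical ``conftest.py`` sketch (the naming scheme shown is
+only an example)::
+
+    import pytest
+
+    @pytest.hookimpl
+    def pytest_ddtrace_get_item_suite_name(item):
+        # report suites by file path only, dropping any test class name
+        return item.nodeid.split("::")[0]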
+ +""" +from doctest import DocTest +import json +import re +from typing import Dict # noqa:F401 + +from _pytest.nodes import get_fslocation_from_item +import pytest + +import ddtrace +from ddtrace.constants import SPAN_KIND +from ddtrace.contrib.coverage.data import _coverage_data +from ddtrace.contrib.coverage.patch import patch as patch_coverage +from ddtrace.contrib.coverage.patch import run_coverage_report +from ddtrace.contrib.coverage.patch import unpatch as unpatch_coverage +from ddtrace.contrib.coverage.utils import _is_coverage_invoked_by_coverage_run +from ddtrace.contrib.coverage.utils import _is_coverage_patched +from ddtrace.contrib.pytest.constants import DDTRACE_HELP_MSG +from ddtrace.contrib.pytest.constants import DDTRACE_INCLUDE_CLASS_HELP_MSG +from ddtrace.contrib.pytest.constants import FRAMEWORK +from ddtrace.contrib.pytest.constants import KIND +from ddtrace.contrib.pytest.constants import NO_DDTRACE_HELP_MSG +from ddtrace.contrib.pytest.constants import XFAIL_REASON +from ddtrace.contrib.unittest import unpatch as unpatch_unittest +from ddtrace.ext import SpanTypes +from ddtrace.ext import test +from ddtrace.internal.ci_visibility import CIVisibility as _CIVisibility +from ddtrace.internal.ci_visibility.constants import EVENT_TYPE as _EVENT_TYPE +from ddtrace.internal.ci_visibility.constants import ITR_UNSKIPPABLE_REASON +from ddtrace.internal.ci_visibility.constants import MODULE_ID as _MODULE_ID +from ddtrace.internal.ci_visibility.constants import MODULE_TYPE as _MODULE_TYPE +from ddtrace.internal.ci_visibility.constants import SESSION_ID as _SESSION_ID +from ddtrace.internal.ci_visibility.constants import SESSION_TYPE as _SESSION_TYPE +from ddtrace.internal.ci_visibility.constants import SKIPPED_BY_ITR_REASON +from ddtrace.internal.ci_visibility.constants import SUITE +from ddtrace.internal.ci_visibility.constants import SUITE_ID as _SUITE_ID +from ddtrace.internal.ci_visibility.constants import SUITE_TYPE as _SUITE_TYPE +from ddtrace.internal.ci_visibility.constants import TEST +from ddtrace.internal.ci_visibility.coverage import _module_has_dd_coverage_enabled +from ddtrace.internal.ci_visibility.coverage import _report_coverage_to_span +from ddtrace.internal.ci_visibility.coverage import _start_coverage +from ddtrace.internal.ci_visibility.coverage import _stop_coverage +from ddtrace.internal.ci_visibility.coverage import _switch_coverage_context +from ddtrace.internal.ci_visibility.utils import _add_pct_covered_to_span +from ddtrace.internal.ci_visibility.utils import _add_start_end_source_file_path_data_to_span +from ddtrace.internal.ci_visibility.utils import _generate_fully_qualified_module_name +from ddtrace.internal.ci_visibility.utils import _generate_fully_qualified_test_name +from ddtrace.internal.ci_visibility.utils import get_relative_or_absolute_path_for_path +from ddtrace.internal.ci_visibility.utils import take_over_logger_stream_handler +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.logger import get_logger + + +PATCH_ALL_HELP_MSG = "Call ddtrace.patch_all before running tests." 
+ +log = get_logger(__name__) + +_global_skipped_elements = 0 + + +def _is_pytest_8_or_later(): + if hasattr(pytest, "version_tuple"): + return pytest.version_tuple >= (8, 0, 0) + return False + + +def encode_test_parameter(parameter): + param_repr = repr(parameter) + # if the representation includes an id() we'll remove it + # because it isn't constant across executions + return re.sub(r" at 0[xX][0-9a-fA-F]+", "", param_repr) + + +def is_enabled(config): + """Check if the ddtrace plugin is enabled.""" + return (config.getoption("ddtrace") or config.getini("ddtrace")) and not config.getoption("no-ddtrace") + + +def _is_pytest_cov_enabled(config) -> bool: + if not config.pluginmanager.get_plugin("pytest_cov"): + return False + cov_option = config.getoption("--cov", default=False) + nocov_option = config.getoption("--no-cov", default=False) + if nocov_option is True: + return False + if type(cov_option) == list and cov_option == [True] and not nocov_option: + return True + return cov_option + + +def _extract_span(item): + """Extract span from `pytest.Item` instance.""" + return getattr(item, "_datadog_span", None) + + +def _store_span(item, span): + """Store span at `pytest.Item` instance.""" + item._datadog_span = span + + +def _extract_module_span(item): + """Extract span from `pytest.Item` instance.""" + return getattr(item, "_datadog_span_module", None) + + +def _extract_ancestor_module_span(item): + """Return the first ancestor module span found""" + while item: + module_span = _extract_module_span(item) or _extract_span(item) + if module_span is not None and module_span.name == "pytest.test_module": + return module_span + item = _get_parent(item) + + +def _extract_ancestor_suite_span(item): + """Return the first ancestor suite span found""" + while item: + suite_span = _extract_span(item) + if suite_span is not None and suite_span.name == "pytest.test_suite": + return suite_span + item = _get_parent(item) + + +def _store_module_span(item, span): + """Store span at `pytest.Item` instance.""" + item._datadog_span_module = span + + +def _mark_failed(item): + """Store test failed status at `pytest.Item` instance.""" + item_parent = _get_parent(item) + if item_parent: + _mark_failed(item_parent) + item._failed = True + + +def _check_failed(item): + """Extract test failed status from `pytest.Item` instance.""" + return getattr(item, "_failed", False) + + +def _mark_not_skipped(item): + """Mark test suite/module/session `pytest.Item` as not skipped.""" + item_parent = _get_parent(item) + if item_parent: + _mark_not_skipped(item_parent) + item._fully_skipped = False + + +def _mark_not_skipped(item): + """Mark test suite/module/session `pytest.Item` as not skipped.""" + + item_parent = _get_parent(item) + + if item_parent: + _mark_not_skipped(item_parent) + item._fully_skipped = False + + +def _get_parent(item): + """Fetches the nearest parent that is not a directory. + + This is introduced as a workaround for pytest 8.0's introduction pytest.Dir objects. + """ + if item is None or item.parent is None: + return None + + if _is_pytest_8_or_later(): + # In pytest 8.0, the parent of a Package can be another Package. In previous versions, the parent was always + # a session. 
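+        # For a Package item, walk up past any enclosing Packages (and Dirs) so the reported parent is the Session.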
+ if isinstance(item, pytest.Package): + while item.parent is not None and not isinstance(item.parent, pytest.Session): + item = item.parent + return item.parent + + while item.parent is not None and isinstance(item.parent, pytest.Dir): + item = item.parent + + return item.parent + + +def _mark_test_forced(test_item): + # type: (pytest.Test) -> None + test_span = _extract_span(test_item) + test_span.set_tag_str(test.ITR_FORCED_RUN, "true") + + suite_span = _extract_ancestor_suite_span(test_item) + suite_span.set_tag_str(test.ITR_FORCED_RUN, "true") + + module_span = _extract_ancestor_module_span(test_item) + module_span.set_tag_str(test.ITR_FORCED_RUN, "true") + + session_span = _extract_span(test_item.session) + session_span.set_tag_str(test.ITR_FORCED_RUN, "true") + + +def _mark_test_unskippable(test_item): + # type: (pytest.Test) -> None + test_span = _extract_span(test_item) + test_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + + suite_span = _extract_ancestor_suite_span(test_item) + suite_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + + module_span = _extract_ancestor_module_span(test_item) + module_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + + session_span = _extract_span(test_item.session) + session_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + + +def _check_fully_skipped(item): + """Check if test suite/module/session `pytest.Item` has `_fully_skipped` marker.""" + return getattr(item, "_fully_skipped", True) + + +def _mark_test_status(item, span): + """ + Given a `pytest.Item`, determine and set the test status of the corresponding span. + """ + item_parent = _get_parent(item) + + # If any child has failed, mark span as failed. + if _check_failed(item): + status = test.Status.FAIL.value + if item_parent: + _mark_failed(item_parent) + _mark_not_skipped(item_parent) + # If all children have been skipped, mark span as skipped. + elif _check_fully_skipped(item): + status = test.Status.SKIP.value + else: + status = test.Status.PASS.value + if item_parent: + _mark_not_skipped(item_parent) + span.set_tag_str(test.STATUS, status) + + +def _extract_reason(call): + if call.excinfo is not None: + return call.excinfo.value + + +def _get_pytest_command(config): + """Extract and re-create pytest session command from pytest config.""" + command = "pytest" + if getattr(config, "invocation_params", None): + command += " {}".format(" ".join(config.invocation_params.args)) + return command + + +def _get_module_path(item): + """Extract module path from a `pytest.Item` instance.""" + # type (pytest.Item) -> str + if not isinstance(item, (pytest.Package, pytest.Module)): + return None + + if _is_pytest_8_or_later() and isinstance(item, pytest.Package): + module_path = item.nodeid + + else: + module_path = item.nodeid.rpartition("/")[0] + + return module_path + + +def _is_test_unskippable(item): + return any( + [ + True + for marker in item.iter_markers(name="skipif") + if marker.args[0] is False + and "reason" in marker.kwargs + and marker.kwargs["reason"] is ITR_UNSKIPPABLE_REASON + ] + ) + + +def _module_is_package(pytest_package_item=None, pytest_module_item=None): + # Pytest 8+ module items have a pytest.Dir object as their parent instead of the session object + if _is_pytest_8_or_later(): + return isinstance(pytest_module_item.parent, pytest.Package) + + if pytest_package_item is None and pytest_module_item is not None: + return False + return True + + +def _start_test_module_span(item): + """ + Starts a test module span at the start of a new pytest test package. 
+ Note that ``item`` is a ``pytest.Item`` object referencing the test being run. + """ + pytest_module_item = _find_pytest_item(item, pytest.Module) + pytest_package_item = _find_pytest_item(pytest_module_item, pytest.Package) + + is_package = _module_is_package(pytest_package_item, pytest_module_item) + + if is_package: + span_target_item = pytest_package_item + else: + span_target_item = pytest_module_item + + test_session_span = _extract_span(item.session) + test_module_span = _CIVisibility._instance.tracer._start_span( + "pytest.test_module", + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + activate=True, + child_of=test_session_span, + ) + test_module_span.set_tag_str(COMPONENT, "pytest") + test_module_span.set_tag_str(SPAN_KIND, KIND) + test_module_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_module_span.set_tag_str(test.FRAMEWORK_VERSION, pytest.__version__) + test_module_span.set_tag_str(test.COMMAND, _get_pytest_command(item.config)) + test_module_span.set_tag_str(_EVENT_TYPE, _MODULE_TYPE) + if test_session_span: + test_module_span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + test_module_span.set_tag_str(_MODULE_ID, str(test_module_span.span_id)) + test_module_span.set_tag_str(test.MODULE, item.config.hook.pytest_ddtrace_get_item_module_name(item=item)) + test_module_span.set_tag_str(test.MODULE_PATH, _get_module_path(span_target_item)) + if is_package: + _store_span(span_target_item, test_module_span) + else: + _store_module_span(span_target_item, test_module_span) + + test_module_span.set_tag_str( + test.ITR_TEST_CODE_COVERAGE_ENABLED, + "true" if _CIVisibility._instance._collect_coverage_enabled else "false", + ) + + if _CIVisibility.test_skipping_enabled(): + test_module_span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "true") + test_module_span.set_tag( + test.ITR_TEST_SKIPPING_TYPE, SUITE if _CIVisibility._instance._suite_skipping_mode else TEST + ) + test_module_span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "false") + test_module_span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "false") + test_module_span.set_tag_str(test.ITR_FORCED_RUN, "false") + test_module_span.set_tag_str(test.ITR_UNSKIPPABLE, "false") + else: + test_module_span.set_tag(test.ITR_TEST_SKIPPING_ENABLED, "false") + + return test_module_span, is_package + + +def _start_test_suite_span(item, test_module_span, should_enable_coverage=False): + """ + Starts a test suite span at the start of a new pytest test module. 
+ """ + pytest_module_item = _find_pytest_item(item, pytest.Module) + test_session_span = _extract_span(pytest_module_item.session) + if test_module_span is None and isinstance(pytest_module_item.parent, pytest.Package): + test_module_span = _extract_span(pytest_module_item.parent) + parent_span = test_module_span + if parent_span is None: + parent_span = test_session_span + + test_suite_span = _CIVisibility._instance.tracer._start_span( + "pytest.test_suite", + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + activate=True, + child_of=parent_span, + ) + test_suite_span.set_tag_str(COMPONENT, "pytest") + test_suite_span.set_tag_str(SPAN_KIND, KIND) + test_suite_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_suite_span.set_tag_str(test.FRAMEWORK_VERSION, pytest.__version__) + test_suite_span.set_tag_str(test.COMMAND, _get_pytest_command(pytest_module_item.config)) + test_suite_span.set_tag_str(_EVENT_TYPE, _SUITE_TYPE) + if test_session_span: + test_suite_span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + test_suite_span.set_tag_str(_SUITE_ID, str(test_suite_span.span_id)) + test_module_path = "" + if test_module_span is not None: + test_suite_span.set_tag_str(_MODULE_ID, str(test_module_span.span_id)) + test_suite_span.set_tag_str(test.MODULE, test_module_span.get_tag(test.MODULE)) + test_module_path = test_module_span.get_tag(test.MODULE_PATH) + test_suite_span.set_tag_str(test.MODULE_PATH, test_module_path) + test_suite_name = item.config.hook.pytest_ddtrace_get_item_suite_name(item=item) + test_suite_span.set_tag_str(test.SUITE, test_suite_name) + _store_span(pytest_module_item, test_suite_span) + + if should_enable_coverage and _module_has_dd_coverage_enabled(pytest): + fqn_module = _generate_fully_qualified_module_name(test_module_path, test_suite_name) + _switch_coverage_context(pytest._dd_coverage, fqn_module) + return test_suite_span + + +def pytest_addoption(parser): + """Add ddtrace options.""" + group = parser.getgroup("ddtrace") + + group._addoption( + "--ddtrace", + action="store_true", + dest="ddtrace", + default=False, + help=DDTRACE_HELP_MSG, + ) + + group._addoption( + "--no-ddtrace", + action="store_true", + dest="no-ddtrace", + default=False, + help=NO_DDTRACE_HELP_MSG, + ) + + group._addoption( + "--ddtrace-patch-all", + action="store_true", + dest="ddtrace-patch-all", + default=False, + help=PATCH_ALL_HELP_MSG, + ) + + group._addoption( + "--ddtrace-include-class-name", + action="store_true", + dest="ddtrace-include-class-name", + default=False, + help=DDTRACE_INCLUDE_CLASS_HELP_MSG, + ) + + parser.addini("ddtrace", DDTRACE_HELP_MSG, type="bool") + parser.addini("no-ddtrace", DDTRACE_HELP_MSG, type="bool") + parser.addini("ddtrace-patch-all", PATCH_ALL_HELP_MSG, type="bool") + parser.addini("ddtrace-include-class-name", DDTRACE_INCLUDE_CLASS_HELP_MSG, type="bool") + + +def pytest_configure(config): + unpatch_unittest() + config.addinivalue_line("markers", "dd_tags(**kwargs): add tags to current span") + if is_enabled(config): + take_over_logger_stream_handler() + _CIVisibility.enable(config=ddtrace.config.pytest) + if _is_pytest_cov_enabled(config): + patch_coverage() + + +def pytest_sessionstart(session): + if _CIVisibility.enabled: + log.debug("CI Visibility enabled - starting test session") + global _global_skipped_elements + _global_skipped_elements = 0 + test_session_span = _CIVisibility._instance.tracer.trace( + "pytest.test_session", + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + ) + 
test_session_span.set_tag_str(COMPONENT, "pytest") + test_session_span.set_tag_str(SPAN_KIND, KIND) + test_session_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_session_span.set_tag_str(test.FRAMEWORK_VERSION, pytest.__version__) + test_session_span.set_tag_str(_EVENT_TYPE, _SESSION_TYPE) + test_session_span.set_tag_str(test.COMMAND, _get_pytest_command(session.config)) + test_session_span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + if _CIVisibility.test_skipping_enabled(): + test_session_span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "true") + test_session_span.set_tag( + test.ITR_TEST_SKIPPING_TYPE, SUITE if _CIVisibility._instance._suite_skipping_mode else TEST + ) + test_session_span.set_tag(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "false") + test_session_span.set_tag(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "false") + test_session_span.set_tag_str(test.ITR_FORCED_RUN, "false") + test_session_span.set_tag_str(test.ITR_UNSKIPPABLE, "false") + else: + test_session_span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "false") + test_session_span.set_tag_str( + test.ITR_TEST_CODE_COVERAGE_ENABLED, + "true" if _CIVisibility._instance._collect_coverage_enabled else "false", + ) + if _is_coverage_invoked_by_coverage_run(): + patch_coverage() + if _CIVisibility._instance._collect_coverage_enabled and not _module_has_dd_coverage_enabled( + pytest, silent_mode=True + ): + pytest._dd_coverage = _start_coverage(session.config.rootdir) + + _store_span(session, test_session_span) + + +def pytest_sessionfinish(session, exitstatus): + if _CIVisibility.enabled: + log.debug("CI Visibility enabled - finishing test session") + test_session_span = _extract_span(session) + if test_session_span is not None: + if _CIVisibility.test_skipping_enabled(): + test_session_span.set_metric(test.ITR_TEST_SKIPPING_COUNT, _global_skipped_elements) + _mark_test_status(session, test_session_span) + pytest_cov_status = _is_pytest_cov_enabled(session.config) + invoked_by_coverage_run_status = _is_coverage_invoked_by_coverage_run() + if _is_coverage_patched() and (pytest_cov_status or invoked_by_coverage_run_status): + if invoked_by_coverage_run_status and not pytest_cov_status: + run_coverage_report() + _add_pct_covered_to_span(_coverage_data, test_session_span) + unpatch_coverage() + test_session_span.finish() + _CIVisibility.disable() + + +@pytest.fixture(scope="function") +def ddspan(request): + """Return the :class:`ddtrace.span.Span` instance associated with the + current test when Datadog CI Visibility is enabled. + """ + if _CIVisibility.enabled: + return _extract_span(request.node) + + +@pytest.fixture(scope="session") +def ddtracer(): + """Return the :class:`ddtrace.tracer.Tracer` instance for Datadog CI + visibility if it is enabled, otherwise return the default Datadog tracer. + """ + if _CIVisibility.enabled: + return _CIVisibility._instance.tracer + return ddtrace.tracer + + +@pytest.fixture(scope="session", autouse=True) +def patch_all(request): + """Patch all available modules for Datadog tracing when ddtrace-patch-all + is specified in command or .ini. + """ + if request.config.getoption("ddtrace-patch-all") or request.config.getini("ddtrace-patch-all"): + ddtrace.patch_all() + + +def _find_pytest_item(item, pytest_item_type): + """ + Given a `pytest.Item`, traverse upwards until we find a specified `pytest.Package` or `pytest.Module` item, + or return None. 
+ """ + if item is None: + return None + if pytest_item_type not in [pytest.Package, pytest.Module]: + return None + parent = _get_parent(item) + while not isinstance(parent, pytest_item_type) and parent is not None: + parent = parent.parent + return parent + + +def _get_test_class_hierarchy(item): + """ + Given a `pytest.Item` function item, traverse upwards to collect and return a string listing the + test class hierarchy, or an empty string if there are no test classes. + """ + parent = _get_parent(item) + test_class_hierarchy = [] + while parent is not None: + if isinstance(parent, pytest.Class): + test_class_hierarchy.insert(0, parent.name) + parent = parent.parent + return ".".join(test_class_hierarchy) + + +def pytest_collection_modifyitems(session, config, items): + if _CIVisibility.test_skipping_enabled(): + skip = pytest.mark.skip(reason=SKIPPED_BY_ITR_REASON) + + items_to_skip_by_module = {} + current_suite_has_unskippable_test = False + + for item in items: + test_is_unskippable = _is_test_unskippable(item) + + item_name = item.config.hook.pytest_ddtrace_get_item_test_name(item=item) + + if test_is_unskippable: + log.debug( + "Test %s in module %s (file: %s ) is marked as unskippable", + item_name, + item.module.__name__, + item.module.__file__, + ) + item._dd_itr_test_unskippable = True + + # Due to suite skipping mode, defer adding ITR skip marker until unskippable status of the suite has been + # fully resolved because Pytest markers cannot be dynamically removed + if _CIVisibility._instance._suite_skipping_mode: + if item.module not in items_to_skip_by_module: + items_to_skip_by_module[item.module] = [] + current_suite_has_unskippable_test = False + + if test_is_unskippable and not current_suite_has_unskippable_test: + current_suite_has_unskippable_test = True + # Retroactively mark collected tests as forced: + for item_to_skip in items_to_skip_by_module[item.module]: + item_to_skip._dd_itr_forced = True + items_to_skip_by_module[item.module] = [] + + if _CIVisibility._instance._should_skip_path(str(get_fslocation_from_item(item)[0]), item_name): + if test_is_unskippable or ( + _CIVisibility._instance._suite_skipping_mode and current_suite_has_unskippable_test + ): + item._dd_itr_forced = True + else: + items_to_skip_by_module.setdefault(item.module, []).append(item) + + # Mark remaining tests that should be skipped + for items_to_skip in items_to_skip_by_module.values(): + for item_to_skip in items_to_skip: + item_to_skip.add_marker(skip) + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_protocol(item, nextitem): + if not _CIVisibility.enabled: + yield + return + + is_skipped = bool( + item.get_closest_marker("skip") + or any([marker for marker in item.iter_markers(name="skipif") if marker.args[0] is True]) + ) + is_skipped_by_itr = bool( + is_skipped + and any( + [ + marker + for marker in item.iter_markers(name="skip") + if "reason" in marker.kwargs and marker.kwargs["reason"] == SKIPPED_BY_ITR_REASON + ] + ) + ) + + test_session_span = _extract_span(item.session) + + pytest_module_item = _find_pytest_item(item, pytest.Module) + pytest_package_item = _find_pytest_item(pytest_module_item, pytest.Package) + + module_is_package = True + + test_module_span = _extract_span(pytest_package_item) + if not test_module_span: + test_module_span = _extract_module_span(pytest_module_item) + if test_module_span: + module_is_package = False + + if test_module_span is None: + test_module_span, module_is_package = _start_test_module_span(item) + + if 
_CIVisibility.test_skipping_enabled() and test_module_span.get_metric(test.ITR_TEST_SKIPPING_COUNT) is None: + test_module_span.set_tag( + test.ITR_TEST_SKIPPING_TYPE, SUITE if _CIVisibility._instance._suite_skipping_mode else TEST + ) + test_module_span.set_metric(test.ITR_TEST_SKIPPING_COUNT, 0) + + test_suite_span = _extract_ancestor_suite_span(item) + if pytest_module_item is not None and test_suite_span is None: + # Start coverage for the test suite if coverage is enabled + # In ITR suite skipping mode, all tests in a skipped suite should be marked + # as skipped + test_suite_span = _start_test_suite_span( + item, + test_module_span, + should_enable_coverage=( + _CIVisibility._instance._suite_skipping_mode + and _CIVisibility._instance._collect_coverage_enabled + and not is_skipped_by_itr + ), + ) + + if is_skipped_by_itr: + test_module_span._metrics[test.ITR_TEST_SKIPPING_COUNT] += 1 + global _global_skipped_elements + _global_skipped_elements += 1 + test_module_span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "true") + test_module_span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "true") + + test_session_span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "true") + test_session_span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "true") + + with _CIVisibility._instance.tracer._start_span( + ddtrace.config.pytest.operation_name, + service=_CIVisibility._instance._service, + resource=item.nodeid, + span_type=SpanTypes.TEST, + activate=True, + ) as span: + span.set_tag_str(COMPONENT, "pytest") + span.set_tag_str(SPAN_KIND, KIND) + span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + span.set_tag_str(_EVENT_TYPE, SpanTypes.TEST) + test_name = item.config.hook.pytest_ddtrace_get_item_test_name(item=item) + test_module_path = test_module_span.get_tag(test.MODULE_PATH) + span.set_tag_str(test.NAME, test_name) + span.set_tag_str(test.COMMAND, _get_pytest_command(item.config)) + if test_session_span: + span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + + span.set_tag_str(_MODULE_ID, str(test_module_span.span_id)) + span.set_tag_str(test.MODULE, test_module_span.get_tag(test.MODULE)) + span.set_tag_str(test.MODULE_PATH, test_module_path) + + span.set_tag_str(_SUITE_ID, str(test_suite_span.span_id)) + test_class_hierarchy = _get_test_class_hierarchy(item) + if test_class_hierarchy: + span.set_tag_str(test.CLASS_HIERARCHY, test_class_hierarchy) + if hasattr(item, "dtest") and isinstance(item.dtest, DocTest): + test_suite_name = "{}.py".format(item.dtest.globs["__name__"]) + span.set_tag_str(test.SUITE, test_suite_name) + else: + test_suite_name = test_suite_span.get_tag(test.SUITE) + span.set_tag_str(test.SUITE, test_suite_name) + + span.set_tag_str(test.TYPE, SpanTypes.TEST) + span.set_tag_str(test.FRAMEWORK_VERSION, pytest.__version__) + + if item.location and item.location[0]: + _CIVisibility.set_codeowners_of(item.location[0], span=span) + if hasattr(item, "_obj"): + test_method_object = item._obj + _add_start_end_source_file_path_data_to_span(span, test_method_object, test_name, item.config.rootdir) + + # We preemptively set FAIL as a status, because if pytest_runtest_makereport is not called + # (where the actual test status is set), it means there was a pytest error + span.set_tag_str(test.STATUS, test.Status.FAIL.value) + + # Parameterized test cases will have a `callspec` attribute attached to the pytest Item object. 
+ # Pytest docs: https://docs.pytest.org/en/6.2.x/reference.html#pytest.Function + if getattr(item, "callspec", None): + parameters = {"arguments": {}, "metadata": {}} # type: Dict[str, Dict[str, str]] + for param_name, param_val in item.callspec.params.items(): + try: + parameters["arguments"][param_name] = encode_test_parameter(param_val) + except Exception: + parameters["arguments"][param_name] = "Could not encode" + log.warning("Failed to encode %r", param_name, exc_info=True) + span.set_tag_str(test.PARAMETERS, json.dumps(parameters)) + + markers = [marker.kwargs for marker in item.iter_markers(name="dd_tags")] + for tags in markers: + span.set_tags(tags) + _store_span(item, span) + + # Items are marked ITR-unskippable regardless of other unrelateed skipping status + if getattr(item, "_dd_itr_test_unskippable", False) or getattr(item, "_dd_itr_suite_unskippable", False): + _mark_test_unskippable(item) + if not is_skipped: + if getattr(item, "_dd_itr_forced", False): + _mark_test_forced(item) + + coverage_per_test = ( + not _CIVisibility._instance._suite_skipping_mode + and _CIVisibility._instance._collect_coverage_enabled + and not is_skipped + ) + root_directory = str(item.config.rootdir) + if coverage_per_test and _module_has_dd_coverage_enabled(pytest): + fqn_test = _generate_fully_qualified_test_name(test_module_path, test_suite_name, test_name) + _switch_coverage_context(pytest._dd_coverage, fqn_test) + # Run the actual test + yield + + # Finish coverage for the test suite if coverage is enabled + if coverage_per_test and _module_has_dd_coverage_enabled(pytest): + _report_coverage_to_span(pytest._dd_coverage, span, root_directory) + + nextitem_pytest_module_item = _find_pytest_item(nextitem, pytest.Module) + if nextitem is None or nextitem_pytest_module_item != pytest_module_item and not test_suite_span.finished: + _mark_test_status(pytest_module_item, test_suite_span) + # Finish coverage for the test suite if coverage is enabled + # In ITR suite skipping mode, all tests in a skipped suite should be marked + # as skipped + if ( + _CIVisibility._instance._suite_skipping_mode + and _CIVisibility._instance._collect_coverage_enabled + and not is_skipped_by_itr + and _module_has_dd_coverage_enabled(pytest) + ): + _report_coverage_to_span(pytest._dd_coverage, test_suite_span, root_directory) + test_suite_span.finish() + + if not module_is_package: + test_module_span.set_tag_str(test.STATUS, test_suite_span.get_tag(test.STATUS)) + test_module_span.finish() + else: + nextitem_pytest_package_item = _find_pytest_item(nextitem, pytest.Package) + if ( + nextitem is None + or nextitem_pytest_package_item != pytest_package_item + and not test_module_span.finished + ): + _mark_test_status(pytest_package_item, test_module_span) + test_module_span.finish() + + if ( + nextitem is None + and _CIVisibility._instance._collect_coverage_enabled + and _module_has_dd_coverage_enabled(pytest) + ): + _stop_coverage(pytest) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + """Store outcome for tracing.""" + outcome = yield + + if not _CIVisibility.enabled: + return + + span = _extract_span(item) + if span is None: + return + + is_setup_or_teardown = call.when == "setup" or call.when == "teardown" + has_exception = call.excinfo is not None + + if is_setup_or_teardown and not has_exception: + return + + result = outcome.get_result() + xfail = hasattr(result, "wasxfail") or "xfail" in result.keywords + has_skip_keyword = any(x in result.keywords for x in ["skip", "skipif", 
"skipped"]) + + # If run with --runxfail flag, tests behave as if they were not marked with xfail, + # that's why no XFAIL_REASON or test.RESULT tags will be added. + if result.skipped: + if xfail and not has_skip_keyword: + # XFail tests that fail are recorded skipped by pytest, should be passed instead + span.set_tag_str(test.STATUS, test.Status.PASS.value) + _mark_not_skipped(_get_parent(item)) + if not item.config.option.runxfail: + span.set_tag_str(test.RESULT, test.Status.XFAIL.value) + span.set_tag_str(XFAIL_REASON, getattr(result, "wasxfail", "XFail")) + else: + span.set_tag_str(test.STATUS, test.Status.SKIP.value) + reason = _extract_reason(call) + if reason is not None: + span.set_tag_str(test.SKIP_REASON, str(reason)) + if str(reason) == SKIPPED_BY_ITR_REASON: + if _CIVisibility._instance._suite_skipping_mode: + suite_span = _extract_ancestor_suite_span(item) + if suite_span is not None: + suite_span.set_tag_str(test.ITR_SKIPPED, "true") + span.set_tag_str(test.ITR_SKIPPED, "true") + elif result.passed: + _mark_not_skipped(_get_parent(item)) + span.set_tag_str(test.STATUS, test.Status.PASS.value) + if xfail and not has_skip_keyword and not item.config.option.runxfail: + # XPass (strict=False) are recorded passed by pytest + span.set_tag_str(XFAIL_REASON, getattr(result, "wasxfail", "XFail")) + span.set_tag_str(test.RESULT, test.Status.XPASS.value) + else: + # Store failure in test suite `pytest.Item` to propagate to test suite spans + _mark_failed(_get_parent(item)) + _mark_not_skipped(_get_parent(item)) + span.set_tag_str(test.STATUS, test.Status.FAIL.value) + if xfail and not has_skip_keyword and not item.config.option.runxfail: + # XPass (strict=True) are recorded failed by pytest, longrepr contains reason + span.set_tag_str(XFAIL_REASON, getattr(result, "longrepr", "XFail")) + span.set_tag_str(test.RESULT, test.Status.XPASS.value) + if call.excinfo: + span.set_exc_info(call.excinfo.type, call.excinfo.value, call.excinfo.tb) + + +@pytest.hookimpl +def pytest_addhooks(pluginmanager): + from ddtrace.contrib.pytest import newhooks + + pluginmanager.add_hookspecs(newhooks) + + +@pytest.hookimpl(trylast=True) +def pytest_ddtrace_get_item_module_name(item): + pytest_module_item = _find_pytest_item(item, pytest.Module) + pytest_package_item = _find_pytest_item(pytest_module_item, pytest.Package) + + if _module_is_package(pytest_package_item, pytest_module_item): + if _is_pytest_8_or_later(): + # pytest 8.0.0 no longer treats Packages as Module/File, so we replicate legacy behavior by concatenating + # parent package names in reverse until we hit a non-Package-type item + # https://github.com/pytest-dev/pytest/issues/11137 + package_names = [] + current_package = pytest_package_item + while isinstance(current_package, pytest.Package): + package_names.append(str(current_package.name)) + current_package = current_package.parent + + return ".".join(package_names[::-1]) + + return pytest_package_item.module.__name__ + + return pytest_module_item.nodeid.rpartition("/")[0].replace("/", ".") + + +@pytest.hookimpl(trylast=True) +def pytest_ddtrace_get_item_suite_name(item): + """ + Extract suite name from a `pytest.Item` instance. + If the module path doesn't exist, the suite path will be reported in full. 
+ """ + pytest_module_item = _find_pytest_item(item, pytest.Module) + test_module_path = _get_module_path(pytest_module_item) + if test_module_path: + if not pytest_module_item.nodeid.startswith(test_module_path): + log.warning("Suite path is not under module path: '%s' '%s'", pytest_module_item.nodeid, test_module_path) + return get_relative_or_absolute_path_for_path(pytest_module_item.nodeid, test_module_path) + return pytest_module_item.nodeid + + +@pytest.hookimpl(trylast=True) +def pytest_ddtrace_get_item_test_name(item): + """Extract name from item, prepending class if desired""" + if hasattr(item, "cls") and item.cls: + if item.config.getoption("ddtrace-include-class-name") or item.config.getini("ddtrace-include-class-name"): + return "%s.%s" % (item.cls.__name__, item.name) + return item.name diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/__init__.py new file mode 100644 index 0000000..b1cc670 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/__init__.py @@ -0,0 +1,47 @@ +""" +The pytest-bdd integration traces executions of scenarios and steps. + +Enabling +~~~~~~~~ + +Please follow the instructions for enabling `pytest` integration. + +.. note:: + The ddtrace.pytest_bdd plugin for pytest-bdd has the side effect of importing + the ddtrace package and starting a global tracer. + + If this is causing issues for your pytest-bdd runs where traced execution of + tests is not enabled, you can deactivate the plugin:: + + [pytest] + addopts = -p no:ddtrace.pytest_bdd + + See the `pytest documentation + `_ + for more details. + +""" + +from ddtrace import config + + +# pytest-bdd default settings +config._add( + "pytest_bdd", + dict( + _default_service="pytest_bdd", + ), +) + + +def get_version(): + # type: () -> str + try: + import importlib.metadata as importlib_metadata + except ImportError: + import importlib_metadata # type: ignore[no-redef] + + return str(importlib_metadata.version("pytest-bdd")) + + +__all__ = ["get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/constants.py new file mode 100644 index 0000000..2dd377f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/constants.py @@ -0,0 +1,2 @@ +FRAMEWORK = "pytest_bdd" +STEP_KIND = "pytest_bdd.step" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/plugin.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/plugin.py new file mode 100644 index 0000000..99459d2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_bdd/plugin.py @@ -0,0 +1,139 @@ +import json +import os +import sys + +import pytest + +from ddtrace.contrib.pytest.plugin import _extract_span as _extract_feature_span +from ddtrace.contrib.pytest_bdd import get_version +from ddtrace.contrib.pytest_bdd.constants import FRAMEWORK +from ddtrace.contrib.pytest_bdd.constants import STEP_KIND +from ddtrace.ext import test +from ddtrace.internal.ci_visibility import CIVisibility as _CIVisibility +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def _extract_span(item): + """Extract span from `step_func`.""" + return getattr(item, "_datadog_span", None) + + +def _store_span(item, span): + """Store span at `step_func`.""" + item._datadog_span = span + + +def _extract_step_func_args(step, step_func, step_func_args): + 
"""Backwards-compatible get arguments from step_func or step_func_args""" + if not (hasattr(step_func, "parser") or hasattr(step_func, "_pytest_bdd_parsers")): + return step_func_args + + # store parsed step arguments + try: + parsers = [step_func.parser] + except AttributeError: + try: + # pytest-bdd >= 6.0.0 + parsers = step_func._pytest_bdd_parsers + except AttributeError: + parsers = [] + for parser in parsers: + if parser is not None: + converters = getattr(step_func, "converters", {}) + parameters = {} + try: + for arg, value in parser.parse_arguments(step.name).items(): + try: + if arg in converters: + value = converters[arg](value) + except Exception: + log.debug("argument conversion failed.") + parameters[arg] = value + except Exception: + log.debug("argument parsing failed.") + + return parameters or None + + +def _get_step_func_args_json(step, step_func, step_func_args): + """Get step function args as JSON, catching serialization errors""" + try: + extracted_step_func_args = _extract_step_func_args(step, step_func, step_func_args) + if extracted_step_func_args: + return json.dumps(extracted_step_func_args) + return None + except TypeError as err: + log.debug("Could not serialize arguments", exc_info=True) + return json.dumps({"error_serializing_args": str(err)}) + + +def pytest_configure(config): + if config.pluginmanager.hasplugin("pytest-bdd"): + config.pluginmanager.register(_PytestBddPlugin(), "_datadog-pytest-bdd") + + +class _PytestBddPlugin: + def __init__(self): + self.framework_version = get_version() + + @staticmethod + @pytest.hookimpl(tryfirst=True) + def pytest_bdd_before_scenario(request, feature, scenario): + if _CIVisibility.enabled: + span = _extract_feature_span(request.node) + if span is not None: + location = os.path.relpath(scenario.feature.filename, str(request.config.rootdir)) + span.set_tag(test.NAME, scenario.name) + span.set_tag(test.SUITE, location) # override test suite name with .feature location + + _CIVisibility.set_codeowners_of(location, span=span) + + @pytest.hookimpl(tryfirst=True) + def pytest_bdd_before_step(self, request, feature, scenario, step, step_func): + if _CIVisibility.enabled: + feature_span = _extract_feature_span(request.node) + span = _CIVisibility._instance.tracer.start_span( + step.type, + resource=step.name, + span_type=STEP_KIND, + child_of=feature_span, + activate=True, + ) + span.set_tag_str("component", "pytest_bdd") + + span.set_tag(test.FRAMEWORK, FRAMEWORK) + span.set_tag(test.FRAMEWORK_VERSION, self.framework_version) + + location = os.path.relpath(step_func.__code__.co_filename, str(request.config.rootdir)) + span.set_tag(test.FILE, location) + _CIVisibility.set_codeowners_of(location, span=span) + + _store_span(step_func, span) + + @staticmethod + @pytest.hookimpl(trylast=True) + def pytest_bdd_after_step(request, feature, scenario, step, step_func, step_func_args): + span = _extract_span(step_func) + if span is not None: + step_func_args_json = _get_step_func_args_json(step, step_func, step_func_args) + if step_func_args: + span.set_tag(test.PARAMETERS, step_func_args_json) + span.finish() + + @staticmethod + def pytest_bdd_step_error(request, feature, scenario, step, step_func, step_func_args, exception): + span = _extract_span(step_func) + if span is not None: + if hasattr(exception, "__traceback__"): + tb = exception.__traceback__ + else: + # PY2 compatibility workaround + _, _, tb = sys.exc_info() + step_func_args_json = _get_step_func_args_json(step, step_func, step_func_args) + if step_func_args: + 
span.set_tag(test.PARAMETERS, step_func_args_json) + span.set_exc_info(type(exception), exception, tb) + span.finish() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/constants.py new file mode 100644 index 0000000..9742085 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/constants.py @@ -0,0 +1,58 @@ +BENCHMARK_INFO = "benchmark.duration.info" +BENCHMARK_MEAN = "benchmark.duration.mean" +BENCHMARK_RUN = "benchmark.duration.runs" + +STATISTICS_HD15IQR = "benchmark.duration.statistics.hd15iqr" +STATISTICS_IQR = "benchmark.duration.statistics.iqr" +STATISTICS_IQR_OUTLIERS = "benchmark.duration.statistics.iqr_outliers" +STATISTICS_LD15IQR = "benchmark.duration.statistics.ld15iqr" +STATISTICS_MAX = "benchmark.duration.statistics.max" +STATISTICS_MEAN = "benchmark.duration.statistics.mean" +STATISTICS_MEDIAN = "benchmark.duration.statistics.median" +STATISTICS_MIN = "benchmark.duration.statistics.min" +STATISTICS_N = "benchmark.duration.statistics.n" +STATISTICS_OPS = "benchmark.duration.statistics.ops" +STATISTICS_OUTLIERS = "benchmark.duration.statistics.outliers" +STATISTICS_Q1 = "benchmark.duration.statistics.q1" +STATISTICS_Q3 = "benchmark.duration.statistics.q3" +STATISTICS_STDDEV = "benchmark.duration.statistics.std_dev" +STATISTICS_STDDEV_OUTLIERS = "benchmark.duration.statistics.std_dev_outliers" +STATISTICS_TOTAL = "benchmark.duration.statistics.total" + +PLUGIN_HD15IQR = "hd15iqr" +PLUGIN_IQR = "iqr" +PLUGIN_IQR_OUTLIERS = "iqr_outliers" +PLUGIN_LD15IQR = "ld15iqr" +PLUGIN_MAX = "max" +PLUGIN_MEAN = "mean" +PLUGIN_MEDIAN = "median" +PLUGIN_MIN = "min" +PLUGIN_OPS = "ops" +PLUGIN_OUTLIERS = "outliers" +PLUGIN_Q1 = "q1" +PLUGIN_Q3 = "q3" +PLUGIN_ROUNDS = "rounds" +PLUGIN_STDDEV = "stddev" +PLUGIN_STDDEV_OUTLIERS = "stddev_outliers" +PLUGIN_TOTAL = "total" + +PLUGIN_METRICS = { + BENCHMARK_MEAN: PLUGIN_MEAN, + BENCHMARK_RUN: PLUGIN_ROUNDS, + STATISTICS_HD15IQR: PLUGIN_HD15IQR, + STATISTICS_IQR: PLUGIN_IQR, + STATISTICS_IQR_OUTLIERS: PLUGIN_IQR_OUTLIERS, + STATISTICS_LD15IQR: PLUGIN_LD15IQR, + STATISTICS_MAX: PLUGIN_MAX, + STATISTICS_MEAN: PLUGIN_MEAN, + STATISTICS_MEDIAN: PLUGIN_MEDIAN, + STATISTICS_MIN: PLUGIN_MIN, + STATISTICS_OPS: PLUGIN_OPS, + STATISTICS_OUTLIERS: PLUGIN_OUTLIERS, + STATISTICS_Q1: PLUGIN_Q1, + STATISTICS_Q3: PLUGIN_Q3, + STATISTICS_N: PLUGIN_ROUNDS, + STATISTICS_STDDEV: PLUGIN_STDDEV, + STATISTICS_STDDEV_OUTLIERS: PLUGIN_STDDEV_OUTLIERS, + STATISTICS_TOTAL: PLUGIN_TOTAL, +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/plugin.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/plugin.py new file mode 100644 index 0000000..381cc19 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/pytest_benchmark/plugin.py @@ -0,0 +1,33 @@ +import pytest + +from ddtrace.contrib.pytest.plugin import _extract_span +from ddtrace.contrib.pytest_benchmark.constants import BENCHMARK_INFO +from ddtrace.contrib.pytest_benchmark.constants import PLUGIN_METRICS +from ddtrace.contrib.pytest_benchmark.constants import PLUGIN_OUTLIERS +from ddtrace.ext.test import TEST_TYPE + + +def pytest_configure(config): + if config.pluginmanager.hasplugin("benchmark"): + 
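+        # Register the benchmark sub-plugin only when pytest-benchmark is installed and active for this run.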
config.pluginmanager.register(_PytestBenchmarkPlugin(), "_datadog-pytest-benchmark") + + +class _PytestBenchmarkPlugin: + @pytest.hookimpl() + def pytest_runtest_makereport(self, item, call): + fixture = hasattr(item, "funcargs") and item.funcargs.get("benchmark") + if fixture and fixture.stats: + stat_object = fixture.stats.stats + span = _extract_span(item) + + if span is None: + return + + span.set_tag_str(TEST_TYPE, "benchmark") + span.set_tag_str(BENCHMARK_INFO, "Time") + for span_path, tag in PLUGIN_METRICS.items(): + if hasattr(stat_object, tag): + if tag == PLUGIN_OUTLIERS: + span.set_tag_str(span_path, getattr(stat_object, tag)) + continue + span.set_tag(span_path, getattr(stat_object, tag)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/__init__.py new file mode 100644 index 0000000..9a29130 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/__init__.py @@ -0,0 +1,80 @@ +""" +The redis integration traces redis requests. + + +Enabling +~~~~~~~~ + +The redis integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(redis=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.redis["service"] + + The service name reported by default for redis traces. + + This option can also be set with the ``DD_REDIS_SERVICE`` environment + variable. + + Default: ``"redis"`` + + +.. py:data:: ddtrace.config.redis["cmd_max_length"] + + Max allowable size for the redis command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_REDIS_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + + +.. py:data:: ddtrace.config.redis["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_REDIS_RESOURCE_ONLY_COMMAND`` environment + variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular redis instances use the :class:`Pin ` API:: + + import redis + from ddtrace import Pin + + client = redis.StrictRedis(host="localhost", port=6379) + + # Override service name for this instance + Pin.override(client, service="my-custom-queue") + + # Traces reported for this client will now have "my-custom-queue" + # as the service name. 
+ client.get("my-key") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["redis", "redis.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/asyncio_patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/asyncio_patch.py new file mode 100644 index 0000000..f444fef --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/asyncio_patch.py @@ -0,0 +1,40 @@ +from ddtrace import config + +from ...internal.utils.formats import stringify_cache_args +from ...pin import Pin +from ..trace_utils_redis import _run_redis_command_async +from ..trace_utils_redis import _trace_redis_cmd +from ..trace_utils_redis import _trace_redis_execute_async_cluster_pipeline +from ..trace_utils_redis import _trace_redis_execute_pipeline + + +# +# tracing async functions +# +async def traced_async_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + with _trace_redis_cmd(pin, config.redis, instance, args) as span: + return await _run_redis_command_async(span=span, func=func, args=args, kwargs=kwargs) + + +async def traced_async_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c, cmd_max_len=config.redis.cmd_max_length) for c, _ in instance.command_stack] + with _trace_redis_execute_pipeline(pin, config.redis, cmds, instance): + return await func(*args, **kwargs) + + +async def traced_async_execute_cluster_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c.args, cmd_max_len=config.redis.cmd_max_length) for c in instance._command_stack] + with _trace_redis_execute_async_cluster_pipeline(pin, config.redis, cmds, instance): + return await func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/patch.py new file mode 100644 index 0000000..c4bf0f4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/redis/patch.py @@ -0,0 +1,164 @@ +import os + +import redis + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import CMD_MAX_LEN +from ...internal.utils.formats import asbool +from ...internal.utils.formats import stringify_cache_args +from ...pin import Pin +from ..trace_utils import unwrap +from ..trace_utils_redis import _run_redis_command +from ..trace_utils_redis import _trace_redis_cmd +from ..trace_utils_redis import _trace_redis_execute_pipeline + + +config._add( + "redis", + { + "_default_service": schematize_service_name("redis"), + "cmd_max_length": int(os.getenv("DD_REDIS_CMD_MAX_LENGTH", CMD_MAX_LEN)), + "resource_only_command": asbool(os.getenv("DD_REDIS_RESOURCE_ONLY_COMMAND", True)), + }, +) + + +def get_version(): + # type: () -> str + return getattr(redis, "__version__", "") + + +def patch(): + """Patch the instrumented methods + + This duplicated doesn't look nice. The nicer alternative is to use an ObjectProxy on top + of Redis and StrictRedis. 
However, it means that any "import redis.Redis" won't be instrumented. + """ + if getattr(redis, "_datadog_patch", False): + return + redis._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + if redis.VERSION < (3, 0, 0): + _w("redis", "StrictRedis.execute_command", traced_execute_command(config.redis)) + _w("redis", "StrictRedis.pipeline", traced_pipeline) + _w("redis", "Redis.pipeline", traced_pipeline) + _w("redis.client", "BasePipeline.execute", traced_execute_pipeline(config.redis, False)) + _w("redis.client", "BasePipeline.immediate_execute_command", traced_execute_command(config.redis)) + else: + _w("redis", "Redis.execute_command", traced_execute_command(config.redis)) + _w("redis", "Redis.pipeline", traced_pipeline) + _w("redis.client", "Pipeline.execute", traced_execute_pipeline(config.redis, False)) + _w("redis.client", "Pipeline.immediate_execute_command", traced_execute_command(config.redis)) + if redis.VERSION >= (4, 1): + # Redis v4.1 introduced support for redis clusters and rediscluster package was deprecated. + # https://github.com/redis/redis-py/commit/9db1eec71b443b8e7e74ff503bae651dc6edf411 + _w("redis.cluster", "RedisCluster.execute_command", traced_execute_command(config.redis)) + _w("redis.cluster", "RedisCluster.pipeline", traced_pipeline) + _w("redis.cluster", "ClusterPipeline.execute", traced_execute_pipeline(config.redis, True)) + Pin(service=None).onto(redis.cluster.RedisCluster) + # Avoid mypy invalid syntax errors when parsing Python 2 files + if redis.VERSION >= (4, 2, 0): + from .asyncio_patch import traced_async_execute_command + from .asyncio_patch import traced_async_execute_pipeline + + _w("redis.asyncio.client", "Redis.execute_command", traced_async_execute_command) + _w("redis.asyncio.client", "Redis.pipeline", traced_pipeline) + _w("redis.asyncio.client", "Pipeline.execute", traced_async_execute_pipeline) + _w("redis.asyncio.client", "Pipeline.immediate_execute_command", traced_async_execute_command) + Pin(service=None).onto(redis.asyncio.Redis) + + if redis.VERSION >= (4, 3, 0): + from .asyncio_patch import traced_async_execute_command + + _w("redis.asyncio.cluster", "RedisCluster.execute_command", traced_async_execute_command) + + if redis.VERSION >= (4, 3, 2): + from .asyncio_patch import traced_async_execute_cluster_pipeline + + _w("redis.asyncio.cluster", "RedisCluster.pipeline", traced_pipeline) + _w("redis.asyncio.cluster", "ClusterPipeline.execute", traced_async_execute_cluster_pipeline) + + Pin(service=None).onto(redis.asyncio.RedisCluster) + + Pin(service=None).onto(redis.StrictRedis) + + +def unpatch(): + if getattr(redis, "_datadog_patch", False): + redis._datadog_patch = False + + if redis.VERSION < (3, 0, 0): + unwrap(redis.StrictRedis, "execute_command") + unwrap(redis.StrictRedis, "pipeline") + unwrap(redis.Redis, "pipeline") + unwrap(redis.client.BasePipeline, "execute") + unwrap(redis.client.BasePipeline, "immediate_execute_command") + else: + unwrap(redis.Redis, "execute_command") + unwrap(redis.Redis, "pipeline") + unwrap(redis.client.Pipeline, "execute") + unwrap(redis.client.Pipeline, "immediate_execute_command") + if redis.VERSION >= (4, 1, 0): + unwrap(redis.cluster.RedisCluster, "execute_command") + unwrap(redis.cluster.RedisCluster, "pipeline") + unwrap(redis.cluster.ClusterPipeline, "execute") + if redis.VERSION >= (4, 2, 0): + unwrap(redis.asyncio.client.Redis, "execute_command") + unwrap(redis.asyncio.client.Redis, "pipeline") + unwrap(redis.asyncio.client.Pipeline, "execute") + 
unwrap(redis.asyncio.client.Pipeline, "immediate_execute_command") + if redis.VERSION >= (4, 3, 0): + unwrap(redis.asyncio.cluster.RedisCluster, "execute_command") + if redis.VERSION >= (4, 3, 2): + unwrap(redis.asyncio.cluster.RedisCluster, "pipeline") + unwrap(redis.asyncio.cluster.ClusterPipeline, "execute") + + +# +# tracing functions +# +def traced_execute_command(integration_config): + def _traced_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + with _trace_redis_cmd(pin, integration_config, instance, args) as span: + return _run_redis_command(span=span, func=func, args=args, kwargs=kwargs) + + return _traced_execute_command + + +def traced_pipeline(func, instance, args, kwargs): + pipeline = func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +def traced_execute_pipeline(integration_config, is_cluster=False): + def _traced_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + if is_cluster: + cmds = [ + stringify_cache_args(c.args, cmd_max_len=integration_config.cmd_max_length) + for c in instance.command_stack + ] + else: + cmds = [ + stringify_cache_args(c, cmd_max_len=integration_config.cmd_max_length) + for c, _ in instance.command_stack + ] + with _trace_redis_execute_pipeline(pin, integration_config, cmds, instance, is_cluster): + return func(*args, **kwargs) + + return _traced_execute_pipeline diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/__init__.py new file mode 100644 index 0000000..2e7ff05 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/__init__.py @@ -0,0 +1,61 @@ +"""Instrument rediscluster to report Redis Cluster queries. + +``import ddtrace.auto`` will automatically patch your Redis Cluster client to make it work. +:: + + from ddtrace import Pin, patch + import rediscluster + + # If not patched yet, you can patch redis specifically + patch(rediscluster=True) + + # This will report a span with the default settings + client = rediscluster.StrictRedisCluster(startup_nodes=[{'host':'localhost', 'port':'7000'}]) + client.get('my-key') + + # Use a pin to specify metadata related to this client + Pin.override(client, service='redis-queue') + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.rediscluster["service"] + The service name reported by default for rediscluster spans + + The option can also be set with the ``DD_REDISCLUSTER_SERVICE`` environment variable + + Default: ``'rediscluster'`` + + +.. py:data:: ddtrace.config.rediscluster["cmd_max_length"] + + Max allowable size for the rediscluster command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_REDISCLUSTER_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + +.. py:data:: ddtrace.config.aredis["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_REDIS_RESOURCE_ONLY_COMMAND`` environment + variable. 
+ + Default: ``True`` +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["rediscluster", "rediscluster.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/patch.py new file mode 100644 index 0000000..0f52160 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rediscluster/patch.py @@ -0,0 +1,113 @@ +import os + +# 3p +import rediscluster + +# project +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib.redis.patch import traced_execute_command +from ddtrace.contrib.redis.patch import traced_pipeline +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.ext import redis as redisx +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_cache_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils.formats import CMD_MAX_LEN +from ddtrace.internal.utils.formats import stringify_cache_args +from ddtrace.internal.utils.wrappers import unwrap +from ddtrace.pin import Pin +from ddtrace.vendor import wrapt + +from ...internal.utils.formats import asbool +from .. import trace_utils + + +# DEV: In `2.0.0` `__version__` is a string and `VERSION` is a tuple, +# but in `1.x.x` `__version__` is a tuple annd `VERSION` does not exist +REDISCLUSTER_VERSION = getattr(rediscluster, "VERSION", rediscluster.__version__) + +config._add( + "rediscluster", + dict( + _default_service=schematize_service_name("rediscluster"), + cmd_max_length=int(os.getenv("DD_REDISCLUSTER_CMD_MAX_LENGTH", CMD_MAX_LEN)), + resource_only_command=asbool(os.getenv("DD_REDIS_RESOURCE_ONLY_COMMAND", True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(rediscluster, "__version__", "") + + +def patch(): + """Patch the instrumented methods""" + if getattr(rediscluster, "_datadog_patch", False): + return + rediscluster._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + if REDISCLUSTER_VERSION >= (2, 0, 0): + _w("rediscluster", "client.RedisCluster.execute_command", traced_execute_command(config.rediscluster)) + _w("rediscluster", "client.RedisCluster.pipeline", traced_pipeline) + _w("rediscluster", "pipeline.ClusterPipeline.execute", traced_execute_pipeline) + Pin().onto(rediscluster.RedisCluster) + else: + _w("rediscluster", "StrictRedisCluster.execute_command", traced_execute_command(config.rediscluster)) + _w("rediscluster", "StrictRedisCluster.pipeline", traced_pipeline) + _w("rediscluster", "StrictClusterPipeline.execute", traced_execute_pipeline) + Pin().onto(rediscluster.StrictRedisCluster) + + +def unpatch(): + if getattr(rediscluster, "_datadog_patch", False): + rediscluster._datadog_patch = False + + if REDISCLUSTER_VERSION >= (2, 0, 0): + unwrap(rediscluster.client.RedisCluster, "execute_command") + unwrap(rediscluster.client.RedisCluster, "pipeline") + unwrap(rediscluster.pipeline.ClusterPipeline, "execute") + else: + unwrap(rediscluster.StrictRedisCluster, "execute_command") + unwrap(rediscluster.StrictRedisCluster, "pipeline") + unwrap(rediscluster.StrictClusterPipeline, 
"execute") + + +# +# tracing functions +# + + +def traced_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + cmds = [ + stringify_cache_args(c.args, cmd_max_len=config.rediscluster.cmd_max_length) for c in instance.command_stack + ] + resource = "\n".join(cmds) + tracer = pin.tracer + with tracer.trace( + schematize_cache_operation(redisx.CMD, cache_provider=redisx.APP), + resource=resource, + service=trace_utils.ext_service(pin, config.rediscluster, "rediscluster"), + span_type=SpanTypes.REDIS, + ) as s: + s.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + s.set_tag_str(COMPONENT, config.rediscluster.integration_name) + s.set_tag_str(db.SYSTEM, redisx.APP) + s.set_tag(SPAN_MEASURED_KEY) + s.set_tag_str(redisx.RAWCMD, resource) + s.set_metric(redisx.PIPELINE_LEN, len(instance.command_stack)) + + # set analytics sample rate if enabled + s.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.rediscluster.get_analytics_sample_rate()) + + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/__init__.py new file mode 100644 index 0000000..e79120f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/__init__.py @@ -0,0 +1,87 @@ +""" +The ``requests`` integration traces all HTTP requests made with the ``requests`` +library. + +The default service name used is `requests` but it can be configured to match +the services that the specific requests are made to. + +Enabling +~~~~~~~~ + +The requests integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(requests=True) + + # use requests like usual + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.requests['service'] + + The service name reported by default for requests queries. This value will + be overridden by an instance override or if the split_by_domain setting is + enabled. + + This option can also be set with the ``DD_REQUESTS_SERVICE`` environment + variable. + + Default: ``"requests"`` + + + .. _requests-config-distributed-tracing: +.. py:data:: ddtrace.config.requests['distributed_tracing'] + + Whether or not to parse distributed tracing headers. + + Default: ``True`` + + +.. py:data:: ddtrace.config.requests['trace_query_string'] + + Whether or not to include the query string as a tag. + + Default: ``False`` + + +.. py:data:: ddtrace.config.requests['split_by_domain'] + + Whether or not to use the domain name of requests as the service name. This + setting can be overridden with session overrides (described in the Instance + Configuration section). 
+ + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To set configuration options for all requests made with a ``requests.Session`` object +use the config API:: + + from ddtrace import config + from requests import Session + + session = Session() + cfg = config.get_from(session) + cfg['service_name'] = 'auth-api' + cfg['distributed_tracing'] = False +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["requests"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + from .session import TracedSession + + __all__ = ["patch", "unpatch", "TracedSession", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/connection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/connection.py new file mode 100644 index 0000000..7f1f243 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/connection.py @@ -0,0 +1,146 @@ +from typing import Optional # noqa:F401 + +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.settings.asm import config as asm_config + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import parse +from ...internal.logger import get_logger +from ...internal.schema import schematize_url_operation +from ...internal.utils import get_argument_value +from ...propagation.http import HTTPPropagator +from .. import trace_utils + + +log = get_logger(__name__) + + +def _extract_hostname_and_path(uri): + # type: (str) -> str + parsed_uri = parse.urlparse(uri) + hostname = parsed_uri.hostname + try: + if parsed_uri.port is not None: + hostname = "%s:%s" % (hostname, str(parsed_uri.port)) + except ValueError: + # ValueError is raised in PY>3.5 when parsed_uri.port < 0 or parsed_uri.port > 65535 + hostname = "%s:?" % (hostname,) + return hostname, parsed_uri.path + + +def _extract_query_string(uri): + # type: (str) -> Optional[str] + start = uri.find("?") + 1 + if start == 0: + return None + + end = len(uri) + j = uri.rfind("#", 0, end) + if j != -1: + end = j + + if end <= start: + return None + + return uri[start:end] + + +def _wrap_request(func, instance, args, kwargs): + """This function wraps `request.request`, which includes all request verbs like `request.get` and `request.post`. + IAST needs to wrap this function because `Session.send` is too late, as we require the raw arguments of the request. + """ + if asm_config._iast_enabled: + from ddtrace.appsec._iast.taint_sinks.ssrf import _iast_report_ssrf + + _iast_report_ssrf(func, *args, **kwargs) + return func(*args, **kwargs) + + +def _wrap_send(func, instance, args, kwargs): + """Trace the `Session.send` instance method""" + # TODO[manu]: we already offer a way to provide the Global Tracer + # and is ddtrace.tracer; it's used only inside our tests and can + # be easily changed by providing a TracingTestCase that sets common + # tracing functionalities. 
+ tracer = getattr(instance, "datadog_tracer", ddtrace.tracer) + + # skip if tracing is not enabled + if not tracer.enabled: + return func(*args, **kwargs) + + request = get_argument_value(args, kwargs, 0, "request") + if not request: + return func(*args, **kwargs) + + url = trace_utils._sanitized_url(request.url) + method = "" + if request.method is not None: + method = request.method.upper() + hostname, path = _extract_hostname_and_path(url) + host_without_port = hostname.split(":")[0] if hostname is not None else None + + cfg = config.get_from(instance) + service = None + if cfg["split_by_domain"] and hostname: + service = hostname + if service is None: + service = cfg.get("service", None) + if service is None: + service = cfg.get("service_name", None) + if service is None: + service = trace_utils.ext_service(None, config.requests) + + operation_name = schematize_url_operation("requests.request", protocol="http", direction=SpanDirection.OUTBOUND) + with tracer.trace(operation_name, service=service, resource=f"{method} {path}", span_type=SpanTypes.HTTP) as span: + span.set_tag_str(COMPONENT, config.requests.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + # Configure trace search sample rate + # DEV: analytics enabled on per-session basis + cfg = config.get_from(instance) + analytics_enabled = cfg.get("analytics_enabled") + if analytics_enabled: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, cfg.get("analytics_sample_rate", True)) + + # propagate distributed tracing headers + if cfg.get("distributed_tracing"): + HTTPPropagator.inject(span.context, request.headers) + + response = response_headers = None + try: + response = func(*args, **kwargs) + return response + finally: + try: + status = None + if response is not None: + status = response.status_code + # Storing response headers in the span. 
+ # Note that response.headers is not a dict, but an iterable + # requests custom structure, that we convert to a dict + response_headers = dict(getattr(response, "headers", {})) + + trace_utils.set_http_meta( + span, + config.requests, + request_headers=request.headers, + response_headers=response_headers, + method=method, + url=request.url, + target_host=host_without_port, + status_code=status, + query=_extract_query_string(url), + ) + except Exception: + log.debug("requests: error adding tags", exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/constants.py new file mode 100644 index 0000000..c3f5eac --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/constants.py @@ -0,0 +1 @@ +DEFAULT_SERVICE = "requests" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/patch.py new file mode 100644 index 0000000..6cb93f3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/patch.py @@ -0,0 +1,51 @@ +import os + +import requests + +from ddtrace import config +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool +from ...pin import Pin +from ..trace_utils import unwrap as _u +from .connection import _wrap_request +from .connection import _wrap_send + + +# requests default settings +config._add( + "requests", + { + "distributed_tracing": asbool(os.getenv("DD_REQUESTS_DISTRIBUTED_TRACING", default=True)), + "split_by_domain": asbool(os.getenv("DD_REQUESTS_SPLIT_BY_DOMAIN", default=False)), + "default_http_tag_query_string": os.getenv("DD_HTTP_CLIENT_TAG_QUERY_STRING", "true"), + "_default_service": schematize_service_name("requests"), + }, +) + + +def get_version(): + # type: () -> str + return getattr(requests, "__version__", "") + + +def patch(): + """Activate http calls tracing""" + if getattr(requests, "__datadog_patch", False): + return + requests.__datadog_patch = True + + _w("requests", "Session.send", _wrap_send) + # IAST needs to wrap this function because `Session.send` is too late + _w("requests", "Session.request", _wrap_request) + Pin(_config=config.requests).onto(requests.Session) + + +def unpatch(): + """Disable traced sessions""" + if not getattr(requests, "__datadog_patch", False): + return + requests.__datadog_patch = False + + _u(requests.Session, "send") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/session.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/session.py new file mode 100644 index 0000000..1e603fb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/requests/session.py @@ -0,0 +1,21 @@ +import requests + +from ddtrace import Pin +from ddtrace import config +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from .connection import _wrap_send + + +class TracedSession(requests.Session): + """TracedSession is a requests' Session that is already traced. + You can use it if you want a finer grained control for your + HTTP clients. 
+ """ + + pass + + +# always patch our `TracedSession` when imported +_w(TracedSession, "send", _wrap_send) +Pin(_config=config.requests).onto(TracedSession) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rq/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rq/__init__.py new file mode 100644 index 0000000..970f45d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/rq/__init__.py @@ -0,0 +1,283 @@ +""" +The RQ__ integration will trace your jobs. + + +Usage +~~~~~ + +The rq integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(rq=True) + + +Worker Usage +~~~~~~~~~~~~ + +``ddtrace-run`` can be used to easily trace your workers:: + + DD_SERVICE=myworker ddtrace-run rq worker + + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To override the service name for a queue:: + + from ddtrace import Pin + + connection = redis.Redis() + queue = rq.Queue(connection=connection) + Pin.override(queue, service="custom_queue_service") + + +To override the service name for a particular worker:: + + worker = rq.SimpleWorker([queue], connection=queue.connection) + Pin.override(worker, service="custom_worker_service") + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.rq['distributed_tracing_enabled'] +.. py:data:: ddtrace.config.rq_worker['distributed_tracing_enabled'] + + If ``True`` the integration will connect the traces sent between the enqueuer + and the RQ worker. + + This option can also be set with the ``DD_RQ_DISTRIBUTED_TRACING_ENABLED`` + environment variable on either the enqueuer or worker applications. + + Default: ``True`` + +.. py:data:: ddtrace.config.rq['service'] + + The service name reported by default for RQ spans from the app. + + This option can also be set with the ``DD_SERVICE`` or ``DD_RQ_SERVICE`` + environment variables. + + Default: ``rq`` + +.. py:data:: ddtrace.config.rq_worker['service'] + + The service name reported by default for RQ spans from workers. + + This option can also be set with the ``DD_SERVICE`` environment + variable. + + Default: ``rq-worker`` + +.. __: https://python-rq.org/ + +""" +import os + +from ddtrace import Pin +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_messaging_operation +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.utils import get_argument_value +from ...internal.utils.formats import asbool +from ...propagation.http import HTTPPropagator +from .. 
import trace_utils + + +__all__ = ["patch", "unpatch", "get_version"] + + +config._add( + "rq", + dict( + distributed_tracing_enabled=asbool(os.environ.get("DD_RQ_DISTRIBUTED_TRACING_ENABLED", True)), + _default_service=schematize_service_name("rq"), + ), +) + +config._add( + "rq_worker", + dict( + distributed_tracing_enabled=asbool(os.environ.get("DD_RQ_DISTRIBUTED_TRACING_ENABLED", True)), + _default_service=schematize_service_name("rq-worker"), + ), +) + + +def get_version(): + # type: () -> str + import rq + + return str(getattr(rq, "__version__", "")) + + +@trace_utils.with_traced_module +def traced_queue_enqueue_job(rq, pin, func, instance, args, kwargs): + job = get_argument_value(args, kwargs, 0, "f") + + func_name = job.func_name + job_inst = job.instance + job_inst_str = "%s.%s" % (job_inst.__module__, job_inst.__class__.__name__) if job_inst else "" + + if job_inst_str: + resource = "%s.%s" % (job_inst_str, func_name) + else: + resource = func_name + + with pin.tracer.trace( + schematize_messaging_operation("rq.queue.enqueue_job", provider="rq", direction=SpanDirection.OUTBOUND), + service=trace_utils.int_service(pin, config.rq), + resource=resource, + span_type=SpanTypes.WORKER, + ) as span: + span.set_tag_str(COMPONENT, config.rq.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.PRODUCER) + + span.set_tag_str("queue.name", instance.name) + span.set_tag_str("job.id", job.get_id()) + span.set_tag_str("job.func_name", job.func_name) + + # If the queue is_async then add distributed tracing headers to the job + if instance.is_async and config.rq.distributed_tracing_enabled: + HTTPPropagator.inject(span.context, job.meta) + return func(*args, **kwargs) + + +@trace_utils.with_traced_module +def traced_queue_fetch_job(rq, pin, func, instance, args, kwargs): + with pin.tracer.trace( + schematize_messaging_operation("rq.queue.fetch_job", provider="rq", direction=SpanDirection.PROCESSING), + service=trace_utils.int_service(pin, config.rq), + ) as span: + span.set_tag_str(COMPONENT, config.rq.integration_name) + + job_id = get_argument_value(args, kwargs, 0, "job_id") + span.set_tag_str("job.id", job_id) + return func(*args, **kwargs) + + +@trace_utils.with_traced_module +def traced_perform_job(rq, pin, func, instance, args, kwargs): + """Trace rq.Worker.perform_job""" + # `perform_job` is executed in a freshly forked, short-lived instance + job = get_argument_value(args, kwargs, 0, "job") + + if config.rq_worker.distributed_tracing_enabled: + ctx = HTTPPropagator.extract(job.meta) + if ctx.trace_id: + pin.tracer.context_provider.activate(ctx) + + try: + with pin.tracer.trace( + "rq.worker.perform_job", + service=trace_utils.int_service(pin, config.rq_worker), + span_type=SpanTypes.WORKER, + resource=job.func_name, + ) as span: + span.set_tag_str(COMPONENT, config.rq.integration_name) + + # set span.kind to the type of request being performed + span.set_tag_str(SPAN_KIND, SpanKind.CONSUMER) + span.set_tag_str("job.id", job.get_id()) + try: + return func(*args, **kwargs) + finally: + # get_status() returns None when ttl=0 + span.set_tag_str("job.status", job.get_status() or "None") + span.set_tag_str("job.origin", job.origin) + if job.is_failed: + span.error = 1 + finally: + # Force flush to agent since the process `os.exit()`s + # immediately after this method returns + pin.tracer.flush() + + +@trace_utils.with_traced_module +def traced_job_perform(rq, pin, func, instance, args, kwargs): + """Trace rq.Job.perform(...)""" + 
job = instance + + # Inherit the service name from whatever parent exists. + # eg. in a worker, a perform_job parent span will exist with the worker + # service. + with pin.tracer.trace("rq.job.perform", resource=job.func_name) as span: + span.set_tag_str(COMPONENT, config.rq.integration_name) + + span.set_tag("job.id", job.get_id()) + return func(*args, **kwargs) + + +@trace_utils.with_traced_module +def traced_job_fetch_many(rq, pin, func, instance, args, kwargs): + """Trace rq.Job.fetch_many(...)""" + with pin.tracer.trace( + schematize_messaging_operation("rq.job.fetch_many", provider="rq", direction=SpanDirection.PROCESSING), + service=trace_utils.ext_service(pin, config.rq_worker), + ) as span: + span.set_tag_str(COMPONENT, config.rq.integration_name) + + job_ids = get_argument_value(args, kwargs, 0, "job_ids") + span.set_tag("job_ids", job_ids) + return func(*args, **kwargs) + + +def patch(): + # Avoid importing rq at the module level, eventually will be an import hook + import rq + + if getattr(rq, "_datadog_patch", False): + return + + Pin().onto(rq) + + # Patch rq.job.Job + Pin().onto(rq.job.Job) + trace_utils.wrap(rq.job, "Job.perform", traced_job_perform(rq.job.Job)) + + # Patch rq.queue.Queue + Pin().onto(rq.queue.Queue) + trace_utils.wrap("rq.queue", "Queue.enqueue_job", traced_queue_enqueue_job(rq)) + trace_utils.wrap("rq.queue", "Queue.fetch_job", traced_queue_fetch_job(rq)) + + # Patch rq.worker.Worker + Pin().onto(rq.worker.Worker) + trace_utils.wrap(rq.worker, "Worker.perform_job", traced_perform_job(rq)) + + rq._datadog_patch = True + + +def unpatch(): + import rq + + if not getattr(rq, "_datadog_patch", False): + return + + Pin().remove_from(rq) + + # Unpatch rq.job.Job + Pin().remove_from(rq.job.Job) + trace_utils.unwrap(rq.job.Job, "perform") + + # Unpatch rq.queue.Queue + Pin().remove_from(rq.queue.Queue) + trace_utils.unwrap(rq.queue.Queue, "enqueue_job") + trace_utils.unwrap(rq.queue.Queue, "fetch_job") + + # Unpatch rq.worker.Worker + Pin().remove_from(rq.worker.Worker) + trace_utils.unwrap(rq.worker.Worker, "perform_job") + + rq._datadog_patch = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/__init__.py new file mode 100644 index 0000000..49fbf4c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/__init__.py @@ -0,0 +1,75 @@ +""" +The Sanic__ integration will trace requests to and from Sanic. + + +Enable Sanic tracing automatically via ``ddtrace-run``:: + + ddtrace-run python app.py + +Sanic tracing can also be enabled explicitly:: + + from ddtrace import patch_all + patch_all(sanic=True) + + from sanic import Sanic + from sanic.response import text + + app = Sanic(__name__) + + @app.route('/') + def index(request): + return text('hello world') + + if __name__ == '__main__': + app.run() + +On Python 3.6 and below, you must enable the legacy ``AsyncioContextProvider`` before using the middleware:: + + from ddtrace.contrib.asyncio.provider import AsyncioContextProvider + from ddtrace import tracer # Or whichever tracer instance you plan to use + tracer.configure(context_provider=AsyncioContextProvider()) + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.sanic['distributed_tracing_enabled'] + + Whether to parse distributed tracing headers from requests received by your Sanic app. + + Default: ``True`` + + +.. py:data:: ddtrace.config.sanic['service_name'] + + The service name reported for your Sanic app. 
+ + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'sanic'`` + + +Example:: + + from ddtrace import config + + # Enable distributed tracing + config.sanic['distributed_tracing_enabled'] = True + + # Override service name + config.sanic['service_name'] = 'custom-service-name' + +.. __: https://sanic.readthedocs.io/en/latest/ +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["sanic"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/patch.py new file mode 100644 index 0000000..6fcf002 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sanic/patch.py @@ -0,0 +1,285 @@ +import asyncio + +import sanic + +import ddtrace +from ddtrace import config +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.pin import Pin +from ddtrace.vendor import wrapt +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...internal.logger import get_logger +from .. import trace_utils + + +log = get_logger(__name__) + +config._add("sanic", dict(_default_service=schematize_service_name("sanic"), distributed_tracing=True)) + +SANIC_VERSION = (0, 0, 0) + + +def get_version(): + # type: () -> str + return getattr(sanic, "__version__", "") + + +def _get_current_span(request): + pin = Pin._find(request.ctx) + if not pin or not pin.enabled(): + return None + + return pin.tracer.current_span() + + +def update_span(span, response): + # Check for response status or headers on the response object + # DEV: This object can either be a form of BaseResponse or an Exception + # if we do not have a status code, we can assume this is an exception + # and so use 500 + status_code = getattr(response, "status", 500) + response_headers = getattr(response, "headers", None) + trace_utils.set_http_meta(span, config.sanic, status_code=status_code, response_headers=response_headers) + + +def _wrap_response_callback(span, callback): + # Only for sanic 20 and older + # Wrap response callbacks (either sync or async function) to set HTTP + # response span tags + + @wrapt.function_wrapper + def wrap_sync(wrapped, instance, args, kwargs): + r = wrapped(*args, **kwargs) + response = args[0] + update_span(span, response) + return r + + @wrapt.function_wrapper + async def wrap_async(wrapped, instance, args, kwargs): + r = await wrapped(*args, **kwargs) + response = args[0] + update_span(span, response) + return r + + if asyncio.iscoroutinefunction(callback): + return wrap_async(callback) + + return wrap_sync(callback) + + +async def patch_request_respond(wrapped, instance, args, kwargs): + # Only for sanic 21 and newer + # Wrap the framework response to set HTTP response span tags + response = await wrapped(*args, **kwargs) + span = _get_current_span(instance) + if not span: + return response + + update_span(span, 
response) + + # Sanic 21.9.x does not dispatch `http.lifecycle.response` in `handle_exception` + # so we have to handle finishing the span here instead + if (21, 9, 0) <= SANIC_VERSION < (21, 12, 0) and getattr(instance.ctx, "__dd_span_call_finish", False): + span.finish() + return response + + +def _get_path(request): + """Get path and replace path parameter values with names if route exists.""" + path = request.path + try: + match_info = request.match_info + except sanic.exceptions.SanicException: + return path + for key, value in match_info.items(): + try: + value = str(value) + except Exception: + log.debug("Failed to convert path parameter value to string", exc_info=True) + continue + path = path.replace(value, f"<{key}>") + return path + + +async def patch_run_request_middleware(wrapped, instance, args, kwargs): + # Set span resource from the framework request + request = args[0] + span = _get_current_span(request) + if span is not None: + span.resource = "{} {}".format(request.method, _get_path(request)) + return await wrapped(*args, **kwargs) + + +def patch(): + """Patch the instrumented methods.""" + global SANIC_VERSION + + if getattr(sanic, "__datadog_patch", False): + return + sanic.__datadog_patch = True + + SANIC_VERSION = tuple(map(int, sanic.__version__.split("."))) + + if SANIC_VERSION >= (21, 9, 0): + _w("sanic", "Sanic.__init__", patch_sanic_init) + _w(sanic.request, "Request.respond", patch_request_respond) + else: + _w("sanic", "Sanic.handle_request", patch_handle_request) + if SANIC_VERSION >= (21, 0, 0): + _w("sanic", "Sanic._run_request_middleware", patch_run_request_middleware) + _w(sanic.request, "Request.respond", patch_request_respond) + + +def unpatch(): + """Unpatch the instrumented methods.""" + if not getattr(sanic, "__datadog_patch", False): + return + + if SANIC_VERSION >= (21, 9, 0): + _u(sanic.Sanic, "__init__") + _u(sanic.request.Request, "respond") + else: + _u(sanic.Sanic, "handle_request") + if SANIC_VERSION >= (21, 0, 0): + _u(sanic.Sanic, "_run_request_middleware") + _u(sanic.request.Request, "respond") + + sanic.__datadog_patch = False + + +def patch_sanic_init(wrapped, instance, args, kwargs): + """Wrapper for creating sanic apps to automatically add our signal handlers""" + wrapped(*args, **kwargs) + + instance.add_signal(sanic_http_lifecycle_handle, "http.lifecycle.handle") + instance.add_signal(sanic_http_routing_after, "http.routing.after") + instance.add_signal(sanic_http_lifecycle_exception, "http.lifecycle.exception") + instance.add_signal(sanic_http_lifecycle_response, "http.lifecycle.response") + + +async def patch_handle_request(wrapped, instance, args, kwargs): + """Wrapper for Sanic.handle_request""" + + def unwrap(request, write_callback=None, stream_callback=None, **kwargs): + return request, write_callback, stream_callback, kwargs + + request, write_callback, stream_callback, new_kwargs = unwrap(*args, **kwargs) + + if request.scheme not in ("http", "https"): + return await wrapped(*args, **kwargs) + + with _create_sanic_request_span(request) as span: + if write_callback is not None: + new_kwargs["write_callback"] = _wrap_response_callback(span, write_callback) + if stream_callback is not None: + new_kwargs["stream_callback"] = _wrap_response_callback(span, stream_callback) + + return await wrapped(request, **new_kwargs) + + +def _create_sanic_request_span(request): + """Helper to create sanic.request span and attach a pin to request.ctx""" + pin = Pin() + pin.onto(request.ctx) + + if SANIC_VERSION < (21, 0, 0): + # Set span 
resource from the framework request + resource = "{} {}".format(request.method, _get_path(request)) + else: + # The path is not available anymore in 21.x. Get it from + # the _run_request_middleware instrumented method. + resource = None + + headers = request.headers.copy() + + trace_utils.activate_distributed_headers(ddtrace.tracer, int_config=config.sanic, request_headers=headers) + + span = pin.tracer.trace( + schematize_url_operation("sanic.request", protocol="http", direction=SpanDirection.INBOUND), + service=trace_utils.int_service(None, config.sanic), + resource=resource, + span_type=SpanTypes.WEB, + ) + span.set_tag_str(COMPONENT, config.sanic.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + sample_rate = config.sanic.get_analytics_sample_rate(use_global_config=True) + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + method = request.method + url = "{scheme}://{host}{path}".format(scheme=request.scheme, host=request.host, path=request.path) + query_string = request.query_string + if isinstance(query_string, bytes): + query_string = query_string.decode() + trace_utils.set_http_meta(span, config.sanic, method=method, url=url, query=query_string, request_headers=headers) + + return span + + +async def sanic_http_lifecycle_handle(request): + """Lifecycle signal called when a new request is started.""" + _create_sanic_request_span(request) + + +async def sanic_http_routing_after(request, route, kwargs, handler): + """Lifecycle signal called after routing has been resolved.""" + span = _get_current_span(request) + if not span: + return + + pattern = route.raw_path + # Sanic 21.9.0 and newer strip the leading slash from `route.raw_path` + if not pattern.startswith("/"): + pattern = "/{}".format(pattern) + if route.regex: + pattern = route.pattern + + span.resource = "{} {}".format(request.method, pattern) + span.set_tag_str("sanic.route.name", route.name) + + +async def sanic_http_lifecycle_response(request, response): + """Lifecycle signal called when a response is starting. + + Note: This signal does not get called when exceptions occur + in 21.9.x. 
The issue was resolved in 21.12.x + """ + span = _get_current_span(request) + if not span: + return + try: + update_span(span, response) + finally: + span.finish() + + +async def sanic_http_lifecycle_exception(request, exception): + """Lifecycle signal called when an exception occurs.""" + span = _get_current_span(request) + if not span: + return + + # Do not attach exception for exceptions not considered as errors + # ex: Http 400s + # DEV: We still need to set `__dd_span_call_finish` below + if not hasattr(exception, "status_code") or config.http_server.is_error_code(exception.status_code): + ex_type = type(exception) + ex_tb = getattr(exception, "__traceback__", None) + span.set_exc_info(ex_type, exception, ex_tb) + + # Sanic 21.9.x does not dispatch `http.lifecycle.response` in `handle_exception` + # so we need to indicate to `patch_request_respond` to finish the span + if (21, 9, 0) <= SANIC_VERSION < (21, 12, 0): + request.ctx.__dd_span_call_finish = True diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/__init__.py new file mode 100644 index 0000000..0deaf33 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/__init__.py @@ -0,0 +1,72 @@ +""" +The snowflake integration instruments the ``snowflake-connector-python`` library to trace Snowflake queries. + +Note that this integration is in beta. + +Enabling +~~~~~~~~ + +The integration is not enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch, patch_all + patch(snowflake=True) + patch_all(snowflake=True) + +or the ``DD_TRACE_SNOWFLAKE_ENABLED=true`` to enable it with ``ddtrace-run``. + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.snowflake["service"] + + The service name reported by default for snowflake spans. + + This option can also be set with the ``DD_SNOWFLAKE_SERVICE`` environment + variable. + + Default: ``"snowflake"`` + +.. py:data:: ddtrace.config.snowflake["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_SNOWFLAKE_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + from snowflake.connector import connect + + # This will report a span with the default settings + conn = connect(user="alice", password="b0b", account="dev") + + # Use a pin to override the service name for this connection. 
+ Pin.override(conn, service="snowflake-dev") + + + cursor = conn.cursor() + cursor.execute("SELECT current_version()") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["snowflake.connector"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/patch.py new file mode 100644 index 0000000..607854f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/snowflake/patch.py @@ -0,0 +1,99 @@ +import os + +from ddtrace import Pin +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...ext import db +from ...ext import net +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool +from ..dbapi import TracedConnection +from ..dbapi import TracedCursor +from ..trace_utils import unwrap + + +config._add( + "snowflake", + dict( + _default_service=schematize_service_name("snowflake"), + # FIXME: consistent prefix span names with other dbapi integrations + # The snowflake integration was introduced following a different pattern + # than all other dbapi-compliant integrations. It sets span names to + # `sql.query` whereas other dbapi-compliant integrations are set to + # `.query`. + _dbapi_span_name_prefix="sql", + trace_fetch_methods=asbool(os.getenv("DD_SNOWFLAKE_TRACE_FETCH_METHODS", default=False)), + ), +) + + +def get_version(): + # type: () -> str + try: + import snowflake.connector as c + except AttributeError: + import sys + + c = sys.modules.get("snowflake.connector") + return str(c.__version__) + + +class _SFTracedCursor(TracedCursor): + def _set_post_execute_tags(self, span): + super(_SFTracedCursor, self)._set_post_execute_tags(span) + span.set_tag_str("sfqid", self.__wrapped__.sfqid) + + +def patch(): + try: + import snowflake.connector as c + except AttributeError: + import sys + + c = sys.modules.get("snowflake.connector") + + if getattr(c, "_datadog_patch", False): + return + c._datadog_patch = True + + wrapt.wrap_function_wrapper(c, "Connect", patched_connect) + wrapt.wrap_function_wrapper(c, "connect", patched_connect) + + +def unpatch(): + try: + import snowflake.connector as c + except AttributeError: + import sys + + c = sys.modules.get("snowflake.connector") + + if getattr(c, "_datadog_patch", False): + c._datadog_patch = False + + unwrap(c, "Connect") + unwrap(c, "connect") + + +def patched_connect(connect_func, _, args, kwargs): + conn = connect_func(*args, **kwargs) + if isinstance(conn, TracedConnection): + return conn + + # Add default tags to each query + tags = { + net.TARGET_HOST: conn.host, + net.TARGET_PORT: conn.port, + db.NAME: conn.database, + db.SYSTEM: "snowflake", + db.USER: conn.user, + "db.application": conn.application, + "db.schema": conn.schema, + "db.warehouse": conn.warehouse, + } + + pin = Pin(tags=tags) + traced_conn = TracedConnection(conn, pin=pin, cfg=config.snowflake, cursor_cls=_SFTracedCursor) + pin.onto(traced_conn) + return traced_conn diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/__init__.py new file mode 100644 index 0000000..b2ff112 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/__init__.py @@ 
-0,0 +1,34 @@ +""" +Enabling the SQLAlchemy integration is only necessary if there is no +instrumentation available or enabled for the underlying database engine (e.g. +pymysql, psycopg, mysql-connector, etc.). + +To trace sqlalchemy queries, add instrumentation to the engine class +using the patch method that **must be called before** importing sqlalchemy:: + + # patch before importing `create_engine` + from ddtrace import Pin, patch + patch(sqlalchemy=True) + + # use SQLAlchemy as usual + from sqlalchemy import create_engine + + engine = create_engine('sqlite:///:memory:') + engine.connect().execute("SELECT COUNT(*) FROM users") + + # Use a PIN to specify metadata related to this engine + Pin.override(engine, service='replica-db') +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["sqlalchemy", "sqlalchemy.event"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .engine import trace_engine + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["trace_engine", "patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/engine.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/engine.py new file mode 100644 index 0000000..9addddf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/engine.py @@ -0,0 +1,164 @@ +""" +To trace sqlalchemy queries, add instrumentation to the engine class or +instance you are using:: + + from ddtrace import tracer + from ddtrace.contrib.sqlalchemy import trace_engine + from sqlalchemy import create_engine + + engine = create_engine('sqlite:///:memory:') + trace_engine(engine, tracer, 'my-database') + + engine.connect().execute('select count(*) from users') +""" +# 3p +import sqlalchemy +from sqlalchemy.event import listen + +# project +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_database_operation +from ddtrace.internal.schema import schematize_service_name + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db +from ...ext import net as netx +from ...ext import sql as sqlx +from ...pin import Pin + + +def trace_engine(engine, tracer=None, service=None): + """ + Add tracing instrumentation to the given sqlalchemy engine or instance. + + :param sqlalchemy.Engine engine: a SQLAlchemy engine class or instance + :param ddtrace.Tracer tracer: a tracer instance. will default to the global + :param str service: the name of the service to trace. + """ + tracer = tracer or ddtrace.tracer # by default use global + EngineTracer(tracer, service, engine) + + +def _wrap_create_engine(func, module, args, kwargs): + """Trace the SQLAlchemy engine, creating an `EngineTracer` + object that will listen to SQLAlchemy events. A PIN object + is attached to the engine instance so that it can be + used later. 
+ """ + # the service name is set to `None` so that the engine + # name is used by default; users can update this setting + # using the PIN object + engine = func(*args, **kwargs) + EngineTracer(ddtrace.tracer, None, engine) + return engine + + +class EngineTracer(object): + def __init__(self, tracer, service, engine): + self.tracer = tracer + self.engine = engine + self.vendor = sqlx.normalize_vendor(engine.name) + self.service = schematize_service_name(service or self.vendor) + self.name = schematize_database_operation("%s.query" % self.vendor, database_provider=self.vendor) + + # attach the PIN + Pin(tracer=tracer, service=self.service).onto(engine) + + listen(engine, "before_cursor_execute", self._before_cur_exec) + listen(engine, "after_cursor_execute", self._after_cur_exec) + + # Determine name of error event to listen for + # Ref: https://github.com/DataDog/dd-trace-py/issues/841 + if sqlalchemy.__version__[0] != "0": + error_event = "handle_error" + else: + error_event = "dbapi_error" + listen(engine, error_event, self._handle_db_error) + + def _before_cur_exec(self, conn, cursor, statement, *args): + pin = Pin.get_from(self.engine) + if not pin or not pin.enabled(): + # don't trace the execution + return + + span = pin.tracer.trace( + self.name, + service=pin.service, + span_type=SpanTypes.SQL, + resource=statement, + ) + span.set_tag_str(COMPONENT, config.sqlalchemy.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + span.set_tag(SPAN_MEASURED_KEY) + + if not _set_tags_from_url(span, conn.engine.url): + _set_tags_from_cursor(span, self.vendor, cursor) + + # set analytics sample rate + sample_rate = config.sqlalchemy.get_analytics_sample_rate() + if sample_rate is not None: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, sample_rate) + + def _after_cur_exec(self, conn, cursor, statement, *args): + pin = Pin.get_from(self.engine) + if not pin or not pin.enabled(): + # don't trace the execution + return + + span = pin.tracer.current_span() + if not span: + return + + try: + if cursor and cursor.rowcount >= 0: + span.set_tag(db.ROWCOUNT, cursor.rowcount) + finally: + span.finish() + + def _handle_db_error(self, *args): + pin = Pin.get_from(self.engine) + if not pin or not pin.enabled(): + # don't trace the execution + return + + span = pin.tracer.current_span() + if not span: + return + + try: + span.set_traceback() + finally: + span.finish() + + +def _set_tags_from_url(span, url): + """set connection tags from the url. 
return true if successful.""" + if url.host: + span.set_tag_str(netx.TARGET_HOST, url.host) + if url.port: + span.set_tag(netx.TARGET_PORT, url.port) + if url.database: + span.set_tag_str(sqlx.DB, url.database) + + return bool(span.get_tag(netx.TARGET_HOST)) + + +def _set_tags_from_cursor(span, vendor, cursor): + """attempt to set db connection tags by introspecting the cursor.""" + if "postgres" == vendor: + if hasattr(cursor, "connection"): + dsn = getattr(cursor.connection, "dsn", None) + if dsn: + d = sqlx.parse_pg_dsn(dsn) + span.set_tag_str(sqlx.DB, d.get("dbname")) + span.set_tag_str(netx.TARGET_HOST, d.get("host")) + span.set_metric(netx.TARGET_PORT, int(d.get("port"))) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/patch.py new file mode 100644 index 0000000..edd1c43 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlalchemy/patch.py @@ -0,0 +1,29 @@ +import sqlalchemy + +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ..trace_utils import unwrap +from .engine import _wrap_create_engine + + +def get_version(): + # type: () -> str + return getattr(sqlalchemy, "__version__", "") + + +def patch(): + if getattr(sqlalchemy.engine, "__datadog_patch", False): + return + sqlalchemy.engine.__datadog_patch = True + + # patch the engine creation function + _w("sqlalchemy", "create_engine", _wrap_create_engine) + _w("sqlalchemy.engine", "create_engine", _wrap_create_engine) + + +def unpatch(): + # unpatch sqlalchemy + if getattr(sqlalchemy.engine, "__datadog_patch", False): + sqlalchemy.engine.__datadog_patch = False + unwrap(sqlalchemy, "create_engine") + unwrap(sqlalchemy.engine, "create_engine") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/__init__.py new file mode 100644 index 0000000..bad1f91 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/__init__.py @@ -0,0 +1,66 @@ +""" +The sqlite integration instruments the built-in sqlite module to trace SQLite queries. + + +Enabling +~~~~~~~~ + +The integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(sqlite=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.sqlite["service"] + + The service name reported by default for sqlite spans. + + This option can also be set with the ``DD_SQLITE_SERVICE`` environment + variable. + + Default: ``"sqlite"`` + +.. py:data:: ddtrace.config.sqlite["trace_fetch_methods"] + + Whether or not to trace fetch methods. + + Can also configured via the ``DD_SQLITE_TRACE_FETCH_METHODS`` environment variable. + + Default: ``False`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure the integration on an per-connection basis use the +``Pin`` API:: + + from ddtrace import Pin + import sqlite3 + + # This will report a span with the default settings + db = sqlite3.connect(":memory:") + + # Use a pin to override the service name for the connection. 
+ Pin.override(db, service='sqlite-users') + + cursor = db.cursor() + cursor.execute("select * from users where id = 1") +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["sqlite3"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/patch.py new file mode 100644 index 0000000..b5e357b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/sqlite3/patch.py @@ -0,0 +1,97 @@ +import os +import sqlite3 +import sqlite3.dbapi2 +import sys + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...contrib.dbapi import FetchTracedCursor +from ...contrib.dbapi import TracedConnection +from ...contrib.dbapi import TracedCursor +from ...ext import db +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import asbool +from ...pin import Pin + + +# Original connect method +_connect = sqlite3.connect + +config._add( + "sqlite", + dict( + _default_service=schematize_service_name("sqlite"), + _dbapi_span_name_prefix="sqlite", + _dbapi_span_operation_name=schematize_database_operation("sqlite.query", database_provider="sqlite"), + trace_fetch_methods=asbool(os.getenv("DD_SQLITE_TRACE_FETCH_METHODS", default=False)), + ), +) + + +def get_version(): + # type: () -> str + return sqlite3.sqlite_version + + +def patch(): + wrapped = wrapt.FunctionWrapper(_connect, traced_connect) + + sqlite3.connect = wrapped + sqlite3.dbapi2.connect = wrapped + + +def unpatch(): + sqlite3.connect = _connect + sqlite3.dbapi2.connect = _connect + + +def traced_connect(func, _, args, kwargs): + conn = func(*args, **kwargs) + return patch_conn(conn) + + +def patch_conn(conn): + wrapped = TracedSQLite(conn) + Pin(tags={db.SYSTEM: "sqlite"}).onto(wrapped) + return wrapped + + +class TracedSQLiteCursor(TracedCursor): + def executemany(self, *args, **kwargs): + # DEV: SQLite3 Cursor.execute always returns back the cursor instance + super(TracedSQLiteCursor, self).executemany(*args, **kwargs) + return self + + def execute(self, *args, **kwargs): + # DEV: SQLite3 Cursor.execute always returns back the cursor instance + super(TracedSQLiteCursor, self).execute(*args, **kwargs) + return self + + +class TracedSQLiteFetchCursor(TracedSQLiteCursor, FetchTracedCursor): + pass + + +class TracedSQLite(TracedConnection): + def __init__(self, conn, pin=None, cursor_cls=None): + if not cursor_cls: + # Do not trace `fetch*` methods by default + cursor_cls = TracedSQLiteFetchCursor if config.sqlite.trace_fetch_methods else TracedSQLiteCursor + + super(TracedSQLite, self).__init__(conn, pin=pin, cfg=config.sqlite, cursor_cls=cursor_cls) + + def execute(self, *args, **kwargs): + # sqlite has a few extra sugar functions + return self.cursor().execute(*args, **kwargs) + + # backup was added in Python 3.7 + if sys.version_info >= (3, 7, 0): + + def backup(self, target, *args, **kwargs): + # sqlite3 checks the type of `target`, it cannot be a wrapped connection + # https://github.com/python/cpython/blob/4652093e1b816b78e9a585d671a807ce66427417/Modules/_sqlite/connection.c#L1897-L1899 + if isinstance(target, TracedConnection): + target = target.__wrapped__ + return self.__wrapped__.backup(target, *args, **kwargs) diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/__init__.py new file mode 100644 index 0000000..ac0f347 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/__init__.py @@ -0,0 +1,86 @@ +""" +The Starlette integration will trace requests to and from Starlette. + + +Enabling +~~~~~~~~ + +The starlette integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + from starlette.applications import Starlette + + patch(starlette=True) + app = Starlette() + + +On Python 3.6 and below, you must enable the legacy ``AsyncioContextProvider`` before using the middleware:: + + from ddtrace.contrib.asyncio.provider import AsyncioContextProvider + from ddtrace import tracer # Or whichever tracer instance you plan to use + tracer.configure(context_provider=AsyncioContextProvider()) + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.starlette['distributed_tracing'] + + Whether to parse distributed tracing headers from requests received by your Starlette app. + + Can also be enabled with the ``DD_STARLETTE_DISTRIBUTED_TRACING`` environment variable. + + Default: ``True`` + +.. py:data:: ddtrace.config.starlette['analytics_enabled'] + + Whether to analyze spans for starlette in App Analytics. + + Can also be enabled with the ``DD_STARLETTE_ANALYTICS_ENABLED`` environment variable. + + Default: ``None`` + +.. py:data:: ddtrace.config.starlette['service_name'] + + The service name reported for your starlette app. + + Can also be configured via the ``DD_SERVICE`` environment variable. + + Default: ``'starlette'`` + +.. py:data:: ddtrace.config.starlette['request_span_name'] + + The span name for a starlette request. 
+ + Default: ``'starlette.request'`` + + +Example:: + + from ddtrace import config + + # Enable distributed tracing + config.starlette['distributed_tracing'] = True + + # Override service name + config.starlette['service_name'] = 'custom-service-name' + + # Override request span name + config.starlette['request_span_name'] = 'custom-request-span-name' + +""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["starlette"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/patch.py new file mode 100644 index 0000000..a9dfccc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/starlette/patch.py @@ -0,0 +1,184 @@ +import inspect +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 + +import starlette +from starlette import requests as starlette_requests +from starlette.concurrency import run_in_threadpool +from starlette.middleware import Middleware + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib.asgi.middleware import TraceMiddleware +from ddtrace.ext import http +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils import set_argument_value +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.span import Span # noqa:F401 +from ddtrace.vendor.wrapt import ObjectProxy +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...internal import core +from .. 
import trace_utils +from ..trace_utils import with_traced_module + + +log = get_logger(__name__) + +config._add( + "starlette", + dict( + _default_service=schematize_service_name("starlette"), + request_span_name="starlette.request", + distributed_tracing=True, + ), +) + + +def get_version(): + # type: () -> str + return getattr(starlette, "__version__", "") + + +def traced_init(wrapped, instance, args, kwargs): + mw = kwargs.pop("middleware", []) + mw.insert(0, Middleware(TraceMiddleware, integration_config=config.starlette)) + kwargs.update({"middleware": mw}) + + wrapped(*args, **kwargs) + + +def patch(): + if getattr(starlette, "_datadog_patch", False): + return + + starlette._datadog_patch = True + + _w("starlette.applications", "Starlette.__init__", traced_init) + Pin().onto(starlette) + + # We need to check that Fastapi instrumentation hasn't already patched these + if not isinstance(starlette.routing.Route.handle, ObjectProxy): + _w("starlette.routing", "Route.handle", traced_handler) + if not isinstance(starlette.routing.Mount.handle, ObjectProxy): + _w("starlette.routing", "Mount.handle", traced_handler) + + if not isinstance(starlette.background.BackgroundTasks.add_task, ObjectProxy): + _w("starlette.background", "BackgroundTasks.add_task", _trace_background_tasks(starlette)) + + +def unpatch(): + if not getattr(starlette, "_datadog_patch", False): + return + + starlette._datadog_patch = False + + _u(starlette.applications.Starlette, "__init__") + + # We need to check that Fastapi instrumentation hasn't already unpatched these + if isinstance(starlette.routing.Route.handle, ObjectProxy): + _u(starlette.routing.Route, "handle") + + if isinstance(starlette.routing.Mount.handle, ObjectProxy): + _u(starlette.routing.Mount, "handle") + + if isinstance(starlette.background.BackgroundTasks.add_task, ObjectProxy): + _u(starlette.background.BackgroundTasks, "add_task") + + +def traced_handler(wrapped, instance, args, kwargs): + # Since handle can be called multiple times for one request, we take the path of each instance + # Then combine them at the end to get the correct resource names + scope = get_argument_value(args, kwargs, 0, "scope") # type: Optional[Dict[str, Any]] + if not scope: + return wrapped(*args, **kwargs) + + # Our ASGI TraceMiddleware has not been called, skip since + # we won't have a request span to attach this information onto + # DEV: This can happen if patching happens after the app has been created + if "datadog" not in scope: + log.warning("datadog context not present in ASGI request scope, trace middleware may be missing") + return wrapped(*args, **kwargs) + + # Add the path to the resource_paths list + if "resource_paths" not in scope["datadog"]: + scope["datadog"]["resource_paths"] = [instance.path] + else: + scope["datadog"]["resource_paths"].append(instance.path) + + request_spans = scope["datadog"].get("request_spans", []) # type: List[Span] + resource_paths = scope["datadog"].get("resource_paths", []) # type: List[str] + + if len(request_spans) == len(resource_paths): + # Iterate through the request_spans and assign the correct resource name to each + for index, span in enumerate(request_spans): + # We want to set the full resource name on the first request span + # And one part less of the full resource name for each proceeding request span + # e.g. 
full path is /subapp/hello/{name}, first request span gets that as resource name + # Second request span gets /hello/{name} + path = "".join(resource_paths[index:]) + + if scope.get("method"): + span.resource = "{} {}".format(scope["method"], path) + else: + span.resource = path + # route should only be in the root span + if index == 0: + span.set_tag_str(http.ROUTE, path) + # at least always update the root asgi span resource name request_spans[0].resource = "".join(resource_paths) + elif request_spans and resource_paths: + route = "".join(resource_paths) + if scope.get("method"): + request_spans[0].resource = "{} {}".format(scope["method"], route) + else: + request_spans[0].resource = route + request_spans[0].set_tag_str(http.ROUTE, route) + else: + log.debug( + "unable to update the request span resource name, request_spans:%r, resource_paths:%r", + request_spans, + resource_paths, + ) + request_cookies = "" + for name, value in scope.get("headers"): + if name == b"cookie": + request_cookies = value.decode("utf-8", errors="ignore") + break + if request_spans: + trace_utils.set_http_meta( + request_spans[0], + "starlette", + request_path_params=scope.get("path_params"), + request_cookies=starlette_requests.cookie_parser(request_cookies), + route=request_spans[0].get_tag(http.ROUTE), + ) + core.dispatch("asgi.start_request", ("starlette",)) + if core.get_item(HTTP_REQUEST_BLOCKED): + raise trace_utils.InterruptException("starlette") + + return wrapped(*args, **kwargs) + + +@with_traced_module +def _trace_background_tasks(module, pin, wrapped, instance, args, kwargs): + task = get_argument_value(args, kwargs, 0, "func") + current_span = pin.tracer.current_span() + + async def traced_task(*args, **kwargs): + with pin.tracer.start_span( + f"{module.__name__}.background_task", resource=task.__name__, child_of=None, activate=True + ) as span: + if current_span: + span.link_span(current_span.context) + if inspect.iscoroutinefunction(task): + await task(*args, **kwargs) + else: + await run_in_threadpool(task, *args, **kwargs) + + args, kwargs = set_argument_value(args, kwargs, 0, "func", traced_task) + wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/__init__.py new file mode 100644 index 0000000..e7e14e5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/__init__.py @@ -0,0 +1,52 @@ +""" +Datadog APM traces can be integrated with the logs produced by structlog by: + +1. Having ``ddtrace`` patch the ``structlog`` module. This will add a +processor in the beginning of the chain that adds trace attributes +to the event_dict + +2. For log correlation between APM and logs, the easiest format is via JSON +so that no further configuration needs to be done in the Datadog UI assuming +that the Datadog trace values are at the top level of the JSON + +Enabling +-------- + +Patch ``structlog`` +~~~~~~~~~~~~~~~~~~~ + +If using :ref:`ddtrace-run` then set the environment variable ``DD_LOGS_INJECTION=true``. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(structlog=True) + +Proper Formatting +~~~~~~~~~~~~~~~~~ + +The trace attributes are injected via a processor in the processor block of the configuration +whether that be the default processor chain or a user-configured chain. 
+ +An example of a configuration that outputs to a file that can be injected into is as below:: + + structlog.configure( + processors=[structlog.processors.JSONRenderer()], + logger_factory=structlog.WriteLoggerFactory(file=Path("app").with_suffix(".log").open("wt"))) + +For more information, please see the attached guide for the Datadog Logging Product: +https://docs.datadoghq.com/logs/log_collection/python/ +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["structlog"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/patch.py new file mode 100644 index 0000000..64b3f7a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/structlog/patch.py @@ -0,0 +1,90 @@ +import structlog + +import ddtrace +from ddtrace import config + +from ..logging.constants import RECORD_ATTR_ENV +from ..logging.constants import RECORD_ATTR_SERVICE +from ..logging.constants import RECORD_ATTR_SPAN_ID +from ..logging.constants import RECORD_ATTR_TRACE_ID +from ..logging.constants import RECORD_ATTR_VALUE_EMPTY +from ..logging.constants import RECORD_ATTR_VALUE_ZERO +from ..logging.constants import RECORD_ATTR_VERSION +from ..trace_utils import unwrap as _u +from ..trace_utils import wrap as _w + + +config._add( + "structlog", + dict(), +) + + +def get_version(): + # type: () -> str + return getattr(structlog, "__version__", "") + + +def _tracer_injection(_, __, event_dict): + span = ddtrace.tracer.current_span() + + trace_id = None + span_id = None + if span: + span_id = span.span_id + trace_id = span.trace_id + if config._128_bit_trace_id_enabled and not config._128_bit_trace_id_logging_enabled: + trace_id = span._trace_id_64bits + + # add ids to structlog event dictionary + event_dict[RECORD_ATTR_TRACE_ID] = str(trace_id or RECORD_ATTR_VALUE_ZERO) + event_dict[RECORD_ATTR_SPAN_ID] = str(span_id or RECORD_ATTR_VALUE_ZERO) + # add the env, service, and version configured for the tracer + event_dict[RECORD_ATTR_ENV] = config.env or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_SERVICE] = config.service or RECORD_ATTR_VALUE_EMPTY + event_dict[RECORD_ATTR_VERSION] = config.version or RECORD_ATTR_VALUE_EMPTY + + return event_dict + + +def _w_get_logger(func, instance, args, kwargs): + """ + Append the tracer injection processor to the ``default_processors`` list used by the logger + The ``default_processors`` list has built in defaults which protects against a user configured ``None`` value. 
+ The argument to configure ``default_processors`` accepts an iterable type: + - List: default use case which has been accounted for + - Tuple: patched via list conversion + - Set: ignored because structlog processors care about order notably the last value to be a Renderer + - Dict: because keys are ignored, this essentially becomes a List + """ + + dd_processor = [_tracer_injection] + structlog._config._CONFIG.default_processors = dd_processor + list(structlog._config._CONFIG.default_processors) + return func(*args, **kwargs) + + +def patch(): + """ + Patch ``structlog`` module for injection of tracer information + by appending a processor before creating a logger via ``structlog.get_logger`` + """ + if getattr(structlog, "_datadog_patch", False): + return + structlog._datadog_patch = True + + if hasattr(structlog, "get_logger"): + _w(structlog, "get_logger", _w_get_logger) + + # getLogger is an alias for get_logger + if hasattr(structlog, "getLogger"): + _w(structlog, "getLogger", _w_get_logger) + + +def unpatch(): + if getattr(structlog, "_datadog_patch", False): + structlog._datadog_patch = False + + if hasattr(structlog, "get_logger"): + _u(structlog, "get_logger") + if hasattr(structlog, "getLogger"): + _u(structlog, "getLogger") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/__init__.py new file mode 100644 index 0000000..83be0b3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/__init__.py @@ -0,0 +1,33 @@ +""" +The subprocess integration will add tracing to all subprocess executions +started in your application. It will be automatically enabled if Application +Security is enabled with:: + + DD_APPSEC_ENABLED=true + + +Configuration +~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.subprocess['sensitive_wildcards'] + + Comma separated list of fnmatch-style wildcards Subprocess parameters matching these + wildcards will be scrubbed and replaced by a "?". + + Default: ``None`` for the config value but note that there are some wildcards always + enabled in this integration that you can check on + ```ddtrace.contrib.subprocess.constants.SENSITIVE_WORDS_WILDCARDS```. 
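As a rough illustration of the scrubbing rule: any argument matching a sensitive wildcard is replaced with `"?"`. This is a simplified sketch, not the integration's `SubprocessCmdLine.scrub_arguments` logic (which additionally scrubs the value following a sensitive flag), and the wildcard list below is hypothetical; in the integration the built-in `SENSITIVE_WORDS_WILDCARDS` are combined with any wildcards supplied via `DD_SUBPROCESS_SENSITIVE_WILDCARDS`.

```python
# Simplified sketch of sensitive-argument scrubbing with a hypothetical
# wildcard list; the real integration also scrubs the value after a flag.
from fnmatch import fnmatch

SENSITIVE = ["*password*", "*api_key*", "*secret*"]


def scrub(args):
    return ["?" if any(fnmatch(arg, pattern) for pattern in SENSITIVE) else arg
            for arg in args]


print(scrub(["mysql", "--user", "admin", "--password=hunter2"]))
# -> ['mysql', '--user', 'admin', '?']
```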
+""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["os", "subprocess"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/constants.py new file mode 100644 index 0000000..294ef01 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/constants.py @@ -0,0 +1,18 @@ +from ddtrace.appsec._constants import Constant_Class + + +class COMMANDS(metaclass=Constant_Class): + """ + string names used by the library for tagging data for subprocess executions in context or span + """ + + SPAN_NAME = "command_execution" + COMPONENT = "component" + SHELL = "cmd.shell" + EXEC = "cmd.exec" + TRUNCATED = "cmd.truncated" + EXIT_CODE = "cmd.exit_code" + CTX_SUBP_IS_SHELL = "subprocess_popen_is_shell" + CTX_SUBP_TRUNCATED = "subprocess_popen_truncated" + CTX_SUBP_LINE = "subprocess_popen_line" + CTX_SUBP_BINARY = "subprocess_popen_binary" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/patch.py new file mode 100644 index 0000000..8d934a0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/subprocess/patch.py @@ -0,0 +1,410 @@ +import collections +from fnmatch import fnmatch +import os +import re +import shlex +import subprocess # nosec +from threading import RLock +from typing import Deque # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 +from typing import cast # noqa:F401 + +import attr + +from ddtrace import Pin +from ddtrace import config +from ddtrace.contrib import trace_utils +from ddtrace.contrib.subprocess.constants import COMMANDS +from ddtrace.ext import SpanTypes +from ddtrace.internal import core +from ddtrace.internal.compat import shjoin +from ddtrace.internal.logger import get_logger +from ddtrace.settings.asm import config as asm_config + + +log = get_logger(__name__) + +config._add( + "subprocess", + dict(sensitive_wildcards=os.getenv("DD_SUBPROCESS_SENSITIVE_WILDCARDS", default="").split(",")), +) + + +def get_version(): + # type: () -> str + return "" + + +def patch(): + # type: () -> List[str] + patched = [] # type: List[str] + if not asm_config._asm_enabled: + return patched + + import os + + if not getattr(os, "_datadog_patch", False): + Pin().onto(os) + trace_utils.wrap(os, "system", _traced_ossystem(os)) + trace_utils.wrap(os, "fork", _traced_fork(os)) + + # all os.spawn* variants eventually use this one: + trace_utils.wrap(os, "_spawnvef", _traced_osspawn(os)) + + patched.append("os") + + if not getattr(subprocess, "_datadog_patch", False): + Pin().onto(subprocess) + # We store the parameters on __init__ in the context and set the tags on wait + # (where all the Popen objects eventually arrive, unless killed before it) + trace_utils.wrap(subprocess, "Popen.__init__", _traced_subprocess_init(subprocess)) + trace_utils.wrap(subprocess, "Popen.wait", _traced_subprocess_wait(subprocess)) + + os._datadog_patch = True + subprocess._datadog_patch = True + patched.append("subprocess") + + return patched + + +@attr.s(eq=False) +class SubprocessCmdLineCacheEntry(object): + binary = 
attr.ib(type=str, default=None) + arguments = attr.ib(type=List, default=None) + truncated = attr.ib(type=bool, default=False) + env_vars = attr.ib(type=List, default=None) + as_list = attr.ib(type=List, default=None) + as_string = attr.ib(type=str, default=None) + + +class SubprocessCmdLine(object): + # This catches the computed values into a SubprocessCmdLineCacheEntry object + _CACHE = {} # type: Dict[str, SubprocessCmdLineCacheEntry] + _CACHE_DEQUE = collections.deque() # type: Deque[str] + _CACHE_MAXSIZE = 32 + _CACHE_LOCK = RLock() + + @classmethod + def _add_new_cache_entry(cls, key, env_vars, binary, arguments, truncated): + if key in cls._CACHE: + return + + cache_entry = SubprocessCmdLineCacheEntry() + cache_entry.binary = binary + cache_entry.arguments = arguments + cache_entry.truncated = truncated + cache_entry.env_vars = env_vars + + with cls._CACHE_LOCK: + if len(cls._CACHE_DEQUE) >= cls._CACHE_MAXSIZE: + # If the cache is full, remove the oldest entry + last_cache_key = cls._CACHE_DEQUE[-1] + del cls._CACHE[last_cache_key] + cls._CACHE_DEQUE.pop() + + cls._CACHE[key] = cache_entry + cls._CACHE_DEQUE.appendleft(key) + + return cache_entry + + @classmethod + def _clear_cache(cls): + with cls._CACHE_LOCK: + cls._CACHE_DEQUE.clear() + cls._CACHE.clear() + + TRUNCATE_LIMIT = 4 * 1024 + + ENV_VARS_ALLOWLIST = {"LD_PRELOAD", "LD_LIBRARY_PATH", "PATH"} + + BINARIES_DENYLIST = { + "md5", + } + + SENSITIVE_WORDS_WILDCARDS = [ + "*password*", + "*passwd*", + "*mysql_pwd*", + "*access_token*", + "*auth_token*", + "*api_key*", + "*apikey*", + "*secret*", + "*credentials*", + "stripetoken", + ] + _COMPILED_ENV_VAR_REGEXP = re.compile(r"\b[A-Z_]+=\w+") + + def __init__(self, shell_args, shell=False): + # type: (Union[str, List[str]], bool) -> None + cache_key = str(shell_args) + str(shell) + self._cache_entry = SubprocessCmdLine._CACHE.get(cache_key) + if self._cache_entry: + self.env_vars = self._cache_entry.env_vars + self.binary = self._cache_entry.binary + self.arguments = self._cache_entry.arguments + self.truncated = self._cache_entry.truncated + else: + self.env_vars = [] + self.binary = "" + self.arguments = [] + self.truncated = False + + if isinstance(shell_args, str): + tokens = shlex.split(shell_args) + else: + tokens = cast(List[str], shell_args) + + # Extract previous environment variables, scrubbing all the ones not + # in ENV_VARS_ALLOWLIST + if shell: + self.scrub_env_vars(tokens) + else: + self.binary = tokens[0] + self.arguments = tokens[1:] + + self.arguments = list(self.arguments) if isinstance(self.arguments, tuple) else self.arguments + self.scrub_arguments() + + # Create a new cache entry to store the computed values except as_list + # and as_string that are computed and stored lazily + self._cache_entry = SubprocessCmdLine._add_new_cache_entry( + cache_key, self.env_vars, self.binary, self.arguments, self.truncated + ) + + def scrub_env_vars(self, tokens): + for idx, token in enumerate(tokens): + if re.match(self._COMPILED_ENV_VAR_REGEXP, token): + var, value = token.split("=") + if var in self.ENV_VARS_ALLOWLIST: + self.env_vars.append(token) + else: + # scrub the value + self.env_vars.append("%s=?" % var) + else: + # Next after vars are the binary and arguments + try: + self.binary = tokens[idx] + self.arguments = tokens[idx + 1 :] + except IndexError: + pass + break + + def scrub_arguments(self): + # if the binary is in the denylist, scrub all arguments + if self.binary.lower() in self.BINARIES_DENYLIST: + self.arguments = ["?" 
for _ in self.arguments] + return + + param_prefixes = ("-", "/") + # Scrub case by case + new_args = [] + deque_args = collections.deque(self.arguments) + while deque_args: + current = deque_args[0] + for sensitive in self.SENSITIVE_WORDS_WILDCARDS + config.subprocess.sensitive_wildcards: + if fnmatch(current, sensitive): + is_sensitive = True + break + else: + is_sensitive = False + + if not is_sensitive: + new_args.append(current) + deque_args.popleft() + continue + + # sensitive + if current[0] not in param_prefixes: + # potentially not argument, scrub it anyway if it matches a sensitive word + new_args.append("?") + deque_args.popleft() + continue + + # potential --argument + if "=" in current: + # contains "=" like in "--password=foo", scrub it just in case + new_args.append("?") + deque_args.popleft() + continue + + try: + if deque_args[1][0] in param_prefixes: + # Next is another option scrub only the current one + new_args.append("?") + deque_args.popleft() + continue + else: + # Next is not an option but potentially a value, scrub it instead + new_args.extend([current, "?"]) + deque_args.popleft() + deque_args.popleft() + continue + except IndexError: + # No next argument, scrub this one just in case since it's sensitive + new_args.append("?") + deque_args.popleft() + + self.arguments = new_args + + def truncate_string(self, str_): + # type: (str) -> str + oversize = len(str_) - self.TRUNCATE_LIMIT + + if oversize <= 0: + self.truncated = False + return str_ + + self.truncated = True + + msg = ' "4kB argument truncated by %d characters"' % oversize + return str_[0 : -(oversize + len(msg))] + msg + + def _as_list_and_string(self): + # type: () -> Tuple[list[str], str] + + total_list = self.env_vars + [self.binary] + self.arguments + truncated_str = self.truncate_string(shjoin(total_list)) + truncated_list = shlex.split(truncated_str) + return truncated_list, truncated_str + + def as_list(self): + if self._cache_entry.as_list is not None: + return self._cache_entry.as_list + + list_res, str_res = self._as_list_and_string() + self._cache_entry.as_list = list_res + self._cache_entry.as_string = str_res + return list_res + + def as_string(self): + if self._cache_entry.as_string is not None: + return self._cache_entry.as_string + + list_res, str_res = self._as_list_and_string() + self._cache_entry.as_list = list_res + self._cache_entry.as_string = str_res + return str_res + + +def unpatch(): + # type: () -> None + trace_utils.unwrap(os, "system") + trace_utils.unwrap(os, "_spawnvef") + trace_utils.unwrap(subprocess.Popen, "__init__") + trace_utils.unwrap(subprocess.Popen, "wait") + + SubprocessCmdLine._clear_cache() + + os._datadog_patch = False + subprocess._datadog_patch = False + + +@trace_utils.with_traced_module +def _traced_ossystem(module, pin, wrapped, instance, args, kwargs): + try: + shellcmd = SubprocessCmdLine(args[0], shell=True) # nosec + + with pin.tracer.trace(COMMANDS.SPAN_NAME, resource=shellcmd.binary, span_type=SpanTypes.SYSTEM) as span: + span.set_tag_str(COMMANDS.SHELL, shellcmd.as_string()) + if shellcmd.truncated: + span.set_tag_str(COMMANDS.TRUNCATED, "yes") + span.set_tag_str(COMMANDS.COMPONENT, "os") + ret = wrapped(*args, **kwargs) + span.set_tag_str(COMMANDS.EXIT_CODE, str(ret)) + return ret + except: # noqa:E722 + log.debug( + "Could not trace subprocess execution for os.system: [args: %s kwargs: %s]", args, kwargs, exc_info=True + ) + return wrapped(*args, **kwargs) + + +@trace_utils.with_traced_module +def _traced_fork(module, pin, wrapped, instance, 
args, kwargs): + try: + with pin.tracer.trace(COMMANDS.SPAN_NAME, resource="fork", span_type=SpanTypes.SYSTEM) as span: + span.set_tag(COMMANDS.EXEC, ["os.fork"]) + span.set_tag_str(COMMANDS.COMPONENT, "os") + ret = wrapped(*args, **kwargs) + return ret + except: # noqa:E722 + log.debug( + "Could not trace subprocess execution for os.fork*: [args: %s kwargs: %s]", args, kwargs, exc_info=True + ) + return wrapped(*args, **kwargs) + + +@trace_utils.with_traced_module +def _traced_osspawn(module, pin, wrapped, instance, args, kwargs): + try: + mode, file, func_args, _, _ = args + shellcmd = SubprocessCmdLine(func_args, shell=False) + + with pin.tracer.trace(COMMANDS.SPAN_NAME, resource=shellcmd.binary, span_type=SpanTypes.SYSTEM) as span: + span.set_tag(COMMANDS.EXEC, shellcmd.as_list()) + if shellcmd.truncated: + span.set_tag_str(COMMANDS.TRUNCATED, "true") + span.set_tag_str(COMMANDS.COMPONENT, "os") + + if mode == os.P_WAIT: + ret = wrapped(*args, **kwargs) + span.set_tag_str(COMMANDS.EXIT_CODE, str(ret)) + return ret + except: # noqa:E722 + log.debug( + "Could not trace subprocess execution for os.spawn*: [args: %s kwargs: %s]", args, kwargs, exc_info=True + ) + + return wrapped(*args, **kwargs) + + +@trace_utils.with_traced_module +def _traced_subprocess_init(module, pin, wrapped, instance, args, kwargs): + try: + cmd_args = args[0] if len(args) else kwargs["args"] + cmd_args_list = shlex.split(cmd_args) if isinstance(cmd_args, str) else cmd_args + is_shell = kwargs.get("shell", False) + shellcmd = SubprocessCmdLine(cmd_args_list, shell=is_shell) # nosec + + with pin.tracer.trace(COMMANDS.SPAN_NAME, resource=shellcmd.binary, span_type=SpanTypes.SYSTEM): + core.set_item(COMMANDS.CTX_SUBP_IS_SHELL, is_shell) + + if shellcmd.truncated: + core.set_item(COMMANDS.CTX_SUBP_TRUNCATED, "yes") + + if is_shell: + core.set_item(COMMANDS.CTX_SUBP_LINE, shellcmd.as_string()) + else: + core.set_item(COMMANDS.CTX_SUBP_LINE, shellcmd.as_list()) + core.set_item(COMMANDS.CTX_SUBP_BINARY, shellcmd.binary) + except: # noqa:E722 + log.debug("Could not trace subprocess execution: [args: %s kwargs: %s]", args, kwargs, exc_info=True) + + return wrapped(*args, **kwargs) + + +@trace_utils.with_traced_module +def _traced_subprocess_wait(module, pin, wrapped, instance, args, kwargs): + try: + binary = core.get_item("subprocess_popen_binary") + + with pin.tracer.trace(COMMANDS.SPAN_NAME, resource=binary, span_type=SpanTypes.SYSTEM) as span: + if core.get_item(COMMANDS.CTX_SUBP_IS_SHELL): + span.set_tag_str(COMMANDS.SHELL, core.get_item(COMMANDS.CTX_SUBP_LINE)) + else: + span.set_tag(COMMANDS.EXEC, core.get_item(COMMANDS.CTX_SUBP_LINE)) + + truncated = core.get_item(COMMANDS.CTX_SUBP_TRUNCATED) + if truncated: + span.set_tag_str(COMMANDS.TRUNCATED, "yes") + span.set_tag_str(COMMANDS.COMPONENT, "subprocess") + ret = wrapped(*args, **kwargs) + span.set_tag_str(COMMANDS.EXIT_CODE, str(ret)) + return ret + except: # noqa:E722 + log.debug("Could not trace subprocess execution [args: %s kwargs: %s]", args, kwargs, exc_info=True) + return wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/__init__.py new file mode 100644 index 0000000..2fa3577 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/__init__.py @@ -0,0 +1,130 @@ +r""" +The Tornado integration traces all ``RequestHandler`` defined in a Tornado web application. 
+Auto instrumentation is available using the ``patch`` function that **must be called before** +importing the tornado library. + +**Note:** This integration requires Python 3.7 and above for Tornado 5 and 6. + +The following is an example:: + + # patch before importing tornado and concurrent.futures + from ddtrace import tracer, patch + patch(tornado=True) + + import tornado.web + import tornado.gen + import tornado.ioloop + + # create your handlers + class MainHandler(tornado.web.RequestHandler): + @tornado.gen.coroutine + def get(self): + self.write("Hello, world") + + # create your application + app = tornado.web.Application([ + (r'/', MainHandler), + ]) + + # and run it as usual + app.listen(8888) + tornado.ioloop.IOLoop.current().start() + +When any type of ``RequestHandler`` is hit, a request root span is automatically created. If +you want to trace more parts of your application, you can use the ``wrap()`` decorator and +the ``trace()`` method as usual:: + + class MainHandler(tornado.web.RequestHandler): + @tornado.gen.coroutine + def get(self): + yield self.notify() + yield self.blocking_method() + with tracer.trace('tornado.before_write') as span: + # trace more work in the handler + + @tracer.wrap('tornado.executor_handler') + @tornado.concurrent.run_on_executor + def blocking_method(self): + # do something expensive + + @tracer.wrap('tornado.notify', service='tornado-notification') + @tornado.gen.coroutine + def notify(self): + # do something + +If you are overriding the ``on_finish`` or ``log_exception`` methods on a +``RequestHandler``, you will need to call the super method to ensure the +tracer's patched methods are called:: + + class MainHandler(tornado.web.RequestHandler): + @tornado.gen.coroutine + def get(self): + self.write("Hello, world") + + def on_finish(self): + super(MainHandler, self).on_finish() + # do other clean-up + + def log_exception(self, typ, value, tb): + super(MainHandler, self).log_exception(typ, value, tb) + # do other logging + +Tornado settings can be used to change some tracing configuration, like:: + + settings = { + 'datadog_trace': { + 'default_service': 'my-tornado-app', + 'tags': {'env': 'production'}, + 'distributed_tracing': False, + 'settings': { + 'FILTERS': [ + FilterRequestsOnUrl(r'http://test\\.example\\.com'), + ], + }, + }, + } + + app = tornado.web.Application([ + (r'/', MainHandler), + ], **settings) + +The available settings are: + +* ``default_service`` (default: `tornado-web`): set the service name used by the tracer. Usually + this configuration must be updated with a meaningful name. Can also be configured via the + ``DD_SERVICE`` environment variable. +* ``tags`` (default: `{}`): set global tags that should be applied to all spans. +* ``enabled`` (default: `True`): define if the tracer is enabled or not. If set to `false`, the + code is still instrumented but no spans are sent to the APM agent. +* ``distributed_tracing`` (default: `None`): enable distributed tracing if this is called + remotely from an instrumented application. Overrides the integration config which is configured via the + ``DD_TORNADO_DISTRIBUTED_TRACING`` environment variable. + We suggest to enable it only for internal services where headers are under your control. +* ``agent_hostname`` (default: `localhost`): define the hostname of the APM agent. +* ``agent_port`` (default: `8126`): define the port of the APM agent. +* ``settings`` (default: ``{}``): Tracer extra settings used to change, for instance, the filtering behavior. 
+""" +from ...internal.utils.importlib import require_modules + + +required_modules = ["tornado"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .stack_context import TracerStackContext + from .stack_context import run_with_trace_context + + context_provider = TracerStackContext() + + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = [ + "patch", + "unpatch", + "context_provider", + "run_with_trace_context", + "TracerStackContext", + "get_version", + ] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/application.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/application.py new file mode 100644 index 0000000..9e36eeb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/application.py @@ -0,0 +1,59 @@ +from tornado import template + +import ddtrace +from ddtrace import config +from ddtrace.internal.schema import schematize_service_name + +from . import context_provider +from . import decorators +from .constants import CONFIG_KEY + + +def tracer_config(__init__, app, args, kwargs): + """ + Wrap Tornado web application so that we can configure services info and + tracing settings after the initialization. + """ + # call the Application constructor + __init__(*args, **kwargs) + + # default settings + settings = { + "tracer": ddtrace.tracer, + "default_service": schematize_service_name(config._get_service("tornado-web")), + "distributed_tracing": None, + "analytics_enabled": None, + } + + # update defaults with users settings + user_settings = app.settings.get(CONFIG_KEY) + if user_settings: + settings.update(user_settings) + + app.settings[CONFIG_KEY] = settings + tracer = settings["tracer"] + service = settings["default_service"] + + # extract extra settings + extra_settings = settings.get("settings", {}) + + # the tracer must use the right Context propagation and wrap executor; + # this action is done twice because the patch() method uses the + # global tracer while here we can have a different instance (even if + # this is not usual). + tracer.configure( + context_provider=context_provider, + wrap_executor=decorators.wrap_executor, + enabled=settings.get("enabled", None), + hostname=settings.get("agent_hostname", None), + port=settings.get("agent_port", None), + settings=extra_settings, + ) + + # set global tags if any + tags = settings.get("tags", None) + if tags: + tracer.set_tags(tags) + + # configure the PIN object for template rendering + ddtrace.Pin(service=service, tracer=tracer).onto(template) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/constants.py new file mode 100644 index 0000000..18a3a6e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/constants.py @@ -0,0 +1,7 @@ +""" +This module defines Tornado settings that are shared between +integration modules. 
+""" +CONFIG_KEY = "datadog_trace" +REQUEST_SPAN_KEY = "__datadog_request_span" +FUTURE_SPAN_KEY = "__datadog_future_span" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/decorators.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/decorators.py new file mode 100644 index 0000000..8396962 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/decorators.py @@ -0,0 +1,82 @@ +import sys + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from .constants import FUTURE_SPAN_KEY + + +def _finish_span(future): + """ + Finish the span if it's attached to the given ``Future`` object. + This method is a Tornado callback used to close a decorated function + executed as a coroutine or as a synchronous function in another thread. + """ + span = getattr(future, FUTURE_SPAN_KEY, None) + + if span: + # `tornado.concurrent.Future` in PY3 tornado>=4.0,<5 has `exc_info` + if callable(getattr(future, "exc_info", None)): + # retrieve the exception from the coroutine object + exc_info = future.exc_info() + if exc_info: + span.set_exc_info(*exc_info) + elif callable(getattr(future, "exception", None)): + # in tornado>=4.0,<5 with PY2 `concurrent.futures._base.Future` + # `exception_info()` returns `(exception, traceback)` but + # `exception()` only returns the first element in the tuple + if callable(getattr(future, "exception_info", None)): + exc, exc_tb = future.exception_info() + if exc and exc_tb: + exc_type = type(exc) + span.set_exc_info(exc_type, exc, exc_tb) + # in tornado>=5 with PY3, `tornado.concurrent.Future` is alias to + # `asyncio.Future` in PY3 `exc_info` not available, instead use + # exception method + else: + exc = future.exception() + if exc: + # we expect exception object to have a traceback attached + if hasattr(exc, "__traceback__"): + exc_type = type(exc) + exc_tb = getattr(exc, "__traceback__", None) + span.set_exc_info(exc_type, exc, exc_tb) + # if all else fails use currently handled exception for + # current thread + else: + span.set_exc_info(*sys.exc_info()) + + span.finish() + + +def wrap_executor(tracer, fn, args, kwargs, span_name, service=None, resource=None, span_type=None): + """ + Wrap executor function used to change the default behavior of + ``Tracer.wrap()`` method. A decorated Tornado function can be + a regular function or a coroutine; if a coroutine is decorated, a + span is attached to the returned ``Future`` and a callback is set + so that it will close the span when the ``Future`` is done. 
+ """ + span = tracer.trace(span_name, service=service, resource=resource, span_type=span_type) + + span.set_tag_str(COMPONENT, config.tornado.integration_name) + + # catch standard exceptions raised in synchronous executions + try: + future = fn(*args, **kwargs) + + # duck-typing: if it has `add_done_callback` it's a Future + # object whatever is the underlying implementation + if callable(getattr(future, "add_done_callback", None)): + setattr(future, FUTURE_SPAN_KEY, span) + future.add_done_callback(_finish_span) + else: + # we don't have a future so the `future` variable + # holds the result of the function + span.finish() + except Exception: + span.set_traceback() + span.finish() + raise + + return future diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/handlers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/handlers.py new file mode 100644 index 0000000..04877dd --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/handlers.py @@ -0,0 +1,148 @@ +from collections import deque + +from tornado.web import HTTPError + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.schema import schematize_url_operation +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from .. import trace_utils +from ..trace_utils import set_http_meta +from .constants import CONFIG_KEY +from .constants import REQUEST_SPAN_KEY +from .stack_context import TracerStackContext + + +def execute(func, handler, args, kwargs): + """ + Wrap the handler execute method so that the entire request is within the same + ``TracerStackContext``. This simplifies users code when the automatic ``Context`` + retrieval is used via ``Tracer.trace()`` method. 
+ """ + # retrieve tracing settings + settings = handler.settings[CONFIG_KEY] + tracer = settings["tracer"] + service = settings["default_service"] + distributed_tracing = settings["distributed_tracing"] + + with TracerStackContext(): + trace_utils.activate_distributed_headers( + tracer, int_config=config.tornado, request_headers=handler.request.headers, override=distributed_tracing + ) + + # store the request span in the request so that it can be used later + request_span = tracer.trace( + schematize_url_operation("tornado.request", protocol="http", direction=SpanDirection.INBOUND), + service=service, + span_type=SpanTypes.WEB, + ) + + request_span.set_tag_str(COMPONENT, config.tornado.integration_name) + + # set span.kind to the type of operation being performed + request_span.set_tag_str(SPAN_KIND, SpanKind.SERVER) + + request_span.set_tag(SPAN_MEASURED_KEY) + # set analytics sample rate + # DEV: tornado is special case maintains separate configuration from config api + analytics_enabled = settings["analytics_enabled"] + if (config.analytics_enabled and analytics_enabled is not False) or analytics_enabled is True: + request_span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, settings.get("analytics_sample_rate", True)) + + http_route = _find_route(handler.application.default_router.rules, handler.request) + request_span.set_tag_str("http.route", http_route) + setattr(handler.request, REQUEST_SPAN_KEY, request_span) + + return func(*args, **kwargs) + + +def _find_route(initial_rule_set, request): + """ + We have to walk through the same chain of rules that tornado does to find a matching rule. + """ + rules = deque() + + for rule in initial_rule_set: + rules.append(rule) + + while len(rules) > 0: + rule = rules.popleft() + if rule.matcher.match(request) is not None: + if hasattr(rule.matcher, "_path"): + return rule.matcher._path + elif hasattr(rule.target, "rules"): + rules.extendleft(rule.target.rules) + + return "^$" + + +def on_finish(func, handler, args, kwargs): + """ + Wrap the ``RequestHandler.on_finish`` method. This is the last executed method + after the response has been sent, and it's used to retrieve and close the + current request span (if available). + """ + request = handler.request + request_span = getattr(request, REQUEST_SPAN_KEY, None) + if request_span: + # use the class name as a resource; if an handler is not available, the + # default handler class will be used so we don't pollute the resource + # space here + klass = handler.__class__ + request_span.resource = "{}.{}".format(klass.__module__, klass.__name__) + set_http_meta( + request_span, + config.tornado, + method=request.method, + url=request.full_url().rsplit("?", 1)[0], + status_code=handler.get_status(), + query=request.query, + ) + request_span.finish() + + return func(*args, **kwargs) + + +def log_exception(func, handler, args, kwargs): + """ + Wrap the ``RequestHandler.log_exception``. This method is called when an + Exception is not handled in the user code. In this case, we save the exception + in the current active span. If the Tornado ``Finish`` exception is raised, this wrapper + will not be called because ``Finish`` is not an exception. 
+ """ + # safe-guard: expected arguments -> log_exception(self, typ, value, tb) + try: + value = get_argument_value(args, kwargs, 1, "value") + except ArgumentError: + value = None + + if not value: + return func(*args, **kwargs) + + # retrieve the current span + tracer = handler.settings[CONFIG_KEY]["tracer"] + current_span = tracer.current_span() + + if not current_span: + return func(*args, **kwargs) + + if isinstance(value, HTTPError): + # Tornado uses HTTPError exceptions to stop and return a status code that + # is not a 2xx. In this case we want to check the status code to be sure that + # only 5xx are traced as errors, while any other HTTPError exception is handled as + # usual. + if config.http_server.is_error_code(value.status_code): + current_span.set_exc_info(*args) + else: + # any other uncaught exception should be reported as error + current_span.set_exc_info(*args) + + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/patch.py new file mode 100644 index 0000000..7e7a1b2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/patch.py @@ -0,0 +1,72 @@ +import os + +import tornado + +import ddtrace +from ddtrace import config +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...internal.utils.formats import asbool +from ...internal.utils.wrappers import unwrap as _u +from . import application +from . import context_provider +from . import decorators +from . import handlers +from . import template + + +config._add( + "tornado", + dict( + distributed_tracing=asbool(os.getenv("DD_TORNADO_DISTRIBUTED_TRACING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(tornado, "version", "") + + +def patch(): + """ + Tracing function that patches the Tornado web application so that it will be + traced using the given ``tracer``. + """ + # patch only once + if getattr(tornado, "__datadog_patch", False): + return + tornado.__datadog_patch = True + + # patch Application to initialize properly our settings and tracer + _w("tornado.web", "Application.__init__", application.tracer_config) + + # patch RequestHandler to trace all Tornado handlers + _w("tornado.web", "RequestHandler._execute", handlers.execute) + _w("tornado.web", "RequestHandler.on_finish", handlers.on_finish) + _w("tornado.web", "RequestHandler.log_exception", handlers.log_exception) + + # patch Template system + _w("tornado.template", "Template.generate", template.generate) + + # configure the global tracer + ddtrace.tracer.configure( + context_provider=context_provider, + wrap_executor=decorators.wrap_executor, + ) + + +def unpatch(): + """ + Remove all tracing functions in a Tornado web application. 
+ """ + if not getattr(tornado, "__datadog_patch", False): + return + tornado.__datadog_patch = False + + # unpatch Tornado + _u(tornado.web.RequestHandler, "_execute") + _u(tornado.web.RequestHandler, "on_finish") + _u(tornado.web.RequestHandler, "log_exception") + _u(tornado.web.Application, "__init__") + _u(tornado.template.Template, "generate") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/stack_context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/stack_context.py new file mode 100644 index 0000000..e6d688e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/stack_context.py @@ -0,0 +1,144 @@ +import tornado +from tornado.ioloop import IOLoop + +from ...provider import BaseContextProvider +from ...provider import DefaultContextProvider +from ...span import Span + + +# tornado.stack_context deprecated in Tornado 5 removed in Tornado 6 +# instead use DefaultContextProvider with ContextVarContextManager for asyncio +_USE_STACK_CONTEXT = not tornado.version_info >= (5, 0) + +if _USE_STACK_CONTEXT: + from tornado.stack_context import StackContextInconsistentError + from tornado.stack_context import _state + + class TracerStackContext(DefaultContextProvider): + """ + A context manager that manages ``Context`` instances in a thread-local state. + It must be used every time a Tornado's handler or coroutine is used within a + tracing Context. It is meant to work like a traditional ``StackContext``, + preserving the state across asynchronous calls. + + Every time a new manager is initialized, a new ``Context()`` is created for + this execution flow. A context created in a ``TracerStackContext`` is not + shared between different threads. + + This implementation follows some suggestions provided here: + https://github.com/tornadoweb/tornado/issues/1063 + """ + + def __init__(self): + # type: (...) -> None + # HACK(jd): this should be using super(), but calling DefaultContextProvider.__init__ + # sets the context to `None` which breaks this code. + # We therefore skip DefaultContextProvider.__init__ and call only BaseContextProvider.__init__. + BaseContextProvider.__init__(self) + self._context = None + + def enter(self): + """ + Required to preserve the ``StackContext`` protocol. + """ + pass + + def exit(self, type, value, traceback): # noqa: A002 + """ + Required to preserve the ``StackContext`` protocol. 
+ """ + pass + + def __enter__(self): + self.old_contexts = _state.contexts + self.new_contexts = (self.old_contexts[0] + (self,), self) + _state.contexts = self.new_contexts + return self + + def __exit__(self, type, value, traceback): # noqa: A002 + final_contexts = _state.contexts + _state.contexts = self.old_contexts + + if final_contexts is not self.new_contexts: + raise StackContextInconsistentError( + "stack_context inconsistency (may be caused by yield " 'within a "with TracerStackContext" block)' + ) + + # break the reference to allow faster GC on CPython + self.new_contexts = None + + def _has_io_loop(self): + """Helper to determine if we are currently in an IO loop""" + return getattr(IOLoop._current, "instance", None) is not None + + def _has_active_context(self): + """Helper to determine if we have an active context or not""" + if not self._has_io_loop(): + return super(TracerStackContext, self)._has_active_context() + else: + # we're inside a Tornado loop so the TracerStackContext is used + return self._get_state_active_context() is not None + + def _get_state_active_context(self): + """Helper to get the currently active context from the TracerStackContext""" + # we're inside a Tornado loop so the TracerStackContext is used + for stack in reversed(_state.contexts[0]): + if isinstance(stack, self.__class__): + ctx = stack._context + if isinstance(ctx, Span): + return self._update_active(ctx) + return ctx + return None + + def active(self): + """ + Return the ``Context`` from the current execution flow. This method can be + used inside a Tornado coroutine to retrieve and use the current tracing context. + If used in a separated Thread, the `_state` thread-local storage is used to + propagate the current Active context from the `MainThread`. + """ + if not self._has_io_loop(): + # if a Tornado loop is not available, it means that this method + # has been called from a synchronous code, so we can rely in a + # thread-local storage + return super(TracerStackContext, self).active() + else: + # we're inside a Tornado loop so the TracerStackContext is used + return self._get_state_active_context() + + def activate(self, ctx): + """ + Set the active ``Context`` for this async execution. If a ``TracerStackContext`` + is not found, the context is discarded. + If used in a separated Thread, the `_state` thread-local storage is used to + propagate the current Active context from the `MainThread`. + """ + if not self._has_io_loop(): + # because we're outside of an asynchronous execution, we store + # the current context in a thread-local storage + super(TracerStackContext, self).activate(ctx) + else: + # we're inside a Tornado loop so the TracerStackContext is used + for stack_ctx in reversed(_state.contexts[0]): + if isinstance(stack_ctx, self.__class__): + stack_ctx._context = ctx + return ctx + +else: + # no-op when not using stack_context + class TracerStackContext(DefaultContextProvider): + def __enter__(self): + pass + + def __exit__(self, *exc): + pass + + +def run_with_trace_context(func, *args, **kwargs): + """ + Run the given function within a traced StackContext. This function is used to + trace Tornado web handlers, but can be used in your code to trace coroutines + execution. 
+ """ + with TracerStackContext(): + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/template.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/template.py new file mode 100644 index 0000000..9e6ee15 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/tornado/template.py @@ -0,0 +1,35 @@ +from tornado import template + +from ddtrace import Pin +from ddtrace import config +from ddtrace.internal.constants import COMPONENT + +from ...ext import SpanTypes + + +def generate(func, renderer, args, kwargs): + """ + Wrap the ``generate`` method used in templates rendering. Because the method + may be called everywhere, the execution is traced in a tracer StackContext that + inherits the current one if it's already available. + """ + # get the module pin + pin = Pin.get_from(template) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + # change the resource and the template name + # if it's created from a string instead of a file + if "" in renderer.name: + resource = template_name = "render_string" + else: + resource = template_name = renderer.name + + # trace the original call + with pin.tracer.trace( + "tornado.template", service=pin.service, resource=resource, span_type=SpanTypes.TEMPLATE + ) as span: + span.set_tag_str(COMPONENT, config.tornado.integration_name) + + span.set_tag_str("tornado.template_name", template_name) + return func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils.py new file mode 100644 index 0000000..2efbb76 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils.py @@ -0,0 +1,682 @@ +""" +This module contains utility functions for writing ddtrace integrations. 
+""" +from collections import deque +import ipaddress +import re +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Generator # noqa:F401 +from typing import Iterator # noqa:F401 +from typing import List # noqa:F401 +from typing import Mapping # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 +from typing import cast # noqa:F401 + +from ddtrace import Pin +from ddtrace import config +from ddtrace.ext import http +from ddtrace.ext import net +from ddtrace.ext import user +from ddtrace.internal import core +from ddtrace.internal.compat import ip_is_global +from ddtrace.internal.compat import parse +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.cache import cached +from ddtrace.internal.utils.http import normalize_header_name +from ddtrace.internal.utils.http import redact_url +from ddtrace.internal.utils.http import strip_query_string +import ddtrace.internal.utils.wrappers +from ddtrace.propagation.http import HTTPPropagator +from ddtrace.settings.asm import config as asm_config +from ddtrace.vendor import wrapt + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace import Span # noqa:F401 + from ddtrace import Tracer # noqa:F401 + from ddtrace.settings import IntegrationConfig # noqa:F401 + + +log = get_logger(__name__) + +wrap = wrapt.wrap_function_wrapper +unwrap = ddtrace.internal.utils.wrappers.unwrap +iswrapped = ddtrace.internal.utils.wrappers.iswrapped + +REQUEST = "request" +RESPONSE = "response" + +# Tag normalization based on: https://docs.datadoghq.com/tagging/#defining-tags +# With the exception of '.' in header names which are replaced with '_' to avoid +# starting a "new object" on the UI. +NORMALIZE_PATTERN = re.compile(r"([^a-z0-9_\-:/]){1}") + +# Possible User Agent header. +USER_AGENT_PATTERNS = ("http-user-agent", "user-agent") + +IP_PATTERNS = ( + "x-forwarded-for", + "x-real-ip", + "true-client-ip", + "x-client-ip", + "x-forwarded", + "forwarded-for", + "x-cluster-client-ip", + "fastly-client-ip", + "cf-connecting-ip", + "cf-connecting-ipv6", +) + + +@cached() +def _normalized_header_name(header_name): + # type: (str) -> str + return NORMALIZE_PATTERN.sub("_", normalize_header_name(header_name)) + + +def _get_header_value_case_insensitive(headers, keyname): + # type: (Mapping[str, str], str) -> Optional[str] + """ + Get a header in a case insensitive way. This function is meant for frameworks + like Django < 2.2 that don't store the headers in a case insensitive mapping. + """ + # just in case we are lucky + shortcut_value = headers.get(keyname) + if shortcut_value is not None: + return shortcut_value + + for key, value in headers.items(): + if key.lower().replace("_", "-") == keyname: + return value + + return None + + +def _normalize_tag_name(request_or_response, header_name): + # type: (str, str) -> str + """ + Given a tag name, e.g. 'Content-Type', returns a corresponding normalized tag name, i.e + 'http.request.headers.content_type'. 
Rules applied actual header name are: + - any letter is converted to lowercase + - any digit is left unchanged + - any block of any length of different ASCII chars is converted to a single underscore '_' + :param request_or_response: The context of the headers: request|response + :param header_name: The header's name + :type header_name: str + :rtype: str + """ + # Looking at: + # - http://www.iana.org/assignments/message-headers/message-headers.xhtml + # - https://tools.ietf.org/html/rfc6648 + # and for consistency with other language integrations seems safe to assume the following algorithm for header + # names normalization: + # - any letter is converted to lowercase + # - any digit is left unchanged + # - any block of any length of different ASCII chars is converted to a single underscore '_' + normalized_name = _normalized_header_name(header_name) + return "http.{}.headers.{}".format(request_or_response, normalized_name) + + +def _store_headers(headers, span, integration_config, request_or_response): + # type: (Dict[str, str], Span, IntegrationConfig, str) -> None + """ + :param headers: A dict of http headers to be stored in the span + :type headers: dict or list + :param span: The Span instance where tags will be stored + :type span: ddtrace.span.Span + :param integration_config: An integration specific config object. + :type integration_config: ddtrace.settings.IntegrationConfig + """ + if not isinstance(headers, dict): + try: + headers = dict(headers) + except Exception: + return + + if integration_config is None: + log.debug("Skipping headers tracing as no integration config was provided") + return + + for header_name, header_value in headers.items(): + """config._header_tag_name gets an element of the dictionary in config.http._header_tags + which gets the value from DD_TRACE_HEADER_TAGS environment variable.""" + tag_name = integration_config._header_tag_name(header_name) + if tag_name is None: + continue + # An empty tag defaults to a http..headers.
tag + span.set_tag_str(tag_name or _normalize_tag_name(request_or_response, header_name), header_value) + + +def _get_request_header_user_agent(headers, headers_are_case_sensitive=False): + # type: (Mapping[str, str], bool) -> str + """Get user agent from request headers + :param headers: A dict of http headers to be stored in the span + :type headers: dict or list + """ + for key_pattern in USER_AGENT_PATTERNS: + if not headers_are_case_sensitive: + user_agent = headers.get(key_pattern) + else: + user_agent = _get_header_value_case_insensitive(headers, key_pattern) + + if user_agent: + return user_agent + return "" + + +# Used to cache the last header used for the cache. From the same server/framework +# usually the same header will be used on further requests, so we use this to check +# only it. +_USED_IP_HEADER = "" + + +def _get_request_header_client_ip(headers, peer_ip=None, headers_are_case_sensitive=False): + # type: (Optional[Mapping[str, str]], Optional[str], bool) -> str + + global _USED_IP_HEADER + + def get_header_value(key): # type: (str) -> Optional[str] + if not headers_are_case_sensitive: + return headers.get(key) + + return _get_header_value_case_insensitive(headers, key) + + if not headers: + try: + _ = ipaddress.ip_address(str(peer_ip)) + except ValueError: + return "" + return peer_ip + + ip_header_value = "" + user_configured_ip_header = config.client_ip_header + if user_configured_ip_header: + # Used selected the header to use to get the IP + ip_header_value = get_header_value( + user_configured_ip_header.lower().replace("_", "-") + if headers_are_case_sensitive + else user_configured_ip_header + ) + if not ip_header_value: + log.debug("DD_TRACE_CLIENT_IP_HEADER configured but '%s' header missing", user_configured_ip_header) + return "" + + try: + _ = ipaddress.ip_address(str(ip_header_value)) + except ValueError: + log.debug("Invalid IP address from configured %s header: %s", user_configured_ip_header, ip_header_value) + return "" + + else: + # No configured IP header, go through the IP_PATTERNS headers in order + if _USED_IP_HEADER: + # Check first the caught header that previously contained an IP + ip_header_value = get_header_value(_USED_IP_HEADER) + + if not ip_header_value: + for ip_header in IP_PATTERNS: + tmp_ip_header_value = get_header_value(ip_header) + if tmp_ip_header_value: + ip_header_value = tmp_ip_header_value + _USED_IP_HEADER = ip_header + break + + private_ip_from_headers = "" + + if ip_header_value: + # At this point, we have one IP header, check its value and retrieve the first public IP + ip_list = ip_header_value.split(",") + for ip in ip_list: + ip = ip.strip() + if not ip: + continue + + try: + if ip_is_global(ip): + return ip + elif not private_ip_from_headers: + # IP is private, store it just in case we don't find a public one later + private_ip_from_headers = ip + except ValueError: # invalid IP + continue + + # At this point we have none or maybe one private ip from the headers: check the peer ip in + # case it's public and, if not, return either the private_ip from the headers (if we have one) + # or the peer private ip + try: + if ip_is_global(peer_ip) or not private_ip_from_headers: + return peer_ip + except ValueError: + pass + + return private_ip_from_headers + + +def _store_request_headers(headers, span, integration_config): + # type: (Dict[str, str], Span, IntegrationConfig) -> None + """ + Store request headers as a span's tags + :param headers: All the request's http headers, will be filtered through the whitelist + :type 
headers: dict or list + :param span: The Span instance where tags will be stored + :type span: ddtrace.Span + :param integration_config: An integration specific config object. + :type integration_config: ddtrace.settings.IntegrationConfig + """ + _store_headers(headers, span, integration_config, REQUEST) + + +def _store_response_headers(headers, span, integration_config): + # type: (Dict[str, str], Span, IntegrationConfig) -> None + """ + Store response headers as a span's tags + :param headers: All the response's http headers, will be filtered through the whitelist + :type headers: dict or list + :param span: The Span instance where tags will be stored + :type span: ddtrace.Span + :param integration_config: An integration specific config object. + :type integration_config: ddtrace.settings.IntegrationConfig + """ + _store_headers(headers, span, integration_config, RESPONSE) + + +def _sanitized_url(url): + # type: (str) -> str + """ + Sanitize url by removing parts with potential auth info + """ + if "@" in url: + parsed = parse.urlparse(url) + netloc = parsed.netloc + + if "@" not in netloc: + # Safe url, `@` not in netloc + return url + + netloc = netloc[netloc.index("@") + 1 :] + return parse.urlunparse( + ( + parsed.scheme, + netloc, + parsed.path, + "", + parsed.query, + "", + ) + ) + + return url + + +def with_traced_module(func): + """Helper for providing tracing essentials (module and pin) for tracing + wrappers. + + This helper enables tracing wrappers to dynamically be disabled when the + corresponding pin is disabled. + + Usage:: + + @with_traced_module + def my_traced_wrapper(django, pin, func, instance, args, kwargs): + # Do tracing stuff + pass + + def patch(): + import django + wrap(django.somefunc, my_traced_wrapper(django)) + """ + + def with_mod(mod): + def wrapper(wrapped, instance, args, kwargs): + pin = Pin._find(instance, mod) + if pin and not pin.enabled(): + return wrapped(*args, **kwargs) + elif not pin: + log.debug("Pin not found for traced method %r", wrapped) + return wrapped(*args, **kwargs) + return func(mod, pin, wrapped, instance, args, kwargs) + + return wrapper + + return with_mod + + +def distributed_tracing_enabled(int_config, default=False): + # type: (IntegrationConfig, bool) -> bool + """Returns whether distributed tracing is enabled for this integration config""" + if "distributed_tracing_enabled" in int_config and int_config.distributed_tracing_enabled is not None: + return int_config.distributed_tracing_enabled + elif "distributed_tracing" in int_config and int_config.distributed_tracing is not None: + return int_config.distributed_tracing + return default + + +def int_service(pin, int_config, default=None): + # type: (Optional[Pin], IntegrationConfig, Optional[str]) -> Optional[str] + """Returns the service name for an integration which is internal + to the application. Internal meaning that the work belongs to the + user's application. Eg. Web framework, sqlalchemy, web servers. + + For internal integrations we prioritize overrides, then global defaults and + lastly the default provided by the integration. + """ + # Pin has top priority since it is user defined in code + if pin is not None and pin.service: + return pin.service + + # Config is next since it is also configured via code + # Note that both service and service_name are used by + # integrations. 
+ if "service" in int_config and int_config.service is not None: + return cast(str, int_config.service) + if "service_name" in int_config and int_config.service_name is not None: + return cast(str, int_config.service_name) + + global_service = int_config.global_config._get_service() + if global_service: + return cast(str, global_service) + + if "_default_service" in int_config and int_config._default_service is not None: + return cast(str, int_config._default_service) + + return default + + +def ext_service(pin, int_config, default=None): + # type: (Optional[Pin], IntegrationConfig, Optional[str]) -> Optional[str] + """Returns the service name for an integration which is external + to the application. External meaning that the integration generates + spans wrapping code that is outside the scope of the user's application. Eg. A database, RPC, cache, etc. + """ + if pin is not None and pin.service: + return pin.service + + if "service" in int_config and int_config.service is not None: + return cast(str, int_config.service) + if "service_name" in int_config and int_config.service_name is not None: + return cast(str, int_config.service_name) + + if "_default_service" in int_config and int_config._default_service is not None: + return cast(str, int_config._default_service) + + # A default is required since it's an external service. + return default + + +def _set_url_tag(integration_config, span, url, query): + # type: (IntegrationConfig, Span, str, str) -> None + + if integration_config.http_tag_query_string: # Tagging query string in http.url + if config.global_query_string_obfuscation_disabled: # No redacting of query strings + span.set_tag_str(http.URL, url) + else: # Redact query strings + span.set_tag_str(http.URL, redact_url(url, config._obfuscation_query_string_pattern, query)) + else: # Not tagging query string in http.url + span.set_tag_str(http.URL, strip_query_string(url)) + + +def set_http_meta( + span, # type: Span + integration_config, # type: IntegrationConfig + method=None, # type: Optional[str] + url=None, # type: Optional[str] + target_host=None, # type: Optional[str] + status_code=None, # type: Optional[Union[int, str]] + status_msg=None, # type: Optional[str] + query=None, # type: Optional[str] + parsed_query=None, # type: Optional[Mapping[str, str]] + request_headers=None, # type: Optional[Mapping[str, str]] + response_headers=None, # type: Optional[Mapping[str, str]] + retries_remain=None, # type: Optional[Union[int, str]] + raw_uri=None, # type: Optional[str] + request_cookies=None, # type: Optional[Dict[str, str]] + request_path_params=None, # type: Optional[Dict[str, str]] + request_body=None, # type: Optional[Union[str, Dict[str, List[str]]]] + peer_ip=None, # type: Optional[str] + headers_are_case_sensitive=False, # type: bool + route=None, # type: Optional[str] + response_cookies=None, # type: Optional[Dict[str, str]] +): + # type: (...) 
-> None + """ + Set HTTP metas on the span + + :param method: the HTTP method + :param url: the HTTP URL + :param status_code: the HTTP status code + :param status_msg: the HTTP status message + :param query: the HTTP query part of the URI as a string + :param parsed_query: the HTTP query part of the URI as parsed by the framework and forwarded to the user code + :param request_headers: the HTTP request headers + :param response_headers: the HTTP response headers + :param raw_uri: the full raw HTTP URI (including ports and query) + :param request_cookies: the HTTP request cookies as a dict + :param request_path_params: the parameters of the HTTP URL as set by the framework: /posts/ would give us + { "id": } + """ + if method is not None: + span.set_tag_str(http.METHOD, method) + + if url is not None: + url = _sanitized_url(url) + _set_url_tag(integration_config, span, url, query) + + if target_host is not None: + span.set_tag_str(net.TARGET_HOST, target_host) + + if status_code is not None: + try: + int_status_code = int(status_code) + except (TypeError, ValueError): + log.debug("failed to convert http status code %r to int", status_code) + else: + span.set_tag_str(http.STATUS_CODE, str(status_code)) + if config.http_server.is_error_code(int_status_code): + span.error = 1 + + if status_msg is not None: + span.set_tag_str(http.STATUS_MSG, status_msg) + + if query is not None and integration_config.trace_query_string: + span.set_tag_str(http.QUERY_STRING, query) + + request_ip = peer_ip + if request_headers: + user_agent = _get_request_header_user_agent(request_headers, headers_are_case_sensitive) + if user_agent: + span.set_tag_str(http.USER_AGENT, user_agent) + + # We always collect the IP if appsec is enabled to report it on potential vulnerabilities. + # https://datadoghq.atlassian.net/wiki/spaces/APS/pages/2118779066/Client+IP+addresses+resolution + if asm_config._asm_enabled or config.retrieve_client_ip: + # Retrieve the IP if it was calculated on AppSecProcessor.on_span_start + request_ip = core.get_item("http.request.remote_ip", span=span) + + if not request_ip: + # Not calculated: framework does not support IP blocking or testing env + request_ip = ( + _get_request_header_client_ip(request_headers, peer_ip, headers_are_case_sensitive) or peer_ip + ) + + if request_ip: + span.set_tag_str(http.CLIENT_IP, request_ip) + span.set_tag_str("network.client.ip", request_ip) + + if integration_config.is_header_tracing_configured: + """We should store both http..headers. and + http.. The last one + is the DD standardized tag for user-agent""" + _store_request_headers(dict(request_headers), span, integration_config) + + if response_headers is not None and integration_config.is_header_tracing_configured: + _store_response_headers(dict(response_headers), span, integration_config) + + if retries_remain is not None: + span.set_tag_str(http.RETRIES_REMAIN, str(retries_remain)) + + core.dispatch( + "set_http_meta_for_asm", + [ + span, + request_ip, + raw_uri, + route, + method, + request_headers, + request_cookies, + parsed_query, + request_path_params, + request_body, + status_code, + response_headers, + response_cookies, + ], + ) + + if route is not None: + span.set_tag_str(http.ROUTE, route) + + +def activate_distributed_headers(tracer, int_config=None, request_headers=None, override=None): + # type: (Tracer, Optional[IntegrationConfig], Optional[Dict[str, str]], Optional[bool]) -> None + """ + Helper for activating a distributed trace headers' context if enabled in integration config. 
+ int_config will be used to check if distributed trace headers context will be activated, but + override will override whatever value is set in int_config if passed any value other than None. + """ + if override is False: + return None + + if override or (int_config and distributed_tracing_enabled(int_config)): + context = HTTPPropagator.extract(request_headers) + + # Only need to activate the new context if something was propagated + if not context.trace_id: + return None + + # Do not reactivate a context with the same trace id + # DEV: An example could be nested web frameworks, when one layer already + # parsed request headers and activated them. + # + # Example:: + # + # app = Flask(__name__) # Traced via Flask instrumentation + # app = DDWSGIMiddleware(app) # Extra layer on top for WSGI + current_context = tracer.current_trace_context() + if current_context and current_context.trace_id == context.trace_id: + log.debug( + "will not activate extracted Context(trace_id=%r, span_id=%r), a context with that trace id is already active", # noqa: E501 + context.trace_id, + context.span_id, + ) + return None + + # We have parsed a trace id from headers, and we do not already + # have a context with the same trace id active + tracer.context_provider.activate(context) + + +def _flatten( + obj, # type: Any + sep=".", # type: str + prefix="", # type: str + exclude_policy=None, # type: Optional[Callable[[str], bool]] +): + # type: (...) -> Generator[Tuple[str, Any], None, None] + s = deque() # type: ignore + s.append((prefix, obj)) + while s: + p, v = s.pop() + if exclude_policy is not None and exclude_policy(p): + continue + if isinstance(v, dict): + s.extend((sep.join((p, k)) if p else k, v) for k, v in v.items()) + else: + yield p, v + + +def set_flattened_tags( + span, # type: Span + items, # type: Iterator[Tuple[str, Any]] + sep=".", # type: str + exclude_policy=None, # type: Optional[Callable[[str], bool]] + processor=None, # type: Optional[Callable[[Any], Any]] +): + # type: (...) -> None + for prefix, value in items: + for tag, v in _flatten(value, sep, prefix, exclude_policy): + span.set_tag(tag, processor(v) if processor is not None else v) + + +def set_user( + tracer, # type: Tracer + user_id, # type: str + name=None, # type: Optional[str] + email=None, # type: Optional[str] + scope=None, # type: Optional[str] + role=None, # type: Optional[str] + session_id=None, # type: Optional[str] + propagate=False, # type bool + span=None, # type: Optional[Span] +): + # type: (...) -> None + """Set user tags. + https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/#user-related-attributes + https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/?tab=set_tag&code-lang=python + """ + if span is None: + span = tracer.current_root_span() + if span: + if user_id: + str_user_id = str(user_id) + span.set_tag_str(user.ID, str_user_id) + if propagate: + span.context.dd_user_id = str_user_id + + # All other fields are optional + if name: + span.set_tag_str(user.NAME, name) + if email: + span.set_tag_str(user.EMAIL, email) + if scope: + span.set_tag_str(user.SCOPE, scope) + if role: + span.set_tag_str(user.ROLE, role) + if session_id: + span.set_tag_str(user.SESSION_ID, session_id) + + if asm_config._asm_enabled: + exc = core.dispatch_with_results("set_user_for_asm", [tracer, user_id]).block_user.exception + if exc: + raise exc + + else: + log.warning( + "No root span in the current execution. Skipping set_user tags. 
" + "See https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/" + "?tab=set_user&code-lang=python for more information.", + ) + + +def extract_netloc_and_query_info_from_url(url): + # type: (str) -> Tuple[str, str] + parse_result = parse.urlparse(url) + query = parse_result.query + + # Relative URLs don't have a netloc, so we force them + if not parse_result.netloc: + parse_result = parse.urlparse("//{url}".format(url=url)) + + netloc = parse_result.netloc.split("@", 1)[-1] # Discard auth info + netloc = netloc.split(":", 1)[0] # Discard port information + return netloc, query + + +class InterruptException(Exception): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_async.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_async.py new file mode 100644 index 0000000..63a3325 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_async.py @@ -0,0 +1,39 @@ +""" +async tracing utils + +Note that this module should only be imported in Python 3.5+. +""" +from ddtrace import Pin +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def with_traced_module(func): + """Async version of trace_utils.with_traced_module. + Usage:: + + @with_traced_module + async def my_traced_wrapper(django, pin, func, instance, args, kwargs): + # Do tracing stuff + pass + + def patch(): + import django + wrap(django.somefunc, my_traced_wrapper(django)) + """ + + def with_mod(mod): + async def wrapper(wrapped, instance, args, kwargs): + pin = Pin._find(instance, mod) + if pin and not pin.enabled(): + return await wrapped(*args, **kwargs) + elif not pin: + log.debug("Pin not found for traced method %r", wrapped) + return await wrapped(*args, **kwargs) + return await func(mod, pin, wrapped, instance, args, kwargs) + + return wrapper + + return with_mod diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_redis.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_redis.py new file mode 100644 index 0000000..88bdc11 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/trace_utils_redis.py @@ -0,0 +1,184 @@ +""" +Some utils used by the dogtrace redis integration +""" +from contextlib import contextmanager + +from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.ext import db +from ddtrace.ext import net +from ddtrace.ext import redis as redisx +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema import schematize_cache_operation +from ddtrace.internal.utils.formats import stringify_cache_args + + +format_command_args = stringify_cache_args + +SINGLE_KEY_COMMANDS = [ + "GET", + "GETDEL", + "GETEX", + "GETRANGE", + "GETSET", + "LINDEX", + "LRANGE", + "RPOP", + "LPOP", + "HGET", + "HGETALL", + "HKEYS", + "HMGET", + "HRANDFIELD", + "HVALS", +] +MULTI_KEY_COMMANDS = ["MGET"] +ROW_RETURNING_COMMANDS = SINGLE_KEY_COMMANDS + MULTI_KEY_COMMANDS + + +def _extract_conn_tags(conn_kwargs): + """Transform redis conn info into dogtrace metas""" + try: + conn_tags = { + net.TARGET_HOST: conn_kwargs["host"], + net.TARGET_PORT: conn_kwargs["port"], + redisx.DB: conn_kwargs.get("db") or 0, + } + client_name = conn_kwargs.get("client_name") + if client_name: + conn_tags[redisx.CLIENT_NAME] = client_name + return conn_tags 
+ except Exception: + return {} + + +def determine_row_count(redis_command, span, result): + empty_results = [b"", [], {}, None] + # result can be an empty list / dict / string + if result not in empty_results: + if redis_command == "MGET": + # only include valid key results within count + result = [x for x in result if x not in empty_results] + span.set_metric(db.ROWCOUNT, len(result)) + elif redis_command == "HMGET": + # only include valid key results within count + result = [x for x in result if x not in empty_results] + span.set_metric(db.ROWCOUNT, 1 if len(result) > 0 else 0) + else: + span.set_metric(db.ROWCOUNT, 1) + else: + # set count equal to 0 if an empty result + span.set_metric(db.ROWCOUNT, 0) + + +def _run_redis_command(span, func, args, kwargs): + parsed_command = stringify_cache_args(args) + redis_command = parsed_command.split(" ")[0] + try: + result = func(*args, **kwargs) + if redis_command in ROW_RETURNING_COMMANDS: + determine_row_count(redis_command=redis_command, span=span, result=result) + return result + except Exception: + if redis_command in ROW_RETURNING_COMMANDS: + span.set_metric(db.ROWCOUNT, 0) + raise + + +@contextmanager +def _trace_redis_cmd(pin, config_integration, instance, args): + """Create a span for the execute command method and tag it""" + query = stringify_cache_args(args, cmd_max_len=config_integration.cmd_max_length) + with pin.tracer.trace( + schematize_cache_operation(redisx.CMD, cache_provider=redisx.APP), + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.REDIS, + resource=query.split(" ")[0] if config_integration.resource_only_command else query, + ) as span: + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag_str(COMPONENT, config_integration.integration_name) + span.set_tag_str(db.SYSTEM, redisx.APP) + span.set_tag(SPAN_MEASURED_KEY) + span_name = schematize_cache_operation(redisx.RAWCMD, cache_provider=redisx.APP) + span.set_tag_str(span_name, query) + if pin.tags: + span.set_tags(pin.tags) + # some redis clients do not have a connection_pool attribute (ex. 
aioredis v1.3) + if hasattr(instance, "connection_pool"): + span.set_tags(_extract_conn_tags(instance.connection_pool.connection_kwargs)) + span.set_metric(redisx.ARGS_LEN, len(args)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config_integration.get_analytics_sample_rate()) + yield span + + +@contextmanager +def _trace_redis_execute_pipeline(pin, config_integration, cmds, instance, is_cluster=False): + """Create a span for the execute pipeline method and tag it""" + cmd_string = resource = "\n".join(cmds) + if config_integration.resource_only_command: + resource = "\n".join([cmd.split(" ")[0] for cmd in cmds]) + + with pin.tracer.trace( + schematize_cache_operation(redisx.CMD, cache_provider=redisx.APP), + resource=resource, + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.REDIS, + ) as span: + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag_str(COMPONENT, config_integration.integration_name) + span.set_tag_str(db.SYSTEM, redisx.APP) + span.set_tag(SPAN_MEASURED_KEY) + span_name = schematize_cache_operation(redisx.RAWCMD, cache_provider=redisx.APP) + span.set_tag_str(span_name, cmd_string) + if not is_cluster: + span.set_tags(_extract_conn_tags(instance.connection_pool.connection_kwargs)) + span.set_metric(redisx.PIPELINE_LEN, len(instance.command_stack)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config_integration.get_analytics_sample_rate()) + # yield the span in case the caller wants to build on span + yield span + + +@contextmanager +def _trace_redis_execute_async_cluster_pipeline(pin, config_integration, cmds, instance): + """Create a span for the execute async cluster pipeline method and tag it""" + cmd_string = resource = "\n".join(cmds) + if config_integration.resource_only_command: + resource = "\n".join([cmd.split(" ")[0] for cmd in cmds]) + + with pin.tracer.trace( + schematize_cache_operation(redisx.CMD, cache_provider=redisx.APP), + resource=resource, + service=trace_utils.ext_service(pin, config_integration), + span_type=SpanTypes.REDIS, + ) as span: + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + span.set_tag_str(COMPONENT, config_integration.integration_name) + span.set_tag_str(db.SYSTEM, redisx.APP) + span.set_tag(SPAN_MEASURED_KEY) + span_name = schematize_cache_operation(redisx.RAWCMD, cache_provider=redisx.APP) + span.set_tag_str(span_name, cmd_string) + span.set_metric(redisx.PIPELINE_LEN, len(instance._command_stack)) + # set analytics sample rate if enabled + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config_integration.get_analytics_sample_rate()) + # yield the span in case the caller wants to build on span + yield span + + +async def _run_redis_command_async(span, func, args, kwargs): + parsed_command = stringify_cache_args(args) + redis_command = parsed_command.split(" ")[0] + try: + result = await func(*args, **kwargs) + if redis_command in ROW_RETURNING_COMMANDS: + determine_row_count(redis_command=redis_command, span=span, result=result) + return result + except Exception: + if redis_command in ROW_RETURNING_COMMANDS: + span.set_metric(db.ROWCOUNT, 0) + raise diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/__init__.py new file mode 100644 index 0000000..5e32184 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/__init__.py @@ -0,0 +1,47 @@ +""" +The unittest integration traces test executions. 
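+Test sessions, modules, suites and individual tests are each reported as spans.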
+ + +Enabling +~~~~~~~~ + +The unittest integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Alternately, use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(unittest=True) + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.unittest["operation_name"] + + The operation name reported by default for unittest traces. + + This option can also be set with the ``DD_UNITTEST_OPERATION_NAME`` environment + variable. + + Default: ``"unittest.test"`` + + .. py:data:: ddtrace.config.unittest["strict_naming"] + + Requires all ``unittest`` tests to start with ``test`` as stated in the Python documentation + + This option can also be set with the ``DD_CIVISIBILITY_UNITTEST_STRICT_NAMING`` environment + variable. + + Default: ``True`` +""" +from ...internal.utils.importlib import require_modules +from .patch import get_version +from .patch import patch +from .patch import unpatch + + +required_modules = ["unittest"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/constants.py new file mode 100644 index 0000000..dc58863 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/constants.py @@ -0,0 +1,8 @@ +COMPONENT_VALUE = "unittest" +FRAMEWORK = "unittest" +KIND = "test" + +TEST_OPERATION_NAME = "unittest.test" +SUITE_OPERATION_NAME = "unittest.test_suite" +SESSION_OPERATION_NAME = "unittest.test_session" +MODULE_OPERATION_NAME = "unittest.test_module" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/patch.py new file mode 100644 index 0000000..30fe31e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/unittest/patch.py @@ -0,0 +1,858 @@ +import inspect +import os +from typing import Union +import unittest + +import ddtrace +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.contrib.coverage.data import _coverage_data +from ddtrace.contrib.coverage.patch import patch as patch_coverage +from ddtrace.contrib.coverage.patch import run_coverage_report +from ddtrace.contrib.coverage.patch import unpatch as unpatch_coverage +from ddtrace.contrib.coverage.utils import _is_coverage_invoked_by_coverage_run +from ddtrace.contrib.coverage.utils import _is_coverage_patched +from ddtrace.contrib.unittest.constants import COMPONENT_VALUE +from ddtrace.contrib.unittest.constants import FRAMEWORK +from ddtrace.contrib.unittest.constants import KIND +from ddtrace.contrib.unittest.constants import MODULE_OPERATION_NAME +from ddtrace.contrib.unittest.constants import SESSION_OPERATION_NAME +from ddtrace.contrib.unittest.constants import SUITE_OPERATION_NAME +from ddtrace.ext import SpanTypes +from ddtrace.ext import test +from ddtrace.ext.ci import RUNTIME_VERSION +from ddtrace.ext.ci import _get_runtime_and_os_metadata +from ddtrace.internal.ci_visibility import CIVisibility as _CIVisibility +from ddtrace.internal.ci_visibility.constants import EVENT_TYPE as _EVENT_TYPE +from ddtrace.internal.ci_visibility.constants import ITR_UNSKIPPABLE_REASON +from ddtrace.internal.ci_visibility.constants import MODULE_ID as _MODULE_ID +from ddtrace.internal.ci_visibility.constants import MODULE_TYPE as _MODULE_TYPE +from 
ddtrace.internal.ci_visibility.constants import SESSION_ID as _SESSION_ID +from ddtrace.internal.ci_visibility.constants import SESSION_TYPE as _SESSION_TYPE +from ddtrace.internal.ci_visibility.constants import SKIPPED_BY_ITR_REASON +from ddtrace.internal.ci_visibility.constants import SUITE_ID as _SUITE_ID +from ddtrace.internal.ci_visibility.constants import SUITE_TYPE as _SUITE_TYPE +from ddtrace.internal.ci_visibility.constants import TEST +from ddtrace.internal.ci_visibility.coverage import _module_has_dd_coverage_enabled +from ddtrace.internal.ci_visibility.coverage import _report_coverage_to_span +from ddtrace.internal.ci_visibility.coverage import _start_coverage +from ddtrace.internal.ci_visibility.coverage import _stop_coverage +from ddtrace.internal.ci_visibility.coverage import _switch_coverage_context +from ddtrace.internal.ci_visibility.utils import _add_pct_covered_to_span +from ddtrace.internal.ci_visibility.utils import _add_start_end_source_file_path_data_to_span +from ddtrace.internal.ci_visibility.utils import _generate_fully_qualified_test_name +from ddtrace.internal.ci_visibility.utils import get_relative_or_absolute_path_for_path +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.utils.wrappers import unwrap as _u +from ddtrace.vendor import wrapt + + +log = get_logger(__name__) +_global_skipped_elements = 0 + +# unittest default settings +config._add( + "unittest", + dict( + _default_service="unittest", + operation_name=os.getenv("DD_UNITTEST_OPERATION_NAME", default="unittest.test"), + strict_naming=asbool(os.getenv("DD_CIVISIBILITY_UNITTEST_STRICT_NAMING", default=True)), + ), +) + + +def get_version(): + # type: () -> str + return "" + + +def _enable_unittest_if_not_started(): + _initialize_unittest_data() + if _CIVisibility.enabled: + return + _CIVisibility.enable(config=ddtrace.config.unittest) + + +def _initialize_unittest_data(): + if not hasattr(_CIVisibility, "_unittest_data"): + _CIVisibility._unittest_data = {} + if "suites" not in _CIVisibility._unittest_data: + _CIVisibility._unittest_data["suites"] = {} + if "modules" not in _CIVisibility._unittest_data: + _CIVisibility._unittest_data["modules"] = {} + if "unskippable_tests" not in _CIVisibility._unittest_data: + _CIVisibility._unittest_data["unskippable_tests"] = set() + + +def _set_tracer(tracer: ddtrace.tracer): + """Manually sets the tracer instance to `unittest.`""" + unittest._datadog_tracer = tracer + + +def _is_test_coverage_enabled(test_object) -> bool: + return _CIVisibility._instance._collect_coverage_enabled and not _is_skipped_test(test_object) + + +def _is_skipped_test(test_object) -> bool: + testMethod = getattr(test_object, test_object._testMethodName, "") + return ( + (hasattr(test_object.__class__, "__unittest_skip__") and test_object.__class__.__unittest_skip__) + or (hasattr(testMethod, "__unittest_skip__") and testMethod.__unittest_skip__) + or _is_skipped_by_itr(test_object) + ) + + +def _is_skipped_by_itr(test_object) -> bool: + return hasattr(test_object, "_dd_itr_skip") and test_object._dd_itr_skip + + +def _should_be_skipped_by_itr(args: tuple, test_module_suite_path: str, test_name: str, test_object) -> bool: + return ( + len(args) + and _CIVisibility._instance._should_skip_path(test_module_suite_path, test_name) + and not _is_skipped_test(test_object) + ) + + +def _is_marked_as_unskippable(test_object) -> bool: + test_suite_name = 
_extract_suite_name_from_test_method(test_object) + test_name = _extract_test_method_name(test_object) + test_module_path = _extract_module_file_path(test_object) + test_module_suite_name = _generate_fully_qualified_test_name(test_module_path, test_suite_name, test_name) + return ( + hasattr(_CIVisibility, "_unittest_data") + and test_module_suite_name in _CIVisibility._unittest_data["unskippable_tests"] + ) + + +def _update_skipped_elements_and_set_tags(test_module_span: ddtrace.Span, test_session_span: ddtrace.Span): + global _global_skipped_elements + _global_skipped_elements += 1 + + test_module_span._metrics[test.ITR_TEST_SKIPPING_COUNT] += 1 + test_module_span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "true") + test_module_span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "true") + + test_session_span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "true") + test_session_span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "true") + + +def _store_test_span(item, span: ddtrace.Span): + """Store datadog span at `unittest` test instance.""" + item._datadog_span = span + + +def _store_module_identifier(test_object: unittest.TextTestRunner): + """Store module identifier at `unittest` module instance, this is useful to classify event types.""" + if hasattr(test_object, "test") and hasattr(test_object.test, "_tests"): + for module in test_object.test._tests: + if len(module._tests) and _extract_module_name_from_module(module): + _set_identifier(module, "module") + + +def _store_suite_identifier(module): + """Store suite identifier at `unittest` suite instance, this is useful to classify event types.""" + if hasattr(module, "_tests"): + for suite in module._tests: + if len(suite._tests) and _extract_module_name_from_module(suite): + _set_identifier(suite, "suite") + + +def _is_test(item) -> bool: + if ( + type(item) == unittest.TestSuite + or not hasattr(item, "_testMethodName") + or (ddtrace.config.unittest.strict_naming and not item._testMethodName.startswith("test")) + ): + return False + return True + + +def _extract_span(item) -> Union[ddtrace.Span, None]: + return getattr(item, "_datadog_span", None) + + +def _extract_command_name_from_session(session: unittest.TextTestRunner) -> str: + if not hasattr(session, "progName"): + return "python -m unittest" + return getattr(session, "progName", "") + + +def _extract_test_method_name(test_object) -> str: + """Extract test method name from `unittest` instance.""" + return getattr(test_object, "_testMethodName", "") + + +def _extract_session_span() -> Union[ddtrace.Span, None]: + return getattr(_CIVisibility, "_datadog_session_span", None) + + +def _extract_module_span(module_identifier: str) -> Union[ddtrace.Span, None]: + if hasattr(_CIVisibility, "_unittest_data") and module_identifier in _CIVisibility._unittest_data["modules"]: + return _CIVisibility._unittest_data["modules"][module_identifier].get("module_span") + return None + + +def _extract_suite_span(suite_identifier: str) -> Union[ddtrace.Span, None]: + if hasattr(_CIVisibility, "_unittest_data") and suite_identifier in _CIVisibility._unittest_data["suites"]: + return _CIVisibility._unittest_data["suites"][suite_identifier].get("suite_span") + return None + + +def _update_status_item(item: ddtrace.Span, status: str): + """ + Sets the status for each Span implementing the test FAIL logic override. 
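+    (A FAIL status, once set, is never overwritten, and SKIP never replaces an existing status.)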
+ """ + existing_status = item.get_tag(test.STATUS) + if existing_status and (status == test.Status.SKIP.value or existing_status == test.Status.FAIL.value): + return None + item.set_tag_str(test.STATUS, status) + return None + + +def _extract_suite_name_from_test_method(item) -> str: + item_type = type(item) + return getattr(item_type, "__name__", "") + + +def _extract_module_name_from_module(item) -> str: + if _is_test(item): + return type(item).__module__ + return "" + + +def _extract_test_reason(item: tuple) -> str: + """ + Given a tuple of type [test_class, str], it returns the test failure/skip reason + """ + return item[1] + + +def _extract_test_file_name(item) -> str: + return os.path.basename(inspect.getfile(item.__class__)) + + +def _extract_module_file_path(item) -> str: + if _is_test(item): + try: + test_module_object = inspect.getfile(item.__class__) + except TypeError: + log.debug( + "Tried to collect module file path but it is a built-in Python function", + ) + return "" + return get_relative_or_absolute_path_for_path(test_module_object, os.getcwd()) + + return "" + + +def _generate_test_resource(suite_name: str, test_name: str) -> str: + return "{}.{}".format(suite_name, test_name) + + +def _generate_suite_resource(test_suite: str) -> str: + return "{}".format(test_suite) + + +def _generate_module_resource(test_module: str) -> str: + return "{}".format(test_module) + + +def _generate_session_resource(test_command: str) -> str: + return "{}".format(test_command) + + +def _set_test_skipping_tags_to_span(span: ddtrace.Span): + span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "true") + span.set_tag_str(test.ITR_TEST_SKIPPING_TYPE, TEST) + span.set_tag_str(test.ITR_TEST_SKIPPING_TESTS_SKIPPED, "false") + span.set_tag_str(test.ITR_DD_CI_ITR_TESTS_SKIPPED, "false") + span.set_tag_str(test.ITR_FORCED_RUN, "false") + span.set_tag_str(test.ITR_UNSKIPPABLE, "false") + + +def _set_identifier(item, name: str): + """ + Adds an event type classification to a `unittest` test. + """ + item._datadog_object = name + + +def _is_valid_result(instance: unittest.TextTestRunner, args: tuple) -> bool: + return instance and isinstance(instance, unittest.runner.TextTestResult) and args + + +def _is_valid_test_call(kwargs: dict) -> bool: + """ + Validates that kwargs is empty to ensure that `unittest` is running a test + """ + return not len(kwargs) + + +def _is_valid_module_suite_call(func) -> bool: + """ + Validates that the mocked function is an actual function from `unittest` + """ + return type(func).__name__ == "method" or type(func).__name__ == "instancemethod" + + +def _is_invoked_by_cli(instance: unittest.TextTestRunner) -> bool: + return ( + hasattr(instance, "progName") + or hasattr(_CIVisibility, "_datadog_entry") + and _CIVisibility._datadog_entry == "cli" + ) + + +def _extract_test_method_object(test_object): + if hasattr(test_object, "_testMethodName"): + return getattr(test_object, test_object._testMethodName, None) + return None + + +def _is_invoked_by_text_test_runner() -> bool: + return hasattr(_CIVisibility, "_datadog_entry") and _CIVisibility._datadog_entry == "TextTestRunner" + + +def _generate_module_suite_path(test_module_path: str, test_suite_name: str) -> str: + return "{}.{}".format(test_module_path, test_suite_name) + + +def _populate_suites_and_modules(test_objects: list, seen_suites: dict, seen_modules: dict): + """ + Discovers suites and modules and initializes the seen_suites and seen_modules dictionaries. 
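+    Nested suites are traversed recursively until individual test cases are reached.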
+ """ + if not hasattr(test_objects, "__iter__"): + return + for test_object in test_objects: + if not _is_test(test_object): + _populate_suites_and_modules(test_object, seen_suites, seen_modules) + continue + test_module_path = _extract_module_file_path(test_object) + test_suite_name = _extract_suite_name_from_test_method(test_object) + test_module_suite_path = _generate_module_suite_path(test_module_path, test_suite_name) + if test_module_path not in seen_modules: + seen_modules[test_module_path] = { + "module_span": None, + "remaining_suites": 0, + } + if test_module_suite_path not in seen_suites: + seen_suites[test_module_suite_path] = { + "suite_span": None, + "remaining_tests": 0, + } + + seen_modules[test_module_path]["remaining_suites"] += 1 + + seen_suites[test_module_suite_path]["remaining_tests"] += 1 + + +def _finish_remaining_suites_and_modules(seen_suites: dict, seen_modules: dict): + """ + Forces all suite and module spans to finish and updates their statuses. + """ + for suite in seen_suites.values(): + test_suite_span = suite["suite_span"] + if test_suite_span and not test_suite_span.finished: + _finish_span(test_suite_span) + + for module in seen_modules.values(): + test_module_span = module["module_span"] + if test_module_span and not test_module_span.finished: + _finish_span(test_module_span) + del _CIVisibility._unittest_data + + +def _update_remaining_suites_and_modules( + test_module_suite_path: str, test_module_path: str, test_module_span: ddtrace.Span, test_suite_span: ddtrace.Span +): + """ + Updates the remaining test suite and test counter and finishes spans when these have finished their execution. + """ + suite_dict = _CIVisibility._unittest_data["suites"][test_module_suite_path] + modules_dict = _CIVisibility._unittest_data["modules"][test_module_path] + + suite_dict["remaining_tests"] -= 1 + if suite_dict["remaining_tests"] == 0: + modules_dict["remaining_suites"] -= 1 + _finish_span(test_suite_span) + if modules_dict["remaining_suites"] == 0: + _finish_span(test_module_span) + + +def _update_test_skipping_count_span(span: ddtrace.Span): + if _CIVisibility.test_skipping_enabled(): + span.set_metric(test.ITR_TEST_SKIPPING_COUNT, _global_skipped_elements) + + +def _extract_skip_if_reason(args, kwargs): + if len(args) >= 2: + return _extract_test_reason(args) + elif kwargs and "reason" in kwargs: + return kwargs["reason"] + return "" + + +def patch(): + """ + Patch the instrumented methods from unittest + """ + if getattr(unittest, "_datadog_patch", False) or _CIVisibility.enabled: + return + _initialize_unittest_data() + + unittest._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + _w(unittest, "TextTestResult.addSuccess", add_success_test_wrapper) + _w(unittest, "TextTestResult.addFailure", add_failure_test_wrapper) + _w(unittest, "TextTestResult.addError", add_failure_test_wrapper) + _w(unittest, "TextTestResult.addSkip", add_skip_test_wrapper) + _w(unittest, "TextTestResult.addExpectedFailure", add_xfail_test_wrapper) + _w(unittest, "TextTestResult.addUnexpectedSuccess", add_xpass_test_wrapper) + _w(unittest, "skipIf", skip_if_decorator) + _w(unittest, "TestCase.run", handle_test_wrapper) + _w(unittest, "TestSuite.run", collect_text_test_runner_session) + _w(unittest, "TextTestRunner.run", handle_text_test_runner_wrapper) + _w(unittest, "TestProgram.runTests", handle_cli_run) + + +def unpatch(): + """ + Undo patched instrumented methods from unittest + """ + if not getattr(unittest, "_datadog_patch", False): + return + + 
_u(unittest.TextTestResult, "addSuccess") + _u(unittest.TextTestResult, "addFailure") + _u(unittest.TextTestResult, "addError") + _u(unittest.TextTestResult, "addSkip") + _u(unittest.TextTestResult, "addExpectedFailure") + _u(unittest.TextTestResult, "addUnexpectedSuccess") + _u(unittest, "skipIf") + _u(unittest.TestSuite, "run") + _u(unittest.TestCase, "run") + _u(unittest.TextTestRunner, "run") + _u(unittest.TestProgram, "runTests") + + unittest._datadog_patch = False + _CIVisibility.disable() + + +def _set_test_span_status(test_item, status: str, exc_info: str = None, skip_reason: str = None): + span = _extract_span(test_item) + if not span: + log.debug("Tried setting test result for test but could not find span for %s", test_item) + return None + span.set_tag_str(test.STATUS, status) + if exc_info: + span.set_exc_info(exc_info[0], exc_info[1], exc_info[2]) + if status == test.Status.SKIP.value: + span.set_tag_str(test.SKIP_REASON, skip_reason) + + +def _set_test_xpass_xfail_result(test_item, result: str): + """ + Sets `test.result` and `test.status` to a XFAIL or XPASS test. + """ + span = _extract_span(test_item) + if not span: + log.debug("Tried setting test result for an xpass or xfail test but could not find span for %s", test_item) + return None + span.set_tag_str(test.RESULT, result) + status = span.get_tag(test.STATUS) + if result == test.Status.XFAIL.value: + if status == test.Status.PASS.value: + span.set_tag_str(test.STATUS, test.Status.FAIL.value) + elif status == test.Status.FAIL.value: + span.set_tag_str(test.STATUS, test.Status.PASS.value) + + +def add_success_test_wrapper(func, instance: unittest.TextTestRunner, args: tuple, kwargs: dict): + if _is_valid_result(instance, args): + _set_test_span_status(test_item=args[0], status=test.Status.PASS.value) + + return func(*args, **kwargs) + + +def add_failure_test_wrapper(func, instance: unittest.TextTestRunner, args: tuple, kwargs: dict): + if _is_valid_result(instance, args): + _set_test_span_status(test_item=args[0], exc_info=_extract_test_reason(args), status=test.Status.FAIL.value) + + return func(*args, **kwargs) + + +def add_xfail_test_wrapper(func, instance: unittest.TextTestRunner, args: tuple, kwargs: dict): + if _is_valid_result(instance, args): + _set_test_xpass_xfail_result(test_item=args[0], result=test.Status.XFAIL.value) + + return func(*args, **kwargs) + + +def add_skip_test_wrapper(func, instance: unittest.TextTestRunner, args: tuple, kwargs: dict): + if _is_valid_result(instance, args): + _set_test_span_status(test_item=args[0], skip_reason=_extract_test_reason(args), status=test.Status.SKIP.value) + + return func(*args, **kwargs) + + +def add_xpass_test_wrapper(func, instance, args: tuple, kwargs: dict): + if _is_valid_result(instance, args): + _set_test_xpass_xfail_result(test_item=args[0], result=test.Status.XPASS.value) + + return func(*args, **kwargs) + + +def _mark_test_as_unskippable(obj): + test_name = obj.__name__ + test_suite_name = str(obj).split(".")[0].split()[1] + test_module_path = get_relative_or_absolute_path_for_path(obj.__code__.co_filename, os.getcwd()) + test_module_suite_name = _generate_fully_qualified_test_name(test_module_path, test_suite_name, test_name) + _CIVisibility._unittest_data["unskippable_tests"].add(test_module_suite_name) + return obj + + +def _using_unskippable_decorator(args, kwargs): + return args[0] is False and _extract_skip_if_reason(args, kwargs) == ITR_UNSKIPPABLE_REASON + + +def skip_if_decorator(func, instance, args: tuple, kwargs: dict): + if 
_using_unskippable_decorator(args, kwargs): + return _mark_test_as_unskippable + return func(*args, **kwargs) + + +def handle_test_wrapper(func, instance, args: tuple, kwargs: dict): + """ + Creates module and suite spans for `unittest` test executions. + """ + if _is_valid_test_call(kwargs) and _is_test(instance) and hasattr(_CIVisibility, "_unittest_data"): + test_name = _extract_test_method_name(instance) + test_suite_name = _extract_suite_name_from_test_method(instance) + test_module_path = _extract_module_file_path(instance) + test_module_suite_path = _generate_module_suite_path(test_module_path, test_suite_name) + test_suite_span = _extract_suite_span(test_module_suite_path) + test_module_span = _extract_module_span(test_module_path) + if test_module_span is None and test_module_path in _CIVisibility._unittest_data["modules"]: + test_module_span = _start_test_module_span(instance) + _CIVisibility._unittest_data["modules"][test_module_path]["module_span"] = test_module_span + if test_suite_span is None and test_module_suite_path in _CIVisibility._unittest_data["suites"]: + test_suite_span = _start_test_suite_span(instance) + suite_dict = _CIVisibility._unittest_data["suites"][test_module_suite_path] + suite_dict["suite_span"] = test_suite_span + if not test_module_span or not test_suite_span: + log.debug("Suite and/or module span not found for test: %s", test_name) + return func(*args, **kwargs) + with _start_test_span(instance, test_suite_span) as span: + test_session_span = _CIVisibility._datadog_session_span + root_directory = os.getcwd() + fqn_test = _generate_fully_qualified_test_name(test_module_path, test_suite_name, test_name) + + if _CIVisibility.test_skipping_enabled(): + if _is_marked_as_unskippable(instance): + span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + test_module_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + test_session_span.set_tag_str(test.ITR_UNSKIPPABLE, "true") + test_module_suite_path_without_extension = "{}/{}".format( + os.path.splitext(test_module_path)[0], test_suite_name + ) + if _should_be_skipped_by_itr(args, test_module_suite_path_without_extension, test_name, instance): + if _is_marked_as_unskippable(instance): + span.set_tag_str(test.ITR_FORCED_RUN, "true") + test_module_span.set_tag_str(test.ITR_FORCED_RUN, "true") + test_session_span.set_tag_str(test.ITR_FORCED_RUN, "true") + else: + _update_skipped_elements_and_set_tags(test_module_span, test_session_span) + instance._dd_itr_skip = True + span.set_tag_str(test.ITR_SKIPPED, "true") + span.set_tag_str(test.SKIP_REASON, SKIPPED_BY_ITR_REASON) + + if _is_skipped_by_itr(instance): + result = args[0] + result.startTest(test=instance) + result.addSkip(test=instance, reason=SKIPPED_BY_ITR_REASON) + _set_test_span_status( + test_item=instance, skip_reason=SKIPPED_BY_ITR_REASON, status=test.Status.SKIP.value + ) + result.stopTest(test=instance) + else: + if _is_test_coverage_enabled(instance): + if not _module_has_dd_coverage_enabled(unittest, silent_mode=True): + unittest._dd_coverage = _start_coverage(root_directory) + _switch_coverage_context(unittest._dd_coverage, fqn_test) + result = func(*args, **kwargs) + _update_status_item(test_suite_span, span.get_tag(test.STATUS)) + if _is_test_coverage_enabled(instance): + _report_coverage_to_span(unittest._dd_coverage, span, root_directory) + + _update_remaining_suites_and_modules( + test_module_suite_path, test_module_path, test_module_span, test_suite_span + ) + return result + return func(*args, **kwargs) + + +def collect_text_test_runner_session(func, 
instance: unittest.TestSuite, args: tuple, kwargs: dict): + """ + Discovers test suites and tests for the current `unittest` `TextTestRunner` execution + """ + if not _is_valid_module_suite_call(func): + return func(*args, **kwargs) + _initialize_unittest_data() + if _is_invoked_by_text_test_runner(): + seen_suites = _CIVisibility._unittest_data["suites"] + seen_modules = _CIVisibility._unittest_data["modules"] + _populate_suites_and_modules(instance._tests, seen_suites, seen_modules) + + result = func(*args, **kwargs) + + return result + result = func(*args, **kwargs) + return result + + +def _start_test_session_span(instance) -> ddtrace.Span: + """ + Starts a test session span and sets the required tags for a `unittest` session instance. + """ + tracer = getattr(unittest, "_datadog_tracer", _CIVisibility._instance.tracer) + test_command = _extract_command_name_from_session(instance) + resource_name = _generate_session_resource(test_command) + test_session_span = tracer.trace( + SESSION_OPERATION_NAME, + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + resource=resource_name, + ) + test_session_span.set_tag_str(_EVENT_TYPE, _SESSION_TYPE) + test_session_span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + + test_session_span.set_tag_str(COMPONENT, COMPONENT_VALUE) + test_session_span.set_tag_str(SPAN_KIND, KIND) + + test_session_span.set_tag_str(test.COMMAND, test_command) + test_session_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_session_span.set_tag_str(test.FRAMEWORK_VERSION, _get_runtime_and_os_metadata()[RUNTIME_VERSION]) + + test_session_span.set_tag_str(test.TEST_TYPE, SpanTypes.TEST) + test_session_span.set_tag_str( + test.ITR_TEST_CODE_COVERAGE_ENABLED, + "true" if _CIVisibility._instance._collect_coverage_enabled else "false", + ) + if _CIVisibility.test_skipping_enabled(): + _set_test_skipping_tags_to_span(test_session_span) + else: + test_session_span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "false") + _store_module_identifier(instance) + if _is_coverage_invoked_by_coverage_run(): + patch_coverage() + return test_session_span + + +def _start_test_module_span(instance) -> ddtrace.Span: + """ + Starts a test module span and sets the required tags for a `unittest` module instance. 
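+    The module span is started as a child of the current test session span.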
+ """ + tracer = getattr(unittest, "_datadog_tracer", _CIVisibility._instance.tracer) + test_session_span = _extract_session_span() + test_module_name = _extract_module_name_from_module(instance) + resource_name = _generate_module_resource(test_module_name) + test_module_span = tracer._start_span( + MODULE_OPERATION_NAME, + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + activate=True, + child_of=test_session_span, + resource=resource_name, + ) + test_module_span.set_tag_str(_EVENT_TYPE, _MODULE_TYPE) + test_module_span.set_tag_str(_SESSION_ID, str(test_session_span.span_id)) + test_module_span.set_tag_str(_MODULE_ID, str(test_module_span.span_id)) + + test_module_span.set_tag_str(COMPONENT, COMPONENT_VALUE) + test_module_span.set_tag_str(SPAN_KIND, KIND) + + test_module_span.set_tag_str(test.COMMAND, test_session_span.get_tag(test.COMMAND)) + test_module_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_module_span.set_tag_str(test.FRAMEWORK_VERSION, _get_runtime_and_os_metadata()[RUNTIME_VERSION]) + + test_module_span.set_tag_str(test.TEST_TYPE, SpanTypes.TEST) + test_module_span.set_tag_str(test.MODULE, test_module_name) + test_module_span.set_tag_str(test.MODULE_PATH, _extract_module_file_path(instance)) + test_module_span.set_tag_str( + test.ITR_TEST_CODE_COVERAGE_ENABLED, + "true" if _CIVisibility._instance._collect_coverage_enabled else "false", + ) + if _CIVisibility.test_skipping_enabled(): + _set_test_skipping_tags_to_span(test_module_span) + test_module_span.set_metric(test.ITR_TEST_SKIPPING_COUNT, 0) + else: + test_module_span.set_tag_str(test.ITR_TEST_SKIPPING_ENABLED, "false") + _store_suite_identifier(instance) + return test_module_span + + +def _start_test_suite_span(instance) -> ddtrace.Span: + """ + Starts a test suite span and sets the required tags for a `unittest` suite instance. + """ + tracer = getattr(unittest, "_datadog_tracer", _CIVisibility._instance.tracer) + test_module_path = _extract_module_file_path(instance) + test_module_span = _extract_module_span(test_module_path) + test_suite_name = _extract_suite_name_from_test_method(instance) + resource_name = _generate_suite_resource(test_suite_name) + test_suite_span = tracer._start_span( + SUITE_OPERATION_NAME, + service=_CIVisibility._instance._service, + span_type=SpanTypes.TEST, + child_of=test_module_span, + activate=True, + resource=resource_name, + ) + test_suite_span.set_tag_str(_EVENT_TYPE, _SUITE_TYPE) + test_suite_span.set_tag_str(_SESSION_ID, test_module_span.get_tag(_SESSION_ID)) + test_suite_span.set_tag_str(_SUITE_ID, str(test_suite_span.span_id)) + test_suite_span.set_tag_str(_MODULE_ID, str(test_module_span.span_id)) + + test_suite_span.set_tag_str(COMPONENT, COMPONENT_VALUE) + test_suite_span.set_tag_str(SPAN_KIND, KIND) + + test_suite_span.set_tag_str(test.COMMAND, test_module_span.get_tag(test.COMMAND)) + test_suite_span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + test_suite_span.set_tag_str(test.FRAMEWORK_VERSION, _get_runtime_and_os_metadata()[RUNTIME_VERSION]) + + test_suite_span.set_tag_str(test.TEST_TYPE, SpanTypes.TEST) + test_suite_span.set_tag_str(test.SUITE, test_suite_name) + test_suite_span.set_tag_str(test.MODULE, test_module_span.get_tag(test.MODULE)) + test_suite_span.set_tag_str(test.MODULE_PATH, test_module_path) + return test_suite_span + + +def _start_test_span(instance, test_suite_span: ddtrace.Span) -> ddtrace.Span: + """ + Starts a test span and sets the required tags for a `unittest` test instance. 
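+    The test span is started as a child of the given ``test_suite_span``.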
+ """ + tracer = getattr(unittest, "_datadog_tracer", _CIVisibility._instance.tracer) + test_name = _extract_test_method_name(instance) + test_method_object = _extract_test_method_object(instance) + test_suite_name = _extract_suite_name_from_test_method(instance) + resource_name = _generate_test_resource(test_suite_name, test_name) + span = tracer._start_span( + ddtrace.config.unittest.operation_name, + service=_CIVisibility._instance._service, + resource=resource_name, + span_type=SpanTypes.TEST, + child_of=test_suite_span, + activate=True, + ) + span.set_tag_str(_EVENT_TYPE, SpanTypes.TEST) + span.set_tag_str(_SESSION_ID, test_suite_span.get_tag(_SESSION_ID)) + span.set_tag_str(_MODULE_ID, test_suite_span.get_tag(_MODULE_ID)) + span.set_tag_str(_SUITE_ID, test_suite_span.get_tag(_SUITE_ID)) + + span.set_tag_str(COMPONENT, COMPONENT_VALUE) + span.set_tag_str(SPAN_KIND, KIND) + + span.set_tag_str(test.COMMAND, test_suite_span.get_tag(test.COMMAND)) + span.set_tag_str(test.FRAMEWORK, FRAMEWORK) + span.set_tag_str(test.FRAMEWORK_VERSION, _get_runtime_and_os_metadata()[RUNTIME_VERSION]) + + span.set_tag_str(test.TYPE, SpanTypes.TEST) + span.set_tag_str(test.NAME, test_name) + span.set_tag_str(test.SUITE, test_suite_name) + span.set_tag_str(test.MODULE, test_suite_span.get_tag(test.MODULE)) + span.set_tag_str(test.MODULE_PATH, test_suite_span.get_tag(test.MODULE_PATH)) + span.set_tag_str(test.STATUS, test.Status.FAIL.value) + span.set_tag_str(test.CLASS_HIERARCHY, test_suite_name) + + _CIVisibility.set_codeowners_of(_extract_test_file_name(instance), span=span) + + _add_start_end_source_file_path_data_to_span(span, test_method_object, test_name, os.getcwd()) + + _store_test_span(instance, span) + return span + + +def _finish_span(current_span: ddtrace.Span): + """ + Finishes active span and populates span status upwards + """ + current_status = current_span.get_tag(test.STATUS) + parent_span = current_span._parent + if current_status and parent_span: + _update_status_item(parent_span, current_status) + elif not current_status: + current_span.set_tag_str(test.SUITE, test.Status.FAIL.value) + current_span.finish() + + +def _finish_test_session_span(): + _finish_remaining_suites_and_modules( + _CIVisibility._unittest_data["suites"], _CIVisibility._unittest_data["modules"] + ) + _update_test_skipping_count_span(_CIVisibility._datadog_session_span) + if _CIVisibility._instance._collect_coverage_enabled and _module_has_dd_coverage_enabled(unittest): + _stop_coverage(unittest) + if _is_coverage_patched() and _is_coverage_invoked_by_coverage_run(): + run_coverage_report() + _add_pct_covered_to_span(_coverage_data, _CIVisibility._datadog_session_span) + unpatch_coverage() + _finish_span(_CIVisibility._datadog_session_span) + + +def handle_cli_run(func, instance: unittest.TestProgram, args: tuple, kwargs: dict): + """ + Creates session span and discovers test suites and tests for the current `unittest` CLI execution + """ + if _is_invoked_by_cli(instance): + _enable_unittest_if_not_started() + for parent_module in instance.test._tests: + for module in parent_module._tests: + _populate_suites_and_modules( + module, _CIVisibility._unittest_data["suites"], _CIVisibility._unittest_data["modules"] + ) + + test_session_span = _start_test_session_span(instance) + _CIVisibility._datadog_entry = "cli" + _CIVisibility._datadog_session_span = test_session_span + + try: + result = func(*args, **kwargs) + except SystemExit as e: + if _CIVisibility.enabled and _CIVisibility._datadog_session_span and 
hasattr(_CIVisibility, "_unittest_data"): + _finish_test_session_span() + + raise e + return result + + +def handle_text_test_runner_wrapper(func, instance: unittest.TextTestRunner, args: tuple, kwargs: dict): + """ + Creates session span if unittest is called through the `TextTestRunner` method + """ + if _is_invoked_by_cli(instance): + return func(*args, **kwargs) + _enable_unittest_if_not_started() + _CIVisibility._datadog_entry = "TextTestRunner" + if not hasattr(_CIVisibility, "_datadog_session_span"): + _CIVisibility._datadog_session_span = _start_test_session_span(instance) + _CIVisibility._datadog_expected_sessions = 0 + _CIVisibility._datadog_finished_sessions = 0 + _CIVisibility._datadog_expected_sessions += 1 + try: + result = func(*args, **kwargs) + except SystemExit as e: + _CIVisibility._datadog_finished_sessions += 1 + if _CIVisibility._datadog_finished_sessions == _CIVisibility._datadog_expected_sessions: + _finish_test_session_span() + del _CIVisibility._datadog_session_span + raise e + _CIVisibility._datadog_finished_sessions += 1 + if _CIVisibility._datadog_finished_sessions == _CIVisibility._datadog_expected_sessions: + _finish_test_session_span() + del _CIVisibility._datadog_session_span + return result diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/__init__.py new file mode 100644 index 0000000..206588f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/__init__.py @@ -0,0 +1,63 @@ +""" +The ``urllib3`` integration instruments tracing on http calls with optional +support for distributed tracing across services the client communicates with. + + +Enabling +~~~~~~~~ + +The ``urllib3`` integration is not enabled by default. Use ``patch_all()`` +with the environment variable ``DD_TRACE_URLLIB3_ENABLED`` set, or call +:func:`patch()` with the ``urllib3`` argument set to ``True`` to manually +enable the integration, before importing and using ``urllib3``:: + + from ddtrace import patch + patch(urllib3=True) + + # use urllib3 like usual + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.urllib3['service'] + + The service name reported by default for urllib3 client instances. + + This option can also be set with the ``DD_URLLIB3_SERVICE`` environment + variable. + + Default: ``"urllib3"`` + + +.. py:data:: ddtrace.config.urllib3['distributed_tracing'] + + Whether or not to parse distributed tracing headers. + + Default: ``True`` + + +.. py:data:: ddtrace.config.urllib3['trace_query_string'] + + Whether or not to include the query string as a tag. + + Default: ``False`` + + +.. py:data:: ddtrace.config.urllib3['split_by_domain'] + + Whether or not to use the domain name of requests as the service name. 
+ + Default: ``False`` +""" +from ...internal.utils.importlib import require_modules +from .patch import get_version +from .patch import patch +from .patch import unpatch + + +required_modules = ["urllib3"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/patch.py new file mode 100644 index 0000000..a2ae3df --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/urllib3/patch.py @@ -0,0 +1,150 @@ +import os + +import urllib3 + +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.schema.span_attribute_schema import SpanDirection +from ddtrace.pin import Pin +from ddtrace.vendor.wrapt import wrap_function_wrapper as _w + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...ext import SpanKind +from ...ext import SpanTypes +from ...internal.compat import parse +from ...internal.schema import schematize_service_name +from ...internal.schema import schematize_url_operation +from ...internal.utils import ArgumentError +from ...internal.utils import get_argument_value +from ...internal.utils.formats import asbool +from ...internal.utils.wrappers import unwrap as _u +from ...propagation.http import HTTPPropagator +from .. import trace_utils + + +# Ports which, if set, will not be used in hostnames/service names +DROP_PORTS = (80, 443) + +# Initialize the default config vars +config._add( + "urllib3", + { + "_default_service": schematize_service_name("urllib3"), + "distributed_tracing": asbool(os.getenv("DD_URLLIB3_DISTRIBUTED_TRACING", default=True)), + "default_http_tag_query_string": os.getenv("DD_HTTP_CLIENT_TAG_QUERY_STRING", "true"), + "split_by_domain": asbool(os.getenv("DD_URLLIB3_SPLIT_BY_DOMAIN", default=False)), + }, +) + + +def get_version(): + # type: () -> str + return getattr(urllib3, "__version__", "") + + +def patch(): + """Enable tracing for all urllib3 requests""" + if getattr(urllib3, "__datadog_patch", False): + return + urllib3.__datadog_patch = True + + _w("urllib3", "connectionpool.HTTPConnectionPool.urlopen", _wrap_urlopen) + Pin().onto(urllib3.connectionpool.HTTPConnectionPool) + + +def unpatch(): + """Disable trace for all urllib3 requests""" + if getattr(urllib3, "__datadog_patch", False): + urllib3.__datadog_patch = False + + _u(urllib3.connectionpool.HTTPConnectionPool, "urlopen") + + +def _wrap_urlopen(func, instance, args, kwargs): + """ + Wrapper function for the lower-level urlopen in urllib3 + + :param func: The original target function "urlopen" + :param instance: The patched instance of ``HTTPConnectionPool`` + :param args: Positional arguments from the target function + :param kwargs: Keyword arguments from the target function + :return: The ``HTTPResponse`` from the target function + """ + request_method = get_argument_value(args, kwargs, 0, "method") + request_url = get_argument_value(args, kwargs, 1, "url") + try: + request_headers = get_argument_value(args, kwargs, 3, "headers") + except ArgumentError: + request_headers = None + try: + request_retries = get_argument_value(args, kwargs, 4, "retries") + except ArgumentError: + request_retries = None + + # HTTPConnectionPool allows relative path requests; convert the request_url to an absolute url + if request_url.startswith("/"): + request_url = parse.urlunparse( + ( + instance.scheme, 
+ "{}:{}".format(instance.host, instance.port) + if instance.port and instance.port not in DROP_PORTS + else str(instance.host), + request_url, + None, + None, + None, + ) + ) + + parsed_uri = parse.urlparse(request_url) + hostname = parsed_uri.netloc + + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return func(*args, **kwargs) + + with pin.tracer.trace( + schematize_url_operation("urllib3.request", protocol="http", direction=SpanDirection.OUTBOUND), + service=trace_utils.ext_service(pin, config.urllib3), + span_type=SpanTypes.HTTP, + ) as span: + span.set_tag_str(COMPONENT, config.urllib3.integration_name) + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + if config.urllib3.split_by_domain: + span.service = hostname + + # If distributed tracing is enabled, propagate the tracing headers to downstream services + if config.urllib3.distributed_tracing: + if request_headers is None: + request_headers = {} + kwargs["headers"] = request_headers + HTTPPropagator.inject(span.context, request_headers) + + if config.urllib3.analytics_enabled: + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.urllib3.get_analytics_sample_rate()) + + retries = request_retries.total if isinstance(request_retries, urllib3.util.retry.Retry) else None + + # Call the target function + response = None + try: + response = func(*args, **kwargs) + finally: + trace_utils.set_http_meta( + span, + integration_config=config.urllib3, + method=request_method, + url=request_url, + target_host=instance.host, + status_code=None if response is None else response.status, + query=parsed_uri.query, + request_headers=request_headers, + response_headers={} if response is None else dict(response.headers), + retries_remain=retries, + ) + + return response diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/__init__.py new file mode 100644 index 0000000..f688f95 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/__init__.py @@ -0,0 +1,53 @@ +""" +The Vertica integration will trace queries made using the vertica-python +library. + +Vertica will be automatically instrumented with ``import ddtrace.auto``, or when using +the ``ddtrace-run`` command. + +Vertica is instrumented on import. To instrument Vertica manually use the +``patch`` function. 
Note the ordering of the following statements:: + + from ddtrace import patch + patch(vertica=True) + + import vertica_python + + # use vertica_python like usual + + +To configure the Vertica integration globally you can use the ``Config`` API:: + + from ddtrace import config, patch + patch(vertica=True) + + config.vertica['service_name'] = 'my-vertica-database' + + +To configure the Vertica integration on an instance-per-instance basis use the +``Pin`` API:: + + from ddtrace import Pin, patch, Tracer + patch(vertica=True) + + import vertica_python + + custom_tracer = Tracer() + conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) + + # override the service and tracer to be used + Pin.override(conn, service='myverticaservice', tracer=custom_tracer) +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["vertica_python"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + from .patch import unpatch + + __all__ = ["patch", "unpatch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/patch.py new file mode 100644 index 0000000..66cebc3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/vertica/patch.py @@ -0,0 +1,263 @@ +import importlib + +import ddtrace +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.vendor import wrapt + +from ...constants import ANALYTICS_SAMPLE_RATE_KEY +from ...constants import SPAN_KIND +from ...constants import SPAN_MEASURED_KEY +from ...ext import SpanKind +from ...ext import SpanTypes +from ...ext import db as dbx +from ...ext import net +from ...internal.logger import get_logger +from ...internal.schema import schematize_database_operation +from ...internal.schema import schematize_service_name +from ...internal.utils import get_argument_value +from ...internal.utils.wrappers import unwrap +from ...pin import Pin +from .. 
import trace_utils + + +log = get_logger(__name__) + + +_PATCHED = False + + +def copy_span_start(instance, span, conf, *args, **kwargs): + span.resource = get_argument_value(args, kwargs, 0, "sql") + + +def execute_span_start(instance, span, conf, *args, **kwargs): + span.resource = get_argument_value(args, kwargs, 0, "operation") + + +def execute_span_end(instance, result, span, conf, *args, **kwargs): + span.set_metric(dbx.ROWCOUNT, instance.rowcount) + + +def fetch_span_end(instance, result, span, conf, *args, **kwargs): + span.set_metric(dbx.ROWCOUNT, instance.rowcount) + + +def cursor_span_end(instance, cursor, _, conf, *args, **kwargs): + tags = {} + tags[net.TARGET_HOST] = instance.options["host"] + tags[net.TARGET_PORT] = instance.options["port"] + if "user" in instance.options: + tags[dbx.USER] = instance.options["user"] + if "database" in instance.options: + tags[dbx.NAME] = instance.options["database"] + + pin = Pin( + tags=tags, + _config=config.vertica["patch"]["vertica_python.vertica.cursor.Cursor"], + ) + pin.onto(cursor) + + +# tracing configuration +config._add( + "vertica", + { + "_default_service": schematize_service_name("vertica"), + "_dbapi_span_name_prefix": "vertica", + "patch": { + "vertica_python.vertica.connection.Connection": { + "routines": { + "cursor": { + "trace_enabled": False, + "span_end": cursor_span_end, + }, + }, + }, + "vertica_python.vertica.cursor.Cursor": { + "routines": { + "execute": { + "operation_name": schematize_database_operation("vertica.query", database_provider="vertica"), + "span_type": SpanTypes.SQL, + "span_start": execute_span_start, + "span_end": execute_span_end, + "measured": True, + }, + "copy": { + "operation_name": "vertica.copy", + "span_type": SpanTypes.SQL, + "span_start": copy_span_start, + "measured": False, + }, + "fetchone": { + "operation_name": schematize_database_operation( + "vertica.fetchone", database_provider="vertica" + ), + "span_type": SpanTypes.SQL, + "span_end": fetch_span_end, + "measured": False, + }, + "fetchall": { + "operation_name": schematize_database_operation( + "vertica.fetchall", database_provider="vertica" + ), + "span_type": SpanTypes.SQL, + "span_end": fetch_span_end, + "measured": False, + }, + "nextset": { + "operation_name": schematize_database_operation("vertica.nextset", database_provider="vertica"), + "span_type": SpanTypes.SQL, + "span_end": fetch_span_end, + "measured": False, + }, + }, + }, + }, + }, +) + + +def get_version(): + # type: () -> str + import vertica_python + + return vertica_python.__version__ + + +def patch(): + global _PATCHED + if _PATCHED: + return + + _install(config.vertica) + _PATCHED = True + + +def unpatch(): + global _PATCHED + if _PATCHED: + _uninstall(config.vertica) + _PATCHED = False + + +def _uninstall(config): + for patch_class_path in config["patch"]: + patch_mod, _, patch_class = patch_class_path.rpartition(".") + mod = importlib.import_module(patch_mod) + cls = getattr(mod, patch_class, None) + + if not cls: + log.debug( + """ + Unable to find corresponding class for tracing configuration. + This version may not be supported. + """ + ) + continue + + for patch_routine in config["patch"][patch_class_path]["routines"]: + unwrap(cls, patch_routine) + + +def _find_routine_config(config, instance, routine_name): + """Attempts to find the config for a routine based on the bases of the + class of the instance. 
+ """ + bases = instance.__class__.__mro__ + for base in bases: + full_name = "{}.{}".format(base.__module__, base.__name__) + if full_name not in config["patch"]: + continue + + config_routines = config["patch"][full_name]["routines"] + + if routine_name in config_routines: + return config_routines[routine_name] + return {} + + +def _install_init(patch_item, patch_class, patch_mod, config): + patch_class_routine = "{}.{}".format(patch_class, "__init__") + + # patch the __init__ of the class with a Pin instance containing the defaults + @wrapt.patch_function_wrapper(patch_mod, patch_class_routine) + def init_wrapper(wrapped, instance, args, kwargs): + r = wrapped(*args, **kwargs) + + # create and attach a pin with the defaults + Pin( + tags=config.get("tags", {}), + tracer=config.get("tracer", ddtrace.tracer), + _config=config["patch"][patch_item], + ).onto(instance) + return r + + +def _install_routine(patch_routine, patch_class, patch_mod, config): + patch_class_routine = "{}.{}".format(patch_class, patch_routine) + + @wrapt.patch_function_wrapper(patch_mod, patch_class_routine) + def wrapper(wrapped, instance, args, kwargs): + # TODO?: remove Pin dependence + pin = Pin.get_from(instance) + + if patch_routine in pin._config["routines"]: + conf = pin._config["routines"][patch_routine] + else: + conf = _find_routine_config(config, instance, patch_routine) + + enabled = conf.get("trace_enabled", True) + + span = None + + try: + # shortcut if not enabled + if not enabled: + result = wrapped(*args, **kwargs) + return result + + operation_name = conf["operation_name"] + tracer = pin.tracer + with tracer.trace( + operation_name, + service=trace_utils.ext_service(pin, config), + span_type=conf.get("span_type"), + ) as span: + span.set_tag_str(COMPONENT, config.integration_name) + span.set_tag_str(dbx.SYSTEM, "vertica") + + # set span.kind to the type of operation being performed + span.set_tag_str(SPAN_KIND, SpanKind.CLIENT) + + if conf.get("measured", False): + span.set_tag(SPAN_MEASURED_KEY) + span.set_tags(pin.tags) + + if "span_start" in conf: + conf["span_start"](instance, span, conf, *args, **kwargs) + + # set analytics sample rate + span.set_tag(ANALYTICS_SAMPLE_RATE_KEY, config.get_analytics_sample_rate()) + + result = wrapped(*args, **kwargs) + return result + except Exception as err: + if "on_error" in conf: + conf["on_error"](instance, err, span, conf, *args, **kwargs) + raise + finally: + # if an exception is raised result will not exist + if "result" not in locals(): + result = None + if "span_end" in conf: + conf["span_end"](instance, result, span, conf, *args, **kwargs) + + +def _install(config): + for patch_class_path in config["patch"]: + patch_mod, _, patch_class = patch_class_path.rpartition(".") + _install_init(patch_class_path, patch_class, patch_mod, config) + + for patch_routine in config["patch"][patch_class_path]["routines"]: + _install_routine(patch_routine, patch_class, patch_mod, config) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/__init__.py new file mode 100644 index 0000000..84b8d14 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/__init__.py @@ -0,0 +1,43 @@ +""" +The Datadog WSGI middleware traces all WSGI requests. 
+ + +Usage +~~~~~ + +The middleware can be used manually via the following command:: + + + from ddtrace.contrib.wsgi import DDWSGIMiddleware + + # application is a WSGI application + application = DDWSGIMiddleware(application) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.wsgi["service"] + + The service name reported for the WSGI application. + + This option can also be set with the ``DD_SERVICE`` environment + variable. + + Default: ``"wsgi"`` + +.. py:data:: ddtrace.config.wsgi["distributed_tracing"] + + Configuration that allows distributed tracing to be enabled. + + Default: ``True`` + + +:ref:`All HTTP tags ` are supported for this integration. + +""" +from .wsgi import DDWSGIMiddleware +from .wsgi import get_version + + +__all__ = ["DDWSGIMiddleware", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/wsgi.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/wsgi.py new file mode 100644 index 0000000..fc2b78e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/wsgi/wsgi.py @@ -0,0 +1,270 @@ +from typing import TYPE_CHECKING +from typing import Callable +from typing import Iterable + +from ddtrace.internal.schema.span_attribute_schema import SpanDirection + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Dict # noqa:F401 + from typing import Mapping # noqa:F401 + from typing import Optional # noqa:F401 + + from ddtrace import Pin # noqa:F401 + from ddtrace import Span # noqa:F401 + from ddtrace import Tracer # noqa:F401 + from ddtrace.settings import Config # noqa:F401 + +from urllib.parse import quote + +import ddtrace +from ddtrace import config +from ddtrace.constants import SPAN_KIND +from ddtrace.contrib import trace_utils +from ddtrace.ext import SpanKind +from ddtrace.ext import SpanTypes +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED +from ddtrace.internal.logger import get_logger +from ddtrace.internal.schema import schematize_url_operation +from ddtrace.propagation._utils import from_wsgi_header +from ddtrace.propagation.http import HTTPPropagator +from ddtrace.vendor import wrapt + +from ...internal import core + + +log = get_logger(__name__) + +propagator = HTTPPropagator + +config._add( + "wsgi", + dict( + _default_service="wsgi", + distributed_tracing=True, + ), +) + + +def get_version(): + # type: () -> str + return "" + + +class _DDWSGIMiddlewareBase(object): + """Base WSGI middleware class. + + :param application: The WSGI application to apply the middleware to. + :param tracer: Tracer instance to use the middleware with. Defaults to the global tracer. + :param int_config: Integration specific configuration object. + :param pin: Set tracing metadata on a particular traced connection + :param app_is_iterator: Boolean indicating whether the wrapped app is a Python iterator + """ + + def __init__(self, application, tracer, int_config, pin, app_is_iterator=False): + # type: (Iterable, Tracer, Config, Pin, bool) -> None + self.app = application + self.tracer = tracer + self._config = int_config + self._pin = pin + self.app_is_iterator = app_is_iterator + + @property + def _request_span_name(self): + # type: () -> str + "Returns the name of a request span. Example: `flask.request`" + raise NotImplementedError + + @property + def _application_span_name(self): + # type: () -> str + "Returns the name of an application span. 
Example: `flask.application`" + raise NotImplementedError + + @property + def _response_span_name(self): + # type: () -> str + "Returns the name of a response span. Example: `flask.response`" + raise NotImplementedError + + def __call__(self, environ: Iterable, start_response: Callable) -> wrapt.ObjectProxy: + headers = get_request_headers(environ) + closing_iterable = () + not_blocked = True + with core.context_with_data( + "wsgi.__call__", + remote_addr=environ.get("REMOTE_ADDR"), + headers=headers, + headers_case_sensitive=True, + service=trace_utils.int_service(self._pin, self._config), + span_type=SpanTypes.WEB, + span_name=(self._request_call_name if hasattr(self, "_request_call_name") else self._request_span_name), + middleware_config=self._config, + distributed_headers_config=self._config, + distributed_headers=environ, + environ=environ, + middleware=self, + call_key="req_span", + ) as ctx: + if core.get_item(HTTP_REQUEST_BLOCKED): + result = core.dispatch_with_results("wsgi.block.started", (ctx, construct_url)).status_headers_content + if result: + status, headers, content = result.value + else: + status, headers, content = 403, [], "" + start_response(str(status), headers) + closing_iterable = [content] + not_blocked = False + + def blocked_view(): + result = core.dispatch_with_results("wsgi.block.started", (ctx, construct_url)).status_headers_content + if result: + status, headers, content = result.value + else: + status, headers, content = 403, [], "" + return content, status, headers + + core.dispatch("wsgi.block_decided", (blocked_view,)) + + if not_blocked: + core.dispatch("wsgi.request.prepare", (ctx, start_response)) + try: + closing_iterable = self.app(environ, ctx.get_item("intercept_start_response")) + except BaseException: + core.dispatch("wsgi.app.exception", (ctx,)) + raise + else: + core.dispatch("wsgi.app.success", (ctx, closing_iterable)) + if core.get_item(HTTP_REQUEST_BLOCKED): + _, _, content = core.dispatch_with_results( + "wsgi.block.started", (ctx, construct_url) + ).status_headers_content.value or (None, None, "") + closing_iterable = [content] + + result = core.dispatch_with_results( + "wsgi.request.complete", (ctx, closing_iterable, self.app_is_iterator) + ).traced_iterable + return result.value if result else [] + + def _traced_start_response(self, start_response, request_span, app_span, status, environ, exc_info=None): + # type: (Callable, Span, Span, str, Dict, Any) -> None + """sets the status code on a request span when start_response is called""" + with core.context_with_data( + "wsgi.response", + middleware=self, + request_span=request_span, + parent_call=app_span, + status=status, + environ=environ, + span_type=SpanTypes.WEB, + service=trace_utils.int_service(None, self._config), + start_span=False, + tags={COMPONENT: self._config.integration_name, SPAN_KIND: SpanKind.SERVER}, + call_key="response_span", + ): + return start_response(status, environ, exc_info) + + def _request_span_modifier(self, req_span, environ, parsed_headers=None): + # type: (Span, Dict, Optional[Dict]) -> None + """Implement to modify span attributes on the request_span""" + + def _application_span_modifier(self, app_span, environ, result): + # type: (Span, Dict, Iterable) -> None + """Implement to modify span attributes on the application_span""" + + def _response_span_modifier(self, resp_span, response): + # type: (Span, Dict) -> None + """Implement to modify span attributes on the request_span""" + + +def construct_url(environ): + """ + 
https://www.python.org/dev/peps/pep-3333/#url-reconstruction + """ + url = environ["wsgi.url_scheme"] + "://" + + if environ.get("HTTP_HOST"): + url += environ["HTTP_HOST"] + else: + url += environ["SERVER_NAME"] + + if environ["wsgi.url_scheme"] == "https": + if environ["SERVER_PORT"] != "443": + url += ":" + environ["SERVER_PORT"] + else: + if environ["SERVER_PORT"] != "80": + url += ":" + environ["SERVER_PORT"] + + url += quote(environ.get("SCRIPT_NAME", "")) + url += quote(environ.get("PATH_INFO", "")) + if environ.get("QUERY_STRING"): + url += "?" + environ["QUERY_STRING"] + + return url + + +def get_request_headers(environ): + # type: (Mapping[str, str]) -> Mapping[str, str] + """ + Manually grab the request headers from the environ dictionary. + """ + request_headers = {} # type: Mapping[str, str] + for key in environ.keys(): + if key.startswith("HTTP_"): + name = from_wsgi_header(key) + if name: + request_headers[name] = environ[key] + return request_headers + + +def default_wsgi_span_modifier(span, environ): + span.resource = "{} {}".format(environ["REQUEST_METHOD"], environ["PATH_INFO"]) + + +class DDWSGIMiddleware(_DDWSGIMiddlewareBase): + """WSGI middleware providing tracing around an application. + + :param application: The WSGI application to apply the middleware to. + :param tracer: Tracer instance to use the middleware with. Defaults to the global tracer. + :param span_modifier: Span modifier that can add tags to the root span. + Defaults to using the request method and url in the resource. + :param app_is_iterator: Boolean indicating whether the wrapped WSGI app is a Python iterator + """ + + _request_span_name = schematize_url_operation("wsgi.request", protocol="http", direction=SpanDirection.INBOUND) + _application_span_name = "wsgi.application" + _response_span_name = "wsgi.response" + + def __init__(self, application, tracer=None, span_modifier=default_wsgi_span_modifier, app_is_iterator=False): + # type: (Iterable, Optional[Tracer], Callable[[Span, Dict[str, str]], None], bool) -> None + super(DDWSGIMiddleware, self).__init__( + application, tracer or ddtrace.tracer, config.wsgi, None, app_is_iterator=app_is_iterator + ) + self.span_modifier = span_modifier + + def _traced_start_response(self, start_response, request_span, app_span, status, environ, exc_info=None): + with core.context_with_data( + "wsgi.response", + middleware=self, + request_span=request_span, + parent_call=app_span, + status=status, + environ=environ, + span_type=SpanTypes.WEB, + span_name="wsgi.start_response", + service=trace_utils.int_service(None, self._config), + start_span=True, + tags={COMPONENT: self._config.integration_name, SPAN_KIND: SpanKind.SERVER}, + call_key="response_span", + ) as ctx, ctx.get_item("response_span"): + return start_response(status, environ, exc_info) + + def _request_span_modifier(self, req_span, environ, parsed_headers=None): + url = construct_url(environ) + request_headers = parsed_headers if parsed_headers is not None else get_request_headers(environ) + core.dispatch("wsgi.request.prepared", (self, req_span, url, request_headers, environ)) + + def _response_span_modifier(self, resp_span, response): + core.dispatch("wsgi.response.prepared", (resp_span, response)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/__init__.py new file mode 100644 index 0000000..1b9eade --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/__init__.py @@ -0,0 +1,79 @@ 
+""" +The yaaredis integration traces yaaredis requests. + + +Enabling +~~~~~~~~ + +The yaaredis integration is enabled automatically when using +:ref:`ddtrace-run` or :ref:`import ddtrace.auto`. + +Or use :func:`patch()` to manually enable the integration:: + + from ddtrace import patch + patch(yaaredis=True) + + +Global Configuration +~~~~~~~~~~~~~~~~~~~~ + +.. py:data:: ddtrace.config.yaaredis["service"] + + The service name reported by default for yaaredis traces. + + This option can also be set with the ``DD_YAAREDIS_SERVICE`` environment + variable. + + Default: ``"redis"`` + +.. py:data:: ddtrace.config.yaaredis["cmd_max_length"] + + Max allowable size for the yaaredis command span tag. + Anything beyond the max length will be replaced with ``"..."``. + + This option can also be set with the ``DD_YAAREDIS_CMD_MAX_LENGTH`` environment + variable. + + Default: ``1000`` + +.. py:data:: ddtrace.config.aredis["resource_only_command"] + + The span resource will only include the command executed. To include all + arguments in the span resource, set this value to ``False``. + + This option can also be set with the ``DD_REDIS_RESOURCE_ONLY_COMMAND`` environment + variable. + + Default: ``True`` + + +Instance Configuration +~~~~~~~~~~~~~~~~~~~~~~ + +To configure particular yaaredis instances use the :class:`Pin ` API:: + + import yaaredis + from ddtrace import Pin + + client = yaaredis.StrictRedis(host="localhost", port=6379) + + # Override service name for this instance + Pin.override(client, service="my-custom-queue") + + # Traces reported for this client will now have "my-custom-queue" + # as the service name. + async def example(): + await client.get("my-key") +""" + +from ...internal.utils.importlib import require_modules + + +required_modules = ["yaaredis", "yaaredis.client"] + +with require_modules(required_modules) as missing_modules: + if not missing_modules: + from .patch import get_version + from .patch import patch + + __all__ = ["patch", "get_version"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/patch.py new file mode 100644 index 0000000..5166e0d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/contrib/yaaredis/patch.py @@ -0,0 +1,83 @@ +import os + +import yaaredis + +from ddtrace import config +from ddtrace.vendor import wrapt + +from ...internal.schema import schematize_service_name +from ...internal.utils.formats import CMD_MAX_LEN +from ...internal.utils.formats import asbool +from ...internal.utils.formats import stringify_cache_args +from ...internal.utils.wrappers import unwrap +from ...pin import Pin +from ..trace_utils_redis import _run_redis_command_async +from ..trace_utils_redis import _trace_redis_cmd +from ..trace_utils_redis import _trace_redis_execute_pipeline + + +config._add( + "yaaredis", + dict( + _default_service=schematize_service_name("redis"), + cmd_max_length=int(os.getenv("DD_YAAREDIS_CMD_MAX_LENGTH", CMD_MAX_LEN)), + resource_only_command=asbool(os.getenv("DD_REDIS_RESOURCE_ONLY_COMMAND", True)), + ), +) + + +def get_version(): + # type: () -> str + return getattr(yaaredis, "__version__", "") + + +def patch(): + """Patch the instrumented methods""" + if getattr(yaaredis, "_datadog_patch", False): + return + yaaredis._datadog_patch = True + + _w = wrapt.wrap_function_wrapper + + _w("yaaredis.client", "StrictRedis.execute_command", traced_execute_command) + _w("yaaredis.client", "StrictRedis.pipeline", traced_pipeline) + _w("yaaredis.pipeline", 
"StrictPipeline.execute", traced_execute_pipeline) + _w("yaaredis.pipeline", "StrictPipeline.immediate_execute_command", traced_execute_command) + Pin().onto(yaaredis.StrictRedis) + + +def unpatch(): + if getattr(yaaredis, "_datadog_patch", False): + yaaredis._datadog_patch = False + + unwrap(yaaredis.client.StrictRedis, "execute_command") + unwrap(yaaredis.client.StrictRedis, "pipeline") + unwrap(yaaredis.pipeline.StrictPipeline, "execute") + unwrap(yaaredis.pipeline.StrictPipeline, "immediate_execute_command") + + +async def traced_execute_command(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + with _trace_redis_cmd(pin, config.yaaredis, instance, args) as span: + return await _run_redis_command_async(span=span, func=func, args=args, kwargs=kwargs) + + +async def traced_pipeline(func, instance, args, kwargs): + pipeline = await func(*args, **kwargs) + pin = Pin.get_from(instance) + if pin: + pin.onto(pipeline) + return pipeline + + +async def traced_execute_pipeline(func, instance, args, kwargs): + pin = Pin.get_from(instance) + if not pin or not pin.enabled(): + return await func(*args, **kwargs) + + cmds = [stringify_cache_args(c, cmd_max_len=config.yaaredis.cmd_max_length) for c, _ in instance.command_stack] + with _trace_redis_execute_pipeline(pin, config.yaaredis, cmds, instance): + return await func(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/data_streams.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/data_streams.py new file mode 100644 index 0000000..8330b7e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/data_streams.py @@ -0,0 +1,36 @@ +import ddtrace +from ddtrace.internal.datastreams.processor import PROPAGATION_KEY_BASE_64 + + +def set_consume_checkpoint(typ, source, carrier_get): + """ + :param typ: The type of the checkpoint, usually the streaming technology being used. + Examples include kafka, kinesis, sns etc. (str) + :param source: The source of data. This can be a topic, exchange or stream name. (str) + :param carrier_get: A function used to extract context from the carrier (function (str) -> str) + + :returns DataStreamsCtx | None + """ + if ddtrace.config._data_streams_enabled: + processor = ddtrace.tracer.data_streams_processor + processor.decode_pathway_b64(carrier_get(PROPAGATION_KEY_BASE_64)) + return processor.set_checkpoint(["type:" + typ, "topic:" + source, "direction:in", "manual_checkpoint:true"]) + + +def set_produce_checkpoint(typ, target, carrier_set): + """ + :param typ: The type of the checkpoint, usually the streaming technology being used. Examples include + kafka, kinesis, sns etc. (str) + :param target: The destination to which the data is being sent. For instance: topic, exchange or + stream name. 
(str) + :param carrier_set: A function used to inject the context into the carrier (function (str, str) -> None) + + :returns DataStreamsCtx | None + """ + if ddtrace.config._data_streams_enabled: + pathway = ddtrace.tracer.data_streams_processor.set_checkpoint( + ["type:" + typ, "topic:" + target, "direction:out", "manual_checkpoint:true"] + ) + if pathway is not None: + carrier_set(PROPAGATION_KEY_BASE_64, pathway.encode_b64()) + return pathway diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/__init__.py new file mode 100644 index 0000000..343bfbb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/__init__.py @@ -0,0 +1,34 @@ +""" +Dynamic Instrumentation +======================= + +Enablement +---------- + +Dynamic Instrumentation can be enabled by setting the +``DD_DYNAMIC_INSTRUMENTATION_ENABLED`` variable to ``true`` in the environment, +when using the ``ddtrace-run`` command. Alternatively, when ``dtrace-run`` +cannot be used, it can be enabled programmatically with:: + + from ddtrace.debugging import DynamicInstrumentation + + # Enable dynamic instrumentation + DynamicInstrumentation.enable() + + ... + + # Disable dynamic instrumentation + DynamicInstrumentation.disable() + + +Configuration +------------- + +See the :ref:`Configuration` page for more details on how to configure +Dynamic Instrumentation. +""" + +from ddtrace.debugging._debugger import Debugger as DynamicInstrumentation + + +__all__ = ["DynamicInstrumentation"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_async.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_async.py new file mode 100644 index 0000000..b351afd --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_async.py @@ -0,0 +1,27 @@ +import sys +from types import CoroutineType +from typing import Iterable + +from ddtrace.debugging._signal.collector import SignalContext +from ddtrace.internal import compat + + +async def dd_coroutine_wrapper(coro: CoroutineType, contexts: Iterable[SignalContext]) -> CoroutineType: + start_time = compat.monotonic_ns() + try: + retval = await coro + end_time = compat.monotonic_ns() + exc_info = (None, None, None) + except Exception: + end_time = compat.monotonic_ns() + retval = None + exc_info = sys.exc_info() # type: ignore[assignment] + + for context in contexts: + context.exit(retval, exc_info, end_time - start_time) + + _, exc, _ = exc_info + if exc is not None: + raise exc + + return retval diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_config.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_config.py new file mode 100644 index 0000000..fb7b76d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_config.py @@ -0,0 +1,6 @@ +from ddtrace.internal.logger import get_logger +from ddtrace.settings.dynamic_instrumentation import config as di_config # noqa: F401 +from ddtrace.settings.exception_debugging import config as ed_config # noqa: F401 + + +log = get_logger(__name__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_debugger.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_debugger.py new file mode 100644 index 0000000..3c09476 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_debugger.py @@ -0,0 +1,730 @@ +from collections import defaultdict +from collections import deque +from itertools import chain +import linecache +import os +from pathlib import Path +import sys +import threading +from 
types import CoroutineType +from types import FunctionType +from types import ModuleType +from typing import Any +from typing import Deque +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import Set +from typing import Tuple +from typing import cast + +import ddtrace +from ddtrace import config as ddconfig +from ddtrace.debugging._async import dd_coroutine_wrapper +from ddtrace.debugging._config import di_config +from ddtrace.debugging._config import ed_config +from ddtrace.debugging._encoding import LogSignalJsonEncoder +from ddtrace.debugging._encoding import SignalQueue +from ddtrace.debugging._exception.auto_instrument import SpanExceptionProcessor +from ddtrace.debugging._function.discovery import FunctionDiscovery +from ddtrace.debugging._function.store import FullyNamedWrappedFunction +from ddtrace.debugging._function.store import FunctionStore +from ddtrace.debugging._metrics import metrics +from ddtrace.debugging._probe.model import FunctionLocationMixin +from ddtrace.debugging._probe.model import FunctionProbe +from ddtrace.debugging._probe.model import LineLocationMixin +from ddtrace.debugging._probe.model import LineProbe +from ddtrace.debugging._probe.model import LogFunctionProbe +from ddtrace.debugging._probe.model import LogLineProbe +from ddtrace.debugging._probe.model import MetricFunctionProbe +from ddtrace.debugging._probe.model import MetricLineProbe +from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe +from ddtrace.debugging._probe.model import SpanDecorationLineProbe +from ddtrace.debugging._probe.model import SpanFunctionProbe +from ddtrace.debugging._probe.registry import ProbeRegistry +from ddtrace.debugging._probe.remoteconfig import ProbePollerEvent +from ddtrace.debugging._probe.remoteconfig import ProbePollerEventType +from ddtrace.debugging._probe.remoteconfig import ProbeRCAdapter +from ddtrace.debugging._probe.status import ProbeStatusLogger +from ddtrace.debugging._signal.collector import SignalCollector +from ddtrace.debugging._signal.collector import SignalContext +from ddtrace.debugging._signal.metric_sample import MetricSample +from ddtrace.debugging._signal.model import Signal +from ddtrace.debugging._signal.model import SignalState +from ddtrace.debugging._signal.snapshot import Snapshot +from ddtrace.debugging._signal.tracing import DynamicSpan +from ddtrace.debugging._signal.tracing import SpanDecoration +from ddtrace.debugging._uploader import LogsIntakeUploaderV1 +from ddtrace.internal import atexit +from ddtrace.internal import compat +from ddtrace.internal import forksafe +from ddtrace.internal.logger import get_logger +from ddtrace.internal.metrics import Metrics +from ddtrace.internal.module import ModuleHookType +from ddtrace.internal.module import ModuleWatchdog +from ddtrace.internal.module import origin +from ddtrace.internal.module import register_post_run_module_hook +from ddtrace.internal.module import unregister_post_run_module_hook +from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter +from ddtrace.internal.rate_limiter import RateLimitExceeded +from ddtrace.internal.remoteconfig.worker import remoteconfig_poller +from ddtrace.internal.safety import _isinstance +from ddtrace.internal.service import Service +from ddtrace.internal.wrapping import Wrapper +from ddtrace.tracer import Tracer + + +log = get_logger(__name__) + +_probe_metrics = 
Metrics(namespace="dynamic.instrumentation.metric") +_probe_metrics.enable() + + +class DebuggerError(Exception): + """Generic debugger error.""" + + pass + + +class DebuggerModuleWatchdog(ModuleWatchdog): + _locations: Set[str] = set() + + @classmethod + def register_origin_hook(cls, origin: Path, hook: ModuleHookType) -> None: + if origin in cls._locations: + # We already have a hook for this origin, don't register a new one + # but invoke it directly instead, if the module was already loaded. + module = cls.get_by_origin(origin) + if module is not None: + hook(module) + + return + + cls._locations.add(str(origin)) + + super().register_origin_hook(origin, hook) + + @classmethod + def unregister_origin_hook(cls, origin: Path, hook: ModuleHookType) -> None: + try: + cls._locations.remove(str(origin)) + except KeyError: + # Nothing to unregister. + return + + return super().unregister_origin_hook(origin, hook) + + @classmethod + def register_module_hook(cls, module_name: str, hook: ModuleHookType) -> None: + if module_name in cls._locations: + # We already have a hook for this origin, don't register a new one + # but invoke it directly instead, if the module was already loaded. + module = sys.modules.get(module_name) + if module is not None: + hook(module) + + return + + cls._locations.add(module_name) + + super().register_module_hook(module_name, hook) + + @classmethod + def unregister_module_hook(cls, module_name: str, hook: ModuleHookType) -> None: + try: + cls._locations.remove(module_name) + except KeyError: + # Nothing to unregister. + return + + return super().unregister_module_hook(module_name, hook) + + @classmethod + def on_run_module(cls, module: ModuleType) -> None: + if cls._instance is not None: + # Treat run module as an import to trigger import hooks and register + # the module's origin. + cls._instance.after_import(module) + + +class Debugger(Service): + _instance: Optional["Debugger"] = None + _probe_meter = _probe_metrics.get_meter("probe") + _span_processor: Optional[SpanExceptionProcessor] = None + + __rc_adapter__ = ProbeRCAdapter + __uploader__ = LogsIntakeUploaderV1 + __collector__ = SignalCollector + __watchdog__ = DebuggerModuleWatchdog + __logger__ = ProbeStatusLogger + + @classmethod + def enable(cls, run_module: bool = False) -> None: + """Enable dynamic instrumentation + + This class method is idempotent. Dynamic instrumentation will be + disabled automatically at exit. + """ + if cls._instance is not None: + log.debug("%s already enabled", cls.__name__) + return + + log.debug("Enabling %s", cls.__name__) + + di_config.enabled = True + + cls.__watchdog__.install() + + if di_config.metrics: + metrics.enable() + + cls._instance = debugger = cls() + + debugger.start() + + forksafe.register(cls._restart) + atexit.register(cls.disable) + register_post_run_module_hook(cls._on_run_module) + + log.debug("%s enabled", cls.__name__) + + @classmethod + def disable(cls, join: bool = True) -> None: + """Disable dynamic instrumentation. + + This class method is idempotent. Called automatically at exit, if + dynamic instrumentation was enabled. 
+ """ + if cls._instance is None: + log.debug("%s not enabled", cls.__name__) + return + + log.debug("Disabling %s", cls.__name__) + + remoteconfig_poller.unregister("LIVE_DEBUGGING") + + forksafe.unregister(cls._restart) + atexit.unregister(cls.disable) + unregister_post_run_module_hook(cls._on_run_module) + + if cls._instance._span_processor: + cls._instance._span_processor.unregister() + + cls._instance.stop(join=join) + cls._instance = None + + cls.__watchdog__.uninstall() + if di_config.metrics: + metrics.disable() + + di_config.enabled = False + + log.debug("%s disabled", cls.__name__) + + def __init__(self, tracer: Optional[Tracer] = None) -> None: + super().__init__() + + self._tracer = tracer or ddtrace.tracer + service_name = di_config.service_name + + self._signal_queue = SignalQueue( + encoder=LogSignalJsonEncoder(service_name), + on_full=self._on_encoder_buffer_full, + ) + self._status_logger = status_logger = self.__logger__(service_name) + + self._probe_registry = ProbeRegistry(status_logger=status_logger) + self._uploader = self.__uploader__(self._signal_queue) + self._collector = self.__collector__(self._signal_queue) + self._services = [self._uploader] + + self._function_store = FunctionStore(extra_attrs=["__dd_wrappers__"]) + + log_limiter = RateLimiter(limit_rate=1.0, raise_on_exceed=False) + self._global_rate_limiter = RateLimiter( + limit_rate=di_config.global_rate_limit, # TODO: Make it configurable. Note that this is per-process! + on_exceed=lambda: log_limiter.limit(log.warning, "Global rate limit exceeded"), + call_once=True, + raise_on_exceed=False, + ) + + if ed_config.enabled: + from ddtrace.debugging._exception.auto_instrument import SpanExceptionProcessor + + self._span_processor = SpanExceptionProcessor(collector=self._collector) + self._span_processor.register() + else: + self._span_processor = None + + if di_config.enabled: + # TODO: this is only temporary and will be reverted once the DD_REMOTE_CONFIGURATION_ENABLED variable + # has been removed + if ddconfig._remote_config_enabled is False: + ddconfig._remote_config_enabled = True + log.info("Disabled Remote Configuration enabled by Dynamic Instrumentation.") + + # Register the debugger with the RCM client. + if not remoteconfig_poller.update_product_callback("LIVE_DEBUGGING", self._on_configuration): + di_callback = self.__rc_adapter__(None, self._on_configuration, status_logger=status_logger) + remoteconfig_poller.register("LIVE_DEBUGGING", di_callback) + + log.debug("%s initialized (service name: %s)", self.__class__.__name__, service_name) + + def _on_encoder_buffer_full(self, item, encoded): + # type (Any, bytes) -> None + # Send upload request + self._uploader.upload() + + def _dd_debugger_hook(self, probe: Probe) -> None: + """Debugger probe hook. + + This gets called with a reference to the probe. We only check whether + the probe is active. If so, we push the collected data to the collector + for bulk processing. This way we avoid adding delay while the + instrumented code is running. 
+ """ + try: + actual_frame = sys._getframe(1) + signal: Optional[Signal] = None + if isinstance(probe, MetricLineProbe): + signal = MetricSample( + probe=probe, + frame=actual_frame, + thread=threading.current_thread(), + trace_context=self._tracer.current_trace_context(), + meter=self._probe_meter, + ) + elif isinstance(probe, LogLineProbe): + if probe.take_snapshot: + # TODO: Global limit evaluated before probe conditions + if self._global_rate_limiter.limit() is RateLimitExceeded: + return + + signal = Snapshot( + probe=probe, + frame=actual_frame, + thread=threading.current_thread(), + trace_context=self._tracer.current_trace_context(), + ) + elif isinstance(probe, SpanDecorationLineProbe): + signal = SpanDecoration( + probe=probe, + frame=actual_frame, + thread=threading.current_thread(), + ) + else: + log.error("Unsupported probe type: %r", type(probe)) + return + + signal.line() + + log.debug("[%s][P: %s] Debugger. Report signal %s", os.getpid(), os.getppid(), signal) + self._collector.push(signal) + + if signal.state is SignalState.DONE: + self._probe_registry.set_emitting(probe) + + except Exception: + log.error("Failed to execute probe hook", exc_info=True) + + def _dd_debugger_wrapper(self, wrappers: Dict[str, FunctionProbe]) -> Wrapper: + """Debugger wrapper. + + This gets called with a reference to the wrapped function and the probe, + together with the arguments to pass. We only check + whether the probe is active and the debugger is enabled. If so, we + capture all the relevant debugging context. + """ + + def _(wrapped: FunctionType, args: Tuple[Any], kwargs: Dict[str, Any]) -> Any: + if not wrappers: + return wrapped(*args, **kwargs) + + argnames = wrapped.__code__.co_varnames + actual_frame = sys._getframe(1) + allargs = list(chain(zip(argnames, args), kwargs.items())) + thread = threading.current_thread() + + open_contexts: Deque[SignalContext] = deque() + signal: Optional[Signal] = None + + # Group probes on the basis of whether they create new context. + context_creators: List[Probe] = [] + context_consumers: List[Probe] = [] + for p in wrappers.values(): + (context_creators if p.__context_creator__ else context_consumers).append(p) + + # Trigger the context creators first, so that the new context can be + # consumed by the consumers. + for probe in chain(context_creators, context_consumers): + # Because new context might be created, we need to recompute it + # for each probe. + trace_context = self._tracer.current_trace_context() + + if isinstance(probe, MetricFunctionProbe): + signal = MetricSample( + probe=probe, + frame=actual_frame, + thread=thread, + args=allargs, + trace_context=trace_context, + meter=self._probe_meter, + ) + elif isinstance(probe, LogFunctionProbe): + signal = Snapshot( + probe=probe, + frame=actual_frame, + thread=thread, + args=allargs, + trace_context=trace_context, + ) + elif isinstance(probe, SpanFunctionProbe): + signal = DynamicSpan( + probe=probe, + frame=actual_frame, + thread=thread, + args=allargs, + trace_context=trace_context, + ) + elif isinstance(probe, SpanDecorationFunctionProbe): + signal = SpanDecoration( + probe=probe, + frame=actual_frame, + thread=thread, + args=allargs, + ) + else: + log.error("Unsupported probe type: %s", type(probe)) + continue + + # Open probe signal contexts are ordered, with those that have + # created new tracing context first. We need to finalise them in + # reverse order, so we append them to the beginning. 
+ open_contexts.appendleft(self._collector.attach(signal)) + + if not open_contexts: + return wrapped(*args, **kwargs) + + start_time = compat.monotonic_ns() + try: + retval = wrapped(*args, **kwargs) + end_time = compat.monotonic_ns() + exc_info = (None, None, None) + except Exception: + end_time = compat.monotonic_ns() + retval = None + exc_info = sys.exc_info() # type: ignore[assignment] + else: + # DEV: We do not unwind generators here as they might result in + # tight loops. We return the result as a generator object + # instead. + if _isinstance(retval, CoroutineType): + return dd_coroutine_wrapper(retval, open_contexts) + + for context in open_contexts: + context.exit(retval, exc_info, end_time - start_time) + signal = context.signal + if signal.state is SignalState.DONE: + self._probe_registry.set_emitting(signal.probe) + + exc = exc_info[1] + if exc is not None: + raise exc + + return retval + + return _ + + def _probe_injection_hook(self, module: ModuleType) -> None: + # This hook is invoked by the ModuleWatchdog or the post run module hook + # to inject probes. + + # Group probes by function so that we decompile each function once and + # bulk-inject the probes. + probes_for_function: Dict[FullyNamedWrappedFunction, List[Probe]] = defaultdict(list) + for probe in self._probe_registry.get_pending(str(origin(module))): + if not isinstance(probe, LineLocationMixin): + continue + line = probe.line + assert line is not None # nosec + functions = FunctionDiscovery.from_module(module).at_line(line) + if not functions: + module_origin = str(origin(module)) + if linecache.getline(module_origin, line): + # The source actually has a line at the given line number + message = ( + f"Cannot install probe {probe.probe_id}: " + f"function at line {line} within source file {module_origin} " + "is likely decorated with an unsupported decorator." + ) + else: + message = ( + f"Cannot install probe {probe.probe_id}: " + f"no functions at line {line} within source file {module_origin} found" + ) + log.error(message) + self._probe_registry.set_error(probe, "NoFunctionsAtLine", message) + continue + for function in (cast(FullyNamedWrappedFunction, _) for _ in functions): + probes_for_function[function].append(cast(LineProbe, probe)) + + for function, probes in probes_for_function.items(): + failed = self._function_store.inject_hooks( + function, [(self._dd_debugger_hook, cast(LineProbe, probe).line, probe) for probe in probes] + ) + + for probe in probes: + if probe.probe_id in failed: + self._probe_registry.set_error(probe, "InjectionFailure", "Failed to inject") + else: + self._probe_registry.set_installed(probe) + + if failed: + log.error("[%s][P: %s] Failed to inject probes %r", os.getpid(), os.getppid(), failed) + + log.debug( + "[%s][P: %s] Injected probes %r in %r", + os.getpid(), + os.getppid(), + [probe.probe_id for probe in probes if probe.probe_id not in failed], + function, + ) + + def _inject_probes(self, probes: List[LineProbe]) -> None: + for probe in probes: + if probe not in self._probe_registry: + if len(self._probe_registry) >= di_config.max_probes: + log.warning("Too many active probes. 
Ignoring new ones.") + return + log.debug("[%s][P: %s] Received new %s.", os.getpid(), os.getppid(), probe) + self._probe_registry.register(probe) + + resolved_source = probe.source_file + if resolved_source is None: + log.error( + "Cannot inject probe %s: source file %s cannot be resolved", probe.probe_id, probe.source_file + ) + self._probe_registry.set_error(probe, "NoSourceFile", "Source file location cannot be resolved") + continue + + for source in {probe.source_file for probe in probes if probe.source_file is not None}: + try: + self.__watchdog__.register_origin_hook(source, self._probe_injection_hook) + except Exception as exc: + for probe in probes: + if probe.source_file != source: + continue + exc_type = type(exc) + self._probe_registry.set_error(probe, exc_type.__name__, str(exc)) + log.error("Cannot register probe injection hook on source '%s'", source, exc_info=True) + + def _eject_probes(self, probes_to_eject: List[LineProbe]) -> None: + # TODO[perf]: Bulk-collect probes as for injection. This is lower + # priority as probes are normally removed manually by users. + unregistered_probes: List[LineProbe] = [] + for probe in probes_to_eject: + if probe not in self._probe_registry: + log.error("Attempted to eject unregistered probe %r", probe) + continue + + (registered_probe,) = self._probe_registry.unregister(probe) + unregistered_probes.append(cast(LineProbe, registered_probe)) + + probes_for_source: Dict[Path, List[LineProbe]] = defaultdict(list) + for probe in unregistered_probes: + if probe.source_file is None: + continue + probes_for_source[probe.source_file].append(probe) + + for resolved_source, probes in probes_for_source.items(): + module = self.__watchdog__.get_by_origin(resolved_source) + if module is not None: + # The module is still loaded, so we can try to eject the hooks + probes_for_function: Dict[FullyNamedWrappedFunction, List[LineProbe]] = defaultdict(list) + for probe in probes: + if not isinstance(probe, LineLocationMixin): + continue + line = probe.line + assert line is not None, probe # nosec + functions = FunctionDiscovery.from_module(module).at_line(line) + for function in (cast(FullyNamedWrappedFunction, _) for _ in functions): + probes_for_function[function].append(probe) + + for function, ps in probes_for_function.items(): + failed = self._function_store.eject_hooks( + cast(FunctionType, function), + [(self._dd_debugger_hook, probe.line, probe) for probe in ps if probe.line is not None], + ) + for probe in ps: + if probe.probe_id in failed: + log.error("Failed to eject %r from %r", probe, function) + else: + log.debug("Ejected %r from %r", probe, function) + + if not self._probe_registry.has_probes(str(resolved_source)): + try: + self.__watchdog__.unregister_origin_hook(resolved_source, self._probe_injection_hook) + log.debug("Unregistered injection hook on source '%s'", resolved_source) + except ValueError: + log.error("Cannot unregister injection hook for %r", probe, exc_info=True) + + def _probe_wrapping_hook(self, module: ModuleType) -> None: + probes = self._probe_registry.get_pending(module.__name__) + for probe in probes: + if not isinstance(probe, FunctionLocationMixin): + continue + + try: + assert probe.module is not None and probe.func_qname is not None # nosec + function = FunctionDiscovery.from_module(module).by_name(probe.func_qname) + except ValueError: + message = ( + f"Cannot install probe {probe.probe_id}: no function '{probe.func_qname}' in module {probe.module}" + "found (note: if the function exists, it might be decorated with 
an unsupported decorator)" + ) + self._probe_registry.set_error(probe, "NoFunctionInModule", message) + log.error(message) + continue + + if hasattr(function, "__dd_wrappers__"): + # TODO: Check if this can be made into a set instead + wrapper = cast(FullyNamedWrappedFunction, function) + assert wrapper.__dd_wrappers__, "Function has debugger wrappers" # nosec + wrapper.__dd_wrappers__[probe.probe_id] = probe + log.debug( + "[%s][P: %s] Function probe %r added to already wrapped %r", + os.getpid(), + os.getppid(), + probe.probe_id, + function, + ) + else: + wrappers = cast(FullyNamedWrappedFunction, function).__dd_wrappers__ = {probe.probe_id: probe} + self._function_store.wrap(cast(FunctionType, function), self._dd_debugger_wrapper(wrappers)) + log.debug( + "[%s][P: %s] Function probe %r wrapped around %r", + os.getpid(), + os.getppid(), + probe.probe_id, + function, + ) + self._probe_registry.set_installed(probe) + + def _wrap_functions(self, probes: List[FunctionProbe]) -> None: + for probe in probes: + if len(self._probe_registry) >= di_config.max_probes: + log.warning("Too many active probes. Ignoring new ones.") + return + + self._probe_registry.register(probe) + try: + assert probe.module is not None # nosec + self.__watchdog__.register_module_hook(probe.module, self._probe_wrapping_hook) + except Exception as exc: + exc_type = type(exc) + self._probe_registry.set_error(probe, exc_type.__name__, str(exc)) + log.error("Cannot register probe wrapping hook on module '%s'", probe.module, exc_info=True) + + def _unwrap_functions(self, probes: List[FunctionProbe]) -> None: + # Keep track of all the modules involved to see if there are any import + # hooks that we can clean up at the end. + touched_modules: Set[str] = set() + + for probe in probes: + registered_probes = self._probe_registry.unregister(probe) + if not registered_probes: + log.error("Attempted to eject unregistered probe %r", probe) + continue + + (registered_probe,) = registered_probes + + assert probe.module is not None # nosec + module = sys.modules.get(probe.module, None) + if module is not None: + # The module is still loaded, so we can try to unwrap the function + touched_modules.add(probe.module) + assert probe.func_qname is not None # nosec + function = FunctionDiscovery.from_module(module).by_name(probe.func_qname) + if hasattr(function, "__dd_wrappers__"): + wrapper = cast(FullyNamedWrappedFunction, function) + assert wrapper.__dd_wrappers__, "Function has debugger wrappers" # nosec + del wrapper.__dd_wrappers__[probe.probe_id] + if not wrapper.__dd_wrappers__: + del wrapper.__dd_wrappers__ + self._function_store.unwrap(wrapper) + log.debug("Unwrapped %r", registered_probe) + else: + log.error("Attempted to unwrap %r, but no wrapper found", registered_probe) + + # Clean up import hooks. 
+ for module_name in touched_modules: + if not self._probe_registry.has_probes(module_name): + try: + self.__watchdog__.unregister_module_hook(module_name, self._probe_wrapping_hook) + log.debug("Unregistered wrapping import hook on module %s", module_name) + except ValueError: + log.error("Cannot unregister wrapping import hook for module %r", module_name, exc_info=True) + + def _on_configuration(self, event: ProbePollerEventType, probes: Iterable[Probe]) -> None: + log.debug("[%s][P: %s] Received poller event %r with probes %r", os.getpid(), os.getppid(), event, probes) + + if event == ProbePollerEvent.STATUS_UPDATE: + self._probe_registry.log_probes_status() + return + + if event == ProbePollerEvent.MODIFIED_PROBES: + for probe in probes: + if probe in self._probe_registry: + registered_probe = self._probe_registry.get(probe.probe_id) + if registered_probe is None: + # We didn't have the probe. This shouldn't have happened! + log.error("Modified probe %r was not found in registry.", probe) + continue + self._probe_registry.update(probe) + + return + + line_probes: List[LineProbe] = [] + function_probes: List[FunctionProbe] = [] + for probe in probes: + if isinstance(probe, LineLocationMixin): + line_probes.append(cast(LineProbe, probe)) + elif isinstance(probe, FunctionLocationMixin): + function_probes.append(cast(FunctionProbe, probe)) + else: + log.warning("Skipping probe '%r': not supported.", probe) + + if event == ProbePollerEvent.NEW_PROBES: + self._inject_probes(line_probes) + self._wrap_functions(function_probes) + elif event == ProbePollerEvent.DELETED_PROBES: + self._eject_probes(line_probes) + self._unwrap_functions(function_probes) + else: + raise ValueError("Unknown probe poller event %r" % event) + + def _stop_service(self, join: bool = True) -> None: + self._function_store.restore_all() + for service in self._services: + service.stop() + if join: + service.join() + + def _start_service(self) -> None: + for service in self._services: + log.debug("[%s][P: %s] Debugger. 
Start service %s", os.getpid(), os.getppid(), service) + service.start() + + @classmethod + def _restart(cls): + log.info("[%s][P: %s] Restarting the debugger in child process", os.getpid(), os.getppid()) + cls.disable(join=False) + cls.enable() + + @classmethod + def _on_run_module(cls, module: ModuleType) -> None: + debugger = cls._instance + if debugger is not None: + debugger.__watchdog__.on_run_module(module) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_encoding.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_encoding.py new file mode 100644 index 0000000..a7a3c70 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_encoding.py @@ -0,0 +1,324 @@ +import abc +from dataclasses import dataclass +from heapq import heapify +from heapq import heappop +from heapq import heappush +import json +import os +from threading import Thread +from types import FrameType +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional + +from ddtrace.debugging._config import di_config +from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.snapshot import Snapshot +from ddtrace.internal import forksafe +from ddtrace.internal._encoding import BufferFull +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +class JsonBuffer(object): + def __init__(self, max_size=None): + self.max_size = max_size + self._reset() + + def put(self, item: bytes) -> int: + if self._flushed: + self._reset() + + size = len(item) + if self.size + size > self.max_size: + raise BufferFull(self.size, size) + + if self.size > 2: + self.size += 1 + self._buffer += b"," + self._buffer += item + self.size += size + return size + + def _reset(self): + self.size = 2 + self._buffer = bytearray(b"[") + self._flushed = False + + def flush(self): + self._buffer += b"]" + try: + return self._buffer + finally: + self._flushed = True + + +class Encoder(abc.ABC): + @abc.abstractmethod + def encode(self, item: Any) -> bytes: + """Encode the given snapshot.""" + + +class BufferedEncoder(abc.ABC): + count = 0 + + @abc.abstractmethod + def put(self, item: Any) -> int: + """Enqueue the given item and returns its encoded size.""" + + @abc.abstractmethod + def flush(self) -> Optional[bytes]: + """Flush the buffer and return the encoded data.""" + + +def _logs_track_logger_details(thread: Thread, frame: FrameType) -> Dict[str, Any]: + code = frame.f_code + + return { + "name": code.co_filename, + "method": code.co_name, + "thread_name": "%s;pid:%d" % (thread.name, os.getpid()), + "thread_id": thread.ident, + "version": 2, + } + + +def add_tags(payload): + if not di_config._tags_in_qs and di_config.tags: + payload["ddtags"] = di_config.tags + + +def _build_log_track_payload( + service: str, + signal: LogSignal, + host: Optional[str], +) -> Dict[str, Any]: + context = signal.trace_context + + payload = { + "service": service, + "debugger.snapshot": signal.snapshot, + "host": host, + "logger": _logs_track_logger_details(signal.thread, signal.frame), + "dd.trace_id": context.trace_id if context else None, + "dd.span_id": context.span_id if context else None, + "ddsource": "dd_debugger", + "message": signal.message, + "timestamp": int(signal.timestamp * 1e3), # milliseconds, + } + add_tags(payload) + return payload + + +class JSONTree: + @dataclass + class Node: + start: int + end: int + level: int + parent: Optional["JSONTree.Node"] + children: List["JSONTree.Node"] + + pruned: int = 
0 + not_captured_depth: bool = False + not_captured: bool = False + + @property + def key(self): + return self.not_captured_depth, self.level, self.not_captured, len(self) + + def __len__(self): + return self.end - self.start + + def __lt__(self, other): + # The Python heapq pops the smallest item, so we reverse the + # comparison. + return self.key > other.key + + @property + def leaves(self): + if not self.children: + yield self + else: + for child in self.children[::-1]: + yield from child.leaves + + def __init__(self, data): + self._iter = enumerate(data) + self._stack: List["JSONTree.Node"] = [] # TODO: deque + self.root = None + self.level = 0 + + self._string_iter = None + + self._state = self._object + self._on_string_match = self._not_captured + + self._parse() + + def _depth_string(self): + self._stack[-1].not_captured_depth = True + return self._object + + def _not_captured(self, i, c): + if c == '"': + self._string_iter = iter("depth") + self._on_string_match = self._depth_string + self._state = self._string + + elif c not in " :\n\t\r": + self._state = self._object + + def _not_captured_string(self): + self._stack[-1].not_captured = True + return self._not_captured + + def _escape(self, i, c): + self._state = self._string + + def _string(self, i, c): + if c == '"': + self._state = ( + self._on_string_match() + if self._string_iter is not None and next(self._string_iter, None) is None + else self._object + ) + + elif c == "\\": + # If we are escaping a character, we are not parsing the + # "notCapturedReason" string. + self._string_iter = None + self._state = self._escape + + if self._string_iter is not None and c != next(self._string_iter, None): + self._string_iter = None + + def _object(self, i, c): + if c == "}": + o = self._stack.pop() + o.end = i + 1 + self.level -= 1 + if not self._stack: + self.root = o + + elif c == '"': + self._string_iter = iter("notCapturedReason") + self._on_string_match = self._not_captured_string + self._state = self._string + + elif c == "{": + o = self.Node(i, 0, self.level, None, []) + self.level += 1 + if self._stack: + o.parent = self._stack[-1] + o.parent.children.append(o) + self._stack.append(o) + + def _parse(self): + for i, c in self._iter: + self._state(i, c) + if self.root is not None: + return + + @property + def leaves(self): + return list(self.root.leaves) + + +class LogSignalJsonEncoder(Encoder): + MAX_SIGNAL_SIZE = (1 << 20) - 2 + MIN_LEVEL = 5 + + def __init__(self, service: str, host: Optional[str] = None) -> None: + self._service = service + self._host = host + + def encode(self, log_signal: LogSignal) -> bytes: + return self.pruned(json.dumps(_build_log_track_payload(self._service, log_signal, self._host))).encode("utf-8") + + def pruned(self, log_signal_json: str) -> str: + if len(log_signal_json) <= self.MAX_SIGNAL_SIZE: + return log_signal_json + + PRUNED_PROPERTY = '{"pruned":true}' + PRUNED_LEN = len(PRUNED_PROPERTY) + + tree = JSONTree(log_signal_json) + + delta = len(tree.root) - self.MAX_SIGNAL_SIZE + nodes, s = {}, 0 + + leaves = [_ for _ in tree.leaves if _.level >= self.MIN_LEVEL] + heapify(leaves) + while leaves: + leaf = heappop(leaves) + nodes[leaf.start] = leaf + s += len(leaf) - PRUNED_LEN + if s > delta: + break + + parent = leaf.parent + parent.pruned += 1 + if parent.pruned >= len(parent.children): + # We have pruned all the children of this parent node so we can + # treat it as a leaf now. 
+ parent.not_captured_depth = parent.not_captured = True + heappush(leaves, parent) + for c in parent.children: + del nodes[c.start] + s -= len(c) - PRUNED_LEN + + pruned_nodes = sorted(nodes.values(), key=lambda n: n.start) # Leaf nodes don't overlap + + segments = [log_signal_json[: pruned_nodes[0].start]] + for n, m in zip(pruned_nodes, pruned_nodes[1:]): + segments.append(PRUNED_PROPERTY) + segments.append(log_signal_json[n.end : m.start]) + segments.append(PRUNED_PROPERTY) + segments.append(log_signal_json[pruned_nodes[-1].end :]) + + return "".join(segments) + + +class SignalQueue(BufferedEncoder): + def __init__( + self, + encoder: Encoder, + buffer_size: int = 4 * (1 << 20), + on_full: Optional[Callable[[Any, bytes], None]] = None, + ) -> None: + self._encoder = encoder + self._buffer = JsonBuffer(buffer_size) + self._lock = forksafe.Lock() + self._on_full = on_full + self.count = 0 + self.max_size = buffer_size - self._buffer.size + + def put(self, item: Snapshot) -> int: + return self.put_encoded(item, self._encoder.encode(item)) + + def put_encoded(self, item: Snapshot, encoded: bytes) -> int: + try: + with self._lock: + size = self._buffer.put(encoded) + self.count += 1 + return size + except BufferFull: + if self._on_full is not None: + self._on_full(item, encoded) + raise + + def flush(self) -> Optional[bytes]: + with self._lock: + if self.count == 0: + # Reclaim memory + self._buffer._reset() + return None + + encoded = self._buffer.flush() + self.count = 0 + return encoded diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_exception/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_exception/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_exception/auto_instrument.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_exception/auto_instrument.py new file mode 100644 index 0000000..ebc1dc5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_exception/auto_instrument.py @@ -0,0 +1,210 @@ +from collections import deque +from itertools import count +import sys +from threading import current_thread +from types import TracebackType +import typing as t +import uuid + +import attr + +from ddtrace.debugging._probe.model import LiteralTemplateSegment +from ddtrace.debugging._probe.model import LogLineProbe +from ddtrace.debugging._signal.collector import SignalCollector +from ddtrace.debugging._signal.snapshot import DEFAULT_CAPTURE_LIMITS +from ddtrace.debugging._signal.snapshot import Snapshot +from ddtrace.internal.processor import SpanProcessor +from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter +from ddtrace.internal.rate_limiter import RateLimitExceeded +from ddtrace.span import Span + + +GLOBAL_RATE_LIMITER = RateLimiter( + limit_rate=1, # one trace per second + raise_on_exceed=False, +) + +# used to mark that the span have debug info captured, visible to users +DEBUG_INFO_TAG = "error.debug_info_captured" + +# used to rate limit decision on the entire local trace (stored at the root span) +CAPTURE_TRACE_TAG = "_dd.debug.error.trace_captured" + +# unique exception id +EXCEPTION_ID_TAG = "_dd.debug.error.exception_id" + +# link to matching snapshot for every frame in the traceback +FRAME_SNAPSHOT_ID_TAG = "_dd.debug.error.%d.snapshot_id" +FRAME_FUNCTION_TAG = "_dd.debug.error.%d.function" +FRAME_FILE_TAG = "_dd.debug.error.%d.file" +FRAME_LINE_TAG = "_dd.debug.error.%d.line" + + +def unwind_exception_chain( + exc: 
t.Optional[BaseException], + tb: t.Optional[TracebackType], +) -> t.Tuple[t.Deque[t.Tuple[BaseException, t.Optional[TracebackType]]], t.Optional[uuid.UUID]]: + """Unwind the exception chain and assign it an ID.""" + chain: t.Deque[t.Tuple[BaseException, t.Optional[TracebackType]]] = deque() + + while exc is not None: + chain.append((exc, tb)) + + if exc.__cause__ is not None: + exc = exc.__cause__ + elif exc.__context__ is not None and not exc.__suppress_context__: + exc = exc.__context__ + else: + exc = None + + tb = getattr(exc, "__traceback__", None) + + exc_id = None + if chain: + # If the chain is not trivial we generate an ID for the whole chain and + # store it on the outermost exception, if not already generated. + exc, _ = chain[-1] + try: + exc_id = exc._dd_exc_id # type: ignore[attr-defined] + except AttributeError: + exc._dd_exc_id = exc_id = uuid.uuid4() # type: ignore[attr-defined] + + return chain, exc_id + + +@attr.s +class SpanExceptionProbe(LogLineProbe): + @classmethod + def build(cls, exc_id: uuid.UUID, tb: TracebackType) -> "SpanExceptionProbe": + _exc_id = str(exc_id) + frame = tb.tb_frame + filename = frame.f_code.co_filename + line = tb.tb_lineno + name = frame.f_code.co_name + message = f"exception info for {name}, in {filename}, line {line} (exception ID {_exc_id})" + + return cls( + probe_id=_exc_id, + version=0, + tags={}, + source_file=filename, + line=line, + template=message, + segments=[LiteralTemplateSegment(message)], + take_snapshot=True, + limits=DEFAULT_CAPTURE_LIMITS, + condition=None, + condition_error_rate=0.0, + rate=float("inf"), + ) + + +@attr.s +class SpanExceptionSnapshot(Snapshot): + exc_id = attr.ib(type=t.Optional[uuid.UUID], default=None) + + @property + def data(self) -> t.Dict[str, t.Any]: + data = super().data + + data.update({"exception-id": str(self.exc_id)}) + + return data + + +def can_capture(span: Span) -> bool: + # We determine if we should capture the exception information from the span + # by looking at its local root. If we have budget to capture, we mark the + # root as "info captured" and return True. If we don't have budget, we mark + # the root as "info not captured" and return False. If the root is already + # marked, we return the mark. 
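# --- Illustrative aside (not part of the vendored diff) ----------------------
# A minimal, standalone sketch of the __cause__/__context__ walk performed by
# unwind_exception_chain above: the handled exception is visited first and the
# root cause last. Names here are hypothetical; only CPython exception
# semantics are assumed.
from collections import deque

def _unwind(exc):
    chain = deque()
    while exc is not None:
        chain.append(exc)
        if exc.__cause__ is not None:
            exc = exc.__cause__
        elif exc.__context__ is not None and not exc.__suppress_context__:
            exc = exc.__context__
        else:
            exc = None
    return chain

try:
    try:
        raise KeyError("inner")
    except KeyError as inner:
        raise RuntimeError("outer") from inner
except RuntimeError as handled:
    assert [type(e).__name__ for e in _unwind(handled)] == ["RuntimeError", "KeyError"]
# ------------------------------------------------------------------------------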
+ root = span._local_root + if root is None: + return False + + info_captured = root.get_tag(CAPTURE_TRACE_TAG) + + if info_captured == "false": + return False + + if info_captured == "true": + return True + + if info_captured is None: + result = GLOBAL_RATE_LIMITER.limit() is not RateLimitExceeded + root.set_tag_str(CAPTURE_TRACE_TAG, str(result).lower()) + return result + + msg = f"unexpected value for {CAPTURE_TRACE_TAG}: {info_captured}" + raise ValueError(msg) + + +@attr.s +class SpanExceptionProcessor(SpanProcessor): + collector = attr.ib(type=SignalCollector) + + def on_span_start(self, span: Span) -> None: + pass + + def on_span_finish(self, span: Span) -> None: + if not (span.error and can_capture(span)): + # No error or budget to capture + return + + _, exc, _tb = sys.exc_info() + + chain, exc_id = unwind_exception_chain(exc, _tb) + if not chain or exc_id is None: + # No exceptions to capture + return + + seq = count(1) # 1-based sequence number + + while chain: + exc, _tb = chain.pop() # LIFO: reverse the chain + + if _tb is None or _tb.tb_frame is None: + # If we don't have a traceback there isn't much we can do + continue + + # DEV: We go from the handler up to the root exception + while _tb and _tb.tb_frame: + frame = _tb.tb_frame + code = frame.f_code + seq_nr = next(seq) + + # TODO: Check if it is user code; if not, skip. We still + # generate a sequence number. + + try: + snapshot_id = frame.f_locals["_dd_debug_snapshot_id"] + except KeyError: + # We don't have a snapshot for the frame so we create one + snapshot = SpanExceptionSnapshot( + probe=SpanExceptionProbe.build(exc_id, _tb), + frame=frame, + thread=current_thread(), + trace_context=span, + exc_id=exc_id, + ) + + # Capture + snapshot.line() + + # Collect + self.collector.push(snapshot) + + # Memoize + frame.f_locals["_dd_debug_snapshot_id"] = snapshot_id = snapshot.uuid + + # Add correlation tags on the span + span.set_tag_str(FRAME_SNAPSHOT_ID_TAG % seq_nr, snapshot_id) + span.set_tag_str(FRAME_FUNCTION_TAG % seq_nr, code.co_name) + span.set_tag_str(FRAME_FILE_TAG % seq_nr, code.co_filename) + span.set_tag_str(FRAME_LINE_TAG % seq_nr, str(_tb.tb_lineno)) + + # Move up the stack + _tb = _tb.tb_next + + span.set_tag_str(DEBUG_INFO_TAG, "true") + span.set_tag_str(EXCEPTION_ID_TAG, str(exc_id)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_expressions.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_expressions.py new file mode 100644 index 0000000..db19a9f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_expressions.py @@ -0,0 +1,365 @@ +r"""Debugger expression language + +This module implements the debugger expression language that is used in the UI +to define probe conditions and metric expressions. The JSON AST is compiled into +Python bytecode. 
+ +Full grammar: + + predicate => | | + direct_predicate => {"": } + direct_predicate_type => not | isEmpty | isUndefined + value_source => | + literal => | true | false | "string" + number => 0 | ([1-9][0-9]*\.[0-9]+) + identifier => + arg_predicate => {"": []} + arg_predicate_type => eq | ne | gt | ge | lt | le | any | all | and | or + | startsWith | endsWith | contains | matches + argument_list => (,)+ + operation => | + direct_opearation => {"": } + direct_op_type => len | count | ref + arg_operation => {"": []} + arg_op_type => filter | substring | getmember | index +""" # noqa +from itertools import chain +import re +import sys +from types import FunctionType +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + +import attr +from bytecode import Bytecode +from bytecode import Compare +from bytecode import Instr + +from ddtrace.debugging._safety import safe_getitem +from ddtrace.internal.compat import PYTHON_VERSION_INFO as PY +from ddtrace.internal.logger import get_logger + + +DDASTType = Union[Dict[str, Any], Dict[str, List[Any]], Any] + +log = get_logger(__name__) + + +def _is_identifier(name: str) -> bool: + return isinstance(name, str) and name.isidentifier() + + +IN_OPERATOR_INSTR = Instr("COMPARE_OP", Compare.IN) if PY < (3, 9) else Instr("CONTAINS_OP", 0) +NOT_IN_OPERATOR_INSTR = Instr("COMPARE_OP", Compare.NOT_IN) if PY < (3, 9) else Instr("CONTAINS_OP", 1) + + +class DDCompiler: + @classmethod + def __getmember__(cls, o, a): + return object.__getattribute__(o, a) + + @classmethod + def __index__(cls, o, i): + return safe_getitem(o, i) + + @classmethod + def __ref__(cls, x): + return x + + def _make_function(self, ast: DDASTType, args: Tuple[str, ...], name: str) -> FunctionType: + compiled = self._compile_predicate(ast) + if compiled is None: + raise ValueError("Invalid predicate: %r" % ast) + + instrs = compiled + [Instr("RETURN_VALUE")] + if sys.version_info >= (3, 11): + instrs.insert(0, Instr("RESUME", 0)) + + abstract_code = Bytecode(instrs) + abstract_code.argcount = len(args) + abstract_code.argnames = args + abstract_code.name = name + + return FunctionType(abstract_code.to_code(), {}, name, (), None) + + def _make_lambda(self, ast: DDASTType) -> Callable[[Any, Any], Any]: + return self._make_function(ast, ("_dd_it", "_locals"), "") + + def _compile_direct_predicate(self, ast: DDASTType) -> Optional[List[Instr]]: + # direct_predicate => {"": } + # direct_predicate_type => not | isEmpty | isUndefined + if not isinstance(ast, dict): + return None + + _type, arg = next(iter(ast.items())) + + if _type not in {"not", "isEmpty", "isUndefined"}: + return None + + value = self._compile_predicate(arg) + if value is None: + raise ValueError("Invalid argument: %r" % arg) + + if _type == "isUndefined": + value.append(Instr("LOAD_FAST", "_locals")) + value.append(NOT_IN_OPERATOR_INSTR) + else: + value.append(Instr("UNARY_NOT")) + + return value + + def _compile_arg_predicate(self, ast: DDASTType) -> Optional[List[Instr]]: + # arg_predicate => {"": []} + # arg_predicate_type => eq | ne | gt | ge | lt | le | any | all | and | or + # | startsWith | endsWith | contains | matches + if not isinstance(ast, dict): + return None + + _type, args = next(iter(ast.items())) + + if _type in {"or", "and"}: + a, b = args + ca, cb = self._compile_predicate(a), self._compile_predicate(b) + if ca is None: + raise ValueError("Invalid argument: %r" % a) + if cb is 
None: + raise ValueError("Invalid argument: %r" % b) + return ca + cb + [Instr("BINARY_%s" % _type.upper())] + + if _type in {"eq", "ge", "gt", "le", "lt", "ne"}: + a, b = args + ca, cb = self._compile_predicate(a), self._compile_predicate(b) + if ca is None: + raise ValueError("Invalid argument: %r" % a) + if cb is None: + raise ValueError("Invalid argument: %r" % b) + return ca + cb + [Instr("COMPARE_OP", getattr(Compare, _type.upper()))] + + if _type == "contains": + a, b = args + ca, cb = self._compile_predicate(a), self._compile_predicate(b) + if ca is None: + raise ValueError("Invalid argument: %r" % a) + if cb is None: + raise ValueError("Invalid argument: %r" % b) + return cb + ca + [IN_OPERATOR_INSTR] + + if _type in {"any", "all"}: + a, b = args + f = __builtins__[_type] # type: ignore[index] + ca, fb = self._compile_predicate(a), self._make_lambda(b) + + if ca is None: + raise ValueError("Invalid argument: %r" % a) + + return self._call_function( + lambda i, c, _locals: f(c(_, _locals) for _ in i), + ca, + [Instr("LOAD_CONST", fb)], + [Instr("LOAD_FAST", "_locals")], + ) + + if _type in {"startsWith", "endsWith"}: + a, b = args + ca, cb = self._compile_predicate(a), self._compile_predicate(b) + if ca is None: + raise ValueError("Invalid argument: %r" % a) + if cb is None: + raise ValueError("Invalid argument: %r" % b) + return self._call_function(getattr(str, _type.lower()), ca, cb) + + if _type == "matches": + a, b = args + string, pattern = self._compile_predicate(a), self._compile_predicate(b) + if string is None: + raise ValueError("Invalid argument: %r" % a) + if pattern is None: + raise ValueError("Invalid argument: %r" % b) + return self._call_function(lambda p, s: re.match(p, s) is not None, pattern, string) + + return None + + def _compile_direct_operation(self, ast: DDASTType) -> Optional[List[Instr]]: + # direct_opearation => {"": } + # direct_op_type => len | count | ref + if not isinstance(ast, dict): + return None + + _type, arg = next(iter(ast.items())) + + if _type in {"len", "count"}: + value = self._compile_value_source(arg) + if value is None: + raise ValueError("Invalid argument: %r" % arg) + return self._call_function(len, value) + + if _type == "ref": + if not isinstance(arg, str): + return None + + if arg == "@it": + return [Instr("LOAD_FAST", "_dd_it")] + + return [ + Instr("LOAD_FAST", "_locals"), + Instr("LOAD_CONST", self.__ref__(arg)), + Instr("BINARY_SUBSCR"), + ] + + return None + + def _call_function(self, func: Callable, *args: List[Instr]) -> List[Instr]: + if PY < (3, 11): + return [Instr("LOAD_CONST", func)] + list(chain(*args)) + [Instr("CALL_FUNCTION", len(args))] + elif PY >= (3, 12): + return [Instr("PUSH_NULL"), Instr("LOAD_CONST", func)] + list(chain(*args)) + [Instr("CALL", len(args))] + + # Python 3.11 + return ( + [Instr("PUSH_NULL"), Instr("LOAD_CONST", func)] + + list(chain(*args)) + + [Instr("PRECALL", len(args)), Instr("CALL", len(args))] + ) + + def _compile_arg_operation(self, ast: DDASTType) -> Optional[List[Instr]]: + # arg_operation => {"": []} + # arg_op_type => filter | substring + if not isinstance(ast, dict): + return None + + _type, args = next(iter(ast.items())) + + if _type not in {"filter", "substring", "getmember", "index"}: + return None + + if _type == "substring": + v, a, b = args + cv, ca, cb = self._compile_predicate(v), self._compile_predicate(a), self._compile_predicate(b) + if cv is None: + raise ValueError("Invalid argument: %r" % v) + if ca is None: + raise ValueError("Invalid argument: %r" % a) + if cb is 
None: + raise ValueError("Invalid argument: %r" % b) + return cv + ca + cb + [Instr("BUILD_SLICE", 2), Instr("BINARY_SUBSCR")] + + if _type == "filter": + a, b = args + ca, fb = self._compile_predicate(a), self._make_lambda(b) + + if ca is None: + raise ValueError("Invalid argument: %r" % a) + + return self._call_function( + lambda i, c, _locals: type(i)(_ for _ in i if c(_, _locals)), + ca, + [Instr("LOAD_CONST", fb)], + [Instr("LOAD_FAST", "_locals")], + ) + + if _type == "getmember": + v, attr = args + if not _is_identifier(attr): + raise ValueError("Invalid identifier: %r" % attr) + + cv = self._compile_predicate(v) + if not cv: + return None + + return self._call_function(self.__getmember__, cv, [Instr("LOAD_CONST", attr)]) + + if _type == "index": + v, i = args + cv = self._compile_predicate(v) + if not cv: + return None + ci = self._compile_predicate(i) + if not ci: + return None + return self._call_function(self.__index__, cv, ci) + + return None + + def _compile_operation(self, ast: DDASTType) -> Optional[List[Instr]]: + # operation => | + return self._compile_direct_operation(ast) or self._compile_arg_operation(ast) + + def _compile_literal(self, ast: DDASTType) -> Optional[List[Instr]]: + # literal => | true | false | "string" | null + if not (isinstance(ast, (str, int, float, bool)) or ast is None): + return None + + return [Instr("LOAD_CONST", ast)] + + def _compile_value_source(self, ast: DDASTType) -> Optional[List[Instr]]: + # value_source => | + return self._compile_operation(ast) or self._compile_literal(ast) + + def _compile_predicate(self, ast: DDASTType) -> Optional[List[Instr]]: + # predicate => | | + return ( + self._compile_direct_predicate(ast) or self._compile_arg_predicate(ast) or self._compile_value_source(ast) + ) + + def compile(self, ast: DDASTType) -> Callable[[Dict[str, Any]], Any]: + return self._make_function(ast, ("_locals",), "") + + +dd_compile = DDCompiler().compile + + +class DDExpressionEvaluationError(Exception): + """Thrown when an error occurs while evaluating a dsl expression.""" + + def __init__(self, dsl, e): + super().__init__('Failed to evaluate expression "%s": %s' % (dsl, str(e))) + self.dsl = dsl + self.error = str(e) + + +def _invalid_expression(_): + """Forces probes with invalid expression/conditions to never trigger. + + Any signs of invalid conditions in logs is an indication of a problem with + the expression compiler. 
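# --- Illustrative aside (not part of the vendored diff) ----------------------
# Example of the JSON AST shape accepted by DDCompiler above, together with the
# plain-Python meaning it compiles to. The equivalences shown are hand-written
# assumptions for illustration; the vendored compiler itself emits bytecode
# that reads references out of the `_locals` mapping.
condition_ast = {"gt": [{"ref": "retries"}, 3]}        # "retries > 3"
is_empty_ast = {"isEmpty": {"ref": "items"}}           # "not items"

def _condition(_locals):
    return _locals["retries"] > 3

def _is_empty(_locals):
    return not _locals["items"]

assert _condition({"retries": 5}) is True
assert _is_empty({"items": []}) is True
# ------------------------------------------------------------------------------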
+ """ + return None + + +@attr.s +class DDExpression(object): + __compiler__ = dd_compile + + dsl = attr.ib(type=str) + callable = attr.ib(type=Callable[[Dict[str, Any]], Any]) + + def eval(self, _locals): + try: + return self.callable(_locals) + except Exception as e: + raise DDExpressionEvaluationError(self.dsl, e) from e + + def __call__(self, _locals): + return self.eval(_locals) + + @classmethod + def on_compiler_error(cls, dsl: str, exc: Exception) -> Callable[[Dict[str, Any]], Any]: + log.error("Cannot compile expression: %s", dsl, exc_info=True) + return _invalid_expression + + @classmethod + def compile(cls, expr: Dict[str, Any]) -> "DDExpression": + ast = expr["json"] + dsl = expr["dsl"] + + try: + compiled = cls.__compiler__(ast) + except Exception as e: + compiled = cls.on_compiler_error(dsl, e) + + return cls(dsl=dsl, callable=compiled) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/discovery.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/discovery.py new file mode 100644 index 0000000..176e909 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/discovery.py @@ -0,0 +1,240 @@ +from collections import defaultdict +from collections import deque +from pathlib import Path + +from ddtrace.internal.utils.inspection import undecorated +from ddtrace.vendor.wrapt.wrappers import FunctionWrapper + + +try: + from typing import Protocol +except ImportError: + from typing_extensions import Protocol # type: ignore[assignment] + +from types import FunctionType +from types import ModuleType +from typing import Any +from typing import Dict +from typing import Iterator +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type +from typing import Union +from typing import cast + +from ddtrace.internal.logger import get_logger +from ddtrace.internal.module import origin +from ddtrace.internal.safety import _isinstance +from ddtrace.internal.utils.inspection import linenos + + +log = get_logger(__name__) + +FunctionContainerType = Union[type, property, classmethod, staticmethod, Tuple, ModuleType] + +ContainerKey = Union[str, int, Type[staticmethod], Type[classmethod]] + +CONTAINER_TYPES = (type, property, classmethod, staticmethod) + + +class FullyNamed(Protocol): + """A fully named object.""" + + __name__: Optional[str] = None + __fullname__: Optional[str] = None + + +class FullyNamedFunction(FullyNamed): + """A fully named function object.""" + + def __call__(self, *args, **kwargs): + pass + + +class ContainerIterator(Iterator, FullyNamedFunction): + """Wrapper around different types of function containers. + + A container comes with an origin, i.e. a parent container and a position + within it in the form of a key. 
+ """ + + def __init__( + self, + container: FunctionContainerType, + origin: Optional[Union[Tuple["ContainerIterator", ContainerKey], Tuple[FullyNamedFunction, str]]] = None, + ) -> None: + if isinstance(container, (type, ModuleType)): + self._iter = iter(container.__dict__.items()) + self.__name__ = container.__name__ + + elif isinstance(container, tuple): + self._iter = iter(enumerate(_.cell_contents for _ in container)) # type: ignore[arg-type] + self.__name__ = "" + + elif isinstance(container, property): + self._iter = iter( + (m, getattr(container, a)) for m, a in {("getter", "fget"), ("setter", "fset"), ("deleter", "fdel")} + ) + assert container.fget is not None # nosec + self.__name__ = container.fget.__name__ + + elif isinstance(container, (classmethod, staticmethod)): + self._iter = iter([(type(container), container.__func__)]) # type: ignore[list-item] + self.__name__ = None + + else: + raise TypeError("Unsupported container type: %s", type(container)) + + self._container = container + + if origin is not None and origin[0].__fullname__ is not None: + origin_fullname = origin[0].__fullname__ + self.__fullname__ = ".".join((origin_fullname, self.__name__)) if self.__name__ else origin_fullname + else: + self.__fullname__ = self.__name__ + + def __iter__(self) -> Iterator[Tuple[ContainerKey, Any]]: + return self._iter + + def __next__(self) -> Tuple[ContainerKey, Any]: + return next(self._iter) + + next = __next__ + + +def _local_name(name: str, f: FunctionType) -> str: + func_name = f.__name__ + if func_name.startswith("__") and name.endswith(func_name): + # Quite likely a mangled name + return func_name + + if name != func_name: + # Brought into scope by an import, or a decorator + return "..".join((name, func_name)) + + return func_name + + +def _collect_functions(module: ModuleType) -> Dict[str, FullyNamedFunction]: + """Collect functions from a given module. + + All the collected functions are augmented with a ``__fullname__`` attribute + to disambiguate the same functions assigned to different names. + """ + path = origin(module) + if path is None: + # We are not able to determine what this module actually exports. + return {} + + containers = deque([ContainerIterator(module)]) + functions = {} + seen_containers = set() + seen_functions = set() + + while containers: + c = containers.popleft() + + if id(c._container) in seen_containers: + continue + seen_containers.add(id(c._container)) + + for k, o in c: + code = getattr(o, "__code__", None) if _isinstance(o, (FunctionType, FunctionWrapper)) else None + if code is not None: + local_name = _local_name(k, o) if isinstance(k, str) else o.__name__ + + if o not in seen_functions: + seen_functions.add(o) + o = cast(FullyNamedFunction, o) + o.__fullname__ = ".".join((c.__fullname__, local_name)) if c.__fullname__ else local_name + + for name in (k, local_name) if isinstance(k, str) and k != local_name else (local_name,): + fullname = ".".join((c.__fullname__, name)) if c.__fullname__ else name + if fullname not in functions or Path(code.co_filename).resolve() == path: + # Give precedence to code objects from the module and + # try to retrieve any potentially decorated function so + # that we don't end up returning the decorator function + # instead of the original function. 
+ functions[fullname] = undecorated(o, name, path) if name == k else o + + try: + if o.__closure__: + containers.append(ContainerIterator(o.__closure__, origin=(o, ""))) + except AttributeError: + pass + + elif _isinstance(o, CONTAINER_TYPES): + if _isinstance(o, property) and not isinstance(o.fget, FunctionType): + continue + containers.append(ContainerIterator(o, origin=(c, k))) + + return functions + + +class FunctionDiscovery(defaultdict): + """Discover all function objects in a module. + + The discovered functions can be retrieved by line number or by their + qualified name. In principle one wants to create a function discovery + object per module and then cache the information. For this reason, + instances of this class should be obtained with the ``from_module`` class + method. This builds the discovery object and caches the information on the + module object itself. + """ + + def __init__(self, module: ModuleType) -> None: + super().__init__(list) + self._module = module + self._fullname_index = {} + + functions = _collect_functions(module) + seen_functions = set() + module_path = origin(module) + if module_path is None: + # We are not going to collect anything because no code objects will + # match the origin. + return + + for fname, function in functions.items(): + if ( + function not in seen_functions + and Path(cast(FunctionType, function).__code__.co_filename).resolve() == module_path + ): + # We only map line numbers for functions that actually belong to + # the module. + for lineno in linenos(cast(FunctionType, function)): + self[lineno].append(function) + self._fullname_index[fname] = function + seen_functions.add(function) + + def at_line(self, line: int) -> List[FullyNamedFunction]: + """Get the functions at the given line. + + Note that, in general, there can be multiple copies of the same + functions. This can happen as a result, e.g., of using decorators. + """ + return self[line] + + def by_name(self, qualname: str) -> FullyNamedFunction: + """Get the function by its qualified name.""" + fullname = ".".join((self._module.__name__, qualname)) + try: + return self._fullname_index[fullname] + except KeyError: + raise ValueError("Function '%s' not found" % fullname) + + @classmethod + def from_module(cls, module: ModuleType) -> "FunctionDiscovery": + """Return a function discovery object from the given module. + + If this is called on a module for the first time, it caches the + information on the module object itself. Subsequent calls will + return the cached information. 
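# --- Illustrative aside (not part of the vendored diff) ----------------------
# Sketch of how FunctionDiscovery above is meant to be used (assumes the
# vendored ddtrace package is importable from this forwarder bundle; results
# are printed rather than asserted). The discovery object is cached on the
# module itself, so repeated from_module() calls are cheap.
import json as _json_module
from ddtrace.debugging._function.discovery import FunctionDiscovery

fd = FunctionDiscovery.from_module(_json_module)           # builds and caches on the module
print(fd.by_name("dumps").__fullname__)                    # lookup by qualified name
print(FunctionDiscovery.from_module(_json_module) is fd)   # True: cached on the module object
# ------------------------------------------------------------------------------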
+ """ + # Cache the function tree on the module + try: + return module.__function_discovery__ + except AttributeError: + fd = module.__function_discovery__ = cls(module) # type: ignore[attr-defined] + return fd diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/store.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/store.py new file mode 100644 index 0000000..bdc8988 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_function/store.py @@ -0,0 +1,111 @@ +from types import CodeType +from types import FunctionType +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +from typing import Set +from typing import cast + +from ddtrace.debugging._function.discovery import FullyNamed +from ddtrace.internal.injection import HookInfoType +from ddtrace.internal.injection import HookType +from ddtrace.internal.injection import eject_hooks +from ddtrace.internal.injection import inject_hooks +from ddtrace.internal.wrapping import WrappedFunction +from ddtrace.internal.wrapping import Wrapper +from ddtrace.internal.wrapping import unwrap +from ddtrace.internal.wrapping import wrap + + +WrapperType = Callable[[FunctionType, Any, Any, Any], Any] + + +class FullyNamedWrappedFunction(FullyNamed, WrappedFunction): + """A fully named wrapper function.""" + + +class FunctionStore(object): + """Function object store. + + This class provides a storage layer for patching operations, which allows us + to store the original code object of functions being patched with either + hook injections or wrapping. This also enforce a single wrapping layer. + Multiple wrapping is implemented as a list of wrappers handled by the single + wrapper function. + + If extra attributes are defined during the patching process, they will get + removed when the functions are restored. + """ + + def __init__(self, extra_attrs: Optional[List[str]] = None) -> None: + self._code_map: Dict[FunctionType, CodeType] = {} + self._wrapper_map: Dict[FunctionType, Wrapper] = {} + self._extra_attrs = ["__dd_wrapped__"] + if extra_attrs: + self._extra_attrs.extend(extra_attrs) + + def __enter__(self): + return self + + def __exit__(self, *exc): + self.restore_all() + + def _store(self, function: FunctionType) -> None: + if function not in self._code_map: + self._code_map[function] = function.__code__ + + def inject_hooks(self, function: FullyNamedWrappedFunction, hooks: List[HookInfoType]) -> Set[str]: + """Bulk-inject hooks into a function. + + Returns the set of probe IDs for those probes that failed to inject. + """ + try: + return self.inject_hooks(cast(FullyNamedWrappedFunction, function.__dd_wrapped__), hooks) + except AttributeError: + f = cast(FunctionType, function) + self._store(f) + return {p.probe_id for _, _, p in inject_hooks(f, hooks)} + + def eject_hooks(self, function: FunctionType, hooks: List[HookInfoType]) -> Set[str]: + """Bulk-eject hooks from a function. + + Returns the set of probe IDs for those probes that failed to eject. + """ + try: + wrapped = cast(FullyNamedWrappedFunction, function).__dd_wrapped__ + except AttributeError: + # Not a wrapped function so we can actually eject from it + return {p.probe_id for _, _, p in eject_hooks(function, hooks)} + else: + # Try on the wrapped function. 
+ return self.eject_hooks(cast(FunctionType, wrapped), hooks) + + def inject_hook(self, function: FullyNamedWrappedFunction, hook: HookType, line: int, arg: Any) -> bool: + """Inject a hook into a function.""" + return not not self.inject_hooks(function, [(hook, line, arg)]) + + def eject_hook(self, function: FunctionType, hook: HookType, line: int, arg: Any) -> bool: + """Eject a hook from a function.""" + return not not self.eject_hooks(function, [(hook, line, arg)]) + + def wrap(self, function: FunctionType, wrapper: Wrapper) -> None: + """Wrap a function with a hook.""" + self._store(function) + self._wrapper_map[function] = wrapper + wrap(function, wrapper) + + def unwrap(self, function: FullyNamedWrappedFunction) -> None: + """Unwrap a hook around a wrapped function.""" + unwrap(function, self._wrapper_map.pop(cast(FunctionType, function))) + + def restore_all(self) -> None: + """Restore all the patched functions to their original form.""" + for function, code in self._code_map.items(): + function.__code__ = code + for attr in self._extra_attrs: + try: + delattr(function, attr) + except AttributeError: + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_metrics.py new file mode 100644 index 0000000..fe731b5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_metrics.py @@ -0,0 +1,9 @@ +from ddtrace.internal.metrics import Metrics + + +# Debugger metrics +metrics = Metrics(namespace="debugger") + +# Metric probe metrics (always enabled) +probe_metrics = Metrics(namespace="debugger.metric") +probe_metrics.enable() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/model.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/model.py new file mode 100644 index 0000000..d516692 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/model.py @@ -0,0 +1,290 @@ +import abc +from enum import Enum +from pathlib import Path +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union + +import attr + +from ddtrace.debugging._expressions import DDExpression +from ddtrace.internal.logger import get_logger +from ddtrace.internal.module import _resolve +from ddtrace.internal.rate_limiter import BudgetRateLimiterWithJitter as RateLimiter +from ddtrace.internal.safety import _isinstance +from ddtrace.internal.utils.cache import cached + + +log = get_logger(__name__) + +DEFAULT_PROBE_RATE = 5000.0 +DEFAULT_SNAPSHOT_PROBE_RATE = 1.0 +DEFAULT_PROBE_CONDITION_ERROR_RATE = 1.0 / 60 / 5 + + +@cached() +def _resolve_source_file(_path: str) -> Optional[Path]: + """Resolve the source path for the given path. + + This recursively strips parent directories until it finds a file that + exists according to sys.path. 
+ """ + path = Path(_path) + if path.is_file(): + return path.resolve() + + for relpath in (path.relative_to(_) for _ in path.parents): + resolved_path = _resolve(relpath) + if resolved_path is not None: + return resolved_path + + return None + + +MAXLEVEL = 2 +MAXSIZE = 100 +MAXLEN = 255 +MAXFIELDS = 20 + + +@attr.s +class CaptureLimits(object): + max_level = attr.ib(type=int, default=MAXLEVEL) + max_size = attr.ib(type=int, default=MAXSIZE) + max_len = attr.ib(type=int, default=MAXLEN) + max_fields = attr.ib(type=int, default=MAXFIELDS) + + +DEFAULT_CAPTURE_LIMITS = CaptureLimits() + + +@attr.s +class Probe(abc.ABC): + __context_creator__ = False + + probe_id = attr.ib(type=str) + version = attr.ib(type=int) + tags = attr.ib(type=dict, eq=False) + + def update(self, other: "Probe") -> None: + """Update the mutable fields from another probe.""" + if self.probe_id != other.probe_id: + log.error("Probe ID mismatch when updating mutable fields") + return + + if self.version == other.version: + return + + for attrib in (_.name for _ in self.__attrs_attrs__ if _.eq): + setattr(self, attrib, getattr(other, attrib)) + + def __hash__(self): + return hash(self.probe_id) + + +@attr.s +class RateLimitMixin(abc.ABC): + rate = attr.ib(type=float, eq=False) + limiter = attr.ib(type=RateLimiter, init=False, repr=False, eq=False) + + @limiter.default + def _(self): + return RateLimiter( + limit_rate=self.rate, + tau=1.0 / self.rate if self.rate else 1.0, + on_exceed=lambda: log.warning("Rate limit exceeeded for %r", self), + call_once=True, + raise_on_exceed=False, + ) + + +@attr.s +class ProbeConditionMixin(object): + """Conditional probe. + + If the condition is ``None``, then this is equivalent to a non-conditional + probe. + """ + + condition = attr.ib(type=Optional[DDExpression]) + condition_error_rate = attr.ib(type=float, eq=False) + condition_error_limiter = attr.ib(type=RateLimiter, init=False, repr=False, eq=False) + + @condition_error_limiter.default + def _(self): + return RateLimiter( + limit_rate=self.condition_error_rate, + tau=1.0 / self.condition_error_rate if self.condition_error_rate else 1.0, + on_exceed=lambda: log.debug("Condition error rate limit exceeeded for %r", self), + call_once=True, + raise_on_exceed=False, + ) + + +@attr.s +class ProbeLocationMixin(object): + def location(self) -> Tuple[Optional[str], Optional[Union[str, int]]]: + """return a tuple of (location,sublocation) for the probe. 
+ For example, line probe returns the (file,line) and method probe return (module,method) + """ + return (None, None) + + +@attr.s +class LineLocationMixin(ProbeLocationMixin): + source_file = attr.ib(type=Path, converter=_resolve_source_file, eq=False) # type: ignore[misc] + line = attr.ib(type=int, eq=False) + + def location(self): + return (str(self.source_file) if self.source_file is not None else None, self.line) + + +class ProbeEvaluateTimingForMethod(str, Enum): + DEFAULT = "DEFAULT" + ENTER = "ENTER" + EXIT = "EXIT" + + +@attr.s +class FunctionLocationMixin(ProbeLocationMixin): + module = attr.ib(type=str, eq=False) + func_qname = attr.ib(type=str, eq=False) + evaluate_at = attr.ib(type=ProbeEvaluateTimingForMethod) + + def location(self): + return (self.module, self.func_qname) + + +class MetricProbeKind(str, Enum): + COUNTER = "COUNT" + GAUGE = "GAUGE" + HISTOGRAM = "HISTOGRAM" + DISTRIBUTION = "DISTRIBUTION" + + +@attr.s +class MetricProbeMixin(object): + kind = attr.ib(type=str) + name = attr.ib(type=str) + value = attr.ib(type=Optional[DDExpression]) + + +@attr.s +class MetricLineProbe(Probe, LineLocationMixin, MetricProbeMixin, ProbeConditionMixin): + pass + + +@attr.s +class MetricFunctionProbe(Probe, FunctionLocationMixin, MetricProbeMixin, ProbeConditionMixin): + pass + + +@attr.s +class TemplateSegment(abc.ABC): + @abc.abstractmethod + def eval(self, _locals: Dict[str, Any]) -> str: + pass + + +@attr.s +class LiteralTemplateSegment(TemplateSegment): + str_value = attr.ib(type=str, default=None) + + def eval(self, _locals: Dict[str, Any]) -> Any: + return self.str_value + + +@attr.s +class ExpressionTemplateSegment(TemplateSegment): + expr = attr.ib(type=DDExpression, default=None) + + def eval(self, _locals: Dict[str, Any]) -> Any: + return self.expr.eval(_locals) + + +@attr.s +class StringTemplate(object): + template = attr.ib(type=str) + segments = attr.ib(type=List[TemplateSegment]) + + def render(self, _locals: Dict[str, Any], serializer: Callable[[Any], str]) -> str: + def _to_str(value): + return value if _isinstance(value, str) else serializer(value) + + return "".join([_to_str(s.eval(_locals)) for s in self.segments]) + + +@attr.s +class LogProbeMixin(object): + template = attr.ib(type=str) + segments = attr.ib(type=List[TemplateSegment]) + take_snapshot = attr.ib(type=bool) + limits = attr.ib(type=CaptureLimits, eq=False) + + +@attr.s +class LogLineProbe(Probe, LineLocationMixin, LogProbeMixin, ProbeConditionMixin, RateLimitMixin): + pass + + +@attr.s +class LogFunctionProbe(Probe, FunctionLocationMixin, LogProbeMixin, ProbeConditionMixin, RateLimitMixin): + pass + + +@attr.s +class SpanProbeMixin(object): + pass + + +@attr.s +class SpanFunctionProbe(Probe, FunctionLocationMixin, SpanProbeMixin, ProbeConditionMixin): + __context_creator__ = True + + +class SpanDecorationTargetSpan(object): + ROOT = "ROOT" + ACTIVE = "ACTIVE" + + +@attr.s +class SpanDecorationTag(object): + name = attr.ib(type=str) + value = attr.ib(type=StringTemplate) + + +@attr.s +class SpanDecoration(object): + when = attr.ib(type=Optional[DDExpression]) + tags = attr.ib(type=List[SpanDecorationTag]) + + +@attr.s +class SpanDecorationMixin(object): + target_span = attr.ib(type=SpanDecorationTargetSpan) + decorations = attr.ib(type=List[SpanDecoration]) + + +@attr.s +class SpanDecorationLineProbe(Probe, LineLocationMixin, SpanDecorationMixin): + pass + + +@attr.s +class SpanDecorationFunctionProbe(Probe, FunctionLocationMixin, SpanDecorationMixin): + pass + + +LineProbe = Union[LogLineProbe, 
MetricLineProbe, SpanDecorationLineProbe] +FunctionProbe = Union[LogFunctionProbe, MetricFunctionProbe, SpanFunctionProbe, SpanDecorationFunctionProbe] + + +class ProbeType(object): + LOG_PROBE = "LOG_PROBE" + METRIC_PROBE = "METRIC_PROBE" + SPAN_PROBE = "SPAN_PROBE" + SPAN_DECORATION_PROBE = "SPAN_DECORATION_PROBE" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/registry.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/registry.py new file mode 100644 index 0000000..32e8f44 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/registry.py @@ -0,0 +1,204 @@ +from collections import defaultdict +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import cast + +from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import ProbeLocationMixin +from ddtrace.debugging._probe.status import ProbeStatusLogger +from ddtrace.internal import forksafe +from ddtrace.internal.logger import get_logger + + +logger = get_logger(__name__) + + +class ProbeRegistryEntry(object): + __slots__ = ( + "probe", + "installed", + "emitting", + "error_type", + "message", + ) + + def __init__(self, probe: Probe) -> None: + self.probe = probe + self.installed = False + self.emitting = False + self.error_type: Optional[str] = None + self.message: Optional[str] = None + + def set_installed(self) -> None: + self.installed = True + + def set_emitting(self) -> None: + self.emitting = True + + def set_error(self, error_type: str, message: str) -> None: + self.error_type = error_type + self.message = message + + def update(self, probe: Probe) -> None: + self.probe.update(probe) + + +def _get_probe_location(probe: Probe) -> Optional[str]: + if isinstance(probe, ProbeLocationMixin): + return probe.location()[0] + else: + raise ValueError("Unsupported probe type: {}".format(type(probe))) + + +class ProbeRegistry(dict): + """Keep track of all the registered probes. + + New probes are also registered as pending, on a location basis, until they + are processed (e.g. installed, generally by some import hook). Pending + probes can be retrieved with the ``get_pending`` method. + """ + + def __init__(self, status_logger: ProbeStatusLogger, *args: Any, **kwargs: Any) -> None: + """Initialize the probe registry.""" + super().__init__(*args, **kwargs) + self.logger = status_logger + + # Used to keep track of probes pending installation + self._pending: Dict[str, List[Probe]] = defaultdict(list) + + self._lock = forksafe.RLock() + + def register(self, *probes: Probe) -> None: + """Register a probe.""" + with self._lock: + for probe in probes: + if probe in self: + # Already registered. 
+ continue + + self[probe.probe_id] = ProbeRegistryEntry(probe) + + location = _get_probe_location(probe) + if location is None: + self.set_error( + probe, + "UnresolvedLocation", + "Unable to resolve location information for probe {}".format(probe.probe_id), + ) + continue + + self._pending[location].append(probe) + + self.logger.received(probe) + + def update(self, probe): + with self._lock: + if probe not in self: + logger.error("Attempted to update unregistered probe %s", probe.probe_id) + return + + self[probe.probe_id].update(probe) + + self.log_probe_status(probe) + + def set_installed(self, probe: Probe) -> None: + """Set the installed flag for a probe.""" + with self._lock: + self[probe.probe_id].set_installed() + + # No longer pending + self._remove_pending(probe) + + self.logger.installed(probe) + + def set_emitting(self, probe: Probe) -> None: + """Set the emitting flag for a probe.""" + with self._lock: + entry = cast(ProbeRegistryEntry, self[probe.probe_id]) + if not entry.emitting: + entry.set_emitting() + self.logger.emitting(probe) + + def set_error(self, probe: Probe, error_type: str, message: str) -> None: + """Set the error message for a probe.""" + with self._lock: + self[probe.probe_id].set_error(error_type, message) + self.logger.error(probe, (error_type, message)) + + def _log_probe_status_unlocked(self, entry: ProbeRegistryEntry) -> None: + if entry.emitting: + self.logger.emitting(entry.probe) + elif entry.installed: + self.logger.installed(entry.probe) + elif entry.error_type: + assert entry.message is not None, entry # nosec + self.logger.error(entry.probe, error=(entry.error_type, entry.message)) + else: + self.logger.received(entry.probe) + + def log_probe_status(self, probe: Probe) -> None: + """Log the status of a probe using the status logger.""" + with self._lock: + self._log_probe_status_unlocked(self[probe.probe_id]) + + def log_probes_status(self) -> None: + """Log the status of all the probes using the status logger.""" + with self._lock: + for entry in self.values(): + self._log_probe_status_unlocked(entry) + + def _remove_pending(self, probe: Probe) -> None: + location = _get_probe_location(probe) + + # Pending probes must have valid location information + assert location is not None, probe # nosec + + pending_probes = self._pending[location] + try: + # DEV: Note that this is O(n), which is fine with a conservative + # number of probes. + pending_probes.remove(probe) + except ValueError: + # The probe wasn't pending + pass + if not pending_probes: + del self._pending[location] + + def has_probes(self, location: str) -> bool: + for entry in self.values(): + if _get_probe_location(entry.probe) == location: + return True + return False + + def unregister(self, *probes: Probe) -> List[Probe]: + """Unregister a collection of probes. + + This also ensures that any pending probes are removed if they haven't + been processed yet. 
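# --- Illustrative aside (not part of the vendored diff) ----------------------
# Standalone, simplified sketch (hypothetical names) of the "pending by
# location" bookkeeping done by ProbeRegistry above: probes are queued under
# their source location until an import hook reports them installed.
from collections import defaultdict

_pending = defaultdict(list)

def _register(probe_id, location):
    _pending[location].append(probe_id)

def _set_installed(probe_id, location):
    probes = _pending[location]
    probes.remove(probe_id)          # O(n), acceptable for a handful of probes
    if not probes:
        del _pending[location]       # drop empty buckets, as _remove_pending does

_register("p1", "app/handler.py")
assert _pending["app/handler.py"] == ["p1"]
_set_installed("p1", "app/handler.py")
assert "app/handler.py" not in _pending
# ------------------------------------------------------------------------------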
+ """ + unregistered_probes = [] + with self._lock: + for probe in probes: + try: + entry = self.pop(probe.probe_id) + except KeyError: + # We don't seem to have the probe + logger.warning("Tried to unregister unregistered probe %s", probe.probe_id) + else: + probe = entry.probe + self._remove_pending(probe) + unregistered_probes.append(probe) + return unregistered_probes + + def get_pending(self, location: str) -> List[Probe]: + """Get the currently pending probes by location.""" + return self._pending[location] + + def __contains__(self, probe: object) -> bool: + """Check if a probe is in the registry.""" + assert isinstance(probe, Probe), probe # nosec + + with self._lock: + return super().__contains__(probe.probe_id) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/remoteconfig.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/remoteconfig.py new file mode 100644 index 0000000..392cd6d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/remoteconfig.py @@ -0,0 +1,341 @@ +from itertools import count +import os +import time +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import Optional +from typing import Type + +from ddtrace import config as tracer_config +from ddtrace.debugging._config import di_config +from ddtrace.debugging._probe.model import DEFAULT_PROBE_CONDITION_ERROR_RATE +from ddtrace.debugging._probe.model import DEFAULT_PROBE_RATE +from ddtrace.debugging._probe.model import DEFAULT_SNAPSHOT_PROBE_RATE +from ddtrace.debugging._probe.model import CaptureLimits +from ddtrace.debugging._probe.model import ExpressionTemplateSegment +from ddtrace.debugging._probe.model import FunctionProbe +from ddtrace.debugging._probe.model import LineProbe +from ddtrace.debugging._probe.model import LiteralTemplateSegment +from ddtrace.debugging._probe.model import LogFunctionProbe +from ddtrace.debugging._probe.model import LogLineProbe +from ddtrace.debugging._probe.model import MetricFunctionProbe +from ddtrace.debugging._probe.model import MetricLineProbe +from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import ProbeType +from ddtrace.debugging._probe.model import SpanDecoration +from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe +from ddtrace.debugging._probe.model import SpanDecorationLineProbe +from ddtrace.debugging._probe.model import SpanDecorationTag +from ddtrace.debugging._probe.model import SpanFunctionProbe +from ddtrace.debugging._probe.model import StringTemplate +from ddtrace.debugging._probe.model import TemplateSegment +from ddtrace.debugging._probe.status import ProbeStatusLogger +from ddtrace.debugging._redaction import DDRedactedExpression +from ddtrace.internal.logger import get_logger +from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector +from ddtrace.internal.remoteconfig._publishers import RemoteConfigPublisher +from ddtrace.internal.remoteconfig._pubsub import PubSub +from ddtrace.internal.remoteconfig._subscribers import RemoteConfigSubscriber + + +log = get_logger(__name__) + + +def xlate_keys(d: Dict[str, Any], mapping: Dict[str, str]) -> Dict[str, Any]: + return {mapping.get(k, k): v for k, v in d.items()} + + +def _compile_segment(segment: dict) -> Optional[TemplateSegment]: + if "str" in segment: + return LiteralTemplateSegment(str_value=segment["str"]) + + if "json" in segment: + return 
ExpressionTemplateSegment(expr=DDRedactedExpression.compile(segment)) + + # what type of error we should show here? + return None + + +def _match_env_and_version(probe: Probe) -> bool: + probe_version = probe.tags.get("version", None) + probe_env = probe.tags.get("env", None) + + return (probe_version is None or probe_version == tracer_config.version) and ( + probe_env is None or probe_env == tracer_config.env + ) + + +def _filter_by_env_and_version(f: Callable[..., Iterable[Probe]]) -> Callable[..., Iterable[Probe]]: + def _wrapper(*args: Any, **kwargs: Any) -> Iterable[Probe]: + return [_ for _ in f(*args, **kwargs) if _match_env_and_version(_)] + + return _wrapper + + +class ProbeFactory(object): + __line_class__: Optional[Type[LineProbe]] = None + __function_class__: Optional[Type[FunctionProbe]] = None + + @classmethod + def update_args(cls, args, attribs): + raise NotImplementedError() + + @classmethod + def build(cls, args: Dict[str, Any], attribs: Dict[str, Any]) -> Any: + cls.update_args(args, attribs) + + where = attribs["where"] + if where.get("sourceFile", None) is not None: + if cls.__line_class__ is None: + raise TypeError("Line probe type is not supported") + + args["source_file"] = where["sourceFile"] + args["line"] = int(where["lines"][0]) + + return cls.__line_class__(**args) + + if cls.__function_class__ is None: + raise TypeError("Function probe type is not supported") + + args["module"] = where.get("type") or where["typeName"] + args["func_qname"] = where.get("method") or where["methodName"] + args["evaluate_at"] = attribs.get("evaluateAt") + + return cls.__function_class__(**args) + + +class LogProbeFactory(ProbeFactory): + __line_class__ = LogLineProbe + __function_class__ = LogFunctionProbe + + @classmethod + def update_args(cls, args, attribs): + take_snapshot = attribs.get("captureSnapshot", False) + + rate = DEFAULT_SNAPSHOT_PROBE_RATE if take_snapshot else DEFAULT_PROBE_RATE + sampling = attribs.get("sampling") + if sampling is not None: + rate = sampling.get("snapshotsPerSecond", rate) + + args.update( + condition=DDRedactedExpression.compile(attribs["when"]) if "when" in attribs else None, + rate=rate, + limits=CaptureLimits( + **xlate_keys( + attribs["capture"], + { + "maxReferenceDepth": "max_level", + "maxCollectionSize": "max_size", + "maxLength": "max_len", + "maxFieldCount": "max_fields", + }, + ) + ) + if "capture" in attribs + else None, + condition_error_rate=DEFAULT_PROBE_CONDITION_ERROR_RATE, # TODO: should we take rate limit out of Probe? + take_snapshot=take_snapshot, + template=attribs.get("template"), + segments=[_compile_segment(segment) for segment in attribs.get("segments", [])], + ) + + +class MetricProbeFactory(ProbeFactory): + __line_class__ = MetricLineProbe + __function_class__ = MetricFunctionProbe + + @classmethod + def update_args(cls, args, attribs): + # adding probe_id to probe-tags so it would be recorded as a metric tag + args["tags"]["debugger.probeid"] = args["probe_id"] + + args.update( + condition=DDRedactedExpression.compile(attribs["when"]) if "when" in attribs else None, + name=attribs["metricName"], + kind=attribs["kind"], + condition_error_rate=DEFAULT_PROBE_CONDITION_ERROR_RATE, # TODO: should we take rate limit out of Probe? 
+ value=DDRedactedExpression.compile(attribs["value"]) if "value" in attribs else None, + ) + + +class SpanProbeFactory(ProbeFactory): + __function_class__ = SpanFunctionProbe + + @classmethod + def update_args(cls, args, attribs): + args.update( + condition=DDRedactedExpression.compile(attribs["when"]) if "when" in attribs else None, + condition_error_rate=DEFAULT_PROBE_CONDITION_ERROR_RATE, # TODO: should we take rate limit out of Probe? + ) + + +class SpanDecorationProbeFactory(ProbeFactory): + __line_class__ = SpanDecorationLineProbe + __function_class__ = SpanDecorationFunctionProbe + + @classmethod + def update_args(cls, args, attribs): + args.update( + target_span=attribs["targetSpan"], + decorations=[ + SpanDecoration( + when=DDRedactedExpression.compile(d["when"]) if "when" in d else None, + tags=[ + SpanDecorationTag( + name=t["name"], + value=StringTemplate( + template=t["value"].get("template"), + segments=[_compile_segment(segment) for segment in t["value"].get("segments", [])], + ), + ) + for t in d.get("tags", []) + ], + ) + for d in attribs["decorations"] + ], + ) + + +class InvalidProbeConfiguration(ValueError): + pass + + +def build_probe(attribs: Dict[str, Any]) -> Probe: + """ + Create a new Probe instance. + """ + try: + _type = attribs["type"] + _id = attribs["id"] + except KeyError as e: + raise InvalidProbeConfiguration("Invalid probe attributes: %s" % e) + + args = dict( + probe_id=_id, + version=attribs.get("version", 0), + tags=dict(_.split(":", 1) for _ in attribs.get("tags", [])), + ) + + if _type == ProbeType.LOG_PROBE: + return LogProbeFactory.build(args, attribs) + if _type == ProbeType.METRIC_PROBE: + return MetricProbeFactory.build(args, attribs) + if _type == ProbeType.SPAN_PROBE: + return SpanProbeFactory.build(args, attribs) + if _type == ProbeType.SPAN_DECORATION_PROBE: + return SpanDecorationProbeFactory.build(args, attribs) + + raise InvalidProbeConfiguration("Unsupported probe type: %s" % _type) + + +@_filter_by_env_and_version +def get_probes(config: dict, status_logger: ProbeStatusLogger) -> Iterable[Probe]: + try: + return [build_probe(config)] + except InvalidProbeConfiguration: + raise + except Exception as e: + status_logger.error( + probe=Probe(probe_id=config["id"], version=config["version"], tags={}), + error=(type(e).__name__, str(e)), + ) + return [] + + +class ProbePollerEvent(object): + NEW_PROBES = 0 + DELETED_PROBES = 1 + MODIFIED_PROBES = 2 + STATUS_UPDATE = 3 + + +ProbePollerEventType = int + + +class DebuggerRemoteConfigSubscriber(RemoteConfigSubscriber): + """Probe configuration adapter for the RCM client. + + This adapter turns configuration events from the RCM client into probe + events that can be handled easily by the debugger. + """ + + def __init__(self, data_connector, callback, name, status_logger): + super().__init__(data_connector, callback, name) + self._configs: Dict[str, Dict[str, Probe]] = {} + self._status_timestamp_sequence = count( + time.time() + di_config.diagnostics_interval, di_config.diagnostics_interval + ) + self._status_timestamp = next(self._status_timestamp_sequence) + self._status_logger = status_logger + + def _exec_callback(self, data, test_tracer=None): + # Check if it is time to re-emit probe status messages. + # DEV: We use the periodic signal from the remote config client worker + # thread to avoid having to spawn a separate thread for this. 
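# --- Illustrative aside (not part of the vendored diff) ----------------------
# Example of the remote-config payload shape that build_probe above turns into
# a LogLineProbe. The field names follow the parsing code in this file; the
# values are hypothetical, and the construction itself is left commented out
# because it needs the full vendored package on the import path.
probe_config = {
    "id": "3ecfd456-2d7c-4359-a51f-d4cc44141ffe",
    "type": "LOG_PROBE",
    "version": 1,
    "tags": ["env:production", "team:platform"],             # parsed as "key:value" pairs
    "where": {"sourceFile": "app/handler.py", "lines": ["42"]},
    "template": "cart has {count} items",
    "segments": [
        {"str": "cart has "},                                 # literal segment
        {"json": {"ref": "count"}, "dsl": "count"},           # expression segment
        {"str": " items"},
    ],
    "captureSnapshot": True,                                  # snapshot probes default to 1/s
}
# probe = build_probe(probe_config)  # -> LogLineProbe(source_file="app/handler.py", line=42, ...)
# ------------------------------------------------------------------------------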
+ if time.time() > self._status_timestamp: + self._send_status_update() + self._status_timestamp = next(self._status_timestamp_sequence) + + if data: + log.debug("[%s][P: %s] Dynamic Instrumentation Updated", os.getpid(), os.getppid()) + for metadata, config in zip(data["metadata"], data["config"]): + if metadata is None: + log.debug( + "[%s][P: %s] Dynamic Instrumentation: no RCM metadata for configuration; skipping", + os.getpid(), + os.getppid(), + ) + continue + + self._update_probes_for_config(metadata["id"], config) + + # Flush any probe status messages that migh have been generated + self._status_logger.flush() + + def _send_status_update(self): + log.debug( + "[%s][P: %s] Dynamic Instrumentation: emitting probe status log messages", + os.getpid(), + os.getppid(), + ) + + self._callback(ProbePollerEvent.STATUS_UPDATE, []) + + def _dispatch_probe_events(self, prev_probes: Dict[str, Probe], next_probes: Dict[str, Probe]) -> None: + new_probes = [p for _, p in next_probes.items() if _ not in prev_probes] + deleted_probes = [p for _, p in prev_probes.items() if _ not in next_probes] + modified_probes = [p for _, p in next_probes.items() if _ in prev_probes and p != prev_probes[_]] + + if deleted_probes: + self._callback(ProbePollerEvent.DELETED_PROBES, deleted_probes) + if modified_probes: + self._callback(ProbePollerEvent.MODIFIED_PROBES, modified_probes) + if new_probes: + self._callback(ProbePollerEvent.NEW_PROBES, new_probes) + + def _update_probes_for_config(self, config_id: str, config: Any) -> None: + prev_probes: Dict[str, Probe] = self._configs.get(config_id, {}) + next_probes: Dict[str, Probe] = ( + {probe.probe_id: probe for probe in get_probes(config, self._status_logger)} + if config not in (None, False) + else {} + ) + log.debug("[%s][P: %s] Dynamic Instrumentation, dispatch probe events", os.getpid(), os.getppid()) + self._dispatch_probe_events(prev_probes, next_probes) + + if next_probes: + self._configs[config_id] = next_probes + else: + self._configs.pop(config_id, None) + + +class ProbeRCAdapter(PubSub): + __publisher_class__ = RemoteConfigPublisher + __subscriber_class__ = DebuggerRemoteConfigSubscriber + __shared_data__ = PublisherSubscriberConnector() + + def __init__(self, _preprocess_results, callback, status_logger): + self._publisher = self.__publisher_class__(self.__shared_data__, _preprocess_results) + self._subscriber = self.__subscriber_class__(self.__shared_data__, callback, "DEBUGGER", status_logger) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/status.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/status.py new file mode 100644 index 0000000..a12db52 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_probe/status.py @@ -0,0 +1,147 @@ +import json +from queue import SimpleQueue as Queue +import time +import typing as t +from urllib.parse import quote + +from ddtrace.debugging._config import di_config +from ddtrace.debugging._encoding import add_tags +from ddtrace.debugging._metrics import metrics +from ddtrace.debugging._probe.model import Probe +from ddtrace.internal import compat +from ddtrace.internal import runtime +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.http import FormData +from ddtrace.internal.utils.http import connector +from ddtrace.internal.utils.http import multipart +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter + + +log = get_logger(__name__) +meter = metrics.get_meter("probe.status") + + +ErrorInfo = t.Tuple[str, str] 
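`_dispatch_probe_events` above derives the probe lifecycle events by diffing the previously known probes for a configuration ID against the freshly built set. A small sketch of that diff over plain `{probe_id: probe}` mappings:

```python
# Illustrative sketch: diff two {probe_id: probe} mappings the way the subscriber does.
from typing import Dict, List, Tuple


def diff_probes(prev: Dict[str, dict], nxt: Dict[str, dict]) -> Tuple[List[dict], List[dict], List[dict]]:
    new = [p for pid, p in nxt.items() if pid not in prev]
    deleted = [p for pid, p in prev.items() if pid not in nxt]
    modified = [p for pid, p in nxt.items() if pid in prev and p != prev[pid]]
    return new, deleted, modified


prev = {"a": {"version": 1}, "b": {"version": 1}}
nxt = {"b": {"version": 2}, "c": {"version": 1}}
new, deleted, modified = diff_probes(prev, nxt)
print(new)       # [{'version': 1}]  -> probe "c" was added
print(deleted)   # [{'version': 1}]  -> probe "a" was removed
print(modified)  # [{'version': 2}]  -> probe "b" changed
```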
+ + +class ProbeStatusLogger: + RETRY_ATTEMPTS = 3 + RETRY_INTERVAL = 1 + ENDPOINT = "/debugger/v1/diagnostics" + + def __init__(self, service: str) -> None: + self._service = service + self._queue: Queue[str] = Queue() + self._connect = connector(di_config._intake_url, timeout=di_config.upload_timeout) + # Make it retryable + self._write_payload_with_backoff = fibonacci_backoff_with_jitter( + initial_wait=0.618 * self.RETRY_INTERVAL / (1.618**self.RETRY_ATTEMPTS) / 2, + attempts=self.RETRY_ATTEMPTS, + )(self._write_payload) + + if di_config._tags_in_qs and di_config.tags: + self.ENDPOINT += f"?ddtags={quote(di_config.tags)}" + + def _payload( + self, probe: Probe, status: str, message: str, timestamp: float, error: t.Optional[ErrorInfo] = None + ) -> str: + payload = { + "service": self._service, + "timestamp": int(timestamp * 1e3), # milliseconds + "message": message, + "ddsource": "dd_debugger", + "debugger": { + "diagnostics": { + "probeId": probe.probe_id, + "probeVersion": probe.version, + "runtimeId": runtime.get_runtime_id(), + "parentId": runtime.get_ancestor_runtime_id(), + "status": status, + } + }, + } + + add_tags(payload) + + if error is not None: + error_type, message = error + payload["debugger"]["diagnostics"]["exception"] = { # type: ignore[index] + "type": error_type, + "message": message, + } + + return json.dumps(payload) + + def _write_payload(self, data: t.Tuple[bytes, dict]) -> None: + body, headers = data + try: + log.debug("Sending probe status payload: %r", body) + with self._connect() as conn: + conn.request( + "POST", + "/debugger/v1/diagnostics", + body, + headers=headers, + ) + resp = compat.get_connection_response(conn) + if not (200 <= resp.status < 300): + log.error("Failed to upload payload: [%d] %r", resp.status, resp.read()) + meter.increment("upload.error", tags={"status": str(resp.status)}) + else: + meter.increment("upload.success") + meter.distribution("upload.size", len(body)) + except Exception: + log.error("Failed to write payload", exc_info=True) + meter.increment("error") + + def _enqueue(self, probe: Probe, status: str, message: str, error: t.Optional[ErrorInfo] = None) -> None: + self._queue.put_nowait(self._payload(probe, status, message, time.time(), error)) + log.debug("Probe status %s for probe %s enqueued", status, probe.probe_id) + + def flush(self) -> None: + if self._queue.empty(): + return + + msgs: t.List[str] = [] + while not self._queue.empty(): + msgs.append(self._queue.get_nowait()) + + try: + self._write_payload_with_backoff( + multipart( + parts=[ + FormData( + name="event", + filename="event.json", + data=f"[{','.join(msgs)}]", + content_type="json", + ) + ] + ) + ) + except Exception: + log.error("Failed to write probe status after retries", exc_info=True) + + def received(self, probe: Probe, message: t.Optional[str] = None) -> None: + self._enqueue( + probe, + "RECEIVED", + message or "Probe %s has been received correctly" % probe.probe_id, + ) + + def installed(self, probe: Probe, message: t.Optional[str] = None) -> None: + self._enqueue( + probe, + "INSTALLED", + message or "Probe %s instrumented correctly" % probe.probe_id, + ) + + def emitting(self, probe: Probe, message: t.Optional[str] = None) -> None: + self._enqueue( + probe, + "EMITTING", + message or "Probe %s is emitting data" % probe.probe_id, + ) + + def error(self, probe: Probe, error: t.Optional[ErrorInfo] = None) -> None: + self._enqueue(probe, "ERROR", "Failed to instrument probe %s" % probe.probe_id, error) diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_redaction.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_redaction.py new file mode 100644 index 0000000..f1bb98e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_redaction.py @@ -0,0 +1,167 @@ +from ddtrace.debugging._expressions import DDCompiler +from ddtrace.debugging._expressions import DDExpression +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.cache import cached +from ddtrace.settings.dynamic_instrumentation import config +from ddtrace.settings.dynamic_instrumentation import normalize_ident + + +log = get_logger(__name__) + +# The following identifier represent function argument/local variable/object +# attribute names that should be redacted from the payload. +REDACTED_IDENTIFIERS = ( + frozenset( + { + "2fa", + "accesstoken", + "aiohttpsession", + "apikey", + "apisecret", + "apisignature", + "auth", + "authorization", + "authtoken", + "ccnumber", + "certificatepin", + "cipher", + "clientid", + "clientsecret", + "config", + "connectsid", + "cookie", + "credentials", + "creditcard", + "csrf", + "csrftoken", + "cvv", + "databaseurl", + "dburl", + "encryptionkey", + "encryptionkeyid", + "env", + "gpgkey", + "jti", + "jwt", + "licensekey", + "masterkey", + "mysqlpwd", + "nonce", + "oauth", + "oauthtoken", + "otp", + "passhash", + "passwd", + "password", + "passwordb", + "pemfile", + "pgpkey", + "phpsessid", + "pin", + "pincode", + "pkcs8", + "plateno", + "platenum", + "platenumber", + "privatekey", + "publickey", + "pwd", + "recaptchakey", + "refreshtoken", + "routingnumber", + "salt", + "secret", + "secretkey", + "secrettoken", + "securityanswer", + "securitycode", + "securityquestion", + "serviceaccountcredentials", + "session", + "sessionid", + "sessionkey", + "setcookie", + "signature", + "signaturekey", + "sshkey", + "ssn", + "symfony", + "token", + "transactionid", + "twiliotoken", + "usersession", + "voterid", + "xapikey", + "xauthtoken", + "xcsrftoken", + "xforwardedfor", + "xrealip", + "xsrftoken", + } + ) + | config.redacted_identifiers +) + + +REDACTED_PLACEHOLDER = r"{redacted}" + + +@cached() +def redact(ident: str) -> bool: + return normalize_ident(ident) in REDACTED_IDENTIFIERS + + +@cached() +def redact_type(_type: str) -> bool: + _re = config.redacted_types_re + if _re is None: + return False + return _re.search(_type) is not None + + +class DDRedactedExpressionError(Exception): + pass + + +class DDRedactedCompiler(DDCompiler): + @classmethod + def __getmember__(cls, s, a): + if redact(a): + raise DDRedactedExpressionError(f"Access to attribute {a!r} is not allowed") + + return super().__getmember__(s, a) + + @classmethod + def __index__(cls, o, i): + if isinstance(i, str) and redact(i): + raise DDRedactedExpressionError(f"Access to entry {i!r} is not allowed") + + return super().__index__(o, i) + + @classmethod + def __ref__(cls, s): + if redact(s): + raise DDRedactedExpressionError(f"Access to local {s!r} is not allowed") + + return s + + +dd_compile_redacted = DDRedactedCompiler().compile + + +def _redacted_expr(exc): + def _(_): + raise exc + + return _ + + +class DDRedactedExpression(DDExpression): + __compiler__ = dd_compile_redacted + + @classmethod + def on_compiler_error(cls, dsl, exc): + if isinstance(exc, DDRedactedExpressionError): + log.error("Cannot compile expression that references potential PII: %s", dsl, exc_info=True) + return _redacted_expr(exc) + return super().on_compiler_error(dsl, exc) diff --git 
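The redaction list above is checked against normalized identifier names, so differently spelled variants of the same sensitive name resolve to a single entry, and the expression compiler refuses to reference them. A standalone sketch of that check; the exact normalization performed by `normalize_ident` (lower-casing and dropping `_`/`-`) is an assumption here:

```python
# Illustrative sketch; the normalization rule below is an assumption about normalize_ident.
REDACTED_IDENTIFIERS = frozenset({"apikey", "password", "accesstoken", "secret"})


def normalize_ident(ident: str) -> str:
    return ident.lower().replace("_", "").replace("-", "")


def redact(ident: str) -> bool:
    return normalize_ident(ident) in REDACTED_IDENTIFIERS


for name in ("api_key", "API-Key", "request_id"):
    print(name, redact(name))
# api_key True
# API-Key True
# request_id False
```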
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_safety.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_safety.py new file mode 100644 index 0000000..50142fc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_safety.py @@ -0,0 +1,73 @@ +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from types import FrameType +from typing import Any +from typing import Dict +from typing import Iterator +from typing import Tuple + +from ddtrace.internal.safety import get_slots + + +GetSetDescriptor = type(type.__dict__["__dict__"]) # type: ignore[index] # noqa: F821 + + +def get_args(frame: FrameType) -> Iterator[Tuple[str, Any]]: + code = frame.f_code + nargs = code.co_argcount + bool(code.co_flags & CO_VARARGS) + bool(code.co_flags & CO_VARKEYWORDS) + arg_names = code.co_varnames[:nargs] + arg_values = (frame.f_locals[name] for name in arg_names) + + return zip(arg_names, arg_values) + + +def get_locals(frame: FrameType) -> Iterator[Tuple[str, Any]]: + code = frame.f_code + nargs = code.co_argcount + bool(code.co_flags & CO_VARARGS) + bool(code.co_flags & CO_VARKEYWORDS) + names = code.co_varnames[nargs:] + values = (frame.f_locals.get(name) for name in names) + + return zip(names, values) + + +def get_globals(frame: FrameType) -> Iterator[Tuple[str, Any]]: + nonlocal_names = frame.f_code.co_names + _globals = globals() + + return ((name, _globals[name]) for name in nonlocal_names if name in _globals) + + +def safe_getattr(obj: Any, name: str) -> Any: + try: + return object.__getattribute__(obj, name) + except Exception as e: + return e + + +def safe_getitem(obj, index): + if isinstance(obj, list): + return list.__getitem__(obj, index) + elif isinstance(obj, dict): + return dict.__getitem__(obj, index) + elif isinstance(obj, tuple): + return tuple.__getitem__(obj, index) + raise TypeError("Type is not indexable collection " + str(type(obj))) + + +def _safe_dict(o: Any) -> Dict[str, Any]: + try: + __dict__ = object.__getattribute__(o, "__dict__") + if type(__dict__) is dict: + return __dict__ + except Exception: + pass # nosec + + raise AttributeError("No safe __dict__") + + +def get_fields(obj: Any) -> Dict[str, Any]: + try: + return _safe_dict(obj) + except AttributeError: + # Check for slots + return {s: safe_getattr(obj, s) for s in get_slots(obj)} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/collector.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/collector.py new file mode 100644 index 0000000..4c5debc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/collector.py @@ -0,0 +1,117 @@ +import os +from typing import Any +from typing import Callable +from typing import List +from typing import Optional +from typing import Tuple + +from ddtrace.debugging._encoding import BufferedEncoder +from ddtrace.debugging._metrics import metrics +from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.model import Signal +from ddtrace.debugging._signal.model import SignalState +from ddtrace.internal._encoding import BufferFull +from ddtrace.internal.compat import ExcInfoType +from ddtrace.internal.logger import get_logger + + +CaptorType = Callable[[List[Tuple[str, Any]], List[Tuple[str, Any]], ExcInfoType, int], Any] + +log = get_logger(__name__) +meter = 
metrics.get_meter("signal.collector") + + +NO_RETURN_VALUE = object() + + +class SignalContext(object): + """Debugger signal context manager. + + This is used to capture data for function invocations. The SignalContext + call ``Signal.enter`` on entry ``Signal.exit`` on function return. + + The handler is triggered just after ``Signal.exit`` returns. + """ + + def __init__( + self, + signal: Signal, + handler: Callable[[Signal], None], + ) -> None: + self._on_exit_handler = handler + self.signal = signal + self.return_value: Any = NO_RETURN_VALUE + self.duration: Optional[int] = None + + self.signal.enter() + + def exit(self, retval: Any, exc_info: ExcInfoType, duration_ns: int) -> None: + """Exit the snapshot context. + + The arguments are used to record either the return value or the exception, and + the duration of the wrapped call. + """ + self.return_value = retval + self.duration = duration_ns + + return self.__exit__(*exc_info) + + def __enter__(self): + return self + + def __exit__(self, *exc_info: ExcInfoType) -> None: + self.signal.exit(self.return_value, exc_info, self.duration) + self._on_exit_handler(self.signal) + + +class SignalCollector(object): + """Debugger signal collector. + + This is used to collect and encode signals emitted by probes as soon as + requested. The ``push`` method is intended to be called after a line-level + signal is fully emitted, and information is available and ready to be + encoded, or the signal status indicate it should be skipped. For function + instrumentation (e.g. function probes), we use the ``attach`` method to + create a ``SignalContext`` instance that can be used to capture additional + data, such as the return value of the wrapped function. + """ + + def __init__(self, encoder: BufferedEncoder) -> None: + self._encoder = encoder + + def _enqueue(self, log_signal: LogSignal) -> None: + try: + log.debug( + "[%s][P: %s] SignalCollector. 
_encoder (%s) _enqueue signal", os.getpid(), os.getppid(), self._encoder + ) + self._encoder.put(log_signal) + except BufferFull: + log.debug("Encoder buffer full") + meter.increment("encoder.buffer.full") + + def push(self, signal: Signal) -> None: + if signal.state == SignalState.SKIP_COND: + meter.increment("skip", tags={"cause": "cond", "probe_id": signal.probe.probe_id}) + elif signal.state in {SignalState.SKIP_COND_ERROR, SignalState.COND_ERROR}: + meter.increment("skip", tags={"cause": "cond_error", "probe_id": signal.probe.probe_id}) + elif signal.state == SignalState.SKIP_RATE: + meter.increment("skip", tags={"cause": "rate", "probe_id": signal.probe.probe_id}) + elif signal.state == SignalState.DONE: + meter.increment("signal", tags={"probe_id": signal.probe.probe_id}) + + if ( + isinstance(signal, LogSignal) + and signal.state in {SignalState.DONE, SignalState.COND_ERROR} + and signal.has_message() + ): + log.debug("Enqueueing signal %s", signal) + # This signal emits a log message + self._enqueue(signal) + else: + log.debug( + "Skipping signal %s (has message: %s)", signal, isinstance(signal, LogSignal) and signal.has_message() + ) + + def attach(self, signal: Signal) -> SignalContext: + """Collect via a probe signal context manager.""" + return SignalContext(signal, lambda e: self.push(e)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/metric_sample.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/metric_sample.py new file mode 100644 index 0000000..c096763 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/metric_sample.py @@ -0,0 +1,86 @@ +from typing import Optional +from typing import cast + +import attr + +from ddtrace.debugging._metrics import probe_metrics +from ddtrace.debugging._probe.model import MetricFunctionProbe +from ddtrace.debugging._probe.model import MetricProbeKind +from ddtrace.debugging._probe.model import MetricProbeMixin +from ddtrace.debugging._probe.model import ProbeEvaluateTimingForMethod +from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.model import SignalState +from ddtrace.internal.metrics import Metrics + + +@attr.s +class MetricSample(LogSignal): + """wrapper for making a metric sample""" + + meter = attr.ib(type=Optional[Metrics.Meter], factory=lambda: probe_metrics.get_meter("probe")) + + def enter(self): + if not isinstance(self.probe, MetricFunctionProbe): + return + + probe = self.probe + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.EXIT: + return + + _args = dict(self.args) if self.args else {} + if not self._eval_condition(_args): + return + + self.sample(_args) + self.state = SignalState.DONE + + def exit(self, retval, exc_info, duration): + if not isinstance(self.probe, MetricFunctionProbe): + return + + probe = self.probe + _args = self._enrich_args(retval, exc_info, duration) + + if probe.evaluate_at != ProbeEvaluateTimingForMethod.EXIT: + return + if not self._eval_condition(_args): + return + + self.sample(_args) + self.state = SignalState.DONE + + def line(self): + frame = self.frame + + if not self._eval_condition(frame.f_locals): + return + + self.sample(frame.f_locals) + self.state = SignalState.DONE + + def sample(self, _locals): + probe = cast(MetricProbeMixin, self.probe) + + assert probe.kind is not None and probe.name is not None # nosec + + value = float(probe.value(_locals)) if probe.value is not None else 1 + + # TODO[perf]: We know the tags in advance so we can avoid the + # list comprehension. 
+ if probe.kind == MetricProbeKind.COUNTER: + self.meter.increment(probe.name, value, probe.tags) + elif probe.kind == MetricProbeKind.GAUGE: + self.meter.gauge(probe.name, value, probe.tags) + elif probe.kind == MetricProbeKind.HISTOGRAM: + self.meter.histogram(probe.name, value, probe.tags) + elif probe.kind == MetricProbeKind.DISTRIBUTION: + self.meter.distribution(probe.name, value, probe.tags) + + @property + def message(self): + return ("Evaluation errors for probe id %s" % self.probe.probe_id) if self.errors else None + + def has_message(self): + # type () -> bool + return bool(self.errors) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/model.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/model.py new file mode 100644 index 0000000..bea38fa --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/model.py @@ -0,0 +1,168 @@ +import abc +from enum import Enum +from threading import Thread +import time +from types import FrameType +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import Union +from typing import cast +from uuid import uuid4 + +import attr + +from ddtrace.context import Context +from ddtrace.debugging import _safety +from ddtrace.debugging._expressions import DDExpressionEvaluationError +from ddtrace.debugging._probe.model import FunctionLocationMixin +from ddtrace.debugging._probe.model import LineLocationMixin +from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import ProbeConditionMixin +from ddtrace.internal.rate_limiter import RateLimitExceeded +from ddtrace.span import Span + + +@attr.s +class EvaluationError(object): + expr = attr.ib(type=str) + message = attr.ib(type=str) + + +class SignalState(str, Enum): + NONE = "NONE" + SKIP_COND = "SKIP_COND" + SKIP_COND_ERROR = "SKIP_COND_ERROR" + SKIP_RATE = "SKIP_RATE" + COND_ERROR = "COND_ERROR" + DONE = "DONE" + + +@attr.s +class Signal(abc.ABC): + """Debugger signal base class. + + Used to model the data carried by the signal emitted by a probe when it is + triggered. 
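`MetricSample.sample` evaluates the probe's optional value expression (defaulting to 1) and routes the result to the meter method matching the metric kind. A minimal sketch of that routing with a stand-in meter and stand-in kind names rather than the real `MetricProbeKind`/`Metrics.Meter` internals:

```python
# Illustrative sketch: route a sampled value to a meter method based on the probe kind.
class StubMeter:
    def increment(self, name, value, tags): print("counter", name, value, tags)
    def gauge(self, name, value, tags): print("gauge", name, value, tags)
    def histogram(self, name, value, tags): print("histogram", name, value, tags)
    def distribution(self, name, value, tags): print("distribution", name, value, tags)


KIND_TO_METHOD = {
    "counter": "increment",
    "gauge": "gauge",
    "histogram": "histogram",
    "distribution": "distribution",
}


def sample(meter, kind, name, tags, value_expr=None, _locals=None):
    # The probe value expression is evaluated against the captured locals; default is 1.
    value = float(value_expr(_locals or {})) if value_expr is not None else 1.0
    getattr(meter, KIND_TO_METHOD[kind])(name, value, tags)


sample(StubMeter(), "gauge", "queue.depth", {"debugger.probeid": "p-1"},
       value_expr=lambda loc: loc["depth"], _locals={"depth": 17})
# gauge queue.depth 17.0 {'debugger.probeid': 'p-1'}
```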
+ """ + + probe = attr.ib(type=Probe) + frame = attr.ib(type=FrameType) + thread = attr.ib(type=Thread) + + trace_context = attr.ib(type=Optional[Union[Span, Context]], default=None) + args = attr.ib(type=Optional[List[Tuple[str, Any]]], default=None) + state = attr.ib(type=str, default=SignalState.NONE) + errors = attr.ib(type=List[EvaluationError], factory=lambda: list()) + timestamp = attr.ib(type=float, factory=time.time) + uuid = attr.ib(type=str, init=False, factory=lambda: str(uuid4())) + + def _eval_condition(self, _locals: Optional[Dict[str, Any]] = None) -> bool: + """Evaluate the probe condition against the collected frame.""" + probe = cast(ProbeConditionMixin, self.probe) + condition = probe.condition + if condition is None: + return True + + try: + if bool(condition.eval(_locals or self.frame.f_locals)): + return True + except DDExpressionEvaluationError as e: + self.errors.append(EvaluationError(expr=e.dsl, message=e.error)) + self.state = ( + SignalState.SKIP_COND_ERROR + if probe.condition_error_limiter.limit() is RateLimitExceeded + else SignalState.COND_ERROR + ) + else: + self.state = SignalState.SKIP_COND + + return False + + def _enrich_args(self, retval, exc_info, duration): + _locals = list(self.args or _safety.get_args(self.frame)) + _locals.append(("@duration", duration / 1e6)) # milliseconds + + exc = exc_info[1] + _locals.append(("@return", retval) if exc is None else ("@exception", exc)) + + return dict(_locals) + + @abc.abstractmethod + def enter(self): + pass + + @abc.abstractmethod + def exit(self, retval, exc_info, duration): + pass + + @abc.abstractmethod + def line(self): + pass + + +@attr.s +class LogSignal(Signal): + """A signal that also emits a log message. + + Some signals might require sending a log message along with the base signal + data. For example, all the collected errors from expression evaluations + (e.g. conditions) might need to be reported. 
+ """ + + @property + @abc.abstractmethod + def message(self): + # type () -> Optional[str] + """The log message to emit.""" + pass + + @abc.abstractmethod + def has_message(self): + # type () -> bool + """Whether the signal has a log message to emit.""" + pass + + @property + def data(self): + # type () -> Dict[str, Any] + """Extra data to include in the snapshot portion of the log message.""" + return {} + + def _probe_details(self): + # type () -> Dict[str, Any] + probe = self.probe + if isinstance(probe, LineLocationMixin): + location = { + "file": str(probe.source_file), + "lines": [probe.line], + } + elif isinstance(probe, FunctionLocationMixin): + location = { + "type": probe.module, + "method": probe.func_qname, + } + else: + return {} + + return { + "id": probe.probe_id, + "version": probe.version, + "location": location, + } + + @property + def snapshot(self): + # type () -> Dict[str, Any] + full_data = { + "id": self.uuid, + "timestamp": int(self.timestamp * 1e3), # milliseconds + "evaluationErrors": [{"expr": e.expr, "message": e.message} for e in self.errors], + "probe": self._probe_details(), + "language": "python", + } + full_data.update(self.data) + + return full_data diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/snapshot.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/snapshot.py new file mode 100644 index 0000000..f860376 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/snapshot.py @@ -0,0 +1,249 @@ +import sys +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +from typing import cast + +import attr + +from ddtrace.debugging import _safety +from ddtrace.debugging._expressions import DDExpressionEvaluationError +from ddtrace.debugging._probe.model import DEFAULT_CAPTURE_LIMITS +from ddtrace.debugging._probe.model import CaptureLimits +from ddtrace.debugging._probe.model import FunctionLocationMixin +from ddtrace.debugging._probe.model import LineLocationMixin +from ddtrace.debugging._probe.model import LiteralTemplateSegment +from ddtrace.debugging._probe.model import LogFunctionProbe +from ddtrace.debugging._probe.model import LogLineProbe +from ddtrace.debugging._probe.model import LogProbeMixin +from ddtrace.debugging._probe.model import ProbeEvaluateTimingForMethod +from ddtrace.debugging._probe.model import TemplateSegment +from ddtrace.debugging._redaction import REDACTED_PLACEHOLDER +from ddtrace.debugging._redaction import DDRedactedExpressionError +from ddtrace.debugging._signal import utils +from ddtrace.debugging._signal.model import EvaluationError +from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.model import SignalState +from ddtrace.debugging._signal.utils import serialize +from ddtrace.internal.compat import ExcInfoType +from ddtrace.internal.rate_limiter import RateLimitExceeded +from ddtrace.internal.utils.time import HourGlass + + +CAPTURE_TIME_BUDGET = 0.2 # seconds + + +def _capture_context( + arguments: List[Tuple[str, Any]], + _locals: List[Tuple[str, Any]], + throwable: ExcInfoType, + limits: CaptureLimits = DEFAULT_CAPTURE_LIMITS, +) -> Dict[str, Any]: + with HourGlass(duration=CAPTURE_TIME_BUDGET) as hg: + + def timeout(_): + return not hg.trickling() + + return { + "arguments": utils.capture_pairs( + arguments, limits.max_level, limits.max_len, limits.max_size, limits.max_fields, timeout + ) + if arguments is not None + else {}, + "locals": utils.capture_pairs( 
+ _locals, limits.max_level, limits.max_len, limits.max_size, limits.max_fields, timeout + ) + if _locals is not None + else {}, + "throwable": utils.capture_exc_info(throwable), + } + + +_EMPTY_CAPTURED_CONTEXT = _capture_context([], [], (None, None, None), DEFAULT_CAPTURE_LIMITS) + + +def format_captured_value(value: Any) -> str: + v = value.get("value") + if v is not None: + return v + elif value.get("isNull"): + return "None" + + es = value.get("elements") + if es is not None: + return "%s(%s)" % (value["type"], ", ".join(format_captured_value(e) for e in es)) + + es = value.get("entries") + if es is not None: + return "{%s}" % ", ".join(format_captured_value(k) + ": " + format_captured_value(v) for k, v in es) + + fs = value.get("fields") + if fs is not None: + return "%s(%s)" % (value["type"], ", ".join("%s=%s" % (k, format_captured_value(v)) for k, v in fs.items())) + + return "%s()" % value["type"] + + +def format_message(function: str, args: Dict[str, Any], retval: Optional[Any] = None) -> str: + message = "%s(%s)" % ( + function, + ", ".join(("=".join((n, format_captured_value(a))) for n, a in args.items())), + ) + + if retval is not None: + return "\n".join((message, "=".join(("@return", format_captured_value(retval))))) + + return message + + +@attr.s +class Snapshot(LogSignal): + """Raw snapshot. + + Used to collect the minimum amount of information from a firing probe. + """ + + entry_capture = attr.ib(type=Optional[dict], default=None) + return_capture = attr.ib(type=Optional[dict], default=None) + line_capture = attr.ib(type=Optional[dict], default=None) + + _message = attr.ib(type=Optional[str], default=None) + duration = attr.ib(type=Optional[int], default=None) # nanoseconds + + def _eval_segment(self, segment: TemplateSegment, _locals: Dict[str, Any]) -> str: + probe = cast(LogProbeMixin, self.probe) + capture = probe.limits + try: + if isinstance(segment, LiteralTemplateSegment): + return segment.eval(_locals) + return serialize( + segment.eval(_locals), + level=capture.max_level, + maxsize=capture.max_size, + maxlen=capture.max_len, + maxfields=capture.max_fields, + ) + except DDExpressionEvaluationError as e: + self.errors.append(EvaluationError(expr=e.dsl, message=e.error)) + return REDACTED_PLACEHOLDER if isinstance(e.__cause__, DDRedactedExpressionError) else "ERROR" + + def _eval_message(self, _locals: Dict[str, Any]) -> None: + probe = cast(LogProbeMixin, self.probe) + self._message = "".join([self._eval_segment(s, _locals) for s in probe.segments]) + + def enter(self): + if not isinstance(self.probe, LogFunctionProbe): + return + + probe = self.probe + frame = self.frame + _args = list(self.args or _safety.get_args(frame)) + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.EXIT: + return + + if not self._eval_condition(dict(_args)): + return + + if probe.limiter.limit() is RateLimitExceeded: + self.state = SignalState.SKIP_RATE + return + + if probe.take_snapshot: + self.entry_capture = _capture_context( + _args, + [], + (None, None, None), + limits=probe.limits, + ) + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.ENTER: + self._eval_message(dict(_args)) + self.state = SignalState.DONE + + def exit(self, retval, exc_info, duration): + if not isinstance(self.probe, LogFunctionProbe): + return + + probe = self.probe + _args = self._enrich_args(retval, exc_info, duration) + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.EXIT: + if not self._eval_condition(_args): + return + if probe.limiter.limit() is RateLimitExceeded: + self.state = 
SignalState.SKIP_RATE + return + elif self.state not in {SignalState.NONE, SignalState.DONE}: + return + + _locals = [] + _, exc, _ = exc_info + if exc is None: + _locals.append(("@return", retval)) + else: + _locals.append(("@exception", exc)) + + if probe.take_snapshot: + self.return_capture = _capture_context( + self.args or _safety.get_args(self.frame), _locals, exc_info, limits=probe.limits + ) + self.duration = duration + self.state = SignalState.DONE + if probe.evaluate_at != ProbeEvaluateTimingForMethod.ENTER: + self._eval_message(dict(_args)) + + def line(self): + if not isinstance(self.probe, LogLineProbe): + return + + frame = self.frame + probe = self.probe + + if not self._eval_condition(frame.f_locals): + return + + if probe.take_snapshot: + if probe.limiter.limit() is RateLimitExceeded: + self.state = SignalState.SKIP_RATE + return + + self.line_capture = _capture_context( + self.args or _safety.get_args(frame), + _safety.get_locals(frame), + sys.exc_info(), + limits=probe.limits, + ) + + self._eval_message(frame.f_locals) + self.state = SignalState.DONE + + @property + def message(self) -> Optional[str]: + return self._message + + def has_message(self) -> bool: + return self._message is not None or bool(self.errors) + + @property + def data(self): + frame = self.frame + probe = self.probe + + captures = None + if isinstance(probe, LogProbeMixin) and probe.take_snapshot: + if isinstance(probe, LineLocationMixin): + captures = {"lines": {probe.line: self.line_capture or _EMPTY_CAPTURED_CONTEXT}} + elif isinstance(probe, FunctionLocationMixin): + captures = { + "entry": self.entry_capture or _EMPTY_CAPTURED_CONTEXT, + "return": self.return_capture or _EMPTY_CAPTURED_CONTEXT, + } + + return { + "stack": utils.capture_stack(frame), + "captures": captures, + "duration": self.duration, + } diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/tracing.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/tracing.py new file mode 100644 index 0000000..22e9cc8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/tracing.py @@ -0,0 +1,149 @@ +import typing as t + +import attr + +import ddtrace +from ddtrace import Span +from ddtrace.debugging._expressions import DDExpressionEvaluationError +from ddtrace.debugging._probe.model import Probe +from ddtrace.debugging._probe.model import ProbeEvaluateTimingForMethod +from ddtrace.debugging._probe.model import SpanDecorationFunctionProbe +from ddtrace.debugging._probe.model import SpanDecorationLineProbe +from ddtrace.debugging._probe.model import SpanDecorationMixin +from ddtrace.debugging._probe.model import SpanDecorationTargetSpan +from ddtrace.debugging._probe.model import SpanFunctionProbe +from ddtrace.debugging._signal.model import EvaluationError +from ddtrace.debugging._signal.model import LogSignal +from ddtrace.debugging._signal.model import Signal +from ddtrace.debugging._signal.model import SignalState +from ddtrace.debugging._signal.utils import serialize +from ddtrace.internal.compat import ExcInfoType +from ddtrace.internal.logger import get_logger +from ddtrace.internal.safety import _isinstance + + +log = get_logger(__name__) + +SPAN_NAME = "dd.dynamic.span" +PROBE_ID_TAG_NAME = "debugger.probeid" + + +@attr.s +class DynamicSpan(Signal): + """Dynamically created span""" + + _span_cm = attr.ib(type=t.Optional[t.ContextManager[Span]], init=False) + + def __attrs_post_init__(self) -> None: + self._span_cm = None + + def enter(self) -> None: + probe = self.probe + if not 
isinstance(probe, SpanFunctionProbe): + log.debug("Dynamic span entered with non-span probe: %s", self.probe) + return + + if not self._eval_condition(dict(self.args) if self.args else {}): + return + + self._span_cm = ddtrace.tracer.trace( + SPAN_NAME, + service=None, # Currently unused + resource=probe.func_qname, + span_type=None, # Currently unused + ) + span = self._span_cm.__enter__() + + span.set_tags(probe.tags) + span.set_tag(PROBE_ID_TAG_NAME, probe.probe_id) + + self.state = SignalState.DONE + + def exit(self, retval: t.Any, exc_info: ExcInfoType, duration: float) -> None: + if not isinstance(self.probe, SpanFunctionProbe): + log.debug("Dynamic span exited with non-span probe: %s", self.probe) + return + + if self._span_cm is not None: + # Condition evaluated to true so we created a span. Finish it. + self._span_cm.__exit__(*exc_info) + + def line(self): + raise NotImplementedError("Dynamic line spans are not supported in Python") + + +@attr.s +class SpanDecoration(LogSignal): + """Decorate a span.""" + + def _decorate_span(self, _locals: t.Dict[str, t.Any]) -> None: + probe = t.cast(SpanDecorationMixin, self.probe) + + if probe.target_span == SpanDecorationTargetSpan.ACTIVE: + span = ddtrace.tracer.current_span() + elif probe.target_span == SpanDecorationTargetSpan.ROOT: + span = ddtrace.tracer.current_root_span() + else: + log.error("Invalid target span for span decoration: %s", probe.target_span) + return + + if span is not None: + log.debug("Decorating span %r according to span decoration probe %r", span, probe) + for d in probe.decorations: + try: + if not (d.when is None or d.when(_locals)): + continue + except DDExpressionEvaluationError as e: + self.errors.append( + EvaluationError(expr=e.dsl, message="Failed to evaluate condition: %s" % e.error) + ) + continue + for tag in d.tags: + try: + tag_value = tag.value.render(_locals, serialize) + except DDExpressionEvaluationError as e: + span.set_tag_str( + "_dd.di.%s.evaluation_error" % tag.name, ", ".join([serialize(v) for v in e.args]) + ) + else: + span.set_tag_str(tag.name, tag_value if _isinstance(tag_value, str) else serialize(tag_value)) + span.set_tag_str("_dd.di.%s.probe_id" % tag.name, t.cast(Probe, probe).probe_id) + + def enter(self) -> None: + probe = self.probe + if not isinstance(probe, SpanDecorationFunctionProbe): + log.debug("Span decoration entered with non-span decoration probe: %s", self.probe) + return + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.ENTER: + self._decorate_span(dict(self.args) if self.args else {}) + self.state = SignalState.DONE + + def exit(self, retval: t.Any, exc_info: ExcInfoType, duration: float) -> None: + probe = self.probe + + if not isinstance(probe, SpanDecorationFunctionProbe): + log.debug("Span decoration exited with non-span decoration probe: %s", self.probe) + return + + if probe.evaluate_at == ProbeEvaluateTimingForMethod.EXIT: + self._decorate_span(self._enrich_args(retval, exc_info, duration)) + self.state = SignalState.DONE + + def line(self): + probe = self.probe + if not isinstance(probe, SpanDecorationLineProbe): + log.debug("Span decoration on line with non-span decoration probe: %s", self.probe) + return + + self._decorate_span(self.frame.f_locals) + + self.state = SignalState.DONE + + @property + def message(self): + return ("Condition evaluation errors for probe %s" % self.probe.probe_id) if self.errors else None + + def has_message(self): + # type () -> bool + return bool(self.errors) diff --git 
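A span-decoration probe resolves its target span (active or root), evaluates each decoration's optional `when` condition, and then writes the rendered tag values alongside a `_dd.di.<tag>.probe_id` marker. A simplified sketch with a stand-in span object rather than the real ddtrace `Span` API:

```python
# Illustrative sketch of how a span-decoration probe applies its tags (stand-in span object).
class StubSpan:
    def __init__(self):
        self.tags = {}

    def set_tag_str(self, key, value):
        self.tags[key] = str(value)


def decorate(span, probe_id, decorations, _locals):
    for when, tags in decorations:
        if when is not None and not when(_locals):
            continue  # condition did not match; skip this decoration
        for name, render in tags:
            span.set_tag_str(name, render(_locals))
            span.set_tag_str(f"_dd.di.{name}.probe_id", probe_id)


span = StubSpan()
decorate(
    span,
    "probe-7",
    decorations=[(lambda loc: loc["status"] >= 500, [("error.origin", lambda loc: loc["handler"])])],
    _locals={"status": 503, "handler": "checkout"},
)
print(span.tags)
# {'error.origin': 'checkout', '_dd.di.error.origin.probe_id': 'probe-7'}
```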
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/utils.py new file mode 100644 index 0000000..8c52cc6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_signal/utils.py @@ -0,0 +1,312 @@ +from itertools import islice +from itertools import takewhile +from types import FrameType +from typing import Any +from typing import Callable +from typing import Dict +from typing import Iterable +from typing import List +from typing import Optional +from typing import Tuple +from typing import Type + +from ddtrace.debugging._probe.model import MAXFIELDS +from ddtrace.debugging._probe.model import MAXLEN +from ddtrace.debugging._probe.model import MAXLEVEL +from ddtrace.debugging._probe.model import MAXSIZE +from ddtrace.debugging._redaction import REDACTED_PLACEHOLDER +from ddtrace.debugging._redaction import redact +from ddtrace.debugging._redaction import redact_type +from ddtrace.debugging._safety import get_fields +from ddtrace.internal.compat import BUILTIN_CONTAINER_TYPES +from ddtrace.internal.compat import BUILTIN_MAPPNG_TYPES +from ddtrace.internal.compat import BUILTIN_SIMPLE_TYPES +from ddtrace.internal.compat import CALLABLE_TYPES +from ddtrace.internal.compat import Collection +from ddtrace.internal.compat import ExcInfoType +from ddtrace.internal.compat import NoneType +from ddtrace.internal.safety import _isinstance +from ddtrace.internal.utils.cache import cached + + +EXCLUDED_FIELDS = frozenset(["__class__", "__dict__", "__weakref__", "__doc__", "__module__", "__hash__"]) + + +@cached() +def qualname(_type: Type) -> str: + try: + return _type.__qualname__ + except AttributeError: + try: + return _type.__name__ + except AttributeError: + return repr(_type) + + +def _serialize_collection( + value: Collection, brackets: str, level: int, maxsize: int, maxlen: int, maxfields: int +) -> str: + o, c = brackets[0], brackets[1] + ellipsis = ", ..." if len(value) > maxsize else "" + return "".join( + (o, ", ".join(serialize(_, level - 1, maxsize, maxlen, maxfields) for _ in islice(value, maxsize)), ellipsis, c) + ) + + +def serialize( + value: Any, level: int = MAXLEVEL, maxsize: int = MAXSIZE, maxlen: int = MAXLEN, maxfields: int = MAXFIELDS +) -> str: + """Python object serializer. + + We provide our own serializer to avoid any potential side effects of calling + ``str`` directly on arbitrary objects. + """ + + if _isinstance(value, CALLABLE_TYPES): + return object.__repr__(value) + + if type(value) in BUILTIN_SIMPLE_TYPES: + r = repr(value) + return "".join((r[:maxlen], "..." 
+ ("'" if r[0] == "'" else "") if len(r) > maxlen else "")) + + if not level: + return repr(type(value)) + + if type(value) not in BUILTIN_CONTAINER_TYPES: + return "%s(%s)" % ( + type(value).__name__, + ", ".join( + ( + "=".join((k, serialize(v, level - 1, maxsize, maxlen, maxfields))) + for k, v in islice(get_fields(value).items(), maxfields) + if not redact(k) + ) + ), + ) + + if type(value) is dict: + return "{%s}" % ", ".join( + ( + ": ".join( + ( + serialize(_, level - 1, maxsize, maxlen, maxfields) + for _ in (k, v if not (_isinstance(k, str) and redact(k)) else REDACTED_PLACEHOLDER) + ) + ) + for k, v in islice(value.items(), maxsize) + ) + ) + elif type(value) is list: + return _serialize_collection(value, "[]", level, maxsize, maxlen, maxfields) + elif type(value) is tuple: + return _serialize_collection(value, "()", level, maxsize, maxlen, maxfields) + elif type(value) is set: + return _serialize_collection(value, r"{}", level, maxsize, maxlen, maxfields) if value else "set()" + + msg = f"Unhandled type: {type(value)}" + raise TypeError(msg) + + +def capture_stack(top_frame: FrameType, max_height: int = 4096) -> List[dict]: + frame: Optional[FrameType] = top_frame + stack = [] + h = 0 + while frame and h < max_height: + code = frame.f_code + stack.append( + { + "fileName": code.co_filename, + "function": code.co_name, + "lineNumber": frame.f_lineno, + } + ) + frame = frame.f_back + h += 1 + return stack + + +def capture_exc_info(exc_info: ExcInfoType) -> Optional[Dict[str, Any]]: + _type, value, tb = exc_info + if _type is None or value is None: + return None + + top_tb = tb + if top_tb is not None: + while top_tb.tb_next is not None: + top_tb = top_tb.tb_next + + return { + "type": _type.__name__, + "message": ", ".join([serialize(v) for v in value.args]), + "stacktrace": capture_stack(top_tb.tb_frame) if top_tb is not None else None, + } + + +def redacted_value(v: Any) -> dict: + return {"type": qualname(type(v)), "notCapturedReason": "redactedIdent"} + + +def redacted_type(t: Any) -> dict: + return {"type": qualname(t), "notCapturedReason": "redactedType"} + + +def capture_pairs( + pairs: Iterable[Tuple[str, Any]], + level: int = MAXLEVEL, + maxlen: int = MAXLEN, + maxsize: int = MAXSIZE, + maxfields: int = MAXFIELDS, + stopping_cond: Optional[Callable[[Any], bool]] = None, +) -> Dict[str, Any]: + return { + n: (capture_value(v, level, maxlen, maxsize, maxfields, stopping_cond) if not redact(n) else redacted_value(v)) + for n, v in pairs + } + + +def capture_value( + value: Any, + level: int = MAXLEVEL, + maxlen: int = MAXLEN, + maxsize: int = MAXSIZE, + maxfields: int = MAXFIELDS, + stopping_cond: Optional[Callable[[Any], bool]] = None, +) -> Dict[str, Any]: + cond = stopping_cond if stopping_cond is not None else (lambda _: False) + + _type = type(value) + + if _type in BUILTIN_SIMPLE_TYPES: + if _type is NoneType: + return {"type": "NoneType", "isNull": True} + + if cond(value): + return { + "type": qualname(_type), + "notCapturedReason": cond.__name__, + } + + value_repr = serialize(value) + value_repr_len = len(value_repr) + return ( + { + "type": qualname(_type), + "value": value_repr, + } + if value_repr_len <= maxlen + else { + "type": qualname(_type), + "value": value_repr[:maxlen], + "truncated": True, + "size": value_repr_len, + } + ) + + if _type in BUILTIN_CONTAINER_TYPES: + if level < 0: + return { + "type": qualname(_type), + "notCapturedReason": "depth", + "size": len(value), + } + + if cond(value): + return { + "type": qualname(_type), + "notCapturedReason": 
cond.__name__, + "size": len(value), + } + + collection: Optional[List[Any]] = None + if _type in BUILTIN_MAPPNG_TYPES: + # Mapping + collection = [ + ( + capture_value( + k, + level=level - 1, + maxlen=maxlen, + maxsize=maxsize, + maxfields=maxfields, + stopping_cond=cond, + ), + capture_value( + v, + level=level - 1, + maxlen=maxlen, + maxsize=maxsize, + maxfields=maxfields, + stopping_cond=cond, + ) + if not (_isinstance(k, str) and redact(k)) + else redacted_value(v), + ) + for k, v in takewhile(lambda _: not cond(_), islice(value.items(), maxsize)) + ] + data = { + "type": qualname(_type), + "entries": collection, + "size": len(value), + } + + else: + # Sequence + collection = [ + capture_value( + v, + level=level - 1, + maxlen=maxlen, + maxsize=maxsize, + maxfields=maxfields, + stopping_cond=cond, + ) + for v in takewhile(lambda _: not cond(_), islice(value, maxsize)) + ] + data = { + "type": qualname(_type), + "elements": collection, + "size": len(value), + } + + if len(collection) < min(maxsize, len(value)): + data["notCapturedReason"] = cond.__name__ + elif len(value) > maxsize: + data["notCapturedReason"] = "collectionSize" + + return data + + # Arbitrary object + if level < 0: + return { + "type": qualname(_type), + "notCapturedReason": "depth", + } + + if redact_type(qualname(_type)): + return redacted_type(_type) + + if cond(value): + return { + "type": qualname(_type), + "notCapturedReason": cond.__name__, + } + + fields = get_fields(value) + captured_fields = { + n: ( + capture_value(v, level=level - 1, maxlen=maxlen, maxsize=maxsize, maxfields=maxfields, stopping_cond=cond) + if not redact(n) + else redacted_value(v) + ) + for n, v in takewhile(lambda _: not cond(_), islice(fields.items(), maxfields)) + } + data = { + "type": qualname(_type), + "fields": captured_fields, + } + if len(captured_fields) < min(maxfields, len(fields)): + data["notCapturedReason"] = cond.__name__ + elif len(fields) > maxfields: + data["notCapturedReason"] = "fieldCount" + + return data diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_uploader.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_uploader.py new file mode 100644 index 0000000..d05d9c0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/debugging/_uploader.py @@ -0,0 +1,97 @@ +from typing import Optional +from urllib.parse import quote + +from ddtrace.debugging._config import di_config +from ddtrace.debugging._encoding import BufferedEncoder +from ddtrace.debugging._metrics import metrics +from ddtrace.internal import compat +from ddtrace.internal.logger import get_logger +from ddtrace.internal.periodic import AwakeablePeriodicService +from ddtrace.internal.runtime import container +from ddtrace.internal.utils.http import connector +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter + + +log = get_logger(__name__) +meter = metrics.get_meter("uploader") + + +class LogsIntakeUploaderV1(AwakeablePeriodicService): + """Logs intake uploader. + + This class implements an interface with the debugger logs intake for both + the debugger and the events platform. 
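`capture_value` above produces a typed, JSON-friendly description of a value while enforcing depth, collection-size and field-count budgets, recording a `notCapturedReason` whenever a budget is hit. A heavily simplified sketch of that shape (the real implementation also handles redaction, object fields and caller-supplied stopping conditions):

```python
# Illustrative sketch: depth- and size-limited value capture.
from itertools import islice
from typing import Any, Dict


def capture_value(value: Any, level: int = 2, maxsize: int = 3) -> Dict[str, Any]:
    type_name = type(value).__name__
    if value is None:
        return {"type": "NoneType", "isNull": True}
    if isinstance(value, (bool, int, float, str)):
        return {"type": type_name, "value": repr(value)}
    if isinstance(value, (list, tuple, set, dict)):
        if level <= 0:
            return {"type": type_name, "notCapturedReason": "depth", "size": len(value)}
        if isinstance(value, dict):
            entries = [(capture_value(k, level - 1, maxsize), capture_value(v, level - 1, maxsize))
                       for k, v in islice(value.items(), maxsize)]
            data = {"type": type_name, "entries": entries, "size": len(value)}
        else:
            elements = [capture_value(v, level - 1, maxsize) for v in islice(value, maxsize)]
            data = {"type": type_name, "elements": elements, "size": len(value)}
        if len(value) > maxsize:
            data["notCapturedReason"] = "collectionSize"
        return data
    return {"type": type_name, "fields": {}}


# The inner list is truncated to 3 elements and flagged with notCapturedReason="collectionSize".
print(capture_value({"a": [1, 2, 3, 4]}, level=2, maxsize=3))
```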
+ """ + + ENDPOINT = di_config._intake_endpoint + + RETRY_ATTEMPTS = 3 + + def __init__(self, queue: BufferedEncoder, interval: Optional[float] = None) -> None: + super().__init__(interval or di_config.upload_flush_interval) + self._queue = queue + self._headers = { + "Content-type": "application/json; charset=utf-8", + "Accept": "text/plain", + } + + container_info = container.get_container_info() + if container_info is not None: + container_id = container_info.container_id + if container_id is not None: + self._headers["Datadog-Container-Id"] = container_id + + if di_config._tags_in_qs and di_config.tags: + self.ENDPOINT += f"?ddtags={quote(di_config.tags)}" + self._connect = connector(di_config._intake_url, timeout=di_config.upload_timeout) + + # Make it retryable + self._write_with_backoff = fibonacci_backoff_with_jitter( + initial_wait=0.618 * self.interval / (1.618**self.RETRY_ATTEMPTS) / 2, + attempts=self.RETRY_ATTEMPTS, + )(self._write) + + log.debug( + "Logs intake uploader initialized (url: %s, endpoint: %s, interval: %f)", + di_config._intake_url, + self.ENDPOINT, + self.interval, + ) + + def _write(self, payload: bytes) -> None: + try: + with self._connect() as conn: + conn.request( + "POST", + self.ENDPOINT, + payload, + headers=self._headers, + ) + resp = compat.get_connection_response(conn) + if not (200 <= resp.status < 300): + log.error("Failed to upload payload: [%d] %r", resp.status, resp.read()) + meter.increment("upload.error", tags={"status": str(resp.status)}) + else: + meter.increment("upload.success") + meter.distribution("upload.size", len(payload)) + except Exception: + log.error("Failed to write payload", exc_info=True) + meter.increment("error") + + def upload(self) -> None: + """Upload request.""" + self.awake() + + def periodic(self) -> None: + """Upload the buffer content to the logs intake.""" + count = self._queue.count + if count: + payload = self._queue.flush() + if payload is not None: + try: + self._write_with_backoff(payload) + meter.distribution("batch.cardinality", count) + except Exception: + log.debug("Cannot upload logs payload", exc_info=True) + + on_shutdown = periodic diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/__init__.py new file mode 100644 index 0000000..3ac50eb --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/__init__.py @@ -0,0 +1,23 @@ +class SpanTypes(object): + CACHE = "cache" + CASSANDRA = "cassandra" + ELASTICSEARCH = "elasticsearch" + GRPC = "grpc" + GRAPHQL = "graphql" + HTTP = "http" + MONGODB = "mongodb" + REDIS = "redis" + SQL = "sql" + TEMPLATE = "template" + TEST = "test" + WEB = "web" + WORKER = "worker" + AUTH = "auth" + SYSTEM = "system" + + +class SpanKind(object): + CLIENT = "client" + SERVER = "server" + PRODUCER = "producer" + CONSUMER = "consumer" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/aws.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/aws.py new file mode 100644 index 0000000..a4aea07 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/aws.py @@ -0,0 +1,88 @@ +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace.span import Span # noqa:F401 + + +def truncate_arg_value(value, max_len=1024): + # type: (Any, int) -> Any + """Truncate values which are bytes and greater than `max_len`. + Useful for parameters like 'Body' in `put_object` operations. 
+ """ + if isinstance(value, bytes) and len(value) > max_len: + return b"..." + + return value + + +def _add_api_param_span_tags(span, endpoint_name, params): + # type: (Span, str, Dict[str, Any]) -> None + # Note: Only some boto3 requests will supply these params + # i.e. that might explain why you see these tags being set to empty strings + if endpoint_name == "cloudwatch": + log_group_name = params.get("logGroupName") + if log_group_name: + span.set_tag_str("aws.cloudwatch.logs.log_group_name", log_group_name) + span.set_tag_str("loggroupname", log_group_name) + elif endpoint_name == "dynamodb": + table_name = params.get("TableName") + if table_name: + span.set_tag_str("aws.dynamodb.table_name", table_name) + span.set_tag_str("tablename", table_name) + elif endpoint_name == "kinesis": + stream_name = params.get("StreamName") + if stream_name: + span.set_tag_str("aws.kinesis.stream_name", stream_name) + span.set_tag_str("streamname", stream_name) + elif endpoint_name == "redshift": + cluster_identifier = params.get("ClusterIdentifier") + if cluster_identifier: + span.set_tag_str("aws.redshift.cluster_identifier", cluster_identifier) + span.set_tag_str("clusteridentifier", cluster_identifier) + elif endpoint_name == "s3": + bucket_name = params.get("Bucket") + if bucket_name: + span.set_tag_str("aws.s3.bucket_name", bucket_name) + span.set_tag_str("bucketname", bucket_name) + + elif endpoint_name == "sns": + topic_arn = params.get("TopicArn") + if topic_arn: + # example topicArn: arn:aws:sns:sa-east-1:1234:topicname + span.set_tag_str("aws.sns.topic_arn", topic_arn) + topicname = topic_arn.split(":")[-1] + aws_account = topic_arn.split(":")[-2] + span.set_tag_str("aws_account", aws_account) + span.set_tag_str("topicname", topicname) + + elif endpoint_name == "sqs": + queue_name = params.get("QueueName", "") + queue_url = params.get("QueueUrl") + if queue_url and (queue_url.startswith("sqs:") or queue_url.startswith("http")): + # example queue_url: https://sqs.sa-east-1.amazonaws.com/12345678/queuename + queue_name = queue_url.split("/")[-1] + aws_account = queue_url.split("/")[-2] + span.set_tag_str("aws_account", aws_account) + span.set_tag_str("aws.sqs.queue_name", queue_name) + span.set_tag_str("queuename", queue_name) + + elif endpoint_name == "lambda": + function_name = params.get("FunctionName", "") + span.set_tag_str("functionname", function_name) + + elif endpoint_name == "events": + rule_name = params.get("Name", "") + span.set_tag_str("rulename", rule_name) + + elif endpoint_name == "states": + state_machine_arn = params.get("stateMachineArn", "") + span.set_tag_str("statemachinearn", state_machine_arn) + + +AWSREGION = "aws.region" +REGION = "region" +AGENT = "aws.agent" +OPERATION = "aws.operation" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/cassandra.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/cassandra.py new file mode 100644 index 0000000..d510897 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/cassandra.py @@ -0,0 +1,6 @@ +# tags +CLUSTER = "cassandra.cluster" +KEYSPACE = "cassandra.keyspace" +CONSISTENCY_LEVEL = "cassandra.consistency_level" +PAGINATED = "cassandra.paginated" +PAGE_NUMBER = "cassandra.page_number" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/ci.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/ci.py new file mode 100644 index 0000000..1b4029f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/ci.py @@ -0,0 +1,576 @@ +""" +Tags for common CI attributes +""" +import json +import logging +import 
os +import platform +import re +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import MutableMapping # noqa:F401 +from typing import Optional # noqa:F401 + +from ddtrace.ext import git +from ddtrace.internal.logger import get_logger + + +# CI app dd_origin tag +CI_APP_TEST_ORIGIN = "ciapp-test" + +# Stage Name +STAGE_NAME = "ci.stage.name" + +# Job Name +JOB_NAME = "ci.job.name" + +# Job URL +JOB_URL = "ci.job.url" + +# Pipeline ID +PIPELINE_ID = "ci.pipeline.id" + +# Pipeline Name +PIPELINE_NAME = "ci.pipeline.name" + +# Pipeline Number +PIPELINE_NUMBER = "ci.pipeline.number" + +# Pipeline URL +PIPELINE_URL = "ci.pipeline.url" + +# Provider +PROVIDER_NAME = "ci.provider.name" + +# CI Node Name +NODE_NAME = "ci.node.name" + +# CI Node Labels +NODE_LABELS = "ci.node.labels" + +# Workspace Path +WORKSPACE_PATH = "ci.workspace_path" + +# Architecture +OS_ARCHITECTURE = "os.architecture" + +# Platform +OS_PLATFORM = "os.platform" + +# Version +OS_VERSION = "os.version" + +# Runtime Name +RUNTIME_NAME = "runtime.name" + +# Runtime Version +RUNTIME_VERSION = "runtime.version" + +# Version of the ddtrace library +LIBRARY_VERSION = "library_version" + +# CI Visibility env vars used for pipeline correlation ID +_CI_ENV_VARS = "_dd.ci.env_vars" + +_RE_URL = re.compile(r"(https?://|ssh://)[^/]*@") + + +log = get_logger(__name__) + + +def _filter_sensitive_info(url): + # type: (Optional[str]) -> Optional[str] + return _RE_URL.sub("\\1", url) if url is not None else None + + +def _get_runtime_and_os_metadata(): + """Extract configuration facet tags for OS and Python runtime.""" + return { + OS_ARCHITECTURE: platform.machine(), + OS_PLATFORM: platform.system(), + OS_VERSION: platform.release(), + RUNTIME_NAME: platform.python_implementation(), + RUNTIME_VERSION: platform.python_version(), + } + + +def tags(env=None, cwd=None): + # type: (Optional[MutableMapping[str, str]], Optional[str]) -> Dict[str, str] + """Extract and set tags from provider environ, as well as git metadata.""" + env = os.environ if env is None else env + tags = {} # type: Dict[str, Optional[str]] + for key, extract in PROVIDERS: + if key in env: + tags = extract(env) + break + + git_info = git.extract_git_metadata(cwd=cwd) + try: + git_info[WORKSPACE_PATH] = git.extract_workspace_path(cwd=cwd) + except git.GitNotFoundError: + log.error("Git executable not found, cannot extract git metadata.") + except ValueError as e: + debug_mode = log.isEnabledFor(logging.DEBUG) + stderr = str(e) + log.error("Error extracting git metadata: %s", stderr, exc_info=debug_mode) + + # Tags collected from CI provider take precedence over extracted git metadata, but any CI provider value + # is None or "" should be overwritten. 
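The merge order in `tags()` is: CI-provider tags come first, extracted git metadata only fills values the provider left unset or empty, and user-supplied git metadata wins whenever it has a value. A short sketch of that precedence:

```python
# Illustrative sketch of the precedence rules applied when merging CI tags.
def merge_ci_tags(provider_tags, git_metadata, user_tags):
    tags = dict(provider_tags)
    # Extracted git metadata only fills values the provider left unset or empty.
    tags.update({k: v for k, v in git_metadata.items() if not tags.get(k)})
    # User-supplied git metadata always wins when it has a value.
    tags.update({k: v for k, v in user_tags.items() if v})
    return tags


print(merge_ci_tags(
    provider_tags={"git.branch": "", "ci.pipeline.id": "42"},
    git_metadata={"git.branch": "main", "git.commit.sha": "abc123"},
    user_tags={"git.commit.sha": "def456"},
))
# {'git.branch': 'main', 'ci.pipeline.id': '42', 'git.commit.sha': 'def456'}
```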
+ tags.update({k: v for k, v in git_info.items() if not tags.get(k)}) + + user_specified_git_info = git.extract_user_git_metadata(env) + + # Tags provided by the user take precedence over everything + tags.update({k: v for k, v in user_specified_git_info.items() if v}) + + # if git.BRANCH is a tag, we associate its value to TAG instead of BRANCH + if git.is_ref_a_tag(tags.get(git.BRANCH)): + if not tags.get(git.TAG): + tags[git.TAG] = git.normalize_ref(tags.get(git.BRANCH)) + else: + tags[git.TAG] = git.normalize_ref(tags.get(git.TAG)) + del tags[git.BRANCH] + else: + tags[git.BRANCH] = git.normalize_ref(tags.get(git.BRANCH)) + tags[git.TAG] = git.normalize_ref(tags.get(git.TAG)) + + tags[git.REPOSITORY_URL] = _filter_sensitive_info(tags.get(git.REPOSITORY_URL)) + + workspace_path = tags.get(WORKSPACE_PATH) + if workspace_path: + tags[WORKSPACE_PATH] = os.path.expanduser(workspace_path) + + tags.update(_get_runtime_and_os_metadata()) + + return {k: v for k, v in tags.items() if v is not None} + + +def extract_appveyor(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Appveyor environ.""" + url = "https://ci.appveyor.com/project/{0}/builds/{1}".format( + env.get("APPVEYOR_REPO_NAME"), env.get("APPVEYOR_BUILD_ID") + ) + if env.get("APPVEYOR_REPO_PROVIDER") == "github": + repository = "https://github.com/{0}.git".format(env.get("APPVEYOR_REPO_NAME")) # type: Optional[str] + commit = env.get("APPVEYOR_REPO_COMMIT") # type: Optional[str] + branch = env.get("APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH") or env.get( + "APPVEYOR_REPO_BRANCH" + ) # type: Optional[str] + tag = env.get("APPVEYOR_REPO_TAG_NAME") # type: Optional[str] + else: + repository = commit = branch = tag = None + + commit_message = env.get("APPVEYOR_REPO_COMMIT_MESSAGE") + if commit_message: + extended = env.get("APPVEYOR_REPO_COMMIT_MESSAGE_EXTENDED") + if extended: + commit_message += "\n" + extended + + return { + PROVIDER_NAME: "appveyor", + git.REPOSITORY_URL: repository, + git.COMMIT_SHA: commit, + WORKSPACE_PATH: env.get("APPVEYOR_BUILD_FOLDER"), + PIPELINE_ID: env.get("APPVEYOR_BUILD_ID"), + PIPELINE_NAME: env.get("APPVEYOR_REPO_NAME"), + PIPELINE_NUMBER: env.get("APPVEYOR_BUILD_NUMBER"), + PIPELINE_URL: url, + JOB_URL: url, + git.BRANCH: branch, + git.TAG: tag, + git.COMMIT_MESSAGE: commit_message, + git.COMMIT_AUTHOR_NAME: env.get("APPVEYOR_REPO_COMMIT_AUTHOR"), + git.COMMIT_AUTHOR_EMAIL: env.get("APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL"), + } + + +def extract_azure_pipelines(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Azure pipelines environ.""" + if env.get("SYSTEM_TEAMFOUNDATIONSERVERURI") and env.get("SYSTEM_TEAMPROJECTID") and env.get("BUILD_BUILDID"): + base_url = "{0}{1}/_build/results?buildId={2}".format( + env.get("SYSTEM_TEAMFOUNDATIONSERVERURI"), env.get("SYSTEM_TEAMPROJECTID"), env.get("BUILD_BUILDID") + ) + pipeline_url = base_url # type: Optional[str] + job_url = base_url + "&view=logs&j={0}&t={1}".format( + env.get("SYSTEM_JOBID"), env.get("SYSTEM_TASKINSTANCEID") + ) # type: Optional[str] + else: + pipeline_url = job_url = None + + return { + PROVIDER_NAME: "azurepipelines", + WORKSPACE_PATH: env.get("BUILD_SOURCESDIRECTORY"), + PIPELINE_ID: env.get("BUILD_BUILDID"), + PIPELINE_NAME: env.get("BUILD_DEFINITIONNAME"), + PIPELINE_NUMBER: env.get("BUILD_BUILDID"), + PIPELINE_URL: pipeline_url, + JOB_URL: job_url, + git.REPOSITORY_URL: env.get("SYSTEM_PULLREQUEST_SOURCEREPOSITORYURI") or env.get("BUILD_REPOSITORY_URI"), 
+ git.COMMIT_SHA: env.get("SYSTEM_PULLREQUEST_SOURCECOMMITID") or env.get("BUILD_SOURCEVERSION"), + git.BRANCH: env.get("SYSTEM_PULLREQUEST_SOURCEBRANCH") + or env.get("BUILD_SOURCEBRANCH") + or env.get("BUILD_SOURCEBRANCHNAME"), + git.COMMIT_MESSAGE: env.get("BUILD_SOURCEVERSIONMESSAGE"), + git.COMMIT_AUTHOR_NAME: env.get("BUILD_REQUESTEDFORID"), + git.COMMIT_AUTHOR_EMAIL: env.get("BUILD_REQUESTEDFOREMAIL"), + STAGE_NAME: env.get("SYSTEM_STAGEDISPLAYNAME"), + JOB_NAME: env.get("SYSTEM_JOBDISPLAYNAME"), + _CI_ENV_VARS: json.dumps( + { + "SYSTEM_TEAMPROJECTID": env.get("SYSTEM_TEAMPROJECTID"), + "BUILD_BUILDID": env.get("BUILD_BUILDID"), + "SYSTEM_JOBID": env.get("SYSTEM_JOBID"), + }, + separators=(",", ":"), + ), + } + + +def extract_bitbucket(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Bitbucket environ.""" + url = "https://bitbucket.org/{0}/addon/pipelines/home#!/results/{1}".format( + env.get("BITBUCKET_REPO_FULL_NAME"), env.get("BITBUCKET_BUILD_NUMBER") + ) + return { + git.BRANCH: env.get("BITBUCKET_BRANCH"), + git.COMMIT_SHA: env.get("BITBUCKET_COMMIT"), + git.REPOSITORY_URL: env.get("BITBUCKET_GIT_SSH_ORIGIN") or env.get("BITBUCKET_GIT_HTTP_ORIGIN"), + git.TAG: env.get("BITBUCKET_TAG"), + JOB_URL: url, + PIPELINE_ID: env.get("BITBUCKET_PIPELINE_UUID", "").strip("{}}") or None, # noqa: B005 + PIPELINE_NAME: env.get("BITBUCKET_REPO_FULL_NAME"), + PIPELINE_NUMBER: env.get("BITBUCKET_BUILD_NUMBER"), + PIPELINE_URL: url, + PROVIDER_NAME: "bitbucket", + WORKSPACE_PATH: env.get("BITBUCKET_CLONE_DIR"), + } + + +def extract_buildkite(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Buildkite environ.""" + # Get all keys which start with BUILDKITE_AGENT_META_DATA_x + node_label_list = [] # type: List[str] + buildkite_agent_meta_data_prefix = "BUILDKITE_AGENT_META_DATA_" + for env_variable in env: + if env_variable.startswith(buildkite_agent_meta_data_prefix): + key = env_variable.replace(buildkite_agent_meta_data_prefix, "").lower() + value = env.get(env_variable) + node_label_list.append("{}:{}".format(key, value)) + return { + git.BRANCH: env.get("BUILDKITE_BRANCH"), + git.COMMIT_SHA: env.get("BUILDKITE_COMMIT"), + git.REPOSITORY_URL: env.get("BUILDKITE_REPO"), + git.TAG: env.get("BUILDKITE_TAG"), + PIPELINE_ID: env.get("BUILDKITE_BUILD_ID"), + PIPELINE_NAME: env.get("BUILDKITE_PIPELINE_SLUG"), + PIPELINE_NUMBER: env.get("BUILDKITE_BUILD_NUMBER"), + PIPELINE_URL: env.get("BUILDKITE_BUILD_URL"), + JOB_URL: "{0}#{1}".format(env.get("BUILDKITE_BUILD_URL"), env.get("BUILDKITE_JOB_ID")), + PROVIDER_NAME: "buildkite", + WORKSPACE_PATH: env.get("BUILDKITE_BUILD_CHECKOUT_PATH"), + git.COMMIT_MESSAGE: env.get("BUILDKITE_MESSAGE"), + git.COMMIT_AUTHOR_NAME: env.get("BUILDKITE_BUILD_AUTHOR"), + git.COMMIT_AUTHOR_EMAIL: env.get("BUILDKITE_BUILD_AUTHOR_EMAIL"), + git.COMMIT_COMMITTER_NAME: env.get("BUILDKITE_BUILD_CREATOR"), + git.COMMIT_COMMITTER_EMAIL: env.get("BUILDKITE_BUILD_CREATOR_EMAIL"), + _CI_ENV_VARS: json.dumps( + { + "BUILDKITE_BUILD_ID": env.get("BUILDKITE_BUILD_ID"), + "BUILDKITE_JOB_ID": env.get("BUILDKITE_JOB_ID"), + }, + separators=(",", ":"), + ), + NODE_LABELS: json.dumps(node_label_list, separators=(",", ":")), + NODE_NAME: env.get("BUILDKITE_AGENT_ID"), + } + + +def extract_circle_ci(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from CircleCI environ.""" + return { + git.BRANCH: env.get("CIRCLE_BRANCH"), + git.COMMIT_SHA: 
env.get("CIRCLE_SHA1"), + git.REPOSITORY_URL: env.get("CIRCLE_REPOSITORY_URL"), + git.TAG: env.get("CIRCLE_TAG"), + PIPELINE_ID: env.get("CIRCLE_WORKFLOW_ID"), + PIPELINE_NAME: env.get("CIRCLE_PROJECT_REPONAME"), + PIPELINE_NUMBER: env.get("CIRCLE_BUILD_NUM"), + PIPELINE_URL: "https://app.circleci.com/pipelines/workflows/{0}".format(env.get("CIRCLE_WORKFLOW_ID")), + JOB_URL: env.get("CIRCLE_BUILD_URL"), + JOB_NAME: env.get("CIRCLE_JOB"), + PROVIDER_NAME: "circleci", + WORKSPACE_PATH: env.get("CIRCLE_WORKING_DIRECTORY"), + _CI_ENV_VARS: json.dumps( + { + "CIRCLE_WORKFLOW_ID": env.get("CIRCLE_WORKFLOW_ID"), + "CIRCLE_BUILD_NUM": env.get("CIRCLE_BUILD_NUM"), + }, + separators=(",", ":"), + ), + } + + +def extract_codefresh(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Codefresh environ.""" + build_id = env.get("CF_BUILD_ID") + return { + git.BRANCH: env.get("CF_BRANCH"), + PIPELINE_ID: build_id, + PIPELINE_NAME: env.get("CF_PIPELINE_NAME"), + PIPELINE_URL: env.get("CF_BUILD_URL"), + JOB_NAME: env.get("CF_STEP_NAME"), + PROVIDER_NAME: "codefresh", + _CI_ENV_VARS: json.dumps( + {"CF_BUILD_ID": build_id}, + separators=(",", ":"), + ), + } + + +def extract_github_actions(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Github environ.""" + + github_server_url = _filter_sensitive_info(env.get("GITHUB_SERVER_URL")) + github_repository = env.get("GITHUB_REPOSITORY") + git_commit_sha = env.get("GITHUB_SHA") + github_run_id = env.get("GITHUB_RUN_ID") + run_attempt = env.get("GITHUB_RUN_ATTEMPT") + + pipeline_url = "{0}/{1}/actions/runs/{2}".format( + github_server_url, + github_repository, + github_run_id, + ) + + env_vars = { + "GITHUB_SERVER_URL": github_server_url, + "GITHUB_REPOSITORY": github_repository, + "GITHUB_RUN_ID": github_run_id, + } + if run_attempt: + env_vars["GITHUB_RUN_ATTEMPT"] = run_attempt + pipeline_url = "{0}/attempts/{1}".format(pipeline_url, run_attempt) + + return { + git.BRANCH: env.get("GITHUB_HEAD_REF") or env.get("GITHUB_REF"), + git.COMMIT_SHA: git_commit_sha, + git.REPOSITORY_URL: "{0}/{1}.git".format(github_server_url, github_repository), + JOB_URL: "{0}/{1}/commit/{2}/checks".format(github_server_url, github_repository, git_commit_sha), + PIPELINE_ID: github_run_id, + PIPELINE_NAME: env.get("GITHUB_WORKFLOW"), + PIPELINE_NUMBER: env.get("GITHUB_RUN_NUMBER"), + PIPELINE_URL: pipeline_url, + JOB_NAME: env.get("GITHUB_JOB"), + PROVIDER_NAME: "github", + WORKSPACE_PATH: env.get("GITHUB_WORKSPACE"), + _CI_ENV_VARS: json.dumps(env_vars, separators=(",", ":")), + } + + +def extract_gitlab(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Gitlab environ.""" + author = env.get("CI_COMMIT_AUTHOR") + author_name = None # type: Optional[str] + author_email = None # type: Optional[str] + if author: + # Extract name and email from `author` which is in the form "name " + author_name, author_email = author.strip("> ").split(" <") + commit_timestamp = env.get("CI_COMMIT_TIMESTAMP") + return { + git.BRANCH: env.get("CI_COMMIT_REF_NAME"), + git.COMMIT_SHA: env.get("CI_COMMIT_SHA"), + git.REPOSITORY_URL: env.get("CI_REPOSITORY_URL"), + git.TAG: env.get("CI_COMMIT_TAG"), + STAGE_NAME: env.get("CI_JOB_STAGE"), + JOB_NAME: env.get("CI_JOB_NAME"), + JOB_URL: env.get("CI_JOB_URL"), + PIPELINE_ID: env.get("CI_PIPELINE_ID"), + PIPELINE_NAME: env.get("CI_PROJECT_PATH"), + PIPELINE_NUMBER: env.get("CI_PIPELINE_IID"), + PIPELINE_URL: 
env.get("CI_PIPELINE_URL"), + PROVIDER_NAME: "gitlab", + WORKSPACE_PATH: env.get("CI_PROJECT_DIR"), + git.COMMIT_MESSAGE: env.get("CI_COMMIT_MESSAGE"), + git.COMMIT_AUTHOR_NAME: author_name, + git.COMMIT_AUTHOR_EMAIL: author_email, + git.COMMIT_AUTHOR_DATE: commit_timestamp, + _CI_ENV_VARS: json.dumps( + { + "CI_PROJECT_URL": env.get("CI_PROJECT_URL"), + "CI_PIPELINE_ID": env.get("CI_PIPELINE_ID"), + "CI_JOB_ID": env.get("CI_JOB_ID"), + }, + separators=(",", ":"), + ), + NODE_LABELS: env.get("CI_RUNNER_TAGS"), + NODE_NAME: env.get("CI_RUNNER_ID"), + } + + +def extract_jenkins(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Jenkins environ.""" + branch = env.get("GIT_BRANCH", "") + name = env.get("JOB_NAME") + if name and branch: + name = re.sub("/{0}".format(git.normalize_ref(branch)), "", name) + if name: + name = "/".join((v for v in name.split("/") if v and "=" not in v)) + node_labels_list = [] # type: List[str] + node_labels_env = env.get("NODE_LABELS") # type: Optional[str] + if node_labels_env: + node_labels_list = node_labels_env.split() + return { + git.BRANCH: env.get("GIT_BRANCH"), + git.COMMIT_SHA: env.get("GIT_COMMIT"), + git.REPOSITORY_URL: env.get("GIT_URL", env.get("GIT_URL_1")), + PIPELINE_ID: env.get("BUILD_TAG"), + PIPELINE_NAME: name, + PIPELINE_NUMBER: env.get("BUILD_NUMBER"), + PIPELINE_URL: env.get("BUILD_URL"), + PROVIDER_NAME: "jenkins", + WORKSPACE_PATH: env.get("WORKSPACE"), + _CI_ENV_VARS: json.dumps( + { + "DD_CUSTOM_TRACE_ID": env.get("DD_CUSTOM_TRACE_ID"), + }, + separators=(",", ":"), + ), + NODE_LABELS: json.dumps(node_labels_list, separators=(",", ":")), + NODE_NAME: env.get("NODE_NAME"), + } + + +def extract_teamcity(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Teamcity environ.""" + return { + JOB_URL: env.get("BUILD_URL"), + JOB_NAME: env.get("TEAMCITY_BUILDCONF_NAME"), + PROVIDER_NAME: "teamcity", + } + + +def extract_travis(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Travis environ.""" + return { + git.BRANCH: env.get("TRAVIS_PULL_REQUEST_BRANCH") or env.get("TRAVIS_BRANCH"), + git.COMMIT_SHA: env.get("TRAVIS_COMMIT"), + git.REPOSITORY_URL: "https://github.com/{0}.git".format(env.get("TRAVIS_REPO_SLUG")), + git.TAG: env.get("TRAVIS_TAG"), + JOB_URL: env.get("TRAVIS_JOB_WEB_URL"), + PIPELINE_ID: env.get("TRAVIS_BUILD_ID"), + PIPELINE_NAME: env.get("TRAVIS_REPO_SLUG"), + PIPELINE_NUMBER: env.get("TRAVIS_BUILD_NUMBER"), + PIPELINE_URL: env.get("TRAVIS_BUILD_WEB_URL"), + PROVIDER_NAME: "travisci", + WORKSPACE_PATH: env.get("TRAVIS_BUILD_DIR"), + git.COMMIT_MESSAGE: env.get("TRAVIS_COMMIT_MESSAGE"), + } + + +def extract_bitrise(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Bitrise environ.""" + commit = env.get("BITRISE_GIT_COMMIT") or env.get("GIT_CLONE_COMMIT_HASH") + branch = env.get("BITRISEIO_GIT_BRANCH_DEST") or env.get("BITRISE_GIT_BRANCH") + if env.get("BITRISE_GIT_MESSAGE"): + message = env.get("BITRISE_GIT_MESSAGE") # type: Optional[str] + elif env.get("GIT_CLONE_COMMIT_MESSAGE_SUBJECT") or env.get("GIT_CLONE_COMMIT_MESSAGE_BODY"): + message = "{0}:\n{1}".format( + env.get("GIT_CLONE_COMMIT_MESSAGE_SUBJECT"), env.get("GIT_CLONE_COMMIT_MESSAGE_BODY") + ) + else: + message = None + + return { + PROVIDER_NAME: "bitrise", + PIPELINE_ID: env.get("BITRISE_BUILD_SLUG"), + PIPELINE_NAME: env.get("BITRISE_TRIGGERED_WORKFLOW_ID"), + PIPELINE_NUMBER: 
env.get("BITRISE_BUILD_NUMBER"), + PIPELINE_URL: env.get("BITRISE_BUILD_URL"), + WORKSPACE_PATH: env.get("BITRISE_SOURCE_DIR"), + git.REPOSITORY_URL: env.get("GIT_REPOSITORY_URL"), + git.COMMIT_SHA: commit, + git.BRANCH: branch, + git.TAG: env.get("BITRISE_GIT_TAG"), + git.COMMIT_MESSAGE: message, + git.COMMIT_AUTHOR_NAME: env.get("GIT_CLONE_COMMIT_AUTHOR_NAME"), + git.COMMIT_AUTHOR_EMAIL: env.get("GIT_CLONE_COMMIT_AUTHOR_EMAIL"), + git.COMMIT_COMMITTER_NAME: env.get("GIT_CLONE_COMMIT_COMMITER_NAME"), + git.COMMIT_COMMITTER_EMAIL: env.get("GIT_CLONE_COMMIT_COMMITER_NAME"), + } + + +def extract_buddy(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from Buddy environ.""" + return { + PROVIDER_NAME: "buddy", + PIPELINE_ID: "{0}/{1}".format(env.get("BUDDY_PIPELINE_ID"), env.get("BUDDY_EXECUTION_ID")), + PIPELINE_NAME: env.get("BUDDY_PIPELINE_NAME"), + PIPELINE_NUMBER: env.get("BUDDY_EXECUTION_ID"), + PIPELINE_URL: env.get("BUDDY_EXECUTION_URL"), + git.REPOSITORY_URL: env.get("BUDDY_SCM_URL"), + git.COMMIT_SHA: env.get("BUDDY_EXECUTION_REVISION"), + git.BRANCH: env.get("BUDDY_EXECUTION_BRANCH"), + git.TAG: env.get("BUDDY_EXECUTION_TAG"), + git.COMMIT_MESSAGE: env.get("BUDDY_EXECUTION_REVISION_MESSAGE"), + git.COMMIT_COMMITTER_NAME: env.get("BUDDY_EXECUTION_REVISION_COMMITTER_NAME"), + git.COMMIT_COMMITTER_EMAIL: env.get("BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL"), + } + + +def extract_codebuild(env): + # type: (MutableMapping[str, str]) -> Dict[str, Optional[str]] + """Extract CI tags from codebuild environments.""" + + tags = {} + + # AWS Codepipeline + if "CODEBUILD_INITIATOR" in env: + codebuild_initiator = env.get("CODEBUILD_INITIATOR") + if codebuild_initiator and codebuild_initiator.startswith("codepipeline"): + tags.update( + { + PROVIDER_NAME: "awscodepipeline", + PIPELINE_ID: env.get("DD_PIPELINE_EXECUTION_ID"), + _CI_ENV_VARS: json.dumps( + { + "CODEBUILD_BUILD_ARN": env.get("CODEBUILD_BUILD_ARN"), + "DD_PIPELINE_EXECUTION_ID": env.get("DD_PIPELINE_EXECUTION_ID"), + "DD_ACTION_EXECUTION_ID": env.get("DD_ACTION_EXECUTION_ID"), + }, + separators=(",", ":"), + ), + } + ) + + return tags + + +PROVIDERS = ( + ("APPVEYOR", extract_appveyor), + ("TF_BUILD", extract_azure_pipelines), + ("BITBUCKET_COMMIT", extract_bitbucket), + ("BUILDKITE", extract_buildkite), + ("CIRCLECI", extract_circle_ci), + ("CF_BUILD_ID", extract_codefresh), + ("GITHUB_SHA", extract_github_actions), + ("GITLAB_CI", extract_gitlab), + ("JENKINS_URL", extract_jenkins), + ("TEAMCITY_VERSION", extract_teamcity), + ("TRAVIS", extract_travis), + ("BITRISE_BUILD_SLUG", extract_bitrise), + ("BUDDY", extract_buddy), + ("CODEBUILD_INITIATOR", extract_codebuild), +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/consul.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/consul.py new file mode 100644 index 0000000..72b9986 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/consul.py @@ -0,0 +1,4 @@ +APP = "consul" +SERVICE = "consul" +CMD = "consul.command" +KEY = "consul.key" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/db.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/db.py new file mode 100644 index 0000000..9223f5a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/db.py @@ -0,0 +1,6 @@ +# tags +NAME = "db.name" # the database name (eg: dbname for pgsql) +USER = "db.user" # the user connecting to the db +SYSTEM = "db.system" # the database's DBMS name (e.g. 
postgresql for pgsql) +ROWCOUNT = "db.row_count" # the rowcount of a query +SYSTEM = "db.system" # the database's DBMS name (e.g. postgresql for pgsql) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/elasticsearch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/elasticsearch.py new file mode 100644 index 0000000..ca42c0b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/elasticsearch.py @@ -0,0 +1,9 @@ +SERVICE = "elasticsearch" +APP = "elasticsearch" + +# standard tags +URL = "elasticsearch.url" +METHOD = "elasticsearch.method" +TOOK = "elasticsearch.took" +PARAMS = "elasticsearch.params" +BODY = "elasticsearch.body" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/git.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/git.py new file mode 100644 index 0000000..92e4328 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/git.py @@ -0,0 +1,402 @@ +""" +tags for common git attributes +""" +import contextlib +import logging +import os +import random +import re +import subprocess +from typing import Dict # noqa:F401 +from typing import Generator # noqa:F401 +from typing import List # noqa:F401 +from typing import MutableMapping # noqa:F401 +from typing import NamedTuple # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal import compat +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.time import StopWatch + + +GitNotFoundError = FileNotFoundError + +# Git Branch +BRANCH = "git.branch" + +# Git Commit SHA +COMMIT_SHA = "git.commit.sha" + +# Git Repository URL +REPOSITORY_URL = "git.repository_url" + +# Git Tag +TAG = "git.tag" + +# Git Commit Author Name +COMMIT_AUTHOR_NAME = "git.commit.author.name" + +# Git Commit Author Email +COMMIT_AUTHOR_EMAIL = "git.commit.author.email" + +# Git Commit Author Date (UTC) +COMMIT_AUTHOR_DATE = "git.commit.author.date" + +# Git Commit Committer Name +COMMIT_COMMITTER_NAME = "git.commit.committer.name" + +# Git Commit Committer Email +COMMIT_COMMITTER_EMAIL = "git.commit.committer.email" + +# Git Commit Committer Date (UTC) +COMMIT_COMMITTER_DATE = "git.commit.committer.date" + +# Git Commit Message +COMMIT_MESSAGE = "git.commit.message" + +# Python main package +MAIN_PACKAGE = "python_main_package" + +_RE_REFS = re.compile(r"^refs/(heads/)?") +_RE_ORIGIN = re.compile(r"^origin/") +_RE_TAGS = re.compile(r"^tags/") + +log = get_logger(__name__) + +_GitSubprocessDetails = NamedTuple( + "_GitSubprocessDetails", [("stdout", str), ("stderr", str), ("duration", float), ("returncode", int)] +) + + +def normalize_ref(name): + # type: (Optional[str]) -> Optional[str] + return _RE_TAGS.sub("", _RE_ORIGIN.sub("", _RE_REFS.sub("", name))) if name is not None else None + + +def is_ref_a_tag(ref): + # type: (Optional[str]) -> bool + return "tags/" in ref if ref else False + + +def _git_subprocess_cmd_with_details(*cmd, cwd=None, std_in=None): + # type: (str, Optional[str], Optional[bytes]) -> _GitSubprocessDetails + """Helper for invoking the git CLI binary + + Returns a tuple containing: + - a str representation of stdout + - a str representation of stderr + - the time it took to execute the command, in milliseconds + - the exit code + """ + git_cmd = ["git"] + git_cmd.extend(cmd) + + log.debug("Executing git command: %s", git_cmd) + + with StopWatch() as stopwatch: + process = subprocess.Popen( + git_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, cwd=cwd + ) + stdout, 
stderr = process.communicate(input=std_in) + + return _GitSubprocessDetails( + compat.ensure_text(stdout).strip(), + compat.ensure_text(stderr).strip(), + stopwatch.elapsed() * 1000, # StopWatch measures elapsed time in seconds + process.returncode, + ) + + +def _git_subprocess_cmd(cmd, cwd=None, std_in=None): + # type: (Union[str, list[str]], Optional[str], Optional[bytes]) -> str + """Helper for invoking the git CLI binary.""" + if isinstance(cmd, str): + cmd = cmd.split(" ") + + stdout, stderr, _, returncode = _git_subprocess_cmd_with_details(*cmd, cwd=cwd, std_in=None) + + if returncode == 0: + return stdout + raise ValueError(stderr) + + +def _set_safe_directory(): + try: + _git_subprocess_cmd("config --global --add safe.directory *") + except GitNotFoundError: + log.error("Git executable not found, cannot extract git metadata.") + except ValueError: + log.error("Error setting safe directory") + + +def _extract_clone_defaultremotename_with_details(cwd): + # type: (Optional[str]) -> _GitSubprocessDetails + return _git_subprocess_cmd_with_details( + "config", "--default", "origin", "--get", "clone.defaultRemoteName", cwd=cwd + ) + + +def _extract_upstream_sha(cwd=None): + # type: (Optional[str]) -> str + output = _git_subprocess_cmd("rev-parse @{upstream}", cwd=cwd) + return output + + +def _is_shallow_repository_with_details(cwd=None): + # type: (Optional[str]) -> Tuple[bool, float, int] + stdout, _, duration, returncode = _git_subprocess_cmd_with_details("rev-parse", "--is-shallow-repository", cwd=cwd) + is_shallow = stdout.strip() == "true" + return (is_shallow, duration, returncode) + + +def _get_device_for_path(path): + # type: (str) -> int + return os.stat(path).st_dev + + +def _unshallow_repository_with_details(cwd=None, repo=None, refspec=None): + # type (Optional[str], Optional[str], Optional[str]) -> _GitSubprocessDetails + cmd = [ + "fetch", + '--shallow-since="1 month ago"', + "--update-shallow", + "--filter=blob:none", + "--recurse-submodules=no", + ] + if repo is not None: + cmd.append(repo) + if refspec is not None: + cmd.append(refspec) + + return _git_subprocess_cmd_with_details(*cmd, cwd=cwd) + + +def _unshallow_repository(cwd=None, repo=None, refspec=None): + # type (Optional[str], Optional[str], Optional[str]) -> None + _unshallow_repository_with_details(cwd, repo, refspec) + + +def extract_user_info(cwd=None): + # type: (Optional[str]) -> Dict[str, Tuple[str, str, str]] + """Extract commit author info from the git repository in the current directory or one specified by ``cwd``.""" + # Note: `git show -s --format... 
--date...` is supported since git 2.1.4 onwards + stdout = _git_subprocess_cmd("show -s --format=%an,%ae,%ad,%cn,%ce,%cd --date=format:%Y-%m-%dT%H:%M:%S%z", cwd=cwd) + author_name, author_email, author_date, committer_name, committer_email, committer_date = stdout.split(",") + return { + "author": (author_name, author_email, author_date), + "committer": (committer_name, committer_email, committer_date), + } + + +def extract_git_version(cwd=None): + output = _git_subprocess_cmd("--version") + try: + version_info = tuple([int(part) for part in output.split()[2].split(".")]) + except ValueError: + log.error("Git version not found, it is not following the desired version format: %s", output) + return 0, 0, 0 + return version_info + + +def _extract_remote_url_with_details(cwd=None): + # type: (Optional[str]) -> _GitSubprocessDetails + return _git_subprocess_cmd_with_details("config", "--get", "remote.origin.url", cwd=cwd) + + +def extract_remote_url(cwd=None): + remote_url, error, _, returncode = _extract_remote_url_with_details(cwd=cwd) + if returncode == 0: + return remote_url + raise ValueError(error) + + +def _extract_latest_commits_with_details(cwd=None): + # type: (Optional[str]) -> _GitSubprocessDetails + return _git_subprocess_cmd_with_details("log", "--format=%H", "-n", "1000", '--since="1 month ago"', cwd=cwd) + + +def extract_latest_commits(cwd=None): + # type: (Optional[str]) -> List[str] + latest_commits, error, _, returncode = _extract_latest_commits_with_details(cwd=cwd) + if returncode == 0: + return latest_commits.split("\n") if latest_commits else [] + raise ValueError(error) + + +def get_rev_list_excluding_commits(commit_shas, cwd=None): + return _get_rev_list_with_details(excluded_commit_shas=commit_shas, cwd=cwd)[0] + + +def _get_rev_list_with_details(excluded_commit_shas=None, included_commit_shas=None, cwd=None): + # type: (Optional[list[str]], Optional[list[str]], Optional[str]) -> _GitSubprocessDetails + command = ["rev-list", "--objects", "--filter=blob:none"] + if extract_git_version(cwd=cwd) >= (2, 23, 0): + command.append('--since="1 month ago"') + command.append("--no-object-names") + command.append("HEAD") + if excluded_commit_shas: + exclusions = ["^%s" % sha for sha in excluded_commit_shas] + command.extend(exclusions) + if included_commit_shas: + inclusions = ["%s" % sha for sha in included_commit_shas] + command.extend(inclusions) + return _git_subprocess_cmd_with_details(*command, cwd=cwd) + + +def _get_rev_list(excluded_commit_shas=None, included_commit_shas=None, cwd=None): + # type: (Optional[list[str]], Optional[list[str]], Optional[str]) -> str + return _get_rev_list_with_details( + excluded_commit_shas=excluded_commit_shas, included_commit_shas=included_commit_shas, cwd=cwd + )[0] + + +def _extract_repository_url_with_details(cwd=None): + # type: (Optional[str]) -> _GitSubprocessDetails + """Extract the repository url from the git repository in the current directory or one specified by ``cwd``.""" + + return _git_subprocess_cmd_with_details("ls-remote", "--get-url", cwd=cwd) + + +def extract_repository_url(cwd=None): + # type: (Optional[str]) -> str + """Extract the repository url from the git repository in the current directory or one specified by ``cwd``.""" + stdout, stderr, _, returncode = _extract_repository_url_with_details(cwd=cwd) + if returncode == 0: + return stdout + raise ValueError(stderr) + + +def extract_commit_message(cwd=None): + # type: (Optional[str]) -> str + """Extract git commit message from the git repository in the current 
directory or one specified by ``cwd``.""" + # Note: `git show -s --format... --date...` is supported since git 2.1.4 onwards + commit_message = _git_subprocess_cmd("show -s --format=%s", cwd=cwd) + return commit_message + + +def extract_workspace_path(cwd=None): + # type: (Optional[str]) -> str + """Extract the root directory path from the git repository in the current directory or one specified by ``cwd``.""" + workspace_path = _git_subprocess_cmd("rev-parse --show-toplevel", cwd=cwd) + return workspace_path + + +def extract_branch(cwd=None): + # type: (Optional[str]) -> str + """Extract git branch from the git repository in the current directory or one specified by ``cwd``.""" + branch = _git_subprocess_cmd("rev-parse --abbrev-ref HEAD", cwd=cwd) + return branch + + +def extract_commit_sha(cwd=None): + # type: (Optional[str]) -> str + """Extract git commit SHA from the git repository in the current directory or one specified by ``cwd``.""" + commit_sha = _git_subprocess_cmd("rev-parse HEAD", cwd=cwd) + return commit_sha + + +def extract_git_metadata(cwd=None): + # type: (Optional[str]) -> Dict[str, Optional[str]] + """Extract git commit metadata.""" + tags = {} # type: Dict[str, Optional[str]] + _set_safe_directory() + try: + tags[REPOSITORY_URL] = extract_repository_url(cwd=cwd) + tags[COMMIT_MESSAGE] = extract_commit_message(cwd=cwd) + users = extract_user_info(cwd=cwd) + tags[COMMIT_AUTHOR_NAME] = users["author"][0] + tags[COMMIT_AUTHOR_EMAIL] = users["author"][1] + tags[COMMIT_AUTHOR_DATE] = users["author"][2] + tags[COMMIT_COMMITTER_NAME] = users["committer"][0] + tags[COMMIT_COMMITTER_EMAIL] = users["committer"][1] + tags[COMMIT_COMMITTER_DATE] = users["committer"][2] + tags[BRANCH] = extract_branch(cwd=cwd) + tags[COMMIT_SHA] = extract_commit_sha(cwd=cwd) + except GitNotFoundError: + log.error("Git executable not found, cannot extract git metadata.") + except ValueError as e: + debug_mode = log.isEnabledFor(logging.DEBUG) + stderr = str(e) + log.error("Error extracting git metadata: %s", stderr, exc_info=debug_mode) + + return tags + + +def extract_user_git_metadata(env=None): + # type: (Optional[MutableMapping[str, str]]) -> Dict[str, Optional[str]] + """Extract git commit metadata from user-provided env vars.""" + env = os.environ if env is None else env + + branch = normalize_ref(env.get("DD_GIT_BRANCH")) + tag = normalize_ref(env.get("DD_GIT_TAG")) + + # if DD_GIT_BRANCH is a tag, we associate its value to TAG instead of BRANCH + if is_ref_a_tag(env.get("DD_GIT_BRANCH")): + tag = branch + branch = None + + tags = {} + tags[REPOSITORY_URL] = env.get("DD_GIT_REPOSITORY_URL") + tags[COMMIT_SHA] = env.get("DD_GIT_COMMIT_SHA") + tags[BRANCH] = branch + tags[TAG] = tag + tags[COMMIT_MESSAGE] = env.get("DD_GIT_COMMIT_MESSAGE") + tags[COMMIT_AUTHOR_DATE] = env.get("DD_GIT_COMMIT_AUTHOR_DATE") + tags[COMMIT_AUTHOR_EMAIL] = env.get("DD_GIT_COMMIT_AUTHOR_EMAIL") + tags[COMMIT_AUTHOR_NAME] = env.get("DD_GIT_COMMIT_AUTHOR_NAME") + tags[COMMIT_COMMITTER_DATE] = env.get("DD_GIT_COMMIT_COMMITTER_DATE") + tags[COMMIT_COMMITTER_EMAIL] = env.get("DD_GIT_COMMIT_COMMITTER_EMAIL") + tags[COMMIT_COMMITTER_NAME] = env.get("DD_GIT_COMMIT_COMMITTER_NAME") + + return tags + + +@contextlib.contextmanager +def _build_git_packfiles_with_details(revisions, cwd=None, use_tempdir=True): + # type: (str, Optional[str], bool) -> Generator + basename = str(random.randint(1, 1000000)) + + # check that the tempdir and cwd are on the same filesystem, otherwise git pack-objects will fail + cwd = cwd if cwd else 
os.getcwd() + tempdir = compat.TemporaryDirectory() + if _get_device_for_path(cwd) == _get_device_for_path(tempdir.name): + basepath = tempdir.name + else: + log.debug("tempdir %s and cwd %s are on different filesystems, using cwd", tempdir.name, cwd) + basepath = cwd + + prefix = "{basepath}/{basename}".format(basepath=basepath, basename=basename) + + log.debug("Building packfiles in prefix path: %s", prefix) + + try: + process_details = _git_subprocess_cmd_with_details( + "pack-objects", + "--compression=9", + "--max-pack-size=3m", + prefix, + cwd=cwd, + std_in=revisions.encode("utf-8"), + ) + yield prefix, process_details + finally: + if isinstance(tempdir, compat.TemporaryDirectory): + log.debug("Cleaning up temporary directory: %s", basepath) + tempdir.cleanup() + + +@contextlib.contextmanager +def build_git_packfiles(revisions, cwd=None): + # type: (str, Optional[str]) -> Generator + with _build_git_packfiles_with_details(revisions, cwd=cwd) as (prefix, process_details): + if process_details.returncode == 0: + yield prefix + return + log.debug( + "Failed to pack objects, command return code: %s, error: %s", + process_details.returncode, + process_details.stderr, + ) + raise ValueError(process_details.stderr) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/http.py new file mode 100644 index 0000000..31a8286 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/http.py @@ -0,0 +1,22 @@ +""" +Standard http tags. + +For example: + +span.set_tag(URL, '/user/home') +span.set_tag(STATUS_CODE, 404) +""" +# tags +URL = "http.url" +METHOD = "http.method" +STATUS_CODE = "http.status_code" +USER_AGENT = "http.useragent" +STATUS_MSG = "http.status_msg" +QUERY_STRING = "http.query.string" +RETRIES_REMAIN = "http.retries_remain" +VERSION = "http.version" +CLIENT_IP = "http.client_ip" +ROUTE = "http.route" + +# template render span type +TEMPLATE = "template" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kafka.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kafka.py new file mode 100644 index 0000000..5b13c33 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kafka.py @@ -0,0 +1,14 @@ +SERVICE = "kafka" + +TOPIC = "kafka.topic" +PARTITION = "kafka.partition" +MESSAGE_KEY = "kafka.message_key" +MESSAGE_OFFSET = "kafka.message_offset" +GROUP_ID = "kafka.group_id" +TOMBSTONE = "kafka.tombstone" +RECEIVED_MESSAGE = "kafka.received_message" + +HOST_LIST = "messaging.kafka.bootstrap.servers" + +PRODUCE = "kafka.produce" +CONSUME = "kafka.consume" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kombu.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kombu.py new file mode 100644 index 0000000..44a25f5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/kombu.py @@ -0,0 +1,12 @@ +SERVICE = "kombu" + +# net extension +VHOST = "out.vhost" + +# standard tags +EXCHANGE = "kombu.exchange" +BODY_LEN = "kombu.body_length" +ROUTING_KEY = "kombu.routing_key" + +PUBLISH_NAME = "kombu.publish" +RECEIVE_NAME = "kombu.receive" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/memcached.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/memcached.py new file mode 100644 index 0000000..9b24051 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/memcached.py @@ -0,0 +1,4 @@ +CMD = "memcached.command" +DBMS_NAME = "memcached" +SERVICE = "memcached" +QUERY = "memcached.query" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/mongo.py 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/mongo.py new file mode 100644 index 0000000..d27d97d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/mongo.py @@ -0,0 +1,4 @@ +SERVICE = "mongodb" +COLLECTION = "mongodb.collection" +DB = "mongodb.db" +QUERY = "mongodb.query" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/net.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/net.py new file mode 100644 index 0000000..4e3ff4a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/net.py @@ -0,0 +1,12 @@ +""" +Standard network tags. +""" + +# request targets +TARGET_HOST = "out.host" +TARGET_PORT = "network.destination.port" +TARGET_IP = "network.destination.ip" + +PEER_HOSTNAME = "peer.hostname" + +BYTES_OUT = "net.out.bytes" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/redis.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/redis.py new file mode 100644 index 0000000..32f1d96 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/redis.py @@ -0,0 +1,14 @@ +# defaults +APP = "redis" +DEFAULT_SERVICE = "redis" + +# net extension +DB = "out.redis_db" + +# standard tags +RAWCMD = "redis.raw_command" +CMD = "redis.command" +ARGS_LEN = "redis.args_length" +PIPELINE_LEN = "redis.pipeline_length" +PIPELINE_AGE = "redis.pipeline_age" +CLIENT_NAME = "redis.client_name" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/sql.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/sql.py new file mode 100644 index 0000000..a8965dc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/sql.py @@ -0,0 +1,75 @@ +from typing import Dict # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.internal.module import ModuleWatchdog + + +log = get_logger(__name__) + +# tags +DB = "sql.db" # the name of the database + + +def normalize_vendor(vendor): + # type: (str) -> str + """Return a canonical name for a type of database.""" + if not vendor: + return "db" # should this ever happen? + elif "sqlite" in vendor: + return "sqlite" + elif "postgres" in vendor or vendor == "psycopg2": + return "postgres" + else: + return vendor + + +def _dd_parse_pg_dsn(dsn): + # type: (str) -> Dict[str, str] + """ + Return a dictionary of the components of a postgres DSN. + >>> parse_pg_dsn('user=dog port=1543 dbname=dogdata') + {'user':'dog', 'port':'1543', 'dbname':'dogdata'} + """ + dsn_dict = dict() + try: + # Provides a default implementation for parsing DSN strings. + # The following is an example of a valid DSN string that fails to be parsed: + # "db=moon user=ears options='-c statement_timeout=1000 -c lock_timeout=250'" + dsn_dict = dict(_.split("=", 1) for _ in dsn.split()) + except Exception: + log.debug("Failed to parse postgres dsn connection", exc_info=True) + return dsn_dict + + +# Do not import from psycopg directly! This reference will be updated at runtime to use +# a better implementation that is provided by the psycopg library. +# This is done to avoid circular imports. 
+parse_pg_dsn = _dd_parse_pg_dsn + + +@ModuleWatchdog.after_module_imported("psycopg2") +def use_psycopg2_parse_dsn(psycopg_module): + """Replaces parse_pg_dsn with the helper function defined in psycopg2""" + global parse_pg_dsn + + try: + from psycopg2.extensions import parse_dsn + + parse_pg_dsn = parse_dsn + except ImportError: + # Best effort, we'll use our own parser: _dd_parse_pg_dsn + pass + + +@ModuleWatchdog.after_module_imported("psycopg") +def use_psycopg3_parse_dsn(psycopg_module): + """Replaces parse_pg_dsn with the helper function defined in psycopg3""" + global parse_pg_dsn + + try: + from psycopg.conninfo import conninfo_to_dict + + parse_pg_dsn = conninfo_to_dict + except ImportError: + # Best effort, we'll use our own parser: _dd_parse_pg_dsn + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/test.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/test.py new file mode 100644 index 0000000..0829f6e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/test.py @@ -0,0 +1,93 @@ +""" +tags for common test attributes +""" + +from enum import Enum + + +# Test Arguments +ARGUMENTS = TEST_ARGUMENTS = "test.arguments" + +# Test Framework +FRAMEWORK = TEST_FRAMEWORK = "test.framework" + +# Test Framework Version +FRAMEWORK_VERSION = TEST_FRAMEWORK_VERSION = "test.framework_version" + +# Test Command +COMMAND = "test.command" + +# Test Module +MODULE = "test.module" + +# Test Module Path +MODULE_PATH = "test.module_path" + +# Test Suite +SUITE = TEST_SUITE = "test.suite" + +# Test Name +NAME = TEST_NAME = "test.name" + +# Test Parameters +PARAMETERS = "test.parameters" + +# Test Result (XFail, XPass) +RESULT = TEST_RESULT = "test.result" + +# Skip Reason +SKIP_REASON = TEST_SKIP_REASON = "test.skip_reason" + +# Test Status +STATUS = TEST_STATUS = "test.status" + +# Traits +TRAITS = TEST_TRAITS = "test.traits" + +# Test Type +TYPE = TEST_TYPE = "test.type" + +# Test File +# Use when test implementation file is different from test suite name. 
+FILE = TEST_FILE = "test.file" + +# Test Source File +SOURCE_FILE = TEST_SOURCE_FILE = "test.source.file" + +# Test Source Start +SOURCE_START = TEST_SOURCE_START = "test.source.start" + +# Test Source End +SOURCE_END = TEST_SOURCE_END = "test.source.end" + +# Test Code Coverage Total Lines Percentage +LINES_PCT = TEST_LINES_PCT = "test.code_coverage.lines_pct" + +# Test Class Hierarchy +CLASS_HIERARCHY = "test.class_hierarchy" + +# Test Codeowners +CODEOWNERS = TEST_CODEOWNERS = "test.codeowners" + +# ITR +ITR_SKIPPED = "test.skipped_by_itr" + +# Test session-level ITR and coverage: +ITR_DD_CI_ITR_TESTS_SKIPPED = "_dd.ci.itr.tests_skipped" +ITR_TEST_SKIPPING_ENABLED = "test.itr.tests_skipping.enabled" +ITR_TEST_SKIPPING_TESTS_SKIPPED = "test.itr.tests_skipping.tests_skipped" +ITR_TEST_SKIPPING_TYPE = "test.itr.tests_skipping.type" +ITR_TEST_SKIPPING_COUNT = "test.itr.tests_skipping.count" +ITR_TEST_CODE_COVERAGE_ENABLED = "test.code_coverage.enabled" + +# ITR: unskippable tests +ITR_UNSKIPPABLE = "test.itr.unskippable" +ITR_FORCED_RUN = "test.itr.forced_run" + + +class Status(Enum): + PASS = "pass" + FAIL = "fail" + SKIP = "skip" + XFAIL = "xfail" + XPASS = "xpass" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/user.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/user.py new file mode 100644 index 0000000..3c90474 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/ext/user.py @@ -0,0 +1,8 @@ +# tags +ID = "usr.id" +NAME = "usr.name" +EMAIL = "usr.email" +ROLE = "usr.role" +SCOPE = "usr.scope" +SESSION_ID = "usr.session_id" +EXISTS = "usr.exists" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/filters.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/filters.py new file mode 100644 index 0000000..edbfb11 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/filters.py @@ -0,0 +1,72 @@ +import abc +import re +from typing import TYPE_CHECKING # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.ext import http +from ddtrace.internal.processor.trace import TraceProcessor + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace import Span # noqa:F401 + + +class TraceFilter(TraceProcessor): + @abc.abstractmethod + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + """Processes a trace. + + None can be returned to prevent the trace from being exported. + """ + pass + + +class FilterRequestsOnUrl(TraceFilter): + r"""Filter out traces from incoming http requests based on the request's url. + + This class takes as argument a list of regular expression patterns + representing the urls to be excluded from tracing. A trace will be excluded + if its root span contains a ``http.url`` tag and if this tag matches any of + the provided regular expression using the standard python regexp match + semantic (https://docs.python.org/3/library/re.html#re.match). + + :param list regexps: a list of regular expressions (or a single string) defining + the urls that should be filtered out. 
+ + Examples: + To filter out http calls to domain api.example.com:: + + FilterRequestsOnUrl(r'http://api\\.example\\.com') + + To filter out http calls to all first level subdomains from example.com:: + + FilterRequestOnUrl(r'http://.*+\\.example\\.com') + + To filter out calls to both http://test.example.com and http://example.com/healthcheck:: + + FilterRequestOnUrl([r'http://test\\.example\\.com', r'http://example\\.com/healthcheck']) + """ + + def __init__(self, regexps: Union[str, List[str]]): + if isinstance(regexps, str): + regexps = [regexps] + self._regexps = [re.compile(regexp) for regexp in regexps] + + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + """ + When the filter is registered in the tracer, process_trace is called by + on each trace before it is sent to the agent, the returned value will + be fed to the next filter in the list. If process_trace returns None, + the whole trace is discarded. + """ + for span in trace: + url = span.get_tag(http.URL) + if span.parent_id is None and url is not None: + for regexp in self._regexps: + if regexp.match(url): + return None + return trace diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/README.md b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/README.md new file mode 100644 index 0000000..5cb3808 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/README.md @@ -0,0 +1,7 @@ +# Internal +This internal module is used to define and document an internal only API for `ddtrace`. + +These modules are not intended to be used outside of `ddtrace`. + +The APIs found within `ddtrace.internal` are subject to breaking changes at any time +and do not follow the semver versioning scheme of the `ddtrace` package. diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..48ae8d1 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.pyi new file mode 100644 index 0000000..2d8610d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_encoding.pyi @@ -0,0 +1,41 @@ +from typing import Any +from typing import List +from typing import Optional +from typing import Union + +from ddtrace.span import Span + +Trace = List[Span] + +class ListStringTable(object): + def index(self, string: str) -> int: ... + +class BufferFull(Exception): + pass + +class BufferItemTooLarge(Exception): + pass + +class BufferedEncoder(object): + max_size: int + max_item_size: int + def __init__(self, max_size: int, max_item_size: int) -> None: ... + def __len__(self) -> int: ... + def put(self, item: Any) -> None: ... + def encode(self) -> Optional[bytes]: ... + @property + def size(self) -> int: ... + +class ListBufferedEncoder(BufferedEncoder): + def get(self) -> List[bytes]: ... + def encode_item(self, item: Any) -> bytes: ... + +class MsgpackEncoderBase(BufferedEncoder): + content_type: str + def get_bytes(self) -> bytes: ... + def _decode(self, data: Union[str, bytes]) -> Any: ... 
+ +class MsgpackEncoderV03(MsgpackEncoderBase): ... +class MsgpackEncoderV05(MsgpackEncoderBase): ... + +def packb(o: Any, **kwargs) -> bytes: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..2bcdfc4 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.pyi new file mode 100644 index 0000000..f4e4450 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_rand.pyi @@ -0,0 +1,3 @@ +def seed() -> None: ... +def rand64bits(check_pid: bool = True) -> int: ... +def rand128bits(check_pid: bool = True) -> int: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..a4c8ffa Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.pyi new file mode 100644 index 0000000..fd95131 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_tagset.pyi @@ -0,0 +1,18 @@ +from typing import Dict + +class TagsetDecodeError(ValueError): ... +class TagsetEncodeError(ValueError): ... + +class TagsetMaxSizeEncodeError(TagsetEncodeError): + values: Dict[str, str] + max_size: int + current_results: str + def __init__(self, values: Dict[str, str], max_size: int, current_results: str): ... + +class TagsetMaxSizeDecodeError(TagsetDecodeError): + value: Dict[str, str] + max_size: int + def __init__(self, value: Dict[str, str], max_size: int): ... + +def decode_tagset_string(tagset: str) -> Dict[str, str]: ... +def encode_tagset_values(values: Dict[str, str], max_size: int = 512) -> str: ... 
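The _tagset stubs above describe helpers, implemented in the compiled _tagset extension, for the comma-separated key=value encoding ddtrace uses when propagating trace tags in headers such as x-datadog-tags. A rough sketch of the intended round trip; the tag names and header value are illustrative assumptions, not taken from this diff:

    from ddtrace.internal._tagset import decode_tagset_string, encode_tagset_values

    values = decode_tagset_string("_dd.p.dm=-4,_dd.p.usr.id=12345")  # -> {"_dd.p.dm": "-4", "_dd.p.usr.id": "12345"}
    header = encode_tagset_values(values, max_size=512)              # raises TagsetMaxSizeEncodeError if the result is too long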
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_utils.pxd b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_utils.pxd new file mode 100644 index 0000000..9b79b95 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/_utils.pxd @@ -0,0 +1,2 @@ +cdef extern from "_utils.h": + cdef inline int PyBytesLike_Check(object o) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/agent.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/agent.py new file mode 100644 index 0000000..151852c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/agent.py @@ -0,0 +1,106 @@ +import json +import os +import socket +from typing import TypeVar +from typing import Union + +from ddtrace.internal.logger import get_logger +from ddtrace.settings import _config as ddconfig + +from .http import HTTPConnection +from .http import HTTPSConnection +from .uds import UDSHTTPConnection +from .utils.http import get_connection + + +DEFAULT_HOSTNAME = "localhost" +DEFAULT_TRACE_PORT = 8126 +DEFAULT_UNIX_TRACE_PATH = "/var/run/datadog/apm.socket" +DEFAULT_UNIX_DSD_PATH = "/var/run/datadog/dsd.socket" +DEFAULT_STATS_PORT = 8125 + +ConnectionType = Union[HTTPSConnection, HTTPConnection, UDSHTTPConnection] + +T = TypeVar("T") + +log = get_logger(__name__) + + +# This method returns if a hostname is an IPv6 address +def is_ipv6_hostname(hostname): + # type: (Union[T, str]) -> bool + if not isinstance(hostname, str): + return False + try: + socket.inet_pton(socket.AF_INET6, hostname) + return True + except socket.error: # not a valid address + return False + + +def get_trace_url(): + # type: () -> str + """Return the Agent URL computed from the environment. + + Raises a ``ValueError`` if the URL is not supported by the Agent. + """ + user_supplied_host = ddconfig._trace_agent_hostname is not None + user_supplied_port = ddconfig._trace_agent_port is not None + + url = ddconfig._trace_agent_url + + if not url: + if user_supplied_host or user_supplied_port: + host = ddconfig._trace_agent_hostname or DEFAULT_HOSTNAME + port = ddconfig._trace_agent_port or DEFAULT_TRACE_PORT + if is_ipv6_hostname(host): + host = "[{}]".format(host) + url = "http://%s:%s" % (host, port) + elif os.path.exists("/var/run/datadog/apm.socket"): + url = "unix://%s" % (DEFAULT_UNIX_TRACE_PATH) + else: + url = "http://{}:{}".format(DEFAULT_HOSTNAME, DEFAULT_TRACE_PORT) + + return url + + +def get_stats_url(): + # type: () -> str + user_supplied_host = ddconfig._stats_agent_hostname is not None + user_supplied_port = ddconfig._stats_agent_port is not None + + url = ddconfig._stats_agent_url + + if not url: + if user_supplied_host or user_supplied_port: + port = ddconfig._stats_agent_port or DEFAULT_STATS_PORT + host = ddconfig._stats_agent_hostname or DEFAULT_HOSTNAME + if is_ipv6_hostname(host): + host = "[{}]".format(host) + url = "udp://{}:{}".format(host, port) + elif os.path.exists("/var/run/datadog/dsd.socket"): + url = "unix://%s" % (DEFAULT_UNIX_DSD_PATH) + else: + url = "udp://{}:{}".format(DEFAULT_HOSTNAME, DEFAULT_STATS_PORT) + return url + + +def info(): + agent_url = get_trace_url() + _conn = get_connection(agent_url, timeout=ddconfig._agent_timeout_seconds) + try: + _conn.request("GET", "info", headers={"content-type": "application/json"}) + resp = _conn.getresponse() + data = resp.read() + finally: + _conn.close() + + if resp.status == 404: + # Remote configuration is not enabled or unsupported by the agent + return None + + if resp.status < 200 or resp.status >= 300: + 
log.warning("Unexpected error: HTTP error status %s, reason %s", resp.status, resp.reason) + return None + + return json.loads(data) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/assembly.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/assembly.py new file mode 100644 index 0000000..9d50299 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/assembly.py @@ -0,0 +1,274 @@ +# Grammar: +# +# ident ::= [a-zA-Z_][a-zA-Z0-9_]* +# number ::= [0-9]+ +# label ::= ident ":" +# label_ref ::= "@" ident +# string_ref ::= "$" ident +# try_block_begin ::= "try" label_ref ["lasti"]? +# try_block_end ::= "tried" +# opcode ::= [A-Z][A-Z0-9_]* +# bind_opcode_arg ::= "{" ident "}" +# opcode_arg ::= label_ref | string | number | bind_opcode_arg | +# instruction ::= opcode [opcode_arg]? +# line ::= label | try_block_begin | try_block_end | instruction + +import dis +import sys +from types import CodeType +import typing as t + +import bytecode as bc + + +if sys.version_info >= (3, 11): + ParsedLine = t.Union[bc.Instr, bc.Label, bc.TryBegin, bc.TryEnd] +else: + ParsedLine = t.Union[bc.Instr, bc.Label] + + +def relocate(instrs: bc.Bytecode, lineno: int) -> bc.Bytecode: + new_instrs = bc.Bytecode() + for i in instrs: + if isinstance(i, bc.Instr): + new_i = i.copy() + new_i.lineno = lineno + new_instrs.append(new_i) + else: + new_instrs.append(i) + return new_instrs + + +def transform_instruction(opcode: str, arg: t.Any) -> t.Tuple[str, t.Any]: + # Handle pseudo-instructions + if sys.version_info >= (3, 12): + if opcode.upper() == "LOAD_METHOD": + opcode = "LOAD_ATTR" + arg = (True, arg) + elif opcode.upper() == "LOAD_ATTR" and not isinstance(arg, tuple): + arg = (False, arg) + + return opcode, arg + + +class BindOpArg(bc.Label): + # We cannot have arbitrary objects in Bytecode, so we subclass Label + def __init__(self, name: str, arg: str, lineno: t.Optional[int] = None) -> None: + self.name = name + self.arg = arg + self.lineno = lineno + + def __call__(self, bind_args: t.Dict[str, t.Any], lineno: t.Optional[int] = None) -> bc.Instr: + return bc.Instr(self.name, bind_args[self.arg], lineno=lineno if lineno is not None else self.lineno) + + +class Assembly: + def __init__( + self, name: t.Optional[str] = None, filename: t.Optional[str] = None, lineno: t.Optional[int] = None + ) -> None: + self._labels: t.Dict[str, bc.Label] = {} + self._ref_labels: t.Dict[str, bc.Label] = {} + self._tb: t.Optional[bc.TryBegin] = None + self._instrs = bc.Bytecode() + self._instrs.name = name or "" + self._instrs.filename = filename or __file__ + self._lineno = lineno + self._bind_opargs: t.Dict[int, BindOpArg] = {} + + def parse_ident(self, text: str) -> str: + if not text.isidentifier(): + raise ValueError("invalid identifier %s" % text) + + return text + + def parse_number(self, text: str) -> t.Optional[int]: + try: + return int(text) + except ValueError: + return None + + def parse_label(self, line: str) -> t.Optional[bc.Label]: + if not line.endswith(":"): + return None + + label_ident = self.parse_ident(line[:-1]) + if label_ident in self._labels: + raise ValueError("label %s already defined" % label_ident) + + label = self._labels[label_ident] = self._ref_labels.pop(label_ident, None) or bc.Label() + + return label + + def parse_label_ref(self, text: str) -> t.Optional[bc.Label]: + if not text.startswith("@"): + return None + + label_ident = self.parse_ident(text[1:]) + + try: + return self._labels[label_ident] + except KeyError: + try: + return self._ref_labels[label_ident] + except 
KeyError: + label = self._ref_labels[label_ident] = bc.Label() + return label + + def parse_string_ref(self, text: str) -> t.Optional[str]: + if not text.startswith("$"): + return None + + return self.parse_ident(text[1:]) + + if sys.version_info >= (3, 11): + + def parse_try_begin(self, line: str) -> t.Optional[bc.TryBegin]: + try: + head, label_ref, *lasti = line.split(maxsplit=2) + except ValueError: + return None + + if head != "try": + return None + + if self._tb is not None: + raise ValueError("cannot start try block while another is open") + + label = self.parse_label_ref(label_ref) + if label is None: + raise ValueError("invalid label reference for try block") + + tb = self._tb = bc.TryBegin(label, push_lasti=bool(lasti)) + + return tb + + def parse_try_end(self, line: str) -> t.Optional[bc.TryEnd]: + if line != "tried": + return None + + if self._tb is None: + raise ValueError("cannot end try block while none is open") + + end = bc.TryEnd(self._tb) + + self._tb = None + + return end + + def parse_opcode(self, text: str) -> str: + opcode = text.upper() + if opcode not in dis.opmap: + raise ValueError("unknown opcode %s" % opcode) + + return opcode + + def parse_expr(self, text: str) -> t.Any: + frame = sys._getframe(1) + + _globals = frame.f_globals.copy() + _globals["asm"] = bc + + return eval(text, _globals, frame.f_locals) # nosec + + def parse_opcode_arg(self, text: str) -> t.Union[bc.Label, str, int, t.Any]: + if not text: + return bc.UNSET + + return ( + self.parse_label_ref(text) + or self.parse_string_ref(text) + or self.parse_number(text) + or self.parse_expr(text) + ) + + def parse_bind_opcode_arg(self, text: str) -> t.Optional[str]: + if not text.startswith("{") or not text.endswith("}"): + return None + + return text[1:-1] + + def parse_instruction(self, line: str) -> t.Optional[t.Union[bc.Instr, BindOpArg]]: + opcode, *args = line.split(maxsplit=1) + + arg = "" + if args: + (arg,) = args + bind_arg = self.parse_bind_opcode_arg(arg) + if bind_arg is not None: + return BindOpArg(self.parse_opcode(opcode), bind_arg, lineno=self._lineno) + + return bc.Instr( + *transform_instruction(self.parse_opcode(opcode), self.parse_opcode_arg(arg)), lineno=self._lineno + ) + + def parse_line(self, line: str) -> ParsedLine: + if sys.version_info >= (3, 11): + entry = ( + self.parse_label(line) + or self.parse_try_begin(line) + or self.parse_try_end(line) + or self.parse_instruction(line) + ) + else: + entry = self.parse_label(line) or self.parse_instruction(line) + + if entry is None: + raise ValueError("invalid line %s" % line) + + return entry + + def _validate(self) -> None: + if self._ref_labels: + raise ValueError("undefined labels: %s" % ", ".join(self._ref_labels)) + + def parse(self, asm: str) -> None: + for line in (_.strip() for _ in asm.splitlines()): + if not line or line.startswith("#"): + continue + + entry = self.parse_line(line) + if isinstance(entry, BindOpArg): + self._bind_opargs[len(self._instrs)] = entry + + self._instrs.append(entry) + + self._validate() + + def bind(self, bind_args: t.Optional[t.Dict[str, t.Any]] = None, lineno: t.Optional[int] = None) -> bc.Bytecode: + if not self._bind_opargs: + if lineno is not None: + return relocate(self._instrs, lineno) + return self._instrs + + if bind_args is None: + raise ValueError("missing bind arguments") + + # If we have bind opargs, the bytecode we parsed has some + # BindOpArg placeholders that need to be resolved. 
Therefore, we + make a copy of the parsed bytecode and replace the BindOpArg + placeholders with the resolved values. + instrs = bc.Bytecode(self._instrs) + for i, arg in self._bind_opargs.items(): + instrs[i] = arg(bind_args, lineno=lineno) + + return relocate(instrs, lineno) if lineno is not None else instrs + + def compile(self, bind_args: t.Optional[t.Dict[str, t.Any]] = None, lineno: t.Optional[int] = None) -> CodeType: + return self.bind(bind_args, lineno=lineno).to_code() + + def _label_ident(self, label: bc.Label) -> str: + return next(ident for ident, l in self._labels.items() if l is label) # noqa: E741 + + def dis(self) -> None: + for entry in self._instrs: + if isinstance(entry, bc.Instr): + print(f" {entry.name:<32}{entry.arg if entry.arg is not None else ''}") + elif isinstance(entry, BindOpArg): + print(f" {entry.name:<32}{{{entry.arg}}}") + elif isinstance(entry, bc.Label): + print(f"{self._label_ident(entry)}:") + elif isinstance(entry, bc.TryBegin): + print(f"try @{self._label_ident(entry.target)} (lasti={entry.push_lasti})") + + def __iter__(self) -> t.Iterator[bc.Instr]: + return iter(self._instrs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/atexit.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/atexit.py new file mode 100644 index 0000000..0d778e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/atexit.py @@ -0,0 +1,79 @@ +# -*- encoding: utf-8 -*- +""" +An API to provide atexit functionalities +""" +from __future__ import absolute_import + +import atexit +import logging +import signal +import threading +import typing # noqa:F401 + +from ddtrace.internal.utils import signals + + +log = logging.getLogger(__name__) + + +if hasattr(atexit, "unregister"): + register = atexit.register + unregister = atexit.unregister +else: + # Hello Python 2! + _registry = [] # type: typing.List[typing.Tuple[typing.Callable[..., None], typing.Tuple, typing.Dict]] + + def _ddtrace_atexit(): + # type: (...) -> None + """Wrapper function that calls all registered functions on normal program termination""" + global _registry + + # DEV: we make a copy of the registry to prevent hook execution from + # introducing new hooks, potentially causing an infinite loop. + for hook, args, kwargs in list(_registry): + try: + hook(*args, **kwargs) + except Exception: + # Mimic the behaviour of Python's atexit hooks. + log.exception("Error in atexit hook %r", hook) + + def register( + func, # type: typing.Callable[..., typing.Any] + *args, # type: typing.Any + **kwargs, # type: typing.Any + ): + # type: (...) -> typing.Callable[..., typing.Any] + """Register a function to be executed upon normal program termination""" + + _registry.append((func, args, kwargs)) + return func + + def unregister(func): + # type: (typing.Callable[..., typing.Any]) -> None + """ + Unregister an exit function which was previously registered using + atexit.register. + If the function was not registered, it is ignored. + """ + global _registry + + _registry = [(f, args, kwargs) for f, args, kwargs in _registry if f != func] + + atexit.register(_ddtrace_atexit) + + +# registers a function to be called when an exit signal (TERM or INT) is received.
+def register_on_exit_signal(f): + def handle_exit(sig, frame): + f() + + if threading.current_thread() is threading.main_thread(): + try: + signals.handle_signal(signal.SIGTERM, handle_exit) + signals.handle_signal(signal.SIGINT, handle_exit) + except Exception: + # We catch a general exception here because we don't know + # what might go wrong, but we don't want to stop + # normal program execution based upon failing to register + # a signal handler. + log.debug("Encountered an exception while registering a signal", exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/__init__.py new file mode 100644 index 0000000..f7f0cf9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/__init__.py @@ -0,0 +1,11 @@ +""" +CI Visibility Service. +This is normally started automatically by including ``ddtrace=1`` or ``--ddtrace`` in the pytest run command. +To start the service manually, invoke the ``enable`` method:: + from ddtrace.internal.ci_visibility import CIVisibility + CIVisibility.enable() +""" +from .recorder import CIVisibility + + +__all__ = ["CIVisibility"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/constants.py new file mode 100644 index 0000000..8a33f59 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/constants.py @@ -0,0 +1,66 @@ +from enum import IntEnum + + +SUITE = "suite" +TEST = "test" + +EVENT_TYPE = "type" + + +# Test Session ID +SESSION_ID = "test_session_id" + +# Test Module ID +MODULE_ID = "test_module_id" + +# Test Suite ID +SUITE_ID = "test_suite_id" + +# Event type signals for CI Visibility +SESSION_TYPE = "test_session_end" + +MODULE_TYPE = "test_module_end" + +SUITE_TYPE = "test_suite_end" + +# Agentless and EVP-specific constants +COVERAGE_TAG_NAME = "test.coverage" + +EVP_PROXY_AGENT_BASE_PATH = "/evp_proxy/v2" +EVP_PROXY_AGENT_ENDPOINT = "{}/api/v2/citestcycle".format(EVP_PROXY_AGENT_BASE_PATH) +AGENTLESS_ENDPOINT = "api/v2/citestcycle" +AGENTLESS_COVERAGE_ENDPOINT = "api/v2/citestcov" +AGENTLESS_API_KEY_HEADER_NAME = "dd-api-key" +AGENTLESS_APP_KEY_HEADER_NAME = "dd-application-key" +EVP_NEEDS_APP_KEY_HEADER_NAME = "X-Datadog-NeedsAppKey" +EVP_NEEDS_APP_KEY_HEADER_VALUE = "true" +EVP_PROXY_COVERAGE_ENDPOINT = "{}/{}".format(EVP_PROXY_AGENT_BASE_PATH, AGENTLESS_COVERAGE_ENDPOINT) +EVP_SUBDOMAIN_HEADER_API_VALUE = "api" +EVP_SUBDOMAIN_HEADER_COVERAGE_VALUE = "citestcov-intake" +EVP_SUBDOMAIN_HEADER_EVENT_VALUE = "citestcycle-intake" +EVP_SUBDOMAIN_HEADER_NAME = "X-Datadog-EVP-Subdomain" +AGENTLESS_BASE_URL = "https://citestcycle-intake" +AGENTLESS_COVERAGE_BASE_URL = "https://citestcov-intake" +AGENTLESS_DEFAULT_SITE = "datadoghq.com" +GIT_API_BASE_PATH = "/api/v2/git" +SETTING_ENDPOINT = "/api/v2/libraries/tests/services/setting" +SKIPPABLE_ENDPOINT = "/api/v2/ci/tests/skippable" + +# Intelligent Test Runner constants +ITR_UNSKIPPABLE_REASON = "datadog_itr_unskippable" +SKIPPED_BY_ITR_REASON = "Skipped by Datadog Intelligent Test Runner" + +# Tracer configuration defaults: +TRACER_PARTIAL_FLUSH_MIN_SPANS = 1 + + +class REQUESTS_MODE(IntEnum): + AGENTLESS_EVENTS = 0 + EVP_PROXY_EVENTS = 1 + TRACES = 2 + + +# Miscellaneous constants +CUSTOM_CONFIGURATIONS_PREFIX = "test.configuration" + +CIVISIBILITY_LOG_FILTER_RE = 
r"^ddtrace\.(contrib\.(coverage|pytest|unittest)|internal\.ci_visibility|ext\.git)" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/coverage.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/coverage.py new file mode 100644 index 0000000..e72e767 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/coverage.py @@ -0,0 +1,163 @@ +from itertools import groupby +import json +from typing import Dict # noqa:F401 +from typing import Iterable # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + +import ddtrace +from ddtrace.internal.ci_visibility.constants import COVERAGE_TAG_NAME +from ddtrace.internal.ci_visibility.utils import get_relative_or_absolute_path_for_path +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) +_global_relative_file_paths_for_cov: Dict[str, Dict[str, str]] = {} + +try: + from coverage import Coverage + from coverage import version_info as coverage_version + + # this public attribute became private after coverage==6.3 + EXECUTE_ATTR = "_execute" if coverage_version > (6, 3) else "execute" +except ImportError: + Coverage = None # type: ignore[misc,assignment] + EXECUTE_ATTR = "" + + +def is_coverage_available(): + return Coverage is not None + + +def _initialize_coverage(root_dir): + coverage_kwargs = { + "data_file": None, + "source": [root_dir], + "config_file": False, + "omit": [ + "*/site-packages/*", + ], + } + cov_object = Coverage(**coverage_kwargs) + cov_object.set_option("run:parallel", True) + return cov_object + + +def _start_coverage(root_dir: str): + coverage = _initialize_coverage(root_dir) + coverage.start() + return coverage + + +def _stop_coverage(module): + if _module_has_dd_coverage_enabled(module): + module._dd_coverage.stop() + module._dd_coverage.erase() + del module._dd_coverage + + +def _module_has_dd_coverage_enabled(module, silent_mode: bool = False) -> bool: + if not hasattr(module, "_dd_coverage"): + if not silent_mode: + log.warning("Datadog Coverage has not been initiated") + return False + return True + + +def _coverage_has_valid_data(coverage_data: Coverage, silent_mode: bool = False) -> bool: + if not coverage_data._collector or len(coverage_data._collector.data) == 0: + if not silent_mode: + log.warning("No coverage collector or data found for item") + return False + return True + + +def _switch_coverage_context(coverage_data: Coverage, unique_test_name: str): + if not _coverage_has_valid_data(coverage_data, silent_mode=True): + return + coverage_data._collector.data.clear() # type: ignore[union-attr] + try: + coverage_data.switch_context(unique_test_name) + except RuntimeError as err: + log.warning(err) + + +def _report_coverage_to_span(coverage_data: Coverage, span: ddtrace.Span, root_dir: str): + span_id = str(span.trace_id) + if not _coverage_has_valid_data(coverage_data): + return + span.set_tag_str( + COVERAGE_TAG_NAME, + build_payload(coverage_data, root_dir, span_id), + ) + coverage_data._collector.data.clear() # type: ignore[union-attr] + + +def segments(lines: Iterable[int]) -> List[Tuple[int, int, int, int, int]]: + """Extract the relevant report data for a single file.""" + _segments = [] + for _key, g in groupby(enumerate(sorted(lines)), lambda x: x[1] - x[0]): + group = list(g) + start = group[0][1] + end = group[-1][1] + _segments.append((start, 0, end, 0, -1)) + + return _segments + + +def _lines(coverage: Coverage, context: Optional[str]) -> 
Dict[str, List[Tuple[int, int, int, int, int]]]: + if not coverage._collector or not coverage._collector.data: + return {} + + return { + k: segments(v.keys()) if isinstance(v, dict) else segments(v) # type: ignore + for k, v in list(coverage._collector.data.items()) + } + + +def build_payload(coverage: Coverage, root_dir: str, test_id: Optional[str] = None) -> str: + """ + Generate a CI Visibility coverage payload, formatted as follows: + + "files": [ + { + "filename": , + "segments": [ + [Int, Int, Int, Int, Int], # noqa:F401 + ] + }, + ... + ] + + For each segment of code for which there is coverage, there are always five integer values: + The first number indicates the start line of the code block (index starting in 1) + The second number indicates the start column of the code block (index starting in 1). Use value -1 if the + column is unknown. + The third number indicates the end line of the code block + The fourth number indicates the end column of the code block + The fifth number indicates the number of executions of the block + If the number is >0 then it indicates the number of executions + If the number is -1 then it indicates that the number of executions are unknown + + :param coverage: Coverage object containing coverage data + :param root_dir: the directory relative to which paths to covered files should be resolved + :param test_id: a unique identifier for the current test run + """ + root_dir_str = str(root_dir) + if root_dir_str not in _global_relative_file_paths_for_cov: + _global_relative_file_paths_for_cov[root_dir_str] = {} + files_data = [] + for filename, lines in _lines(coverage, test_id).items(): + if filename not in _global_relative_file_paths_for_cov[root_dir_str]: + _global_relative_file_paths_for_cov[root_dir_str][filename] = get_relative_or_absolute_path_for_path( + filename, root_dir_str + ) + if lines: + files_data.append( + {"filename": _global_relative_file_paths_for_cov[root_dir_str][filename], "segments": lines} + ) + else: + files_data.append({"filename": _global_relative_file_paths_for_cov[root_dir_str][filename]}) + + return json.dumps({"files": files_data}) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/encoder.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/encoder.py new file mode 100644 index 0000000..ad8fbfc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/encoder.py @@ -0,0 +1,208 @@ +import json +import threading +from typing import TYPE_CHECKING # noqa:F401 +from uuid import uuid4 + +from ddtrace.ext import SpanTypes +from ddtrace.internal._encoding import BufferedEncoder +from ddtrace.internal._encoding import packb as msgpack_packb +from ddtrace.internal.ci_visibility.constants import COVERAGE_TAG_NAME +from ddtrace.internal.ci_visibility.constants import EVENT_TYPE +from ddtrace.internal.ci_visibility.constants import MODULE_ID +from ddtrace.internal.ci_visibility.constants import MODULE_TYPE +from ddtrace.internal.ci_visibility.constants import SESSION_ID +from ddtrace.internal.ci_visibility.constants import SESSION_TYPE +from ddtrace.internal.ci_visibility.constants import SUITE_ID +from ddtrace.internal.ci_visibility.constants import SUITE_TYPE +from ddtrace.internal.encoding import JSONEncoderV2 +from ddtrace.internal.writer.writer import NoEncodableSpansError + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # 
noqa:F401 + + from ..span import Span # noqa:F401 + + +class CIVisibilityEncoderV01(BufferedEncoder): + content_type = "application/msgpack" + ALLOWED_METADATA_KEYS = ("language", "library_version", "runtime-id", "env") + PAYLOAD_FORMAT_VERSION = 1 + TEST_SUITE_EVENT_VERSION = 1 + TEST_EVENT_VERSION = 2 + + def __init__(self, *args): + super(CIVisibilityEncoderV01, self).__init__() + self._lock = threading.RLock() + self._metadata = {} + self._init_buffer() + self._metadata = {} + + def __len__(self): + with self._lock: + return len(self.buffer) + + def set_metadata(self, metadata): + self._metadata.update(metadata) + + def _init_buffer(self): + with self._lock: + self.buffer = [] + + def put(self, spans): + with self._lock: + self.buffer.append(spans) + + def encode_traces(self, traces): + return self._build_payload(traces=traces) + + def encode(self): + with self._lock: + payload = self._build_payload(self.buffer) + self._init_buffer() + return payload + + def _build_payload(self, traces): + normalized_spans = [self._convert_span(span, trace[0].context.dd_origin) for trace in traces for span in trace] + self._metadata = {k: v for k, v in self._metadata.items() if k in self.ALLOWED_METADATA_KEYS} + # TODO: Split the events in several payloads as needed to avoid hitting the intake's maximum payload size. + return CIVisibilityEncoderV01._pack_payload( + {"version": self.PAYLOAD_FORMAT_VERSION, "metadata": {"*": self._metadata}, "events": normalized_spans} + ) + + @staticmethod + def _pack_payload(payload): + return msgpack_packb(payload) + + def _convert_span(self, span, dd_origin): + # type: (Span, str) -> Dict[str, Any] + sp = JSONEncoderV2._span_to_dict(span) + sp = JSONEncoderV2._normalize_span(sp) + sp["type"] = span.get_tag(EVENT_TYPE) or span.span_type + sp["duration"] = span.duration_ns + sp["meta"] = dict(sorted(span._meta.items())) + sp["metrics"] = dict(sorted(span._metrics.items())) + if dd_origin is not None: + sp["meta"].update({"_dd.origin": dd_origin}) + + sp = CIVisibilityEncoderV01._filter_ids(sp) + + version = CIVisibilityEncoderV01.TEST_SUITE_EVENT_VERSION + if span.get_tag(EVENT_TYPE) == "test": + version = CIVisibilityEncoderV01.TEST_EVENT_VERSION + + if span.span_type == "test": + event_type = span.get_tag(EVENT_TYPE) + else: + event_type = "span" + + return {"version": version, "type": event_type, "content": sp} + + @staticmethod + def _filter_ids(sp): + """ + Remove trace/span/parent IDs if non-test event, move session/module/suite IDs from meta to outer content layer. 
+ """ + if sp["meta"].get(EVENT_TYPE) in [SESSION_TYPE, MODULE_TYPE, SUITE_TYPE]: + del sp["trace_id"] + del sp["span_id"] + del sp["parent_id"] + else: + sp["trace_id"] = int(sp.get("trace_id") or "1") + sp["parent_id"] = int(sp.get("parent_id") or "1") + sp["span_id"] = int(sp.get("span_id") or "1") + if sp["meta"].get(EVENT_TYPE) in [SESSION_TYPE, MODULE_TYPE, SUITE_TYPE, SpanTypes.TEST]: + test_session_id = sp["meta"].get(SESSION_ID) + if test_session_id: + sp[SESSION_ID] = int(test_session_id) + del sp["meta"][SESSION_ID] + if sp["meta"].get(EVENT_TYPE) in [MODULE_TYPE, SUITE_TYPE, SpanTypes.TEST]: + test_module_id = sp["meta"].get(MODULE_ID) + if test_module_id: + sp[MODULE_ID] = int(test_module_id) + del sp["meta"][MODULE_ID] + if sp["meta"].get(EVENT_TYPE) in [SUITE_TYPE, SpanTypes.TEST]: + test_suite_id = sp["meta"].get(SUITE_ID) + if test_suite_id: + sp[SUITE_ID] = int(test_suite_id) + del sp["meta"][SUITE_ID] + if COVERAGE_TAG_NAME in sp["meta"]: + del sp["meta"][COVERAGE_TAG_NAME] + return sp + + +class CIVisibilityCoverageEncoderV02(CIVisibilityEncoderV01): + PAYLOAD_FORMAT_VERSION = 2 + boundary = uuid4().hex + content_type = "multipart/form-data; boundary=%s" % boundary + itr_suite_skipping_mode = False + + def _set_itr_suite_skipping_mode(self, new_value): + self.itr_suite_skipping_mode = new_value + + def put(self, spans): + spans_with_coverage = [span for span in spans if COVERAGE_TAG_NAME in span.get_tags()] + if not spans_with_coverage: + raise NoEncodableSpansError() + return super(CIVisibilityCoverageEncoderV02, self).put(spans_with_coverage) + + def _build_coverage_attachment(self, data): + # type: (bytes) -> List[bytes] + return [ + b"--%s" % self.boundary.encode("utf-8"), + b'Content-Disposition: form-data; name="coverage1"; filename="coverage1.msgpack"', + b"Content-Type: application/msgpack", + b"", + data, + ] + + def _build_event_json_attachment(self): + # type: () -> List[bytes] + return [ + b"--%s" % self.boundary.encode("utf-8"), + b'Content-Disposition: form-data; name="event"; filename="event.json"', + b"Content-Type: application/json", + b"", + b'{"dummy":true}', + ] + + def _build_body(self, data): + # type: (bytes) -> List[bytes] + return ( + self._build_coverage_attachment(data) + + self._build_event_json_attachment() + + [b"--%s--" % self.boundary.encode("utf-8")] + ) + + def _build_data(self, traces): + # type: (List[List[Span]]) -> Optional[bytes] + normalized_covs = [ + self._convert_span(span, "") for trace in traces for span in trace if COVERAGE_TAG_NAME in span.get_tags() + ] + if not normalized_covs: + return None + # TODO: Split the events in several payloads as needed to avoid hitting the intake's maximum payload size. 
+ return msgpack_packb({"version": self.PAYLOAD_FORMAT_VERSION, "coverages": normalized_covs}) + + def _build_payload(self, traces): + # type: (List[List[Span]]) -> Optional[bytes] + data = self._build_data(traces) + if not data: + return None + return b"\r\n".join(self._build_body(data)) + + def _convert_span(self, span, dd_origin): + # type: (Span, str) -> Dict[str, Any] + converted_span = { + "test_session_id": int(span.get_tag(SESSION_ID) or "1"), + "test_suite_id": int(span.get_tag(SUITE_ID) or "1"), + "files": json.loads(span.get_tag(COVERAGE_TAG_NAME))["files"], + } + + if not self.itr_suite_skipping_mode: + converted_span["span_id"] = span.span_id + + return converted_span diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/filters.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/filters.py new file mode 100644 index 0000000..a6e4db8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/filters.py @@ -0,0 +1,40 @@ +from typing import TYPE_CHECKING # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +import ddtrace +from ddtrace.constants import AUTO_KEEP +from ddtrace.ext import SpanTypes +from ddtrace.ext import ci +from ddtrace.filters import TraceFilter + + +if TYPE_CHECKING: + from ddtrace import Span # noqa:F401 + + +class TraceCiVisibilityFilter(TraceFilter): + def __init__(self, tags, service): + # type: (Dict[Union[str, bytes], str], str) -> None + self._tags = tags + self._service = service + + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + if not trace: + return trace + + local_root = trace[0]._local_root + if not local_root or local_root.span_type != SpanTypes.TEST: + return None + + local_root.context.dd_origin = ci.CI_APP_TEST_ORIGIN + local_root.context.sampling_priority = AUTO_KEEP + for span in trace: + span.set_tags(self._tags) + span.service = self._service + span.set_tag_str(ci.LIBRARY_VERSION, ddtrace.__version__) + + return trace diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/git_client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/git_client.py new file mode 100644 index 0000000..e2eb4e8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/git_client.py @@ -0,0 +1,481 @@ +from ctypes import c_int +from enum import IntEnum +import json +from multiprocessing import Process +from multiprocessing import Value +import os +import time +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + +from ddtrace.ext import ci +from ddtrace.ext.git import _build_git_packfiles_with_details +from ddtrace.ext.git import _extract_clone_defaultremotename_with_details +from ddtrace.ext.git import _extract_latest_commits_with_details +from ddtrace.ext.git import _extract_upstream_sha +from ddtrace.ext.git import _get_rev_list_with_details +from ddtrace.ext.git import _is_shallow_repository_with_details +from ddtrace.ext.git import _unshallow_repository +from ddtrace.ext.git import _unshallow_repository_with_details +from ddtrace.ext.git import extract_commit_sha +from ddtrace.ext.git import extract_git_version +from ddtrace.ext.git import extract_remote_url +from ddtrace.internal.agent import get_trace_url +from ddtrace.internal.compat import JSONDecodeError +from 
ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter + +from .. import compat +from .. import telemetry +from ..utils.http import Response +from ..utils.http import get_connection +from ..utils.http import verify_url +from ..utils.time import StopWatch +from .constants import AGENTLESS_API_KEY_HEADER_NAME +from .constants import AGENTLESS_DEFAULT_SITE +from .constants import EVP_PROXY_AGENT_BASE_PATH +from .constants import EVP_SUBDOMAIN_HEADER_API_VALUE +from .constants import EVP_SUBDOMAIN_HEADER_NAME +from .constants import GIT_API_BASE_PATH +from .constants import REQUESTS_MODE +from .telemetry.constants import ERROR_TYPES +from .telemetry.constants import GIT_TELEMETRY_COMMANDS +from .telemetry.git import record_git_command +from .telemetry.git import record_objects_pack_data +from .telemetry.git import record_objects_pack_request +from .telemetry.git import record_search_commits + + +log = get_logger(__name__) + +# this exists only for the purpose of mocking in tests +RESPONSE = None + +# we're only interested in uploading .pack files +PACK_EXTENSION = ".pack" + +DEFAULT_TIMEOUT = 20 + +DEFAULT_METADATA_UPLOAD_TIMEOUT = 120 + + +class METADATA_UPLOAD_STATUS(IntEnum): + PENDING = 0 + IN_PROCESS = 1 + SUCCESS = 2 + FAILED = 3 + UNNECESSARY = 4 + + +FINISHED_METADATA_UPLOAD_STATUSES = [ + METADATA_UPLOAD_STATUS.FAILED, + METADATA_UPLOAD_STATUS.SUCCESS, + METADATA_UPLOAD_STATUS.UNNECESSARY, +] + + +class CIVisibilityGitClient(object): + def __init__( + self, + api_key, + requests_mode=REQUESTS_MODE.AGENTLESS_EVENTS, + base_url="", + ): + # type: (str, int, str) -> None + self._serializer = CIVisibilityGitClientSerializerV1(api_key) + self._worker = None # type: Optional[Process] + self._response = RESPONSE + self._requests_mode = requests_mode + self._metadata_upload_status = Value(c_int, METADATA_UPLOAD_STATUS.PENDING, lock=True) + if self._requests_mode == REQUESTS_MODE.EVP_PROXY_EVENTS: + self._base_url = get_trace_url() + EVP_PROXY_AGENT_BASE_PATH + GIT_API_BASE_PATH + elif self._requests_mode == REQUESTS_MODE.AGENTLESS_EVENTS: + self._base_url = "https://api.{}{}".format(os.getenv("DD_SITE", AGENTLESS_DEFAULT_SITE), GIT_API_BASE_PATH) + + def upload_git_metadata(self, cwd=None): + # type: (Optional[str]) -> None + self._tags = ci.tags(cwd=cwd) + if self._worker is None: + self._worker = Process( + target=CIVisibilityGitClient._run_protocol, + args=(self._serializer, self._requests_mode, self._base_url, self._metadata_upload_status), + kwargs={"_tags": self._tags, "_response": self._response, "cwd": cwd, "log_level": log.level}, + ) + self._worker.start() + log.debug("Upload git metadata to URL %s started with PID %s", self._base_url, self._worker.pid) + else: + log.debug("git metadata upload worker already started: %s", self._worker) + + def metadata_upload_finished(self): + return self._metadata_upload_status.value in FINISHED_METADATA_UPLOAD_STATUSES + + def _wait_for_metadata_upload(self, timeout=DEFAULT_METADATA_UPLOAD_TIMEOUT): + log.debug("Waiting up to %s seconds for git metadata upload to finish", timeout) + with StopWatch() as stopwatch: + while not self.metadata_upload_finished(): + log.debug("Waited %s so far, status is %s", stopwatch.elapsed(), self._metadata_upload_status.value) + if stopwatch.elapsed() >= timeout: + raise TimeoutError( + "Timed out waiting for git metadata upload to complete after %s seconds" % timeout + ) + + if self._worker.exitcode is not None: + log.debug( + "git metadata process exited 
with exitcode %s but upload status is %s", + self._worker.exitcode, + self._metadata_upload_status.value, + ) + raise ValueError("git metadata process exited with exitcode %s", self._worker.exitcode) + + self._worker.join(timeout=1) + time.sleep(1) + log.debug("git metadata upload completed, waited %s seconds", stopwatch.elapsed()) + + def wait_for_metadata_upload_status(self): + # type: () -> METADATA_UPLOAD_STATUS + self._wait_for_metadata_upload() + return self._metadata_upload_status.value # type: ignore + + @classmethod + def _run_protocol( + cls, + serializer, # CIVisibilityGitClientSerializerV1 + requests_mode, # int + base_url, # str + _metadata_upload_status, # METADATA_UPLOAD_STATUS + _tags=None, # Optional[Dict[str, str]] + _response=None, # Optional[Response] + cwd=None, # Optional[str] + log_level=0, # int + ): + # type: (...) -> None + log.setLevel(log_level) + telemetry.telemetry_writer.enable() + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.IN_PROCESS + try: + if _tags is None: + _tags = {} + repo_url = cls._get_repository_url(tags=_tags, cwd=cwd) + + # If all latest commits are known to our gitdb backend, assume that unshallowing is unnecessary + latest_commits = cls._get_latest_commits(cwd=cwd) + backend_commits = cls._search_commits( + requests_mode, base_url, repo_url, latest_commits, serializer, _response + ) + + if backend_commits is None: + log.debug("No initial backend commits found, returning early.") + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.FAILED + return + + commits_not_in_backend = list(set(latest_commits) - set(backend_commits)) + + if len(commits_not_in_backend) == 0: + log.debug("All latest commits found in backend, skipping metadata upload") + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.UNNECESSARY + return + + if cls._is_shallow_repository(cwd=cwd) and extract_git_version(cwd=cwd) >= (2, 27, 0): + log.debug("Shallow repository detected on git > 2.27 detected, unshallowing") + try: + cls._unshallow_repository(cwd=cwd) + except ValueError: + log.warning("Failed to unshallow repository, continuing to send pack data", exc_info=True) + + latest_commits = cls._get_latest_commits(cwd=cwd) + backend_commits = cls._search_commits( + requests_mode, base_url, repo_url, latest_commits, serializer, _response + ) + if backend_commits is None: + log.debug("No backend commits found, returning early.") + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.FAILED + return + + commits_not_in_backend = list(set(latest_commits) - set(backend_commits)) + + rev_list = cls._get_filtered_revisions( + excluded_commits=backend_commits, included_commits=commits_not_in_backend, cwd=cwd + ) + if rev_list: + log.debug("Building and uploading packfiles for revision list: %s", rev_list) + with _build_git_packfiles_with_details(rev_list, cwd=cwd) as (packfiles_prefix, packfiles_details): + record_git_command( + GIT_TELEMETRY_COMMANDS.PACK_OBJECTS, packfiles_details.duration, packfiles_details.returncode + ) + if packfiles_details.returncode == 0: + if cls._upload_packfiles( + requests_mode, base_url, repo_url, packfiles_prefix, serializer, _response, cwd=cwd + ): + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.SUCCESS + return + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.FAILED + log.warning("Failed to upload git metadata packfiles") + else: + log.debug("Revision list empty, no packfiles to build and upload") + _metadata_upload_status.value = METADATA_UPLOAD_STATUS.SUCCESS + record_objects_pack_data(0, 0) + finally: + 
telemetry.telemetry_writer.periodic(force_flush=True) + + @classmethod + def _get_repository_url(cls, tags=None, cwd=None): + # type: (Optional[Dict[str, str]], Optional[str]) -> str + if tags is None: + tags = {} + result = tags.get(ci.git.REPOSITORY_URL, "") + if not result: + result = extract_remote_url(cwd=cwd) + return result + + @classmethod + def _get_latest_commits(cls, cwd=None): + # type: (Optional[str]) -> List[str] + latest_commits, stderr, duration, returncode = _extract_latest_commits_with_details(cwd=cwd) + record_git_command(GIT_TELEMETRY_COMMANDS.GET_LOCAL_COMMITS, duration, returncode) + if returncode == 0: + return latest_commits.split("\n") if latest_commits else [] + raise ValueError(stderr) + + @classmethod + def _search_commits(cls, requests_mode, base_url, repo_url, latest_commits, serializer, _response): + # type: (int, str, str, List[str], CIVisibilityGitClientSerializerV1, Optional[Response]) -> Optional[List[str]] + payload = serializer.search_commits_encode(repo_url, latest_commits) + request_error = None + with StopWatch() as stopwatch: + try: + try: + response = _response or cls._do_request( + requests_mode, base_url, "/search_commits", payload, serializer + ) + except TimeoutError: + request_error = ERROR_TYPES.TIMEOUT + log.warning("Timeout searching commits") + return None + + if response.status >= 400: + request_error = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX + log.debug( + "Error searching commits, response status code: %s , response body: %s", + response.status, + response.body, + ) + log.debug("Response body: %s", response.body) + return None + + try: + result = serializer.search_commits_decode(response.body) + return result + except JSONDecodeError: + request_error = ERROR_TYPES.BAD_JSON + log.warning("Error searching commits, response not parsable: %s", response.body) + return None + finally: + record_search_commits(stopwatch.elapsed() * 1000, error=request_error) + + @classmethod + @fibonacci_backoff_with_jitter(attempts=5, until=lambda result: isinstance(result, Response)) + def _do_request(cls, requests_mode, base_url, endpoint, payload, serializer, headers=None, timeout=DEFAULT_TIMEOUT): + # type: (int, str, str, str, CIVisibilityGitClientSerializerV1, Optional[dict], int) -> Response + url = "{}/repository{}".format(base_url, endpoint) + _headers = { + AGENTLESS_API_KEY_HEADER_NAME: serializer.api_key, + } + if requests_mode == REQUESTS_MODE.EVP_PROXY_EVENTS: + _headers = { + EVP_SUBDOMAIN_HEADER_NAME: EVP_SUBDOMAIN_HEADER_API_VALUE, + } + if headers is not None: + _headers.update(headers) + try: + parsed_url = verify_url(url) + url_path = parsed_url.path + conn = get_connection(url, timeout=timeout) + log.debug("Sending request: %s %s %s %s", "POST", url_path, payload, _headers) + conn.request("POST", url_path, payload, _headers) + resp = compat.get_connection_response(conn) + log.debug("Response status: %s", resp.status) + result = Response.from_http_response(resp) + finally: + conn.close() + return result + + @classmethod + def _get_filtered_revisions(cls, excluded_commits, included_commits=None, cwd=None): + # type: (List[str], Optional[List[str]], Optional[str]) -> str + filtered_revisions, _, duration, returncode = _get_rev_list_with_details( + excluded_commits, included_commits, cwd=cwd + ) + record_git_command(GIT_TELEMETRY_COMMANDS.GET_OBJECTS, duration, returncode if returncode != 0 else None) + return filtered_revisions + + @classmethod + def _upload_packfiles(cls, requests_mode, base_url, repo_url, 
packfiles_prefix, serializer, _response, cwd=None): + # type: (int, str, str, str, CIVisibilityGitClientSerializerV1, Optional[Response], Optional[str]) -> bool + + sha = extract_commit_sha(cwd=cwd) + parts = packfiles_prefix.split("/") + directory = "/".join(parts[:-1]) + rand = parts[-1] + packfiles_uploaded_count = 0 + packfiles_uploaded_bytes = 0 + for filename in os.listdir(directory): + if not filename.startswith(rand) or not filename.endswith(PACK_EXTENSION): + continue + file_path = os.path.join(directory, filename) + content_type, payload = serializer.upload_packfile_encode(repo_url, sha, file_path) + headers = {"Content-Type": content_type} + with StopWatch() as stopwatch: + error_type = None + try: + response = _response or cls._do_request( + requests_mode, base_url, "/packfile", payload, serializer, headers=headers + ) + if response.status == 204: + packfiles_uploaded_count += 1 + packfiles_uploaded_bytes += len(payload) + elif response.status >= 400: + log.debug( + "Git packfile upload request for file %s (size: %s) failed with status: %s", + filename, + len(payload), + response.status, + ) + error_type = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX + except ConnectionRefusedError: + error_type = ERROR_TYPES.NETWORK + log.debug("Git packfile upload request for file %s failed: connection refused", filename) + except TimeoutError: + error_type = ERROR_TYPES.TIMEOUT + log.debug("Git packfile upload request for file %s (size: %s) timed out", filename, len(payload)) + finally: + duration = stopwatch.elapsed() * 1000 # StopWatch is in seconds + record_objects_pack_request(duration, error_type) + + record_objects_pack_data(packfiles_uploaded_count, packfiles_uploaded_bytes) + log.debug( + "Git packfiles upload succeeded, file count: %s, total size: %s", + packfiles_uploaded_count, + packfiles_uploaded_bytes, + ) + + return response.status == 204 + + @classmethod + def _is_shallow_repository(cls, cwd=None): + # type: () -> bool + is_shallow_repository, duration, returncode = _is_shallow_repository_with_details(cwd=cwd) + record_git_command(GIT_TELEMETRY_COMMANDS.CHECK_SHALLOW, duration, returncode if returncode != 0 else None) + return is_shallow_repository + + @classmethod + def _unshallow_repository(cls, cwd=None): + # type: () -> None + with StopWatch() as stopwatch: + error_exit_code = None + try: + remote, stderr, _, exit_code = _extract_clone_defaultremotename_with_details(cwd=cwd) + if exit_code != 0: + error_exit_code = exit_code + log.debug("Failed to get default remote: %s", stderr) + return + + try: + CIVisibilityGitClient._unshallow_repository_to_local_head(remote, cwd=cwd) + return + except ValueError as e: + log.debug("Could not unshallow repository to local head: %s", e) + + try: + CIVisibilityGitClient._unshallow_repository_to_upstream(remote, cwd=cwd) + return + except ValueError as e: + log.debug("Could not unshallow to upstream: %s", e) + + log.debug("Unshallowing to default") + _, unshallow_error, _, exit_code = _unshallow_repository_with_details(cwd=cwd, repo=remote) + if exit_code == 0: + log.debug("Unshallowing to default successful") + return + log.debug("Unshallowing failed: %s", unshallow_error) + error_exit_code = exit_code + return + finally: + duration = stopwatch.elapsed() * 1000 # StopWatch measures elapsed time in seconds + record_git_command(GIT_TELEMETRY_COMMANDS.UNSHALLOW, duration, error_exit_code) + + @classmethod + def _unshallow_repository_to_local_head(cls, remote, cwd=None): + # type: (str, Optional[str]) -> None + head
= extract_commit_sha(cwd=cwd) + log.debug("Unshallowing to local head %s", head) + _unshallow_repository(cwd=cwd, repo=remote, refspec=head) + log.debug("Unshallowing to local head successful") + + @classmethod + def _unshallow_repository_to_upstream(cls, remote, cwd=None): + # type (str, Optional[str) -> None + upstream = _extract_upstream_sha(cwd=cwd) + log.debug("Unshallowing to upstream %s", upstream) + _unshallow_repository(cwd=cwd, repo=remote, refspec=upstream) + log.debug("Unshallowing to upstream") + + +class CIVisibilityGitClientSerializerV1(object): + def __init__(self, api_key): + # type: (str) -> None + self.api_key = api_key + + def search_commits_encode(self, repo_url, latest_commits): + # type: (str, list[str]) -> str + return json.dumps( + {"meta": {"repository_url": repo_url}, "data": [{"id": sha, "type": "commit"} for sha in latest_commits]} + ) + + def search_commits_decode(self, payload): + # type: (str) -> List[str] + res = [] # type: List[str] + try: + if isinstance(payload, bytes): + parsed = json.loads(payload.decode()) + else: + parsed = json.loads(payload) + return [item["id"] for item in parsed["data"] if item["type"] == "commit"] + except KeyError: + log.warning("Expected information not found in search_commits response", exc_info=True) + except JSONDecodeError: + log.warning("Unexpected decode error in search_commits response", exc_info=True) + + return res + + def upload_packfile_encode(self, repo_url, sha, file_path): + # type: (str, str, str) -> Tuple[str, bytes] + BOUNDARY = b"----------boundary------" + CRLF = b"\r\n" + body = [] + metadata = {"data": {"id": sha, "type": "commit"}, "meta": {"repository_url": repo_url}} + body.extend( + [ + b"--" + BOUNDARY, + b'Content-Disposition: form-data; name="pushedSha"', + b"Content-Type: application/json", + b"", + json.dumps(metadata).encode("utf-8"), + ] + ) + file_name = os.path.basename(file_path) + f = open(file_path, "rb") + file_content = f.read() + f.close() + body.extend( + [ + b"--" + BOUNDARY, + b'Content-Disposition: form-data; name="packfile"; filename="%s"' % file_name.encode("utf-8"), + b"Content-Type: application/octet-stream", + b"", + file_content, + ] + ) + body.extend([b"--" + BOUNDARY + b"--", b""]) + return "multipart/form-data; boundary=%s" % BOUNDARY.decode("utf-8"), CRLF.join(body) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/recorder.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/recorder.py new file mode 100644 index 0000000..311ad29 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/recorder.py @@ -0,0 +1,600 @@ +from collections import defaultdict +import json +import os +import socket +from typing import TYPE_CHECKING # noqa:F401 +from typing import NamedTuple # noqa:F401 +from uuid import uuid4 + +from ddtrace import Tracer +from ddtrace import config as ddconfig +from ddtrace.contrib import trace_utils +from ddtrace.ext import ci +from ddtrace.ext import test +from ddtrace.internal import atexit +from ddtrace.internal import compat +from ddtrace.internal import telemetry +from ddtrace.internal.agent import get_connection +from ddtrace.internal.agent import get_trace_url +from ddtrace.internal.ci_visibility.coverage import is_coverage_available +from ddtrace.internal.ci_visibility.filters import TraceCiVisibilityFilter +from ddtrace.internal.compat import JSONDecodeError +from ddtrace.internal.compat import parse +from ddtrace.internal.logger import get_logger +from ddtrace.internal.service import 
Service +from ddtrace.internal.utils.formats import asbool +from ddtrace.internal.writer.writer import Response +from ddtrace.provider import CIContextProvider + +from .. import agent +from ..utils.http import verify_url +from ..utils.time import StopWatch +from .constants import AGENTLESS_API_KEY_HEADER_NAME +from .constants import AGENTLESS_DEFAULT_SITE +from .constants import CUSTOM_CONFIGURATIONS_PREFIX +from .constants import EVP_PROXY_AGENT_BASE_PATH +from .constants import EVP_SUBDOMAIN_HEADER_API_VALUE +from .constants import EVP_SUBDOMAIN_HEADER_EVENT_VALUE +from .constants import EVP_SUBDOMAIN_HEADER_NAME +from .constants import REQUESTS_MODE +from .constants import SETTING_ENDPOINT +from .constants import SKIPPABLE_ENDPOINT +from .constants import SUITE +from .constants import TEST +from .constants import TRACER_PARTIAL_FLUSH_MIN_SPANS +from .git_client import METADATA_UPLOAD_STATUS +from .git_client import CIVisibilityGitClient +from .telemetry.constants import ERROR_TYPES +from .telemetry.git import record_settings +from .writer import CIVisibilityWriter + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import DefaultDict # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Tuple # noqa:F401 + + from ddtrace.settings import IntegrationConfig # noqa:F401 + +log = get_logger(__name__) + +DEFAULT_TIMEOUT = 15 + +_CIVisibilitySettings = NamedTuple( + "_CIVisibilitySettings", + [("coverage_enabled", bool), ("skipping_enabled", bool), ("require_git", bool), ("itr_enabled", bool)], +) + + +class CIVisibilityAuthenticationException(Exception): + pass + + +def _extract_repository_name_from_url(repository_url): + # type: (str) -> str + try: + return parse.urlparse(repository_url).path.rstrip(".git").rpartition("/")[-1] + except ValueError: + # In case of parsing error, default to repository url + log.warning("Repository name cannot be parsed from repository_url: %s", repository_url) + return repository_url + + +def _get_git_repo(): + # this exists only for the purpose of patching in tests + return None + + +def _get_custom_configurations(): + # type () -> dict + custom_configurations = {} + for tag, value in ddconfig.tags.items(): + if tag.startswith(CUSTOM_CONFIGURATIONS_PREFIX): + custom_configurations[tag.replace("%s." 
% CUSTOM_CONFIGURATIONS_PREFIX, "", 1)] = value + + return custom_configurations + + +def _do_request(method, url, payload, headers): + # type: (str, str, str, Dict) -> Response + try: + parsed_url = verify_url(url) + url_path = parsed_url.path + conn = get_connection(url, timeout=DEFAULT_TIMEOUT) + log.debug("Sending request: %s %s %s %s", method, url_path, payload, headers) + conn.request("POST", url_path, payload, headers) + resp = compat.get_connection_response(conn) + log.debug("Response status: %s", resp.status) + result = Response.from_http_response(resp) + finally: + conn.close() + return result + + +class CIVisibility(Service): + _instance = None # type: Optional[CIVisibility] + enabled = False + _test_suites_to_skip = None # type: Optional[List[str]] + _tests_to_skip = defaultdict(list) # type: DefaultDict[str, List[str]] + + def __init__(self, tracer=None, config=None, service=None): + # type: (Optional[Tracer], Optional[IntegrationConfig], Optional[str]) -> None + super(CIVisibility, self).__init__() + + telemetry.telemetry_writer.enable() + + if tracer: + self.tracer = tracer + else: + if asbool(os.getenv("_DD_CIVISIBILITY_USE_CI_CONTEXT_PROVIDER")): + # Create a new CI tracer + self.tracer = Tracer(context_provider=CIContextProvider()) + else: + self.tracer = Tracer() + + # Partial traces are required for ITR to work in suite-level skipping for long test sessions, but we + # assume that a tracer is already configured if it's been passed in. + self.tracer.configure(partial_flush_enabled=True, partial_flush_min_spans=TRACER_PARTIAL_FLUSH_MIN_SPANS) + + self._configurations = ci._get_runtime_and_os_metadata() + custom_configurations = _get_custom_configurations() + if custom_configurations: + self._configurations["custom"] = custom_configurations + + self._api_key = os.getenv("_CI_DD_API_KEY", os.getenv("DD_API_KEY")) + + self._dd_site = os.getenv("DD_SITE", AGENTLESS_DEFAULT_SITE) + self._suite_skipping_mode = asbool(os.getenv("_DD_CIVISIBILITY_ITR_SUITE_MODE", default=False)) + self.config = config # type: Optional[IntegrationConfig] + self._tags = ci.tags(cwd=_get_git_repo()) # type: Dict[str, str] + self._service = service + self._codeowners = None + self._root_dir = None + self._should_upload_git_metadata = True + + int_service = None + if self.config is not None: + int_service = trace_utils.int_service(None, self.config) + # check if repository URL detected from environment or .git, and service name unchanged + if self._tags.get(ci.git.REPOSITORY_URL, None) and self.config and int_service == self.config._default_service: + self._service = _extract_repository_name_from_url(self._tags[ci.git.REPOSITORY_URL]) + elif self._service is None and int_service is not None: + self._service = int_service + + if ddconfig._ci_visibility_agentless_enabled: + if not self._api_key: + raise EnvironmentError( + "DD_CIVISIBILITY_AGENTLESS_ENABLED is set, but DD_API_KEY is not set, so ddtrace " + "cannot be initialized." 
+ ) + requests_mode_str = "agentless" + self._requests_mode = REQUESTS_MODE.AGENTLESS_EVENTS + elif self._agent_evp_proxy_is_available(): + requests_mode_str = "agent EVP proxy" + self._requests_mode = REQUESTS_MODE.EVP_PROXY_EVENTS + else: + requests_mode_str = "APM (some features will be disabled)" + self._requests_mode = REQUESTS_MODE.TRACES + self._should_upload_git_metadata = False + + if self._should_upload_git_metadata: + self._git_client = CIVisibilityGitClient(api_key=self._api_key or "", requests_mode=self._requests_mode) + self._git_client.upload_git_metadata(cwd=_get_git_repo()) + + self._api_settings = self._check_enabled_features() + + self._collect_coverage_enabled = self._should_collect_coverage(self._api_settings.coverage_enabled) + + self._configure_writer(coverage_enabled=self._collect_coverage_enabled) + + log.info("Service: %s (env: %s)", self._service, ddconfig.env) + log.info("Requests mode: %s", requests_mode_str) + log.info("Git metadata upload enabled: %s", self._should_upload_git_metadata) + log.info("API-provided settings: coverage collection: %s", self._api_settings.coverage_enabled) + log.info( + "API-provided settings: Intelligent Test Runner: %s, test skipping: %s", + self._api_settings.itr_enabled, + self._api_settings.skipping_enabled, + ) + log.info("Detected configurations: %s", str(self._configurations)) + + try: + from ddtrace.internal.codeowners import Codeowners + + self._codeowners = Codeowners() + except ValueError: + log.warning("CODEOWNERS file is not available") + except Exception: + log.warning("Failed to load CODEOWNERS", exc_info=True) + + @staticmethod + def _should_collect_coverage(coverage_enabled_by_api): + if not coverage_enabled_by_api and not asbool( + os.getenv("_DD_CIVISIBILITY_ITR_FORCE_ENABLE_COVERAGE", default=False) + ): + return False + if not is_coverage_available(): + log.warning( + "CI Visibility code coverage tracking is enabled, but the `coverage` package is not installed. "
+ "To use code coverage tracking, please install `coverage` from https://pypi.org/project/coverage/" + ) + return False + return True + + def _check_settings_api(self, url, headers): + # type: (str, Dict[str, str]) -> _CIVisibilitySettings + payload = { + "data": { + "id": str(uuid4()), + "type": "ci_app_test_service_libraries_settings", + "attributes": { + "test_level": SUITE if self._suite_skipping_mode else TEST, + "service": self._service, + "env": ddconfig.env, + "repository_url": self._tags.get(ci.git.REPOSITORY_URL), + "sha": self._tags.get(ci.git.COMMIT_SHA), + "branch": self._tags.get(ci.git.BRANCH), + "configurations": self._configurations, + }, + } + } + + sw = StopWatch() + sw.start() + try: + response = _do_request("POST", url, json.dumps(payload), headers) + except TimeoutError: + record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.TIMEOUT) + raise + if response.status >= 400: + error_code = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX + record_settings(sw.elapsed() * 1000, error=error_code) + if response.status == 403: + raise CIVisibilityAuthenticationException() + raise ValueError("API response status code: %d", response.status) + try: + if isinstance(response.body, bytes): + parsed = json.loads(response.body.decode()) + else: + parsed = json.loads(response.body) + except JSONDecodeError: + record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.BAD_JSON) + raise + + if "errors" in parsed and parsed["errors"][0] == "Not found": + record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.UNKNOWN) + raise ValueError("Settings response contained an error, disabling Intelligent Test Runner") + + log.debug("Parsed API response: %s", parsed) + + try: + attributes = parsed["data"]["attributes"] + coverage_enabled = attributes["code_coverage"] + skipping_enabled = attributes["tests_skipping"] + require_git = attributes["require_git"] + itr_enabled = attributes.get("itr_enabled", False) + except KeyError: + record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.UNKNOWN) + raise + + record_settings(sw.elapsed() * 1000, coverage_enabled, skipping_enabled, require_git, itr_enabled) + + return _CIVisibilitySettings(coverage_enabled, skipping_enabled, require_git, itr_enabled) + + def _check_enabled_features(self): + # type: () -> _CIVisibilitySettings + # DEV: Remove this ``if`` once ITR is in GA + _error_return_value = _CIVisibilitySettings(False, False, False, False) + + if not ddconfig._ci_visibility_intelligent_testrunner_enabled: + return _error_return_value + + if self._requests_mode == REQUESTS_MODE.EVP_PROXY_EVENTS: + url = get_trace_url() + EVP_PROXY_AGENT_BASE_PATH + SETTING_ENDPOINT + _headers = { + EVP_SUBDOMAIN_HEADER_NAME: EVP_SUBDOMAIN_HEADER_API_VALUE, + } + log.debug("Making EVP request to agent: url=%s, headers=%s", url, _headers) + elif self._requests_mode == REQUESTS_MODE.AGENTLESS_EVENTS: + if not self._api_key: + log.debug("Cannot make request to setting endpoint if API key is not set") + return _error_return_value + url = "https://api." 
+ self._dd_site + SETTING_ENDPOINT + _headers = { + AGENTLESS_API_KEY_HEADER_NAME: self._api_key, + "Content-Type": "application/json", + } + else: + log.warning("Cannot make requests to setting endpoint if mode is not agentless or evp proxy") + return _error_return_value + + try: + settings = self._check_settings_api(url, _headers) + except CIVisibilityAuthenticationException: + # Authentication exception is handled during enable() to prevent the service from being used + raise + except Exception: + log.warning( + "Error checking Intelligent Test Runner API, disabling coverage collection and test skipping", + exc_info=True, + ) + return _error_return_value + + if settings.require_git: + log.info("Settings API requires git metadata, waiting for git metadata upload to complete") + try: + try: + if self._git_client.wait_for_metadata_upload_status() == METADATA_UPLOAD_STATUS.FAILED: + log.warning("Metadata upload failed, test skipping will be best effort") + except ValueError: + log.warning( + "Error waiting for git metadata upload, test skipping will be best effort", exc_info=True + ) + except TimeoutError: + log.warning("Timeout waiting for metadata upload, test skipping will be best effort") + + # The most recent API response overrides the first one + try: + settings = self._check_settings_api(url, _headers) + except Exception: + log.warning( + "Error checking Intelligent Test Runner API after git metadata upload," + " disabling coverage and test skipping", + exc_info=True, + ) + return _error_return_value + if settings.require_git: + log.warning("git metadata upload did not complete in time, test skipping will be best effort") + + return settings + + def _configure_writer(self, coverage_enabled=False, requests_mode=None): + writer = None + if requests_mode is None: + requests_mode = self._requests_mode + + if requests_mode == REQUESTS_MODE.AGENTLESS_EVENTS: + headers = {"dd-api-key": self._api_key} + writer = CIVisibilityWriter( + headers=headers, + coverage_enabled=coverage_enabled, + itr_suite_skipping_mode=self._suite_skipping_mode, + ) + elif requests_mode == REQUESTS_MODE.EVP_PROXY_EVENTS: + writer = CIVisibilityWriter( + intake_url=agent.get_trace_url(), + headers={EVP_SUBDOMAIN_HEADER_NAME: EVP_SUBDOMAIN_HEADER_EVENT_VALUE}, + use_evp=True, + coverage_enabled=coverage_enabled, + itr_suite_skipping_mode=self._suite_skipping_mode, + ) + if writer is not None: + self.tracer.configure(writer=writer) + + def _agent_evp_proxy_is_available(self): + # type: () -> bool + try: + info = agent.info() + except Exception: + info = None + + if info: + endpoints = info.get("endpoints", []) + if endpoints and any(EVP_PROXY_AGENT_BASE_PATH in endpoint for endpoint in endpoints): + return True + return False + + @classmethod + def test_skipping_enabled(cls): + if not cls.enabled or asbool(os.getenv("_DD_CIVISIBILITY_ITR_PREVENT_TEST_SKIPPING", default=False)): + return False + return cls._instance and cls._instance._api_settings.skipping_enabled + + def _fetch_tests_to_skip(self, skipping_mode): + # Make sure git uploading has finished + # this will block the thread until that happens + try: + try: + metadata_upload_status = self._git_client.wait_for_metadata_upload_status() + if metadata_upload_status not in [METADATA_UPLOAD_STATUS.SUCCESS, METADATA_UPLOAD_STATUS.UNNECESSARY]: + log.warning("git metadata upload was not successful, some tests may not be skipped") + except ValueError: + log.warning( + "Error waiting for metadata upload to complete while fetching tests to skip" + ", some tests may not 
be skipped", + exc_info=True, + ) + except TimeoutError: + log.debug("Timed out waiting for git metadata upload, some tests may not be skipped") + + payload = { + "data": { + "type": "test_params", + "attributes": { + "service": self._service, + "env": ddconfig.env, + "repository_url": self._tags.get(ci.git.REPOSITORY_URL), + "sha": self._tags.get(ci.git.COMMIT_SHA), + "configurations": self._configurations, + "test_level": skipping_mode, + }, + } + } + + _headers = { + "dd-api-key": self._api_key, + "Content-Type": "application/json", + } + if self._requests_mode == REQUESTS_MODE.EVP_PROXY_EVENTS: + url = get_trace_url() + EVP_PROXY_AGENT_BASE_PATH + SKIPPABLE_ENDPOINT + _headers = { + EVP_SUBDOMAIN_HEADER_NAME: EVP_SUBDOMAIN_HEADER_API_VALUE, + } + elif self._requests_mode == REQUESTS_MODE.AGENTLESS_EVENTS: + url = "https://api." + self._dd_site + SKIPPABLE_ENDPOINT + else: + log.warning("Cannot make requests to skippable endpoint if mode is not agentless or evp proxy") + return + + try: + response = _do_request("POST", url, json.dumps(payload), _headers) + except (TimeoutError, socket.timeout): + log.warning("Request timeout while fetching skippable tests") + self._test_suites_to_skip = [] + return + + self._test_suites_to_skip = [] + + if response.status >= 400: + log.warning("Skippable tests request responded with status %d", response.status) + return + try: + if isinstance(response.body, bytes): + parsed = json.loads(response.body.decode()) + else: + parsed = json.loads(response.body) + except json.JSONDecodeError: + log.warning("Skippable tests request responded with invalid JSON '%s'", response.body) + return + + if "data" not in parsed: + log.warning("Skippable tests request missing data, no tests will be skipped") + return + + try: + for item in parsed["data"]: + if item["type"] == skipping_mode and "suite" in item["attributes"]: + module = item["attributes"].get("configurations", {}).get("test.bundle", "").replace(".", "/") + path = "/".join((module, item["attributes"]["suite"])) if module else item["attributes"]["suite"] + + if skipping_mode == SUITE: + self._test_suites_to_skip.append(path) + else: + self._tests_to_skip[path].append(item["attributes"]["name"]) + except Exception: + log.warning("Error processing skippable test data, no tests will be skipped", exc_info=True) + self._test_suites_to_skip = [] + self._tests_to_skip = defaultdict(list) + + def _should_skip_path(self, path, name, test_skipping_mode=None): + if test_skipping_mode is None: + _test_skipping_mode = SUITE if self._suite_skipping_mode else TEST + else: + _test_skipping_mode = test_skipping_mode + + if _test_skipping_mode == SUITE: + return os.path.relpath(path) in self._test_suites_to_skip + else: + return name in self._tests_to_skip[os.path.relpath(path)] + return False + + @classmethod + def enable(cls, tracer=None, config=None, service=None): + # type: (Optional[Tracer], Optional[Any], Optional[str]) -> None + log.debug("Enabling %s", cls.__name__) + + if ddconfig._ci_visibility_agentless_enabled: + if not os.getenv("_CI_DD_API_KEY", os.getenv("DD_API_KEY")): + log.critical( + "%s disabled: environment variable DD_CIVISIBILITY_AGENTLESS_ENABLED is true but" + " DD_API_KEY is not set", + cls.__name__, + ) + cls.enabled = False + return + + if cls._instance is not None: + log.debug("%s already enabled", cls.__name__) + return + + try: + cls._instance = cls(tracer=tracer, config=config, service=service) + except CIVisibilityAuthenticationException: + log.warning("Authentication error, disabling CI 
Visibility, please check Datadog API key") + cls.enabled = False + return + + cls.enabled = True + + cls._instance.start() + atexit.register(cls.disable) + + log.debug("%s enabled", cls.__name__) + log.info( + "Final settings: coverage collection: %s, test skipping: %s", + cls._instance._collect_coverage_enabled, + CIVisibility.test_skipping_enabled(), + ) + + @classmethod + def disable(cls): + # type: () -> None + if cls._instance is None: + log.debug("%s not enabled", cls.__name__) + return + log.debug("Disabling %s", cls.__name__) + atexit.unregister(cls.disable) + + cls._instance.stop() + cls._instance = None + cls.enabled = False + + telemetry.telemetry_writer.periodic(force_flush=True) + + log.debug("%s disabled", cls.__name__) + + def _start_service(self): + # type: () -> None + tracer_filters = self.tracer._filters + if not any(isinstance(tracer_filter, TraceCiVisibilityFilter) for tracer_filter in tracer_filters): + tracer_filters += [TraceCiVisibilityFilter(self._tags, self._service)] # type: ignore[arg-type] + self.tracer.configure(settings={"FILTERS": tracer_filters}) + + if self.test_skipping_enabled() and (not self._tests_to_skip and self._test_suites_to_skip is None): + skipping_level = SUITE if self._suite_skipping_mode else TEST + self._fetch_tests_to_skip(skipping_level) + if self._suite_skipping_mode: + if self._test_suites_to_skip is None: + skippable_items_count = 0 + log.warning("Suites to skip remains None after fetching tests") + else: + skippable_items_count = len(self._test_suites_to_skip) + else: + skippable_items_count = sum([len(skippable_tests) for skippable_tests in self._tests_to_skip.values()]) + log.info("Intelligent Test Runner skipping level: %s", skipping_level) + log.info("Skippable items fetched: %s", skippable_items_count) + + def _stop_service(self): + # type: () -> None + if self._should_upload_git_metadata and not self._git_client.metadata_upload_finished(): + log.debug("git metadata upload still in progress, waiting before shutting down") + try: + try: + self._git_client._wait_for_metadata_upload(timeout=self.tracer.SHUTDOWN_TIMEOUT) + except ValueError: + log.debug("Error waiting for metadata upload to complete during shutdown", exc_info=True) + except TimeoutError: + log.debug("Timed out waiting for metadata upload to complete during shutdown.") + try: + self.tracer.shutdown() + except Exception: + log.warning("Failed to shutdown tracer", exc_info=True) + + @classmethod + def set_codeowners_of(cls, location, span=None): + if not cls.enabled or cls._instance is None or cls._instance._codeowners is None or not location: + return + + span = span or cls._instance.tracer.current_span() + if span is None: + return + + try: + handles = cls._instance._codeowners.of(location) + if handles: + span.set_tag(test.CODEOWNERS, json.dumps(handles)) + except KeyError: + log.debug("no matching codeowners for %s", location) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/constants.py new file mode 100644 index 0000000..bdf8796 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/constants.py @@ -0,0 +1,44 @@ +from enum import Enum + + +CIVISIBILITY_TELEMETRY_NAMESPACE = 
"civisibility" + + +class ERROR_TYPES(str, Enum): + TIMEOUT = "timeout" + NETWORK = "network" + CODE_4XX = "status_code_4xx_response" + CODE_5XX = "status_code_5xx_response" + BAD_JSON = "bad_json" + UNKNOWN = "unknown" + + +class GIT_TELEMETRY_COMMANDS(str, Enum): + GET_REPOSITORY = "get_repository" + GET_BRANCH = "get_branch" + CHECK_SHALLOW = "check_shallow" + UNSHALLOW = "unshallow" + GET_LOCAL_COMMITS = "get_local_commits" + GET_OBJECTS = "get_objects" + PACK_OBJECTS = "pack_objects" + + +class GIT_TELEMETRY(str, Enum): + COMMAND_COUNT = "git.command" + COMMAND_MS = "git.command_ms" + COMMAND_ERRORS = "git.command_errors" + + SEARCH_COMMITS_COUNT = "git_requests.search_commits" + SEARCH_COMMITS_MS = "git_requests.search_commits_ms" + SEARCH_COMMITS_ERRORS = "git_requests.search_commits_errors" + + OBJECTS_PACK_COUNT = "git_requests.objects_pack" + OBJECTS_PACK_MS = "git_requests.objects_pack_ms" + OBJECTS_PACK_ERRORS = "git_requests.objects_pack_errors" + OBJECTS_PACK_FILES = "git_requests.objects_pack_files" + OBJECTS_PACK_BYTES = "git_requests.objects_pack_bytes" + + SETTINGS_COUNT = "git_requests.settings" + SETTINGS_MS = "git_requests.settings_ms" + SETTINGS_ERRORS = "git_requests.settings_errors" + SETTINGS_RESPONSE = "git_requests.settings_response" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/git.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/git.py new file mode 100644 index 0000000..c1ac9b6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/git.py @@ -0,0 +1,90 @@ +from typing import Optional + +from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE +from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES +from ddtrace.internal.ci_visibility.telemetry.constants import GIT_TELEMETRY +from ddtrace.internal.ci_visibility.telemetry.constants import GIT_TELEMETRY_COMMANDS +from ddtrace.internal.ci_visibility.telemetry.utils import skip_if_agentless +from ddtrace.internal.logger import get_logger +from ddtrace.internal.telemetry import telemetry_writer + + +log = get_logger(__name__) + + +@skip_if_agentless +def record_git_command(command: GIT_TELEMETRY_COMMANDS, duration: float, exit_code: Optional[int]) -> None: + log.debug("Recording git command telemetry: %s, %s, %s", command, duration, exit_code) + tags = (("command", command),) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_COUNT, 1, tags) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_MS, duration, tags) + if exit_code is not None and exit_code != 0: + error_tags = (("command", command), ("exit_code", str(exit_code))) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.COMMAND_ERRORS, 1, error_tags) + + +@skip_if_agentless +def record_search_commits(duration: float, error: Optional[ERROR_TYPES] = None) -> None: + log.debug("Recording search commits telemetry: %s, %s", duration, error) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_COUNT, 1) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_MS, duration) + if error is not None: + error_tags = (("error_type", str(error)),) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SEARCH_COMMITS_ERRORS, 1, error_tags) + + +@skip_if_agentless +def record_objects_pack_request(duration: float, error: Optional[ERROR_TYPES] = None) -> None: + log.debug("Recording 
objects pack request telmetry: %s, %s", duration, error) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_COUNT, 1) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_MS, duration) + if error is not None: + error_tags = (("error", error),) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_ERRORS, 1, error_tags) + + +@skip_if_agentless +def record_objects_pack_data(num_files: int, num_bytes: int) -> None: + log.debug("Recording objects pack data telemetry: %s, %s", num_files, num_bytes) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_BYTES, num_bytes) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_FILES, num_files) + + +@skip_if_agentless +def record_settings( + duration: float, + coverage_enabled: Optional[bool] = False, + skipping_enabled: Optional[bool] = False, + require_git: Optional[bool] = False, + itr_enabled: Optional[bool] = False, + error: Optional[ERROR_TYPES] = None, +) -> None: + log.debug( + "Recording settings telemetry: %s, %s, %s, %s, %s, %s", + duration, + coverage_enabled, + skipping_enabled, + require_git, + itr_enabled, + error, + ) + # Telemetry "booleans" are true if they exist, otherwise false + response_tags = [] + if coverage_enabled: + response_tags.append(("coverage_enabled", "1")) + if skipping_enabled: + response_tags.append(("itrskip_enabled", "1")) + if require_git: + response_tags.append(("require_git", "1")) + if itr_enabled: + response_tags.append(("itrskip_enabled", "1")) + + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_COUNT, 1) + telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_MS, duration) + + telemetry_writer.add_count_metric( + _NAMESPACE, GIT_TELEMETRY.SETTINGS_RESPONSE, 1, tuple(response_tags) if response_tags else None + ) + if error is not None: + error_tags = (("error", error),) + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_ERRORS, 1, error_tags) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/utils.py new file mode 100644 index 0000000..fd82e6c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/telemetry/utils.py @@ -0,0 +1,17 @@ +from ddtrace import config as ddconfig +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def skip_if_agentless(func): + """Deocrator to skip sending telemetry if we are in agentless mode as it is not currently supported.""" + + def wrapper(*args, **kwargs): + if ddconfig._ci_visibility_agentless_enabled: + log.debug("Running in agentless mode, skipping sending telemetry") + return + func(*args, **kwargs) + + return wrapper diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/utils.py new file mode 100644 index 0000000..1944ec8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/utils.py @@ -0,0 +1,141 @@ +import inspect +import logging +import os +import re +import typing + +import ddtrace +from ddtrace import config as ddconfig +from ddtrace.contrib.coverage.constants import PCT_COVERED_KEY +from ddtrace.ext import test +from ddtrace.internal.ci_visibility.constants import CIVISIBILITY_LOG_FILTER_RE +from ddtrace.internal.logger import get_logger + + +log = 
get_logger(__name__) + + +def get_relative_or_absolute_path_for_path(path: str, start_directory: str): + try: + relative_path = os.path.relpath(path, start=start_directory) + except ValueError: + log.debug( + "Tried to collect relative path but it is using different drive paths on Windows, " + "using absolute path instead", + ) + return os.path.abspath(path) + return relative_path + + +def get_source_file_path_for_test_method(test_method_object, repo_directory: str) -> typing.Union[str, None]: + try: + file_object = inspect.getfile(test_method_object) + except TypeError: + return "" + + return get_relative_or_absolute_path_for_path(file_object, repo_directory) + + +def get_source_lines_for_test_method( + test_method_object, +) -> typing.Union[typing.Tuple[int, int], typing.Tuple[None, None]]: + try: + source_lines_tuple = inspect.getsourcelines(test_method_object) + except (TypeError, OSError): + return None, None + start_line = source_lines_tuple[1] + end_line = start_line + len(source_lines_tuple[0]) + return start_line, end_line + + +def _add_start_end_source_file_path_data_to_span( + span: ddtrace.Span, test_method_object, test_name: str, repo_directory: str +): + if not test_method_object: + log.debug( + "Tried to collect source start/end lines for test method %s but test method object could not be found", + test_name, + ) + return + source_file_path = get_source_file_path_for_test_method(test_method_object, repo_directory) + if not source_file_path: + log.debug( + "Tried to collect file path for test %s but it is a built-in Python function", + test_name, + ) + return + start_line, end_line = get_source_lines_for_test_method(test_method_object) + if not start_line or not end_line: + log.debug("Tried to collect source start/end lines for test method %s but an exception was raised", test_name) + span.set_tag_str(test.SOURCE_FILE, source_file_path) + if start_line: + span.set_tag(test.SOURCE_START, start_line) + if end_line: + span.set_tag(test.SOURCE_END, end_line) + + +def _add_pct_covered_to_span(coverage_data: dict, span: ddtrace.Span): + if not coverage_data or PCT_COVERED_KEY not in coverage_data: + log.warning("Tried to add total covered percentage to session span but no data was found") + return + lines_pct_value = coverage_data[PCT_COVERED_KEY] + if type(lines_pct_value) != float: + log.warning("Tried to add total covered percentage to session span but the format was unexpected") + return + span.set_tag(test.TEST_LINES_PCT, lines_pct_value) + + +def _generate_fully_qualified_test_name(test_module_path: str, test_suite_name: str, test_name: str) -> str: + return "{}.{}.{}".format(test_module_path, test_suite_name, test_name) + + +def _generate_fully_qualified_module_name(test_module_path: str, test_suite_name: str) -> str: + return "{}.{}".format(test_module_path, test_suite_name) + + +def take_over_logger_stream_handler(remove_ddtrace_stream_handlers=True): + """Creates a handler with a filter for CIVisibility-specific messages. The also removes the existing + handlers on the DDTrace logger, to prevent double-logging. + + This is useful for testrunners (currently pytest) that have their own logger. + + NOTE: This should **only** be called from testrunner-level integrations (eg: pytest, unittest). 
+ """ + if ddconfig._debug_mode: + log.debug("CIVisibility not taking over ddtrace logger handler because debug mode is enabled") + return + + level = ddconfig.ci_visibility_log_level + + if level.upper() == "NONE": + log.debug("CIVisibility not taking over ddtrace logger because level is set to: %s", level) + return + + root_logger = logging.getLogger() + + if remove_ddtrace_stream_handlers: + log.debug("CIVisibility removing DDTrace logger handler") + ddtrace_logger = logging.getLogger("ddtrace") + for handler in ddtrace_logger.handlers: + ddtrace_logger.removeHandler(handler) + else: + log.warning("Keeping DDTrace logger handler, double logging is likely") + + logger_name_re = re.compile(CIVISIBILITY_LOG_FILTER_RE) + + ci_visibility_handler = logging.StreamHandler() + ci_visibility_handler.addFilter(lambda record: bool(logger_name_re.match(record.name))) + ci_visibility_handler.setFormatter( + logging.Formatter("[Datadog CI Visibility] %(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s") + ) + + try: + ci_visibility_handler.setLevel(level.upper()) + except ValueError: + log.warning("Invalid log level: %s", level) + return + + root_logger.addHandler(ci_visibility_handler) + root_logger.setLevel(min(root_logger.level, ci_visibility_handler.level)) + + log.debug("logger setup complete") diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/writer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/writer.py new file mode 100644 index 0000000..6746e3c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/ci_visibility/writer.py @@ -0,0 +1,148 @@ +import os +from typing import TYPE_CHECKING # noqa:F401 +from typing import Optional # noqa:F401 + +import ddtrace +from ddtrace import config +from ddtrace.vendor.dogstatsd import DogStatsd # noqa:F401 + +from .. import agent +from .. 
import service +from ..runtime import get_runtime_id +from ..writer import HTTPWriter +from ..writer import WriterClientBase +from .constants import AGENTLESS_BASE_URL +from .constants import AGENTLESS_COVERAGE_BASE_URL +from .constants import AGENTLESS_COVERAGE_ENDPOINT +from .constants import AGENTLESS_DEFAULT_SITE +from .constants import AGENTLESS_ENDPOINT +from .constants import EVP_PROXY_AGENT_ENDPOINT +from .constants import EVP_PROXY_COVERAGE_ENDPOINT +from .constants import EVP_SUBDOMAIN_HEADER_COVERAGE_VALUE +from .constants import EVP_SUBDOMAIN_HEADER_NAME +from .encoder import CIVisibilityCoverageEncoderV02 +from .encoder import CIVisibilityEncoderV01 + + +if TYPE_CHECKING: # pragma: no cover + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + + +class CIVisibilityEventClient(WriterClientBase): + def __init__(self): + encoder = CIVisibilityEncoderV01(0, 0) + encoder.set_metadata( + { + "language": "python", + "env": config.env, + "runtime-id": get_runtime_id(), + "library_version": ddtrace.__version__, + } + ) + super(CIVisibilityEventClient, self).__init__(encoder) + + +class CIVisibilityCoverageClient(WriterClientBase): + def __init__(self, intake_url, headers=None, itr_suite_skipping_mode=False): + encoder = CIVisibilityCoverageEncoderV02(0, 0) + if itr_suite_skipping_mode: + encoder._set_itr_suite_skipping_mode(itr_suite_skipping_mode) + self._intake_url = intake_url + if headers: + self._headers = headers + super(CIVisibilityCoverageClient, self).__init__(encoder) + + +class CIVisibilityProxiedCoverageClient(CIVisibilityCoverageClient): + ENDPOINT = EVP_PROXY_COVERAGE_ENDPOINT + + +class CIVisibilityAgentlessCoverageClient(CIVisibilityCoverageClient): + ENDPOINT = AGENTLESS_COVERAGE_ENDPOINT + + +class CIVisibilityAgentlessEventClient(CIVisibilityEventClient): + ENDPOINT = AGENTLESS_ENDPOINT + + +class CIVisibilityProxiedEventClient(CIVisibilityEventClient): + ENDPOINT = EVP_PROXY_AGENT_ENDPOINT + + +class CIVisibilityWriter(HTTPWriter): + RETRY_ATTEMPTS = 5 + HTTP_METHOD = "POST" + STATSD_NAMESPACE = "civisibility.writer" + + def __init__( + self, + intake_url="", # type: str + processing_interval=None, # type: Optional[float] + timeout=None, # type: Optional[float] + dogstatsd=None, # type: Optional[DogStatsd] + sync_mode=False, # type: bool + report_metrics=False, # type: bool + api_version=None, # type: Optional[str] + reuse_connections=None, # type: Optional[bool] + headers=None, # type: Optional[Dict[str, str]] + use_evp=False, # type: bool + coverage_enabled=False, # type: bool + itr_suite_skipping_mode=False, # type: bool + ): + if processing_interval is None: + processing_interval = config._trace_writer_interval_seconds + if timeout is None: + timeout = config._agent_timeout_seconds + intake_cov_url = None + if use_evp: + intake_url = agent.get_trace_url() + intake_cov_url = agent.get_trace_url() + elif config._ci_visibility_agentless_url: + intake_url = config._ci_visibility_agentless_url + intake_cov_url = config._ci_visibility_agentless_url + if not intake_url: + intake_url = "%s.%s" % (AGENTLESS_BASE_URL, os.getenv("DD_SITE", AGENTLESS_DEFAULT_SITE)) + + clients = ( + [CIVisibilityProxiedEventClient()] if use_evp else [CIVisibilityAgentlessEventClient()] + ) # type: List[WriterClientBase] + if coverage_enabled: + if not intake_cov_url: + intake_cov_url = "%s.%s" % (AGENTLESS_COVERAGE_BASE_URL, os.getenv("DD_SITE", AGENTLESS_DEFAULT_SITE)) + clients.append( + CIVisibilityProxiedCoverageClient( + intake_url=intake_cov_url, + 
headers={EVP_SUBDOMAIN_HEADER_NAME: EVP_SUBDOMAIN_HEADER_COVERAGE_VALUE}, + itr_suite_skipping_mode=itr_suite_skipping_mode, + ) + if use_evp + else CIVisibilityAgentlessCoverageClient( + intake_url=intake_cov_url, itr_suite_skipping_mode=itr_suite_skipping_mode + ) + ) + + super(CIVisibilityWriter, self).__init__( + intake_url=intake_url, + clients=clients, + processing_interval=processing_interval, + timeout=timeout, + dogstatsd=dogstatsd, + sync_mode=sync_mode, + reuse_connections=reuse_connections, + headers=headers, + ) + + def stop(self, timeout=None): + if self.status != service.ServiceStatus.STOPPED: + super(CIVisibilityWriter, self).stop(timeout=timeout) + + def recreate(self): + # type: () -> HTTPWriter + return self.__class__( + intake_url=self.intake_url, + processing_interval=self._interval, + timeout=self._timeout, + dogstatsd=self.dogstatsd, + sync_mode=self._sync_mode, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/codeowners.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/codeowners.py new file mode 100644 index 0000000..06c88ed --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/codeowners.py @@ -0,0 +1,195 @@ +import os +import re +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + + +def path_to_regex(pattern): + # type: (str) -> re.Pattern + """ + source https://github.com/sbdchd/codeowners/blob/c95e13d384ac09cfa1c23be1a8601987f41968ea/codeowners/__init__.py + + Copyright (c) 2019-2020 Steve Dignam + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + ported from https://github.com/hmarr/codeowners/blob/d0452091447bd2a29ee508eebc5a79874fb5d4ff/match.go#L33 + + MIT License + + Copyright (c) 2020 Harry Marr + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + """ + regex = "" + + slash_pos = pattern.find("/") + anchored = slash_pos > -1 and slash_pos != len(pattern) - 1 + + regex += r"\A" if anchored else r"(?:\A|/)" + + matches_dir = pattern[-1] == "/" + pattern_trimmed = pattern.strip("/") + + in_char_class = False + escaped = False + + iterator = enumerate(pattern_trimmed) + for i, ch in iterator: + if escaped: + regex += re.escape(ch) + escaped = False + continue + + if ch == "\\": + escaped = True + elif ch == "*": + if i + 1 < len(pattern_trimmed) and pattern_trimmed[i + 1] == "*": + left_anchored = i == 0 + leading_slash = i > 0 and pattern_trimmed[i - 1] == "/" + right_anchored = i + 2 == len(pattern_trimmed) + trailing_slash = i + 2 < len(pattern_trimmed) and pattern_trimmed[i + 2] == "/" + + if (left_anchored or leading_slash) and (right_anchored or trailing_slash): + regex += ".*" + + next(iterator, None) + next(iterator, None) + continue + regex += "[^/]*" + elif ch == "?": + regex += "[^/]" + elif ch == "[": + in_char_class = True + regex += ch + elif ch == "]": + if in_char_class: + regex += ch + in_char_class = False + else: + regex += re.escape(ch) + else: + regex += re.escape(ch) + + if in_char_class: + raise ValueError("unterminated character class in pattern {pattern}".format(pattern=pattern)) + + regex += "/" if matches_dir else r"(?:\Z|/)" + return re.compile(regex) + + +class Codeowners(object): + """Provide interface to parse CODEOWNERS file and match a given path against it.""" + + KNOWN_LOCATIONS = ( + "CODEOWNERS", + ".github/CODEOWNERS", + "docs/CODEOWNERS", + ".gitlab/CODEOWNERS", + ) + + def __init__(self, path=None, cwd=None): + # type: (Optional[str], Optional[str]) -> None + """Initialize Codeowners object. + + :param path: path to CODEOWNERS file otherwise try to use any from known locations + """ + path = path or self.location(cwd) + if path is not None: + self.path = path # type: str + self.patterns = [] # type: List[Tuple[re.Pattern, List[str]]] + self.parse() + + def location(self, cwd=None): + # type: (Optional[str]) -> Optional[str] + """Return the location of the CODEOWNERS file.""" + cwd = cwd or os.getcwd() + for location in self.KNOWN_LOCATIONS: + path = os.path.join(cwd, location) + if os.path.isfile(path): + return path + raise ValueError("CODEOWNERS file not found") + + def parse(self): + # type: () -> None + """Parse CODEOWNERS file and store the lines and regexes.""" + with open(self.path) as f: + patterns = [] + for line in f.readlines(): + line = line.strip() + if line == "": + continue + # Lines starting with '#' are comments. 
+ if line.startswith("#"): + continue + if line.startswith("[") and line.endswith("]"): + # found a code owners section + continue + if line.startswith("^[") and line.endswith("]"): + # found an optional code owners section + continue + + elements = line.split() + if len(elements) < 2: + continue + + path = elements[0] + if path is None: + continue + + try: + pattern = path_to_regex(path) + except (ValueError, IndexError): + continue + + owners = [owner for owner in elements[1:] if owner] + + if not owners: + continue + patterns.append((pattern, owners)) + # Order is important. The last matching pattern has the most precedence. + patterns.reverse() + self.patterns = patterns + + def of(self, path): + # type: (str) -> List[str] + """Return code owners for a given path. + + :param path: path to check + :return: list of file code owners identified by the given path + """ + for pattern, owners in self.patterns: + if pattern.search(path): + return owners + raise KeyError("no code owners found for {path}".format(path=path)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/compat.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/compat.py new file mode 100644 index 0000000..5372764 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/compat.py @@ -0,0 +1,458 @@ +from collections import Counter +from collections import OrderedDict +from collections import defaultdict +from collections import deque +import functools +from inspect import iscoroutinefunction +from inspect import isgeneratorfunction +import ipaddress +import os +import platform +import re +import sys +from tempfile import mkdtemp +import threading +from types import BuiltinFunctionType +from types import BuiltinMethodType +from types import FunctionType +from types import MethodType +from types import TracebackType +from typing import Any # noqa:F401 +from typing import AnyStr # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Text # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Type # noqa:F401 +from typing import Union # noqa:F401 +import warnings + +from ddtrace.vendor.wrapt.wrappers import BoundFunctionWrapper +from ddtrace.vendor.wrapt.wrappers import FunctionWrapper + + +__all__ = [ + "httplib", + "iscoroutinefunction", + "Queue", + "StringIO", + "urlencode", + "parse", + "maybe_stringify", +] + +PYTHON_VERSION_INFO = sys.version_info + +# Infos about python passed to the trace agent through the header +PYTHON_VERSION = platform.python_version() +PYTHON_INTERPRETER = platform.python_implementation() + +import http.client as httplib # noqa: E402 +from io import StringIO # noqa: E402 +from queue import Queue # noqa: E402 +import urllib.parse as parse # noqa: E402 +from urllib.parse import urlencode # noqa: E402 + + +def ensure_text(s, encoding="utf-8", errors="ignore") -> str: + if isinstance(s, str): + return s + if isinstance(s, bytes): + return s.decode(encoding, errors) + raise TypeError("Expected str or bytes but received %r" % (s.__class__)) + + +def ensure_binary(s, encoding="utf-8", errors="ignore") -> bytes: + if isinstance(s, bytes): + return s + if not isinstance(s, str): + raise TypeError("Expected str or bytes but received %r" % (s.__class__)) + return s.encode(encoding, errors) + + +NumericType = Union[int, float] + +# Pattern class generated by `re.compile` +pattern_type = re.Pattern + +try: + from inspect import getfullargspec # noqa:F401 + + def is_not_void_function(f, argspec): + return ( + argspec.args + or argspec.varargs + or 
argspec.varkw + or argspec.defaults + or argspec.kwonlyargs + or argspec.kwonlydefaults + or isgeneratorfunction(f) + ) + +except ImportError: + from inspect import getargspec as getfullargspec # type: ignore[assignment] # noqa: F401 + + def is_not_void_function(f, argspec): + return argspec.args or argspec.varargs or argspec.keywords or argspec.defaults or isgeneratorfunction(f) + + +def is_integer(obj): + # type: (Any) -> bool + """Helper to determine if the provided ``obj`` is an integer type or not""" + # DEV: We have to make sure it is an integer and not a boolean + # >>> type(True) + # + # >>> isinstance(True, int) + # True + return isinstance(obj, int) and not isinstance(obj, bool) + + +try: + from time import time_ns +except ImportError: + from time import time as _time + + def time_ns(): + # type: () -> int + return int(_time() * 10e5) * 1000 + + +try: + from time import monotonic +except ImportError: + from ddtrace.vendor.monotonic import monotonic + + +try: + from time import monotonic_ns +except ImportError: + + def monotonic_ns(): + # type: () -> int + return int(monotonic() * 1e9) + + +try: + from time import process_time_ns +except ImportError: + from time import clock as _process_time # type: ignore[attr-defined] + + def process_time_ns(): + # type: () -> int + return int(_process_time() * 1e9) + + +main_thread = threading.main_thread() + + +def make_async_decorator(tracer, coro, *params, **kw_params): + """ + Decorator factory that creates an asynchronous wrapper that yields + a coroutine result. This factory is required to handle Python 2 + compatibilities. + + :param object tracer: the tracer instance that is used + :param function f: the coroutine that must be executed + :param tuple params: arguments given to the Tracer.trace() + :param dict kw_params: keyword arguments given to the Tracer.trace() + """ + + @functools.wraps(coro) + async def func_wrapper(*args, **kwargs): + with tracer.trace(*params, **kw_params): + result = await coro(*args, **kwargs) + return result + + return func_wrapper + + +# DEV: There is `six.u()` which does something similar, but doesn't have the guard around `hasattr(s, 'decode')` +def to_unicode(s): + # type: (AnyStr) -> Text + """Return a unicode string for the given bytes or string instance.""" + # No reason to decode if we already have the unicode compatible object we expect + # DEV: `six.text_type` will be a `str` for python 3 and `unicode` for python 2 + # DEV: Double decoding a `unicode` can cause a `UnicodeEncodeError` + # e.g. `'\xc3\xbf'.decode('utf-8').decode('utf-8')` + if isinstance(s, str): + return s + + # If the object has a `decode` method, then decode into `utf-8` + # e.g. Python 2 `str`, Python 2/3 `bytearray`, etc + if hasattr(s, "decode"): + return s.decode("utf-8", errors="ignore") + + # Always try to coerce the object into the `six.text_type` object we expect + # e.g. `to_unicode(1)`, `to_unicode(dict(key='value'))` + return str(s) + + +def get_connection_response( + conn, # type: httplib.HTTPConnection +): + # type: (...) -> httplib.HTTPResponse + """Returns the response for a connection. + + If using Python 2 enable buffering. + + Python 2 does not enable buffering by default resulting in many recv + syscalls. 
+ + See: + https://bugs.python.org/issue4879 + https://github.com/python/cpython/commit/3c43fcba8b67ea0cec4a443c755ce5f25990a6cf + """ + return conn.getresponse() + + +CONTEXTVARS_IS_AVAILABLE = True + + +try: + from collections.abc import Iterable # noqa:F401 +except ImportError: + from collections import Iterable # type: ignore[no-redef, attr-defined] # noqa:F401 + + +def maybe_stringify(obj): + # type: (Any) -> Optional[str] + if obj is not None: + return str(obj) + return None + + +NoneType = type(None) + +BUILTIN_SIMPLE_TYPES = frozenset([int, float, str, bytes, bool, NoneType, type, complex]) +BUILTIN_MAPPNG_TYPES = frozenset([dict, defaultdict, Counter, OrderedDict]) +BUILTIN_SEQUENCE_TYPES = frozenset([list, tuple, set, frozenset, deque]) +BUILTIN_CONTAINER_TYPES = BUILTIN_MAPPNG_TYPES | BUILTIN_SEQUENCE_TYPES +BUILTIN_TYPES = BUILTIN_SIMPLE_TYPES | BUILTIN_CONTAINER_TYPES + + +try: + from types import MethodWrapperType + +except ImportError: + MethodWrapperType = object().__init__.__class__ # type: ignore[misc] + +CALLABLE_TYPES = ( + BuiltinMethodType, + BuiltinFunctionType, + FunctionType, + MethodType, + MethodWrapperType, + FunctionWrapper, + BoundFunctionWrapper, + property, + classmethod, + staticmethod, +) +BUILTIN = "builtins" + + +try: + from typing import Collection # noqa:F401 +except ImportError: + from typing import List # noqa:F401 + from typing import Set # noqa:F401 + from typing import Union # noqa:F401 + + Collection = Union[List, Set, Tuple] # type: ignore[misc,assignment] + +ExcInfoType = Union[Tuple[Type[BaseException], BaseException, Optional[TracebackType]], Tuple[None, None, None]] + + +try: + from json import JSONDecodeError +except ImportError: + JSONDecodeError = ValueError # type: ignore[misc,assignment] + + +def is_valid_ip(ip: str) -> bool: + try: + # try parsing the IP address + ipaddress.ip_address(str(ip)) + return True + except BaseException: + return False + + +def ip_is_global(ip): + # type: (str) -> bool + """ + is_global is Python 3+ only. This could raise a ValueError if the IP is not valid. + """ + parsed_ip = ipaddress.ip_address(str(ip)) + + return parsed_ip.is_global + + +# https://stackoverflow.com/a/19299884 +class TemporaryDirectory(object): + """Create and return a temporary directory. This has the same + behavior as mkdtemp but can be used as a context manager. For + example: + + with TemporaryDirectory() as tmpdir: + ... + + Upon exiting the context, the directory and everything contained + in it are removed. 
+ """ + + def __init__(self, suffix="", prefix="tmp", _dir=None): + self._closed = False + self.name = None # Handle mkdtemp raising an exception + self.name = mkdtemp(suffix, prefix, _dir) + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.name) + + def __enter__(self): + return self.name + + def cleanup(self, _warn=False): + if self.name and not self._closed: + try: + self._rmtree(self.name) + except (TypeError, AttributeError) as ex: + # Issue #10188: Emit a warning on stderr + # if the directory could not be cleaned + # up due to missing globals + if "None" not in str(ex): + raise + return + self._closed = True + if _warn: + self._warn("Implicitly cleaning up {!r}".format(self), ResourceWarning) + + def __exit__(self, exc, value, tb): + self.cleanup() + + def __del__(self): + # Issue a ResourceWarning if implicit cleanup needed + self.cleanup(_warn=True) + + # XXX (ncoghlan): The following code attempts to make + # this class tolerant of the module nulling out process + # that happens during CPython interpreter shutdown + # Alas, it doesn't actually manage it. See issue #10188 + _listdir = staticmethod(os.listdir) + _path_join = staticmethod(os.path.join) + _isdir = staticmethod(os.path.isdir) + _islink = staticmethod(os.path.islink) + _remove = staticmethod(os.remove) + _rmdir = staticmethod(os.rmdir) + _warn = warnings.warn + + def _rmtree(self, path): + # Essentially a stripped down version of shutil.rmtree. We can't + # use globals because they may be None'ed out at shutdown. + for name in self._listdir(path): + fullname = self._path_join(path, name) + try: + isdir = self._isdir(fullname) and not self._islink(fullname) + except OSError: + isdir = False + if isdir: + self._rmtree(fullname) + else: + try: + self._remove(fullname) + except OSError: + pass + try: + self._rmdir(path) + except OSError: + pass + + +try: + from shlex import quote as shquote +except ImportError: + import re + + _find_unsafe = re.compile(r"[^\w@%+=:,./-]").search + + def shquote(s): + # type: (str) -> str + """Return a shell-escaped version of the string *s*.""" + if not s: + return "''" + if _find_unsafe(s) is None: + return s + + # use single quotes, and put single quotes into double quotes + # the string $'b is then quoted as '$'"'"'b' + return "'" + s.replace("'", "'\"'\"'") + "'" + + +try: + from shlex import join as shjoin +except ImportError: + + def shjoin(args): # type: ignore[misc] + # type: (Iterable[str]) -> str + """Return a shell-escaped string from *args*.""" + return " ".join(shquote(arg) for arg in args) + + +try: + from contextlib import nullcontext +except ImportError: + from contextlib import contextmanager + + @contextmanager # type: ignore[no-redef] + def nullcontext(enter_result=None): + yield enter_result + + +if PYTHON_VERSION_INFO >= (3, 9): + from functools import singledispatchmethod +elif PYTHON_VERSION_INFO >= (3, 8): + # This fix was not backported to 3.8 + # https://github.com/python/cpython/issues/83860 + from functools import singledispatchmethod + + def _register(self, cls, method=None): + if hasattr(cls, "__func__"): + setattr(cls, "__annotations__", cls.__func__.__annotations__) + return self.dispatcher.register(cls, func=method) + + singledispatchmethod.register = _register # type: ignore[assignment] +else: + from functools import singledispatch + from functools import update_wrapper + + class singledispatchmethod: # type: ignore[no-redef] + """Single-dispatch generic method descriptor. 
+ + Supports wrapping existing descriptors and handles non-descriptor + callables as instance methods. + """ + + def __init__(self, func): + if not callable(func) and not hasattr(func, "__get__"): + raise TypeError(f"{func!r} is not callable or a descriptor") + + self.dispatcher = singledispatch(func) + self.func = func + + def register(self, cls, method=None): + if hasattr(cls, "__func__"): + setattr(cls, "__annotations__", cls.__func__.__annotations__) + return self.dispatcher.register(cls, func=method) + + def __get__(self, obj, cls=None): + def _method(*args, **kwargs): + method = self.dispatcher.dispatch(args[0].__class__) + return method.__get__(obj, cls)(*args, **kwargs) + + _method.__isabstractmethod__ = self.__isabstractmethod__ + _method.register = self.register + update_wrapper(_method, self.func) + return _method + + @property + def __isabstractmethod__(self): + return getattr(self.func, "__isabstractmethod__", False) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/constants.py new file mode 100644 index 0000000..b3053e9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/constants.py @@ -0,0 +1,102 @@ +from ddtrace.constants import AUTO_KEEP +from ddtrace.constants import AUTO_REJECT +from ddtrace.constants import USER_KEEP +from ddtrace.constants import USER_REJECT + + +PROPAGATION_STYLE_DATADOG = "datadog" +PROPAGATION_STYLE_B3_MULTI = "b3multi" +PROPAGATION_STYLE_B3_SINGLE = "b3" +_PROPAGATION_STYLE_W3C_TRACECONTEXT = "tracecontext" +_PROPAGATION_STYLE_NONE = "none" +_PROPAGATION_STYLE_DEFAULT = "datadog,tracecontext" +PROPAGATION_STYLE_ALL = ( + _PROPAGATION_STYLE_W3C_TRACECONTEXT, + PROPAGATION_STYLE_DATADOG, + PROPAGATION_STYLE_B3_MULTI, + PROPAGATION_STYLE_B3_SINGLE, + _PROPAGATION_STYLE_NONE, +) +W3C_TRACESTATE_KEY = "tracestate" +W3C_TRACEPARENT_KEY = "traceparent" +W3C_TRACESTATE_ORIGIN_KEY = "o" +W3C_TRACESTATE_SAMPLING_PRIORITY_KEY = "s" +DEFAULT_SAMPLING_RATE_LIMIT = 100 +SAMPLING_DECISION_TRACE_TAG_KEY = "_dd.p.dm" +DEFAULT_SERVICE_NAME = "unnamed-python-service" +# Used to set the name of an integration on a span +COMPONENT = "component" +HIGHER_ORDER_TRACE_ID_BITS = "_dd.p.tid" +MAX_UINT_64BITS = (1 << 64) - 1 +SPAN_LINKS_KEY = "_dd.span_links" +SPAN_API_DATADOG = "datadog" +SPAN_API_OTEL = "otel" +SPAN_API_OPENTRACING = "opentracing" +DEFAULT_BUFFER_SIZE = 20 << 20 # 20 MB +DEFAULT_MAX_PAYLOAD_SIZE = 20 << 20 # 20 MB +DEFAULT_PROCESSING_INTERVAL = 1.0 +DEFAULT_REUSE_CONNECTIONS = False +BLOCKED_RESPONSE_HTML = """ + You've been blocked + +

+Sorry, you cannot access this page. Please contact the customer service team.

+""" +BLOCKED_RESPONSE_JSON = ( + '{"errors": [{"title": "You\'ve been blocked", "detail": "Sorry, you cannot access ' + 'this page. Please contact the customer service team. Security provided by Datadog."}]}' +) +HTTP_REQUEST_BLOCKED = "http.request.blocked" +RESPONSE_HEADERS = "http.response.headers" +HTTP_REQUEST_QUERY = "http.request.query" +HTTP_REQUEST_COOKIE_VALUE = "http.request.cookie.value" +HTTP_REQUEST_COOKIE_NAME = "http.request.cookie.name" +HTTP_REQUEST_PATH = "http.request.path" +HTTP_REQUEST_HEADER_NAME = "http.request.header.name" +HTTP_REQUEST_HEADER = "http.request.header" +HTTP_REQUEST_PARAMETER = "http.request.parameter" +HTTP_REQUEST_BODY = "http.request.body" +HTTP_REQUEST_PATH_PARAMETER = "http.request.path.parameter" +REQUEST_PATH_PARAMS = "http.request.path_params" +STATUS_403_TYPE_AUTO = {"status_code": 403, "type": "auto"} + +MESSAGING_SYSTEM = "messaging.system" + +FLASK_ENDPOINT = "flask.endpoint" +FLASK_VIEW_ARGS = "flask.view_args" +FLASK_URL_RULE = "flask.url_rule" + +_HTTPLIB_NO_TRACE_REQUEST = "_dd_no_trace" +DEFAULT_TIMEOUT = 2.0 + + +class _PRIORITY_CATEGORY: + USER = "user" + RULE = "rule" + AUTO = "auto" + DEFAULT = "default" + + +# intermediate mapping of priority categories to actual priority values +# used to simplify code that selects sampling priority based on many factors +_CATEGORY_TO_PRIORITIES = { + _PRIORITY_CATEGORY.USER: (USER_KEEP, USER_REJECT), + _PRIORITY_CATEGORY.RULE: (USER_KEEP, USER_REJECT), + _PRIORITY_CATEGORY.AUTO: (AUTO_KEEP, AUTO_REJECT), + _PRIORITY_CATEGORY.DEFAULT: (AUTO_KEEP, AUTO_REJECT), +} +_KEEP_PRIORITY_INDEX = 0 +_REJECT_PRIORITY_INDEX = 1 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/__init__.py new file mode 100644 index 0000000..52a2982 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/__init__.py @@ -0,0 +1,322 @@ +""" +This file implements the Core API, the abstraction layer between Integrations and Product code. +The Core API enables two primary use cases: maintaining a tree of ``ExecutionContext`` objects +and dispatching events. + +When using the Core API, keep concerns separate between Products and Integrations. Integrations +should not contain any code that references Products (Tracing, AppSec, Spans, WAF, Debugging, et cetera) +and Product code should never reference the library being integrated with (for example by importing ``flask``). + +It's helpful to think of the context tree as a Trace with extra data on each Span. It's similar +to a tree of Spans in that it represents the parts of the execution state that Datadog products +care about. + +This example shows how ``core.context_with_data`` might be used to create a node in this context tree:: + + + import flask + + + def _patched_request(pin, wrapped, args, kwargs): + with core.context_with_data( + "flask._patched_request", + pin=pin, + flask_request=flask.request, + block_request_callable=_block_request_callable, + ) as ctx, ctx.get_item("flask_request_call"): + return wrapped(*args, **kwargs) + + +This example looks a bit like a span created by ``tracer.trace()``: it has a name, a ``Pin`` instance, and +``__enter__`` and ``__exit__`` functionality as a context manager. In fact, it's so similar to a span that +the Tracing code in ``ddtrace/tracing`` can create a span directly from it (that's what ``flask_request_call`` +is in this example). 
+ +The ``ExecutionContext`` object in this example also holds some data that you wouldn't typically find on +spans, like ``flask_request`` and ``block_request_callable``. These illustrate the context's utility as a +generic container for data that Datadog products need related to the current execution. ``block_request_callable`` +happens to be used in ``ddtrace/appsec`` by the AppSec product code to make request-blocking decisions, and +``flask_request`` is a reference to a library-specific function that Tracing uses. + +The first argument to ``context_with_data`` is the unique name of the context. When choosing this name, +consider how to differentiate it from other similar contexts while making its purpose clear. An easy default +is to use the name of the function within which ``context_with_data`` is being called, prefixed with the +integration name and a dot, for example ``flask._patched_request``. + +The integration code finds all of the library-specific objects that products need, and puts them into +the context tree it's building via ``context_with_data``. Product code then accesses the data it needs +by calling ``ExecutionContext.get_item`` like this:: + + + pin = ctx.get_item("pin") + current_span = pin.tracer.current_span() + ctx.set_item("current_span", current_span) + flask_config = ctx.get_item("flask_config") + _set_request_tags(ctx.get_item("flask_request"), current_span, flask_config) + + +Integration code can also call ``get_item`` when necessary, for example when the Flask integration checks +the request blocking flag that may have been set on the context by AppSec code and then runs Flask-specific +response logic:: + + + if core.get_item(HTTP_REQUEST_BLOCKED): + result = start_response("403", [("content-type", "application/json")]) + + +In order for ``get_item`` calls in Product code like ``ddtrace/appsec`` to find what they're looking for, +they need to happen at the right time. That's the problem that the ``core.dispatch`` and ``core.on`` +functions solve. + +The common pattern is that integration code generates events by calling ``dispatch`` and product code +listens to those events by calling ``on``. This allows product code to react to integration code at the +appropriate moments while maintaining clear separation of concerns. + +For example, the Flask integration calls ``dispatch`` to indicate that a blocked response just started, +passing some data along with the event:: + + + call = tracer.trace("operation") + core.dispatch("flask.blocked_request_callable", call) + + +The AppSec code listens for this event and does some AppSec-specific stuff in the handler:: + + + def _on_flask_blocked_request(): + core.set_item(HTTP_REQUEST_BLOCKED, True) + core.on("flask.blocked_request_callable", _on_flask_blocked_request) + + +``ExecutionContexts`` also generate their own start and end events that Product code can respond to +like this:: + + + def _on_jsonify_context_started_flask(ctx): + span = ctx.get_item("pin").tracer.trace(ctx.get_item("name")) + ctx.set_item("flask_jsonify_call", span) + core.on("context.started.flask.jsonify", _on_jsonify_context_started_flask) + + +The names of these events follow the pattern ``context.[started|ended].``. 
+""" +from contextlib import contextmanager +import logging +import sys +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + +from ddtrace.vendor.debtcollector import deprecate + +from ..utils.deprecations import DDTraceDeprecationWarning +from . import event_hub # noqa:F401 +from .event_hub import EventResultDict # noqa:F401 +from .event_hub import dispatch +from .event_hub import dispatch_with_results # noqa:F401 +from .event_hub import has_listeners # noqa:F401 +from .event_hub import on # noqa:F401 +from .event_hub import reset as reset_listeners # noqa:F401 + + +if TYPE_CHECKING: + from ddtrace.span import Span # noqa:F401 + + +try: + import contextvars +except ImportError: + import ddtrace.vendor.contextvars as contextvars # type: ignore + + +log = logging.getLogger(__name__) + + +_CURRENT_CONTEXT = None +ROOT_CONTEXT_ID = "__root" +SPAN_DEPRECATION_MESSAGE = ( + "The 'span' keyword argument on ExecutionContext methods is deprecated and will be removed in a future version." +) +SPAN_DEPRECATION_SUGGESTION = ( + "Please store contextual data on the ExecutionContext object using other kwargs and/or set_item()" +) + + +def _deprecate_span_kwarg(span): + if span is not None: + # https://github.com/tiangolo/fastapi/pull/10876 + if "fastapi" not in sys.modules and "fastapi.applications" not in sys.modules: + deprecate( + SPAN_DEPRECATION_MESSAGE, + message=SPAN_DEPRECATION_SUGGESTION, + category=DDTraceDeprecationWarning, + ) + + +class ExecutionContext: + __slots__ = ["identifier", "_data", "_parents", "_span", "_token"] + + def __init__(self, identifier, parent=None, span=None, **kwargs): + _deprecate_span_kwarg(span) + self.identifier = identifier + self._data = {} + self._parents = [] + self._span = span + if parent is not None: + self.addParent(parent) + self._data.update(kwargs) + if self._span is None and _CURRENT_CONTEXT is not None: + self._token = _CURRENT_CONTEXT.set(self) + dispatch("context.started.%s" % self.identifier, (self,)) + dispatch("context.started.start_span.%s" % self.identifier, (self,)) + + def __repr__(self): + return self.__class__.__name__ + " '" + self.identifier + "' @ " + str(id(self)) + + @property + def parents(self): + return self._parents + + @property + def parent(self): + return self._parents[0] if self._parents else None + + def end(self): + dispatch_result = dispatch_with_results("context.ended.%s" % self.identifier, (self,)) + if self._span is None: + try: + _CURRENT_CONTEXT.reset(self._token) + except ValueError: + log.debug( + "Encountered ValueError during core contextvar reset() call. " + "This can happen when a span holding an executioncontext is " + "finished in a Context other than the one that started it." + ) + except LookupError: + log.debug( + "Encountered LookupError during core contextvar reset() call. I don't know why this is possible." 
+ ) + return dispatch_result + + def addParent(self, context): + if self.identifier == ROOT_CONTEXT_ID: + raise ValueError("Cannot add parent to root context") + self._parents.append(context) + + @classmethod + @contextmanager + def context_with_data(cls, identifier, parent=None, span=None, **kwargs): + new_context = cls(identifier, parent=parent, span=span, **kwargs) + try: + yield new_context + finally: + new_context.end() + + def get_item(self, data_key: str, default: Optional[Any] = None, traverse: Optional[bool] = True) -> Any: + # NB mimic the behavior of `ddtrace.internal._context` by doing lazy inheritance + current = self + while current is not None: + if data_key in current._data: + return current._data.get(data_key) + if not traverse: + break + current = current.parent + return default + + def __getitem__(self, key: str): + value = self.get_item(key) + if value is None and key not in self._data: + raise KeyError + return value + + def get_items(self, data_keys): + # type: (List[str]) -> Optional[Any] + return [self.get_item(key) for key in data_keys] + + def set_item(self, data_key, data_value): + # type: (str, Optional[Any]) -> None + self._data[data_key] = data_value + + def set_safe(self, data_key, data_value): + # type: (str, Optional[Any]) -> None + if data_key in self._data: + raise ValueError("Cannot overwrite ExecutionContext data key '%s'", data_key) + return self.set_item(data_key, data_value) + + def set_items(self, keys_values): + # type: (Dict[str, Optional[Any]]) -> None + for data_key, data_value in keys_values.items(): + self.set_item(data_key, data_value) + + def root(self): + if self.identifier == ROOT_CONTEXT_ID: + return self + current = self + while current.parent is not None: + current = current.parent + return current + + +def __getattr__(name): + if name == "root": + return _CURRENT_CONTEXT.get().root() + raise AttributeError + + +def _reset_context(): + global _CURRENT_CONTEXT + _CURRENT_CONTEXT = contextvars.ContextVar("ExecutionContext_var", default=ExecutionContext(ROOT_CONTEXT_ID)) + + +_reset_context() +_CONTEXT_CLASS = ExecutionContext + + +def context_with_data(identifier, parent=None, **kwargs): + return _CONTEXT_CLASS.context_with_data(identifier, parent=(parent or _CURRENT_CONTEXT.get()), **kwargs) + + +def get_item(data_key, span=None): + # type: (str, Optional[Span]) -> Optional[Any] + _deprecate_span_kwarg(span) + if span is not None and span._local_root is not None: + return span._local_root._get_ctx_item(data_key) + else: + return _CURRENT_CONTEXT.get().get_item(data_key) # type: ignore + + +def get_items(data_keys, span=None): + # type: (List[str], Optional[Span]) -> Optional[Any] + _deprecate_span_kwarg(span) + if span is not None and span._local_root is not None: + return [span._local_root._get_ctx_item(key) for key in data_keys] + else: + return _CURRENT_CONTEXT.get().get_items(data_keys) # type: ignore + + +def set_safe(data_key, data_value): + # type: (str, Optional[Any]) -> None + _CURRENT_CONTEXT.get().set_safe(data_key, data_value) # type: ignore + + +# NB Don't call these set_* functions from `ddtrace.contrib`, only from product code! 
+def set_item(data_key, data_value, span=None): + # type: (str, Optional[Any], Optional[Span]) -> None + _deprecate_span_kwarg(span) + if span is not None and span._local_root is not None: + span._local_root._set_ctx_item(data_key, data_value) + else: + _CURRENT_CONTEXT.get().set_item(data_key, data_value) # type: ignore + + +def set_items(keys_values, span=None): + # type: (Dict[str, Optional[Any]], Optional[Span]) -> None + _deprecate_span_kwarg(span) + if span is not None and span._local_root is not None: + span._local_root._set_ctx_items(keys_values) + else: + _CURRENT_CONTEXT.get().set_items(keys_values) # type: ignore diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/event_hub.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/event_hub.py new file mode 100644 index 0000000..ae2fa07 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/core/event_hub.py @@ -0,0 +1,134 @@ +import dataclasses +import enum +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple + +from ddtrace import config + + +_listeners: Dict[str, Dict[Any, Callable[..., Any]]] = {} +_all_listeners: List[Callable[[str, Tuple[Any, ...]], None]] = [] + + +class ResultType(enum.Enum): + RESULT_OK = 0 + RESULT_EXCEPTION = 1 + RESULT_UNDEFINED = -1 + + +@dataclasses.dataclass +class EventResult: + response_type: ResultType = ResultType.RESULT_UNDEFINED + value: Any = None + exception: Optional[BaseException] = None + + def __bool__(self): + "EventResult can easily be checked as a valid result" + return self.response_type == ResultType.RESULT_OK + + +_MissingEvent = EventResult() + + +class EventResultDict(Dict[str, EventResult]): + def __missing__(self, key: str): + return _MissingEvent + + def __getattr__(self, name: str): + return dict.__getitem__(self, name) + + +_MissingEventDict = EventResultDict() + + +def has_listeners(event_id: str) -> bool: + """Check if there are hooks registered for the provided event_id""" + global _listeners + return bool(_listeners.get(event_id)) + + +def on(event_id: str, callback: Callable[..., Any], name: Any = None) -> None: + """Register a listener for the provided event_id""" + global _listeners + if name is None: + name = id(callback) + if event_id not in _listeners: + _listeners[event_id] = {} + _listeners[event_id][name] = callback + + +def on_all(callback: Callable[..., Any]) -> None: + """Register a listener for all events emitted""" + global _all_listeners + if callback not in _all_listeners: + _all_listeners.insert(0, callback) + + +def reset(event_id: Optional[str] = None) -> None: + """Remove all registered listeners. If an event_id is provided, only clear those + event listeners. + """ + global _listeners + global _all_listeners + + if not event_id: + _listeners.clear() + _all_listeners.clear() + elif event_id in _listeners: + del _listeners[event_id] + + +def dispatch(event_id: str, args: Tuple[Any, ...] = ()) -> None: + """Call all hooks for the provided event_id with the provided args""" + global _all_listeners + global _listeners + + for hook in _all_listeners: + try: + hook(event_id, args) + except Exception: + if config._raise: + raise + + if event_id not in _listeners: + return + + for local_hook in _listeners[event_id].values(): + try: + local_hook(*args) + except Exception: + if config._raise: + raise + + +def dispatch_with_results(event_id: str, args: Tuple[Any, ...] 
= ()) -> EventResultDict: + """Call all hooks for the provided event_id with the provided args + returning the results and exceptions from the called hooks + """ + global _listeners + global _all_listeners + + for hook in _all_listeners: + try: + hook(event_id, args) + except Exception: + if config._raise: + raise + + if event_id not in _listeners: + return _MissingEventDict + + results = EventResultDict() + for name, hook in _listeners[event_id].items(): + try: + results[name] = EventResult(ResultType.RESULT_OK, hook(*args)) + except Exception as e: + if config._raise: + raise + results[name] = EventResult(ResultType.RESULT_EXCEPTION, None, e) + + return results diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..321b7ca Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.pyi new file mode 100644 index 0000000..0545124 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/_ddup.pyi @@ -0,0 +1,29 @@ +import typing +from typing import Optional + +from ddtrace.span import Span + +def init( + env: Optional[str], + service: Optional[str], + version: Optional[str], + tags: Optional[typing.Dict[str, str]], + max_nframes: Optional[int], + url: Optional[str], +) -> None: ... +def start_sample(nframes: int) -> None: ... +def push_cputime(value: int, count: int) -> None: ... +def push_walltime(value: int, count: int) -> None: ... +def push_acquire(value: int, count: int) -> None: ... +def push_release(value: int, count: int) -> None: ... +def push_alloc(value: int, count: int) -> None: ... +def push_heap(value: int) -> None: ... +def push_lock_name(lock_name: str) -> None: ... +def push_frame(name: str, filename: str, address: int, line: int) -> None: ... +def push_threadinfo(thread_id: int, thread_native_id: int, thread_name: Optional[str]) -> None: ... +def push_taskinfo(task_id: int, task_name: str) -> None: ... +def push_exceptioninfo(exc_type: type, count: int) -> None: ... +def push_class_name(class_name: str) -> None: ... +def push_span(span: typing.Optional[Span], endpoint_collection_enabled: bool) -> None: ... +def flush_sample() -> None: ... +def upload() -> None: ... 
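The stub above lists the full surface of the native profiling exporter. As a rough usage sketch only (not part of the vendored code), the calls are meant to be driven in roughly this order, assuming the bundled `_ddup` extension loads on the target platform (otherwise the pure-Python fallback in `ddup.py`, added next, raises NotImplementedError for every call) and that wall-time values are in nanoseconds; the service name, agent URL, and frame details below are illustrative placeholders:

    from ddtrace.internal.datadog.profiling import ddup

    # Configure the exporter once per process (all values are placeholders).
    ddup.init(
        env="prod",
        service="example-service",
        version="1.0.0",
        tags={"team": "observability"},
        max_nframes=64,
        url="http://localhost:8126",
    )

    # Record one wall-time sample with a single frame, then ship the profile.
    ddup.start_sample(1)
    ddup.push_walltime(10_000_000, 1)  # value assumed to be nanoseconds, count of 1
    ddup.push_frame("handle_request", "app.py", 0, 42)
    ddup.push_threadinfo(1, 1, "MainThread")
    ddup.flush_sample()
    ddup.upload()
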
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/ddup.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/ddup.py new file mode 100644 index 0000000..5902a11 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/ddup.py @@ -0,0 +1,90 @@ +from .utils import sanitize_string # noqa: F401 + + +try: + from ._ddup import * # noqa: F403, F401 +except ImportError: + from typing import Dict # noqa:F401 + from typing import Optional # noqa:F401 + + from ddtrace.span import Span # noqa:F401 + + # Decorator for not-implemented + def not_implemented(func): + def wrapper(*args, **kwargs): + raise NotImplementedError("{} is not implemented on this platform".format(func.__name__)) + + @not_implemented + def init( + env, # type: Optional[str] + service, # type: Optional[str] + version, # type: Optional[str] + tags, # type: Optional[Dict[str, str]] + max_nframes, # type: Optional[int] + url, # type: Optional[str] + ): + pass + + @not_implemented + def start_sample(nframes): # type: (int) -> None + pass + + @not_implemented + def push_cputime(value, count): # type: (int, int) -> None + pass + + @not_implemented + def push_walltime(value, count): # type: (int, int) -> None + pass + + @not_implemented + def push_acquire(value, count): # type: (int, int) -> None + pass + + @not_implemented + def push_release(value, count): # type: (int, int) -> None + pass + + @not_implemented + def push_alloc(value, count): # type: (int, int) -> None + pass + + @not_implemented + def push_heap(value): # type: (int) -> None + pass + + @not_implemented + def push_lock_name(lock_name): # type: (str) -> None + pass + + @not_implemented + def push_frame(name, filename, address, line): # type: (str, str, int, int) -> None + pass + + @not_implemented + def push_threadinfo(thread_id, thread_native_id, thread_name): # type: (int, int, Optional[str]) -> None + pass + + @not_implemented + def push_taskinfo(task_id, task_name): # type: (int, str) -> None + pass + + @not_implemented + def push_exceptioninfo(exc_type, count): # type: (type, int) -> None + pass + + @not_implemented + def push_class_name(class_name): # type: (str) -> None + pass + + @not_implemented + def push_span(span, endpoint_collection_enabled): # type: (Optional[Span], bool) -> None + pass + + @not_implemented + def flush_sample(): # type: () -> None + pass + + @not_implemented + def upload(): # type: () -> None + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/utils.py new file mode 100644 index 0000000..c938706 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datadog/profiling/utils.py @@ -0,0 +1,31 @@ +from sys import version_info +from typing import Any # noqa:F401 + +from ddtrace.internal.logger import get_logger + + +LOG = get_logger(__name__) + + +# 3.11 and above +def _sanitize_string_check(value): + # type: (Any) -> str + if isinstance(value, str): + return value + elif value is None: + return "" + try: + return value.decode("utf-8", "ignore") + except Exception: + LOG.warning("Got object of type '%s' instead of str during profile serialization", type(value).__name__) + return "[invalid string]%s" % type(value).__name__ + + +# 3.10 and below (the noop version) +def _sanitize_string_identity(value): + # type: (Any) -> str + return value or "" + + +# Assign based on version +sanitize_string = _sanitize_string_check if version_info[:2] > (3, 
10) else _sanitize_string_identity diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/__init__.py new file mode 100644 index 0000000..0745818 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/__init__.py @@ -0,0 +1,27 @@ +from ddtrace import config +from ddtrace.internal import agent + +from ...internal.utils.importlib import require_modules + + +required_modules = ["confluent_kafka", "botocore", "kombu"] +_processor = None + +if config._data_streams_enabled: + with require_modules(required_modules) as missing_modules: + if "confluent_kafka" not in missing_modules: + from . import kafka # noqa:F401 + if "botocore" not in missing_modules: + from . import botocore # noqa:F401 + if "kombu" not in missing_modules: + from . import kombu # noqa:F401 + + +def data_streams_processor(): + global _processor + if config._data_streams_enabled and not _processor: + from . import processor + + _processor = processor.DataStreamsProcessor(agent.get_trace_url()) + + return _processor diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/botocore.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/botocore.py new file mode 100644 index 0000000..41d3e2c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/botocore.py @@ -0,0 +1,186 @@ +import base64 +from datetime import datetime +import json + +from ddtrace import config +from ddtrace.internal import core +from ddtrace.internal.compat import parse +from ddtrace.internal.datastreams.processor import PROPAGATION_KEY_BASE_64 +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def get_pathway(endpoint_service, dsm_identifier): + # type: (str, str) -> str + """ + :endpoint_service: the name of the service (i.e. 'sns', 'sqs', 'kinesis') + :dsm_identifier: the identifier for the topic/queue/stream/etc + + Set the data streams monitoring checkpoint and return the encoded pathway + """ + from . 
import data_streams_processor as processor + + path_type = "type:{}".format(endpoint_service) + if not dsm_identifier: + log.debug("pathway being generated with unrecognized service: %s", dsm_identifier) + + pathway = processor().set_checkpoint(["direction:out", "topic:{}".format(dsm_identifier), path_type]) + return pathway.encode_b64() + + +def get_queue_name(params): + # type: (dict) -> str + """ + :params: contains the params for the current botocore action + + Return the name of the queue given the params + """ + queue_url = params["QueueUrl"] + url = parse.urlparse(queue_url) + return url.path.rsplit("/", 1)[-1] + + +def get_topic_arn(params): + # type: (dict) -> str + """ + :params: contains the params for the current botocore action + + Return the name of the topic given the params + """ + sns_arn = params["TopicArn"] + return sns_arn + + +def get_stream_arn(params): + # type: (dict) -> str + """ + :params: contains the params for the current botocore action + + Return the name of the stream given the params + """ + stream_arn = params.get("StreamARN", "") + return stream_arn + + +def inject_context(trace_data, endpoint_service, dsm_identifier): + pathway = get_pathway(endpoint_service, dsm_identifier) + + if trace_data is not None: + trace_data[PROPAGATION_KEY_BASE_64] = pathway + + +def handle_kinesis_produce(params): + stream_arn = get_stream_arn(params) + if stream_arn: # If stream ARN isn't specified, we give up (it is not a required param) + # put_records has a "Records" entry but put_record does not, so we fake a record to + # collapse the logic for the two cases + for _ in params.get("Records", ["fake_record"]): + # In other DSM code, you'll see the pathway + context injection but not here. + # Kinesis DSM doesn't inject any data, so we only need to generate checkpoints.
+ inject_context(None, "kinesis", stream_arn) + + +def handle_sqs_sns_produce(endpoint_service, trace_data, params): + dsm_identifier = None + if endpoint_service == "sqs": + dsm_identifier = get_queue_name(params) + elif endpoint_service == "sns": + dsm_identifier = get_topic_arn(params) + inject_context(trace_data, endpoint_service, dsm_identifier) + + +def handle_sqs_prepare(params): + if "MessageAttributeNames" not in params: + params.update({"MessageAttributeNames": ["_datadog"]}) + elif "_datadog" not in params["MessageAttributeNames"]: + params.update({"MessageAttributeNames": list(params["MessageAttributeNames"]) + ["_datadog"]}) + + +def get_datastreams_context(message): + """ + Formats we're aware of: + - message.Body.MessageAttributes._datadog.Value.decode() (SQS) + - message.MessageAttributes._datadog.StringValue (SNS -> SQS) + - message.MesssageAttributes._datadog.BinaryValue.decode() (SNS -> SQS, raw) + """ + context_json = None + message_body = message + try: + message_body = json.loads(message.get("Body")) + except ValueError: + log.debug("Unable to parse message body, treat as non-json") + + if "MessageAttributes" not in message_body: + log.debug("DataStreams skipped message: %r", message) + return None + + if "_datadog" not in message_body["MessageAttributes"]: + log.debug("DataStreams skipped message: %r", message) + return None + + if message_body.get("Type") == "Notification": + # This is potentially a DSM SNS notification + if message_body["MessageAttributes"]["_datadog"]["Type"] == "Binary": + context_json = json.loads(base64.b64decode(message_body["MessageAttributes"]["_datadog"]["Value"]).decode()) + elif "StringValue" in message["MessageAttributes"]["_datadog"]: + # The message originated from SQS + context_json = json.loads(message_body["MessageAttributes"]["_datadog"]["StringValue"]) + elif "BinaryValue" in message["MessageAttributes"]["_datadog"]: + # Raw message delivery + context_json = json.loads(message_body["MessageAttributes"]["_datadog"]["BinaryValue"].decode()) + else: + log.debug("DataStreams did not handle message: %r", message) + + return context_json + + +def handle_sqs_receive(params, result): + from . import data_streams_processor as processor + + queue_name = get_queue_name(params) + + for message in result.get("Messages"): + try: + context_json = get_datastreams_context(message) + pathway = context_json.get(PROPAGATION_KEY_BASE_64, None) if context_json else None + ctx = processor().decode_pathway_b64(pathway) + ctx.set_checkpoint(["direction:in", "topic:" + queue_name, "type:sqs"]) + + except Exception: + log.debug("Error receiving SQS message with data streams monitoring enabled", exc_info=True) + + +def record_data_streams_path_for_kinesis_stream(params, results): + from . 
import data_streams_processor as processor + + stream_arn = params.get("StreamARN") + + if not stream_arn: + log.debug("Unable to determine StreamARN for request with params: ", params) + return + + pathway = processor().new_pathway() + for record in results.get("Records", []): + time_estimate = record.get("ApproximateArrivalTimestamp", datetime.now()).timestamp() + pathway.set_checkpoint( + ["direction:in", "topic:" + stream_arn, "type:kinesis"], + edge_start_sec_override=time_estimate, + pathway_start_sec_override=time_estimate, + ) + + +def handle_kinesis_receive(params, result): + try: + record_data_streams_path_for_kinesis_stream(params, result) + except Exception: + log.debug("Failed to report data streams monitoring info for kinesis", exc_info=True) + + +if config._data_streams_enabled: + core.on("botocore.kinesis.start", handle_kinesis_produce) + core.on("botocore.sqs_sns.start", handle_sqs_sns_produce) + core.on("botocore.sqs.ReceiveMessage.pre", handle_sqs_prepare) + core.on("botocore.sqs.ReceiveMessage.post", handle_sqs_receive) + core.on("botocore.kinesis.GetRecords.post", handle_kinesis_receive) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/encoding.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/encoding.py new file mode 100644 index 0000000..e75fb6d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/encoding.py @@ -0,0 +1,45 @@ +import struct +from typing import Tuple # noqa:F401 + +from .fnv import _get_byte + + +MAX_VAR_LEN_64 = 9 + + +def encode_var_int_64(v): + # type: (int) -> bytes + return encode_var_uint_64(v >> (64 - 1) ^ (v << 1)) + + +def decode_var_int_64(b): + # type: (bytes) -> Tuple[int, bytes] + v, b = decode_var_uint_64(b) + return (v >> 1) ^ -(v & 1), b + + +def encode_var_uint_64(v): + # type: (int) -> bytes + b = b"" + for _ in range(0, MAX_VAR_LEN_64): + if v < 0x80: + break + b += struct.pack("B", (v & 255) | 0x80) + v >>= 7 + b += struct.pack("B", v & 255) + return b + + +def decode_var_uint_64(b): + # type: (bytes) -> Tuple[int, bytes] + x = 0 + s = 0 + for i in range(0, MAX_VAR_LEN_64): + if len(b) <= i: + raise EOFError() + n = _get_byte(b[i]) + if n < 0x80 or i == MAX_VAR_LEN_64 - 1: + return x | n << s, b[i + 1 :] + x |= (n & 0x7F) << s + s += 7 + raise EOFError diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/fnv.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/fnv.py new file mode 100644 index 0000000..ee200af --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/fnv.py @@ -0,0 +1,39 @@ +""" +Implementation of Fowler/Noll/Vo hash algorithm in pure Python. +See http://isthe.com/chongo/tech/comp/fnv/ +""" +import sys + + +FNV_64_PRIME = 0x100000001B3 +FNV1_64_INIT = 0xCBF29CE484222325 + + +def no_op(c): + return c + + +if sys.version_info[0] == 3: + _get_byte = no_op +else: + _get_byte = ord + + +def fnv(data, hval_init, fnv_prime, fnv_size): + # type: (bytes, int, int, int) -> int + """ + Core FNV hash algorithm used in FNV0 and FNV1. + """ + hval = hval_init + for byte in data: + hval = (hval * fnv_prime) % fnv_size + hval = hval ^ _get_byte(byte) + return hval + + +def fnv1_64(data): + # type: (bytes) -> int + """ + Returns the 64 bit FNV-1 hash value for the given data. 
+ """ + return fnv(data, FNV1_64_INIT, FNV_64_PRIME, 2**64) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kafka.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kafka.py new file mode 100644 index 0000000..b8d5543 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kafka.py @@ -0,0 +1,117 @@ +import time + +from confluent_kafka import TopicPartition + +from ddtrace import config +from ddtrace.internal import core +from ddtrace.internal.datastreams.processor import PROPAGATION_KEY +from ddtrace.internal.datastreams.utils import _calculate_byte_size +from ddtrace.internal.utils import ArgumentError +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.utils import set_argument_value + + +INT_TYPES = (int,) +MESSAGE_ARG_POSITION = 1 +KEY_ARG_POSITION = 2 +KEY_KWARG_NAME = "key" + + +def dsm_kafka_message_produce(instance, args, kwargs, is_serializing, span): + from . import data_streams_processor as processor + + topic = core.get_item("kafka_topic") + message = get_argument_value(args, kwargs, MESSAGE_ARG_POSITION, "value", optional=True) + key = get_argument_value(args, kwargs, KEY_ARG_POSITION, KEY_KWARG_NAME, optional=True) + headers = kwargs.get("headers", {}) + + payload_size = 0 + payload_size += _calculate_byte_size(message) + payload_size += _calculate_byte_size(key) + payload_size += _calculate_byte_size(headers) + + pathway = processor().set_checkpoint( + ["direction:out", "topic:" + topic, "type:kafka"], payload_size=payload_size, span=span + ) + encoded_pathway = pathway.encode() + headers[PROPAGATION_KEY] = encoded_pathway + kwargs["headers"] = headers + + on_delivery_kwarg = "on_delivery" + on_delivery_arg = 5 + on_delivery = None + try: + on_delivery = get_argument_value(args, kwargs, on_delivery_arg, on_delivery_kwarg) + except ArgumentError: + if not is_serializing: + on_delivery_kwarg = "callback" + on_delivery_arg = 4 + on_delivery = get_argument_value(args, kwargs, on_delivery_arg, on_delivery_kwarg, optional=True) + + def wrapped_callback(err, msg): + if err is None: + reported_offset = msg.offset() if isinstance(msg.offset(), INT_TYPES) else -1 + processor().track_kafka_produce(msg.topic(), msg.partition(), reported_offset, time.time()) + if on_delivery is not None: + on_delivery(err, msg) + + try: + args, kwargs = set_argument_value(args, kwargs, on_delivery_arg, on_delivery_kwarg, wrapped_callback) + except ArgumentError: + # we set the callback even if it's not set by the client, to track produce calls correctly. + kwargs[on_delivery_kwarg] = wrapped_callback + + +def dsm_kafka_message_consume(instance, message, span): + from . 
import data_streams_processor as processor + + headers = {header[0]: header[1] for header in (message.headers() or [])} + topic = core.get_item("kafka_topic") + group = instance._group_id + + payload_size = 0 + if hasattr(message, "len"): + # message.len() is only supported for some versions of confluent_kafka + payload_size += message.len() + else: + payload_size += _calculate_byte_size(message.value()) + + payload_size += _calculate_byte_size(message.key()) + payload_size += _calculate_byte_size(headers) + + ctx = processor().decode_pathway(headers.get(PROPAGATION_KEY, None)) + ctx.set_checkpoint( + ["direction:in", "group:" + group, "topic:" + topic, "type:kafka"], payload_size=payload_size, span=span + ) + + if instance._auto_commit: + # it's not exactly true, but if auto commit is enabled, we consider that a message is acknowledged + # when it's read. We add one because the commit offset is the next message to read. + reported_offset = (message.offset() + 1) if isinstance(message.offset(), INT_TYPES) else -1 + processor().track_kafka_commit( + instance._group_id, message.topic(), message.partition(), reported_offset, time.time() + ) + + +def dsm_kafka_message_commit(instance, args, kwargs): + from . import data_streams_processor as processor + + message = get_argument_value(args, kwargs, 0, "message", optional=True) + + offsets = [] + if message is not None: + # the commit offset is the next message to read. So last message read + 1 + reported_offset = message.offset() + 1 if isinstance(message.offset(), INT_TYPES) else -1 + offsets = [TopicPartition(message.topic(), message.partition(), reported_offset)] + else: + offsets = get_argument_value(args, kwargs, 1, "offsets", True) or [] + + for offset in offsets: + reported_offset = offset.offset if isinstance(offset.offset, INT_TYPES) else -1 + processor().track_kafka_commit(instance._group_id, offset.topic, offset.partition, reported_offset, time.time()) + + +if config._data_streams_enabled: + core.on("kafka.produce.start", dsm_kafka_message_produce) + core.on("kafka.consume.start", dsm_kafka_message_consume) + core.on("kafka.commit.start", dsm_kafka_message_commit) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kombu.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kombu.py new file mode 100644 index 0000000..109ca34 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/kombu.py @@ -0,0 +1,55 @@ +from ddtrace import config +from ddtrace.contrib.kombu.utils import HEADER_POS +from ddtrace.contrib.kombu.utils import PUBLISH_BODY_IDX +from ddtrace.contrib.kombu.utils import get_exchange_from_args +from ddtrace.contrib.kombu.utils import get_routing_key_from_args +from ddtrace.internal import core +from ddtrace.internal.datastreams.processor import PROPAGATION_KEY +from ddtrace.internal.datastreams.utils import _calculate_byte_size +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def handle_kombu_produce(args, kwargs, span): + from . 
import data_streams_processor as processor + + routing_key = get_routing_key_from_args(args) + dsm_identifier = get_exchange_from_args(args) + payload_size = 0 + payload_size += _calculate_byte_size(args[HEADER_POS]) + payload_size += _calculate_byte_size(args[PUBLISH_BODY_IDX]) + + has_routing_key = str(bool(routing_key)).lower() + + pathway_tags = [] + for prefix, value in [ + ("direction", "out"), + ("exchange", dsm_identifier), + ("has_routing_key", has_routing_key), + ("type", "rabbitmq"), + ]: + if value is not None: + pathway_tags.append(f"{prefix}:{value}") + + pathway = processor().set_checkpoint(pathway_tags, payload_size=payload_size, span=span) + encoded_pathway = pathway.encode() + args[HEADER_POS][PROPAGATION_KEY] = encoded_pathway + + +def handle_kombu_consume(instance, message, span): + from . import data_streams_processor as processor + + payload_size = 0 + payload_size += _calculate_byte_size(message.body) + payload_size += _calculate_byte_size(message.headers) + + ctx = processor().decode_pathway(message.headers.get(PROPAGATION_KEY, None)) + queue = instance.queues[0].name if len(instance.queues) > 0 else "" + ctx.set_checkpoint(["direction:in", f"topic:{queue}", "type:rabbitmq"], payload_size=payload_size, span=span) + + +if config._data_streams_enabled: + core.on("kombu.amqp.publish.pre", handle_kombu_produce) + core.on("kombu.amqp.receive.post", handle_kombu_consume) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/processor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/processor.py new file mode 100644 index 0000000..208f4ea --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/processor.py @@ -0,0 +1,464 @@ +# coding: utf-8 +import base64 +from collections import defaultdict +from functools import partial +import gzip +import os +import struct +import threading +import time +import typing +from typing import DefaultDict # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import NamedTuple # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +from ddsketch import LogCollapsingLowestDenseDDSketch +from ddsketch.pb.proto import DDSketchProto + +import ddtrace +from ddtrace import config +from ddtrace.internal import compat +from ddtrace.internal.atexit import register_on_exit_signal +from ddtrace.internal.constants import DEFAULT_SERVICE_NAME +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter + +from .._encoding import packb +from ..agent import get_connection +from ..compat import get_connection_response +from ..forksafe import Lock +from ..hostname import get_hostname +from ..logger import get_logger +from ..periodic import PeriodicService +from ..writer import _human_size +from .encoding import decode_var_int_64 +from .encoding import encode_var_int_64 +from .fnv import fnv1_64 + + +def gzip_compress(payload): + return gzip.compress(payload, 1) + + +""" +The data streams processor aggregates stats about pathways (linked chains of services and topics). +An example of a pathway would be: + +service 1 --> Kafka topic A --> service 2 --> Kafka topic B --> service 3 + +The processor flushes stats periodically (every 10 sec) to the Datadog agent. +This powers the data streams monitoring product.
More details about the product can be found here: +https://docs.datadoghq.com/data_streams/ +""" + + +log = get_logger(__name__) + +PROPAGATION_KEY = "dd-pathway-ctx" +PROPAGATION_KEY_BASE_64 = "dd-pathway-ctx-base64" +SHUTDOWN_TIMEOUT = 5 + +""" +PathwayAggrKey uniquely identifies a pathway to aggregate stats on. +""" +PathwayAggrKey = typing.Tuple[ + str, # edge tags + int, # hash_value + int, # parent hash +] + + +class PathwayStats(object): + """Aggregated pathway statistics.""" + + __slots__ = ("full_pathway_latency", "edge_latency", "payload_size") + + def __init__(self): + self.full_pathway_latency = LogCollapsingLowestDenseDDSketch(0.00775, bin_limit=2048) + self.edge_latency = LogCollapsingLowestDenseDDSketch(0.00775, bin_limit=2048) + self.payload_size = LogCollapsingLowestDenseDDSketch(0.00775, bin_limit=2048) + + +PartitionKey = NamedTuple("PartitionKey", [("topic", str), ("partition", int)]) +ConsumerPartitionKey = NamedTuple("ConsumerPartitionKey", [("group", str), ("topic", str), ("partition", int)]) +Bucket = NamedTuple( + "Bucket", + [ + ("pathway_stats", DefaultDict[PathwayAggrKey, PathwayStats]), + ("latest_produce_offsets", DefaultDict[PartitionKey, int]), + ("latest_commit_offsets", DefaultDict[ConsumerPartitionKey, int]), + ], +) + + +class DataStreamsProcessor(PeriodicService): + """DataStreamsProcessor for computing, collecting and submitting data stream stats to the Datadog Agent.""" + + def __init__(self, agent_url, interval=None, timeout=1.0, retry_attempts=3): + # type: (str, Optional[float], float, int) -> None + if interval is None: + interval = float(os.getenv("_DD_TRACE_STATS_WRITER_INTERVAL") or 10.0) + super(DataStreamsProcessor, self).__init__(interval=interval) + self._agent_url = agent_url + self._endpoint = "/v0.1/pipeline_stats" + self._agent_endpoint = "%s%s" % (self._agent_url, self._endpoint) + self._timeout = timeout + # Have the bucket size match the interval in which flushes occur. + self._bucket_size_ns = int(interval * 1e9) # type: int + self._buckets = defaultdict( + lambda: Bucket(defaultdict(PathwayStats), defaultdict(int), defaultdict(int)) + ) # type: DefaultDict[int, Bucket] + self._headers = { + "Datadog-Meta-Lang": "python", + "Datadog-Meta-Tracer-Version": ddtrace.__version__, + "Content-Type": "application/msgpack", + "Content-Encoding": "gzip", + } # type: Dict[str, str] + self._hostname = compat.ensure_text(get_hostname()) + self._service = compat.ensure_text(config._get_service(DEFAULT_SERVICE_NAME)) + self._lock = Lock() + self._current_context = threading.local() + self._enabled = True + + self._flush_stats_with_backoff = fibonacci_backoff_with_jitter( + attempts=retry_attempts, + initial_wait=0.618 * self.interval / (1.618**retry_attempts) / 2, + )(self._flush_stats) + + register_on_exit_signal(partial(_atexit, obj=self)) + self.start() + + def on_checkpoint_creation( + self, hash_value, parent_hash, edge_tags, now_sec, edge_latency_sec, full_pathway_latency_sec, payload_size=0 + ): + # type: (int, int, List[str], float, float, float, Optional[int]) -> None + """ + on_checkpoint_creation is called every time a new checkpoint is created on a pathway. It records the + latency to the previous checkpoint in the pathway (edge latency), + and the latency from the very first element in the pathway (full_pathway_latency) + the pathway is hashed to reduce amount of information transmitted in headers. + + :param hash_value: hash of the pathway, it's a hash of the edge leading to this point, and the parent hash. 
+ :param parent_hash: hash of the previous step in the pathway + :param edge_tags: all tags associated with the edge leading to this step in the pathway + :param now_sec: current time + :param edge_latency_sec: latency of the direct edge between the previous point + in the pathway, and the current step + :param full_pathway_latency_sec: latency from the very start of the pathway. + :return: Nothing + """ + if not self._enabled: + return + + now_ns = int(now_sec * 1e9) + + with self._lock: + # Align the span into the corresponding stats bucket + bucket_time_ns = now_ns - (now_ns % self._bucket_size_ns) + aggr_key = (",".join(edge_tags), hash_value, parent_hash) + stats = self._buckets[bucket_time_ns].pathway_stats[aggr_key] + stats.full_pathway_latency.add(full_pathway_latency_sec) + stats.edge_latency.add(edge_latency_sec) + stats.payload_size.add(payload_size) + self._buckets[bucket_time_ns].pathway_stats[aggr_key] = stats + + def track_kafka_produce(self, topic, partition, offset, now_sec): + now_ns = int(now_sec * 1e9) + key = PartitionKey(topic, partition) + with self._lock: + bucket_time_ns = now_ns - (now_ns % self._bucket_size_ns) + self._buckets[bucket_time_ns].latest_produce_offsets[key] = max( + offset, self._buckets[bucket_time_ns].latest_produce_offsets[key] + ) + + def track_kafka_commit(self, group, topic, partition, offset, now_sec): + now_ns = int(now_sec * 1e9) + key = ConsumerPartitionKey(group, topic, partition) + with self._lock: + bucket_time_ns = now_ns - (now_ns % self._bucket_size_ns) + self._buckets[bucket_time_ns].latest_commit_offsets[key] = max( + offset, self._buckets[bucket_time_ns].latest_commit_offsets[key] + ) + + def _serialize_buckets(self): + # type: () -> List[Dict] + """Serialize and update the buckets.""" + serialized_buckets = [] + serialized_bucket_keys = [] + for bucket_time_ns, bucket in self._buckets.items(): + bucket_aggr_stats = [] + backlogs = [] + serialized_bucket_keys.append(bucket_time_ns) + + for aggr_key, stat_aggr in bucket.pathway_stats.items(): + edge_tags, hash_value, parent_hash = aggr_key + serialized_bucket = { + "EdgeTags": [compat.ensure_text(tag) for tag in edge_tags.split(",")], + "Hash": hash_value, + "ParentHash": parent_hash, + "PathwayLatency": DDSketchProto.to_proto(stat_aggr.full_pathway_latency).SerializeToString(), + "EdgeLatency": DDSketchProto.to_proto(stat_aggr.edge_latency).SerializeToString(), + } + bucket_aggr_stats.append(serialized_bucket) + for consumer_key, offset in bucket.latest_commit_offsets.items(): + backlogs.append( + { + "Tags": [ + "type:kafka_commit", + "consumer_group:" + consumer_key.group, + "topic:" + consumer_key.topic, + "partition:" + str(consumer_key.partition), + ], + "Value": offset, + } + ) + for producer_key, offset in bucket.latest_produce_offsets.items(): + backlogs.append( + { + "Tags": [ + "type:kafka_produce", + "topic:" + producer_key.topic, + "partition:" + str(producer_key.partition), + ], + "Value": offset, + } + ) + serialized_buckets.append( + { + "Start": bucket_time_ns, + "Duration": self._bucket_size_ns, + "Stats": bucket_aggr_stats, + "Backlogs": backlogs, + } + ) + + # Clear out buckets that have been serialized + for key in serialized_bucket_keys: + del self._buckets[key] + + return serialized_buckets + + def _flush_stats(self, payload): + # type: (bytes) -> None + try: + conn = get_connection(self._agent_url, self._timeout) + conn.request("POST", self._endpoint, payload, self._headers) + resp = get_connection_response(conn) + except Exception: + log.error("failed to 
submit pathway stats to the Datadog agent at %s", self._agent_endpoint, exc_info=True) + raise + else: + if resp.status == 404: + log.error("Datadog agent does not support data streams monitoring. Upgrade to 7.34+") + return + elif resp.status >= 400: + log.error( + "failed to send data stream stats payload, %s (%s) (%s) response from Datadog agent at %s", + resp.status, + resp.reason, + resp.read(), + self._agent_endpoint, + ) + else: + log.debug("sent %s to %s", _human_size(len(payload)), self._agent_endpoint) + + def periodic(self): + # type: () -> None + + with self._lock: + serialized_stats = self._serialize_buckets() + + if not serialized_stats: + log.debug("No data streams reported. Skipping flushing.") + return + raw_payload = { + "Service": self._service, + "TracerVersion": ddtrace.__version__, + "Lang": "python", + "Stats": serialized_stats, + "Hostname": self._hostname, + } # type: Dict[str, Union[List[Dict], str]] + if config.env: + raw_payload["Env"] = compat.ensure_text(config.env) + if config.version: + raw_payload["Version"] = compat.ensure_text(config.version) + + payload = packb(raw_payload) + compressed = gzip_compress(payload) + try: + self._flush_stats_with_backoff(compressed) + except Exception: + log.error("retry limit exceeded submitting pathway stats to the Datadog agent at %s", self._agent_endpoint) + + def shutdown(self, timeout): + # type: (Optional[float]) -> None + self.periodic() + self.stop(timeout) + + def decode_pathway(self, data): + # type: (bytes) -> DataStreamsCtx + try: + hash_value = struct.unpack("<Q", data[:8])[0] + data = data[8:] + pathway_start_ms, data = decode_var_int_64(data) + current_edge_start_ms, data = decode_var_int_64(data) + ctx = DataStreamsCtx(self, hash_value, float(pathway_start_ms) / 1e3, float(current_edge_start_ms) / 1e3) + self._current_context.value = ctx + return ctx + except (EOFError, TypeError): + return self.new_pathway() + + def decode_pathway_b64(self, data): + # type: (Optional[str]) -> DataStreamsCtx + if not data: + return self.new_pathway() + binary_pathway = data.encode("utf-8") + encoded_pathway = base64.b64decode(binary_pathway) + data_streams_context = self.decode_pathway(encoded_pathway) + return data_streams_context + + def new_pathway(self, now_sec=None): + """ + type: (Optional[int]) -> DataStreamsCtx + :param now_sec: optional start time of this path. Use for services like Kinesis which + we aren't getting path information for.
+ """ + + if not now_sec: + now_sec = time.time() + ctx = DataStreamsCtx(self, 0, now_sec, now_sec) + return ctx + + def set_checkpoint(self, tags, now_sec=None, payload_size=0, span=None): + """ + type: (List[str], Optional[int], Optional[int]) -> DataStreamsCtx + :param tags: a list of strings identifying the pathway and direction + :param now_sec: The time in seconds to count as "now" when computing latencies + :param payload_size: The size of the payload being sent in bytes + """ + + if not now_sec: + now_sec = time.time() + if hasattr(self._current_context, "value"): + ctx = self._current_context.value + else: + ctx = self.new_pathway() + self._current_context.value = ctx + if "direction:out" in tags: + # Add the header for this now, as the callee doesn't have access + # when producing + payload_size += len(ctx.encode()) + payload_size += len(PROPAGATION_KEY) + ctx.set_checkpoint(tags, now_sec=now_sec, payload_size=payload_size, span=span) + return ctx + + +class DataStreamsCtx: + def __init__(self, processor, hash_value, pathway_start_sec, current_edge_start_sec): + # type: (DataStreamsProcessor, int, float, float) -> None + self.processor = processor + self.pathway_start_sec = pathway_start_sec + self.current_edge_start_sec = current_edge_start_sec + self.hash = hash_value + self.service = compat.ensure_text(config._get_service(DEFAULT_SERVICE_NAME)) + self.env = compat.ensure_text(config.env or "none") + # loop detection logic + self.previous_direction = "" + self.closest_opposite_direction_hash = 0 + self.closest_opposite_direction_edge_start = current_edge_start_sec + + def encode(self): + # type: () -> bytes + return ( + struct.pack("<Q", self.hash) + + encode_var_int_64(int(self.pathway_start_sec * 1e3)) + + encode_var_int_64(int(self.current_edge_start_sec * 1e3)) + ) + + def encode_b64(self): + # type: () -> str + encoded_pathway = self.encode() + binary_pathway = base64.b64encode(encoded_pathway) + data_streams_context = binary_pathway.decode("utf-8") + return data_streams_context + + def _compute_hash(self, tags, parent_hash): + def get_bytes(s): + return bytes(s, encoding="utf-8") + + b = get_bytes(self.service) + get_bytes(self.env) + for t in tags: + b += get_bytes(t) + node_hash = fnv1_64(b) + return fnv1_64(struct.pack("<Q", node_hash) + struct.pack("<Q", parent_hash)) + + def set_checkpoint(self, tags, now_sec=None, edge_start_sec_override=None, pathway_start_sec_override=None, payload_size=0, span=None): + """ + type: (...) -> None + + :param tags: a list of tags identifying the pathway and direction + :param now_sec: The time in seconds to count as "now" when computing latencies + :param edge_start_sec_override: Use this to override the starting time of an edge + :param pathway_start_sec_override: Use this to override the starting time of a pathway + """ + if not now_sec: + now_sec = time.time() + tags = sorted(tags) + direction = "" + for t in tags: + if t.startswith("direction:"): + direction = t + break + if direction == self.previous_direction: + self.hash = self.closest_opposite_direction_hash + if self.hash == 0: + # if the closest hash from opposite direction is 0, that means we produce in a loop, without consuming + # in that case, we don't want the pathway to be longer and longer, but we want to restart a new pathway.
+ self.current_edge_start_sec = now_sec + self.pathway_start_sec = now_sec + else: + self.current_edge_start_sec = self.closest_opposite_direction_edge_start + else: + self.previous_direction = direction + self.closest_opposite_direction_hash = self.hash + self.closest_opposite_direction_edge_start = now_sec + + if edge_start_sec_override: + self.current_edge_start_sec = edge_start_sec_override + + if pathway_start_sec_override: + self.pathway_start_sec = pathway_start_sec_override + + parent_hash = self.hash + hash_value = self._compute_hash(tags, parent_hash) + if span: + span.set_tag_str("pathway.hash", str(hash_value)) + edge_latency_sec = now_sec - self.current_edge_start_sec + pathway_latency_sec = now_sec - self.pathway_start_sec + self.hash = hash_value + self.current_edge_start_sec = now_sec + self.processor.on_checkpoint_creation( + hash_value, parent_hash, tags, now_sec, edge_latency_sec, pathway_latency_sec, payload_size=payload_size + ) + + +def _atexit(obj=None): + try: + # Data streams tries to flush data on shutdown. + # Adding a try except here to ensure we don't crash the application if the agent is killed before + # the application for example. + obj.shutdown(SHUTDOWN_TIMEOUT) + except Exception as e: + if config._data_streams_enabled: + log.warning("Failed to shutdown data streams processor: %s", repr(e)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/utils.py new file mode 100644 index 0000000..336a116 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/datastreams/utils.py @@ -0,0 +1,18 @@ +def _calculate_byte_size(data): + if isinstance(data, str): + # We encode here to handle non-ascii characters + # If there are non-unicode characters, we replace + # with a single character/byte + return len(data.encode("utf-8", errors="replace")) + + if isinstance(data, bytes): + return len(data) + + if isinstance(data, dict): + total = 0 + for k, v in data.items(): + total += _calculate_byte_size(k) + total += _calculate_byte_size(v) + return total + + return 0 # Return 0 to avoid breaking calculations if its a type we don't know diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/debug.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/debug.py new file mode 100644 index 0000000..55bf2b2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/debug.py @@ -0,0 +1,268 @@ +import datetime +import logging +import os +import platform +import re +import sys +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Union # noqa:F401 + +import ddtrace +from ddtrace.internal import agent +from ddtrace.internal.packages import get_distributions +from ddtrace.internal.utils.cache import callonce +from ddtrace.internal.writer import AgentWriter +from ddtrace.internal.writer import LogWriter +from ddtrace.sampler import DatadogSampler +from ddtrace.settings.asm import config as asm_config + +from .logger import get_logger + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace import Tracer # noqa:F401 + + +logger = get_logger(__name__) + +# The architecture function spawns the file subprocess on the interpreter +# executable. We make sure we call this once and cache the result. 
+architecture = callonce(lambda: platform.architecture()) + + +def in_venv(): + # type: () -> bool + # Works with both venv and virtualenv + # https://stackoverflow.com/a/42580137 + return ( + "VIRTUAL_ENV" in os.environ + or hasattr(sys, "real_prefix") + or (hasattr(sys, "base_prefix") and sys.base_prefix != sys.prefix) + ) + + +def tags_to_str(tags): + # type: (Dict[str, Any]) -> str + # Turn a dict of tags to a string "k1:v1,k2:v2,..." + return ",".join(["%s:%s" % (k, v) for k, v in tags.items()]) + + +def collect(tracer): + # type: (Tracer) -> Dict[str, Any] + """Collect system and library information into a serializable dict.""" + + from ddtrace.internal.runtime.runtime_metrics import RuntimeWorker + + if isinstance(tracer._writer, LogWriter): + agent_url = "AGENTLESS" + agent_error = None + elif isinstance(tracer._writer, AgentWriter): + writer = tracer._writer + agent_url = writer.agent_url + try: + writer.write([]) + writer.flush_queue(raise_exc=True) + except Exception as e: + agent_error = "Agent not reachable at %s. Exception raised: %s" % (agent_url, str(e)) + else: + agent_error = None + else: + agent_url = "CUSTOM" + agent_error = None + + sampler_rules = None + if isinstance(tracer._sampler, DatadogSampler): + sampler_rules = [str(rule) for rule in tracer._sampler.rules] + + is_venv = in_venv() + + packages_available = {p.name: p.version for p in get_distributions()} + integration_configs = {} # type: Dict[str, Union[Dict[str, Any], str]] + for module, enabled in ddtrace._monkey.PATCH_MODULES.items(): + # TODO: this check doesn't work in all cases... we need a mapping + # between the module and the library name. + module_available = module in packages_available + module_instrumented = module in ddtrace._monkey._PATCHED_MODULES + module_imported = module in sys.modules + + if enabled: + # Note that integration configs aren't added until the integration + # module is imported. This typically occurs as a side-effect of + # patch(). + # This also doesn't load work in all cases since we don't always + # name the configuration entry the same as the integration module + # name :/ + config = ddtrace.config._config.get(module, "N/A") + else: + config = None + + if module_available: + integration_configs[module] = dict( + enabled=enabled, + instrumented=module_instrumented, + module_available=module_available, + module_version=packages_available[module], + module_imported=module_imported, + config=config, + ) + else: + # Use N/A here to avoid the additional clutter of an entire + # config dictionary for a module that isn't available. + integration_configs[module] = "N/A" + + pip_version = packages_available.get("pip", "N/A") + + from ddtrace.tracer import log + + return dict( + # Timestamp UTC ISO 8601 + date=datetime.datetime.utcnow().isoformat(), + # eg. "Linux", "Darwin" + os_name=platform.system(), + # eg. 
12.5.0 + os_version=platform.release(), + is_64_bit=sys.maxsize > 2**32, + architecture=architecture()[0], + vm=platform.python_implementation(), + version=ddtrace.__version__, + lang="python", + lang_version=platform.python_version(), + pip_version=pip_version, + in_virtual_env=is_venv, + agent_url=agent_url, + agent_error=agent_error, + statsd_url=agent.get_stats_url(), + env=ddtrace.config.env or "", + is_global_tracer=tracer == ddtrace.tracer, + enabled_env_setting=os.getenv("DATADOG_TRACE_ENABLED"), + tracer_enabled=tracer.enabled, + sampler_type=type(tracer._sampler).__name__ if tracer._sampler else "N/A", + priority_sampler_type="N/A", + sampler_rules=sampler_rules, + service=ddtrace.config.service or "", + debug=log.isEnabledFor(logging.DEBUG), + enabled_cli="ddtrace" in os.getenv("PYTHONPATH", ""), + analytics_enabled=ddtrace.config.analytics_enabled, + log_injection_enabled=ddtrace.config.logs_injection, + health_metrics_enabled=ddtrace.config.health_metrics_enabled, + runtime_metrics_enabled=RuntimeWorker.enabled, + dd_version=ddtrace.config.version or "", + priority_sampling_enabled=ddtrace.config._priority_sampling, + global_tags=os.getenv("DD_TAGS", ""), + tracer_tags=tags_to_str(tracer._tags), + integrations=integration_configs, + partial_flush_enabled=tracer._partial_flush_enabled, + partial_flush_min_spans=tracer._partial_flush_min_spans, + asm_enabled=asm_config._asm_enabled, + iast_enabled=asm_config._iast_enabled, + waf_timeout=asm_config._waf_timeout, + remote_config_enabled=ddtrace.config._remote_config_enabled, + ) + + +def pretty_collect(tracer, color=True): + class bcolors: + HEADER = "\033[95m" + OKBLUE = "\033[94m" + OKCYAN = "\033[96m" + OKGREEN = "\033[92m" + WARNING = "\033[93m" + FAIL = "\033[91m" + ENDC = "\033[0m" + BOLD = "\033[1m" + + info = collect(tracer) + + info_pretty = """{blue}{bold}Tracer Configurations:{end} + Tracer enabled: {tracer_enabled} + Application Security enabled: {appsec_enabled} + Remote Configuration enabled: {remote_config_enabled} + IAST enabled (experimental): {iast_enabled} + Debug logging: {debug} + Writing traces to: {agent_url} + Agent error: {agent_error} + App Analytics enabled(deprecated): {analytics_enabled} + Log injection enabled: {log_injection_enabled} + Health metrics enabled: {health_metrics_enabled} + Priority sampling enabled: {priority_sampling_enabled} + Partial flushing enabled: {partial_flush_enabled} + Partial flush minimum number of spans: {partial_flush_min_spans} + WAF timeout: {waf_timeout} msecs + {green}{bold}Tagging:{end} + DD Service: {service} + DD Env: {env} + DD Version: {dd_version} + Global Tags: {global_tags} + Tracer Tags: {tracer_tags}""".format( + tracer_enabled=info.get("tracer_enabled"), + appsec_enabled=info.get("asm_enabled"), + remote_config_enabled=info.get("remote_config_enabled"), + iast_enabled=info.get("iast_enabled"), + debug=info.get("debug"), + agent_url=info.get("agent_url") or "Not writing at the moment, is your tracer running?", + agent_error=info.get("agent_error") or "None", + analytics_enabled=info.get("analytics_enabled"), + log_injection_enabled=info.get("log_injection_enabled"), + health_metrics_enabled=info.get("health_metrics_enabled"), + priority_sampling_enabled=info.get("priority_sampling_enabled"), + partial_flush_enabled=info.get("partial_flush_enabled"), + partial_flush_min_spans=info.get("partial_flush_min_spans") or "Not set", + service=info.get("service") or "None", + env=info.get("env") or "None", + dd_version=info.get("dd_version") or "None", + 
global_tags=info.get("global_tags") or "None", + waf_timeout=info.get("waf_timeout"), + tracer_tags=info.get("tracer_tags") or "None", + blue=bcolors.OKBLUE, + green=bcolors.OKGREEN, + bold=bcolors.BOLD, + end=bcolors.ENDC, + ) + + summary = "{0}{1}Summary{2}".format(bcolors.OKCYAN, bcolors.BOLD, bcolors.ENDC) + + if info.get("agent_error"): + summary += ( + "\n\n{fail}ERROR: It looks like you have an agent error: '{agent_error}'\n If you're experiencing" + " a connection error, please make sure you've followed the setup for your particular environment so that " + "the tracer and Datadog agent are configured properly to connect, and that the Datadog agent is running: " + "https://ddtrace.readthedocs.io/en/stable/troubleshooting.html#failed-to-send-traces-connectionrefused" + "error" + "\nIf your issue is not a connection error then please reach out to support for further assistance:" + " https://docs.datadoghq.com/help/{end}" + ).format(fail=bcolors.FAIL, agent_error=info.get("agent_error"), end=bcolors.ENDC) + + if not info.get("service"): + summary += ( + "\n\n{warning}WARNING SERVICE NOT SET: It is recommended that a service tag be set for all traced" + " applications. For more information please see" + " https://ddtrace.readthedocs.io/en/stable/troubleshooting.html{end}" + ).format(warning=bcolors.WARNING, end=bcolors.ENDC) + + if not info.get("env"): + summary += ( + "\n\n{warning}WARNING ENV NOT SET: It is recommended that an env tag be set for all traced" + " applications. For more information please see " + "https://ddtrace.readthedocs.io/en/stable/troubleshooting.html{end}" + ).format(warning=bcolors.WARNING, end=bcolors.ENDC) + + if not info.get("dd_version"): + summary += ( + "\n\n{warning}WARNING VERSION NOT SET: It is recommended that a version tag be set for all traced" + " applications. 
For more information please see" + " https://ddtrace.readthedocs.io/en/stable/troubleshooting.html{end}" + ).format(warning=bcolors.WARNING, end=bcolors.ENDC) + + info_pretty += "\n\n" + summary + + if color is False: + return escape_ansi(info_pretty) + + return info_pretty + + +def escape_ansi(line): + ansi_escape = re.compile(r"(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]") + return ansi_escape.sub("", line) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/dogstatsd.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/dogstatsd.py new file mode 100644 index 0000000..8c966c4 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/dogstatsd.py @@ -0,0 +1,29 @@ +from typing import List +from typing import Optional + +from ddtrace.internal.compat import parse +from ddtrace.vendor.dogstatsd import DogStatsd +from ddtrace.vendor.dogstatsd import base + + +def get_dogstatsd_client(url: str, namespace: Optional[str] = None, tags: Optional[List[str]] = None) -> DogStatsd: + # url can be either of the form `udp://:` or `unix://` + # also support without url scheme included + if url.startswith("/"): + url = "unix://" + url + elif "://" not in url: + url = "udp://" + url + + parsed = parse.urlparse(url) + + if parsed.scheme == "unix": + return DogStatsd(socket_path=parsed.path, namespace=namespace, constant_tags=tags) + elif parsed.scheme == "udp": + return DogStatsd( + host=parsed.hostname or "", + port=base.DEFAULT_PORT if parsed.port is None else parsed.port, + namespace=namespace, + constant_tags=tags, + ) + + raise ValueError("Unknown scheme `%s` for DogStatsD URL `{}`".format(parsed.scheme)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/encoding.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/encoding.py new file mode 100644 index 0000000..1d5e6ea --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/encoding.py @@ -0,0 +1,150 @@ +import json +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 + +from ._encoding import ListStringTable +from ._encoding import MsgpackEncoderV03 +from ._encoding import MsgpackEncoderV05 +from .compat import ensure_text +from .logger import get_logger + + +__all__ = ["MsgpackEncoderV03", "MsgpackEncoderV05", "ListStringTable", "MSGPACK_ENCODERS"] + + +if TYPE_CHECKING: # pragma: no cover + from ..span import Span # noqa:F401 + + +log = get_logger(__name__) + + +class _EncoderBase(object): + """ + Encoder interface that provides the logic to encode traces and service. + """ + + def encode_traces(self, traces): + # type: (List[List[Span]]) -> str + """ + Encodes a list of traces, expecting a list of items where each items + is a list of spans. Before dumping the string in a serialized format all + traces are normalized according to the encoding format. The trace + nesting is not changed. + + :param traces: A list of traces that should be serialized + """ + raise NotImplementedError() + + def encode(self, obj): + # type: (List[List[Any]]) -> str + """ + Defines the underlying format used during traces or services encoding. + This method must be implemented and should only be used by the internal + functions. 
+ """ + raise NotImplementedError() + + @staticmethod + def _span_to_dict(span): + # type: (Span) -> Dict[str, Any] + d = { + "trace_id": span._trace_id_64bits, + "parent_id": span.parent_id, + "span_id": span.span_id, + "service": span.service, + "resource": span.resource, + "name": span.name, + "error": span.error, + } + + # a common mistake is to set the error field to a boolean instead of an + # int. let's special case that here, because it's sure to happen in + # customer code. + err = d.get("error") + if err and type(err) == bool: + d["error"] = 1 + + if span.start_ns: + d["start"] = span.start_ns + + if span.duration_ns: + d["duration"] = span.duration_ns + + if span._meta: + d["meta"] = span._meta + + if span._metrics: + d["metrics"] = span._metrics + + if span.span_type: + d["type"] = span.span_type + + return d + + +class JSONEncoder(json.JSONEncoder, _EncoderBase): + content_type = "application/json" + + def encode_traces(self, traces): + normalized_traces = [ + [JSONEncoder._normalize_span(JSONEncoder._span_to_dict(span)) for span in trace] for trace in traces + ] + return self.encode(normalized_traces) + + @staticmethod + def _normalize_span(span): + # Ensure all string attributes are actually strings and not bytes + # DEV: We are deferring meta/metrics to reduce any performance issues. + # Meta/metrics may still contain `bytes` and have encoding issues. + span["resource"] = JSONEncoder._normalize_str(span["resource"]) + span["name"] = JSONEncoder._normalize_str(span["name"]) + span["service"] = JSONEncoder._normalize_str(span["service"]) + return span + + @staticmethod + def _normalize_str(obj): + if obj is None: + return obj + + return ensure_text(obj, errors="backslashreplace") + + +class JSONEncoderV2(JSONEncoder): + """ + JSONEncoderV2 encodes traces to the new intake API format. + """ + + content_type = "application/json" + + def encode_traces(self, traces): + # type: (List[List[Span]]) -> str + normalized_traces = [[JSONEncoderV2._convert_span(span) for span in trace] for trace in traces] + return self.encode({"traces": normalized_traces}) + + @staticmethod + def _convert_span(span): + # type: (Span) -> Dict[str, Any] + sp = JSONEncoderV2._span_to_dict(span) + sp = JSONEncoderV2._normalize_span(sp) + sp["trace_id"] = JSONEncoderV2._encode_id_to_hex(sp.get("trace_id")) + sp["parent_id"] = JSONEncoderV2._encode_id_to_hex(sp.get("parent_id")) + sp["span_id"] = JSONEncoderV2._encode_id_to_hex(sp.get("span_id")) + return sp + + @staticmethod + def _encode_id_to_hex(dd_id): + # type: (Optional[int]) -> str + if not dd_id: + return "0000000000000000" + return "%0.16X" % int(dd_id) + + +MSGPACK_ENCODERS = { + "v0.3": MsgpackEncoderV03, + "v0.4": MsgpackEncoderV03, + "v0.5": MsgpackEncoderV05, +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/forksafe.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/forksafe.py new file mode 100644 index 0000000..8f1a284 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/forksafe.py @@ -0,0 +1,137 @@ +""" +An API to provide fork-safe functions. +""" +import logging +import os +import threading +import typing +import weakref + +from ddtrace.vendor import wrapt + + +log = logging.getLogger(__name__) + + +_registry = [] # type: typing.List[typing.Callable[[], None]] + +# Some integrations might require after-fork hooks to be executed after the +# actual call to os.fork with earlier versions of Python (<= 3.6), else issues +# like SIGSEGV will occur. 
Setting this to True will cause the after-fork hooks +# to be executed after the actual fork, which seems to prevent the issue. +_soft = True + + +def ddtrace_after_in_child(): + # type: () -> None + global _registry + + # DEV: we make a copy of the registry to prevent hook execution from + # introducing new hooks, potentially causing an infinite loop. + for hook in list(_registry): + try: + hook() + except Exception: + # Mimic the behaviour of Python's fork hooks. + log.exception("Exception ignored in forksafe hook %r", hook) + + +def register(after_in_child): + # type: (typing.Callable[[], None]) -> typing.Callable[[], None] + """Register a function to be called after fork in the child process. + + Note that ``after_in_child`` will be called in all child processes across + multiple forks unless it is unregistered. + """ + _registry.append(after_in_child) + return after_in_child + + +def unregister(after_in_child): + # type: (typing.Callable[[], None]) -> None + """Unregister a function to be called after fork in the child process. + + Raises `ValueError` if the function was not registered. + """ + _registry.remove(after_in_child) + + +if hasattr(os, "register_at_fork"): + os.register_at_fork(after_in_child=ddtrace_after_in_child) +elif hasattr(os, "fork"): + # DEV: This "should" be the correct way of implementing this, but it doesn't + # work if hooks create new threads. + _threading_after_fork = threading._after_fork # type: ignore + + def _after_fork(): + # type: () -> None + _threading_after_fork() + if not _soft: + ddtrace_after_in_child() + + threading._after_fork = _after_fork # type: ignore[attr-defined] + + # DEV: If hooks create threads, we should do this instead. + _os_fork = os.fork + + def _fork(): + pid = _os_fork() + if pid == 0 and _soft: + ddtrace_after_in_child() + return pid + + os.fork = _fork + +_resetable_objects = weakref.WeakSet() # type: weakref.WeakSet[ResetObject] + + +def _reset_objects(): + # type: (...) -> None + for obj in list(_resetable_objects): + try: + obj._reset_object() + except Exception: + log.exception("Exception ignored in object reset forksafe hook %r", obj) + + +register(_reset_objects) + + +_T = typing.TypeVar("_T") + + +class ResetObject(wrapt.ObjectProxy, typing.Generic[_T]): + """An object wrapper object that is fork-safe and resets itself after a fork. + + When a Python process forks, a Lock can be in any state, locked or not, by any thread. Since after fork all threads + are gone, Lock objects needs to be reset. CPython does this with an internal `threading._after_fork` function. We + use the same mechanism here. + + """ + + def __init__( + self, wrapped_class # type: typing.Type[_T] + ): + # type: (...) -> None + super(ResetObject, self).__init__(wrapped_class()) + self._self_wrapped_class = wrapped_class + _resetable_objects.add(self) + + def _reset_object(self): + # type: (...) -> None + self.__wrapped__ = self._self_wrapped_class() + + +def Lock(): + # type: (...) -> ResetObject[threading.Lock] + return ResetObject(threading.Lock) + + +def RLock(): + # type: (...) -> ResetObject[threading.RLock] + return ResetObject(threading.RLock) + + +def Event(): + # type: (...) 
-> ResetObject[threading.Event] + return ResetObject(threading.Event) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/gitmetadata.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/gitmetadata.py new file mode 100644 index 0000000..5bdeee7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/gitmetadata.py @@ -0,0 +1,154 @@ +import typing # noqa:F401 + +from envier import Env + +from ddtrace.ext.ci import _filter_sensitive_info +from ddtrace.ext.git import COMMIT_SHA +from ddtrace.ext.git import MAIN_PACKAGE +from ddtrace.ext.git import REPOSITORY_URL +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import formats + + +_GITMETADATA_TAGS = None # type: typing.Optional[typing.Tuple[str, str, str]] + +log = get_logger(__name__) + + +class GitMetadataConfig(Env): + __prefix__ = "dd" + + # DD_TRACE_GIT_METADATA_ENABLED + enabled = Env.var(bool, "trace_git_metadata_enabled", default=True) + + # DD_GIT_REPOSITORY_URL + repository_url = Env.var(str, "git_repository_url", default="") + + # DD_GIT_COMMIT_SHA + commit_sha = Env.var(str, "git_commit_sha", default="") + + # DD_MAIN_PACKAGE + main_package = Env.var(str, "main_package", default="") + + # DD_TAGS + tags = Env.var(str, "tags", default="") + + +def _get_tags_from_env(config): + # type: (GitMetadataConfig) -> typing.Tuple[str, str, str] + """ + Get git metadata from environment variables. + Returns tuple (repository_url, commit_sha, main_package) + """ + repository_url = config.repository_url + commit_sha = config.commit_sha + main_package = config.main_package + + # Previously, the repository URL and commit SHA were derived from the DD_TAGS environment variable. + # This approach was for backward compatibility before the introduction of DD_GIT_REPOSITORY_URL + # and DD_GIT_COMMIT_SHA environment variables. 
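+ # For example, assuming the usual values of the REPOSITORY_URL and COMMIT_SHA constants
+ # imported above, an environment that only sets
+ # DD_TAGS="git.repository_url:https://github.com/org/repo,git.commit.sha:abc1234"
+ # still yields both values through the parse_tags_str fallback below.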
+ tags = formats.parse_tags_str(config.tags) + if not repository_url: + repository_url = tags.get(REPOSITORY_URL, "") + if not commit_sha: + commit_sha = tags.get(COMMIT_SHA, "") + filtered_git_url = _filter_sensitive_info(repository_url) + if type(filtered_git_url) != str: + return "", commit_sha, main_package + return filtered_git_url, commit_sha, main_package + + +def _get_tags_from_package(main_package): + # type: (str) -> typing.Tuple[str, str] + """ + Extracts git metadata from python package's medatada field Project-URL: + e.g: Project-URL: source_code_link, https://github.com/user/repo#gitcommitsha&someoptions + Returns tuple (repository_url, commit_sha) + """ + if not main_package: + return "", "" + try: + try: + import importlib.metadata as importlib_metadata + except ImportError: + import importlib_metadata # type: ignore[no-redef] + + source_code_link = "" + for val in importlib_metadata.metadata(main_package).get_all("Project-URL"): + capt_val = val.split(", ") + if len(capt_val) > 1 and capt_val[0] == "source_code_link": + source_code_link = capt_val[1].strip() + break + + if source_code_link and "#" in source_code_link: + repository_url, commit_sha = source_code_link.split("#") + commit_sha = commit_sha.split("&")[0] + filtered_git_url = _filter_sensitive_info(repository_url) + if type(filtered_git_url) != str: + return "", commit_sha + return filtered_git_url, commit_sha + return "", "" + except importlib_metadata.PackageNotFoundError: + return "", "" + + +def get_git_tags(): + # type: () -> typing.Tuple[str, str, str] + """ + Returns git metadata tags tuple (repository_url, commit_sha, main_package) + """ + try: + global _GITMETADATA_TAGS + if _GITMETADATA_TAGS is not None: + return _GITMETADATA_TAGS + + config = GitMetadataConfig() + + if config.enabled: + repository_url, commit_sha, main_package = _get_tags_from_env(config) + log.debug("git tags from env: %s %s %s", repository_url, commit_sha, main_package) + if main_package and (not repository_url or not commit_sha): + # trying to extract repo URL and/or commit sha from the main package + pkg_repository_url, pkg_commit_sha = _get_tags_from_package(main_package) + log.debug("git tags from package: %s %s", pkg_repository_url, pkg_commit_sha) + if not repository_url: + repository_url = pkg_repository_url + if not commit_sha: + commit_sha = pkg_commit_sha + + log.debug("git tags: %s %s", repository_url, commit_sha) + _GITMETADATA_TAGS = repository_url, commit_sha, main_package + else: + log.debug("git tags disabled") + _GITMETADATA_TAGS = ("", "", "") + return _GITMETADATA_TAGS + except Exception: + log.debug("git tags failed", exc_info=True) + return "", "", "" + + +def clean_tags(tags): + # type: (typing.Dict[str, str]) -> typing.Dict[str, str] + """ + Cleanup tags from git metadata + """ + tags.pop(REPOSITORY_URL, None) + tags.pop(COMMIT_SHA, None) + tags.pop(MAIN_PACKAGE, None) + + return tags + + +def add_tags(tags): + clean_tags(tags) + + repository_url, commit_sha, main_package = get_git_tags() + + if repository_url: + tags[REPOSITORY_URL] = repository_url + + if commit_sha: + tags[COMMIT_SHA] = commit_sha + + if main_package: + tags[MAIN_PACKAGE] = main_package diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/glob_matching.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/glob_matching.py new file mode 100644 index 0000000..8ac0b49 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/glob_matching.py @@ -0,0 +1,51 @@ +from .utils.cache import cachedmethod + + +class 
GlobMatcher(object): + """This is a backtracking implementation of the glob matching algorithm. + The glob pattern language supports `*` as a multiple character wildcard which includes matches on `""` + and `?` as a single character wildcard, but no escape sequences. + The match method will be cached for quicker matching and is in a class to keep it from being global. + """ + + def __init__(self, pattern): + # type: (str) -> None + self.pattern = pattern + + @cachedmethod() + def match(self, subject): + # type: (str) -> bool + pattern = self.pattern + px = 0 # [p]attern inde[x] + sx = 0 # [s]ubject inde[x] + nextPx = 0 + nextSx = 0 + + while px < len(pattern) or sx < len(subject): + if px < len(pattern): + char = pattern[px] + + if char == "?": # single character wildcard + if sx < len(subject): + px += 1 + sx += 1 + continue + + elif char == "*": # zero-or-more-character wildcard + nextPx = px + nextSx = sx + 1 + px += 1 + continue + + elif sx < len(subject) and subject[sx] == char: # default normal character match + px += 1 + sx += 1 + continue + + if 0 < nextSx <= len(subject): + px = nextPx + sx = nextSx + continue + + return False + return True diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/hostname.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/hostname.py new file mode 100644 index 0000000..0a9e9cd --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/hostname.py @@ -0,0 +1,18 @@ +import os +import socket + + +_hostname = os.getenv("DD_HOSTNAME", "") # type: str + + +def get_hostname(): + # type: () -> str + global _hostname + if not _hostname: + _hostname = socket.gethostname() + return _hostname + + +def _reset(): + global _hostname + _hostname = "" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/http.py new file mode 100644 index 0000000..c8e6c77 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/http.py @@ -0,0 +1,36 @@ +from ddtrace.internal.compat import httplib +from ddtrace.internal.compat import parse + + +class BasePathMixin(httplib.HTTPConnection, object): + """ + Mixin for HTTPConnection to insert a base path to requested URLs + """ + + _base_path = "/" # type: str + + def putrequest(self, method, url, skip_host=False, skip_accept_encoding=False): + # type: (str, str, bool, bool) -> None + url = parse.urljoin(self._base_path, url) + return super(BasePathMixin, self).putrequest( + method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding + ) + + @classmethod + def with_base_path(cls, *args, **kwargs): + base_path = kwargs.pop("base_path", None) + obj = cls(*args, **kwargs) + obj._base_path = base_path + return obj + + +class HTTPConnection(BasePathMixin, httplib.HTTPConnection): + """ + httplib.HTTPConnection wrapper to add a base path to requested URLs + """ + + +class HTTPSConnection(BasePathMixin, httplib.HTTPSConnection): + """ + httplib.HTTPSConnection wrapper to add a base path to requested URLs + """ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/injection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/injection.py new file mode 100644 index 0000000..d6fa271 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/injection.py @@ -0,0 +1,211 @@ +from collections import deque +from types import FunctionType +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Deque # noqa:F401 +from typing import List # noqa:F401 +from typing import 
Tuple # noqa:F401 + +from bytecode import Bytecode + +from ddtrace.internal.assembly import Assembly + +from .compat import PYTHON_VERSION_INFO as PY + + +HookType = Callable[[Any], Any] +HookInfoType = Tuple[HookType, int, Any] + +HOOK_ARG_PREFIX = "_hook_arg" + + +class InvalidLine(Exception): + """ + Raised when trying to inject a hook on an invalid line, e.g. a comment or a blank line. + """ + + +INJECTION_ASSEMBLY = Assembly() +if PY >= (3, 12): + INJECTION_ASSEMBLY.parse( + r""" + push_null + load_const {hook} + load_const {arg} + call 1 + pop_top + """ + ) +elif PY >= (3, 11): + INJECTION_ASSEMBLY.parse( + r""" + push_null + load_const {hook} + load_const {arg} + precall 1 + call 1 + pop_top + """ + ) +else: + INJECTION_ASSEMBLY.parse( + r""" + load_const {hook} + load_const {arg} + call_function 1 + pop_top + """ + ) + +_INJECT_HOOK_OPCODES = [_.name for _ in INJECTION_ASSEMBLY] + + +def _inject_hook(code: Bytecode, hook: HookType, lineno: int, arg: Any) -> None: + """Inject a hook at the given line number inside an abstract code object. + + The hook is called with the given argument, which is also used as an + identifier for the hook itself. This should be kept in case the hook needs + to be removed. + """ + # DEV: In general there are no guarantees for bytecode to be "linear", + # meaning that a line number can occur multiple times. We need to find all + # occurrences and inject the hook at each of them. An example of when this + # happens is with finally blocks, which are duplicated at the end of the + # bytecode. + locs: Deque[int] = deque() + last_lineno = None + for i, instr in enumerate(code): + try: + if instr.lineno == last_lineno: + continue + last_lineno = instr.lineno + # Some lines might be implemented across multiple instruction + # offsets, and sometimes a NOP is used as a placeholder. We skip + # those to avoid duplicate injections. + if instr.lineno == lineno and instr.name != "NOP": + locs.appendleft(i) + except AttributeError: + # pseudo-instruction (e.g. label) + pass + + if not locs: + raise InvalidLine("Line %d does not exist or is either blank or a comment" % lineno) + + # DEV: This is the bytecode equivalent of + # >>> hook(arg) + # Additionally, we must discard the return value (top of the stack) to + # restore the stack to the state prior to the call. + for i in locs: + code[i:i] = INJECTION_ASSEMBLY.bind(dict(hook=hook, arg=arg), lineno=lineno) + + +_INJECT_HOOK_OPCODE_POS = 0 if PY < (3, 11) else 1 +_INJECT_ARG_OPCODE_POS = 1 if PY < (3, 11) else 2 + + +def _eject_hook(code: Bytecode, hook: HookType, line: int, arg: Any) -> None: + """Eject a hook from the abstract code object at the given line number. + + The hook is identified by its argument. This ensures that only the right + hook is ejected. + """ + locs: Deque[int] = deque() + for i, instr in enumerate(code): + try: + # DEV: We look at the expected opcode pattern to match the injected + # hook and we also test for the expected opcode arguments + if ( + instr.lineno == line + and code[i + _INJECT_HOOK_OPCODE_POS].arg == hook # bound methods don't like identity comparisons + and code[i + _INJECT_ARG_OPCODE_POS].arg is arg + and [code[_].name for _ in range(i, i + len(_INJECT_HOOK_OPCODES))] == _INJECT_HOOK_OPCODES + ): + locs.appendleft(i) + except AttributeError: + # pseudo-instruction (e.g. 
label) + pass + except IndexError: + pass + + if not locs: + raise InvalidLine("Line %d does not contain a hook" % line) + + for i in locs: + del code[i : i + len(_INJECT_HOOK_OPCODES)] + + +def inject_hooks(f: FunctionType, hooks: List[HookInfoType]) -> List[HookInfoType]: + """Bulk-inject a list of hooks into a function. + + Hooks are specified via a list of tuples, where each tuple contains the hook + itself, the line number and the identifying argument passed to the hook. + + Returns the list of hooks that failed to be injected. + """ + abstract_code = Bytecode.from_code(f.__code__) + + failed = [] + for hook, line, arg in hooks: + try: + _inject_hook(abstract_code, hook, line, arg) + except InvalidLine: + failed.append((hook, line, arg)) + + if len(failed) < len(hooks): + f.__code__ = abstract_code.to_code() + + return failed + + +def eject_hooks(f: FunctionType, hooks: List[HookInfoType]) -> List[HookInfoType]: + """Bulk-eject a list of hooks from a function. + + The hooks are specified via a list of tuples, where each tuple contains the + hook line number and the identifying argument. + + Returns the list of hooks that failed to be ejected. + """ + abstract_code = Bytecode.from_code(f.__code__) + + failed = [] + for hook, line, arg in hooks: + try: + _eject_hook(abstract_code, hook, line, arg) + except InvalidLine: + failed.append((hook, line, arg)) + + if len(failed) < len(hooks): + f.__code__ = abstract_code.to_code() + + return failed + + +def inject_hook(f: FunctionType, hook: HookType, line: int, arg: Any) -> FunctionType: + """Inject a hook into a function. + + The hook is injected at the given line number and called with the given + argument. The latter is also used as an identifier for the hook. This should + be kept in case the hook needs to be removed. + """ + abstract_code = Bytecode.from_code(f.__code__) + + _inject_hook(abstract_code, hook, line, arg) + + f.__code__ = abstract_code.to_code() + + return f + + +def eject_hook(f: FunctionType, hook: HookType, line: int, arg: Any) -> FunctionType: + """Eject a hook from a function. + + The hook is identified by its line number and the argument passed to the + hook. 
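+ ``InvalidLine`` is raised if the given line does not contain a matching hook.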
+ """ + abstract_code = Bytecode.from_code(f.__code__) + + _eject_hook(abstract_code, hook, line, arg) + + f.__code__ = abstract_code.to_code() + + return f diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/__init__.py new file mode 100644 index 0000000..89e3f5a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/__init__.py @@ -0,0 +1,4 @@ +from .writer import LLMObsWriter + + +__all__ = ["LLMObsWriter"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/__init__.py new file mode 100644 index 0000000..7e96ff6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/__init__.py @@ -0,0 +1,7 @@ +from .base import BaseLLMIntegration +from .bedrock import BedrockIntegration +from .langchain import LangChainIntegration +from .openai import OpenAIIntegration + + +__all__ = ["BaseLLMIntegration", "BedrockIntegration", "LangChainIntegration", "OpenAIIntegration"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/base.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/base.py new file mode 100644 index 0000000..d090a05 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/base.py @@ -0,0 +1,227 @@ +import abc +import os +import time +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 + +from ddtrace import Pin +from ddtrace import Span +from ddtrace import config +from ddtrace.constants import SPAN_MEASURED_KEY +from ddtrace.contrib.trace_utils import int_service +from ddtrace.internal.agent import get_stats_url +from ddtrace.internal.dogstatsd import get_dogstatsd_client +from ddtrace.internal.hostname import get_hostname +from ddtrace.internal.llmobs import LLMObsWriter +from ddtrace.internal.log_writer import V2LogWriter +from ddtrace.internal.utils.formats import asbool +from ddtrace.sampler import RateSampler +from ddtrace.settings import IntegrationConfig + + +class BaseLLMIntegration: + _integration_name = "baseLLM" + + def __init__(self, integration_config: IntegrationConfig) -> None: + # FIXME: this currently does not consider if the tracer is configured to + # use a different hostname. eg. tracer.configure(host="new-hostname") + # Ideally the metrics client should live on the tracer or some other core + # object that is strongly linked with configuration. + self._log_writer = None + self._llmobs_writer = None + self._statsd = None + self.integration_config = integration_config + self._span_pc_sampler = RateSampler(sample_rate=integration_config.span_prompt_completion_sample_rate) + + if self.metrics_enabled: + self._statsd = get_dogstatsd_client(get_stats_url(), namespace=self._integration_name) + if self.logs_enabled: + if not config._dd_api_key: + raise ValueError( + f"DD_API_KEY is required for sending logs from the {self._integration_name} integration. " + f"To use the {self._integration_name} integration without logs, " + f"set `DD_{self._integration_name.upper()}_LOGS_ENABLED=false`." 
+ ) + self._log_writer = V2LogWriter( + site=config._dd_site, + api_key=config._dd_api_key, + interval=float(os.getenv("_DD_%s_LOG_WRITER_INTERVAL" % self._integration_name.upper(), "1.0")), + timeout=float(os.getenv("_DD_%s_LOG_WRITER_TIMEOUT" % self._integration_name.upper(), "2.0")), + ) + self._log_pc_sampler = RateSampler(sample_rate=integration_config.log_prompt_completion_sample_rate) + self.start_log_writer() + + if self.llmobs_enabled: + if not config._dd_api_key: + raise ValueError( + f"DD_API_KEY is required for sending LLMObs data from the {self._integration_name} integration. " + f"To use the {self._integration_name} integration without LLMObs, " + f"set `DD_{self._integration_name.upper()}_LLMOBS_ENABLED=false`." + ) + if not config._dd_app_key: + raise ValueError( + f"DD_APP_KEY is required for sending LLMObs payloads from the {self._integration_name} integration." + f" To use the {self._integration_name} integration without LLMObs, " + f"set `DD_{self._integration_name.upper()}_LLMOBS_ENABLED=false`." + ) + self._llmobs_writer = LLMObsWriter( + site=config._dd_site, + api_key=config._dd_api_key, + app_key=config._dd_app_key, + interval=float(os.getenv("_DD_%s_LLM_WRITER_INTERVAL" % self._integration_name.upper(), "1.0")), + timeout=float(os.getenv("_DD_%s_LLM_WRITER_TIMEOUT" % self._integration_name.upper(), "2.0")), + ) + self._llmobs_pc_sampler = RateSampler(sample_rate=integration_config.llmobs_prompt_completion_sample_rate) + self.start_llm_writer() + + @property + def metrics_enabled(self) -> bool: + """Return whether submitting metrics is enabled for this integration, or global config if not set.""" + if hasattr(self.integration_config, "metrics_enabled"): + return asbool(self.integration_config.metrics_enabled) + return False + + @property + def logs_enabled(self) -> bool: + """Return whether submitting logs is enabled for this integration, or global config if not set.""" + if hasattr(self.integration_config, "logs_enabled"): + return asbool(self.integration_config.logs_enabled) + return False + + @property + def llmobs_enabled(self) -> bool: + """Return whether submitting llmobs payloads is enabled for this integration, or global config if not set.""" + if hasattr(self.integration_config, "llmobs_enabled"): + return asbool(self.integration_config.llmobs_enabled) + return False + + def is_pc_sampled_span(self, span: Span) -> bool: + if not span.sampled: + return False + return self._span_pc_sampler.sample(span) + + def is_pc_sampled_log(self, span: Span) -> bool: + if not self.logs_enabled or not span.sampled: + return False + return self._log_pc_sampler.sample(span) + + def is_pc_sampled_llmobs(self, span: Span) -> bool: + # Sampling of llmobs payloads is independent of spans, but we're using a RateSampler for consistency. + if not self.llmobs_enabled: + return False + return self._llmobs_pc_sampler.sample(span) + + def start_log_writer(self) -> None: + if not self.logs_enabled or self._log_writer is None: + return + self._log_writer.start() + + def start_llm_writer(self) -> None: + if not self.llmobs_enabled or self._llmobs_writer is None: + return + self._llmobs_writer.start() + + @abc.abstractmethod + def _set_base_span_tags(self, span: Span, **kwargs) -> None: + """Set default LLM span attributes when possible.""" + pass + + def trace(self, pin: Pin, operation_id: str, **kwargs: Dict[str, Any]) -> Span: + """ + Start a LLM request span. + Reuse the service of the application since we'll tag downstream request spans with the LLM name. 
+ Eventually those should also be internal service spans once peer.service is implemented. + """ + span = pin.tracer.trace( + "%s.request" % self._integration_name, + resource=operation_id, + service=int_service(pin, self.integration_config), + ) + # Enable trace metrics for these spans so users can see per-service openai usage in APM. + span.set_tag(SPAN_MEASURED_KEY) + self._set_base_span_tags(span, **kwargs) + return span + + @classmethod + @abc.abstractmethod + def _logs_tags(cls, span: Span) -> str: + """Generate ddtags from the corresponding span.""" + pass + + def log(self, span: Span, level: str, msg: str, attrs: Dict[str, Any]) -> None: + if not self.logs_enabled or self._log_writer is None: + return + tags = self._logs_tags(span) + log = { + "timestamp": time.time() * 1000, + "message": msg, + "hostname": get_hostname(), + "ddsource": self._integration_name, + "service": span.service or "", + "status": level, + "ddtags": tags, + } + if span is not None: + # FIXME: this is a temporary workaround until we figure out why 128 bit trace IDs are stored as decimals. + # log["dd.trace_id"] = str(span.trace_id) + log["dd.trace_id"] = "{:x}".format(span.trace_id) + log["dd.span_id"] = str(span.span_id) + log.update(attrs) + self._log_writer.enqueue(log) # type: ignore[arg-type] + + @classmethod + @abc.abstractmethod + def _metrics_tags(cls, span: Span) -> List[str]: + """Generate a list of metrics tags from a given span.""" + return [] + + def metric(self, span: Span, kind: str, name: str, val: Any, tags: Optional[List[str]] = None) -> None: + """Set a metric using the context from the given span.""" + if not self.metrics_enabled or self._statsd is None: + return + metric_tags = self._metrics_tags(span) + if tags: + metric_tags += tags + if kind == "dist": + self._statsd.distribution(name, val, tags=metric_tags) + elif kind == "incr": + self._statsd.increment(name, val, tags=metric_tags) + elif kind == "gauge": + self._statsd.gauge(name, val, tags=metric_tags) + else: + raise ValueError("Unexpected metric type %r" % kind) + + def trunc(self, text: str) -> str: + """Truncate the given text. + + Use to avoid attaching too much data to spans. + """ + if not text: + return text + text = text.replace("\n", "\\n").replace("\t", "\\t") + if len(text) > self.integration_config.span_char_limit: + text = text[: self.integration_config.span_char_limit] + "..." + return text + + @classmethod + @abc.abstractmethod + def _llmobs_tags(cls, span: Span) -> List[str]: + """Generate a list of llmobs tags from a given span.""" + return [] + + def llm_record(self, span: Span, attrs: Dict[str, Any], tags: Optional[List[str]] = None) -> None: + """Create a LLM record to send to the LLM Obs intake.""" + if not self.llmobs_enabled or self._llmobs_writer is None: + return + llmobs_tags = self._llmobs_tags(span) + if span is not None and span.sampled: + # FIXME: this is a temporary workaround until we figure out why 128 bit trace IDs are stored as decimals. 
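+ # The trace ID is therefore tagged as lower-case hex here, rather than the decimal form
+ # kept in the commented-out line below.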
+ llmobs_tags.insert(0, "dd.trace_id:{:x}".format(span.trace_id)) + # llmobs_tags.insert(0, "dd.trace_id:{}".format(span.trace_id)) + llmobs_tags.insert(1, "dd.span_id:{}".format(span.span_id)) + if tags: + llmobs_tags += tags + attrs["ddtags"] = llmobs_tags + self._llmobs_writer.enqueue(attrs) # type: ignore[arg-type] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/bedrock.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/bedrock.py new file mode 100644 index 0000000..ca24693 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/bedrock.py @@ -0,0 +1,80 @@ +import time +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +import uuid + +from ddtrace import Span +from ddtrace import config + +from .base import BaseLLMIntegration + + +class BedrockIntegration(BaseLLMIntegration): + _integration_name = "bedrock" + + @classmethod + def _llmobs_tags(cls, span: Span) -> List[str]: + tags = [ + "version:%s" % (config.version or ""), + "env:%s" % (config.env or ""), + "service:%s" % (span.service or ""), + "source:integration", + "model_name:%s" % (span.get_tag("bedrock.request.model") or ""), + "model_provider:%s" % (span.get_tag("bedrock.request.model_provider") or ""), + "error:%d" % span.error, + ] + err_type = span.get_tag("error.type") + if err_type: + tags.append("error_type:%s" % err_type) + return tags + + def generate_llm_record( + self, + span: Span, + formatted_response: Optional[Dict[str, Any]] = None, + prompt: Optional[str] = None, + err: bool = False, + ) -> None: + """Generate payloads for the LLM Obs API from a completion.""" + if not self.llmobs_enabled: + return + if err or formatted_response is None: + record = _llmobs_record(span, prompt) + record["id"] = str(uuid.uuid4()) + record["output"]["completions"] = [{"content": ""}] + record["output"]["errors"] = [span.get_tag("error.message")] + self.llm_record(span, record) + return + for i in range(len(formatted_response["text"])): + prompt_tokens = int(span.get_tag("bedrock.usage.prompt_tokens") or 0) + completion_tokens = int(span.get_tag("bedrock.usage.completion_tokens") or 0) + record = _llmobs_record(span, prompt) + record["id"] = span.get_tag("bedrock.response.id") + record["input"]["prompt_tokens"] = [prompt_tokens] + record["output"]["completions"] = [{"content": formatted_response["text"][i]}] + record["output"]["completion_tokens"] = [completion_tokens] + record["output"]["total_tokens"] = [prompt_tokens + completion_tokens] + self.llm_record(span, record) + + +def _llmobs_record(span: Span, prompt: Optional[str]) -> Dict[str, Any]: + """LLMObs bedrock record template.""" + now = time.time() + record = { + "type": "completion", + "id": str(uuid.uuid4()), + "timestamp": int(span.start * 1000), + "model": span.get_tag("bedrock.request.model"), + "model_provider": span.get_tag("bedrock.request.model_provider"), + "input": { + "prompts": [prompt], + "temperature": float(span.get_tag("bedrock.request.temperature") or 0.0), + "max_tokens": int(span.get_tag("bedrock.request.max_tokens") or 0), + }, + "output": { + "durations": [now - span.start], + }, + } + return record diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/langchain.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/langchain.py new file mode 100644 index 0000000..0be471a --- /dev/null +++ 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/langchain.py @@ -0,0 +1,88 @@ +from typing import Any +from typing import Dict +from typing import List +from typing import Optional + +from ddtrace import Span +from ddtrace import config +from ddtrace.constants import ERROR_TYPE +from ddtrace.internal.llmobs.integrations import BaseLLMIntegration + + +API_KEY = "langchain.request.api_key" +MODEL = "langchain.request.model" +PROVIDER = "langchain.request.provider" +TOTAL_COST = "langchain.tokens.total_cost" +TYPE = "langchain.request.type" + + +class LangChainIntegration(BaseLLMIntegration): + _integration_name = "langchain" + + def _set_base_span_tags( # type: ignore[override] + self, + span: Span, + interface_type: str = "", + provider: Optional[str] = None, + model: Optional[str] = None, + api_key: Optional[str] = None, + ) -> None: + """Set base level tags that should be present on all LangChain spans (if they are not None).""" + span.set_tag_str(TYPE, interface_type) + if provider is not None: + span.set_tag_str(PROVIDER, provider) + if model is not None: + span.set_tag_str(MODEL, model) + if api_key is not None: + if len(api_key) >= 4: + span.set_tag_str(API_KEY, "...%s" % str(api_key[-4:])) + else: + span.set_tag_str(API_KEY, api_key) + + @classmethod + def _logs_tags(cls, span: Span) -> str: + api_key = span.get_tag(API_KEY) or "" + tags = "env:%s,version:%s,%s:%s,%s:%s,%s:%s,%s:%s" % ( # noqa: E501 + (config.env or ""), + (config.version or ""), + PROVIDER, + (span.get_tag(PROVIDER) or ""), + MODEL, + (span.get_tag(MODEL) or ""), + TYPE, + (span.get_tag(TYPE) or ""), + API_KEY, + api_key, + ) + return tags + + @classmethod + def _metrics_tags(cls, span: Span) -> List[str]: + provider = span.get_tag(PROVIDER) or "" + api_key = span.get_tag(API_KEY) or "" + tags = [ + "version:%s" % (config.version or ""), + "env:%s" % (config.env or ""), + "service:%s" % (span.service or ""), + "%s:%s" % (PROVIDER, provider), + "%s:%s" % (MODEL, span.get_tag(MODEL) or ""), + "%s:%s" % (TYPE, span.get_tag(TYPE) or ""), + "%s:%s" % (API_KEY, api_key), + "error:%d" % span.error, + ] + err_type = span.get_tag(ERROR_TYPE) + if err_type: + tags.append("%s:%s" % (ERROR_TYPE, err_type)) + return tags + + def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: + if not usage or self.metrics_enabled is False: + return + for token_type in ("prompt", "completion", "total"): + num_tokens = usage.get("token_usage", {}).get(token_type + "_tokens") + if not num_tokens: + continue + self.metric(span, "dist", "tokens.%s" % token_type, num_tokens) + total_cost = span.get_metric(TOTAL_COST) + if total_cost: + self.metric(span, "incr", "tokens.total_cost", total_cost) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/openai.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/openai.py new file mode 100644 index 0000000..42cde81 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/integrations/openai.py @@ -0,0 +1,215 @@ +import time +from typing import Any +from typing import Dict +from typing import List +from typing import Optional +from typing import Tuple +import uuid + +from ddtrace import Span +from ddtrace import config +from ddtrace.internal.constants import COMPONENT +from ddtrace.internal.llmobs.integrations.base import BaseLLMIntegration +from ddtrace.internal.utils.version import parse_version + + +class OpenAIIntegration(BaseLLMIntegration): + _integration_name = "openai" + + def __init__(self, 
integration_config, openai): + # FIXME: this currently does not consider if the tracer is configured to + # use a different hostname. eg. tracer.configure(host="new-hostname") + # Ideally the metrics client should live on the tracer or some other core + # object that is strongly linked with configuration. + super().__init__(integration_config) + self._openai = openai + self._user_api_key = None + self._client = None + if self._openai.api_key is not None: + self.user_api_key = self._openai.api_key + + @property + def user_api_key(self) -> Optional[str]: + """Get a representation of the user API key for tagging.""" + return self._user_api_key + + @user_api_key.setter + def user_api_key(self, value: str) -> None: + # Match the API key representation that OpenAI uses in their UI. + self._user_api_key = "sk-...%s" % value[-4:] + + def _set_base_span_tags(self, span: Span, **kwargs) -> None: + span.set_tag_str(COMPONENT, self.integration_config.integration_name) + if self._user_api_key is not None: + span.set_tag_str("openai.user.api_key", self._user_api_key) + + # Do these dynamically as openai users can set these at any point + # not necessarily before patch() time. + # organization_id is only returned by a few endpoints, grab it when we can. + if parse_version(self._openai.version.VERSION) >= (1, 0, 0): + source = self._client + base_attrs: Tuple[str, ...] = ("base_url", "organization") + else: + source = self._openai + base_attrs = ("api_base", "api_version", "api_type", "organization") + for attr in base_attrs: + v = getattr(source, attr, None) + if v is not None: + if attr == "organization": + span.set_tag_str("openai.organization.id", v or "") + else: + span.set_tag_str("openai.%s" % attr, str(v)) + + @classmethod + def _logs_tags(cls, span: Span) -> str: + tags = ( + "env:%s,version:%s,openai.request.endpoint:%s,openai.request.method:%s,openai.request.model:%s,openai.organization.name:%s," + "openai.user.api_key:%s" + % ( # noqa: E501 + (config.env or ""), + (config.version or ""), + (span.get_tag("openai.request.endpoint") or ""), + (span.get_tag("openai.request.method") or ""), + (span.get_tag("openai.request.model") or ""), + (span.get_tag("openai.organization.name") or ""), + (span.get_tag("openai.user.api_key") or ""), + ) + ) + return tags + + @classmethod + def _metrics_tags(cls, span: Span) -> List[str]: + tags = [ + "version:%s" % (config.version or ""), + "env:%s" % (config.env or ""), + "service:%s" % (span.service or ""), + "openai.request.model:%s" % (span.get_tag("openai.request.model") or ""), + "openai.request.endpoint:%s" % (span.get_tag("openai.request.endpoint") or ""), + "openai.request.method:%s" % (span.get_tag("openai.request.method") or ""), + "openai.organization.id:%s" % (span.get_tag("openai.organization.id") or ""), + "openai.organization.name:%s" % (span.get_tag("openai.organization.name") or ""), + "openai.user.api_key:%s" % (span.get_tag("openai.user.api_key") or ""), + "error:%d" % span.error, + ] + err_type = span.get_tag("error.type") + if err_type: + tags.append("error_type:%s" % err_type) + return tags + + @classmethod + def _llmobs_tags(cls, span: Span) -> List[str]: + tags = [ + "version:%s" % (config.version or ""), + "env:%s" % (config.env or ""), + "service:%s" % (span.service or ""), + "source:integration", + "model_name:%s" % (span.get_tag("openai.response.model") or span.get_tag("openai.request.model") or ""), + "model_provider:openai", + "error:%d" % span.error, + ] + err_type = span.get_tag("error.type") + if err_type: + 
tags.append("error_type:%s" % err_type) + return tags + + def record_usage(self, span: Span, usage: Dict[str, Any]) -> None: + if not usage or not self.metrics_enabled: + return + tags = ["openai.estimated:false"] + for token_type in ("prompt", "completion", "total"): + num_tokens = getattr(usage, token_type + "_tokens", None) + if not num_tokens: + continue + span.set_metric("openai.response.usage.%s_tokens" % token_type, num_tokens) + self.metric(span, "dist", "tokens.%s" % token_type, num_tokens, tags=tags) + + def generate_completion_llm_records(self, resp: Any, err: Any, span: Span, kwargs: Dict[str, Any]) -> None: + """Generate payloads for the LLM Obs API from a completion.""" + if not self.llmobs_enabled: + return + if err is not None: + attrs_dict = self._llmobs_record(span, kwargs, resp, err, "completion") + self.llm_record(span, attrs_dict) + return + n = kwargs.get("n", 1) + # Note: LLMObs ingest endpoint only accepts a 1:1 prompt-response mapping per record, + # so we need to deduplicate and send unique prompt-response records if n > 1. + for i in range(n): + unique_choices = resp.choices[i::n] + attrs_dict = self._llmobs_record(span, kwargs, resp, err, "completion") + attrs_dict["output"]["durations"] = [time.time() - span.start for _ in unique_choices] + attrs_dict["output"]["completions"] = [{"content": choice.text} for choice in unique_choices] + self.llm_record(span, attrs_dict) + + def generate_chat_llm_records(self, resp: Any, err: Any, span: Span, kwargs: Dict[str, Any]) -> None: + """Generate payloads for the LLM Obs API from a chat completion.""" + if not self.llmobs_enabled: + return + if err is not None: + attrs_dict = self._llmobs_record(span, kwargs, resp, err, "chat") + self.llm_record(span, attrs_dict) + return + # Note: LLMObs ingest endpoint only accepts a 1:1 prompt-response mapping per record, + # so we need to send unique prompt-response records if there are multiple responses (n > 1). 
+ for choice in resp.choices: + content = getattr(choice.message, "content", None) + if getattr(choice.message, "function_call", None): + content = choice.message.function_call.arguments + elif getattr(choice.message, "tool_calls", None): + content = choice.message.tool_calls.function.arguments + attrs_dict = self._llmobs_record(span, kwargs, resp, err, "chat") + attrs_dict["output"]["durations"] = [time.time() - span.start] + attrs_dict["output"]["completions"] = [{"content": str(content), "role": choice.message.role}] + self.llm_record(span, attrs_dict) + + def _llmobs_record( + self, span: Span, kwargs: Dict[str, Any], resp: Any, err: Any, record_type: str + ) -> Dict[str, Any]: + """LLMObs record template for OpenAI.""" + attrs_dict = { + "type": record_type, + "id": str(uuid.uuid4()), + "timestamp": int(span.start * 1000), + "model": span.get_tag("openai.request.model"), + "model_provider": "openai", + "input": { + "temperature": kwargs.get("temperature"), + "max_tokens": kwargs.get("max_tokens"), + }, + } + if record_type == "completion": + prompt = kwargs.get("prompt", "") + if isinstance(prompt, str): + prompt = [prompt] + attrs_dict["input"]["prompts"] = prompt # type: ignore[index] + elif record_type == "chat": + messages = kwargs.get("messages", []) + attrs_dict["input"]["messages"] = [ # type: ignore[index] + {"content": str(m.get("content", "")), "role": m.get("role", "")} for m in messages + ] + if err is not None: + attrs_dict["output"] = { + "completions": [{"content": ""}], + "error": [span.get_tag("error.message")], + "durations": [time.time() - span.start], + } + elif resp is not None: + choices = resp.choices + # OpenAI only returns the aggregate token count for the entire response if n>1. This means we can only + # provide a rough estimate of the number of tokens used for individual completions by taking the average. + prompt_tokens = int(resp.usage.prompt_tokens / len(choices)) + completion_tokens = int(resp.usage.completion_tokens / len(choices)) + attrs_dict["input"]["prompt_tokens"] = [prompt_tokens] # type: ignore[index] + attrs_dict.update( + { + "id": resp.id, + "model": resp.model or span.get_tag("openai.request.model"), + "output": { + "completion_tokens": [completion_tokens], + "total_tokens": [prompt_tokens + completion_tokens], + "rate_limit_requests": [span.get_metric("openai.organization.ratelimit.requests.limit")], + "rate_limit_tokens": [span.get_metric("openai.organization.ratelimit.tokens.limit")], + }, + } + ) + return attrs_dict diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/writer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/writer.py new file mode 100644 index 0000000..46b3b03 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/llmobs/writer.py @@ -0,0 +1,135 @@ +import atexit +import json +from typing import Dict +from typing import List +from typing import Union + + +# TypedDict was added to typing in python 3.8 +try: + from typing import TypedDict +except ImportError: + from typing_extensions import TypedDict + +from ddtrace.internal import forksafe +from ddtrace.internal.compat import get_connection_response +from ddtrace.internal.compat import httplib +from ddtrace.internal.logger import get_logger +from ddtrace.internal.periodic import PeriodicService + + +logger = get_logger(__name__) + + +class LLMObsEvent(TypedDict): + """ + Note: these attribute names match the corresponding entry in the JSON payload. 
+ """ + + timestamp: int + id: str + type: str + input: Dict[str, Union[float, int, List[str]]] + model: str + model_provider: str + ddtags: List[str] + output: Dict[str, List[Dict[str, str]]] + # Additional attributes can be specified on the event + # including dd.trace_id and dd.span_id to correlate a trace + + +class LLMObsWriter(PeriodicService): + """Writer to the Datadog LLMObs intake.""" + + def __init__(self, site, api_key, app_key, interval, timeout): + # type: (str, str, str, float, float) -> None + super(LLMObsWriter, self).__init__(interval=interval) + self._lock = forksafe.RLock() + self._buffer = [] # type: List[LLMObsEvent] + # match the API limit + self._buffer_limit = 1000 + self._timeout = timeout # type: float + self._api_key = api_key or "" # type: str + self._app_key = app_key or "" # type: str + self._endpoint = "/api/unstable/llm-obs/v1/records" # type: str + self._site = site # type: str + self._intake = "api.%s" % self._site # type: str + self._headers = { + "DD-API-KEY": self._api_key, + "DD-APPLICATION-KEY": self._app_key, + "Content-Type": "application/json", + } + + def start(self, *args, **kwargs): + super(LLMObsWriter, self).start() + logger.debug("started llmobs writer to %r", self._url) + atexit.register(self.on_shutdown) + + def enqueue(self, log): + # type: (LLMObsEvent) -> None + with self._lock: + if len(self._buffer) >= self._buffer_limit: + logger.warning("LLMobs record buffer full (limit is %d), dropping record", self._buffer_limit) + return + self._buffer.append(log) + + def on_shutdown(self): + # TODO: Once we submit to the public EVP endpoint which accepts record-level model/model_provider + # fields instead of a single model/model_provider for the whole payload, + # we can remove this loop and just send the whole buffer at once in one periodic() call + while self._buffer: + self.periodic() + + @property + def _url(self): + # type: () -> str + return "https://%s%s" % (self._intake, self._endpoint) + + def periodic(self): + # type: () -> None + with self._lock: + if not self._buffer: + return + # This is a workaround the fact that the record ingest API only accepts a single model/model_provider/tags + # per payload, so we default to sending one record per payload at a time. 
+ num_llm_records = 1 + llm_record = self._buffer.pop() + + model = llm_record.pop("model", None) # type: ignore[misc] + model_provider = llm_record.pop("model_provider", None) # type: ignore[misc] + + data = { + "data": { + "type": "records", + "attributes": { + "tags": llm_record.pop("ddtags") or [], # type: ignore[misc] + "model": model or "", + "model_provider": model_provider or "", + "records": [llm_record], + }, + } + } + try: + enc_llm_records = json.dumps(data) + except TypeError: + logger.error("failed to encode %d LLM records", num_llm_records, exc_info=True) + return + + conn = httplib.HTTPSConnection(self._intake, 443, timeout=self._timeout) + try: + conn.request("POST", self._endpoint, enc_llm_records, self._headers) + resp = get_connection_response(conn) + if resp.status >= 300: + logger.error( + "failed to send %d LLM records to %r, got response code %r, status: %r", + num_llm_records, + self._url, + resp.status, + resp.read(), + ) + else: + logger.debug("sent %d LLM records to %r", num_llm_records, self._url) + except Exception: + logger.error("failed to send %d LLM records to %r", num_llm_records, self._intake, exc_info=True) + finally: + conn.close() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/log_writer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/log_writer.py new file mode 100644 index 0000000..f624e24 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/log_writer.py @@ -0,0 +1,120 @@ +import atexit +import json +from typing import List # noqa:F401 + + +# TypedDict was added to typing in python 3.8 +try: + from typing import TypedDict # noqa:F401 +except ImportError: + from typing_extensions import TypedDict + +from ddtrace.internal import forksafe +from ddtrace.internal.compat import get_connection_response +from ddtrace.internal.compat import httplib +from ddtrace.internal.logger import get_logger +from ddtrace.internal.periodic import PeriodicService + + +logger = get_logger(__name__) + + +class V2LogEvent(TypedDict): + """ + Note: these attribute names match the corresponding entry in the JSON payload. + """ + + timestamp: int + message: str + ddtags: str + service: str + hostname: str + ddsource: str + status: str + # Additional attributes can be specified on the event + # including dd.trace_id and dd.span_id to correlate a trace + + +class V2LogWriter(PeriodicService): + """Writer to the Datadog log intake. 
+ + v2/logs: + - max payload size: 5MB + - max single log: 1MB + - max array size 1000 + + refs: + - https://docs.datadoghq.com/api/v2/logs/#send-logs + """ + + def __init__(self, site, api_key, interval, timeout): + # type: (str, str, float, float) -> None + super(V2LogWriter, self).__init__(interval=interval) + self._lock = forksafe.RLock() + self._buffer = [] # type: List[V2LogEvent] + # match the API limit + self._buffer_limit = 1000 + self._timeout = timeout # type: float + self._api_key = api_key # type: str + self._endpoint = "/api/v2/logs" # type: str + self._site = site # type: str + self._intake = "http-intake.logs.%s" % self._site # type: str + self._headers = { + "DD-API-KEY": self._api_key, + "Content-Type": "application/json", + } + logger.debug("started log writer to %r", self._url) + + def start(self, *args, **kwargs): + super(V2LogWriter, self).start() + atexit.register(self.on_shutdown) + + def enqueue(self, log): + # type: (V2LogEvent) -> None + with self._lock: + if len(self._buffer) >= self._buffer_limit: + logger.warning("log buffer full (limit is %d), dropping log", self._buffer_limit) + return + self._buffer.append(log) + + def on_shutdown(self): + self.periodic() + + @property + def _url(self): + # type: () -> str + return "https://%s%s" % (self._intake, self._endpoint) + + def periodic(self): + # type: () -> None + with self._lock: + if not self._buffer: + return + logs = self._buffer + self._buffer = [] + + num_logs = len(logs) + try: + enc_logs = json.dumps(logs) + except TypeError: + logger.error("failed to encode %d logs", num_logs, exc_info=True) + return + + conn = httplib.HTTPSConnection(self._intake, 443, timeout=self._timeout) + try: + conn.request("POST", self._endpoint, enc_logs, self._headers) + resp = get_connection_response(conn) + if resp.status >= 300: + logger.error( + "failed to send %d logs to %r, got response code %r, status: %r", + num_logs, + self._url, + resp.status, + resp.read(), + ) + else: + logger.debug("sent %d logs to %r", num_logs, self._url) + except Exception: + logger.error("failed to send %d logs to %r", num_logs, self._intake, exc_info=True) + finally: + conn.close() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/logger.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/logger.py new file mode 100644 index 0000000..b0d6601 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/logger.py @@ -0,0 +1,179 @@ +import collections +import logging +import os +import typing +from typing import cast # noqa:F401 + + +if typing.TYPE_CHECKING: + from typing import Any # noqa:F401 + from typing import DefaultDict # noqa:F401 + from typing import Tuple # noqa:F401 + + +def get_logger(name): + # type: (str) -> DDLogger + """ + Retrieve or create a ``DDLogger`` instance. + + This function mirrors the behavior of `logging.getLogger`. + + If no logger with the provided name has been fetched before then + a new one is created. + + If a previous logger has been created then it is returned. 
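+
+ Typical usage, as seen elsewhere in this package::
+
+ >>> from ddtrace.internal.logger import get_logger
+ >>> log = get_logger(__name__)
+ >>> log.debug("loaded forwarder configuration")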
+ + DEV: We do not want to mess with `logging.setLoggerClass()` + That will totally mess with the user's loggers, we want + just our own, selective loggers to be DDLoggers + + :param name: The name of the logger to fetch or create + :type name: str + :return: The logger instance + :rtype: ``DDLogger`` + """ + # DEV: `logging.Logger.manager` refers to the single root `logging.Manager` instance + # https://github.com/python/cpython/blob/48769a28ad6ef4183508951fa6a378531ace26a4/Lib/logging/__init__.py#L1824-L1826 # noqa:E501 + manager = logging.Logger.manager + + # If the logger does not exist yet, create it + # DEV: `Manager.loggerDict` is a dict mapping logger name to logger + # DEV: This is a simplified version of `logging.Manager.getLogger` + # https://github.com/python/cpython/blob/48769a28ad6ef4183508951fa6a378531ace26a4/Lib/logging/__init__.py#L1221-L1253 # noqa:E501 + # DEV: _fixupParents could be adding a placeholder, we want to replace it if that's the case + if name in manager.loggerDict: + logger = manager.loggerDict[name] + if isinstance(manager.loggerDict[name], logging.PlaceHolder): + placeholder = logger + logger = DDLogger(name=name) + manager.loggerDict[name] = logger + # DEV: `_fixupChildren` and `_fixupParents` have been around for awhile, + # DEV: but add the `hasattr` guard... just in case. + if hasattr(manager, "_fixupChildren"): + manager._fixupChildren(placeholder, logger) + if hasattr(manager, "_fixupParents"): + manager._fixupParents(logger) + else: + logger = DDLogger(name=name) + manager.loggerDict[name] = logger + if hasattr(manager, "_fixupParents"): + manager._fixupParents(logger) + + # Return our logger + return cast(DDLogger, logger) + + +def hasHandlers(self): + # type: (DDLogger) -> bool + """ + See if this logger has any handlers configured. + Loop through all handlers for this logger and its parents in the + logger hierarchy. Return True if a handler was found, else False. + Stop searching up the hierarchy whenever a logger with the "propagate" + attribute set to zero is found - that will be the last logger which + is checked for the existence of handlers. + + https://github.com/python/cpython/blob/8f192d12af82c4dc40730bf59814f6a68f68f950/Lib/logging/__init__.py#L1629 + """ + c = self + rv = False + while c: + if c.handlers: + rv = True + break + if not c.propagate: + break + else: + c = c.parent # type: ignore + return rv + + +class DDLogger(logging.Logger): + """ + Custom rate limited logger used by ``ddtrace`` + + This logger class is used to rate limit the output of + log messages from within the ``ddtrace`` package. 
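+ By default at most one record per unique (logger name, level, pathname, line number)
+ combination is emitted every 60 seconds; the window is configurable via the
+ ``DD_TRACE_LOGGING_RATE`` environment variable, and ``0`` disables rate limiting
+ (see ``handle`` below).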
+ """ + + # Named tuple used for keeping track of a log lines current time bucket and the number of log lines skipped + LoggingBucket = collections.namedtuple("LoggingBucket", ("bucket", "skipped")) + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + """Constructor for ``DDLogger``""" + super(DDLogger, self).__init__(*args, **kwargs) + + # Dict to keep track of the current time bucket per name/level/pathname/lineno + self.buckets = collections.defaultdict( + lambda: DDLogger.LoggingBucket(0, 0) + ) # type: DefaultDict[Tuple[str, int, str, int], DDLogger.LoggingBucket] + + # Allow 1 log record per name/level/pathname/lineno every 60 seconds by default + # Allow configuring via `DD_TRACE_LOGGING_RATE` + # DEV: `DD_TRACE_LOGGING_RATE=0` means to disable all rate limiting + rate_limit = os.getenv("DD_TRACE_LOGGING_RATE", default=None) + + if rate_limit is not None: + self.rate_limit = int(rate_limit) + else: + self.rate_limit = 60 + + def handle(self, record): + # type: (logging.LogRecord) -> None + """ + Function used to call the handlers for a log line. + + This implementation will first determine if this log line should + be logged or rate limited, and then call the base ``logging.Logger.handle`` + function if it should be logged + + DEV: This method has all of it's code inlined to reduce on functions calls + + :param record: The log record being logged + :type record: ``logging.LogRecord`` + """ + if record.levelno >= logging.ERROR: + # avoid circular import + from ddtrace.internal import telemetry + + # currently we only have one error code + full_file_name = os.path.join(record.pathname, record.filename) + telemetry.telemetry_writer.add_error(1, record.msg % record.args, full_file_name, record.lineno) + + # If rate limiting has been disabled (`DD_TRACE_LOGGING_RATE=0`) then apply no rate limit + # If the logging is in debug, then do not apply any limits to any log + if not self.rate_limit or self.getEffectiveLevel() == logging.DEBUG: + super(DDLogger, self).handle(record) + return + + # Allow 1 log record by name/level/pathname/lineno every X seconds + # DEV: current unix time / rate (e.g. 300 seconds) = time bucket + # int(1546615098.8404942 / 300) = 515538 + # DEV: LogRecord `created` is a unix timestamp/float + # DEV: LogRecord has `levelname` and `levelno`, we want `levelno` e.g. `logging.DEBUG = 10` + current_bucket = int(record.created / self.rate_limit) + + # Limit based on logger name, record level, filename, and line number + # ('ddtrace.writer', 'DEBUG', '../site-packages/ddtrace/writer.py', 137) + # This way each unique log message can get logged at least once per time period + # DEV: LogRecord has `levelname` and `levelno`, we want `levelno` e.g. 
`logging.DEBUG = 10` + key = (record.name, record.levelno, record.pathname, record.lineno) + + # Only log this message if the time bucket has changed from the previous time we ran + logging_bucket = self.buckets[key] + if logging_bucket.bucket != current_bucket: + # Append count of skipped messages if we have skipped some since our last logging + if logging_bucket.skipped: + record.msg = "{}, %s additional messages skipped".format(record.msg) + record.args = record.args + (logging_bucket.skipped,) # type: ignore + + # Reset our bucket + self.buckets[key] = DDLogger.LoggingBucket(current_bucket, 0) + + # Call the base handle to actually log this record + super(DDLogger, self).handle(record) + else: + # Increment the count of records we have skipped + # DEV: `self.buckets[key]` is a tuple which is immutable so recreate instead + self.buckets[key] = DDLogger.LoggingBucket(logging_bucket.bucket, logging_bucket.skipped + 1) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/metrics.py new file mode 100644 index 0000000..f90b46e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/metrics.py @@ -0,0 +1,87 @@ +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 + +from ddtrace.internal import agent +from ddtrace.internal.dogstatsd import get_dogstatsd_client + + +class Metrics(object): + """Higher-level DogStatsD interface. + + This class provides automatic handling of namespaces for metrics, with the + possibility of enabling and disabling them at runtime. + + Example:: + The following example shows how to create the counter metric + 'datadog.tracer.writer.success' and how to increment it. Note that + metrics are emitted only while the metrics object is enabled. 
+ + >>> tracer_metrics = Metrics(namespace='datadog.tracer') + >>> tracer_metrics.enable() + >>> writer_meter = dd_metrics.get_meter('writer') + >>> writer_meter.increment('success') + >>> tracer_metrics.disable() + >>> writer_meter.increment('success') # won't be emitted + """ + + def __init__(self, dogstats_url=None, namespace=None): + # type: (Optional[str], Optional[str]) -> None + self.dogstats_url = dogstats_url + self.namespace = namespace + self.enabled = False + + self._client = get_dogstatsd_client(dogstats_url or agent.get_stats_url(), namespace=namespace) + + class Meter(object): + def __init__(self, metrics, name): + # type: (Metrics, str) -> None + self.metrics = metrics + self.name = name + + def increment(self, name, value=1.0, tags=None): + # type: (str, float, Optional[Dict[str, str]]) -> None + if not self.metrics.enabled: + return None + + self.metrics._client.increment( + ".".join((self.name, name)), value, [":".join(_) for _ in tags.items()] if tags else None + ) + + def gauge(self, name, value=1.0, tags=None): + # type: (str, float, Optional[Dict[str, str]]) -> None + if not self.metrics.enabled: + return None + + self.metrics._client.gauge( + ".".join((self.name, name)), value, [":".join(_) for _ in tags.items()] if tags else None + ) + + def histogram(self, name, value=1.0, tags=None): + # type: (str, float, Optional[Dict[str, str]]) -> None + if not self.metrics.enabled: + return None + + self.metrics._client.histogram( + ".".join((self.name, name)), value, [":".join(_) for _ in tags.items()] if tags else None + ) + + def distribution(self, name, value=1.0, tags=None): + # type: (str, float, Optional[Dict[str, str]]) -> None + if not self.metrics.enabled: + return None + + self.metrics._client.distribution( + ".".join((self.name, name)), value, [":".join(_) for _ in tags.items()] if tags else None + ) + + def enable(self): + # type: () -> None + self.enabled = True + + def disable(self): + # type: () -> None + self.enabled = False + + def get_meter(self, name): + # type: (str) -> Metrics.Meter + return self.Meter(self, name) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/module.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/module.py new file mode 100644 index 0000000..5b5ee69 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/module.py @@ -0,0 +1,562 @@ +import abc +from collections import defaultdict +from importlib._bootstrap import _init_module_attrs +from importlib.abc import Loader +from importlib.machinery import ModuleSpec +from importlib.util import find_spec +from pathlib import Path +import sys +from types import ModuleType +import typing as t +from weakref import WeakValueDictionary as wvdict + +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils import get_argument_value + + +ModuleHookType = t.Callable[[ModuleType], None] +PreExecHookType = t.Callable[[t.Any, ModuleType], None] +PreExecHookCond = t.Union[str, t.Callable[[str], bool]] + + +log = get_logger(__name__) + + +_run_code = None +_post_run_module_hooks: t.List[ModuleHookType] = [] + + +def _wrapped_run_code(*args: t.Any, **kwargs: t.Any) -> t.Dict[str, t.Any]: + global _run_code, _post_run_module_hooks + + # DEV: If we are calling this wrapper then _run_code must have been set to + # the original runpy._run_code. 
+ assert _run_code is not None + + mod_name = get_argument_value(args, kwargs, 3, "mod_name") + + try: + return _run_code(*args, **kwargs) + finally: + module = sys.modules[mod_name] + for hook in _post_run_module_hooks: + hook(module) + + +def _patch_run_code() -> None: + global _run_code + + if _run_code is None: + import runpy + + _run_code = runpy._run_code # type: ignore[attr-defined] + runpy._run_code = _wrapped_run_code # type: ignore[attr-defined] + + +def register_post_run_module_hook(hook: ModuleHookType) -> None: + """Register a post run module hook. + + The hooks gets called after the module is loaded. For this to work, the + hook needs to be registered during the interpreter initialization, e.g. as + part of a sitecustomize.py script. + """ + global _run_code, _post_run_module_hooks + + _patch_run_code() + + _post_run_module_hooks.append(hook) + + +def unregister_post_run_module_hook(hook: ModuleHookType) -> None: + """Unregister a post run module hook. + + If the hook was not registered, a ``ValueError`` exception is raised. + """ + global _post_run_module_hooks + + _post_run_module_hooks.remove(hook) + + +def origin(module: ModuleType) -> t.Optional[Path]: + """Get the origin source file of the module.""" + try: + # DEV: Use object.__getattribute__ to avoid potential side-effects. + orig = Path(object.__getattribute__(module, "__file__")).resolve() + except (AttributeError, TypeError): + # Module is probably only partially initialised, so we look at its + # spec instead + try: + # DEV: Use object.__getattribute__ to avoid potential side-effects. + orig = Path(object.__getattribute__(module, "__spec__").origin).resolve() + except (AttributeError, ValueError, TypeError): + orig = None + + if orig is not None and orig.is_file(): + return orig.with_suffix(".py") if orig.suffix == ".pyc" else orig + + return None + + +def _resolve(path: Path) -> t.Optional[Path]: + """Resolve a (relative) path with respect to sys.path.""" + for base in (Path(_) for _ in sys.path): + if base.is_dir(): + resolved_path = (base / path.expanduser()).resolve() + if resolved_path.is_file(): + return resolved_path + return None + + +# Borrowed from the wrapt module +# https://github.com/GrahamDumpleton/wrapt/blob/df0e62c2740143cceb6cafea4c306dae1c559ef8/src/wrapt/importer.py + + +def find_loader(fullname: str) -> t.Optional[Loader]: + return getattr(find_spec(fullname), "loader", None) + + +def is_module_installed(module_name): + return find_loader(module_name) is not None + + +def is_namespace_spec(spec: ModuleSpec) -> bool: + return spec.origin is None and spec.submodule_search_locations is not None + + +class _ImportHookChainedLoader: + def __init__(self, loader: t.Optional[Loader], spec: t.Optional[ModuleSpec] = None) -> None: + self.loader = loader + self.spec = spec + + self.callbacks: t.Dict[t.Any, t.Callable[[ModuleType], None]] = {} + + # A missing loader is generally an indication of a namespace package. + if loader is None or hasattr(loader, "create_module"): + self.create_module = self._create_module + if loader is None or hasattr(loader, "exec_module"): + self.exec_module = self._exec_module + + def __getattr__(self, name): + # Proxy any other attribute access to the underlying loader. 
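A side note on `register_post_run_module_hook`, defined just above: the hook receives the module object after runpy has finished executing it, and the docstring suggests registering it from interpreter start-up code such as sitecustomize.py. A hedged sketch of that pattern; the hook body is illustrative:

# Illustrative sketch only, not part of the vendored file.
from types import ModuleType
from ddtrace.internal.module import register_post_run_module_hook

def _log_entrypoint(module: ModuleType) -> None:
    # Runs after runpy finishes executing a module (e.g. `python -m mypackage`).
    print("finished running", module.__name__)

register_post_run_module_hook(_log_entrypoint)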
+ return getattr(self.loader, name) + + def add_callback(self, key: t.Any, callback: t.Callable[[ModuleType], None]) -> None: + self.callbacks[key] = callback + + def call_back(self, module: ModuleType) -> None: + if module.__name__ == "pkg_resources": + # DEV: pkg_resources support to prevent errors such as + # NotImplementedError: Can't perform this operation for unregistered + # loader type + module.register_loader_type(_ImportHookChainedLoader, module.DefaultProvider) + + for callback in self.callbacks.values(): + callback(module) + + def load_module(self, fullname: str) -> t.Optional[ModuleType]: + if self.loader is None: + if self.spec is None: + return None + sys.modules[self.spec.name] = module = ModuleType(fullname) + _init_module_attrs(self.spec, module) + else: + module = self.loader.load_module(fullname) + + self.call_back(module) + + return module + + def _create_module(self, spec): + if self.loader is not None: + return self.loader.create_module(spec) + + if is_namespace_spec(spec): + module = ModuleType(spec.name) + _init_module_attrs(spec, module) + return module + + return None + + def _exec_module(self, module: ModuleType) -> None: + # Collect and run only the first hook that matches the module. + pre_exec_hook = None + + for _ in sys.meta_path: + if isinstance(_, ModuleWatchdog): + try: + for cond, hook in _._pre_exec_module_hooks: + if (isinstance(cond, str) and cond == module.__name__) or ( + callable(cond) and cond(module.__name__) + ): + # Several pre-exec hooks could match, we keep the first one + pre_exec_hook = hook + break + except Exception: + log.debug("Exception happened while processing pre_exec_module_hooks", exc_info=True) + + if pre_exec_hook is not None: + break + + if pre_exec_hook: + pre_exec_hook(self, module) + else: + if self.loader is None: + spec = getattr(module, "__spec__", None) + if spec is not None and is_namespace_spec(spec): + sys.modules[spec.name] = module + else: + self.loader.exec_module(module) + + self.call_back(module) + + +class BaseModuleWatchdog(abc.ABC): + """Base module watchdog. + + Invokes ``after_import`` every time a new module is imported. 
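A minimal sketch of the subclassing pattern this docstring describes, using only the `after_import`, `install` and `uninstall` methods defined in this class; the subclass name and print call are illustrative:

# Illustrative sketch only, not part of the vendored file.
from types import ModuleType
from ddtrace.internal.module import BaseModuleWatchdog

class ImportLogger(BaseModuleWatchdog):
    def after_import(self, module: ModuleType) -> None:
        print("imported", module.__name__)

ImportLogger.install()        # prepends the watchdog instance to sys.meta_path
try:
    import sqlite3  # noqa: F401 - triggers after_import unless sqlite3 is already cached
finally:
    ImportLogger.uninstall()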
+ """ + + _instance: t.Optional["BaseModuleWatchdog"] = None + + def __init__(self) -> None: + self._finding: t.Set[str] = set() + + # DEV: pkg_resources support to prevent errors such as + # NotImplementedError: Can't perform this operation for unregistered + pkg_resources = sys.modules.get("pkg_resources") + if pkg_resources is not None: + pkg_resources.register_loader_type(_ImportHookChainedLoader, pkg_resources.DefaultProvider) + + def _add_to_meta_path(self) -> None: + sys.meta_path.insert(0, self) # type: ignore[arg-type] + + @classmethod + def _find_in_meta_path(cls) -> t.Optional[int]: + for i, meta_path in enumerate(sys.meta_path): + if type(meta_path) is cls: + return i + return None + + @classmethod + def _remove_from_meta_path(cls) -> None: + i = cls._find_in_meta_path() + + if i is None: + raise RuntimeError("%s is not installed" % cls.__name__) + + sys.meta_path.pop(i) + + def after_import(self, module: ModuleType) -> None: + raise NotImplementedError() + + def find_module(self, fullname: str, path: t.Optional[str] = None) -> t.Optional[Loader]: + if fullname in self._finding: + return None + + self._finding.add(fullname) + + try: + original_loader = find_loader(fullname) + if original_loader is not None: + loader = ( + _ImportHookChainedLoader(original_loader) + if not isinstance(original_loader, _ImportHookChainedLoader) + else original_loader + ) + + loader.add_callback(type(self), self.after_import) + + return t.cast(Loader, loader) + + finally: + self._finding.remove(fullname) + + return None + + def find_spec( + self, fullname: str, path: t.Optional[str] = None, target: t.Optional[ModuleType] = None + ) -> t.Optional[ModuleSpec]: + if fullname in self._finding: + return None + + self._finding.add(fullname) + + try: + try: + # Best effort + spec = find_spec(fullname) + except Exception: + return None + + if spec is None: + return None + + loader = getattr(spec, "loader", None) + + if not isinstance(loader, _ImportHookChainedLoader): + spec.loader = t.cast(Loader, _ImportHookChainedLoader(loader, spec)) + + t.cast(_ImportHookChainedLoader, spec.loader).add_callback(type(self), self.after_import) + + return spec + + finally: + self._finding.remove(fullname) + + @classmethod + def _check_installed(cls) -> None: + if not cls.is_installed(): + raise RuntimeError("%s is not installed" % cls.__name__) + + @classmethod + def install(cls) -> None: + """Install the module watchdog.""" + if cls.is_installed(): + raise RuntimeError("%s is already installed" % cls.__name__) + + cls._instance = cls() + cls._instance._add_to_meta_path() + log.debug("%s installed", cls) + + @classmethod + def is_installed(cls): + """Check whether this module watchdog class is installed.""" + return cls._instance is not None and type(cls._instance) is cls + + @classmethod + def uninstall(cls) -> None: + """Uninstall the module watchdog. + + This will uninstall only the most recently installed instance of this + class. + """ + cls._check_installed() + cls._remove_from_meta_path() + + cls._instance = None + + log.debug("%s uninstalled", cls) + + +class ModuleWatchdog(BaseModuleWatchdog): + """Module watchdog. + + Hooks into the import machinery to detect when modules are loaded/unloaded. + This is also responsible for triggering any registered import hooks. + + Subclasses might customize the default behavior by overriding the + ``after_import`` method, which is triggered on every module import, once + the subclass is installed. 
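The hook triggering mentioned here is driven by the registration classmethods below (`register_module_hook`, `register_origin_hook`); a small name-based sketch, with an arbitrary module name chosen for illustration:

# Illustrative sketch only, not part of the vendored file.
from types import ModuleType
from ddtrace.internal.module import ModuleWatchdog

def on_requests(module: ModuleType) -> None:
    # Called on the first import of "requests", or immediately if it is already loaded.
    print("requests version:", getattr(module, "__version__", "unknown"))

ModuleWatchdog.install()
ModuleWatchdog.register_module_hook("requests", on_requests)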
+ """ + + def __init__(self) -> None: + super().__init__() + + self._hook_map: t.DefaultDict[str, t.List[ModuleHookType]] = defaultdict(list) + self._om: t.Optional[t.Dict[str, ModuleType]] = None + self._pre_exec_module_hooks: t.List[t.Tuple[PreExecHookCond, PreExecHookType]] = [] + + @property + def _origin_map(self) -> t.Dict[str, ModuleType]: + def modules_with_origin(modules: t.Iterable[ModuleType]) -> t.Dict[str, t.Any]: + result: wvdict = wvdict() + + for m in modules: + module_origin = origin(m) + if module_origin is None: + continue + + try: + result[str(module_origin)] = m + except TypeError: + # This can happen if the module is a special object that + # does not allow for weak references. Quite likely this is + # an object created by a native extension. We make the + # assumption that this module does not contain valuable + # information that can be used at the Python runtime level. + pass + + return t.cast(t.Dict[str, t.Any], result) + + if self._om is None: + try: + self._om = modules_with_origin(sys.modules.values()) + except RuntimeError: + # The state of sys.modules might have been mutated by another + # thread. We try to build the full mapping at the next occasion. + # For now we take the more expensive route of building a list of + # the current values, which might be incomplete. + return modules_with_origin(list(sys.modules.values())) + return self._om + + def after_import(self, module: ModuleType) -> None: + module_path = origin(module) + path = str(module_path) if module_path is not None else None + if path is not None: + self._origin_map[path] = module + + # Collect all hooks by module origin and name + hooks = [] + if path is not None and path in self._hook_map: + hooks.extend(self._hook_map[path]) + if module.__name__ in self._hook_map: + hooks.extend(self._hook_map[module.__name__]) + + if hooks: + log.debug("Calling %d registered hooks on import of module '%s'", len(hooks), module.__name__) + for hook in hooks: + hook(module) + + @classmethod + def get_by_origin(cls, _origin: Path) -> t.Optional[ModuleType]: + """Lookup a module by its origin.""" + cls._check_installed() + + instance = t.cast(ModuleWatchdog, cls._instance) + + resolved_path = _resolve(_origin) + if resolved_path is not None: + path = str(resolved_path) + module = instance._origin_map.get(path) + if module is not None: + return module + + # Check if this is the __main__ module + main_module = sys.modules.get("__main__") + if main_module is not None and origin(main_module) == path: + # Register for future lookups + instance._origin_map[path] = main_module + + return main_module + + return None + + @classmethod + def register_origin_hook(cls, origin: Path, hook: ModuleHookType) -> None: + """Register a hook to be called when the module with the given origin is + imported. + + The hook will be called with the module object as argument. + """ + cls._check_installed() + + # DEV: Under the hypothesis that this is only ever called by the probe + # poller thread, there are no further actions to take. Should this ever + # change, then thread-safety might become a concern. 
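For completeness, the path-based variant described by this docstring resolves the given origin against `sys.path` before keying the hook, and raises `ValueError` when the path cannot be resolved. A hedged sketch; the file path and hook body are made up:

# Illustrative sketch only, not part of the vendored file.
from pathlib import Path
from types import ModuleType
from ddtrace.internal.module import ModuleWatchdog

def on_tasks_loaded(module: ModuleType) -> None:
    print("loaded", module.__name__)

ModuleWatchdog.install()
ModuleWatchdog.register_origin_hook(Path("myapp/tasks.py"), on_tasks_loaded)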
+ resolved_path = _resolve(origin) + if resolved_path is None: + raise ValueError("Cannot resolve module origin %s" % origin) + + path = str(resolved_path) + + log.debug("Registering hook '%r' on path '%s'", hook, path) + instance = t.cast(ModuleWatchdog, cls._instance) + instance._hook_map[path].append(hook) + try: + module = instance._origin_map[path] + # Sanity check: the module might have been removed from sys.modules + # but not yet garbage collected. + try: + sys.modules[module.__name__] + except KeyError: + del instance._origin_map[path] + raise + except KeyError: + # The module is not loaded yet. Nothing more we can do. + return + + # The module was already imported so we invoke the hook straight-away + log.debug("Calling hook '%r' on already imported module '%s'", hook, module.__name__) + hook(module) + + @classmethod + def unregister_origin_hook(cls, origin: Path, hook: ModuleHookType) -> None: + """Unregister the hook registered with the given module origin and + argument. + """ + cls._check_installed() + + resolved_path = _resolve(origin) + if resolved_path is None: + raise ValueError("Module origin %s cannot be resolved", origin) + + path = str(resolved_path) + + instance = t.cast(ModuleWatchdog, cls._instance) + if path not in instance._hook_map: + raise ValueError("No hooks registered for origin %s" % origin) + + try: + if path in instance._hook_map: + hooks = instance._hook_map[path] + hooks.remove(hook) + if not hooks: + del instance._hook_map[path] + except ValueError: + raise ValueError("Hook %r not registered for origin %s" % (hook, origin)) + + @classmethod + def register_module_hook(cls, module: str, hook: ModuleHookType) -> None: + """Register a hook to be called when the module with the given name is + imported. + + The hook will be called with the module object as argument. + """ + cls._check_installed() + + log.debug("Registering hook '%r' on module '%s'", hook, module) + instance = t.cast(ModuleWatchdog, cls._instance) + instance._hook_map[module].append(hook) + try: + module_object = sys.modules[module] + except KeyError: + # The module is not loaded yet. Nothing more we can do. + return + + # The module was already imported so we invoke the hook straight-away + log.debug("Calling hook '%r' on already imported module '%s'", hook, module) + hook(module_object) + + @classmethod + def unregister_module_hook(cls, module: str, hook: ModuleHookType) -> None: + """Unregister the hook registered with the given module name and + argument. + """ + cls._check_installed() + + instance = t.cast(ModuleWatchdog, cls._instance) + if module not in instance._hook_map: + raise ValueError("No hooks registered for module %s" % module) + + try: + if module in instance._hook_map: + hooks = instance._hook_map[module] + hooks.remove(hook) + if not hooks: + del instance._hook_map[module] + except ValueError: + raise ValueError("Hook %r not registered for module %r" % (hook, module)) + + @classmethod + def after_module_imported(cls, module: str) -> t.Callable[[ModuleHookType], None]: + def _(hook: ModuleHookType) -> None: + cls.register_module_hook(module, hook) + + return _ + + @classmethod + def register_pre_exec_module_hook( + cls: t.Type["ModuleWatchdog"], cond: PreExecHookCond, hook: PreExecHookType + ) -> None: + """Register a hook to execute before/instead of exec_module. + + The pre exec_module hook is executed before the module is executed + to allow for changed modules to be executed as needed. 
To ensure + that the hook is applied only to the modules that are required, + the condition is evaluated against the module name. + """ + cls._check_installed() + + log.debug("Registering pre_exec module hook '%r' on condition '%s'", hook, cond) + instance = t.cast(ModuleWatchdog, cls._instance) + instance._pre_exec_module_hooks.append((cond, hook)) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/packages.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/packages.py new file mode 100644 index 0000000..e309daa --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/packages.py @@ -0,0 +1,134 @@ +import logging +import os +import typing as t + +from ddtrace.internal.utils.cache import callonce + + +try: + import pathlib # noqa: F401 +except ImportError: + import pathlib2 as pathlib # type: ignore[no-redef] # noqa: F401 + + +LOG = logging.getLogger(__name__) + + +try: + fspath = os.fspath +except AttributeError: + # Stolen from Python 3.10 + def fspath(path): + # For testing purposes, make sure the function is available when the C + # implementation exists. + """Return the path representation of a path-like object. + + If str or bytes is passed in, it is returned unchanged. Otherwise the + os.PathLike interface is used to get the path representation. If the + path representation is not str or bytes, TypeError is raised. If the + provided path is not str, bytes, or os.PathLike, TypeError is raised. + """ + if isinstance(path, (str, bytes)): + return path + + # Work from the object's type to match method resolution of other magic + # methods. + path_type = type(path) + try: + path_repr = path_type.__fspath__(path) + except AttributeError: + if hasattr(path_type, "__fspath__"): + raise + else: + raise TypeError("expected str, bytes or os.PathLike object, not " + path_type.__name__) + if isinstance(path_repr, (str, bytes)): + return path_repr + else: + raise TypeError( + "expected {}.__fspath__() to return str or bytes, " + "not {}".format(path_type.__name__, type(path_repr).__name__) + ) + + +# We don't store every file of every package but filter commonly used extensions +SUPPORTED_EXTENSIONS = (".py", ".so", ".dll", ".pyc") + + +Distribution = t.NamedTuple("Distribution", [("name", str), ("version", str), ("path", t.Optional[str])]) + + +@callonce +def get_distributions(): + # type: () -> t.Set[Distribution] + """returns the name and version of all distributions in a python path""" + try: + import importlib.metadata as importlib_metadata + except ImportError: + import importlib_metadata # type: ignore[no-redef] + + pkgs = set() + for dist in importlib_metadata.distributions(): + # Get the root path of all files in a distribution + path = str(dist.locate_file("")) + # PKG-INFO and/or METADATA files are parsed when dist.metadata is accessed + # Optimization: we should avoid accessing dist.metadata more than once + metadata = dist.metadata + name = metadata["name"] + version = metadata["version"] + if name and version: + pkgs.add(Distribution(path=path, name=name, version=version)) + + return pkgs + + +def _is_python_source_file(path): + # type: (pathlib.PurePath) -> bool + return os.path.splitext(path.name)[-1].lower() in SUPPORTED_EXTENSIONS + + +@callonce +def _package_file_mapping(): + # type: (...) 
-> t.Optional[t.Dict[str, Distribution]] + try: + import importlib.metadata as il_md + except ImportError: + import importlib_metadata as il_md # type: ignore[no-redef] + + try: + mapping = {} + + for ilmd_d in il_md.distributions(): + if ilmd_d is not None and ilmd_d.files is not None: + d = Distribution(name=ilmd_d.metadata["name"], version=ilmd_d.version, path=None) + for f in ilmd_d.files: + if _is_python_source_file(f): + # mapping[fspath(f.locate())] = d + _path = fspath(f.locate()) + mapping[_path] = d + _realp = os.path.realpath(_path) + if _realp != _path: + mapping[_realp] = d + + return mapping + + except Exception: + LOG.error( + "Unable to build package file mapping, " + "please report this to https://github.com/DataDog/dd-trace-py/issues", + exc_info=True, + ) + return None + + +def filename_to_package(filename): + # type: (str) -> t.Optional[Distribution] + + mapping = _package_file_mapping() + if mapping is None: + return None + + if filename not in mapping and filename.endswith(".pyc"): + # Replace .pyc by .py + filename = filename[:-1] + + return mapping.get(filename) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/periodic.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/periodic.py new file mode 100644 index 0000000..e888025 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/periodic.py @@ -0,0 +1,174 @@ +# -*- encoding: utf-8 -*- +import threading +import typing # noqa:F401 + +import attr + +from ddtrace.internal import service + +from . import forksafe + + +class PeriodicThread(threading.Thread): + """Periodic thread. + + This class can be used to instantiate a worker thread that will run its `run_periodic` function every `interval` + seconds. + + """ + + _ddtrace_profiling_ignore = True + + def __init__( + self, + interval, # type: float + target, # type: typing.Callable[[], typing.Any] + name=None, # type: typing.Optional[str] + on_shutdown=None, # type: typing.Optional[typing.Callable[[], typing.Any]] + ): + # type: (...) -> None + """Create a periodic thread. + + :param interval: The interval in seconds to wait between execution of the periodic function. + :param target: The periodic function to execute every interval. + :param name: The name of the thread. + :param on_shutdown: The function to call when the thread shuts down. + """ + super(PeriodicThread, self).__init__(name=name) + self._target = target + self._on_shutdown = on_shutdown + self.interval = interval + self.quit = forksafe.Event() + self.daemon = True + + def stop(self): + """Stop the thread.""" + # NOTE: make sure the thread is alive before using self.quit: + # 1. self.quit is Lock-based + # 2. if we're a child trying to stop a Thread, + # the Lock might have been locked in a parent process while forking so that'd block forever + if self.is_alive(): + self.quit.set() + + def run(self): + """Run the target function periodically.""" + while not self.quit.wait(self.interval): + self._target() + if self._on_shutdown is not None: + self._on_shutdown() + + +class AwakeablePeriodicThread(PeriodicThread): + """Periodic thread that can be awakened on demand. + + This class can be used to instantiate a worker thread that will run its + `run_periodic` function every `interval` seconds, or upon request. + """ + + def __init__( + self, + interval, # type: float + target, # type: typing.Callable[[], typing.Any] + name=None, # type: typing.Optional[str] + on_shutdown=None, # type: typing.Optional[typing.Callable[[], typing.Any]] + ): + # type: (...) 
-> None + """Create a periodic thread that can be awakened on demand.""" + super(AwakeablePeriodicThread, self).__init__(interval, target, name, on_shutdown) + self.request = forksafe.Event() + self.served = forksafe.Event() + self.awake_lock = forksafe.Lock() + + def awake(self): + """Awake the thread.""" + with self.awake_lock: + self.served.clear() + self.request.set() + self.served.wait() + + def stop(self): + super().stop() + self.request.set() + + def run(self): + """Run the target function periodically or on demand.""" + while not self.quit.is_set(): + self._target() + + if self.request.wait(self.interval): + if self.quit.is_set(): + break + self.request.clear() + self.served.set() + + if self._on_shutdown is not None: + self._on_shutdown() + + +@attr.s(eq=False) +class PeriodicService(service.Service): + """A service that runs periodically.""" + + _interval = attr.ib(type=float) + _worker = attr.ib(default=None, init=False, repr=False) + + __thread_class__ = PeriodicThread + + @property + def interval(self): + # type: (...) -> float + return self._interval + + @interval.setter + def interval( + self, + value, # type: float + ): + # type: (...) -> None + self._interval = value + # Update the interval of the PeriodicThread based on ours + if self._worker: + self._worker.interval = value + + def _start_service(self, *args, **kwargs): + # type: (typing.Any, typing.Any) -> None + """Start the periodic service.""" + self._worker = self.__thread_class__( + self.interval, + target=self.periodic, + name="%s:%s" % (self.__class__.__module__, self.__class__.__name__), + on_shutdown=self.on_shutdown, + ) + self._worker.start() + + def _stop_service(self, *args, **kwargs): + # type: (typing.Any, typing.Any) -> None + """Stop the periodic collector.""" + self._worker.stop() + super(PeriodicService, self)._stop_service(*args, **kwargs) + + def join( + self, + timeout=None, # type: typing.Optional[float] + ): + # type: (...) -> None + if self._worker: + self._worker.join(timeout) + + @staticmethod + def on_shutdown(): + pass + + def periodic(self): + # type: (...) -> None + pass + + +class AwakeablePeriodicService(PeriodicService): + """A service that runs periodically but that can also be awakened on demand.""" + + __thread_class__ = AwakeablePeriodicThread + + def awake(self): + # type: (...) -> None + self._worker.awake() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/__init__.py new file mode 100644 index 0000000..eba8ec2 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/__init__.py @@ -0,0 +1,77 @@ +import abc +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 + +import attr + +from ddtrace import Span # noqa:F401 +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +@attr.s +class SpanProcessor(metaclass=abc.ABCMeta): + """A Processor is used to process spans as they are created and finished by a tracer.""" + + __processors__ = [] # type: List["SpanProcessor"] + + def __attrs_post_init__(self): + # type: () -> None + """Default post initializer which logs the representation of the + Processor at the ``logging.DEBUG`` level. 
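Stepping back to the periodic.py helpers above: `PeriodicThread` is a daemon `threading.Thread` that re-runs its target until the fork-safe `quit` event is set, and `AwakeablePeriodicThread` additionally wakes on demand via `awake()`. A minimal sketch of driving the plain variant; the interval and target are illustrative:

# Illustrative sketch only, not part of the vendored file.
import time
from ddtrace.internal.periodic import PeriodicThread

ticks = []
worker = PeriodicThread(interval=0.1, target=lambda: ticks.append(time.time()), name="example-ticker")
worker.start()          # start() is inherited from threading.Thread
time.sleep(0.35)        # allow a few periodic runs
worker.stop()           # sets the quit event; the loop exits on its next wakeup
worker.join()
print(len(ticks), "ticks recorded")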
+ + The representation can be modified with the ``repr`` argument to the + attrs attribute:: + + @attr.s + class MyProcessor(Processor): + field_to_include = attr.ib(repr=True) + field_to_exclude = attr.ib(repr=False) + """ + log.debug("initialized processor %r", self) + + @abc.abstractmethod + def on_span_start(self, span): + # type: (Span) -> None + """Called when a span is started. + + This method is useful for making upfront decisions on spans. + + For example, a sampling decision can be made when the span is created + based on its resource name. + """ + pass + + @abc.abstractmethod + def on_span_finish(self, span): + # type: (Span) -> None + """Called with the result of any previous processors or initially with + the finishing span when a span finishes. + + It can return any data which will be passed to any processors that are + applied afterwards. + """ + pass + + def shutdown(self, timeout): + # type: (Optional[float]) -> None + """Called when the processor is done being used. + + Any clean-up or flushing should be performed with this method. + """ + pass + + def register(self): + # type: () -> None + """Register the processor with the global list of processors.""" + SpanProcessor.__processors__.append(self) + + def unregister(self): + # type: () -> None + """Unregister the processor from the global list of processors.""" + try: + SpanProcessor.__processors__.remove(self) + except ValueError: + raise ValueError("Span processor %r not registered" % self) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/endpoint_call_counter.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/endpoint_call_counter.py new file mode 100644 index 0000000..a31bfd6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/endpoint_call_counter.py @@ -0,0 +1,43 @@ +import typing + +import attr + +from ddtrace.ext import SpanTypes +from ddtrace.internal import forksafe +from ddtrace.internal.compat import ensure_text +from ddtrace.internal.processor import SpanProcessor +from ddtrace.span import Span # noqa:F401 + + +EndpointCountsType = typing.Dict[str, int] + + +@attr.s(eq=False) +class EndpointCallCounterProcessor(SpanProcessor): + endpoint_counts = attr.ib(init=False, repr=False, type=EndpointCountsType, factory=lambda: {}, eq=False) + _endpoint_counts_lock = attr.ib(init=False, repr=False, factory=forksafe.Lock, eq=False) + _enabled = attr.ib(default=False, repr=False, eq=False) + + def enable(self): + # type: () -> None + self._enabled = True + + def on_span_start(self, span): + # type: (Span) -> None + pass + + def on_span_finish(self, span): + # type: (Span) -> None + if not self._enabled: + return + if span._local_root == span and span.span_type == SpanTypes.WEB: + resource = ensure_text(span.resource, errors="backslashreplace") + with self._endpoint_counts_lock: + self.endpoint_counts[resource] = self.endpoint_counts.get(resource, 0) + 1 + + def reset(self): + # type: () -> EndpointCountsType + with self._endpoint_counts_lock: + counts = self.endpoint_counts + self.endpoint_counts = {} + return counts diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/stats.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/stats.py new file mode 100644 index 0000000..16008c9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/stats.py @@ -0,0 +1,255 @@ +# coding: utf-8 +from collections import defaultdict +import os +import typing + +from ddsketch import LogCollapsingLowestDenseDDSketch +from 
ddsketch.pb.proto import DDSketchProto + +import ddtrace +from ddtrace import config +from ddtrace.internal import compat +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter +from ddtrace.span import _is_top_level + +from ...constants import SPAN_MEASURED_KEY +from .._encoding import packb +from ..agent import get_connection +from ..compat import get_connection_response +from ..forksafe import Lock +from ..hostname import get_hostname +from ..logger import get_logger +from ..periodic import PeriodicService +from ..writer import _human_size +from . import SpanProcessor + + +if typing.TYPE_CHECKING: # pragma: no cover + from typing import DefaultDict # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Union # noqa:F401 + + from ddtrace import Span # noqa:F401 + + +log = get_logger(__name__) + + +def _is_measured(span): + # type: (Span) -> bool + """Return whether the span is flagged to be measured or not.""" + return span._metrics.get(SPAN_MEASURED_KEY) == 1 + + +""" +To aggregate metrics for spans they need to be "uniquely" identified (as +best as possible). This enables the compression of stat points. + +Aggregation can be done using primary and secondary attributes from the span +stored in a tuple which is hashable in Python. +""" +SpanAggrKey = typing.Tuple[ + str, # name + str, # service + str, # resource + str, # type + int, # http status code + bool, # synthetics request +] + + +class SpanAggrStats(object): + """Aggregated span statistics.""" + + __slots__ = ("hits", "top_level_hits", "errors", "duration", "ok_distribution", "err_distribution") + + def __init__(self): + self.hits = 0 + self.top_level_hits = 0 + self.errors = 0 + self.duration = 0 + # Match the relative accuracy of the sketch implementation used in the backend + # which is 0.775%. + self.ok_distribution = LogCollapsingLowestDenseDDSketch(0.00775, bin_limit=2048) + self.err_distribution = LogCollapsingLowestDenseDDSketch(0.00775, bin_limit=2048) + + +def _span_aggr_key(span): + # type: (Span) -> SpanAggrKey + """Return a hashable key that can be used to aggregate similar spans.""" + service = span.service or "" + resource = span.resource or "" + _type = span.span_type or "" + status_code = span.get_tag("http.status_code") or 0 + synthetics = span.context.dd_origin == "synthetics" + return span.name, service, resource, _type, int(status_code), synthetics + + +class SpanStatsProcessorV06(PeriodicService, SpanProcessor): + """SpanProcessor for computing, collecting and submitting span metrics to the Datadog Agent.""" + + def __init__(self, agent_url, interval=None, timeout=1.0, retry_attempts=3): + # type: (str, Optional[float], float, int) -> None + if interval is None: + interval = float(os.getenv("_DD_TRACE_STATS_WRITER_INTERVAL") or 10.0) + super(SpanStatsProcessorV06, self).__init__(interval=interval) + self._agent_url = agent_url + self._endpoint = "/v0.6/stats" + self._agent_endpoint = "%s%s" % (self._agent_url, self._endpoint) + self._timeout = timeout + # Have the bucket size match the interval in which flushes occur. 
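For context on the bucket sizing set on the next line: span statistics are grouped into fixed windows of `interval` seconds, and each finished span is assigned to the window containing its end timestamp, using the same modulo arithmetic that appears later in `on_span_finish`. A worked example with illustrative numbers:

# Illustrative sketch only, not part of the vendored file.
interval = 10.0                                  # seconds between stats flushes
bucket_size_ns = int(interval * 1e9)             # 10_000_000_000 ns per bucket
span_end_ns = 123_456_789_000                    # example span end timestamp in ns
bucket_time_ns = span_end_ns - (span_end_ns % bucket_size_ns)
assert bucket_time_ns == 120_000_000_000         # start of the 10 s window holding the span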
+ self._bucket_size_ns = int(interval * 1e9) # type: int + self._buckets = defaultdict( + lambda: defaultdict(SpanAggrStats) + ) # type: DefaultDict[int, DefaultDict[SpanAggrKey, SpanAggrStats]] + self._headers = { + "Datadog-Meta-Lang": "python", + "Datadog-Meta-Tracer-Version": ddtrace.__version__, + "Content-Type": "application/msgpack", + } # type: Dict[str, str] + self._hostname = "" + if config.report_hostname: + self._hostname = get_hostname() + self._lock = Lock() + self._enabled = True + + self._flush_stats_with_backoff = fibonacci_backoff_with_jitter( + attempts=retry_attempts, + initial_wait=0.618 * self.interval / (1.618**retry_attempts) / 2, + )(self._flush_stats) + + self.start() + + def on_span_start(self, span): + # type: (Span) -> None + pass + + def on_span_finish(self, span): + # type: (Span) -> None + if not self._enabled: + return + + is_top_level = _is_top_level(span) + if not is_top_level and not _is_measured(span): + return + + with self._lock: + # Align the span into the corresponding stats bucket + assert span.duration_ns is not None + span_end_ns = span.start_ns + span.duration_ns + bucket_time_ns = span_end_ns - (span_end_ns % self._bucket_size_ns) + aggr_key = _span_aggr_key(span) + stats = self._buckets[bucket_time_ns][aggr_key] + + stats.hits += 1 + stats.duration += span.duration_ns + if is_top_level: + stats.top_level_hits += 1 + if span.error: + stats.errors += 1 + stats.err_distribution.add(span.duration_ns) + else: + stats.ok_distribution.add(span.duration_ns) + + def _serialize_buckets(self): + # type: () -> List[Dict] + """Serialize and update the buckets. + + The current bucket is left in case any other spans are added. + """ + serialized_buckets = [] + serialized_bucket_keys = [] + for bucket_time_ns, bucket in self._buckets.items(): + bucket_aggr_stats = [] + serialized_bucket_keys.append(bucket_time_ns) + + for aggr_key, stat_aggr in bucket.items(): + name, service, resource, _type, http_status, synthetics = aggr_key + serialized_bucket = { + "Name": compat.ensure_text(name), + "Resource": compat.ensure_text(resource), + "Synthetics": synthetics, + "HTTPStatusCode": http_status, + "Hits": stat_aggr.hits, + "TopLevelHits": stat_aggr.top_level_hits, + "Duration": stat_aggr.duration, + "Errors": stat_aggr.errors, + "OkSummary": DDSketchProto.to_proto(stat_aggr.ok_distribution).SerializeToString(), + "ErrorSummary": DDSketchProto.to_proto(stat_aggr.err_distribution).SerializeToString(), + } + if service: + serialized_bucket["Service"] = compat.ensure_text(service) + if _type: + serialized_bucket["Type"] = compat.ensure_text(_type) + bucket_aggr_stats.append(serialized_bucket) + serialized_buckets.append( + { + "Start": bucket_time_ns, + "Duration": self._bucket_size_ns, + "Stats": bucket_aggr_stats, + } + ) + + # Clear out buckets that have been serialized + for key in serialized_bucket_keys: + del self._buckets[key] + + return serialized_buckets + + def _flush_stats(self, payload): + # type: (bytes) -> None + try: + conn = get_connection(self._agent_url, self._timeout) + conn.request("PUT", self._endpoint, payload, self._headers) + resp = get_connection_response(conn) + except Exception: + log.error("failed to submit span stats to the Datadog agent at %s", self._agent_endpoint, exc_info=True) + raise + else: + if resp.status == 404: + log.error( + "Datadog agent does not support tracer stats computation, disabling, please upgrade your agent" + ) + self._enabled = False + return + elif resp.status >= 400: + log.error( + "failed to send stats payload, 
%s (%s) (%s) response from Datadog agent at %s", + resp.status, + resp.reason, + resp.read(), + self._agent_endpoint, + ) + else: + log.info("sent %s to %s", _human_size(len(payload)), self._agent_endpoint) + + def periodic(self): + # type: (...) -> None + + with self._lock: + serialized_stats = self._serialize_buckets() + + if not serialized_stats: + # No stats to report, short-circuit. + return + raw_payload = { + "Stats": serialized_stats, + "Hostname": self._hostname, + } # type: Dict[str, Union[List[Dict], str]] + if config.env: + raw_payload["Env"] = compat.ensure_text(config.env) + if config.version: + raw_payload["Version"] = compat.ensure_text(config.version) + + payload = packb(raw_payload) + try: + self._flush_stats_with_backoff(payload) + except Exception: + log.error("retry limit exceeded submitting span stats to the Datadog agent at %s", self._agent_endpoint) + + def shutdown(self, timeout): + # type: (Optional[float]) -> None + self.periodic() + self.stop(timeout) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/trace.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/trace.py new file mode 100644 index 0000000..fe3b73d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/processor/trace.py @@ -0,0 +1,388 @@ +import abc +from collections import defaultdict +from threading import Lock +from threading import RLock +from typing import Dict # noqa:F401 +from typing import Iterable # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +import attr + +from ddtrace import config +from ddtrace.constants import BASE_SERVICE_KEY +from ddtrace.constants import SAMPLING_PRIORITY_KEY +from ddtrace.constants import SPAN_KIND +from ddtrace.constants import USER_KEEP +from ddtrace.internal import gitmetadata +from ddtrace.internal.constants import HIGHER_ORDER_TRACE_ID_BITS +from ddtrace.internal.constants import MAX_UINT_64BITS +from ddtrace.internal.logger import get_logger +from ddtrace.internal.processor import SpanProcessor +from ddtrace.internal.sampling import SpanSamplingRule +from ddtrace.internal.sampling import is_single_span_sampled +from ddtrace.internal.schema import schematize_service_name +from ddtrace.internal.service import ServiceStatusError +from ddtrace.internal.writer import TraceWriter +from ddtrace.span import Span # noqa:F401 +from ddtrace.span import _get_64_highest_order_bits_as_hex +from ddtrace.span import _is_top_level + + +if config._telemetry_enabled: + from ddtrace.internal import telemetry + from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_TRACER + +try: + from typing import DefaultDict # noqa:F401 +except ImportError: + from collections import defaultdict as DefaultDict + +log = get_logger(__name__) + + +@attr.s +class TraceProcessor(metaclass=abc.ABCMeta): + def __attrs_post_init__(self): + # type: () -> None + """Default post initializer which logs the representation of the + TraceProcessor at the ``logging.DEBUG`` level. + + The representation can be modified with the ``repr`` argument to the + attrs attribute:: + + @attr.s + class MyTraceProcessor(TraceProcessor): + field_to_include = attr.ib(repr=True) + field_to_exclude = attr.ib(repr=False) + """ + log.debug("initialized trace processor %r", self) + + @abc.abstractmethod + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + """Processes a trace. 
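As an aside on the abstract `process_trace` introduced here: the concrete processors in this file subclass `TraceProcessor` with `attr.s` and either return the (possibly modified) span list or drop it. A hedged sketch of a custom processor; the class name and resource filter are purely illustrative:

# Illustrative sketch only, not part of the vendored file.
from typing import List, Optional

import attr

from ddtrace.internal.processor.trace import TraceProcessor
from ddtrace.span import Span

@attr.s
class DropHealthchecks(TraceProcessor):
    def process_trace(self, trace: List[Span]) -> Optional[List[Span]]:
        # Returning None prevents the trace chunk from being processed further.
        if trace and trace[0].resource == "GET /healthcheck":
            return None
        return trace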
+ + ``None`` can be returned to prevent the trace from being further + processed. + """ + pass + + +@attr.s +class TraceSamplingProcessor(TraceProcessor): + """Processor that keeps traces that have sampled spans. If all spans + are unsampled then ``None`` is returned. + + Note that this processor is only effective if complete traces are sent. If + the spans of a trace are divided in separate lists then it's possible that + parts of the trace are unsampled when the whole trace should be sampled. + """ + + _compute_stats_enabled = attr.ib(type=bool) + + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + if trace: + # When stats computation is enabled in the tracer then we can + # safely drop the traces. + if self._compute_stats_enabled: + priority = trace[0]._context.sampling_priority if trace[0]._context is not None else None + if priority is not None and priority <= 0: + # When any span is marked as keep by a single span sampling + # decision then we still send all and only those spans. + single_spans = [_ for _ in trace if is_single_span_sampled(_)] + + return single_spans or None + + return trace + + log.debug("dropping trace %d with %d spans", trace[0].trace_id, len(trace)) + + return None + + +@attr.s +class TopLevelSpanProcessor(SpanProcessor): + """Processor marks spans as top level + + A span is top level when it is the entrypoint method for a request to a service. + Top level span and service entry span are equivalent terms + + The "top level" metric will be used by the agent to calculate trace metrics + and determine how spans should be displaced in the UI. If this metric is not + set by the tracer the first span in a trace chunk will be marked as top level. + + """ + + def on_span_start(self, _): + # type: (Span) -> None + pass + + def on_span_finish(self, span): + # DEV: Update span after finished to avoid race condition + if _is_top_level(span): + span.set_metric("_dd.top_level", 1) + + +@attr.s +class TraceTagsProcessor(TraceProcessor): + """Processor that applies trace-level tags to the trace.""" + + def _set_git_metadata(self, chunk_root): + repository_url, commit_sha, main_package = gitmetadata.get_git_tags() + if repository_url: + chunk_root.set_tag_str("_dd.git.repository_url", repository_url) + if commit_sha: + chunk_root.set_tag_str("_dd.git.commit.sha", commit_sha) + if main_package: + chunk_root.set_tag_str("_dd.python_main_package", main_package) + + def process_trace(self, trace): + # type: (List[Span]) -> Optional[List[Span]] + if not trace: + return trace + + chunk_root = trace[0] + ctx = chunk_root._context + if not ctx: + return trace + + ctx._update_tags(chunk_root) + self._set_git_metadata(chunk_root) + chunk_root.set_tag_str("language", "python") + # for 128 bit trace ids + if chunk_root.trace_id > MAX_UINT_64BITS: + trace_id_hob = _get_64_highest_order_bits_as_hex(chunk_root.trace_id) + chunk_root.set_tag_str(HIGHER_ORDER_TRACE_ID_BITS, trace_id_hob) + return trace + + +@attr.s +class SpanAggregator(SpanProcessor): + """Processor that aggregates spans together by trace_id and writes the + spans to the provided writer when: + - The collection is assumed to be complete. A collection of spans is + assumed to be complete if all the spans that have been created with + the trace_id have finished; or + - A minimum threshold of spans (``partial_flush_min_spans``) have been + finished in the collection and ``partial_flush_enabled`` is True. 
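Restating the two flush conditions just listed as a compact predicate may help when reading `on_span_finish` below; the parameter names are illustrative:

# Illustrative sketch only, not part of the vendored file.
def should_flush(total_spans, finished_spans, partial_flush_enabled, partial_flush_min_spans):
    complete = finished_spans == total_spans
    partial = partial_flush_enabled and finished_spans >= partial_flush_min_spans
    return complete or partial

assert should_flush(10, 10, False, 300) is True     # the whole trace has finished
assert should_flush(1000, 300, True, 300) is True   # partial flush threshold reached
assert should_flush(1000, 299, True, 300) is False  # keep buffering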
+ """ + + @attr.s + class _Trace(object): + spans = attr.ib(default=attr.Factory(list)) # type: List[Span] + num_finished = attr.ib(type=int, default=0) # type: int + + _partial_flush_enabled = attr.ib(type=bool) + _partial_flush_min_spans = attr.ib(type=int) + _trace_processors = attr.ib(type=Iterable[TraceProcessor]) + _writer = attr.ib(type=TraceWriter) + _traces = attr.ib( + factory=lambda: defaultdict(lambda: SpanAggregator._Trace()), + init=False, + type=DefaultDict[int, "_Trace"], + repr=False, + ) + if config._span_aggregator_rlock: + _lock = attr.ib(init=False, factory=RLock, repr=False, type=Union[RLock, Lock]) + else: + _lock = attr.ib(init=False, factory=Lock, repr=False, type=Union[RLock, Lock]) + # Tracks the number of spans created and tags each count with the api that was used + # ex: otel api, opentracing api, datadog api + _span_metrics = attr.ib( + init=False, + factory=lambda: { + "spans_created": defaultdict(int), + "spans_finished": defaultdict(int), + }, + type=Dict[str, DefaultDict], + ) + + def on_span_start(self, span): + # type: (Span) -> None + with self._lock: + trace = self._traces[span.trace_id] + trace.spans.append(span) + self._span_metrics["spans_created"][span._span_api] += 1 + self._queue_span_count_metrics("spans_created", "integration_name") + + def on_span_finish(self, span): + # type: (Span) -> None + with self._lock: + self._span_metrics["spans_finished"][span._span_api] += 1 + trace = self._traces[span.trace_id] + trace.num_finished += 1 + should_partial_flush = self._partial_flush_enabled and trace.num_finished >= self._partial_flush_min_spans + if trace.num_finished == len(trace.spans) or should_partial_flush: + trace_spans = trace.spans + trace.spans = [] + if trace.num_finished < len(trace_spans): + finished = [] + for s in trace_spans: + if s.finished: + finished.append(s) + else: + trace.spans.append(s) + else: + finished = trace_spans + + num_finished = len(finished) + + if should_partial_flush and num_finished > 0: + log.debug("Partially flushing %d spans for trace %d", num_finished, span.trace_id) + finished[0].set_metric("_dd.py.partial_flush", num_finished) + + trace.num_finished -= num_finished + + if len(trace.spans) == 0: + del self._traces[span.trace_id] + + spans = finished # type: Optional[List[Span]] + for tp in self._trace_processors: + try: + if spans is None: + return + spans = tp.process_trace(spans) + except Exception: + log.error("error applying processor %r", tp, exc_info=True) + + self._queue_span_count_metrics("spans_finished", "integration_name") + self._writer.write(spans) + return + + log.debug("trace %d has %d spans, %d finished", span.trace_id, len(trace.spans), trace.num_finished) + return None + + def shutdown(self, timeout): + # type: (Optional[float]) -> None + """ + This will stop the background writer/worker and flush any finished traces in the buffer. The tracer cannot be + used for tracing after this method has been called. A new tracer instance is required to continue tracing. + + :param timeout: How long in seconds to wait for the background worker to flush traces + before exiting or :obj:`None` to block until flushing has successfully completed (default: :obj:`None`) + :type timeout: :obj:`int` | :obj:`float` | :obj:`None` + """ + if config._telemetry_enabled and (self._span_metrics["spans_created"] or self._span_metrics["spans_finished"]): + telemetry.telemetry_writer._is_periodic = False + telemetry.telemetry_writer._enabled = True + # on_span_start queue span created counts in batches of 100. 
This ensures all remaining counts are sent + # before the tracer is shutdown. + self._queue_span_count_metrics("spans_created", "integration_name", 1) + # on_span_finish(...) queues span finish metrics in batches of 100. + # This ensures all remaining counts are sent before the tracer is shutdown. + self._queue_span_count_metrics("spans_finished", "integration_name", 1) + telemetry.telemetry_writer.periodic(True) + + try: + self._writer.stop(timeout) + except ServiceStatusError: + # It's possible the writer never got started in the first place :( + pass + + def _queue_span_count_metrics(self, metric_name, tag_name, min_count=100): + # type: (str, str, int) -> None + """Queues a telemetry count metric for span created and span finished""" + # perf: telemetry_metrics_writer.add_count_metric(...) is an expensive operation. + # We should avoid calling this method on every invocation of span finish and span start. + if config._telemetry_enabled and sum(self._span_metrics[metric_name].values()) >= min_count: + for tag_value, count in self._span_metrics[metric_name].items(): + telemetry.telemetry_writer.add_count_metric( + TELEMETRY_NAMESPACE_TAG_TRACER, metric_name, count, tags=((tag_name, tag_value),) + ) + self._span_metrics[metric_name] = defaultdict(int) + + +@attr.s +class SpanSamplingProcessor(SpanProcessor): + """SpanProcessor for sampling single spans: + + * Span sampling must be applied after trace sampling priority has been set. + * Span sampling rules are specified with a sample rate or rate limit as well as glob patterns + for matching spans on service and name. + * If the span sampling decision is to keep the span, then span sampling metrics are added to the span. + * If a dropped trace includes a span that had been kept by a span sampling rule, then the span is sent to the + Agent even if the dropped trace is not (as is the case when trace stats computation is enabled). + """ + + rules = attr.ib(type=List[SpanSamplingRule]) + + def on_span_start(self, span): + # type: (Span) -> None + pass + + def on_span_finish(self, span): + # type: (Span) -> None + # only sample if the span isn't already going to be sampled by trace sampler + if span.context.sampling_priority is not None and span.context.sampling_priority <= 0: + for rule in self.rules: + if rule.match(span): + rule.sample(span) + # If stats computation is enabled, we won't send all spans to the agent. + # In order to ensure that the agent does not update priority sampling rates + # due to single spans sampling, we set all of these spans to manual keep. 
+ if config._trace_compute_stats: + span.set_metric(SAMPLING_PRIORITY_KEY, USER_KEEP) + break + + +class PeerServiceProcessor(TraceProcessor): + def __init__(self, peer_service_config): + self._config = peer_service_config + self._set_defaults_enabled = self._config.set_defaults_enabled + self._mapping = self._config.peer_service_mapping + + def process_trace(self, trace): + if not trace: + return + + traces_to_process = [] + if not self._set_defaults_enabled: + traces_to_process = filter(lambda x: x.get_tag(self._config.tag_name), trace) + else: + traces_to_process = filter( + lambda x: x.get_tag(self._config.tag_name) or x.get_tag(SPAN_KIND) in self._config.enabled_span_kinds, + trace, + ) + any(map(lambda x: self._update_peer_service_tags(x), traces_to_process)) + + return trace + + def _update_peer_service_tags(self, span): + tag = span.get_tag(self._config.tag_name) + + if tag: # If the tag already exists, assume it is user generated + span.set_tag_str(self._config.source_tag_name, self._config.tag_name) + else: + for data_source in self._config.prioritized_data_sources: + tag = span.get_tag(data_source) + if tag: + span.set_tag_str(self._config.tag_name, tag) + span.set_tag_str(self._config.source_tag_name, data_source) + break + + if tag in self._mapping: + span.set_tag_str(self._config.remap_tag_name, tag) + span.set_tag_str(self._config.tag_name, self._config.peer_service_mapping[tag]) + + +class BaseServiceProcessor(TraceProcessor): + def __init__(self): + self._global_service = schematize_service_name((config.service or "").lower()) + + def process_trace(self, trace): + if not trace: + return + + traces_to_process = filter( + lambda x: x.service and x.service.lower() != self._global_service, + trace, + ) + any(map(lambda x: self._update_dd_base_service(x), traces_to_process)) + + return trace + + def _update_dd_base_service(self, span): + span.set_tag_str(key=BASE_SERVICE_KEY, value=self._global_service) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/rate_limiter.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/rate_limiter.py new file mode 100644 index 0000000..e06493f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/rate_limiter.py @@ -0,0 +1,257 @@ +from __future__ import division + +import random +import threading +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 + +import attr + +from ..internal import compat +from ..internal.constants import DEFAULT_SAMPLING_RATE_LIMIT + + +class RateLimiter(object): + """ + A token bucket rate limiter implementation + """ + + __slots__ = ( + "_lock", + "current_window_ns", + "last_update_ns", + "max_tokens", + "prev_window_rate", + "rate_limit", + "tokens", + "tokens_allowed", + "tokens_total", + ) + + def __init__(self, rate_limit): + # type: (int) -> None + """ + Constructor for RateLimiter + + :param rate_limit: The rate limit to apply for number of requests per second. 
+ rate limit > 0 max number of requests to allow per second, + rate limit == 0 to disallow all requests, + rate limit < 0 to allow all requests + :type rate_limit: :obj:`int` + """ + self.rate_limit = rate_limit + self.tokens = rate_limit # type: float + self.max_tokens = rate_limit + + self.last_update_ns = compat.monotonic_ns() + + self.current_window_ns = 0 # type: float + self.tokens_allowed = 0 + self.tokens_total = 0 + self.prev_window_rate = None # type: Optional[float] + + self._lock = threading.Lock() + + @property + def _has_been_configured(self): + return self.rate_limit != DEFAULT_SAMPLING_RATE_LIMIT + + def is_allowed(self, timestamp_ns): + # type: (int) -> bool + """ + Check whether the current request is allowed or not + + This method will also reduce the number of available tokens by 1 + + :param int timestamp_ns: timestamp in nanoseconds for the current request. + :returns: Whether the current request is allowed or not + :rtype: :obj:`bool` + """ + # Determine if it is allowed + allowed = self._is_allowed(timestamp_ns) + # Update counts used to determine effective rate + self._update_rate_counts(allowed, timestamp_ns) + return allowed + + def _update_rate_counts(self, allowed, timestamp_ns): + # type: (bool, int) -> None + # No tokens have been seen yet, start a new window + if not self.current_window_ns: + self.current_window_ns = timestamp_ns + + # If more than 1 second has past since last window, reset + # DEV: We are comparing nanoseconds, so 1e9 is 1 second + elif timestamp_ns - self.current_window_ns >= 1e9: + # Store previous window's rate to average with current for `.effective_rate` + self.prev_window_rate = self._current_window_rate() + self.tokens_allowed = 0 + self.tokens_total = 0 + self.current_window_ns = timestamp_ns + + # Keep track of total tokens seen vs allowed + if allowed: + self.tokens_allowed += 1 + self.tokens_total += 1 + + def _is_allowed(self, timestamp_ns): + # type: (int) -> bool + # Rate limit of 0 blocks everything + if self.rate_limit == 0: + return False + + # Negative rate limit disables rate limiting + elif self.rate_limit < 0: + return True + + # Lock, we need this to be thread safe, it should be shared by all threads + with self._lock: + self._replenish(timestamp_ns) + + if self.tokens >= 1: + self.tokens -= 1 + return True + + return False + + def _replenish(self, timestamp_ns): + # type: (int) -> None + try: + # If we are at the max, we do not need to add any more + if self.tokens == self.max_tokens: + return + + # Add more available tokens based on how much time has passed + # DEV: We store as nanoseconds, convert to seconds + elapsed = (timestamp_ns - self.last_update_ns) / 1e9 + finally: + # always update the timestamp + # we can't update at the beginning of the function, since if we did, our calculation for + # elapsed would be incorrect + self.last_update_ns = timestamp_ns + + # Update the number of available tokens, but ensure we do not exceed the max + self.tokens = min( + self.max_tokens, + self.tokens + (elapsed * self.rate_limit), + ) + + def _current_window_rate(self): + # type: () -> float + # No tokens have been seen, effectively 100% sample rate + # DEV: This is to avoid division by zero error + if not self.tokens_total: + return 1.0 + + # Get rate of tokens allowed + return self.tokens_allowed / self.tokens_total + + @property + def effective_rate(self): + # type: () -> float + """ + Return the effective sample rate of this rate limiter + + :returns: Effective sample rate value 0.0 <= rate <= 1.0 + :rtype: 
:obj:`float`` + """ + # If we have not had a previous window yet, return current rate + if self.prev_window_rate is None: + return self._current_window_rate() + + return (self._current_window_rate() + self.prev_window_rate) / 2.0 + + def __repr__(self): + return "{}(rate_limit={!r}, tokens={!r}, last_update_ns={!r}, effective_rate={!r})".format( + self.__class__.__name__, + self.rate_limit, + self.tokens, + self.last_update_ns, + self.effective_rate, + ) + + __str__ = __repr__ + + +class RateLimitExceeded(Exception): + pass + + +@attr.s +class BudgetRateLimiterWithJitter(object): + """A budget rate limiter with jitter. + + The jitter is induced by a uniform distribution. The rate limit can be + specified with ``limit_rate`` and the time scale can be controlled with the + ``tau`` parameter (which defaults to 1 second). The initial budget is the + product between ``limit_rate`` and the time-scale parameter ``tau``, which + is also taken as the maximum budget. By default, the ``RateLimitExceeded`` + exception is raised when the rate limit is exceeded. This can be changed by + setting ``raise_on_exceed`` to ``False``. The ``on_exceed`` argument can be + used to pass a callback that is to be called whenever the rate limit is + exceeded. The ``call_once`` argument controls whether the callback should be + called only once for every rate limit excess or every time the rate limiter + is invoked. + + Instances of this class can also be used as decorators. + + Since the initial and maximum budget are set to ``limit_rate * tau``, the + rate limiter could have an initial burst phase. When this is not desired, + ``tau`` should be set to ``1 / limit_rate`` to ensure an initial and maximum + budget of ``1``. + """ + + limit_rate = attr.ib(type=float) + tau = attr.ib(type=float, default=1.0) + raise_on_exceed = attr.ib(type=bool, default=True) + on_exceed = attr.ib(type=Callable, default=None) + call_once = attr.ib(type=bool, default=False) + budget = attr.ib(type=float, init=False) + max_budget = attr.ib(type=float, init=False) + last_time = attr.ib(type=float, init=False, factory=compat.monotonic) + _lock = attr.ib(type=threading.Lock, init=False, factory=threading.Lock) + + def __attrs_post_init__(self): + if self.limit_rate == float("inf"): + self.budget = self.max_budget = float("inf") + elif self.limit_rate: + self.budget = self.max_budget = self.limit_rate * self.tau + else: + self.budget = self.max_budget = 1.0 + self._on_exceed_called = False + + def limit(self, f=None, *args, **kwargs): + # type: (Optional[Callable[..., Any]], *Any, **Any) -> Any + """Make rate-limited calls to a function with the given arguments.""" + should_call = False + with self._lock: + now = compat.monotonic() + self.budget += self.limit_rate * (now - self.last_time) * (0.5 + random.random()) # jitter + should_call = self.budget >= 1.0 + if self.budget > self.max_budget: + self.budget = self.max_budget + self.last_time = now + + if should_call: + self._on_exceed_called = False + self.budget -= 1.0 + return f(*args, **kwargs) if f is not None else None + + if self.on_exceed is not None: + if not self.call_once: + self.on_exceed() + elif not self._on_exceed_called: + self.on_exceed() + self._on_exceed_called = True + + if self.raise_on_exceed: + raise RateLimitExceeded() + else: + return RateLimitExceeded + + def __call__(self, f): + # type: (Callable[..., Any]) -> Callable[..., Any] + def limited_f(*args, **kwargs): + return self.limit(f, *args, **kwargs) + + return limited_f diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_connectors.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_connectors.py new file mode 100644 index 0000000..06d358f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_connectors.py @@ -0,0 +1,90 @@ +from ctypes import c_char +import json +import multiprocessing +import os +import sys +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Mapping # noqa:F401 +from uuid import UUID + +from ddtrace.internal.compat import to_unicode +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + +# Size of the shared variable. It's calculated based on Remote Config Payloads. At 2023-04-26 we measure on stagging +# RC payloads and the max size of a multiprocess.array was 139.002 (sys.getsizeof(data.value)) and +# max len 138.969 (len(data.value)) +SHARED_MEMORY_SIZE = 603432 + +SharedDataType = Mapping[str, Any] + + +class UUIDEncoder(json.JSONEncoder): + def default(self, obj): + # type: (Any) -> Any + if isinstance(obj, UUID): + # if the obj is uuid, we simply return the value of uuid + return obj.hex + return json.JSONEncoder.default(self, obj) + + +class PublisherSubscriberConnector(object): + """ "PublisherSubscriberConnector is the bridge between Publisher and Subscriber class that uses an array of chars + to share information between processes. `multiprocessing.Array``, as far as we know, was the most efficient way to + share information. We compare this approach with: Multiprocess Manager, Multiprocess Value, Multiprocess Queues + """ + + def __init__(self): + self.data = multiprocessing.Array(c_char, SHARED_MEMORY_SIZE, lock=False) + # Checksum attr validates if the Publisher send new data + self.checksum = -1 + # shared_data_counter attr validates if the Subscriber send new data + self.shared_data_counter = 0 + + @staticmethod + def _hash_config(config_raw, metadata_raw): + # type: (Any, Any) -> int + return hash(str(config_raw) + str(metadata_raw)) + + def read(self): + # type: () -> SharedDataType + config_raw = to_unicode(self.data.value) + config = json.loads(config_raw) if config_raw else {} + if config: + shared_data_counter = config["shared_data_counter"] + if shared_data_counter > self.shared_data_counter: + self.shared_data_counter += 1 + return config + return {} + + def write(self, metadata, config_raw): + # type: (Any, Any) -> None + last_checksum = self._hash_config(config_raw, metadata) + if last_checksum != self.checksum: + data_len = len(self.data.value) + if data_len >= (SHARED_MEMORY_SIZE - 1000): + log.warning("Datadog Remote Config shared data is %s/%s", data_len, SHARED_MEMORY_SIZE) + data = self.serialize(metadata, config_raw, self.shared_data_counter + 1) + self.data.value = data + log.debug( + "[%s][P: %s] write message of size %s and len %s", + os.getpid(), + os.getppid(), + sys.getsizeof(self.data.value), + data_len, + ) + self.checksum = last_checksum + + @staticmethod + def serialize(metadata, config_raw, shared_data_counter): + # type: (Any, Dict[str, Any], int) -> bytes + return bytes( + json.dumps( + {"metadata": metadata, "config": config_raw, "shared_data_counter": shared_data_counter}, + cls=UUIDEncoder, + ), + encoding="utf-8", + ) diff --git 
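`PublisherSubscriberConnector` above shares configuration between processes by JSON-encoding it into a fixed-size `multiprocessing.Array` of `c_char`, bumping `shared_data_counter` on each write so subscribers can tell new data from old. A reduced sketch of that round trip (buffer size and payload here are illustrative, not the vendored constants):

from ctypes import c_char
import json
import multiprocessing

# Write side (publisher) and read side (subscriber) over one shared char array.
shared = multiprocessing.Array(c_char, 1024, lock=False)

payload = {"metadata": [None], "config": [{"asm": {"enabled": True}}], "shared_data_counter": 1}
shared.value = bytes(json.dumps(payload), encoding="utf-8")

raw = shared.value.decode("utf-8")
print(json.loads(raw)["shared_data_counter"])  # 1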
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_publishers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_publishers.py new file mode 100644 index 0000000..bcbda92 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_publishers.py @@ -0,0 +1,120 @@ +import abc +import copy +import os +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Callable # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Tuple # noqa:F401 + + from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector # noqa:F401 + from ddtrace.internal.remoteconfig._pubsub import PubSub + + PreprocessFunc = Callable[[Dict[str, Any], Optional[PubSub]], Any] + +log = get_logger(__name__) + + +class RemoteConfigPublisherBase(metaclass=abc.ABCMeta): + _preprocess_results_func = None # type: Optional[PreprocessFunc] + + def __init__(self, data_connector, preprocess_func=None): + # type: (PublisherSubscriberConnector, Optional[PreprocessFunc]) -> None + self._data_connector = data_connector + self._preprocess_results_func = preprocess_func + + def dispatch(self, pubsub_instance=None): + # type: (Optional[Any]) -> None + raise NotImplementedError + + def append(self, config_content, target, config_metadata): + # type: (Optional[Any], str, Optional[Any]) -> None + raise NotImplementedError + + +class RemoteConfigPublisher(RemoteConfigPublisherBase): + """Standard Remote Config Publisher: each time Remote Config Client receives new payloads, RemoteConfigPublisher + shared them to all process. Dynamic Instrumentation uses this class + """ + + def __init__(self, data_connector, preprocess_func=None): + # type: (PublisherSubscriberConnector, Optional[PreprocessFunc]) -> None + super(RemoteConfigPublisher, self).__init__(data_connector, preprocess_func) + self._config_and_metadata = [] # type: List[Tuple[Optional[Any], Optional[Any]]] + + def append(self, config_content, target="", config_metadata=None): + # type: (Optional[Any], str, Optional[Any]) -> None + self._config_and_metadata.append((config_content, config_metadata)) + + def dispatch(self, pubsub_instance=None): + # type: (Optional[Any]) -> None + from attr import asdict + + # TODO: RemoteConfigPublisher doesn't need _preprocess_results_func callback at this moment. Uncomment those + # lines if a new product need it + # if self._preprocess_results_func: + # config = self._preprocess_results_func(config, pubsub_instance) + + log.debug("[%s][P: %s] Publisher publish data: %s", os.getpid(), os.getppid(), self._config_and_metadata) + + self._data_connector.write( + [asdict(metadata) if metadata else None for _, metadata in self._config_and_metadata], + [config for config, _ in self._config_and_metadata], + ) + self._config_and_metadata = [] + + +class RemoteConfigPublisherMergeDicts(RemoteConfigPublisherBase): + """Each time Remote Config Client receives a new payload, Publisher stores the target file path and its payload. + When the Client finishes to update/add the configuration, Client calls to `publisher.dispatch` which merges all + payloads and send it to the subscriber. 
ASM uses this class + """ + + def __init__(self, data_connector, preprocess_func): + # type: (PublisherSubscriberConnector, PreprocessFunc) -> None + super(RemoteConfigPublisherMergeDicts, self).__init__(data_connector, preprocess_func) + self._configs = {} # type: Dict[str, Any] + + def append(self, config_content, target, config_metadata=None): + # type: (Optional[Any], str, Optional[Any]) -> None + if not self._configs.get(target): + self._configs[target] = {} + + if config_content is False: + # Remove old config from the configs dict. _remove_previously_applied_configurations function should + # call to this method + del self._configs[target] + elif config_content is not None: + # Append the new config to the configs dict. _load_new_configurations function should + # call to this method + if isinstance(config_content, dict): + self._configs[target].update(config_content) + else: + raise ValueError("target %s config %s has type of %s" % (target, config_content, type(config_content))) + + def dispatch(self, pubsub_instance=None): + # type: (Optional[Any]) -> None + config_result = {} # type: Dict[str, Any] + try: + for _target, config_item in self._configs.items(): + for key, value in config_item.items(): + if isinstance(value, list): + config_result[key] = config_result.get(key, []) + value + elif isinstance(value, dict): + config_result[key] = value + else: + log.debug("[%s][P: %s] Invalid value %s for key %s", os.getpid(), os.getppid(), value, key) + result = copy.deepcopy(config_result) + if self._preprocess_results_func: + result = self._preprocess_results_func(result, pubsub_instance) + log.debug("[%s][P: %s] PublisherAfterMerge publish %s", os.getpid(), os.getppid(), str(result)[:100]) + self._data_connector.write({}, result) + except Exception: + log.error("[%s]: PublisherAfterMerge error", os.getpid(), exc_info=True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_pubsub.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_pubsub.py new file mode 100644 index 0000000..f6b366f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_pubsub.py @@ -0,0 +1,113 @@ +""" +The Datadog Remote Configuration Publisher-Subscriber system. + +A common Python web application use to execute a WSGI server (e.x: Gunicorn) and this server use many workers. + +Remote Configuration needs to keep all workers updated as soon as possible. Therefore, Remote Configuration may start +BEFORE the Gunicorn server in sitecustomize.py, it starts to poll information from the RC Agent, and for each new +payload, through this Pub-sub system, share this information with all child processes. + +In addition to this, there are different Remote Configuration behaviors: + +- When the Remote Configuration Client receives a new product target file payload, we need to call a callback. +- When the Remote Configuration Client receives a new product target file payload, we need to aggregate this target + file data for each product. After that, call the callback with all aggregated information. +- Remote Configuration may have a callback for each product. +- Remote Configuration may have a callback for one or more products. +- For each payload, Remote Configuration needs to execute specific actions on the main process and a different action + on child processes. + +To achieve this goal, a Remote Configuration product may register a PubSub instance. 
A PubSub class contains a publisher +that receives the Remote Configuration payload and shares it with Pubsub Subscriber instance. The Subscriber starts a +thread on each child process, waiting for a new update of the shared data between the Publisher on the main process +and the child process. Remote Configuration creates a thread listening to the main process for each instance of PubSub. +To connect this publisher and the child processes subscribers, we need a connector class: Shared Memory or File. +Each instance of PubSub works as a singleton when Remote Configuration dispatches the callbacks. That means if we +register the same instance of PubSub class on different products, we would have one thread waiting to the main process. + +Each DD Product (APM, ASM, DI, CI) may implement its PubSub Class. + +Example 1: A callback for one or more Remote Configuration Products +------------------------------------------------------------------- +AppSec needs to aggregate different products in the same callback for all child processes. + +class AppSecRC(PubSubMergeFirst): + __shared_data__ = ConnectorSharedMemory() + + def __init__(self, _preprocess_results, callback, name="Default"): + self._publisher = self.__publisher_class__(self.__shared_data__, _preprocess_results) + self._subscriber = self.__subscriber_class__(self.__shared_data__, callback, name) + +asm_callback = AppSecRC(preprocess_1click_activation, appsec_callback, "ASM") + +remoteconfig_poller.register("ASM_PRODUCT", asm_callback) +remoteconfig_poller.register("ASM_FEATURES_PRODUCT", asm_callback) + + +Example 2: One Callback for each product +---------------------------------------- +DI needs to aggregate different products in the same callback for all child processes. + +class DynamicInstrumentationRC(PubSub): + __shared_data__ = ConnectorSharedMemory() + + def __init__(self, _preprocess_results, callback, name="Default"): + self._publisher = self.__publisher_class__(self.__shared_data__, _preprocess_results) + self._subscriber = self.__subscriber_class__(self.__shared_data__, callback, name) + +di_callback_1 = DynamicInstrumentationRC(callback=di_callback_1, name="ASM") +di_callback_2 = DynamicInstrumentationRC(callback=di_callback_2, name="ASM") + +remoteconfig_poller.register("DI_1_PRODUCT", di_callback) +remoteconfig_poller.register("DI_2_PRODUCT", di_callback_2) + +""" +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.internal.remoteconfig._publishers import RemoteConfigPublisherBase # noqa:F401 +from ddtrace.internal.remoteconfig._subscribers import RemoteConfigSubscriber # noqa:F401 + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Optional # noqa:F401 + + from ddtrace import Tracer # noqa:F401 + from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector # noqa:F401 + +log = get_logger(__name__) + + +class PubSub(object): + _shared_data = None # type: PublisherSubscriberConnector + _publisher = None # type: RemoteConfigPublisherBase + _subscriber = None # type: RemoteConfigSubscriber + + def start_subscriber(self): + self._subscriber.start() + + def restart_subscriber(self, join=False): + self._subscriber.force_restart(join) + + def _poll_data(self, test_tracer=None): + # type: (Optional[Tracer]) -> None + self._subscriber._get_data_from_connector_and_exec(test_tracer=test_tracer) + + def stop(self, join=False): + # type: (bool) -> None + self._subscriber.stop(join=join) + + def publish(self): 
+ # type: () -> None + self._publisher.dispatch(self) + + def append_and_publish(self, config_content=None, target="", config_metadata=None): + # type: (Optional[Any], str, Optional[Any]) -> None + """Append data to publisher and send the data to subscriber. It's a shortcut for testing purposes""" + self.append(config_content, target, config_metadata) + self.publish() + + def append(self, config_content, target, config_metadata): + # type: (Optional[Any], str, Optional[Any]) -> None + self._publisher.append(config_content, target, config_metadata) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_subscribers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_subscribers.py new file mode 100644 index 0000000..6a112f5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/_subscribers.py @@ -0,0 +1,59 @@ +import os +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.internal.periodic import PeriodicService +from ddtrace.internal.remoteconfig.utils import get_poll_interval_seconds + + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable # noqa:F401 + from typing import Optional # noqa:F401 + + from ddtrace import Tracer # noqa:F401 + from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector # noqa:F401 + from ddtrace.internal.remoteconfig._connectors import SharedDataType # noqa:F401 + + +log = get_logger(__name__) + + +class RemoteConfigSubscriber(PeriodicService): + def __init__(self, data_connector, callback, name): + # type: (PublisherSubscriberConnector, Callable, str) -> None + super().__init__(get_poll_interval_seconds() / 2) + + self._data_connector = data_connector + self._callback = callback + self._name = name + + log.debug("[PID %d] %s initialized", os.getpid(), self) + + def _exec_callback(self, data, test_tracer=None): + # type: (SharedDataType, Optional[Tracer]) -> None + if data: + log.debug("[PID %d] %s _exec_callback: %s", os.getpid(), self, str(data)[:50]) + self._callback(data, test_tracer=test_tracer) + + def _get_data_from_connector_and_exec(self, test_tracer=None): + # type: (Optional[Tracer]) -> None + data = self._data_connector.read() + self._exec_callback(data, test_tracer=test_tracer) + + def periodic(self): + try: + log.debug("[PID %d | PPID %d] %s is getting data", os.getpid(), os.getppid(), self) + self._get_data_from_connector_and_exec() + log.debug("[PID %d | PPID %d] %s got data", os.getpid(), os.getppid(), self) + except Exception: + log.error("[PID %d | PPID %d] %s while getting data", os.getpid(), os.getppid(), self, exc_info=True) + + def force_restart(self, join=False): + self.stop() + if join: + self.join() + self.start() + log.debug("[PID %d | PPID %d] %s restarted", os.getpid(), os.getppid(), self) + + def __str__(self): + return f"Subscriber {self._name}" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/client.py new file mode 100644 index 0000000..2064ae6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/client.py @@ -0,0 +1,593 @@ +import base64 +from datetime import datetime +import enum +import hashlib +import json +import os +import re +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Mapping # noqa:F401 +from typing 
import Optional # noqa:F401 +from typing import Set # noqa:F401 +import uuid + +import attr +import cattr +from envier import En + +import ddtrace +from ddtrace.appsec._capabilities import _rc_capabilities as appsec_rc_capabilities +from ddtrace.internal import agent +from ddtrace.internal import gitmetadata +from ddtrace.internal import runtime +from ddtrace.internal.hostname import get_hostname +from ddtrace.internal.logger import get_logger +from ddtrace.internal.remoteconfig.constants import REMOTE_CONFIG_AGENT_ENDPOINT +from ddtrace.internal.runtime import container +from ddtrace.internal.service import ServiceStatus +from ddtrace.internal.utils.time import parse_isoformat + +from ..utils.formats import parse_tags_str +from ..utils.version import _pep440_to_semver +from ._pubsub import PubSub # noqa:F401 + + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable # noqa:F401 + from typing import MutableMapping # noqa:F401 + from typing import Tuple # noqa:F401 + from typing import Union # noqa:F401 + +log = get_logger(__name__) + +TARGET_FORMAT = re.compile(r"^(datadog/\d+|employee)/([^/]+)/([^/]+)/([^/]+)$") + + +class RemoteConfigClientConfig(En): + __prefix__ = "_dd.remote_configuration" + + log_payloads = En.v(bool, "log_payloads", default=False) + + +config = RemoteConfigClientConfig() + + +class Capabilities(enum.IntFlag): + APM_TRACING_SAMPLE_RATE = 1 << 12 + APM_TRACING_LOGS_INJECTION = 1 << 13 + APM_TRACING_HTTP_HEADER_TAGS = 1 << 14 + APM_TRACING_CUSTOM_TAGS = 1 << 15 + APM_TRACING_ENABLED = 1 << 19 + + +class RemoteConfigError(Exception): + """ + An error occurred during the configuration update procedure. + The error is reported to the agent. + """ + + +@attr.s +class ConfigMetadata(object): + """ + Configuration TUF target metadata + """ + + id = attr.ib(type=str) + product_name = attr.ib(type=str) + sha256_hash = attr.ib(type=Optional[str]) + length = attr.ib(type=Optional[int]) + tuf_version = attr.ib(type=Optional[int]) + apply_state = attr.ib(type=Optional[int], default=1, eq=False) + apply_error = attr.ib(type=Optional[str], default=None, eq=False) + + +@attr.s +class Signature(object): + keyid = attr.ib(type=str) + sig = attr.ib(type=str) + + +@attr.s +class Key(object): + keytype = attr.ib(type=str) + keyid_hash_algorithms = attr.ib(type=List[str]) + keyval = attr.ib(type=Mapping) + scheme = attr.ib(type=str) + + +@attr.s +class Role(object): + keyids = attr.ib(type=List[str]) + threshold = attr.ib(type=int) + + +@attr.s +class Root(object): + _type = attr.ib(type=str, validator=attr.validators.in_(("root",))) + spec_version = attr.ib(type=str) + consistent_snapshot = attr.ib(type=bool) + expires = attr.ib(type=datetime, converter=parse_isoformat) + keys = attr.ib(type=Mapping[str, Key]) + roles = attr.ib(type=Mapping[str, Role]) + version = attr.ib(type=int) + + +@attr.s +class SignedRoot(object): + signatures = attr.ib(type=List[Signature]) + signed = attr.ib(type=Root) + + +@attr.s +class TargetDesc(object): + length = attr.ib(type=int) + hashes = attr.ib(type=Mapping[str, str]) + custom = attr.ib(type=Mapping[str, Any]) + + +@attr.s +class Targets(object): + _type = attr.ib(type=str, validator=attr.validators.in_(("targets",))) + custom = attr.ib(type=Mapping[str, Any]) + expires = attr.ib(type=datetime, converter=parse_isoformat) + spec_version = attr.ib(type=str, validator=attr.validators.in_(("1.0", "1.0.0"))) + targets = attr.ib(type=Mapping[str, TargetDesc]) + version = attr.ib(type=int) + + +@attr.s +class SignedTargets(object): + signatures = 
attr.ib(type=List[Signature]) + signed = attr.ib(type=Targets) + + +@attr.s +class TargetFile(object): + path = attr.ib(type=str) + raw = attr.ib(type=str) + + +@attr.s +class AgentPayload(object): + roots = attr.ib(type=List[SignedRoot], default=None) + targets = attr.ib(type=SignedTargets, default=None) + target_files = attr.ib(type=List[TargetFile], default=[]) + client_configs = attr.ib(type=Set[str], default={}) + + +AppliedConfigType = Dict[str, ConfigMetadata] +TargetsType = Dict[str, ConfigMetadata] + + +class RemoteConfigClient(object): + """ + The Remote Configuration client regularly checks for updates on the agent + and dispatches configurations to registered products. + """ + + def __init__(self): + # type: () -> None + tracer_version = _pep440_to_semver() + + self.id = str(uuid.uuid4()) + self.agent_url = agent.get_trace_url() + + self._headers = {"content-type": "application/json"} + additional_header_str = os.environ.get("_DD_REMOTE_CONFIGURATION_ADDITIONAL_HEADERS") + if additional_header_str is not None: + self._headers.update(parse_tags_str(additional_header_str)) + + container_info = container.get_container_info() + if container_info is not None: + container_id = container_info.container_id + if container_id is not None: + self._headers["Datadog-Container-Id"] = container_id + + tags = ddtrace.config.tags.copy() + + # Add git metadata tags, if available + gitmetadata.add_tags(tags) + + if ddtrace.config.env: + tags["env"] = ddtrace.config.env + if ddtrace.config.version: + tags["version"] = ddtrace.config.version + tags["tracer_version"] = tracer_version + tags["host_name"] = get_hostname() + + self._client_tracer = dict( + runtime_id=runtime.get_runtime_id(), + language="python", + tracer_version=tracer_version, + service=ddtrace.config.service, + extra_services=list(ddtrace.config._get_extra_services()), + env=ddtrace.config.env, + app_version=ddtrace.config.version, + tags=[":".join(_) for _ in tags.items()], + ) + self.cached_target_files = [] # type: List[AppliedConfigType] + self.converter = cattr.Converter() + + # cattrs doesn't implement datetime converter in Py27, we should register + def date_to_fromisoformat(val, cls): + return val + + self.converter.register_structure_hook(datetime, date_to_fromisoformat) + + def base64_to_struct(val, cls): + raw = base64.b64decode(val) + obj = json.loads(raw) + return self.converter.structure_attrs_fromdict(obj, cls) + + self.converter.register_structure_hook(SignedRoot, base64_to_struct) + self.converter.register_structure_hook(SignedTargets, base64_to_struct) + + self._products = dict() # type: MutableMapping[str, PubSub] + self._applied_configs = dict() # type: AppliedConfigType + self._last_targets_version = 0 + self._last_error = None # type: Optional[str] + self._backend_state = None # type: Optional[str] + + def _encode_capabilities(self, capabilities: enum.IntFlag) -> str: + return base64.b64encode(capabilities.to_bytes((capabilities.bit_length() + 7) // 8, "big")).decode() + + def renew_id(self): + # called after the process is forked to declare a new id + self.id = str(uuid.uuid4()) + self._client_tracer["runtime_id"] = runtime.get_runtime_id() + + def register_product(self, product_name, pubsub_instance=None): + # type: (str, Optional[PubSub]) -> None + if pubsub_instance is not None: + self._products[product_name] = pubsub_instance + else: + self._products.pop(product_name, None) + + def update_product_callback(self, product_name, callback): + # type: (str, Callable) -> bool + pubsub_instance = 
self._products.get(product_name) + if pubsub_instance: + pubsub_instance._subscriber._callback = callback + if not self.is_subscriber_running(pubsub_instance): + pubsub_instance.start_subscriber() + return True + return False + + def start_products(self, products_list): + # type: (list) -> None + for product_name in products_list: + pubsub_instance = self._products.get(product_name) + if pubsub_instance: + pubsub_instance.restart_subscriber() + + def unregister_product(self, product_name): + # type: (str) -> None + self._products.pop(product_name, None) + + def get_pubsubs(self): + for pubsub in set(self._products.values()): + yield pubsub + + def is_subscriber_running(self, pubsub_to_check): + # type: (PubSub) -> bool + for pubsub in self.get_pubsubs(): + if pubsub_to_check._subscriber is pubsub._subscriber and pubsub._subscriber.status == ServiceStatus.RUNNING: + return True + return False + + def reset_products(self): + self._products = dict() + + def _send_request(self, payload): + # type: (str) -> Optional[Mapping[str, Any]] + try: + log.debug( + "[%s][P: %s] Requesting RC data from products: %s", os.getpid(), os.getppid(), str(self._products) + ) # noqa: G200 + + if config.log_payloads: + log.debug("[%s][P: %s] RC request payload: %s", os.getpid(), os.getppid(), payload) # noqa: G200 + + conn = agent.get_connection(self.agent_url, timeout=ddtrace.config._agent_timeout_seconds) + conn.request("POST", REMOTE_CONFIG_AGENT_ENDPOINT, payload, self._headers) + resp = conn.getresponse() + data_length = resp.headers.get("Content-Length") + if data_length is not None and int(data_length) == 0: + log.debug("[%s][P: %s] RC response payload empty", os.getpid(), os.getppid()) + return None + data = resp.read() + + if config.log_payloads: + log.debug( + "[%s][P: %s] RC response payload: %s", os.getpid(), os.getppid(), data.decode("utf-8") + ) # noqa: G200 + except OSError as e: + log.debug("Unexpected connection error in remote config client request: %s", str(e)) # noqa: G200 + return None + finally: + conn.close() + + if resp.status == 404: + # Remote configuration is not enabled or unsupported by the agent + return None + + if resp.status < 200 or resp.status >= 300: + log.debug("Unexpected error: HTTP error status %s, reason %s", resp.status, resp.reason) + return None + + return json.loads(data) + + @staticmethod + def _extract_target_file(payload, target, config): + # type: (AgentPayload, str, ConfigMetadata) -> Optional[Dict[str, Any]] + candidates = [item.raw for item in payload.target_files if item.path == target] + if len(candidates) != 1 or candidates[0] is None: + log.debug( + "invalid target_files for %r. 
target files: %s", target, [item.path for item in payload.target_files] + ) + return None + + try: + raw = base64.b64decode(candidates[0]) + except Exception: + raise RemoteConfigError("invalid base64 target_files for {!r}".format(target)) + + computed_hash = hashlib.sha256(raw).hexdigest() + if computed_hash != config.sha256_hash: + raise RemoteConfigError( + "mismatch between target {!r} hashes {!r} != {!r}".format(target, computed_hash, config.sha256_hash) + ) + + try: + return json.loads(raw) + except Exception: + raise RemoteConfigError("invalid JSON content for target {!r}".format(target)) + + @staticmethod + def _parse_target(target, metadata): + # type: (str, TargetDesc) -> ConfigMetadata + m = TARGET_FORMAT.match(target) + if m is None: + raise RemoteConfigError("unexpected target format {!r}".format(target)) + _, product_name, config_id, _ = m.groups() + return ConfigMetadata( + id=config_id, + product_name=product_name, + sha256_hash=metadata.hashes.get("sha256"), + length=metadata.length, + tuf_version=metadata.custom.get("v"), + ) + + def _build_payload(self, state): + # type: (Mapping[str, Any]) -> Mapping[str, Any] + self._client_tracer["extra_services"] = list(ddtrace.config._get_extra_services()) + capabilities = ( + appsec_rc_capabilities() + | Capabilities.APM_TRACING_SAMPLE_RATE + | Capabilities.APM_TRACING_LOGS_INJECTION + | Capabilities.APM_TRACING_HTTP_HEADER_TAGS + | Capabilities.APM_TRACING_CUSTOM_TAGS + | Capabilities.APM_TRACING_ENABLED + ) + return dict( + client=dict( + id=self.id, + products=list(self._products.keys()), + is_tracer=True, + client_tracer=self._client_tracer, + state=state, + capabilities=self._encode_capabilities(capabilities), + ), + cached_target_files=self.cached_target_files, + ) + + def _build_state(self): + # type: () -> Mapping[str, Any] + has_error = self._last_error is not None + state = dict( + root_version=1, + targets_version=self._last_targets_version, + config_states=[ + dict( + id=config.id, + version=config.tuf_version, + product=config.product_name, + apply_state=config.apply_state, + apply_error=config.apply_error, + ) + if config.apply_error + else dict( + id=config.id, + version=config.tuf_version, + product=config.product_name, + apply_state=config.apply_state, + ) + for config in self._applied_configs.values() + ], + has_error=has_error, + ) + if self._backend_state is not None: + state["backend_client_state"] = self._backend_state + if has_error: + state["error"] = self._last_error + return state + + def _process_targets(self, payload): + # type: (AgentPayload) -> Tuple[Optional[int], Optional[str], Optional[TargetsType]] + if payload.targets is None: + # no targets received + return None, None, None + + signed = payload.targets.signed + targets = dict() # type: TargetsType + + for target, metadata in signed.targets.items(): + config = self._parse_target(target, metadata) + if config is not None: + targets[target] = config + + backend_state = signed.custom.get("opaque_backend_state") + return signed.version, backend_state, targets + + @staticmethod + def _apply_callback(list_callbacks, callback, config_content, target, config_metadata): + # type: (List[PubSub], Any, Any, str, ConfigMetadata) -> None + callback.append(config_content, target, config_metadata) + if callback not in list_callbacks and not any(filter(lambda x: x is callback, list_callbacks)): + list_callbacks.append(callback) + + def _remove_previously_applied_configurations(self, list_callbacks, applied_configs, client_configs, targets): + # type: 
(List[PubSub], AppliedConfigType, TargetsType, TargetsType) -> None + for target, config in self._applied_configs.items(): + if target in client_configs and targets.get(target) == config: + # The configuration has not changed. + applied_configs[target] = config + continue + elif target not in client_configs: + callback_action = False + else: + continue + + callback = self._products.get(config.product_name) + if callback: + try: + log.debug("[%s][P: %s] Disabling configuration: %s", os.getpid(), os.getppid(), target) + self._apply_callback(list_callbacks, callback, callback_action, target, config) + except Exception: + log.debug("error while removing product %s config %r", config.product_name, config) + continue + + def _load_new_configurations(self, list_callbacks, applied_configs, client_configs, payload): + # type: (List[PubSub], AppliedConfigType, TargetsType, AgentPayload) -> None + for target, config in client_configs.items(): + callback = self._products.get(config.product_name) + if callback: + applied_config = self._applied_configs.get(target) + if applied_config == config: + continue + + config_content = self._extract_target_file(payload, target, config) + if config_content is None: + continue + + try: + log.debug("[%s][P: %s] Load new configuration: %s. content", os.getpid(), os.getppid(), target) + self._apply_callback(list_callbacks, callback, config_content, target, config) + except Exception: + error_message = "Failed to apply configuration %s for product %r" % (config, config.product_name) + log.debug(error_message, exc_info=True) + config.apply_state = 3 # Error state + config.apply_error = error_message + applied_configs[target] = config + continue + else: + config.apply_state = 2 # Acknowledged (applied) + applied_configs[target] = config + + def _add_apply_config_to_cache(self): + if self._applied_configs: + cached_data = [] + for target, config in self._applied_configs.items(): + cached_data.append( + { + "path": target, + "length": config.length, + "hashes": [{"algorithm": "sha256", "hash": config.sha256_hash}], + } + ) + self.cached_target_files = cached_data + else: + self.cached_target_files = [] + + def _validate_config_exists_in_target_paths(self, payload_client_configs, payload_target_files): + # type: (Set[str], List[TargetFile]) -> None + paths = {_.path for _ in payload_target_files} + paths = paths.union({_["path"] for _ in self.cached_target_files}) + + # !(payload.client_configs is a subset of paths or payload.client_configs is equal to paths) + if not set(payload_client_configs) <= paths: + raise RemoteConfigError("Not all client configurations have target files") + + @staticmethod + def _validate_signed_target_files(payload_target_files, payload_targets_signed, client_configs): + # type: (List[TargetFile], Targets, TargetsType) -> None + for target in payload_target_files: + if (payload_targets_signed.targets and not payload_targets_signed.targets.get(target.path)) and ( + client_configs and not client_configs.get(target.path) + ): + raise RemoteConfigError( + "target file %s not exists in client_config and signed targets" % (target.path,) + ) + + def _publish_configuration(self, list_callbacks): + # type: (List[PubSub]) -> None + for callback_to_dispach in list_callbacks: + callback_to_dispach.publish() + + def _process_response(self, data): + # type: (Mapping[str, Any]) -> None + try: + payload = self.converter.structure_attrs_fromdict(data, AgentPayload) + except Exception: + log.debug("invalid agent payload received: %r", data, exc_info=True) + raise 
RemoteConfigError("invalid agent payload received") + + self._validate_config_exists_in_target_paths(payload.client_configs, payload.target_files) + + # 1. Deserialize targets + last_targets_version, backend_state, targets = self._process_targets(payload) + if last_targets_version is None or targets is None: + return + + client_configs = {k: v for k, v in targets.items() if k in payload.client_configs} + log.debug( + "[%s][P: %s] Retrieved client configs last version %s: %s", + os.getpid(), + os.getppid(), + last_targets_version, + client_configs, + ) + + self._validate_signed_target_files(payload.target_files, payload.targets.signed, client_configs) + + # 2. Remove previously applied configurations + applied_configs = dict() # type: AppliedConfigType + list_callbacks = [] # type: List[PubSub] + self._remove_previously_applied_configurations(list_callbacks, applied_configs, client_configs, targets) + + # 3. Load new configurations + self._load_new_configurations(list_callbacks, applied_configs, client_configs, payload) + + self._publish_configuration(list_callbacks) + + self._last_targets_version = last_targets_version + self._applied_configs = applied_configs + self._backend_state = backend_state + + self._add_apply_config_to_cache() + + def request(self): + # type: () -> bool + try: + state = self._build_state() + payload = json.dumps(self._build_payload(state)) + response = self._send_request(payload) + if response is None: + return False + self._process_response(response) + self._last_error = None + return True + + except RemoteConfigError as e: + self._last_error = str(e) + log.debug("remote configuration client reported an error", exc_info=True) + except ValueError: + log.debug("Unexpected response data", exc_info=True) + except Exception: + log.debug("Unexpected error", exc_info=True) + + return False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/constants.py new file mode 100644 index 0000000..c5b0c12 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/constants.py @@ -0,0 +1,2 @@ +ASM_FEATURES_PRODUCT = "ASM_FEATURES" +REMOTE_CONFIG_AGENT_ENDPOINT = "v0.7/config" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/utils.py new file mode 100644 index 0000000..0b228cf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/utils.py @@ -0,0 +1,6 @@ +from ddtrace import config + + +def get_poll_interval_seconds(): + # type:() -> float + return config._remote_config_poll_interval diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/worker.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/worker.py new file mode 100644 index 0000000..cee5b0a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/remoteconfig/worker.py @@ -0,0 +1,181 @@ +import os +from typing import List # noqa:F401 + +from ddtrace.internal import agent +from ddtrace.internal import atexit +from ddtrace.internal import forksafe +from ddtrace.internal import periodic +from ddtrace.internal.logger import get_logger +from ddtrace.internal.remoteconfig._pubsub import PubSub # noqa:F401 +from ddtrace.internal.remoteconfig.client import RemoteConfigClient +from ddtrace.internal.remoteconfig.constants import REMOTE_CONFIG_AGENT_ENDPOINT +from ddtrace.internal.remoteconfig.utils import get_poll_interval_seconds 
+from ddtrace.internal.service import ServiceStatus +from ddtrace.internal.utils.time import StopWatch +from ddtrace.settings import _config as ddconfig + + +log = get_logger(__name__) + + +class RemoteConfigPoller(periodic.PeriodicService): + """Remote configuration worker. + + This implements a finite-state machine that allows checking the agent for + the expected endpoint, which could be enabled after the client is started. + """ + + _worker_lock = forksafe.Lock() + _enable = True + + def __init__(self): + super(RemoteConfigPoller, self).__init__(interval=get_poll_interval_seconds()) + self._client = RemoteConfigClient() + self._state = self._agent_check + self._parent_id = os.getpid() + log.debug("RemoteConfigWorker created with polling interval %d", get_poll_interval_seconds()) + + def _agent_check(self): + # type: () -> None + try: + info = agent.info() + except Exception: + info = None + + if info: + endpoints = info.get("endpoints", []) + if endpoints and ( + REMOTE_CONFIG_AGENT_ENDPOINT in endpoints or ("/" + REMOTE_CONFIG_AGENT_ENDPOINT) in endpoints + ): + self._state = self._online + return + log.debug( + "Agent is down or Remote Config is not enabled in the Agent\n" + "Check your Agent version, you need an Agent running on 7.39.1 version or above.\n" + "Check Your Remote Config environment variables on your Agent:\n" + "DD_REMOTE_CONFIGURATION_ENABLED=true\n" + "See: https://docs.datadoghq.com/agent/guide/how_remote_config_works/", + ) + + def _online(self): + # type: () -> None + with StopWatch() as sw: + if not self._client.request(): + # An error occurred, so we transition back to the agent check + self._state = self._agent_check + return + + elapsed = sw.elapsed() + log.debug("request config in %.5fs to %s", elapsed, self._client.agent_url) + + def periodic(self): + # type: () -> None + return self._state() + + def enable(self): + # type: () -> bool + # TODO: this is only temporary. DD_REMOTE_CONFIGURATION_ENABLED variable will be deprecated + rc_env_enabled = ddconfig._remote_config_enabled + if rc_env_enabled and self._enable: + if self.status == ServiceStatus.RUNNING: + return True + + self.start() + forksafe.register(self.reset_at_fork) + atexit.register(self.disable) + return True + return False + + def reset_at_fork(self): + # type: () -> None + """Client Id needs to be refreshed when application forks""" + self._enable = False + log.debug("[%s][P: %s] Remote Config Poller fork. Refreshing state", os.getpid(), os.getppid()) + self._client.renew_id() + + def start_subscribers_by_product(self, products_list): + # type: (List[str]) -> None + self._client.start_products(products_list) + + def _poll_data(self, test_tracer=None): + """Force subscribers to poll new data. This function is only used in tests""" + for pubsub in self._client.get_pubsubs(): + pubsub._poll_data(test_tracer=test_tracer) + + def stop_subscribers(self, join=False): + # type: (bool) -> None + """ + Disable the remote config service and drop, remote config can be re-enabled + by calling ``enable`` again. + """ + log.debug( + "[%s][P: %s] Remote Config Poller fork. 
Stopping Pubsub services", + os.getpid(), + self._parent_id, + ) + for pubsub in self._client.get_pubsubs(): + pubsub.stop(join=join) + + def disable(self, join=False): + # type: (bool) -> None + self.stop_subscribers(join=join) + self._client.reset_products() + + if self.status == ServiceStatus.STOPPED: + return + + forksafe.unregister(self.reset_at_fork) + atexit.unregister(self.disable) + + self.stop(join=join) + + def _stop_service(self, *args, **kwargs): + # type: (...) -> None + self.stop_subscribers() + + if self.status == ServiceStatus.STOPPED or self._worker is None: + return + + super(RemoteConfigPoller, self)._stop_service(*args, **kwargs) + + def update_product_callback(self, product, callback): + """Some Products fork and restart their instances when application creates new process. In that case, + we need to update the callback instance to ensure the instance of the child process receives correctly the + Remote Configuration payloads. + """ + return self._client.update_product_callback(product, callback) + + def register(self, product, pubsub_instance, skip_enabled=False): + # type: (str, PubSub, bool) -> None + try: + # By enabling on registration we ensure we start the RCM client only + # if there is at least one registered product. + if not skip_enabled: + self.enable() + + self._client.register_product(product, pubsub_instance) + if not self._client.is_subscriber_running(pubsub_instance): + pubsub_instance.start_subscriber() + except Exception: + log.debug("error starting the RCM client", exc_info=True) + + def unregister(self, product): + try: + self._client.unregister_product(product) + except Exception: + log.debug("error starting the RCM client", exc_info=True) + + def get_registered(self, product): + return self._client._products.get(product) + + def __enter__(self): + # type: () -> RemoteConfigPoller + self.enable() + return self + + def __exit__(self, *args): + # type: (...) -> None + self.disable(join=True) + + +remoteconfig_poller = RemoteConfigPoller() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/__init__.py new file mode 100644 index 0000000..3ccfcfa --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/__init__.py @@ -0,0 +1,46 @@ +import typing as t +import uuid + +from .. import forksafe + + +__all__ = [ + "get_runtime_id", +] + + +def _generate_runtime_id(): + return uuid.uuid4().hex + + +_RUNTIME_ID = _generate_runtime_id() +_ANCESTOR_RUNTIME_ID: t.Optional[str] = None + + +@forksafe.register +def _set_runtime_id(): + global _RUNTIME_ID, _ANCESTOR_RUNTIME_ID + + # Save the runtime ID of the common ancestor of all processes. + if _ANCESTOR_RUNTIME_ID is None: + _ANCESTOR_RUNTIME_ID = _RUNTIME_ID + + _RUNTIME_ID = _generate_runtime_id() + + +def get_runtime_id(): + """Return a unique string identifier for this runtime. + + Do not store this identifier as it can change when, e.g., the process forks. + """ + return _RUNTIME_ID + + +def get_ancestor_runtime_id() -> t.Optional[str]: + """Return the runtime ID of the common ancestor of this process. + + Once this value is set (this will happen after a fork) it will not change + for the lifetime of the process. This function returns ``None`` for the + ancestor process. 
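`_set_runtime_id` above is registered as a fork hook so every child process gets a fresh runtime ID while remembering the ancestor's. `forksafe.register` is ddtrace-internal; a standard-library analogue of the same pattern (POSIX only, illustrative) looks like this:

import os
import uuid

RUNTIME_ID = uuid.uuid4().hex

def _regenerate_runtime_id():
    # Runs in the child after fork, giving it its own identifier.
    global RUNTIME_ID
    RUNTIME_ID = uuid.uuid4().hex

os.register_at_fork(after_in_child=_regenerate_runtime_id)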
+ """ + return _ANCESTOR_RUNTIME_ID diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/collector.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/collector.py new file mode 100644 index 0000000..d78e7e9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/collector.py @@ -0,0 +1,89 @@ +import importlib +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Set # noqa:F401 +from typing import Tuple # noqa:F401 + +from ..logger import get_logger + + +log = get_logger(__name__) + + +class ValueCollector(object): + """A basic state machine useful for collecting, caching and updating data + obtained from different Python modules. + + The two primary use-cases are + 1) data loaded once (like tagging information) + 2) periodically updating data sources (like thread count) + + Functionality is provided for requiring and importing modules which may or + may not be installed. + """ + + enabled = True + periodic = False + required_modules = [] # type: List[str] + value = None # type: Optional[List[Tuple[str, str]]] + value_loaded = False + + def __init__(self, enabled=None, periodic=None, required_modules=None): + # type: (Optional[bool], Optional[bool], Optional[List[str]]) -> None + self.enabled = self.enabled if enabled is None else enabled + self.periodic = self.periodic if periodic is None else periodic + self.required_modules = self.required_modules if required_modules is None else required_modules + + self._modules_successfully_loaded = False + self.modules = self._load_modules() + if self._modules_successfully_loaded: + self._on_modules_load() + + def _on_modules_load(self): + """Hook triggered after all required_modules have been successfully loaded.""" + + def _load_modules(self): + modules = {} + try: + for module in self.required_modules: + modules[module] = importlib.import_module(module) + self._modules_successfully_loaded = True + except ImportError: + # DEV: disable collector if we cannot load any of the required modules + self.enabled = False + log.warning('Could not import module "%s" for %s. Disabling collector.', module, self) + return None + return modules + + def collect(self, keys=None): + # type: (Optional[Set[str]]) -> Optional[List[Tuple[str, str]]] + """Returns metrics as collected by `collect_fn`. + + :param keys: The keys of the metrics to collect. 
+ """ + if not self.enabled: + return self.value + + keys = keys or set() + + if not self.periodic and self.value_loaded: + return self.value + + # call underlying collect function and filter out keys not requested + # TODO: provide base method collect_fn() in ValueCollector + self.value = self.collect_fn(keys) # type: ignore[attr-defined] + + # filter values for keys + if len(keys) > 0 and isinstance(self.value, list): + self.value = [(k, v) for (k, v) in self.value if k in keys] + + self.value_loaded = True + return self.value + + def __repr__(self): + return "<{}(enabled={},periodic={},required_modules={})>".format( + self.__class__.__name__, + self.enabled, + self.periodic, + self.required_modules, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/constants.py new file mode 100644 index 0000000..78b9c5e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/constants.py @@ -0,0 +1,27 @@ +GC_COUNT_GEN0 = "runtime.python.gc.count.gen0" +GC_COUNT_GEN1 = "runtime.python.gc.count.gen1" +GC_COUNT_GEN2 = "runtime.python.gc.count.gen2" + +THREAD_COUNT = "runtime.python.thread_count" +MEM_RSS = "runtime.python.mem.rss" +# `runtime.python.cpu.time.sys` metric is used to auto-enable runtime metrics dashboards in DD backend +CPU_TIME_SYS = "runtime.python.cpu.time.sys" +CPU_TIME_USER = "runtime.python.cpu.time.user" +CPU_PERCENT = "runtime.python.cpu.percent" +CTX_SWITCH_VOLUNTARY = "runtime.python.cpu.ctx_switch.voluntary" +CTX_SWITCH_INVOLUNTARY = "runtime.python.cpu.ctx_switch.involuntary" + +GC_RUNTIME_METRICS = set([GC_COUNT_GEN0, GC_COUNT_GEN1, GC_COUNT_GEN2]) + +PSUTIL_RUNTIME_METRICS = set( + [THREAD_COUNT, MEM_RSS, CTX_SWITCH_VOLUNTARY, CTX_SWITCH_INVOLUNTARY, CPU_TIME_SYS, CPU_TIME_USER, CPU_PERCENT] +) + +DEFAULT_RUNTIME_METRICS = GC_RUNTIME_METRICS | PSUTIL_RUNTIME_METRICS + +SERVICE = "service" +ENV = "env" +LANG_INTERPRETER = "lang_interpreter" +LANG_VERSION = "lang_version" +LANG = "lang" +TRACER_VERSION = "tracer_version" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/container.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/container.py new file mode 100644 index 0000000..ca90fc1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/container.py @@ -0,0 +1,115 @@ +import errno +import re +from typing import Optional # noqa:F401 + +import attr + +from ..logger import get_logger + + +log = get_logger(__name__) + + +@attr.s(slots=True) +class CGroupInfo(object): + """ + CGroup class for container information parsed from a group cgroup file + """ + + id = attr.ib(default=None) + groups = attr.ib(default=None) + path = attr.ib(default=None) + container_id = attr.ib(default=None) + controllers = attr.ib(default=None) + pod_id = attr.ib(default=None) + + # The second part is the PCF/Garden regexp. 
We currently assume no suffix ($) to avoid matching pod UIDs + # See https://github.com/DataDog/datadog-agent/blob/7.40.x/pkg/util/cgroups/reader.go#L50 + UUID_SOURCE_PATTERN = ( + r"[0-9a-f]{8}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{12}|([0-9a-f]{8}(-[0-9a-f]{4}){4}$)" + ) + CONTAINER_SOURCE_PATTERN = r"[0-9a-f]{64}" + TASK_PATTERN = r"[0-9a-f]{32}-\d+" + + LINE_RE = re.compile(r"^(\d+):([^:]*):(.+)$") + POD_RE = re.compile(r"pod({0})(?:\.slice)?$".format(UUID_SOURCE_PATTERN)) + CONTAINER_RE = re.compile( + r"(?:.+)?({0}|{1}|{2})(?:\.scope)?$".format(UUID_SOURCE_PATTERN, CONTAINER_SOURCE_PATTERN, TASK_PATTERN) + ) + + @classmethod + def from_line(cls, line): + # type: (str) -> Optional[CGroupInfo] + """ + Parse a new :class:`CGroupInfo` from the provided line + + :param line: A line from a cgroup file (e.g. /proc/self/cgroup) to parse information from + :type line: str + :returns: A :class:`CGroupInfo` object with all parsed data, if the line is valid, otherwise `None` + :rtype: :class:`CGroupInfo` | None + + """ + # Clean up the line + line = line.strip() + + # Ensure the line is valid + match = cls.LINE_RE.match(line) + if not match: + return None + + id_, groups, path = match.groups() + + # Parse the controllers from the groups + controllers = [c.strip() for c in groups.split(",") if c.strip()] + + # Break up the path to grab container_id and pod_id if available + # e.g. /docker/ + # e.g. /kubepods/test/pod/ + parts = [p for p in path.split("/")] + + # Grab the container id from the path if a valid id is present + container_id = None + if len(parts): + match = cls.CONTAINER_RE.match(parts.pop()) + if match: + container_id = match.group(1) + + # Grab the pod id from the path if a valid id is present + pod_id = None + if len(parts): + match = cls.POD_RE.match(parts.pop()) + if match: + pod_id = match.group(1) + + return cls(id=id_, groups=groups, path=path, container_id=container_id, controllers=controllers, pod_id=pod_id) + + +def get_container_info(pid="self"): + # type: (str) -> Optional[CGroupInfo] + """ + Helper to fetch the current container id, if we are running in a container + + We will parse `/proc/{pid}/cgroup` to determine our container id. 
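`CGroupInfo.from_line` above splits a cgroup line into id, controllers and path, then matches the trailing path segments against the container and pod regular expressions. An illustrative parse of a Docker-style line (the 64-hex container id below is made up; the import path follows the file header above):

from ddtrace.internal.runtime.container import CGroupInfo

line = "13:memory:/docker/" + "a" * 64
info = CGroupInfo.from_line(line)
print(info.id, info.controllers, info.container_id)  # 13 ['memory'] aaaa...a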
+ + The results of calling this function are cached + + :param pid: The pid of the cgroup file to parse (default: 'self') + :type pid: str | int + :returns: The cgroup file info if found, or else None + :rtype: :class:`CGroupInfo` | None + """ + + cgroup_file = "/proc/{0}/cgroup".format(pid) + + try: + with open(cgroup_file, mode="r") as fp: + for line in fp: + info = CGroupInfo.from_line(line) + if info and info.container_id: + return info + except IOError as e: + if e.errno != errno.ENOENT: + log.debug("Failed to open cgroup file for pid %r", pid, exc_info=True) + except Exception: + log.debug("Failed to parse cgroup file for pid %r", pid, exc_info=True) + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/metric_collectors.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/metric_collectors.py new file mode 100644 index 0000000..d43f99b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/metric_collectors.py @@ -0,0 +1,93 @@ +import os +from typing import List # noqa:F401 +from typing import Tuple # noqa:F401 + +from .collector import ValueCollector +from .constants import CPU_PERCENT +from .constants import CPU_TIME_SYS +from .constants import CPU_TIME_USER +from .constants import CTX_SWITCH_INVOLUNTARY +from .constants import CTX_SWITCH_VOLUNTARY +from .constants import GC_COUNT_GEN0 +from .constants import GC_COUNT_GEN1 +from .constants import GC_COUNT_GEN2 +from .constants import MEM_RSS +from .constants import THREAD_COUNT + + +class RuntimeMetricCollector(ValueCollector): + value = [] # type: List[Tuple[str, str]] + periodic = True + + +class GCRuntimeMetricCollector(RuntimeMetricCollector): + """Collector for garbage collection generational counts + + More information at https://docs.python.org/3/library/gc.html + """ + + required_modules = ["gc"] + + def collect_fn(self, keys): + gc = self.modules.get("gc") + + counts = gc.get_count() + metrics = [ + (GC_COUNT_GEN0, counts[0]), + (GC_COUNT_GEN1, counts[1]), + (GC_COUNT_GEN2, counts[2]), + ] + + return metrics + + +class PSUtilRuntimeMetricCollector(RuntimeMetricCollector): + """Collector for psutil metrics. + + Performs batched operations via proc.oneshot() to optimize the calls. + See https://psutil.readthedocs.io/en/latest/#psutil.Process.oneshot + for more information. 
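`PSUtilRuntimeMetricCollector` batches its process reads inside `proc.oneshot()` and reports CPU time and context-switch counts as deltas between flushes, while thread count, RSS and CPU percent are taken as instantaneous readings. A condensed sketch of that pattern using plain `psutil` (the vendored copy lives at `ddtrace.vendor.psutil`; names here are illustrative):

import os
import psutil

prev = {"cpu_user": 0.0}
proc = psutil.Process(os.getpid())
with proc.oneshot():                          # one batched pass over the process stats
    rss = proc.memory_info().rss              # instantaneous value
    cpu_user = proc.cpu_times().user
delta_cpu_user = cpu_user - prev["cpu_user"]  # delta since the previous flush
prev["cpu_user"] = cpu_user
print(rss, delta_cpu_user)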
+ """ + + required_modules = ["ddtrace.vendor.psutil"] + delta_funs = { + CPU_TIME_SYS: lambda p: p.cpu_times().system, + CPU_TIME_USER: lambda p: p.cpu_times().user, + CTX_SWITCH_VOLUNTARY: lambda p: p.num_ctx_switches().voluntary, + CTX_SWITCH_INVOLUNTARY: lambda p: p.num_ctx_switches().involuntary, + } + abs_funs = { + THREAD_COUNT: lambda p: p.num_threads(), + MEM_RSS: lambda p: p.memory_info().rss, + CPU_PERCENT: lambda p: p.cpu_percent(), + } + + def _on_modules_load(self): + self.proc = self.modules["ddtrace.vendor.psutil"].Process(os.getpid()) + self.stored_values = {key: 0 for key in self.delta_funs.keys()} + + def collect_fn(self, keys): + with self.proc.oneshot(): + metrics = {} + + # Populate metrics for which we compute delta values + for metric, delta_fun in self.delta_funs.items(): + try: + value = delta_fun(self.proc) + except Exception: + value = 0 + + delta = value - self.stored_values.get(metric, 0) + self.stored_values[metric] = value + metrics[metric] = delta + + # Populate metrics that just take instantaneous reading + for metric, abs_fun in self.abs_funs.items(): + try: + value = abs_fun(self.proc) + except Exception: + value = 0 + + metrics[metric] = value + + return list(metrics.items()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/runtime_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/runtime_metrics.py new file mode 100644 index 0000000..662a3cf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/runtime_metrics.py @@ -0,0 +1,166 @@ +import itertools +import os +from typing import ClassVar # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Set # noqa:F401 + +import attr + +import ddtrace +from ddtrace.internal import atexit +from ddtrace.internal import forksafe + +from .. import periodic +from .. import telemetry +from ..dogstatsd import get_dogstatsd_client +from ..logger import get_logger +from ..telemetry.constants import TELEMETRY_RUNTIMEMETRICS_ENABLED +from .constants import DEFAULT_RUNTIME_METRICS +from .metric_collectors import GCRuntimeMetricCollector +from .metric_collectors import PSUtilRuntimeMetricCollector +from .tag_collectors import PlatformTagCollector +from .tag_collectors import TracerTagCollector + + +log = get_logger(__name__) + + +class RuntimeCollectorsIterable(object): + def __init__(self, enabled=None): + self._enabled = enabled or self.ENABLED + # Initialize the collectors. + self._collectors = [c() for c in self.COLLECTORS] + + def __iter__(self): + collected = (collector.collect(self._enabled) for collector in self._collectors) + return itertools.chain.from_iterable(collected) + + def __repr__(self): + return "{}(enabled={})".format( + self.__class__.__name__, + self._enabled, + ) + + +class RuntimeTags(RuntimeCollectorsIterable): + # DEV: `None` means to allow all tags generated by PlatformTagCollector and TracerTagCollector + ENABLED = None + COLLECTORS = [ + PlatformTagCollector, + TracerTagCollector, + ] + + +class RuntimeMetrics(RuntimeCollectorsIterable): + ENABLED = DEFAULT_RUNTIME_METRICS + COLLECTORS = [ + GCRuntimeMetricCollector, + PSUtilRuntimeMetricCollector, + ] + + +def _get_interval_or_default(): + return float(os.getenv("DD_RUNTIME_METRICS_INTERVAL", default=10)) + + +@attr.s(eq=False) +class RuntimeWorker(periodic.PeriodicService): + """Worker thread for collecting and writing runtime metrics to a DogStatsd + client. 
+ """ + + _interval = attr.ib(type=float, factory=_get_interval_or_default) + tracer = attr.ib(type=ddtrace.Tracer, default=None) + dogstatsd_url = attr.ib(type=Optional[str], default=None) + _dogstatsd_client = attr.ib(init=False, repr=False) + _runtime_metrics = attr.ib(factory=RuntimeMetrics, repr=False) + _services = attr.ib(type=Set[str], init=False, factory=set) + + enabled = False + _instance = None # type: ClassVar[Optional[RuntimeWorker]] + _lock = forksafe.Lock() + + def __attrs_post_init__(self): + # type: () -> None + self._dogstatsd_client = get_dogstatsd_client(self.dogstatsd_url or ddtrace.internal.agent.get_stats_url()) + self.tracer = self.tracer or ddtrace.tracer + + @classmethod + def disable(cls): + # type: () -> None + with cls._lock: + if cls._instance is None: + return + + forksafe.unregister(cls._restart) + + cls._instance.stop() + # DEV: Use timeout to avoid locking on shutdown. This seems to be + # required on some occasions by Python 2.7. Deadlocks seem to happen + # when some functionalities (e.g. platform.architecture) are used + # which end up calling + # _execute_child (/usr/lib/python2.7/subprocess.py:1023) + # This is a continuous attempt to read: + # _eintr_retry_call (/usr/lib/python2.7/subprocess.py:125) + # which is the eventual cause of the deadlock. + cls._instance.join(1) + cls._instance = None + cls.enabled = False + + # Report status to telemetry + telemetry.telemetry_writer.add_configuration(TELEMETRY_RUNTIMEMETRICS_ENABLED, False, origin="unknown") + + @classmethod + def _restart(cls): + cls.disable() + cls.enable() + + @classmethod + def enable(cls, flush_interval=None, tracer=None, dogstatsd_url=None): + # type: (Optional[float], Optional[ddtrace.Tracer], Optional[str]) -> None + with cls._lock: + if cls._instance is not None: + return + if flush_interval is None: + flush_interval = _get_interval_or_default() + runtime_worker = cls(flush_interval, tracer, dogstatsd_url) # type: ignore[arg-type] + runtime_worker.start() + # force an immediate update constant tags + runtime_worker.update_runtime_tags() + + forksafe.register(cls._restart) + atexit.register(cls.disable) + + cls._instance = runtime_worker + cls.enabled = True + + # Report status to telemetry + telemetry.telemetry_writer.add_configuration(TELEMETRY_RUNTIMEMETRICS_ENABLED, True, origin="unknown") + + def flush(self): + # type: () -> None + # The constant tags for the dogstatsd client needs to updated with any new + # service(s) that may have been added. + if self._services != self.tracer._services: + self._services = self.tracer._services + self.update_runtime_tags() + + with self._dogstatsd_client: + for key, value in self._runtime_metrics: + log.debug("Writing metric %s:%s", key, value) + self._dogstatsd_client.distribution(key, value) + + def _stop_service(self): + # type: (...) -> None + # De-register span hook + super(RuntimeWorker, self)._stop_service() + + def update_runtime_tags(self): + # type: () -> None + # DEV: ddstatsd expects tags in the form ['key1:value1', 'key2:value2', ...] 
+ tags = ["{}:{}".format(k, v) for k, v in RuntimeTags()] + log.debug("Updating constant tags %s", tags) + self._dogstatsd_client.constant_tags = tags + + periodic = flush + on_shutdown = flush diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/tag_collectors.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/tag_collectors.py new file mode 100644 index 0000000..a8906c8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/runtime/tag_collectors.py @@ -0,0 +1,73 @@ +from typing import List # noqa:F401 +from typing import Tuple # noqa:F401 + +from ...constants import ENV_KEY +from ...constants import VERSION_KEY +from .collector import ValueCollector +from .constants import LANG +from .constants import LANG_INTERPRETER +from .constants import LANG_VERSION +from .constants import SERVICE +from .constants import TRACER_VERSION + + +class RuntimeTagCollector(ValueCollector): + periodic = False + value = [] # type: List[Tuple[str, str]] + + +class TracerTagCollector(RuntimeTagCollector): + """Tag collector for the ddtrace Tracer""" + + required_modules = ["ddtrace"] + + def collect_fn(self, keys): + ddtrace = self.modules.get("ddtrace") + + # make sure to copy _services to avoid RuntimeError: Set changed size during iteration + tags = [(SERVICE, service) for service in list(ddtrace.tracer._services)] + + # DEV: `DD_ENV`, `DD_VERSION`, and `DD_SERVICE` get picked up automatically by + # dogstatsd client, but someone might configure these via `ddtrace.config` + # instead of env vars, so better to collect them here again just in case + # DD_ENV gets stored in `config.env` + if ddtrace.config.env: + tags.append((ENV_KEY, ddtrace.config.env)) + + # DD_VERSION gets stored in `config.version` + if ddtrace.config.version: + tags.append((VERSION_KEY, ddtrace.config.version)) + + for key, value in ddtrace.tracer._tags.items(): + tags.append((key, value)) + return tags + + +class PlatformTagCollector(RuntimeTagCollector): + """Tag collector for the Python interpreter implementation. + + Tags collected: + - ``lang_interpreter``: + + * For CPython this is 'CPython'. + * For Pypy this is ``PyPy`` + * For Jython this is ``Jython`` + + - `lang_version``, eg ``2.7.10`` + - ``lang`` e.g. ``Python`` + - ``tracer_version`` e.g. 
``0.29.0`` + + """ + + required_modules = ["platform", "ddtrace"] + + def collect_fn(self, keys): + platform = self.modules.get("platform") + ddtrace = self.modules.get("ddtrace") + tags = [ + (LANG, "python"), + (LANG_INTERPRETER, platform.python_implementation()), + (LANG_VERSION, platform.python_version()), + (TRACER_VERSION, ddtrace.__version__), + ] + return tags diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/safety.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/safety.py new file mode 100644 index 0000000..344ed5f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/safety.py @@ -0,0 +1,129 @@ +from typing import Any # noqa:F401 +from typing import Iterator # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Set # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Type # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal.compat import BUILTIN +from ddtrace.internal.compat import PYTHON_VERSION_INFO +from ddtrace.internal.utils.attrdict import AttrDict +from ddtrace.internal.utils.cache import cached +from ddtrace.vendor import wrapt + + +NoneType = type(None) + + +def _maybe_slots(obj): + # type: (Any) -> Union[Tuple[str], List[str]] + try: + slots = object.__getattribute__(obj, "__slots__") + if isinstance(slots, str): + return (slots,) + return slots + except AttributeError: + return [] + + +@cached() +def _slots(_type): + # type: (Type) -> Set[str] + return {_ for cls in object.__getattribute__(_type, "__mro__") for _ in _maybe_slots(cls)} + + +def get_slots(obj): + # type: (Any) -> Set[str] + """Get the object's slots.""" + return _slots(type(obj)) + + +def _isinstance(obj, types): + # type: (Any, Union[Type, Tuple[Union[Type, Tuple[Any, ...]], ...]]) -> bool + # DEV: isinstance falls back to calling __getattribute__ which could cause + # side effects. + return issubclass(type(obj), types) + + +IS_312_OR_NEWER = PYTHON_VERSION_INFO >= (3, 12) + + +class SafeObjectProxy(wrapt.ObjectProxy): + """Object proxy to make sure we don't call unsafe code. + + Access to the wrapped object is denied to prevent any potential + side-effects. Arbitrary objects are essentially converted into attribute + dictionaries. Callable objects are made uncallable to prevent accidental + calls that can also trigger side-effects. 
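> Editor's note: `get_slots()` in `safety.py` walks the full MRO because `__slots__` declared on a base class are not repeated on subclasses, and a bare string value counts as a single slot name. A simplified demonstration of both behaviours (the vendored helper uses `object.__getattribute__` rather than `getattr` to avoid triggering custom attribute hooks).

```python
def collect_slots(obj):
    """Gather slot names from every class in the object's MRO."""
    names = set()
    for cls in type(obj).__mro__:
        slots = getattr(cls, "__slots__", ())
        if isinstance(slots, str):  # a bare string means a single slot
            slots = (slots,)
        names.update(slots)
    return names


class Base:
    __slots__ = "a"          # single slot declared as a plain string


class Child(Base):
    __slots__ = ("b", "c")   # additional slots on the subclass


print(collect_slots(Child()))  # {'a', 'b', 'c'} (set order may vary)
```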
+ """ + + def __call__(self, *args, **kwargs): + # type: (Any, Any) -> Optional[Any] + raise RuntimeError("Cannot call safe object") + + def __getattribute__(self, name): + # type: (str) -> Any + if name == "__wrapped__" and not IS_312_OR_NEWER: + raise AttributeError("Access denied") + + return super(SafeObjectProxy, self).__getattribute__(name) + + def __getattr__(self, name): + # type: (str) -> Any + if name == "__wrapped__" and IS_312_OR_NEWER: + raise AttributeError("Access denied") + return type(self).safe(super(SafeObjectProxy, self).__getattr__(name)) + + def __getitem__(self, item): + # type: (Any) -> Any + return type(self).safe(super(SafeObjectProxy, self).__getitem__(item)) + + def __iter__(self): + # type: () -> Any + return iter(type(self).safe(_) for _ in super(SafeObjectProxy, self).__iter__()) + + def items(self): + # type: () -> Iterator[Tuple[Any, Any]] + return ( + (type(self).safe(k), type(self).safe(v)) for k, v in super(SafeObjectProxy, self).__getattr__("items")() + ) + + # Custom object representations might cause side-effects + def __str__(self): + return object.__repr__(self) + + __repr__ = __str__ + + @classmethod + def safe(cls, obj): + # type: (Any) -> Optional[Any] + """Turn an object into a safe proxy.""" + _type = type(obj) + + if _isinstance(obj, type): + try: + if obj.__module__ == BUILTIN: + # We are assuming that builtin types are safe + return obj + except AttributeError: + # No __module__ attribute. We'll use caution + pass + + elif _type in {str, int, float, bool, NoneType, bytes, complex}: + # We are assuming that scalar builtin type instances are safe + return obj + + try: + return cls(AttrDict(object.__getattribute__(obj, "__dict__"))) + except AttributeError: + pass + + slots = get_slots(obj) + if slots: + # Handle slots objects + return cls(AttrDict({k: object.__getattribute__(obj, k) for k in slots})) + + # raise TypeError("Unhandled object type: %s", type(obj)) + return cls(obj) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sampling.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sampling.py new file mode 100644 index 0000000..daa9258 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sampling.py @@ -0,0 +1,319 @@ +import json +import re +from typing import TYPE_CHECKING # noqa:F401 +from typing import Optional # noqa:F401 + + +# TypedDict was added to typing in python 3.8 +try: + from typing import TypedDict # noqa:F401 +except ImportError: + from typing_extensions import TypedDict + +from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MAX_PER_SEC +from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT +from ddtrace.constants import _SINGLE_SPAN_SAMPLING_MECHANISM +from ddtrace.constants import _SINGLE_SPAN_SAMPLING_RATE +from ddtrace.constants import SAMPLING_AGENT_DECISION +from ddtrace.constants import SAMPLING_LIMIT_DECISION +from ddtrace.constants import SAMPLING_RULE_DECISION +from ddtrace.constants import USER_REJECT +from ddtrace.internal.constants import _CATEGORY_TO_PRIORITIES +from ddtrace.internal.constants import _KEEP_PRIORITY_INDEX +from ddtrace.internal.constants import _REJECT_PRIORITY_INDEX +from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY +from ddtrace.internal.glob_matching import GlobMatcher +from ddtrace.internal.logger import get_logger +from ddtrace.sampling_rule import SamplingRule # noqa:F401 +from ddtrace.settings import _config as config + +from .rate_limiter import RateLimiter + + +log = get_logger(__name__) + +try: + from 
json.decoder import JSONDecodeError +except ImportError: + # handling python 2.X import error + JSONDecodeError = ValueError # type: ignore + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Dict # noqa:F401 + from typing import List # noqa:F401 + from typing import Text # noqa:F401 + + from ddtrace.context import Context # noqa:F401 + from ddtrace.span import Span # noqa:F401 + +# Big prime number to make hashing better distributed +KNUTH_FACTOR = 1111111111111111111 +MAX_SPAN_ID = 2**64 + + +class SamplingMechanism(object): + DEFAULT = 0 + AGENT_RATE = 1 + REMOTE_RATE = 2 + TRACE_SAMPLING_RULE = 3 + MANUAL = 4 + APPSEC = 5 + REMOTE_RATE_USER = 6 + REMOTE_RATE_DATADOG = 7 + SPAN_SAMPLING_RULE = 8 + + +# Use regex to validate trace tag value +TRACE_TAG_RE = re.compile(r"^-([0-9])$") + + +SpanSamplingRules = TypedDict( + "SpanSamplingRules", + { + "name": str, + "service": str, + "sample_rate": float, + "max_per_second": int, + }, + total=False, +) + + +def validate_sampling_decision( + meta, # type: Dict[str, str] +): + # type: (...) -> Dict[str, str] + value = meta.get(SAMPLING_DECISION_TRACE_TAG_KEY) + if value: + # Skip propagating invalid sampling mechanism trace tag + if TRACE_TAG_RE.match(value) is None: + del meta[SAMPLING_DECISION_TRACE_TAG_KEY] + meta["_dd.propagation_error"] = "decoding_error" + log.warning("failed to decode _dd.p.dm: %r", value, exc_info=True) + return meta + + +def set_sampling_decision_maker( + context, # type: Context + sampling_mechanism, # type: int +): + # type: (...) -> Optional[Text] + value = "-%d" % sampling_mechanism + context._meta[SAMPLING_DECISION_TRACE_TAG_KEY] = value + return value + + +class SpanSamplingRule: + """A span sampling rule to evaluate and potentially tag each span upon finish.""" + + __slots__ = ( + "_service_matcher", + "_name_matcher", + "_sample_rate", + "_max_per_second", + "_sampling_id_threshold", + "_limiter", + "_matcher", + ) + + def __init__( + self, + sample_rate, # type: float + max_per_second, # type: int + service=None, # type: Optional[str] + name=None, # type: Optional[str] + ): + self._sample_rate = sample_rate + self._sampling_id_threshold = self._sample_rate * MAX_SPAN_ID + + self._max_per_second = max_per_second + self._limiter = RateLimiter(max_per_second) + + # we need to create matchers for the service and/or name pattern provided + self._service_matcher = GlobMatcher(service) if service is not None else None + self._name_matcher = GlobMatcher(name) if name is not None else None + + def sample(self, span): + # type: (Span) -> bool + if self._sample(span): + if self._limiter.is_allowed(span.start_ns): + self.apply_span_sampling_tags(span) + return True + return False + + def _sample(self, span): + # type: (Span) -> bool + if self._sample_rate == 1: + return True + elif self._sample_rate == 0: + return False + + return ((span.span_id * KNUTH_FACTOR) % MAX_SPAN_ID) <= self._sampling_id_threshold + + def match(self, span): + # type: (Span) -> bool + """Determines if the span's service and name match the configured patterns""" + name = span.name + service = span.service + # If a span lacks a name and service, we can't match on it + if service is None and name is None: + return False + + # Default to True, as the rule may not have a name or service rule + # For whichever rules it does have, it will attempt to match on them + service_match = True + name_match = True + + if self._service_matcher: + if service is None: + return False + else: + service_match = 
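> Editor's note: `SpanSamplingRule._sample()` makes the keep/drop decision deterministically from the span id: multiplying by the large prime `KNUTH_FACTOR` and reducing modulo 2**64 scatters ids roughly uniformly, so comparing against `sample_rate * 2**64` keeps approximately that fraction of spans. A self-contained check of that property:

```python
import random

KNUTH_FACTOR = 1111111111111111111
MAX_SPAN_ID = 2**64


def is_sampled(span_id, sample_rate):
    """Deterministic sampling decision based only on the span id."""
    threshold = sample_rate * MAX_SPAN_ID
    return ((span_id * KNUTH_FACTOR) % MAX_SPAN_ID) <= threshold


# Empirically, about `sample_rate` of random 64-bit ids are kept.
rate = 0.25
ids = [random.getrandbits(64) for _ in range(100_000)]
kept = sum(is_sampled(i, rate) for i in ids)
print(kept / len(ids))  # ~0.25
```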
self._service_matcher.match(service) + if self._name_matcher: + if name is None: + return False + else: + name_match = self._name_matcher.match(name) + return service_match and name_match + + def apply_span_sampling_tags(self, span): + # type: (Span) -> None + span.set_metric(_SINGLE_SPAN_SAMPLING_MECHANISM, SamplingMechanism.SPAN_SAMPLING_RULE) + span.set_metric(_SINGLE_SPAN_SAMPLING_RATE, self._sample_rate) + # Only set this tag if it's not the default -1 + if self._max_per_second != _SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT: + span.set_metric(_SINGLE_SPAN_SAMPLING_MAX_PER_SEC, self._max_per_second) + + +def get_span_sampling_rules(): + # type: () -> List[SpanSamplingRule] + json_rules = _get_span_sampling_json() + sampling_rules = [] + for rule in json_rules: + # If sample_rate not specified default to 100% + sample_rate = rule.get("sample_rate", 1.0) + service = rule.get("service") + name = rule.get("name") + + if not service and not name: + raise ValueError("Sampling rules must supply at least 'service' or 'name', got {}".format(json.dumps(rule))) + + # If max_per_second not specified default to no limit + max_per_second = rule.get("max_per_second", _SINGLE_SPAN_SAMPLING_MAX_PER_SEC_NO_LIMIT) + if service: + _check_unsupported_pattern(service) + if name: + _check_unsupported_pattern(name) + + try: + sampling_rule = SpanSamplingRule( + sample_rate=sample_rate, service=service, name=name, max_per_second=max_per_second + ) + except Exception as e: + raise ValueError("Error creating single span sampling rule {}: {}".format(json.dumps(rule), e)) + sampling_rules.append(sampling_rule) + return sampling_rules + + +def _get_span_sampling_json(): + # type: () -> List[Dict[str, Any]] + env_json_rules = _get_env_json() + file_json_rules = _get_file_json() + + if env_json_rules and file_json_rules: + log.warning( + ( + "DD_SPAN_SAMPLING_RULES and DD_SPAN_SAMPLING_RULES_FILE detected. " + "Defaulting to DD_SPAN_SAMPLING_RULES value." 
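> Editor's note: `get_span_sampling_rules()` consumes a JSON list where each rule must carry at least a `service` or a `name`, with `sample_rate` defaulting to 1.0 and `max_per_second` to unlimited. A hypothetical configuration showing the expected shape (service and operation names below are made up):

```python
import json
import os

# Hypothetical rules: keep half of the "checkout" service's spans, capped at
# 50 spans per second, and keep every span named "cache.get".
rules = [
    {"service": "checkout", "sample_rate": 0.5, "max_per_second": 50},
    {"name": "cache.get"},  # sample_rate defaults to 1.0, no per-second cap
]

os.environ["DD_SPAN_SAMPLING_RULES"] = json.dumps(rules)
print(os.environ["DD_SPAN_SAMPLING_RULES"])
```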
+ ) + ) + return env_json_rules + return env_json_rules or file_json_rules or [] + + +def _get_file_json(): + # type: () -> Optional[List[Dict[str, Any]]] + file_json_raw = config._sampling_rules_file + if file_json_raw: + with open(file_json_raw) as f: + return _load_span_sampling_json(f.read()) + return None + + +def _get_env_json(): + # type: () -> Optional[List[Dict[str, Any]]] + env_json_raw = config._sampling_rules + if env_json_raw: + return _load_span_sampling_json(env_json_raw) + return None + + +def _load_span_sampling_json(raw_json_rules): + # type: (str) -> List[Dict[str, Any]] + try: + json_rules = json.loads(raw_json_rules) + if not isinstance(json_rules, list): + raise TypeError("DD_SPAN_SAMPLING_RULES is not list, got %r" % json_rules) + except JSONDecodeError: + raise ValueError("Unable to parse DD_SPAN_SAMPLING_RULES=%r" % raw_json_rules) + + return json_rules + + +def _check_unsupported_pattern(string): + # type: (str) -> None + # We don't support pattern bracket expansion or escape character + unsupported_chars = {"[", "]", "\\"} + for char in string: + if char in unsupported_chars: + raise ValueError("Unsupported Glob pattern found, character:%r is not supported" % char) + + +def is_single_span_sampled(span): + # type: (Span) -> bool + return span.get_metric(_SINGLE_SPAN_SAMPLING_MECHANISM) == SamplingMechanism.SPAN_SAMPLING_RULE + + +def _set_sampling_tags(span, sampled, sample_rate, priority_category): + # type: (Span, bool, float, str) -> None + mechanism = SamplingMechanism.TRACE_SAMPLING_RULE + if priority_category == "rule": + span.set_metric(SAMPLING_RULE_DECISION, sample_rate) + elif priority_category == "default": + mechanism = SamplingMechanism.DEFAULT + elif priority_category == "auto": + mechanism = SamplingMechanism.AGENT_RATE + span.set_metric(SAMPLING_AGENT_DECISION, sample_rate) + priorities = _CATEGORY_TO_PRIORITIES[priority_category] + _set_priority(span, priorities[_KEEP_PRIORITY_INDEX] if sampled else priorities[_REJECT_PRIORITY_INDEX]) + set_sampling_decision_maker(span.context, mechanism) + + +def _apply_rate_limit(span, sampled, limiter): + # type: (Span, bool, RateLimiter) -> bool + allowed = True + if sampled: + allowed = limiter.is_allowed(span.start_ns) + if not allowed: + _set_priority(span, USER_REJECT) + if limiter._has_been_configured: + span.set_metric(SAMPLING_LIMIT_DECISION, limiter.effective_rate) + return allowed + + +def _set_priority(span, priority): + # type: (Span, int) -> None + span.context.sampling_priority = priority + span.sampled = priority > 0 # Positive priorities mean it was kept + + +def _get_highest_precedence_rule_matching(span, rules): + # type: (Span, List[SamplingRule]) -> Optional[SamplingRule] + if not rules: + return None + + for rule in rules: + if rule.matches(span): + return rule + return None diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/__init__.py new file mode 100644 index 0000000..2314810 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/__init__.py @@ -0,0 +1,64 @@ +import logging +import os + +from ddtrace.internal.utils.formats import asbool + +from .span_attribute_schema import _DEFAULT_SPAN_SERVICE_NAMES +from .span_attribute_schema import _SPAN_ATTRIBUTE_TO_FUNCTION +from .span_attribute_schema import SpanDirection + + +log = logging.getLogger(__name__) + + +# Span attribute schema +def _validate_schema(version): + error_message = ( + "You have specified an invalid span 
attribute schema version: '{}'.".format(version), + "Valid options are: {}. You can change the specified value by updating".format( + _SPAN_ATTRIBUTE_TO_FUNCTION.keys() + ), + "the value exported in the 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA' environment variable.", + ) + + if version not in _SPAN_ATTRIBUTE_TO_FUNCTION.keys(): + log.warning(" ".join(error_message)) + return False + + return True + + +def _get_schema_version(): + version = os.getenv("DD_TRACE_SPAN_ATTRIBUTE_SCHEMA", default="v0") + if not _validate_schema(version): + version = "v0" + return version + + +SCHEMA_VERSION = _get_schema_version() +_remove_client_service_names = asbool(os.getenv("DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED", default=False)) +_service_name_schema_version = "v0" if SCHEMA_VERSION == "v0" and not _remove_client_service_names else "v1" + +DEFAULT_SPAN_SERVICE_NAME = _DEFAULT_SPAN_SERVICE_NAMES[_service_name_schema_version] +schematize_cache_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["cache_operation"] +schematize_cloud_api_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["cloud_api_operation"] +schematize_cloud_faas_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["cloud_faas_operation"] +schematize_cloud_messaging_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["cloud_messaging_operation"] +schematize_database_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["database_operation"] +schematize_messaging_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["messaging_operation"] +schematize_service_name = _SPAN_ATTRIBUTE_TO_FUNCTION[_service_name_schema_version]["service_name"] +schematize_url_operation = _SPAN_ATTRIBUTE_TO_FUNCTION[SCHEMA_VERSION]["url_operation"] + +__all__ = [ + "DEFAULT_SPAN_SERVICE_NAME", + "SCHEMA_VERSION", + "SpanDirection", + "schematize_cache_operation", + "schematize_cloud_api_operation", + "schematize_cloud_faas_operation", + "schematize_cloud_messaging_operation", + "schematize_database_operation", + "schematize_messaging_operation", + "schematize_service_name", + "schematize_url_operation", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/span_attribute_schema.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/span_attribute_schema.py new file mode 100644 index 0000000..b0cfbb8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/schema/span_attribute_schema.py @@ -0,0 +1,115 @@ +from enum import Enum + +from ddtrace.internal.constants import DEFAULT_SERVICE_NAME + + +class SpanDirection(Enum): + INBOUND = "inbound" + OUTBOUND = "outbound" + PROCESSING = "processing" + + +def service_name_v0(v0_service_name): + return v0_service_name + + +def service_name_v1(*_, **__): + from ddtrace import config as dd_config + + return dd_config.service + + +def database_operation_v0(v0_operation, database_provider=None): + return v0_operation + + +def database_operation_v1(v0_operation, database_provider=None): + operation = "query" + return "{}.{}".format(database_provider, operation) + + +def cache_operation_v0(v0_operation, cache_provider=None): + return v0_operation + + +def cache_operation_v1(v0_operation, cache_provider=None): + operation = "command" + return "{}.{}".format(cache_provider, operation) + + +def cloud_api_operation_v0(v0_operation, cloud_provider=None, cloud_service=None): + return v0_operation + + +def cloud_api_operation_v1(v0_operation, cloud_provider=None, cloud_service=None): + return "{}.{}.request".format(cloud_provider, cloud_service) + + +def 
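> Editor's note: the schema module keys every operation-name helper by schema version, so `DD_TRACE_SPAN_ATTRIBUTE_SCHEMA=v0` keeps the legacy operation names while `v1` derives them from the provider and service. A small illustration of the two behaviours for the cloud API helper shown above (argument values are illustrative):

```python
def cloud_api_operation_v0(v0_operation, cloud_provider=None, cloud_service=None):
    # v0 keeps whatever operation name the integration already used.
    return v0_operation


def cloud_api_operation_v1(v0_operation, cloud_provider=None, cloud_service=None):
    # v1 builds the name from the provider and service instead.
    return "{}.{}.request".format(cloud_provider, cloud_service)


print(cloud_api_operation_v0("s3.command", cloud_provider="aws", cloud_service="s3"))
# s3.command
print(cloud_api_operation_v1("s3.command", cloud_provider="aws", cloud_service="s3"))
# aws.s3.request
```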
cloud_faas_operation_v0(v0_operation, cloud_provider=None, cloud_service=None): + return v0_operation + + +def cloud_faas_operation_v1(v0_operation, cloud_provider=None, cloud_service=None): + return "{}.{}.invoke".format(cloud_provider, cloud_service) + + +def cloud_messaging_operation_v0(v0_operation, cloud_provider=None, cloud_service=None, direction=None): + return v0_operation + + +def cloud_messaging_operation_v1(v0_operation, cloud_provider=None, cloud_service=None, direction=None): + if direction == SpanDirection.INBOUND: + return "{}.{}.receive".format(cloud_provider, cloud_service) + elif direction == SpanDirection.OUTBOUND: + return "{}.{}.send".format(cloud_provider, cloud_service) + elif direction == SpanDirection.PROCESSING: + return "{}.{}.process".format(cloud_provider, cloud_service) + + +def messaging_operation_v0(v0_operation, provider=None, service=None, direction=None): + return v0_operation + + +def messaging_operation_v1(v0_operation, provider=None, direction=None): + if direction == SpanDirection.INBOUND: + return "{}.receive".format(provider) + elif direction == SpanDirection.OUTBOUND: + return "{}.send".format(provider) + elif direction == SpanDirection.PROCESSING: + return "{}.process".format(provider) + + +def url_operation_v0(v0_operation, protocol=None, direction=None): + return v0_operation + + +def url_operation_v1(v0_operation, protocol=None, direction=None): + server_or_client = {SpanDirection.INBOUND: "server", SpanDirection.OUTBOUND: "client"}[direction] + return "{}.{}.request".format(protocol, server_or_client) + + +_SPAN_ATTRIBUTE_TO_FUNCTION = { + "v0": { + "cache_operation": cache_operation_v0, + "cloud_api_operation": cloud_api_operation_v0, + "cloud_faas_operation": cloud_faas_operation_v0, + "cloud_messaging_operation": cloud_messaging_operation_v0, + "database_operation": database_operation_v0, + "messaging_operation": messaging_operation_v0, + "service_name": service_name_v0, + "url_operation": url_operation_v0, + }, + "v1": { + "cache_operation": cache_operation_v1, + "cloud_api_operation": cloud_api_operation_v1, + "cloud_faas_operation": cloud_faas_operation_v1, + "cloud_messaging_operation": cloud_messaging_operation_v1, + "database_operation": database_operation_v1, + "messaging_operation": messaging_operation_v1, + "service_name": service_name_v1, + "url_operation": url_operation_v1, + }, +} + + +_DEFAULT_SPAN_SERVICE_NAMES = {"v0": None, "v1": DEFAULT_SERVICE_NAME} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/__init__.py new file mode 100644 index 0000000..d66cb6b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/__init__.py @@ -0,0 +1,47 @@ +import os +from os import environ +from os import path + + +def in_aws_lambda(): + # type: () -> bool + """Returns whether the environment is an AWS Lambda. + This is accomplished by checking if the AWS_LAMBDA_FUNCTION_NAME environment + variable is defined. + """ + return bool(environ.get("AWS_LAMBDA_FUNCTION_NAME", False)) + + +def has_aws_lambda_agent_extension(): + # type: () -> bool + """Returns whether the environment has the AWS Lambda Datadog Agent + extension available. + """ + return path.exists("/opt/extensions/datadog-agent") + + +def in_gcp_function(): + # type: () -> bool + """Returns whether the environment is a GCP Function. 
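> Editor's note: the serverless helpers decide where the code is running purely from environment variables and well-known file paths: `AWS_LAMBDA_FUNCTION_NAME` for Lambda, `/opt/extensions/datadog-agent` for the Lambda extension, and the `FUNCTION_NAME`/`GCP_PROJECT` or `K_SERVICE`/`FUNCTION_TARGET` pairs for GCP Functions. A condensed version of those checks:

```python
import os
from os import path


def in_aws_lambda():
    # Lambda runtimes always export the function name.
    return bool(os.environ.get("AWS_LAMBDA_FUNCTION_NAME"))


def has_aws_lambda_agent_extension():
    # The Datadog Lambda extension is unpacked to a fixed location.
    return path.exists("/opt/extensions/datadog-agent")


def in_gcp_function():
    deprecated = bool(os.environ.get("FUNCTION_NAME")) and bool(os.environ.get("GCP_PROJECT"))
    newer = bool(os.environ.get("K_SERVICE")) and bool(os.environ.get("FUNCTION_TARGET"))
    return deprecated or newer


print(in_aws_lambda(), has_aws_lambda_agent_extension(), in_gcp_function())
```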
+ This is accomplished by checking for the presence of one of two pairs of environment variables, + with one pair being set by deprecated GCP Function runtimes, and the other set by newer runtimes. + """ + is_deprecated_gcp_function = environ.get("FUNCTION_NAME", "") != "" and environ.get("GCP_PROJECT", "") != "" + is_newer_gcp_function = environ.get("K_SERVICE", "") != "" and environ.get("FUNCTION_TARGET", "") != "" + return is_deprecated_gcp_function or is_newer_gcp_function + + +def in_azure_function_consumption_plan(): + # type: () -> bool + """Returns whether the environment is an Azure Consumption Plan Function. + This is accomplished by checking the presence of two Azure Function env vars, + as well as a third SKU variable indicating consumption plans. + """ + is_azure_function = ( + os.environ.get("FUNCTIONS_WORKER_RUNTIME", "") != "" and os.environ.get("FUNCTIONS_EXTENSION_VERSION", "") != "" + ) + + website_sku = os.environ.get("WEBSITE_SKU", "") + is_consumption_plan = website_sku == "" or website_sku == "Dynamic" + + return is_azure_function and is_consumption_plan diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/mini_agent.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/mini_agent.py new file mode 100644 index 0000000..05383da --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/serverless/mini_agent.py @@ -0,0 +1,48 @@ +import os +from subprocess import Popen +import sys + +from ..compat import PYTHON_VERSION_INFO +from ..logger import get_logger +from ..serverless import in_azure_function_consumption_plan +from ..serverless import in_gcp_function + + +log = get_logger(__name__) + + +def maybe_start_serverless_mini_agent(): + if not (in_gcp_function() or in_azure_function_consumption_plan()): + return + + if sys.platform != "win32" and sys.platform != "linux": + log.error("Serverless Mini Agent is only supported on Windows and Linux.") + return + + try: + rust_binary_path = get_rust_binary_path() + + log.debug("Trying to spawn the Serverless Mini Agent at path: %s", rust_binary_path) + Popen(rust_binary_path) + except Exception as e: + log.error("Error spawning Serverless Mini Agent process: %s", repr(e)) + + +def get_rust_binary_path(): + rust_binary_path = os.getenv("DD_MINI_AGENT_PATH") + + if rust_binary_path is not None: + return rust_binary_path + + if in_gcp_function(): + rust_binary_path = ( + "/layers/google.python.pip/pip/lib/python{}.{}/site-packages/" + "datadog-serverless-agent-linux-amd64/datadog-serverless-trace-mini-agent" + ).format(PYTHON_VERSION_INFO[0], PYTHON_VERSION_INFO[1]) + else: + rust_binary_path = ( + "/home/site/wwwroot/.python_packages/lib/site-packages/" + "datadog-serverless-agent-linux-amd64/datadog-serverless-trace-mini-agent" + ) + + return rust_binary_path diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/service.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/service.py new file mode 100644 index 0000000..4092114 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/service.py @@ -0,0 +1,106 @@ +import abc +import enum +import typing # noqa:F401 + +import attr + +from . import forksafe + + +class ServiceStatus(enum.Enum): + """A Service status.""" + + STOPPED = "stopped" + RUNNING = "running" + + +class ServiceStatusError(RuntimeError): + def __init__( + self, + service_cls, # type: typing.Type[Service] + current_status, # type: ServiceStatus + ): + # type: (...) 
-> None + self.current_status = current_status + super(ServiceStatusError, self).__init__( + "%s is already in status %s" % (service_cls.__name__, current_status.value) + ) + + +@attr.s(eq=False) +class Service(metaclass=abc.ABCMeta): + """A service that can be started or stopped.""" + + status = attr.ib(default=ServiceStatus.STOPPED, type=ServiceStatus, init=False, eq=False) + _service_lock = attr.ib(factory=forksafe.Lock, repr=False, init=False, eq=False) + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.stop() + self.join() + + def start( + self, + *args, # type: typing.Any + **kwargs, # type: typing.Any + ): + # type: (...) -> None + """Start the service.""" + # Use a lock so we're sure that if 2 threads try to start the service at the same time, one of them will raise + # an error. + with self._service_lock: + if self.status == ServiceStatus.RUNNING: + raise ServiceStatusError(self.__class__, self.status) + self._start_service(*args, **kwargs) + self.status = ServiceStatus.RUNNING + + @abc.abstractmethod + def _start_service( + self, + *args, # type: typing.Any + **kwargs, # type: typing.Any + ): + # type: (...) -> None + """Start the service for real. + + This method uses the internal lock to be sure there's no race conditions and that the service is really started + once start() returns. + + """ + + def stop( + self, + *args, # type: typing.Any + **kwargs, # type: typing.Any + ): + # type: (...) -> None + """Stop the service.""" + with self._service_lock: + if self.status == ServiceStatus.STOPPED: + raise ServiceStatusError(self.__class__, self.status) + self._stop_service(*args, **kwargs) + self.status = ServiceStatus.STOPPED + + @abc.abstractmethod + def _stop_service( + self, + *args, # type: typing.Any + **kwargs, # type: typing.Any + ): + # type: (...) -> None + """Stop the service for real. + + This method uses the internal lock to be sure there's no race conditions and that the service is really stopped + once start() returns. + + """ + + def join( + self, + timeout=None, # type: typing.Optional[float] + ): + # type: (...) -> None + """Join the service once stopped.""" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sma.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sma.py new file mode 100644 index 0000000..a453884 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/sma.py @@ -0,0 +1,67 @@ +from __future__ import division + + +class SimpleMovingAverage(object): + """ + Simple Moving Average implementation. + """ + + __slots__ = ( + "size", + "index", + "counts", + "totals", + "sum_count", + "sum_total", + ) + + def __init__(self, size): + # type: (int) -> None + """ + Constructor for SimpleMovingAverage. + + :param size: The size of the window to calculate the moving average. + :type size: :obj:`int` + """ + if size < 1: + raise ValueError + + self.index = 0 + self.size = size + + self.sum_count = 0 + self.sum_total = 0 + + self.counts = [0] * self.size + self.totals = [0] * self.size + + def get(self): + # type: () -> float + """ + Get the current SMA value. + """ + if self.sum_total == 0: + return 0.0 + + return float(self.sum_count) / self.sum_total + + def set(self, count, total): + # type: (int, int) -> None + """ + Set the value of the next bucket and update the SMA value. + + :param count: The valid quantity of the next bucket. + :type count: :obj:`int` + :param total: The total quantity of the next bucket. 
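> Editor's note: `SimpleMovingAverage` keeps two ring buffers (per-bucket counts and totals) plus running sums, so the ratio over the last `size` buckets can be read in O(1). A quick usage sketch of the interface shown above, assuming the vendored package is importable:

```python
from ddtrace.internal.sma import SimpleMovingAverage

# Each set(count, total) records one bucket, e.g. "kept 3 of 4 this window".
sma = SimpleMovingAverage(size=3)

sma.set(3, 4)
sma.set(1, 2)
print(sma.get())   # (3 + 1) / (4 + 2) = 0.666...

sma.set(0, 4)
sma.set(4, 4)      # the window holds 3 buckets, so the oldest (3, 4) drops out
print(sma.get())   # (1 + 0 + 4) / (2 + 4 + 4) = 0.5
```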
+ :type total: :obj:`int` + """ + if count > total: + raise ValueError + + self.sum_count += count - self.counts[self.index] + self.sum_total += total - self.totals[self.index] + + self.counts[self.index] = count + self.totals[self.index] = total + + self.index = (self.index + 1) % self.size diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/__init__.py new file mode 100644 index 0000000..1331cb3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/__init__.py @@ -0,0 +1,74 @@ +""" +Instrumentation Telemetry API. +This is normally started automatically by ``ddtrace-run`` when the +``DD_INSTRUMENTATION_TELEMETRY_ENABLED`` variable is set. +To start the service manually, invoke the ``enable`` method:: + from ddtrace.internal import telemetry + telemetry.telemetry_writer.enable() +""" +import os +import sys + +from ddtrace.settings import _config as config + +from .writer import TelemetryWriter + + +telemetry_writer = TelemetryWriter() # type: TelemetryWriter + +__all__ = ["telemetry_writer"] + + +_ORIGINAL_EXCEPTHOOK = sys.excepthook + + +def _excepthook(tp, value, root_traceback): + if root_traceback is not None: + # Get the frame which raised the exception + traceback = root_traceback + while traceback.tb_next: + traceback = traceback.tb_next + + lineno = traceback.tb_frame.f_code.co_firstlineno + filename = traceback.tb_frame.f_code.co_filename + telemetry_writer.add_error(1, str(value), filename, lineno) + + dir_parts = filename.split(os.path.sep) + # Check if exception was raised in the `ddtrace.contrib` package + if "ddtrace" in dir_parts and "contrib" in dir_parts: + ddtrace_index = dir_parts.index("ddtrace") + contrib_index = dir_parts.index("contrib") + # Check if the filename has the following format: + # `../ddtrace/contrib/integration_name/..(subpath and/or file)...` + if ddtrace_index + 1 == contrib_index and len(dir_parts) - 2 > contrib_index: + integration_name = dir_parts[contrib_index + 1] + telemetry_writer.add_count_metric( + "tracers", + "integration_errors", + 1, + (("integration_name", integration_name), ("error_type", tp.__name__)), + ) + error_msg = "{}:{} {}".format(filename, lineno, str(value)) + telemetry_writer.add_integration(integration_name, True, error_msg=error_msg) + + if config._telemetry_enabled and not telemetry_writer.started: + telemetry_writer._app_started_event(False) + + telemetry_writer.app_shutdown() + + return _ORIGINAL_EXCEPTHOOK(tp, value, root_traceback) + + +def install_excepthook(): + """Install a hook that intercepts unhandled exception and send metrics about them.""" + sys.excepthook = _excepthook + + +def uninstall_excepthook(): + """Uninstall the global tracer except hook.""" + sys.excepthook = _ORIGINAL_EXCEPTHOOK + + +def disable_and_flush(): + telemetry_writer._enabled = False + telemetry_writer.periodic(True) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/constants.py new file mode 100644 index 0000000..8fa2c41 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/constants.py @@ -0,0 +1,69 @@ +TELEMETRY_NAMESPACE_TAG_TRACER = "tracers" +TELEMETRY_NAMESPACE_TAG_APPSEC = "appsec" +TELEMETRY_NAMESPACE_TAG_IAST = "iast" + +TELEMETRY_TYPE_GENERATE_METRICS = "generate-metrics" +TELEMETRY_TYPE_DISTRIBUTION = "distributions" +TELEMETRY_TYPE_LOGS = "logs" + +# Configuration names must map to 
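> Editor's note: the telemetry excepthook walks `tb_next` to the innermost traceback frame to attribute an unhandled exception to the file (and, when applicable, the integration) that raised it. A minimal standalone version of that walk, chaining back to the default hook so the traceback still prints:

```python
import sys


def _innermost_frame_info(tb):
    """Follow tb_next to the frame where the exception was actually raised."""
    while tb.tb_next:
        tb = tb.tb_next
    code = tb.tb_frame.f_code
    return code.co_filename, code.co_firstlineno


def excepthook(tp, value, tb):
    if tb is not None:
        filename, lineno = _innermost_frame_info(tb)
        print("unhandled {} at {}:{}".format(tp.__name__, filename, lineno))
    # Always fall through to the original hook.
    sys.__excepthook__(tp, value, tb)


sys.excepthook = excepthook
```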
values supported by backend services: +# https://github.com/DataDog/dd-go/blob/f88e85d64b173e7733ac03e23576d1c9be37f32e/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json +TELEMETRY_DYNAMIC_INSTRUMENTATION_ENABLED = "DD_DYNAMIC_INSTRUMENTATION_ENABLED" +TELEMETRY_EXCEPTION_DEBUGGING_ENABLED = "DD_EXCEPTION_DEBUGGING_ENABLED" + + +# Tracing Features + +TELEMETRY_TRACE_DEBUG = "DD_TRACE_DEBUG" +TELEMETRY_ANALYTICS_ENABLED = "DD_TRACE_ANALYTICS_ENABLED" +TELEMETRY_STARTUP_LOGS_ENABLED = "DD_TRACE_STARTUP_LOGS" +TELEMETRY_CLIENT_IP_ENABLED = "DD_TRACE_CLIENT_IP_ENABLED" +TELEMETRY_128_BIT_TRACEID_GENERATION_ENABLED = "DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED" +TELEMETRY_128_BIT_TRACEID_LOGGING_ENABLED = "DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED" +TELEMETRY_TRACE_COMPUTE_STATS = "DD_TRACE_COMPUTE_STATS" +TELEMETRY_OBFUSCATION_QUERY_STRING_PATTERN = "DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP" +TELEMETRY_OTEL_ENABLED = "DD_TRACE_OTEL_ENABLED" +TELEMETRY_TRACE_HEALTH_METRICS_ENABLED = "DD_TRACE_HEALTH_METRICS_ENABLED" +TELEMETRY_ENABLED = "DD_INSTRUMENTATION_TELEMETRY_ENABLED" +TELEMETRY_RUNTIMEMETRICS_ENABLED = "DD_RUNTIME_METRICS_ENABLED" +TELEMETRY_REMOTE_CONFIGURATION_ENABLED = "DD_REMOTE_CONFIGURATION_ENABLED" +TELEMETRY_REMOTE_CONFIGURATION_INTERVAL = "DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS" +TELEMETRY_SERVICE_MAPPING = "DD_SERVICE_MAPPING" +TELEMETRY_SPAN_SAMPLING_RULES = "DD_SPAN_SAMPLING_RULES" +TELEMETRY_SPAN_SAMPLING_RULES_FILE = "DD_SPAN_SAMPLING_RULES_FILE" +TELEMETRY_PROPAGATION_STYLE_INJECT = "DD_TRACE_PROPAGATION_STYLE_INJECT" +TELEMETRY_PROPAGATION_STYLE_EXTRACT = "DD_TRACE_PROPAGATION_STYLE_EXTRACT" +TELEMETRY_TRACE_SAMPLING_RULES = "DD_TRACE_SAMPLING_RULES" +TELEMETRY_TRACE_SAMPLING_LIMIT = "DD_TRACE_RATE_LIMIT" +TELEMETRY_PRIORITY_SAMPLING = "DD_PRIORITY_SAMPLING" +TELEMETRY_PARTIAL_FLUSH_ENABLED = "DD_TRACE_PARTIAL_FLUSH_ENABLED" +TELEMETRY_PARTIAL_FLUSH_MIN_SPANS = "DD_TRACE_PARTIAL_FLUSH_MIN_SPANS" +TELEMETRY_TRACE_SPAN_ATTRIBUTE_SCHEMA = "DD_TRACE_SPAN_ATTRIBUTE_SCHEMA" +TELEMETRY_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED = "DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED" +TELEMETRY_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = "DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED" +TELEMETRY_TRACE_PEER_SERVICE_MAPPING = "DD_TRACE_PEER_SERVICE_MAPPING" + +TELEMETRY_TRACE_API_VERSION = "DD_TRACE_API_VERSION" +TELEMETRY_TRACE_WRITER_BUFFER_SIZE_BYTES = "DD_TRACE_WRITER_BUFFER_SIZE_BYTES" +TELEMETRY_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES = "DD_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES" +TELEMETRY_TRACE_WRITER_INTERVAL_SECONDS = "DD_TRACE_WRITER_INTERVAL_SECONDS" +TELEMETRY_TRACE_WRITER_REUSE_CONNECTIONS = "DD_TRACE_WRITER_REUSE_CONNECTIONS" + +TELEMETRY_DOGSTATSD_PORT = "DD_DOGSTATSD_PORT" +TELEMETRY_DOGSTATSD_URL = "DD_DOGSTATSD_URL" + +TELEMETRY_AGENT_HOST = "DD_AGENT_HOST" +TELEMETRY_AGENT_PORT = "DD_AGENT_PORT" +TELEMETRY_AGENT_URL = "DD_TRACE_AGENT_URL" +TELEMETRY_TRACE_AGENT_TIMEOUT_SECONDS = "DD_TRACE_AGENT_TIMEOUT_SECONDS" + +# Profiling features +TELEMETRY_PROFILING_STACK_ENABLED = "DD_PROFILING_STACK_ENABLED" +TELEMETRY_PROFILING_MEMORY_ENABLED = "DD_PROFILING_MEMORY_ENABLED" +TELEMETRY_PROFILING_HEAP_ENABLED = "DD_PROFILING_HEAP_ENABLED" +TELEMETRY_PROFILING_LOCK_ENABLED = "DD_PROFILING_LOCK_ENABLED" +TELEMETRY_PROFILING_EXPORT_PY_ENABLED = "DD_PROFILING_EXPORT_PY_ENABLED" +TELEMETRY_PROFILING_EXPORT_LIBDD_ENABLED = "DD_PROFILING_EXPORT_LIBDD_ENABLED" +TELEMETRY_PROFILING_CAPTURE_PCT = "DD_PROFILING_CAPTURE_PCT" +TELEMETRY_PROFILING_UPLOAD_INTERVAL = 
"DD_PROFILING_UPLOAD_INTERVAL" +TELEMETRY_PROFILING_MAX_FRAMES = "DD_PROFILING_MAX_FRAMES" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/data.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/data.py new file mode 100644 index 0000000..4d141d0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/data.py @@ -0,0 +1,127 @@ +import platform +import sys +from typing import TYPE_CHECKING # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Tuple # noqa:F401 + +from ddtrace.internal.constants import DEFAULT_SERVICE_NAME +from ddtrace.internal.packages import Distribution +from ddtrace.internal.packages import filename_to_package +from ddtrace.internal.runtime.container import get_container_info +from ddtrace.internal.utils.cache import cached +from ddtrace.version import get_version + +from ...settings import _config as config # noqa:F401 +from ...settings.asm import config as asm_config +from ..hostname import get_hostname + + +def _format_version_info(vi): + # type: (sys._version_info) -> str + """Converts sys.version_info into a string with the format x.x.x""" + return "%d.%d.%d" % (vi.major, vi.minor, vi.micro) + + +def _get_container_id(): + # type: () -> str + """Get ID from docker container""" + container_info = get_container_info() + if container_info: + return container_info.container_id or "" + return "" + + +def _get_os_version(): + # type: () -> str + """Returns the os version for applications running on Mac or Windows 32-bit""" + try: + mver, _, _ = platform.mac_ver() + if mver: + return mver + + _, wver, _, _ = platform.win32_ver() + if wver: + return wver + except OSError: + # We were unable to lookup the proper version + pass + + return "" + + +@cached() +def _get_application(key): + # type: (Tuple[str, str, str]) -> Dict + """ + This helper packs and unpacks get_application arguments to support caching. 
+ Cached() annotation only supports functions with one argument + """ + service, version, env = key + + return { + "service_name": service or DEFAULT_SERVICE_NAME, # mandatory field, can not be empty + "service_version": version or "", + "env": env or "", + "language_name": "python", + "language_version": _format_version_info(sys.version_info), + "tracer_version": get_version(), + "runtime_name": platform.python_implementation(), + "runtime_version": _format_version_info(sys.implementation.version), + "products": _get_products(), + } + + +def update_imported_dependencies( + already_imported: Dict[str, Distribution], new_modules: List[str] +) -> List[Dict[str, str]]: + deps = [] + + for module_path in new_modules: + if not module_path: + continue + try: + package = filename_to_package(module_path) + if not package or (package.name in already_imported) or package.name == "ddtrace": + continue # not third party or already imported + except AttributeError: + continue + already_imported[package.name] = package + deps.append({"name": package.name, "version": package.version}) + + return deps + + +def get_application(service, version, env): + # type: (str, str, str) -> Dict + """Creates a dictionary to store application data using ddtrace configurations and the System-Specific module""" + # We cache the application dict to reduce overhead since service, version, or env configurations + # can change during runtime + return _get_application((service, version, env)) + + +def _get_products(): + # type: () -> Dict + return { + "appsec": {"version": get_version(), "enabled": asm_config._asm_enabled}, + } + + +_host_info = None + + +def get_host_info(): + # type: () -> Dict + """Creates a dictionary to store host data using the platform module""" + global _host_info + if _host_info is None: + _host_info = { + "os": platform.system(), + "hostname": get_hostname(), + "os_version": _get_os_version(), + "kernel_name": platform.system(), + "kernel_release": platform.release(), + "kernel_version": platform.version(), + "container_id": _get_container_id(), + } + return _host_info diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics.py new file mode 100644 index 0000000..c2d177b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +import abc +import time +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + + +MetricTagType = Optional[Tuple[Tuple[str, str], ...]] + + +class Metric(metaclass=abc.ABCMeta): + """ + Telemetry Metrics are stored in DD dashboards, check the metrics in datadoghq.com/metric/explorer + """ + + metric_type = "" + __slots__ = ["namespace", "name", "_tags", "is_common_to_all_tracers", "interval", "_points", "_count"] + + def __init__(self, namespace, name, tags, common, interval=None): + # type: (str, str, MetricTagType, bool, Optional[float]) -> None + """ + namespace: the scope of the metric: tracer, appsec, etc. 
+ name: string + tags: extra information attached to a metric + common: set to True if a metric is common to all tracers, false if it is python specific + interval: field set for gauge and rate metrics, any field set is ignored for count metrics (in secs) + """ + self.name = name.lower() + self.is_common_to_all_tracers = common + self.interval = interval + self.namespace = namespace + self._tags = tags + self._count = 0.0 + self._points = [] # type: List + + @classmethod + def get_id(cls, name, namespace, tags, metric_type): + # type: (str, str, MetricTagType, str) -> int + """ + https://www.datadoghq.com/blog/the-power-of-tagged-metrics/#whats-a-metric-tag + """ + return hash((name, namespace, tags, metric_type)) + + def __hash__(self): + return self.get_id(self.name, self.namespace, self._tags, self.metric_type) + + @abc.abstractmethod + def add_point(self, value=1.0): + # type: (float) -> None + """adds timestamped data point associated with a metric""" + pass + + def to_dict(self): + # type: () -> Dict + """returns a dictionary containing the metrics fields expected by the telemetry intake service""" + data = { + "metric": self.name, + "type": self.metric_type, + "common": self.is_common_to_all_tracers, + "points": self._points, + "tags": ["{}:{}".format(k, v).lower() for k, v in self._tags] if self._tags else [], + } + if self.interval is not None: + data["interval"] = int(self.interval) + return data + + +class CountMetric(Metric): + """ + A count type adds up all the submitted values in a time interval. This would be suitable for a + metric tracking the number of website hits, for instance. + """ + + metric_type = "count" + + def add_point(self, value=1.0): + # type: (float) -> None + """adds timestamped data point associated with a metric""" + if self._points: + self._points[0][1] += value + else: + self._points = [[time.time(), value]] + + +class GaugeMetric(Metric): + """ + A gauge type takes the last value reported during the interval. This type would make sense for tracking RAM or + CPU usage, where taking the last value provides a representative picture of the host’s behavior during the time + interval. In this case, using a different type such as count would probably lead to inaccurate and extreme values. + Choosing the correct metric type ensures accurate data. + """ + + metric_type = "gauge" + + def add_point(self, value=1.0): + # type: (float) -> None + """adds timestamped data point associated with a metric""" + self._points = [(time.time(), value)] + + +class RateMetric(Metric): + """ + The rate type takes the count and divides it by the length of the time interval. This is useful if you’re + interested in the number of hits per second. + """ + + metric_type = "rate" + + def add_point(self, value=1.0): + # type: (float) -> None + """Example: + https://github.com/DataDog/datadogpy/blob/ee5ac16744407dcbd7a3640ee7b4456536460065/datadog/threadstats/metrics.py#L181 + """ + self._count += value + rate = (self._count / self.interval) if self.interval else 0.0 + self._points = [(time.time(), rate)] + + +class DistributionMetric(Metric): + """ + The rate type takes the count and divides it by the length of the time interval. This is useful if you’re + interested in the number of hits per second. 
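> Editor's note: the metric classes differ only in how `add_point()` maintains `_points`: a count keeps one accumulating bucket, a gauge keeps only the latest reading, and a rate divides the running count by the flush interval. A side-by-side sketch of those three behaviours, independent of the surrounding `Metric` plumbing:

```python
import time


class Count:
    def __init__(self):
        self.points = []

    def add_point(self, value=1.0):
        if self.points:
            self.points[0][1] += value        # accumulate into the single bucket
        else:
            self.points = [[time.time(), value]]


class Gauge:
    def __init__(self):
        self.points = []

    def add_point(self, value=1.0):
        self.points = [(time.time(), value)]  # only the last reading survives


class Rate:
    def __init__(self, interval):
        self.interval = interval
        self.count = 0.0
        self.points = []

    def add_point(self, value=1.0):
        self.count += value
        self.points = [(time.time(), self.count / self.interval)]


c, g, r = Count(), Gauge(), Rate(interval=10.0)
for v in (1.0, 1.0, 3.0):
    c.add_point(v)
    g.add_point(v)
    r.add_point(v)

print(c.points[0][1])   # 5.0 -> total over the interval
print(g.points[0][1])   # 3.0 -> last value reported
print(r.points[0][1])   # 0.5 -> 5 events / 10 second interval
```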
+ """ + + metric_type = "distributions" + + def add_point(self, value=1.0): + # type: (float) -> None + """Example: + https://github.com/DataDog/datadogpy/blob/ee5ac16744407dcbd7a3640ee7b4456536460065/datadog/threadstats/metrics.py#L181 + """ + self._points.append(value) + + def to_dict(self): + # type: () -> Dict + """returns a dictionary containing the metrics fields expected by the telemetry intake service""" + data = { + "metric": self.name, + "points": self._points, + "tags": ["{}:{}".format(k, v).lower() for k, v in self._tags] if self._tags else [], + } + return data diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics_namespaces.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics_namespaces.py new file mode 100644 index 0000000..927f6de --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/metrics_namespaces.py @@ -0,0 +1,56 @@ +from collections import defaultdict +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Type # noqa:F401 + +from ddtrace.internal import forksafe +from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_DISTRIBUTION +from ddtrace.internal.telemetry.constants import TELEMETRY_TYPE_GENERATE_METRICS +from ddtrace.internal.telemetry.metrics import DistributionMetric +from ddtrace.internal.telemetry.metrics import Metric +from ddtrace.internal.telemetry.metrics import MetricTagType # noqa:F401 + + +NamespaceMetricType = Dict[str, Dict[str, Dict[str, Any]]] + + +class MetricNamespace: + def __init__(self): + # type: () -> None + self._lock = forksafe.Lock() # type: forksafe.ResetObject + self._metrics_data = { + TELEMETRY_TYPE_GENERATE_METRICS: defaultdict(dict), + TELEMETRY_TYPE_DISTRIBUTION: defaultdict(dict), + } # type: Dict[str, Dict[str, Dict[int, Metric]]] + + def flush(self): + # type: () -> Dict + with self._lock: + namespace_metrics = self._metrics_data + self._metrics_data = { + TELEMETRY_TYPE_GENERATE_METRICS: defaultdict(dict), + TELEMETRY_TYPE_DISTRIBUTION: defaultdict(dict), + } + return namespace_metrics + + def add_metric(self, metric_class, namespace, name, value=1.0, tags=None, interval=None): + # type: (Type[Metric], str, str, float, MetricTagType, Optional[float]) -> None + """ + Telemetry Metrics are stored in DD dashboards, check the metrics in datadoghq.com/metric/explorer. + The metric will store in dashboard as "dd.instrumentation_telemetry_data." + namespace + "." 
+ name + """ + metric_id = Metric.get_id(name, namespace, tags, metric_class.metric_type) + if metric_class is DistributionMetric: + metrics_type_payload = TELEMETRY_TYPE_DISTRIBUTION + else: + metrics_type_payload = TELEMETRY_TYPE_GENERATE_METRICS + + with self._lock: + existing_metric = self._metrics_data[metrics_type_payload][namespace].get(metric_id) + if existing_metric: + existing_metric.add_point(value) + else: + new_metric = metric_class(namespace, name, tags=tags, common=True, interval=interval) + new_metric.add_point(value) + self._metrics_data[metrics_type_payload][namespace][metric_id] = new_metric diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/writer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/writer.py new file mode 100644 index 0000000..5f9629b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/telemetry/writer.py @@ -0,0 +1,760 @@ +# -*- coding: utf-8 -*- +import itertools +import os +import sys +import time +from types import ModuleType +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Set # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ...internal import atexit +from ...internal import forksafe +from ...internal.compat import parse +from ...internal.module import BaseModuleWatchdog +from ...internal.module import origin +from ...internal.schema import SCHEMA_VERSION +from ...internal.schema import _remove_client_service_names +from ...settings import _config as config +from ...settings.config import _ConfigSource +from ...settings.dynamic_instrumentation import config as di_config +from ...settings.exception_debugging import config as ed_config +from ...settings.peer_service import _ps_config +from ...settings.profiling import config as prof_config +from ..agent import get_connection +from ..agent import get_trace_url +from ..compat import get_connection_response +from ..compat import httplib +from ..encoding import JSONEncoderV2 +from ..logger import get_logger +from ..packages import Distribution +from ..periodic import PeriodicService +from ..runtime import get_runtime_id +from ..service import ServiceStatus +from ..utils.formats import asbool +from ..utils.time import StopWatch +from ..utils.version import _pep440_to_semver +from .constants import TELEMETRY_128_BIT_TRACEID_GENERATION_ENABLED +from .constants import TELEMETRY_128_BIT_TRACEID_LOGGING_ENABLED +from .constants import TELEMETRY_AGENT_HOST +from .constants import TELEMETRY_AGENT_PORT +from .constants import TELEMETRY_AGENT_URL +from .constants import TELEMETRY_ANALYTICS_ENABLED +from .constants import TELEMETRY_CLIENT_IP_ENABLED +from .constants import TELEMETRY_DOGSTATSD_PORT +from .constants import TELEMETRY_DOGSTATSD_URL +from .constants import TELEMETRY_DYNAMIC_INSTRUMENTATION_ENABLED +from .constants import TELEMETRY_ENABLED +from .constants import TELEMETRY_EXCEPTION_DEBUGGING_ENABLED +from .constants import TELEMETRY_OBFUSCATION_QUERY_STRING_PATTERN +from .constants import TELEMETRY_OTEL_ENABLED +from .constants import TELEMETRY_PARTIAL_FLUSH_ENABLED +from .constants import TELEMETRY_PARTIAL_FLUSH_MIN_SPANS +from .constants import TELEMETRY_PRIORITY_SAMPLING +from .constants import TELEMETRY_PROFILING_CAPTURE_PCT +from .constants import TELEMETRY_PROFILING_EXPORT_LIBDD_ENABLED +from .constants import 
TELEMETRY_PROFILING_EXPORT_PY_ENABLED +from .constants import TELEMETRY_PROFILING_HEAP_ENABLED +from .constants import TELEMETRY_PROFILING_LOCK_ENABLED +from .constants import TELEMETRY_PROFILING_MAX_FRAMES +from .constants import TELEMETRY_PROFILING_MEMORY_ENABLED +from .constants import TELEMETRY_PROFILING_STACK_ENABLED +from .constants import TELEMETRY_PROFILING_UPLOAD_INTERVAL +from .constants import TELEMETRY_PROPAGATION_STYLE_EXTRACT +from .constants import TELEMETRY_PROPAGATION_STYLE_INJECT +from .constants import TELEMETRY_REMOTE_CONFIGURATION_ENABLED +from .constants import TELEMETRY_REMOTE_CONFIGURATION_INTERVAL +from .constants import TELEMETRY_RUNTIMEMETRICS_ENABLED +from .constants import TELEMETRY_SERVICE_MAPPING +from .constants import TELEMETRY_SPAN_SAMPLING_RULES +from .constants import TELEMETRY_SPAN_SAMPLING_RULES_FILE +from .constants import TELEMETRY_STARTUP_LOGS_ENABLED +from .constants import TELEMETRY_TRACE_AGENT_TIMEOUT_SECONDS +from .constants import TELEMETRY_TRACE_API_VERSION +from .constants import TELEMETRY_TRACE_COMPUTE_STATS +from .constants import TELEMETRY_TRACE_DEBUG +from .constants import TELEMETRY_TRACE_HEALTH_METRICS_ENABLED +from .constants import TELEMETRY_TRACE_PEER_SERVICE_DEFAULTS_ENABLED +from .constants import TELEMETRY_TRACE_PEER_SERVICE_MAPPING +from .constants import TELEMETRY_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED +from .constants import TELEMETRY_TRACE_SAMPLING_LIMIT +from .constants import TELEMETRY_TRACE_SAMPLING_RULES +from .constants import TELEMETRY_TRACE_SPAN_ATTRIBUTE_SCHEMA +from .constants import TELEMETRY_TRACE_WRITER_BUFFER_SIZE_BYTES +from .constants import TELEMETRY_TRACE_WRITER_INTERVAL_SECONDS +from .constants import TELEMETRY_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES +from .constants import TELEMETRY_TRACE_WRITER_REUSE_CONNECTIONS +from .constants import TELEMETRY_TYPE_DISTRIBUTION +from .constants import TELEMETRY_TYPE_GENERATE_METRICS +from .constants import TELEMETRY_TYPE_LOGS +from .data import get_application +from .data import get_host_info +from .data import update_imported_dependencies +from .metrics import CountMetric +from .metrics import DistributionMetric +from .metrics import GaugeMetric +from .metrics import MetricTagType # noqa:F401 +from .metrics import RateMetric +from .metrics_namespaces import MetricNamespace +from .metrics_namespaces import NamespaceMetricType # noqa:F401 + + +log = get_logger(__name__) + + +class LogData(dict): + def __hash__(self): + return hash((self["message"], self["level"], self.get("tags"), self.get("stack_trace"))) + + def __eq__(self, other): + return ( + self["message"] == other["message"] + and self["level"] == other["level"] + and self.get("tags") == other.get("tags") + and self.get("stack_trace") == other.get("stack_trace") + ) + + +class _TelemetryClient: + def __init__(self, endpoint): + # type: (str) -> None + self._agent_url = get_trace_url() + self._endpoint = endpoint + self._encoder = JSONEncoderV2() + self._headers = { + "Content-Type": "application/json", + "DD-Client-Library-Language": "python", + "DD-Client-Library-Version": _pep440_to_semver(), + } + + @property + def url(self): + return parse.urljoin(self._agent_url, self._endpoint) + + def send_event(self, request: Dict) -> Optional[httplib.HTTPResponse]: + """Sends a telemetry request to the trace agent""" + resp = None + conn = None + try: + rb_json = self._encoder.encode(request) + headers = self.get_headers(request) + with StopWatch() as sw: + conn = get_connection(self._agent_url) + conn.request("POST", 
self._endpoint, rb_json, headers) + resp = get_connection_response(conn) + if resp.status < 300: + log.debug("sent %d in %.5fs to %s. response: %s", len(rb_json), sw.elapsed(), self.url, resp.status) + else: + log.debug("failed to send telemetry to the Datadog Agent at %s. response: %s", self.url, resp.status) + except Exception: + log.debug("failed to send telemetry to the Datadog Agent at %s.", self.url) + finally: + if conn is not None: + conn.close() + return resp + + def get_headers(self, request): + # type: (Dict) -> Dict + """Get all telemetry api v2 request headers""" + headers = self._headers.copy() + headers["DD-Telemetry-Debug-Enabled"] = request["debug"] + headers["DD-Telemetry-Request-Type"] = request["request_type"] + headers["DD-Telemetry-API-Version"] = request["api_version"] + return headers + + +class TelemetryWriterModuleWatchdog(BaseModuleWatchdog): + _initial = True + _new_imported: Set[str] = set() + + def after_import(self, module: ModuleType) -> None: + module_path = origin(module) + self._new_imported.add(str(module_path)) + + @classmethod + def get_new_imports(cls): + if cls._initial: + try: + # On the first call, use sys.modules to cover all imports before we started. This is not + # done on __init__ because we want to do this slow operation on the writer's periodic call + # and not on instantiation. + new_imports = [str(origin(i)) for i in sys.modules.values()] + except RuntimeError: + new_imports = [] + finally: + # If there is any problem with the above we don't want to repeat this slow process, instead we just + # switch to report new dependencies on further calls + cls._initial = False + else: + new_imports = list(cls._new_imported) + + cls._new_imported.clear() + return new_imports + + +class TelemetryWriter(PeriodicService): + """ + Submits Instrumentation Telemetry events to the datadog agent. + Supports v2 of the instrumentation telemetry api + """ + + # telemetry endpoint uses events platform v2 api + ENDPOINT_V2 = "telemetry/proxy/api/v2/apmtelemetry" + # Counter representing the number of events sent by the writer. Here we are relying on the atomicity + # of `itertools.count()` which is a CPython implementation detail. The sequence field in telemetry + # payloads is only used in tests and is not required to process Telemetry events. + _sequence = itertools.count(1) + + def __init__(self, is_periodic=True): + # type: (bool) -> None + super(TelemetryWriter, self).__init__(interval=min(config._telemetry_heartbeat_interval, 10)) + # Decouples the aggregation and sending of the telemetry events + # TelemetryWriter events will only be sent when _periodic_count == _periodic_threshold. + # By default this will occur at 10 second intervals. + self._periodic_threshold = int(config._telemetry_heartbeat_interval // self.interval) - 1 + self._periodic_count = 0 + self._is_periodic = is_periodic + self._integrations_queue = dict() # type: Dict[str, Dict] + # Currently telemetry only supports reporting a single error. 
+ # If we'd like to report multiple errors in the future + # we could hack it in by xor-ing error codes and concatenating strings + self._error = (0, "") # type: Tuple[int, str] + self._namespace = MetricNamespace() + self._logs = set() # type: Set[Dict[str, Any]] + self._enabled = config._telemetry_enabled + self._forked = False # type: bool + self._events_queue = [] # type: List[Dict] + self._configuration_queue = {} # type: Dict[str, Dict] + self._lock = forksafe.Lock() # type: forksafe.ResetObject + self._imported_dependencies: Dict[str, Distribution] = dict() + + self.started = False + forksafe.register(self._fork_writer) + + # Debug flag that enables payload debug mode. + self._debug = asbool(os.environ.get("DD_TELEMETRY_DEBUG", "false")) + + self._client = _TelemetryClient(self.ENDPOINT_V2) + + def enable(self): + # type: () -> bool + """ + Enable the instrumentation telemetry collection service. If the service has already been + activated before, this method does nothing. Use ``disable`` to turn off the telemetry collection service. + """ + if not self._enabled: + return False + + if self.status == ServiceStatus.RUNNING: + return True + + if self._is_periodic: + self.start() + return True + + self.status = ServiceStatus.RUNNING + if config._telemetry_dependency_collection: + if not TelemetryWriterModuleWatchdog.is_installed(): + TelemetryWriterModuleWatchdog.install() + return True + + def disable(self): + # type: () -> None + """ + Disable the telemetry collection service and drop the existing integrations and events + Once disabled, telemetry collection can not be re-enabled. + """ + self._enabled = False + if TelemetryWriterModuleWatchdog.is_installed(): + TelemetryWriterModuleWatchdog.uninstall() + self.reset_queues() + if self._is_periodic and self.status is ServiceStatus.RUNNING: + self.stop() + else: + self.status = ServiceStatus.STOPPED + + def add_event(self, payload, payload_type): + # type: (Union[Dict[str, Any], List[Any]], str) -> None + """ + Adds a Telemetry event to the TelemetryWriter event buffer + + :param Dict payload: stores a formatted telemetry event + :param str payload_type: The payload_type denotes the type of telmetery request. + Payload types accepted by telemetry/proxy v2: app-started, app-closing, app-integrations-change + """ + if self.enable(): + event = { + "tracer_time": int(time.time()), + "runtime_id": get_runtime_id(), + "api_version": "v2", + "seq_id": next(self._sequence), + "debug": self._debug, + "application": get_application(config.service, config.version, config.env), + "host": get_host_info(), + "payload": payload, + "request_type": payload_type, + } + self._events_queue.append(event) + + def add_integration(self, integration_name, patched, auto_patched=None, error_msg=None, version=""): + # type: (str, bool, Optional[bool], Optional[str], Optional[str]) -> None + """ + Creates and queues the names and settings of a patched module + + :param str integration_name: name of patched module + :param bool auto_enabled: True if module is enabled in _monkey.PATCH_MODULES + """ + # Integrations can be patched before the telemetry writer is enabled. 
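        # Illustrative shape of one queued entry after the assignments below, as it is
        # later flushed by _flush_integrations_queue() (values are hypothetical; the
        # "compatible"/"error" keys only appear when an error message was passed):
        #   {"name": "flask", "version": "2.3.2", "enabled": True,
        #    "auto_enabled": True, "compatible": True, "error": ""}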
+ with self._lock: + if integration_name not in self._integrations_queue: + self._integrations_queue[integration_name] = {"name": integration_name} + + self._integrations_queue[integration_name]["version"] = version + self._integrations_queue[integration_name]["enabled"] = patched + + if auto_patched is not None: + self._integrations_queue[integration_name]["auto_enabled"] = auto_patched + + if error_msg is not None: + self._integrations_queue[integration_name]["compatible"] = error_msg == "" + self._integrations_queue[integration_name]["error"] = error_msg + + def add_error(self, code, msg, filename, line_number): + # type: (int, str, Optional[str], Optional[int]) -> None + """Add an error to be submitted with an event. + Note that this overwrites any previously set errors. + """ + if filename and line_number is not None: + msg = "%s:%s: %s" % (filename, line_number, msg) + self._error = (code, msg) + + def add_configs_changed(self, cfg_names): + cs = [{"name": n, "value": v, "origin": o} for n, v, o in [self._telemetry_entry(n) for n in cfg_names]] + self._app_client_configuration_changed_event(cs) + + def _telemetry_entry(self, cfg_name: str) -> Tuple[str, str, _ConfigSource]: + item = config._config[cfg_name] + if cfg_name == "_trace_enabled": + name = "trace_enabled" + value = "true" if item.value() else "false" + elif cfg_name == "_profiling_enabled": + name = "profiling_enabled" + value = "true" if item.value() else "false" + elif cfg_name == "_asm_enabled": + name = "appsec_enabled" + value = "true" if item.value() else "false" + elif cfg_name == "_dsm_enabled": + name = "data_streams_enabled" + value = "true" if item.value() else "false" + elif cfg_name == "_trace_sample_rate": + name = "trace_sample_rate" + value = str(item.value()) + elif cfg_name == "logs_injection": + name = "logs_injection_enabled" + value = "true" if item.value() else "false" + elif cfg_name == "trace_http_header_tags": + name = "trace_header_tags" + value = ",".join(":".join(x) for x in item.value().items()) + elif cfg_name == "tags": + name = "trace_tags" + value = ",".join(":".join(x) for x in item.value().items()) + elif cfg_name == "_tracing_enabled": + name = "tracing_enabled" + value = "true" if item.value() else "false" + else: + raise ValueError("Unknown configuration item: %s" % cfg_name) + return name, value, item.source() + + def _app_started_event(self, register_app_shutdown=True): + # type: (bool) -> None + """Sent when TelemetryWriter is enabled or forks""" + if self._forked or self.started: + # app-started events should only be sent by the main process + return + # List of configurations to be collected + + self.started = True + if register_app_shutdown: + atexit.register(self.app_shutdown) + + self.add_configurations( + [ + self._telemetry_entry("_trace_enabled"), + self._telemetry_entry("_profiling_enabled"), + self._telemetry_entry("_asm_enabled"), + self._telemetry_entry("_dsm_enabled"), + self._telemetry_entry("_trace_sample_rate"), + self._telemetry_entry("logs_injection"), + self._telemetry_entry("trace_http_header_tags"), + self._telemetry_entry("tags"), + self._telemetry_entry("_tracing_enabled"), + (TELEMETRY_STARTUP_LOGS_ENABLED, config._startup_logs_enabled, "unknown"), + (TELEMETRY_DYNAMIC_INSTRUMENTATION_ENABLED, di_config.enabled, "unknown"), + (TELEMETRY_EXCEPTION_DEBUGGING_ENABLED, ed_config.enabled, "unknown"), + (TELEMETRY_PROPAGATION_STYLE_INJECT, ",".join(config._propagation_style_inject), "unknown"), + (TELEMETRY_PROPAGATION_STYLE_EXTRACT, 
",".join(config._propagation_style_extract), "unknown"), + ("ddtrace_bootstrapped", config._ddtrace_bootstrapped, "unknown"), + ("ddtrace_auto_used", "ddtrace.auto" in sys.modules, "unknown"), + (TELEMETRY_RUNTIMEMETRICS_ENABLED, config._runtime_metrics_enabled, "unknown"), + (TELEMETRY_TRACE_DEBUG, config._debug_mode, "unknown"), + (TELEMETRY_ENABLED, config._telemetry_enabled, "unknown"), + (TELEMETRY_ANALYTICS_ENABLED, config.analytics_enabled, "unknown"), + (TELEMETRY_CLIENT_IP_ENABLED, config.client_ip_header, "unknown"), + (TELEMETRY_128_BIT_TRACEID_GENERATION_ENABLED, config._128_bit_trace_id_enabled, "unknown"), + (TELEMETRY_128_BIT_TRACEID_LOGGING_ENABLED, config._128_bit_trace_id_logging_enabled, "unknown"), + (TELEMETRY_TRACE_COMPUTE_STATS, config._trace_compute_stats, "unknown"), + ( + TELEMETRY_OBFUSCATION_QUERY_STRING_PATTERN, + config._obfuscation_query_string_pattern.pattern.decode("ascii") + if config._obfuscation_query_string_pattern + else "", + "unknown", + ), + (TELEMETRY_OTEL_ENABLED, config._otel_enabled, "unknown"), + (TELEMETRY_TRACE_HEALTH_METRICS_ENABLED, config.health_metrics_enabled, "unknown"), + (TELEMETRY_RUNTIMEMETRICS_ENABLED, config._runtime_metrics_enabled, "unknown"), + (TELEMETRY_REMOTE_CONFIGURATION_ENABLED, config._remote_config_enabled, "unknown"), + (TELEMETRY_REMOTE_CONFIGURATION_INTERVAL, config._remote_config_poll_interval, "unknown"), + (TELEMETRY_TRACE_SAMPLING_LIMIT, config._trace_rate_limit, "unknown"), + (TELEMETRY_SPAN_SAMPLING_RULES, config._sampling_rules, "unknown"), + (TELEMETRY_SPAN_SAMPLING_RULES_FILE, config._sampling_rules_file, "unknown"), + (TELEMETRY_TRACE_SAMPLING_RULES, config._trace_sampling_rules, "unknown"), + (TELEMETRY_PRIORITY_SAMPLING, config._priority_sampling, "unknown"), + (TELEMETRY_PARTIAL_FLUSH_ENABLED, config._partial_flush_enabled, "unknown"), + (TELEMETRY_PARTIAL_FLUSH_MIN_SPANS, config._partial_flush_min_spans, "unknown"), + (TELEMETRY_TRACE_SPAN_ATTRIBUTE_SCHEMA, SCHEMA_VERSION, "unknown"), + (TELEMETRY_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED, _remove_client_service_names, "unknown"), + (TELEMETRY_TRACE_PEER_SERVICE_DEFAULTS_ENABLED, _ps_config.set_defaults_enabled, "unknown"), + (TELEMETRY_TRACE_PEER_SERVICE_MAPPING, _ps_config._unparsed_peer_service_mapping, "unknown"), + (TELEMETRY_SERVICE_MAPPING, config._unparsed_service_mapping, "unknown"), + (TELEMETRY_TRACE_API_VERSION, config._trace_api, "unknown"), + (TELEMETRY_TRACE_WRITER_BUFFER_SIZE_BYTES, config._trace_writer_buffer_size, "unknown"), + (TELEMETRY_TRACE_WRITER_MAX_PAYLOAD_SIZE_BYTES, config._trace_writer_payload_size, "unknown"), + (TELEMETRY_TRACE_WRITER_INTERVAL_SECONDS, config._trace_writer_interval_seconds, "unknown"), + (TELEMETRY_TRACE_WRITER_REUSE_CONNECTIONS, config._trace_writer_connection_reuse, "unknown"), + (TELEMETRY_DOGSTATSD_PORT, config._stats_agent_port, "unknown"), + (TELEMETRY_DOGSTATSD_URL, config._stats_agent_url, "unknown"), + (TELEMETRY_AGENT_HOST, config._trace_agent_hostname, "unknown"), + (TELEMETRY_AGENT_PORT, config._trace_agent_port, "unknown"), + (TELEMETRY_AGENT_URL, config._trace_agent_url, "unknown"), + (TELEMETRY_TRACE_AGENT_TIMEOUT_SECONDS, config._agent_timeout_seconds, "unknown"), + (TELEMETRY_PROFILING_STACK_ENABLED, prof_config.stack.enabled, "unknown"), + (TELEMETRY_PROFILING_MEMORY_ENABLED, prof_config.memory.enabled, "unknown"), + (TELEMETRY_PROFILING_HEAP_ENABLED, prof_config.heap.sample_size > 0, "unknown"), + (TELEMETRY_PROFILING_LOCK_ENABLED, prof_config.lock.enabled, "unknown"), + 
(TELEMETRY_PROFILING_EXPORT_PY_ENABLED, prof_config.export.py_enabled, "unknown"), + (TELEMETRY_PROFILING_EXPORT_LIBDD_ENABLED, prof_config.export.libdd_enabled, "unknown"), + (TELEMETRY_PROFILING_CAPTURE_PCT, prof_config.capture_pct, "unknown"), + (TELEMETRY_PROFILING_MAX_FRAMES, prof_config.max_frames, "unknown"), + (TELEMETRY_PROFILING_UPLOAD_INTERVAL, prof_config.upload_interval, "unknown"), + ] + ) + + payload = { + "configuration": self._flush_configuration_queue(), + "error": { + "code": self._error[0], + "message": self._error[1], + }, + } # type: Dict[str, Union[Dict[str, Any], List[Any]]] + # Add time to value telemetry metrics for single step instrumentation + if config._telemetry_install_id or config._telemetry_install_type or config._telemetry_install_time: + payload["install_signature"] = { + "install_id": config._telemetry_install_id, + "install_type": config._telemetry_install_type, + "install_time": config._telemetry_install_time, + } + + # Reset the error after it has been reported. + self._error = (0, "") + self.add_event(payload, "app-started") + + def _app_heartbeat_event(self): + # type: () -> None + if self._forked: + # TODO: Enable app-heartbeat on forks + # Since we only send app-started events in the main process + # any forked processes won't be able to access the list of + # dependencies for this app, and therefore app-heartbeat won't + # add much value today. + return + + self.add_event({}, "app-heartbeat") + + def _app_closing_event(self): + # type: () -> None + """Adds a Telemetry event which notifies the agent that an application instance has terminated""" + if self._forked: + # app-closing event should only be sent by the main process + return + payload = {} # type: Dict + self.add_event(payload, "app-closing") + + def _app_integrations_changed_event(self, integrations): + # type: (List[Dict]) -> None + """Adds a Telemetry event which sends a list of configured integrations to the agent""" + payload = { + "integrations": integrations, + } + self.add_event(payload, "app-integrations-change") + + def _flush_integrations_queue(self): + # type: () -> List[Dict] + """Flushes and returns a list of all queued integrations""" + with self._lock: + integrations = list(self._integrations_queue.values()) + self._integrations_queue = dict() + return integrations + + def _flush_new_imported_dependencies(self) -> List[str]: + with self._lock: + new_deps = TelemetryWriterModuleWatchdog.get_new_imports() + return new_deps + + def _flush_configuration_queue(self): + # type: () -> List[Dict] + """Flushes and returns a list of all queued configurations""" + with self._lock: + configurations = list(self._configuration_queue.values()) + self._configuration_queue = {} + return configurations + + def _app_client_configuration_changed_event(self, configurations): + # type: (List[Dict]) -> None + """Adds a Telemetry event which sends list of modified configurations to the agent""" + payload = { + "configuration": configurations, + } + self.add_event(payload, "app-client-configuration-change") + + def _update_dependencies_event(self, newly_imported_deps: List[str]): + """Adds events to report imports done since the last periodic run""" + + if not config._telemetry_dependency_collection or not self._enabled: + return + + with self._lock: + packages = update_imported_dependencies(self._imported_dependencies, newly_imported_deps) + + if packages: + payload = {"dependencies": packages} + self.add_event(payload, "app-dependencies-loaded") + + def add_configuration(self, 
configuration_name, configuration_value, origin="unknown"): + # type: (str, Union[bool, float, str], str) -> None + """Creates and queues the name, origin, value of a configuration""" + with self._lock: + self._configuration_queue[configuration_name] = { + "name": configuration_name, + "origin": origin, + "value": configuration_value, + } + + def add_configurations(self, configuration_list): + # type: (List[Tuple[str, Union[bool, float, str], str]]) -> None + """Creates and queues a list of configurations""" + with self._lock: + for name, value, _origin in configuration_list: + self._configuration_queue[name] = { + "name": name, + "origin": _origin, + "value": value, + } + + def add_log(self, level, message, stack_trace="", tags=None): + # type: (str, str, str, Optional[Dict]) -> None + """ + Queues log. This event is meant to send library logs to Datadog’s backend through the Telemetry intake. + This will make support cycles easier and ensure we know about potentially silent issues in libraries. + """ + if tags is None: + tags = {} + + if self.enable(): + data = LogData( + { + "message": message, + "level": level, + "tracer_time": int(time.time()), + } + ) + if tags: + data["tags"] = ",".join(["%s:%s" % (k, str(v).lower()) for k, v in tags.items()]) + if stack_trace: + data["stack_trace"] = stack_trace + self._logs.add(data) + + def add_gauge_metric(self, namespace, name, value, tags=None): + # type: (str,str, float, MetricTagType) -> None + """ + Queues gauge metric + """ + if self.status == ServiceStatus.RUNNING or self.enable(): + self._namespace.add_metric( + GaugeMetric, + namespace, + name, + value, + tags, + self.interval, + ) + + def add_rate_metric(self, namespace, name, value=1.0, tags=None): + # type: (str,str, float, MetricTagType) -> None + """ + Queues rate metric + """ + if self.status == ServiceStatus.RUNNING or self.enable(): + self._namespace.add_metric( + RateMetric, + namespace, + name, + value, + tags, + self.interval, + ) + + def add_count_metric(self, namespace, name, value=1.0, tags=None): + # type: (str,str, float, MetricTagType) -> None + """ + Queues count metric + """ + if self.status == ServiceStatus.RUNNING or self.enable(): + self._namespace.add_metric( + CountMetric, + namespace, + name, + value, + tags, + ) + + def add_distribution_metric(self, namespace, name, value=1.0, tags=None): + # type: (str,str, float, MetricTagType) -> None + """ + Queues distributions metric + """ + if self.status == ServiceStatus.RUNNING or self.enable(): + self._namespace.add_metric( + DistributionMetric, + namespace, + name, + value, + tags, + ) + + def _flush_log_metrics(self): + # type () -> Set[Metric] + with self._lock: + log_metrics = self._logs + self._logs = set() + return log_metrics + + def _generate_metrics_event(self, namespace_metrics): + # type: (NamespaceMetricType) -> None + for payload_type, namespaces in namespace_metrics.items(): + for namespace, metrics in namespaces.items(): + if metrics: + payload = { + "namespace": namespace, + "series": [m.to_dict() for m in metrics.values()], + } + log.debug("%s request payload, namespace %s", payload_type, namespace) + if payload_type == TELEMETRY_TYPE_DISTRIBUTION: + self.add_event(payload, TELEMETRY_TYPE_DISTRIBUTION) + elif payload_type == TELEMETRY_TYPE_GENERATE_METRICS: + self.add_event(payload, TELEMETRY_TYPE_GENERATE_METRICS) + + def _generate_logs_event(self, payload): + # type: (Set[Dict[str, str]]) -> None + log.debug("%s request payload", TELEMETRY_TYPE_LOGS) + self.add_event(list(payload), 
TELEMETRY_TYPE_LOGS) + + def periodic(self, force_flush=False): + namespace_metrics = self._namespace.flush() + if namespace_metrics: + self._generate_metrics_event(namespace_metrics) + + logs_metrics = self._flush_log_metrics() + if logs_metrics: + self._generate_logs_event(logs_metrics) + + # Telemetry metrics and logs should be aggregated into payloads every time periodic is called. + # This ensures metrics and logs are submitted in 0 to 10 second time buckets. + # Optimization: All other events should be aggregated using `config._telemetry_heartbeat_interval`. + # Telemetry payloads will be submitted according to `config._telemetry_heartbeat_interval`. + if self._is_periodic and force_flush is False: + if self._periodic_count < self._periodic_threshold: + self._periodic_count += 1 + return + self._periodic_count = 0 + + integrations = self._flush_integrations_queue() + if integrations: + self._app_integrations_changed_event(integrations) + + configurations = self._flush_configuration_queue() + if configurations: + self._app_client_configuration_changed_event(configurations) + + if config._telemetry_dependency_collection: + newly_imported_deps = self._flush_new_imported_dependencies() + if newly_imported_deps: + self._update_dependencies_event(newly_imported_deps) + + if not self._events_queue: + # Optimization: only queue heartbeat if no other events are queued + self._app_heartbeat_event() + + telemetry_events = self._flush_events_queue() + for telemetry_event in telemetry_events: + self._client.send_event(telemetry_event) + + def app_shutdown(self): + self._app_closing_event() + self.periodic(force_flush=True) + self.disable() + + def reset_queues(self): + # type: () -> None + self._events_queue = [] + self._integrations_queue = dict() + self._namespace.flush() + self._logs = set() + + def _flush_events_queue(self): + # type: () -> List[Dict] + """Flushes and returns a list of all telemtery event""" + with self._lock: + events = self._events_queue + self._events_queue = [] + return events + + def _fork_writer(self): + # type: () -> None + self._forked = True + # Avoid sending duplicate events. + # Queued events should be sent in the main process. + self.reset_queues() + if self.status == ServiceStatus.STOPPED: + return + + if self._is_periodic: + self.stop(join=False) + + # Enable writer service in child process to avoid interpreter shutdown + # error in Python 3.12 + self.enable() + + def _restart_sequence(self): + self._sequence = itertools.count(1) + + def _stop_service(self, join=True, *args, **kwargs): + # type: (...) -> None + super(TelemetryWriter, self)._stop_service(*args, **kwargs) + if join: + self.join(timeout=2) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/tracemethods.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/tracemethods.py new file mode 100644 index 0000000..f6553e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/tracemethods.py @@ -0,0 +1,106 @@ +import typing # noqa:F401 + +import wrapt + + +def _parse_trace_methods(raw_dd_trace_methods): + # type: (str) -> typing.List[str] + """Return the methods to trace based on the specification of DD_TRACE_METHODS. + + DD_TRACE_METHODS is specified to be FullyQualifiedClassOrModuleName[comma-separated-methods] + + Note that support for wildcard methods ([*]) is not implemented. 
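# Usage sketch (editor's annotation, not part of the vendored file): the parser above
# turns a hypothetical value such as "mod.submod.Cls[meth_a,meth_b];other.mod[run]"
# into fully qualified method names. The import path assumes the bundled ddtrace package.
from ddtrace.internal.tracemethods import _parse_trace_methods

assert _parse_trace_methods("mod.submod.Cls[meth_a,meth_b];other.mod[run]") == [
    "mod.submod.Cls.meth_a",
    "mod.submod.Cls.meth_b",
    "other.mod.run",
]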
+ """ + if not raw_dd_trace_methods: + return [] + + dd_trace_methods = [] + for qualified_methods in raw_dd_trace_methods.split(";"): + # Validate that methods are specified + if "[" not in qualified_methods or "]" not in qualified_methods: + raise ValueError( + ( + "Invalid DD_TRACE_METHODS: %s. " + "Methods must be specified in square brackets following the fully qualified module or class name." + ) + % qualified_methods + ) + + # Store the prefix of the qualified method name (eg. for "foo.bar.baz[qux,quux]", this is "foo.bar.baz") + qualified_method_prefix = qualified_methods.split("[")[0] + + if qualified_method_prefix == "__main__": + # __main__ cannot be used since the __main__ that exists now is not the same as the __main__ that the user + # application will have. __main__ when sitecustomize module is run is the builtin __main__. + raise ValueError( + "Invalid DD_TRACE_METHODS: %s. Methods cannot be traced on the __main__ module." % qualified_methods + ) + + # Get the class or module name of the method (eg. for "foo.bar.baz[qux,quux]", this is "baz[qux,quux]") + class_or_module_with_methods = qualified_methods.split(".")[-1] + + # Strip off the leading 'moduleOrClass[' and trailing ']' + methods = class_or_module_with_methods.split("[")[1] + methods = methods[:-1] + + # Add the methods to the list of methods to trace + for method in methods.split(","): + if not str.isidentifier(method): + raise ValueError( + "Invalid method name: %r. %s" + % ( + method, + "You might have a trailing comma." + if method == "" + else "Method names must be valid Python identifiers.", + ) + ) + dd_trace_methods.append("%s.%s" % (qualified_method_prefix, method)) + + return dd_trace_methods + + +def _install_trace_methods(raw_dd_trace_methods): + # type: (str) -> None + """Install tracing on the given methods.""" + for qualified_method in _parse_trace_methods(raw_dd_trace_methods): + # We don't know if the method is a class method or a module method, so we need to assume it's a module + # and if the import fails then go a level up and try again. 
+ base_module_guess = ".".join(qualified_method.split(".")[:-1]) + method_name = qualified_method.split(".")[-1] + module = None + + while base_module_guess: + try: + module = __import__(base_module_guess) + except ImportError: + # Add the class to the method name + method_name = "%s.%s" % (base_module_guess.split(".")[-1], method_name) + base_module_guess = ".".join(base_module_guess.split(".")[:-1]) + else: + break + + if module is None: + raise ImportError("Could not import module for %r" % qualified_method) + + trace_method(base_module_guess, method_name) + + +def trace_method(module, method_name): + # type: (str, str) -> None + + @wrapt.importer.when_imported(module) + def _(m): + wrapt.wrap_function_wrapper(m, method_name, trace_wrapper) + + +def trace_wrapper(wrapped, instance, args, kwargs): + from ddtrace import tracer + + resource = wrapped.__name__ + if hasattr(instance, "__class__") and instance.__class__ is not type(None): # noqa: E721 + resource = "%s.%s" % (instance.__class__.__name__, resource) + + with tracer.trace("trace.annotation", resource=resource) as span: + span.set_tag_str("component", "trace") + return wrapped(*args, **kwargs) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uds.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uds.py new file mode 100644 index 0000000..5c8dbf0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uds.py @@ -0,0 +1,27 @@ +import socket +from typing import Any # noqa:F401 + +from .compat import httplib +from .http import BasePathMixin + + +class UDSHTTPConnection(BasePathMixin, httplib.HTTPConnection): + """An HTTP connection established over a Unix Domain Socket.""" + + # It's "important" to keep the hostname and port arguments here; while there are not used by the connection + # mechanism, they are actually used as HTTP headers such as `Host`. + def __init__( + self, + path, # type: str + *args, # type: Any + **kwargs, # type: Any + ): + # type: (...) -> None + super(UDSHTTPConnection, self).__init__(*args, **kwargs) + self.path = path + + def connect(self): + # type: () -> None + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.connect(self.path) + self.sock = sock diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/__init__.py new file mode 100644 index 0000000..44dd60a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/__init__.py @@ -0,0 +1,82 @@ +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + + +class ArgumentError(Exception): + """ + This is raised when an argument lookup, either by position or by keyword, is + not found. + """ + + +def get_argument_value( + args, # type: List[Any] + kwargs, # type: Dict[str, Any] + pos, # type: int + kw, # type: str + optional=False, # type: bool +): + # type: (...) -> Optional[Any] + """ + This function parses the value of a target function argument that may have been + passed in as a positional argument or a keyword argument. Because monkey-patched + functions do not define the same signature as their target function, the value of + arguments must be inferred from the packed args and kwargs. + Keyword arguments are prioritized, followed by the positional argument. 
If the + argument cannot be resolved, an ``ArgumentError`` exception is raised, which could + be used, e.g., to handle a default value by the caller. + :param args: Positional arguments + :param kwargs: Keyword arguments + :param pos: The positional index of the argument if passed in as a positional arg + :param kw: The name of the keyword if passed in as a keyword argument + :return: The value of the target argument + """ + try: + return kwargs[kw] + except KeyError: + try: + return args[pos] + except IndexError: + if optional: + return None + raise ArgumentError("%s (at position %d)" % (kw, pos)) + + +def set_argument_value( + args, # type: Tuple[Any, ...] + kwargs, # type: Dict[str, Any] + pos, # type: int + kw, # type: str + value, # type: Any +): + # type: (...) -> Tuple[Tuple[Any, ...], Dict[str, Any]] + """ + Returns a new args, kwargs with the given value updated + :param args: Positional arguments + :param kwargs: Keyword arguments + :param pos: The positional index of the argument + :param kw: The name of the keyword + :param value: The new value of the target argument + :return: Updated args and kwargs + """ + if len(args) > pos: + args = args[:pos] + (value,) + args[pos + 1 :] + elif kw in kwargs: + kwargs[kw] = value + else: + raise ArgumentError("%s (at position %d) is invalid" % (kw, pos)) + + return args, kwargs + + +def _get_metas_to_propagate(context): + # type: (Any) -> List[Tuple[str, str]] + metas_to_propagate = [] + # copying context._meta.items() to avoid RuntimeError: dictionary changed size during iteration + for k, v in list(context._meta.items()): + if isinstance(k, str) and k.startswith("_dd.p."): + metas_to_propagate.append((k, v)) + return metas_to_propagate diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/attrdict.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/attrdict.py new file mode 100644 index 0000000..1c74cb5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/attrdict.py @@ -0,0 +1,42 @@ +from typing import Any # noqa:F401 + + +class AttrDict(dict): + """ + dict implementation that allows for item attribute access + + + Example:: + + data = AttrDict() + data['key'] = 'value' + print(data['key']) + + data.key = 'new-value' + print(data.key) + + # Convert an existing `dict` + data = AttrDict(dict(key='value')) + print(data.key) + """ + + def __getattr__(self, key): + # type: (str) -> Any + if key in self: + return self[key] + return object.__getattribute__(self, key) + + def __setattr__(self, key, value): + # type: (str, Any) -> None + # 1) Ensure if the key exists from a dict key we always prefer that + # 2) If we do not have an existing key but we do have an attr, set that + # 3) No existing key or attr exists, so set a key + if key in self: + # Update any existing key + self[key] = value + elif hasattr(self, key): + # Allow overwriting an existing attribute, e.g. 
`self.global_config = dict()` + object.__setattr__(self, key, value) + else: + # Set a new key + self[key] = value diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/cache.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/cache.py new file mode 100644 index 0000000..2ce08ac --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/cache.py @@ -0,0 +1,139 @@ +from threading import RLock +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Type # noqa:F401 +from typing import TypeVar # noqa:F401 + +from ddtrace.internal.compat import getfullargspec +from ddtrace.internal.compat import is_not_void_function + + +miss = object() + +T = TypeVar("T") +F = Callable[[T], Any] +M = Callable[[Any, T], Any] + + +class LFUCache(dict): + """Simple LFU cache implementation. + + This cache is designed for memoizing functions with a single hashable + argument. The eviction policy is LFU, i.e. the least frequently used values + are evicted when the cache is full. The amortized cost of shrinking the + cache when it grows beyond the requested size is O(log(size)). + """ + + def __init__(self, maxsize=256): + # type: (int) -> None + self.maxsize = maxsize + self.lock = RLock() + self.count_lock = RLock() + + def get(self, key, f): # type: ignore[override] + # type: (T, F) -> Any + """Get a value from the cache. + + If the value with the given key is not in the cache, the expensive + function ``f`` is called on the key to generate it. The return value is + then stored in the cache and returned to the caller. + """ + + _ = super(LFUCache, self).get(key, miss) + if _ is not miss: + with self.count_lock: + value, count = _ + self[key] = (value, count + 1) + return value + + with self.lock: + _ = super(LFUCache, self).get(key, miss) + if _ is not miss: + with self.count_lock: + value, count = _ + self[key] = (value, count + 1) + return value + + # Cache miss: ensure that we have enough space in the cache + # by evicting half of the entries when we go over the threshold + while len(self) >= self.maxsize: + for h in sorted(self, key=lambda h: self[h][1])[: self.maxsize >> 1]: + del self[h] + + value = f(key) + + self[key] = (value, 1) + + return value + + +def cached(maxsize=256): + # type: (int) -> Callable[[F], F] + """Decorator for memoizing functions of a single argument (LFU policy).""" + + def cached_wrapper(f): + # type: (F) -> F + cache = LFUCache(maxsize) + + def cached_f(key): + # type: (T) -> Any + return cache.get(key, f) + + cached_f.invalidate = cache.clear # type: ignore[attr-defined] + + return cached_f + + return cached_wrapper + + +class CachedMethodDescriptor(object): + def __init__(self, method, maxsize): + # type: (M, int) -> None + self._method = method + self._maxsize = maxsize + + def __get__(self, obj, objtype=None): + # type: (Any, Optional[Type]) -> F + cached_method = cached(self._maxsize)(self._method.__get__(obj, objtype)) + setattr(obj, self._method.__name__, cached_method) + return cached_method + + +def cachedmethod(maxsize=256): + # type: (int) -> Callable[[M], CachedMethodDescriptor] + """Decorator for memoizing methods of a single argument (LFU policy).""" + + def cached_wrapper(f): + # type: (M) -> CachedMethodDescriptor + return CachedMethodDescriptor(f, maxsize) + + return cached_wrapper + + +def callonce(f): + # type: (Callable[[], Any]) -> Callable[[], Any] + """Decorator for executing a function only the first time.""" + argspec = 
getfullargspec(f) + if is_not_void_function(f, argspec): + raise ValueError("The callonce decorator can only be applied to functions with no arguments") + + def _(): + # type: () -> Any + try: + retval, exc = f.__callonce_result__ # type: ignore[attr-defined] + except AttributeError: + try: + retval = f() + exc = None + except Exception as e: + retval = None + exc = e + f.__callonce_result__ = retval, exc # type: ignore[attr-defined] + + if exc is not None: + raise exc + + return retval + + return _ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/config.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/config.py new file mode 100644 index 0000000..593f1fd --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/config.py @@ -0,0 +1,19 @@ +import os +import sys +import typing # noqa:F401 + + +def get_application_name(): + # type: () -> typing.Optional[str] + """Attempts to find the application name using system arguments.""" + try: + import __main__ + + name = __main__.__file__ + except (ImportError, AttributeError): + try: + name = sys.argv[0] + except (AttributeError, IndexError): + return None + + return os.path.basename(name) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/deprecations.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/deprecations.py new file mode 100644 index 0000000..ccbcf3e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/deprecations.py @@ -0,0 +1,6 @@ +class DDTraceDeprecationWarning(DeprecationWarning): + # Override module to simplify adding warning filters by querying for + # ddtrace.DDTraceDeprecationWarning but not have to expose this in the + # public API. This also allows us to avoid circular imports that would occur if + # it was contained in the top-level ddtrace package. + __module__ = "ddtrace" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/formats.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/formats.py new file mode 100644 index 0000000..778c01f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/formats.py @@ -0,0 +1,185 @@ +import logging +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Text # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import TypeVar # noqa:F401 +from typing import Union # noqa:F401 + +from ..compat import ensure_text + + +VALUE_PLACEHOLDER = "?" +VALUE_MAX_LEN = 100 +VALUE_TOO_LONG_MARK = "..." +CMD_MAX_LEN = 1000 + + +T = TypeVar("T") + + +log = logging.getLogger(__name__) + + +def deep_getattr(obj, attr_string, default=None): + # type: (Any, str, Optional[Any]) -> Optional[Any] + """ + Returns the attribute of `obj` at the dotted path given by `attr_string` + If no such attribute is reachable, returns `default` + + >>> deep_getattr(cass, 'cluster') + >> deep_getattr(cass, 'cluster.metadata.partitioner') + u'org.apache.cassandra.dht.Murmur3Partitioner' + + >>> deep_getattr(cass, 'i.dont.exist', default='default') + 'default' + """ + attrs = attr_string.split(".") + for attr in attrs: + try: + obj = getattr(obj, attr) + except AttributeError: + return default + + return obj + + +def asbool(value): + # type: (Union[str, bool, None]) -> bool + """Convert the given String to a boolean object. + + Accepted values are `True` and `1`. 
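# Usage sketch (editor's annotation, not part of the vendored file); the import path
# assumes the bundled ddtrace package is importable, as it is inside the forwarder Lambda.
from ddtrace.internal.utils.formats import asbool

assert asbool("true") is True
assert asbool("1") is True
assert asbool("false") is False
assert asbool(None) is False
assert asbool(True) is True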
+ """ + if value is None: + return False + + if isinstance(value, bool): + return value + + return value.lower() in ("true", "1") + + +def parse_tags_str(tags_str): + # type: (Optional[str]) -> Dict[str, str] + """Parse a string of tags typically provided via environment variables. + + The expected string is of the form:: + "key1:value1,key2:value2" + "key1:value1 key2:value2" + + :param tags_str: A string of the above form to parse tags from. + :return: A dict containing the tags that were parsed. + """ + if not tags_str: + return {} + + TAGSEP = ", " + + def parse_tags(tags): + # type: (List[str]) -> Tuple[List[Tuple[str, str]], List[str]] + parsed_tags = [] + invalids = [] + + for tag in tags: + key, sep, value = tag.partition(":") + if not sep or not key or "," in key: + invalids.append(tag) + else: + parsed_tags.append((key, value)) + + return parsed_tags, invalids + + tags_str = tags_str.strip(TAGSEP) + + # Take the maximal set of tags that can be parsed correctly for a given separator + tag_list = [] # type: List[Tuple[str, str]] + invalids = [] + for sep in TAGSEP: + ts = tags_str.split(sep) + tags, invs = parse_tags(ts) + if len(tags) > len(tag_list): + tag_list = tags + invalids = invs + elif len(tags) == len(tag_list) > 1: + # Both separators produce the same number of tags. + # DEV: This only works when checking between two separators. + tag_list[:] = [] + invalids[:] = [] + + if not tag_list: + log.error( + ( + "Failed to find separator for tag string: '%s'.\n" + "Tag strings must be comma or space separated:\n" + " key1:value1,key2:value2\n" + " key1:value1 key2:value2" + ), + tags_str, + ) + + for tag in invalids: + log.error("Malformed tag in tag pair '%s' from tag string '%s'.", tag, tags_str) + + return dict(tag_list) + + +def stringify_cache_args(args, value_max_len=VALUE_MAX_LEN, cmd_max_len=CMD_MAX_LEN): + # type: (List[Any], int, int) -> Text + """Convert a list of arguments into a space concatenated string + + This function is useful to convert a list of cache keys + into a resource name or tag value with a max size limit. 
+ """ + length = 0 + out = [] # type: List[Text] + for arg in args: + try: + if isinstance(arg, (bytes, str)): + cmd = ensure_text(arg, errors="backslashreplace") + else: + cmd = str(arg) + + if len(cmd) > value_max_len: + cmd = cmd[:value_max_len] + VALUE_TOO_LONG_MARK + + if length + len(cmd) > cmd_max_len: + prefix = cmd[: cmd_max_len - length] + out.append("%s%s" % (prefix, VALUE_TOO_LONG_MARK)) + break + + out.append(cmd) + length += len(cmd) + except Exception: + out.append(VALUE_PLACEHOLDER) + break + + return " ".join(out) + + +def is_sequence(obj): + # type: (Any) -> bool + try: + return isinstance(obj, (list, tuple, set, frozenset)) + except TypeError: + # Checking the type of Generic Subclasses raises a TypeError + return False + + +def flatten_key_value(root_key, value): + # type: (str, Any) -> Dict[str, Any] + """Flattens attributes""" + if not is_sequence(value): + return {root_key: value} + + flattened = dict() + for i, item in enumerate(value): + key = f"{root_key}.{i}" + if is_sequence(item): + flattened.update(flatten_key_value(key, item)) + else: + flattened[key] = item + return flattened diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/http.py new file mode 100644 index 0000000..e40c822 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/http.py @@ -0,0 +1,441 @@ +from contextlib import contextmanager +from dataclasses import dataclass +from email.mime.application import MIMEApplication +from email.mime.multipart import MIMEMultipart +from json import loads +import logging +import os +import re +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import ContextManager # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Generator # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Pattern # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.constants import USER_ID_KEY +from ddtrace.internal import compat +from ddtrace.internal.compat import parse +from ddtrace.internal.constants import BLOCKED_RESPONSE_HTML +from ddtrace.internal.constants import BLOCKED_RESPONSE_JSON +from ddtrace.internal.constants import DEFAULT_TIMEOUT +from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY +from ddtrace.internal.constants import W3C_TRACESTATE_ORIGIN_KEY +from ddtrace.internal.constants import W3C_TRACESTATE_SAMPLING_PRIORITY_KEY +from ddtrace.internal.http import HTTPConnection +from ddtrace.internal.http import HTTPSConnection +from ddtrace.internal.uds import UDSHTTPConnection +from ddtrace.internal.utils import _get_metas_to_propagate +from ddtrace.internal.utils.cache import cached + + +ConnectionType = Union[HTTPSConnection, HTTPConnection, UDSHTTPConnection] + + +_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE = re.compile(r",|;|~|[^\x20-\x7E]+") +_W3C_TRACESTATE_INVALID_CHARS_REGEX_KEY = re.compile(r",| |=|[^\x20-\x7E]+") + + +Connector = Callable[[], ContextManager[compat.httplib.HTTPConnection]] + + +log = logging.getLogger(__name__) + + +@cached() +def normalize_header_name(header_name): + # type: (Optional[str]) -> Optional[str] + """ + Normalizes an header name to lower case, stripping all its leading and trailing white spaces. 
+ :param header_name: the header name to normalize + :type header_name: str + :return: the normalized header name + :rtype: str + """ + return header_name.strip().lower() if header_name is not None else None + + +def strip_query_string(url): + # type: (str) -> str + """ + Strips the query string from a URL for use as tag in spans. + :param url: The URL to be stripped + :return: The given URL without query strings + """ + hqs, fs, f = url.partition("#") + h, _, _ = hqs.partition("?") + if not f: + return h + return h + fs + f + + +def redact_query_string(query_string, query_string_obfuscation_pattern): + # type: (str, Optional[re.Pattern]) -> Union[bytes, str] + if query_string_obfuscation_pattern is None: + return query_string + + bytes_query = query_string if isinstance(query_string, bytes) else query_string.encode("utf-8") + return query_string_obfuscation_pattern.sub(b"", bytes_query) + + +def redact_url(url, query_string_obfuscation_pattern, query_string=None): + # type: (str, re.Pattern, Optional[str]) -> Union[str,bytes] + + # Avoid further processing if obfuscation is disabled + if query_string_obfuscation_pattern is None: + return url + + parts = compat.parse.urlparse(url) + redacted_query = None + + if query_string: + redacted_query = redact_query_string(query_string, query_string_obfuscation_pattern) + elif parts.query: + redacted_query = redact_query_string(parts.query, query_string_obfuscation_pattern) + + if redacted_query is not None and len(parts) >= 5: + redacted_parts = parts[:4] + (redacted_query,) + parts[5:] # type: Tuple[Union[str, bytes], ...] + bytes_redacted_parts = tuple(x if isinstance(x, bytes) else x.encode("utf-8") for x in redacted_parts) + return urlunsplit(bytes_redacted_parts, url) + + # If no obfuscation is performed, return original url + return url + + +def urlunsplit(components, original_url): + # type: (Tuple[bytes, ...], str) -> bytes + """ + Adaptation from urlunsplit and urlunparse, using bytes components + """ + scheme, netloc, url, params, query, fragment = components + if params: + url = b"%s;%s" % (url, params) + if netloc or (scheme and url[:2] != b"//"): + if url and url[:1] != b"/": + url = b"/" + url + url = b"//%s%s" % ((netloc or b""), url) + if scheme: + url = b"%s:%s" % (scheme, url) + if query or (original_url and original_url[-1] in ("?", b"?")): + url = b"%s?%s" % (url, query) + if fragment or (original_url and original_url[-1] in ("#", b"#")): + url = b"%s#%s" % (url, fragment) + return url + + +def connector(url, **kwargs): + # type: (str, Any) -> Connector + """Create a connector context manager for the given URL. + + This function returns a context manager that wraps a connection object to + perform HTTP requests against the given URL. Extra keyword arguments can be + passed to the underlying connection object, if needed. + + Example:: + >>> connect = connector("http://localhost:8080") + >>> with connect() as conn: + ... conn.request("GET", "/") + ... ... 
+ """ + + @contextmanager + def _connector_context(): + # type: () -> Generator[Union[compat.httplib.HTTPConnection, compat.httplib.HTTPSConnection], None, None] + connection = get_connection(url, **kwargs) + yield connection + connection.close() + + return _connector_context + + +def w3c_get_dd_list_member(context): + # Context -> str + tags = [] + if context.sampling_priority is not None: + tags.append("{}:{}".format(W3C_TRACESTATE_SAMPLING_PRIORITY_KEY, context.sampling_priority)) + if context.dd_origin: + tags.append( + "{}:{}".format( + W3C_TRACESTATE_ORIGIN_KEY, + w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", context.dd_origin)), + ) + ) + + sampling_decision = context._meta.get(SAMPLING_DECISION_TRACE_TAG_KEY) + if sampling_decision: + tags.append( + "t.dm:{}".format((w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", sampling_decision)))) + ) + # since this can change, we need to grab the value off the current span + usr_id = context._meta.get(USER_ID_KEY) + if usr_id: + tags.append("t.usr.id:{}".format(w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", usr_id)))) + + current_tags_len = sum(len(i) for i in tags) + for k, v in _get_metas_to_propagate(context): + if k not in [SAMPLING_DECISION_TRACE_TAG_KEY, USER_ID_KEY]: + # for key replace ",", "=", and characters outside the ASCII range 0x20 to 0x7E + # for value replace ",", ";", "~" and characters outside the ASCII range 0x20 to 0x7E + k = k.replace("_dd.p.", "t.") + next_tag = "{}:{}".format( + w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_KEY, "_", k)), + w3c_encode_tag((_W3C_TRACESTATE_INVALID_CHARS_REGEX_VALUE, "_", v)), + ) + # we need to keep the total length under 256 char + potential_current_tags_len = current_tags_len + len(next_tag) + if not potential_current_tags_len > 256: + tags.append(next_tag) + current_tags_len += len(next_tag) + else: + log.debug("tracestate would exceed 256 char limit with tag: %s. Tag will not be added.", next_tag) + + return ";".join(tags) + + +@cached() +def w3c_encode_tag(args): + # type: (Tuple[Pattern, str, str]) -> str + pattern, replacement, tag_val = args + tag_val = pattern.sub(replacement, tag_val) + # replace = with ~ if it wasn't already replaced by the regex + return tag_val.replace("=", "~") + + +class Response(object): + """ + Custom API Response object to represent a response from calling the API. + + We do this to ensure we know expected properties will exist, and so we + can call `resp.read()` and load the body once into an instance before we + close the HTTPConnection used for the request. + """ + + __slots__ = ["status", "body", "reason", "msg"] + + def __init__(self, status=None, body=None, reason=None, msg=None): + self.status = status + self.body = body + self.reason = reason + self.msg = msg + + @classmethod + def from_http_response(cls, resp): + """ + Build a ``Response`` from the provided ``HTTPResponse`` object. + + This function will call `.read()` to consume the body of the ``HTTPResponse`` object. 
+ + :param resp: ``HTTPResponse`` object to build the ``Response`` from + :type resp: ``HTTPResponse`` + :rtype: ``Response`` + :returns: A new ``Response`` + """ + return cls( + status=resp.status, + body=resp.read(), + reason=getattr(resp, "reason", None), + msg=getattr(resp, "msg", None), + ) + + def get_json(self): + """Helper to parse the body of this request as JSON""" + try: + body = self.body + if not body: + log.debug("Empty reply from Datadog Agent, %r", self) + return + + if not isinstance(body, str) and hasattr(body, "decode"): + body = body.decode("utf-8") + + if hasattr(body, "startswith") and body.startswith("OK"): + # This typically happens when using a priority-sampling enabled + # library with an outdated agent. It still works, but priority sampling + # will probably send too many traces, so the next step is to upgrade agent. + log.debug( + "Cannot parse Datadog Agent response. " + "This occurs because Datadog agent is out of date or DATADOG_PRIORITY_SAMPLING=false is set" + ) + return + + return loads(body) + except (ValueError, TypeError): + log.debug("Unable to parse Datadog Agent JSON response: %r", body, exc_info=True) + + def __repr__(self): + return "{0}(status={1!r}, body={2!r}, reason={3!r}, msg={4!r})".format( + self.__class__.__name__, + self.status, + self.body, + self.reason, + self.msg, + ) + + +def get_connection(url, timeout=DEFAULT_TIMEOUT): + # type: (str, float) -> ConnectionType + """Return an HTTP connection to the given URL.""" + parsed = verify_url(url) + hostname = parsed.hostname or "" + path = parsed.path or "/" + + if parsed.scheme == "https": + return HTTPSConnection.with_base_path(hostname, parsed.port, base_path=path, timeout=timeout) + elif parsed.scheme == "http": + return HTTPConnection.with_base_path(hostname, parsed.port, base_path=path, timeout=timeout) + elif parsed.scheme == "unix": + return UDSHTTPConnection(path, hostname, parsed.port, timeout=timeout) + + raise ValueError("Unsupported protocol '%s'" % parsed.scheme) + + +def verify_url(url): + # type: (str) -> parse.ParseResult + """Validates that the given URL can be used as an intake + Returns a parse.ParseResult. + Raises a ``ValueError`` if the URL cannot be used as an intake + """ + parsed = parse.urlparse(url) + schemes = ("http", "https", "unix") + if parsed.scheme not in schemes: + raise ValueError( + "Unsupported protocol '%s' in intake URL '%s'. 
Must be one of: %s" + % (parsed.scheme, url, ", ".join(schemes)) + ) + elif parsed.scheme in ["http", "https"] and not parsed.hostname: + raise ValueError("Invalid hostname in intake URL '%s'" % url) + elif parsed.scheme == "unix" and not parsed.path: + raise ValueError("Invalid file path in intake URL '%s'" % url) + + return parsed + + +_HTML_BLOCKED_TEMPLATE_CACHE = None # type: Optional[str] +_JSON_BLOCKED_TEMPLATE_CACHE = None # type: Optional[str] + + +def _get_blocked_template(accept_header_value): + # type: (str) -> str + + global _HTML_BLOCKED_TEMPLATE_CACHE + global _JSON_BLOCKED_TEMPLATE_CACHE + + need_html_template = False + + if accept_header_value and "text/html" in accept_header_value.lower(): + need_html_template = True + + if need_html_template and _HTML_BLOCKED_TEMPLATE_CACHE: + return _HTML_BLOCKED_TEMPLATE_CACHE + + if not need_html_template and _JSON_BLOCKED_TEMPLATE_CACHE: + return _JSON_BLOCKED_TEMPLATE_CACHE + + if need_html_template: + template_path = os.getenv("DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML") + else: + template_path = os.getenv("DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON") + + if template_path: + try: + with open(template_path, "r") as template_file: + content = template_file.read() + + if need_html_template: + _HTML_BLOCKED_TEMPLATE_CACHE = content + else: + _JSON_BLOCKED_TEMPLATE_CACHE = content + return content + except (OSError, IOError) as e: # noqa: B014 + log.warning("Could not load custom template at %s: %s", template_path, str(e)) # noqa: G200 + + # No user-defined template at this point + if need_html_template: + _HTML_BLOCKED_TEMPLATE_CACHE = BLOCKED_RESPONSE_HTML + return BLOCKED_RESPONSE_HTML + + _JSON_BLOCKED_TEMPLATE_CACHE = BLOCKED_RESPONSE_JSON + return BLOCKED_RESPONSE_JSON + + +def parse_form_params(body: str) -> Dict[str, Union[str, List[str]]]: + """Return a dict of form data after HTTP form parsing""" + body_params = body.replace("+", " ") + req_body: Dict[str, Union[str, List[str]]] = dict() + for item in body_params.split("&"): + key, equal, val = item.partition("=") + if equal: + key = parse.unquote(key) + val = parse.unquote(val) + prev_value = req_body.get(key, None) + if prev_value is None: + req_body[key] = val + elif isinstance(prev_value, list): + prev_value.append(val) + else: + req_body[key] = [prev_value, val] + return req_body + + +def parse_form_multipart(body: str, headers: Optional[Dict] = None) -> Dict[str, Any]: + """Return a dict of form data after HTTP form parsing""" + import email + import json + from urllib.parse import parse_qs + + import xmltodict + + def parse_message(msg): + if msg.is_multipart(): + res = { + part.get_param("name", failobj=part.get_filename(), header="content-disposition"): parse_message(part) + for part in msg.get_payload() + } + else: + content_type = msg.get("Content-Type") + if content_type in ("application/json", "text/json"): + res = json.loads(msg.get_payload()) + elif content_type in ("application/xml", "text/xml"): + res = xmltodict.parse(msg.get_payload()) + elif content_type in ("application/x-url-encoded", "application/x-www-form-urlencoded"): + res = parse_qs(msg.get_payload()) + elif content_type in ("text/plain", None): + res = msg.get_payload() + else: + res = "" + + return res + + if headers is not None: + content_type = headers.get("Content-Type") or headers.get("content-type") + msg = email.message_from_string("MIME-Version: 1.0\nContent-Type: %s\n%s" % (content_type, body)) + return parse_message(msg) + return {} + + +@dataclass +class FormData: + name: str + filename: str + 
data: str + content_type: str + + +def multipart(parts: List[FormData]) -> Tuple[bytes, dict]: + msg = MIMEMultipart("form-data") + del msg["MIME-Version"] + + for part in parts: + app = MIMEApplication(part.data, part.content_type, lambda _: _) + app.add_header("Content-Disposition", "form-data", name=part.name, filename=part.filename) + del app["MIME-Version"] + msg.attach(app) + + # Split headers and body + headers, _, body = msg.as_string().partition("\n\n") + + return body.encode("utf-8"), dict(_.split(": ") for _ in headers.splitlines()) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/importlib.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/importlib.py new file mode 100644 index 0000000..7b1f94d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/importlib.py @@ -0,0 +1,48 @@ +from __future__ import absolute_import + +from importlib import import_module +from types import TracebackType +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional +from typing import Type + + +class require_modules(object): + """Context manager to check the availability of required modules.""" + + def __init__(self, modules): + # type: (List[str]) -> None + self._missing_modules = [] + for module in modules: + try: + import_module(module) + except ImportError: + self._missing_modules.append(module) + + def __enter__(self): + # type: () -> List[str] + return self._missing_modules + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + return + + +def func_name(f): + # type: (Callable[..., Any]) -> str + """Return a human readable version of the function's name.""" + if hasattr(f, "__module__"): + return "%s.%s" % (f.__module__, getattr(f, "__name__", f.__class__.__name__)) + return getattr(f, "__name__", f.__class__.__name__) + + +def module_name(instance): + # type: (Any) -> str + """Return the instance module name.""" + return instance.__class__.__module__.split(".")[0] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/inspection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/inspection.py new file mode 100644 index 0000000..c1bdee8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/inspection.py @@ -0,0 +1,121 @@ +from collections import deque +from functools import partial +from functools import singledispatch +from pathlib import Path +from types import CodeType +from types import FunctionType +from typing import Set +from typing import cast + +from bytecode import Bytecode + +from ddtrace.internal.compat import PYTHON_VERSION_INFO as PY +from ddtrace.internal.safety import _isinstance + + +@singledispatch +def linenos(_) -> Set[int]: + raise NotImplementedError() + + +@linenos.register +def _(code: CodeType) -> Set[int]: + """Get the line numbers of a function.""" + if PY >= (3, 10): + return {_ for _ in (_[2] for _ in code.co_lines()) if _ is not None} - {code.co_firstlineno} + + return { + _ for _ in (instr.lineno for instr in Bytecode.from_code(code) if hasattr(instr, "lineno")) if _ is not None + } + + +@linenos.register +def _(f: FunctionType) -> Set[int]: + return linenos(f.__code__) + + +def undecorated(f: FunctionType, name: str, path: Path) -> FunctionType: + # Find the original function object from a decorated function. 
We use the + # expected function name to guide the search and pick the correct function. + # The recursion is needed in case of multiple decorators. We make it BFS + # to find the function as soon as possible. + + def match(g): + return g.__code__.co_name == name and Path(g.__code__.co_filename).resolve() == path + + if _isinstance(f, FunctionType) and match(f): + return f + + seen_functions = {f} + q = deque([f]) # FIFO: use popleft and append + + while q: + g = q.popleft() + + # Look for a wrapped function. These attributes are generally used by + # the decorators provided by the standard library (e.g. partial) + for attr in ("__wrapped__", "func"): + try: + wrapped = object.__getattribute__(g, attr) + if _isinstance(wrapped, FunctionType) and wrapped not in seen_functions: + if match(wrapped): + return wrapped + q.append(wrapped) + seen_functions.add(wrapped) + except AttributeError: + pass + + # A partial object is a common decorator. The function can either be the + # curried function, or it can appear as one of the arguments (e.g. the + # implementation of the wraps decorator). + if _isinstance(g, partial): + p = cast(partial, g) + if match(p.func): + return cast(FunctionType, p.func) + for arg in p.args: + if _isinstance(arg, FunctionType) and arg not in seen_functions: + if match(arg): + return arg + q.append(arg) + seen_functions.add(arg) + for arg in p.keywords.values(): + if _isinstance(arg, FunctionType) and arg not in seen_functions: + if match(arg): + return arg + q.append(arg) + seen_functions.add(arg) + + # Look for a closure (function decoration) + if _isinstance(g, FunctionType): + for c in (_.cell_contents for _ in (g.__closure__ or []) if _isinstance(_.cell_contents, FunctionType)): + if c not in seen_functions: + if match(c): + return c + q.append(c) + seen_functions.add(c) + + # Look for a function attribute (method decoration) + # DEV: We don't recurse over arbitrary objects. We stop at the first + # depth level. 
+ try: + for v in object.__getattribute__(g, "__dict__").values(): + if _isinstance(v, FunctionType) and v not in seen_functions and match(v): + return v + except AttributeError: + # Maybe we have slots + try: + for v in (object.__getattribute__(g, _) for _ in object.__getattribute__(g, "__slots__")): + if _isinstance(v, FunctionType) and v not in seen_functions and match(v): + return v + except AttributeError: + pass + + # Last resort + try: + for v in (object.__getattribute__(g, a) for a in object.__dir__(g)): + if _isinstance(v, FunctionType) and v not in seen_functions and match(v): + return v + except AttributeError: + pass + + return f diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/retry.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/retry.py new file mode 100644 index 0000000..00db74a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/retry.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import + +from functools import wraps +from itertools import repeat +import random +from time import sleep +import typing as t + + +class RetryError(Exception): + pass + + +def retry( + after: t.Union[int, float, t.Iterable[t.Union[int, float]]], + until: t.Callable[[t.Any], bool] = lambda result: result is None, + initial_wait: float = 0, +) -> t.Callable: + def retry_decorator(f): + @wraps(f) + def retry_wrapped(*args, **kwargs): + sleep(initial_wait) + after_iter = repeat(after) if isinstance(after, (int, float)) else after + exception = None + + for s in after_iter: + try: + result = f(*args, **kwargs) + except Exception as e: + exception = e + result = e + + if until(result): + return result + + sleep(s) + + # Last chance to succeed + try: + result = f(*args, **kwargs) + except Exception as e: + exception = e + result = e + + if until(result): + return result + + if exception is not None: + raise exception + + raise RetryError(result) + + return retry_wrapped + + return retry_decorator + + +def fibonacci_backoff_with_jitter(attempts, initial_wait=1.0, until=lambda result: result is None): + # type: (int, float, t.Callable[[t.Any], bool]) -> t.Callable + return retry( + after=[random.uniform(0, initial_wait * (1.618**i)) for i in range(attempts - 1)], # nosec + until=until, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/signals.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/signals.py new file mode 100644 index 0000000..8cd3f1e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/signals.py @@ -0,0 +1,27 @@ +import signal + + +def handle_signal(sig, f): + """ + Returns a signal of type `sig` with function `f`, if there are + no previously defined signals. + + Else, wraps the given signal with the previously defined one, + so no signals are overridden. + """ + old_signal = signal.getsignal(sig) + + def wrap_signals(*args, **kwargs): + if old_signal is not None: + old_signal(*args, **kwargs) + f(*args, **kwargs) + + # Return the incoming signal if any of the following cases happens: + # - old signal does not exist, + # - old signal is the same as the incoming, or + # - old signal is our wrapper. + # This avoids multiple signal calling and infinite wrapping. 
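A small usage sketch of the `retry` helpers defined in retry.py above (the counter and wait times are made up for illustration):

```python
calls = {"n": 0}

# Succeed when the result is True; otherwise sleep and retry
# (three waits from `after`, then one final attempt).
@retry(after=[0.01, 0.02, 0.04], until=lambda result: result is True)
def flaky():
    calls["n"] += 1
    return calls["n"] >= 3  # succeeds on the third call

assert flaky() is True and calls["n"] == 3

# fibonacci_backoff_with_jitter builds the same decorator with jittered,
# Fibonacci-style growing waits for a fixed number of attempts.
ping = fibonacci_backoff_with_jitter(attempts=3)(lambda: None)
ping()  # default `until` treats a None result as success
```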
+ if not callable(old_signal) or old_signal == f or old_signal == wrap_signals: + return signal.signal(sig, f) + + return signal.signal(sig, wrap_signals) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/time.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/time.py new file mode 100644 index 0000000..1035e48 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/time.py @@ -0,0 +1,140 @@ +from datetime import datetime +from types import TracebackType +from typing import Optional +from typing import Type # noqa:F401 + +from ddtrace.internal import compat +from ddtrace.internal.logger import get_logger + + +log = get_logger(__name__) + + +def parse_isoformat(date): + # type: (str) -> Optional[datetime] + if date.endswith("Z"): + try: + return datetime.strptime(date, "%Y-%m-%dT%H:%M:%S.%fZ") + except ValueError: + return datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") + try: + return datetime.fromisoformat(date) + except (ValueError, IndexError): + log.debug("unsupported isoformat: %s", date) + return None + + +class StopWatch(object): + """A simple timer/stopwatch helper class. + + Not thread-safe (when a single watch is mutated by multiple threads at + the same time). Thread-safe when used by a single thread (not shared) or + when operations are performed in a thread-safe manner on these objects by + wrapping those operations with locks. + + It will use the `monotonic`_ pypi library to find an appropriate + monotonically increasing time providing function (which typically varies + depending on operating system and Python version). + + .. _monotonic: https://pypi.python.org/pypi/monotonic/ + """ + + def __init__(self): + # type: () -> None + self._started_at = None # type: Optional[float] + self._stopped_at = None # type: Optional[float] + + def start(self): + # type: () -> StopWatch + """Starts the watch.""" + self._started_at = compat.monotonic() + return self + + def elapsed(self): + # type: () -> float + """Get how many seconds have elapsed. 
+ + :return: Number of seconds elapsed + :rtype: float + """ + # NOTE: datetime.timedelta does not support nanoseconds, so keep a float here + if self._started_at is None: + raise RuntimeError("Can not get the elapsed time of a stopwatch" " if it has not been started/stopped") + if self._stopped_at is None: + now = compat.monotonic() + else: + now = self._stopped_at + return now - self._started_at + + def __enter__(self): + # type: () -> StopWatch + """Starts the watch.""" + self.start() + return self + + def __exit__( + self, tp: Optional[Type[BaseException]], value: Optional[BaseException], traceback: Optional[TracebackType] + ) -> None: + """Stops the watch.""" + self.stop() + + def stop(self): + # type: () -> StopWatch + """Stops the watch.""" + if self._started_at is None: + raise RuntimeError("Can not stop a stopwatch that has not been" " started") + self._stopped_at = compat.monotonic() + return self + + +class HourGlass(object): + """An implementation of an hourglass.""" + + def __init__(self, duration): + # type: (float) -> None + t = compat.monotonic() + + self._duration = duration + self._started_at = t - duration + self._end_at = t + + self.trickling = self._trickled # type: ignore[assignment] + + def turn(self): + # type: () -> None + """Turn the hourglass.""" + t = compat.monotonic() + top_0 = self._end_at - self._started_at + bottom = self._duration - top_0 + min(t - self._started_at, top_0) + + self._started_at = t + self._end_at = t + bottom + + self.trickling = self._trickling # type: ignore[assignment] + + def trickling(self): + # type: () -> bool + """Check if sand is still trickling.""" + return False + + def _trickled(self): + # type: () -> bool + return False + + def _trickling(self): + # type: () -> bool + if compat.monotonic() < self._end_at: + return True + + # No longer trickling, so we change state + self.trickling = self._trickled # type: ignore[assignment] + + return False + + def __enter__(self): + # type: () -> HourGlass + self.turn() + return self + + def __exit__(self, tp, value, traceback): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/version.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/version.py new file mode 100644 index 0000000..6a05b2b --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/version.py @@ -0,0 +1,79 @@ +import typing # noqa:F401 +from typing import Optional # noqa:F401 + +import ddtrace.vendor.packaging.version as packaging_version +from ddtrace.version import get_version + + +def parse_version(version): + # type: (str) -> typing.Tuple[int, int, int] + """Convert a version string to a tuple of (major, minor, micro) + + Examples:: + + 1.2.3 -> (1, 2, 3) + 1.2 -> (1, 2, 0) + 1 -> (1, 0, 0) + 1.0.0-beta1 -> (1, 0, 0) + 2020.6.19 -> (2020, 6, 19) + malformed -> (0, 0, 0) + 10.5.0 extra -> (10, 5, 0) + """ + # If we have any spaces/extra text, grab the first part + # "1.0.0 beta1" -> "1.0.0" + # "1.0.0" -> "1.0.0" + # DEV: Versions with spaces will get converted to LegacyVersion, we do this splitting + # to maximize the chances of getting a Version as a parsing result + if " " in version: + version = version.split()[0] + + # version() will not raise an exception, if the version if malformed instead + # we will end up with a LegacyVersion + + try: + parsed = packaging_version.parse(version) + except packaging_version.InvalidVersion: + # packaging>=22.0 raises an InvalidVersion instead of returning a LegacyVersion + return (0, 0, 0) + + # LegacyVersion.release will always be 
`None` + if not parsed.release: + return (0, 0, 0) + + # Version.release was added in 17.1 + # packaging >= 20.0 has `Version.{major,minor,micro}`, use the following + # to support older versions of the library + # https://github.com/pypa/packaging/blob/47d40f640fddb7c97b01315419b6a1421d2dedbb/packaging/version.py#L404-L417 + return ( + parsed.release[0] if len(parsed.release) >= 1 else 0, + parsed.release[1] if len(parsed.release) >= 2 else 0, + parsed.release[2] if len(parsed.release) >= 3 else 0, + ) + + +def _pep440_to_semver(version=None): + # type: (Optional[str]) -> str + # The library uses a PEP 440-compliant (https://peps.python.org/pep-0440/) versioning + # scheme, but the Agent spec requires that we use a SemVer-compliant version. + # + # However, we may have versions like: + # + # - 1.7.1.dev3+gf258c7d9 + # - 1.7.1rc2.dev3+gf258c7d9 + # + # Which are not Semver-compliant. + # + # The easiest fix is to replace the first occurrence of "rc" or + # ".dev" with "-rc" or "-dev" to make them compliant. + # + # Other than X.Y.Z, we are allowed `-+` + # https://semver.org/#backusnaur-form-grammar-for-valid-semver-versions + # + # e.g. 1.7.1-rc2.dev3+gf258c7d9 is valid + + tracer_version = version or get_version() + if "rc" in tracer_version and "-rc" not in tracer_version: + tracer_version = tracer_version.replace("rc", "-rc", 1) + elif ".dev" in tracer_version: + tracer_version = tracer_version.replace(".dev", "-dev", 1) + return tracer_version diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/wrappers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/wrappers.py new file mode 100644 index 0000000..7f608c7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/utils/wrappers.py @@ -0,0 +1,23 @@ +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 +from typing import TypeVar # noqa:F401 + +from ddtrace.vendor import wrapt + + +F = TypeVar("F", bound=Callable[..., Any]) + + +def iswrapped(obj, attr=None): + # type: (Any, Optional[str]) -> bool + """Returns whether an attribute is wrapped or not.""" + if attr is not None: + obj = getattr(obj, attr, None) + return hasattr(obj, "__wrapped__") and isinstance(obj, wrapt.ObjectProxy) + + +def unwrap(obj, attr): + # type: (Any, str) -> None + f = getattr(obj, attr) + setattr(obj, attr, f.__wrapped__) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uwsgi.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uwsgi.py new file mode 100644 index 0000000..a3b7b55 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/uwsgi.py @@ -0,0 +1,69 @@ +from __future__ import absolute_import + +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 + + +class uWSGIConfigError(Exception): + """uWSGI configuration error. + + This is raised when uwsgi configuration is incompatible with the library. + """ + + +class uWSGIMasterProcess(Exception): + """The process is uWSGI master process.""" + + +def check_uwsgi(worker_callback=None, atexit=None): + # type: (Optional[Callable], Optional[Callable]) -> None + """Check whetever uwsgi is running and what needs to be done. + + :param worker_callback: Callback function to call in uWSGI worker processes. 
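The replacement rules described in the `_pep440_to_semver` comment above reduce to a couple of string substitutions; illustrative conversions:

```python
# "rc" gains a leading dash, ".dev" becomes "-dev"; plain versions pass through.
assert _pep440_to_semver("1.7.1rc2.dev3+gf258c7d9") == "1.7.1-rc2.dev3+gf258c7d9"
assert _pep440_to_semver("1.7.1.dev3+gf258c7d9") == "1.7.1-dev3+gf258c7d9"
assert _pep440_to_semver("2.9.0") == "2.9.0"
```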
+ """ + try: + import uwsgi + except ImportError: + return + + if not (uwsgi.opt.get("enable-threads") or int(uwsgi.opt.get("threads") or 0)): + msg = "enable-threads option must be set to true, or a positive number of threads must be set" + raise uWSGIConfigError(msg) + + # If uwsgi has more than one process, it is running in prefork operational mode: uwsgi is going to fork multiple + # sub-processes. + # If lazy-app is enabled, then the app is loaded in each subprocess independently. This is fine. + # If it's not enabled, then the app will be loaded in the master process, and uwsgi will `fork()` abruptly, + # bypassing Python sanity checks. We need to handle this case properly. + # The proper way to handle that is to allow to register a callback function to run in the subprocess at their + # startup, and warn the caller that this is the master process and that (probably) nothing should be done. + if uwsgi.numproc > 1 and not uwsgi.opt.get("lazy-apps") and uwsgi.worker_id() == 0: + if not uwsgi.opt.get("master"): + # Having multiple workers without the master process is not supported: + # the postfork hooks are not available, so there's no way to start a different profiler in each + # worker + raise uWSGIConfigError("master option must be enabled when multiple processes are used") + + # Register the function to be called in child process at startup + if worker_callback is not None: + try: + import uwsgidecorators + except ImportError: + raise uWSGIConfigError("Running under uwsgi but uwsgidecorators cannot be imported") + uwsgidecorators.postfork(worker_callback) + + if atexit is not None: + original_atexit = getattr(uwsgi, "atexit", None) + + def _atexit(): + try: + atexit() + except Exception: + pass + + if original_atexit is not None: + original_atexit() + + uwsgi.atexit = _atexit + + raise uWSGIMasterProcess() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/__init__.py new file mode 100644 index 0000000..dae0c18 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/__init__.py @@ -0,0 +1,306 @@ +from types import FunctionType +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import cast # noqa:F401 + + +try: + from typing import Protocol # noqa:F401 +except ImportError: + from typing_extensions import Protocol # type: ignore[assignment] + +import bytecode as bc +from bytecode import Instr + +from ddtrace.internal.assembly import Assembly +from ddtrace.internal.compat import PYTHON_VERSION_INFO as PY +from ddtrace.internal.wrapping.asyncs import wrap_async +from ddtrace.internal.wrapping.generators import wrap_generator + + +class WrappedFunction(Protocol): + """A wrapped function.""" + + __dd_wrapped__ = None # type: Optional[FunctionType] + __dd_wrappers__ = None # type: Optional[Dict[Any, Any]] + + def __call__(self, *args, **kwargs): + pass + + +Wrapper = Callable[[FunctionType, Tuple[Any], Dict[str, Any]], Any] + + +def _add(lineno): + if PY >= (3, 11): + return Instr("BINARY_OP", bc.BinaryOp.ADD, lineno=lineno) + + return Instr("INPLACE_ADD", lineno=lineno) + + +UPDATE_MAP = Assembly() +if PY >= (3, 12): + UPDATE_MAP.parse( + r""" + copy 1 + load_method $update + load_fast {varkwargsname} + call 1 + pop_top + """ + ) + +elif PY >= (3, 11): + UPDATE_MAP.parse( + r""" + copy 1 + load_method $update + 
load_fast {varkwargsname} + precall 1 + call 1 + pop_top + """ + ) +else: + UPDATE_MAP.parse( + r""" + dup_top + load_attr $update + load_fast {varkwargsname} + call_function 1 + pop_top + """ + ) + + +CALL_RETURN = Assembly() +if PY >= (3, 12): + CALL_RETURN.parse( + r""" + call {arg} + return_value + """ + ) + +elif PY >= (3, 11): + CALL_RETURN.parse( + r""" + precall {arg} + call {arg} + return_value + """ + ) + +else: + CALL_RETURN.parse( + r""" + call_function {arg} + return_value + """ + ) + + +FIRSTLINENO_OFFSET = int(PY >= (3, 11)) + + +def wrap_bytecode(wrapper, wrapped): + # type: (Wrapper, FunctionType) -> bc.Bytecode + """Wrap a function with a wrapper function. + + The wrapper function expects the wrapped function as the first argument, + followed by the tuple of arguments and the dictionary of keyword arguments. + The nature of the wrapped function is also honored, meaning that a generator + function will return a generator function, and a coroutine function will + return a coroutine function, and so on. The signature is also preserved to + avoid breaking, e.g., usages of the ``inspect`` module. + """ + + code = wrapped.__code__ + lineno = code.co_firstlineno + FIRSTLINENO_OFFSET + varargs = bool(code.co_flags & bc.CompilerFlags.VARARGS) + varkwargs = bool(code.co_flags & bc.CompilerFlags.VARKEYWORDS) + nargs = code.co_argcount + argnames = code.co_varnames[:nargs] + try: + kwonlyargs = code.co_kwonlyargcount + except AttributeError: + kwonlyargs = 0 + kwonlyargnames = code.co_varnames[nargs : nargs + kwonlyargs] + varargsname = code.co_varnames[nargs + kwonlyargs] if varargs else None + varkwargsname = code.co_varnames[nargs + kwonlyargs + varargs] if varkwargs else None + + # Push the wrapper function that is to be called and the wrapped function to + # be passed as first argument. 
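The assemblies above build the prologue that `wrap_bytecode` injects; conceptually it behaves like the plain-Python sketch below for a non-generator, non-async function. This is a rough equivalent, not the literal bytecode, and `log_calls` is a made-up wrapper:

```python
def log_calls(func, args, kwargs):
    # A Wrapper receives the wrapped function, the tuple of positional
    # arguments and the dict of keyword arguments, and returns the result.
    print("calling with", args, kwargs)
    return func(*args, **kwargs)

# Roughly what the generated prologue does once a plain function f is wrapped:
#     return log_calls(original_f, (pos_args..., *varargs), {**kwonly, **varkwargs})
```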
+ instrs = [ + bc.Instr("LOAD_CONST", wrapper, lineno=lineno), + bc.Instr("LOAD_CONST", wrapped, lineno=lineno), + ] + if PY >= (3, 11): + # From insert_prefix_instructions + instrs[0:0] = [ + bc.Instr("RESUME", 0, lineno=lineno - 1), + bc.Instr("PUSH_NULL", lineno=lineno), + ] + + if code.co_cellvars: + instrs[0:0] = [Instr("MAKE_CELL", bc.CellVar(_), lineno=lineno) for _ in code.co_cellvars] + + if code.co_freevars: + instrs.insert(0, bc.Instr("COPY_FREE_VARS", len(code.co_freevars), lineno=lineno)) + + # Build the tuple of all the positional arguments + if nargs: + instrs.extend( + [ + Instr("LOAD_DEREF", bc.CellVar(argname), lineno=lineno) + if PY >= (3, 11) and argname in code.co_cellvars + else bc.Instr("LOAD_FAST", argname, lineno=lineno) + for argname in argnames + ] + ) + instrs.append(bc.Instr("BUILD_TUPLE", nargs, lineno=lineno)) + if varargs: + instrs.extend( + [ + bc.Instr("LOAD_FAST", varargsname, lineno=lineno), + _add(lineno), + ] + ) + elif varargs: + instrs.append(bc.Instr("LOAD_FAST", varargsname, lineno=lineno)) + else: + instrs.append(bc.Instr("BUILD_TUPLE", 0, lineno=lineno)) + + # Prepare the keyword arguments + if kwonlyargs: + for arg in kwonlyargnames: + instrs.extend( + [ + bc.Instr("LOAD_CONST", arg, lineno=lineno), + bc.Instr("LOAD_FAST", arg, lineno=lineno), + ] + ) + instrs.append(bc.Instr("BUILD_MAP", kwonlyargs, lineno=lineno)) + if varkwargs: + instrs.extend(UPDATE_MAP.bind({"varkwargsname": varkwargsname}, lineno=lineno)) + + elif varkwargs: + instrs.append(bc.Instr("LOAD_FAST", varkwargsname, lineno=lineno)) + + else: + instrs.append(bc.Instr("BUILD_MAP", 0, lineno=lineno)) + + # Call the wrapper function with the wrapped function, the positional and + # keyword arguments, and return the result. + instrs.extend(CALL_RETURN.bind({"arg": 3}, lineno=lineno)) + + # If the function has special flags set, like the generator, async generator + # or coroutine, inject unraveling code before the return opcode. + if bc.CompilerFlags.GENERATOR & code.co_flags and not (bc.CompilerFlags.COROUTINE & code.co_flags): + wrap_generator(instrs, code, lineno) + else: + wrap_async(instrs, code, lineno) + + return bc.Bytecode(instrs) + + +def wrap(f, wrapper): + # type: (FunctionType, Wrapper) -> WrappedFunction + """Wrap a function with a wrapper. + + The wrapper expects the function as first argument, followed by the tuple + of positional arguments and the dict of keyword arguments. + + Note that this changes the behavior of the original function with the + wrapper function, instead of creating a new function object. 
+ """ + wrapped = FunctionType( + f.__code__, + f.__globals__, + "", + f.__defaults__, + f.__closure__, + ) + try: + wf = cast(WrappedFunction, f) + cast(WrappedFunction, wrapped).__dd_wrapped__ = cast(FunctionType, wf.__dd_wrapped__) + except AttributeError: + pass + + wrapped.__kwdefaults__ = f.__kwdefaults__ + + code = wrap_bytecode(wrapper, wrapped) + code.freevars = f.__code__.co_freevars + if PY >= (3, 11): + code.cellvars = f.__code__.co_cellvars + code.name = f.__code__.co_name + code.filename = f.__code__.co_filename + code.flags = f.__code__.co_flags + code.argcount = f.__code__.co_argcount + try: + code.posonlyargcount = f.__code__.co_posonlyargcount + except AttributeError: + pass + + nargs = code.argcount + try: + code.kwonlyargcount = f.__code__.co_kwonlyargcount + nargs += code.kwonlyargcount + except AttributeError: + pass + nargs += bool(code.flags & bc.CompilerFlags.VARARGS) + bool(code.flags & bc.CompilerFlags.VARKEYWORDS) + code.argnames = f.__code__.co_varnames[:nargs] + + f.__code__ = code.to_code() + + # DEV: Multiple wrapping is implemented as a singly-linked list via the + # __dd_wrapped__ attribute. + wf = cast(WrappedFunction, f) + wf.__dd_wrapped__ = wrapped + + return wf + + +def unwrap(wf, wrapper): + # type: (WrappedFunction, Wrapper) -> FunctionType + """Unwrap a wrapped function. + + This is the reverse of :func:`wrap`. In case of multiple wrapping layers, + this will unwrap the one that uses ``wrapper``. If the function was not + wrapped with ``wrapper``, it will return the first argument. + """ + # DEV: Multiple wrapping layers are singly-linked via __dd_wrapped__. When + # we find the layer that needs to be removed we also have to ensure that we + # update the link at the deletion site if there is a non-empty tail. + try: + inner = cast(FunctionType, wf.__dd_wrapped__) + + # Sanity check + assert inner.__name__ == "", "Wrapper has wrapped function" # nosec + + if wrapper not in cast(FunctionType, wf).__code__.co_consts: + # This is not the correct wrapping layer. Try with the next one. + inner_wf = cast(WrappedFunction, inner) + return unwrap(inner_wf, wrapper) + + # Remove the current wrapping layer by moving the next one over the + # current one. + f = cast(FunctionType, wf) + f.__code__ = inner.__code__ + try: + # Update the link to the next layer. + inner_wf = cast(WrappedFunction, inner) + wf.__dd_wrapped__ = inner_wf.__dd_wrapped__ # type: ignore[assignment] + except AttributeError: + # No more wrapping layers. Restore the original function by removing + # this extra attribute. + del wf.__dd_wrapped__ + + return f + + except AttributeError: + # The function is not wrapped so we return it as is. 
+ return cast(FunctionType, wf) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/asyncs.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/asyncs.py new file mode 100644 index 0000000..3f39165 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/asyncs.py @@ -0,0 +1,646 @@ +import bytecode as bc + +from ddtrace.internal.assembly import Assembly +from ddtrace.internal.compat import PYTHON_VERSION_INFO as PY + + +# ----------------------------------------------------------------------------- +# Coroutine and Async Generator Wrapping +# ----------------------------------------------------------------------------- +# DEV: The wrapping of async generators is roughly equivalent to +# +# __ddgen = wrapper(wrapped, args, kwargs) +# __ddgensend = __ddgen.asend +# try: +# value = await __ddgen.__anext__() +# while True: +# try: +# tosend = yield value +# except GeneratorExit: +# await __ddgen.aclose() +# except: +# value = await __ddgen.athrow(*sys.exc_info()) +# else: +# value = await __ddgensend(tosend) +# except StopAsyncIteration: +# return +# ----------------------------------------------------------------------------- + +COROUTINE_ASSEMBLY = Assembly() +ASYNC_GEN_ASSEMBLY = Assembly() +ASYNC_HEAD_ASSEMBLY = None + +if PY >= (3, 12): + ASYNC_HEAD_ASSEMBLY = Assembly() + ASYNC_HEAD_ASSEMBLY.parse( + r""" + return_generator + pop_top + """ + ) + + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable 0 + load_const None + + presend: + send @send + yield_value 2 + resume 3 + jump_backward_no_interrupt @presend + send: + end_send + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + try @stopiter + copy 1 + store_fast $__ddgen + load_attr (False, 'asend') + store_fast $__ddgensend + load_fast $__ddgen + load_attr (True, '__anext__') + call 0 + + loop: + get_awaitable 0 + load_const None + presend0: + send @send0 + tried + + try @genexit lasti + yield_value 3 + resume 3 + jump_backward_no_interrupt @loop + send0: + end_send + + yield: + call_intrinsic_1 asm.Intrinsic1Op.INTRINSIC_ASYNC_GEN_WRAP + yield_value 3 + resume 1 + push_null + swap 2 + load_fast $__ddgensend + swap 2 + call 1 + jump_backward @loop + tried + + genexit: + try @stopiter + push_exc_info + load_const GeneratorExit + check_exc_match + pop_jump_if_false @exc + pop_top + load_fast $__ddgen + load_attr (True, 'aclose') + call 0 + get_awaitable 0 + load_const None + + presend1: + send @send1 + yield_value 4 + resume 3 + jump_backward_no_interrupt @presend1 + send1: + end_send + pop_top + pop_except + return_const None + + exc: + pop_top + push_null + load_fast $__ddgen + load_attr (False, 'athrow') + push_null + load_const sys.exc_info + call 0 + call_function_ex 0 + get_awaitable 0 + load_const None + + presend2: + send @send2 + yield_value 4 + resume 3 + jump_backward_no_interrupt @presend2 + send2: + end_send + swap 2 + pop_except + jump_backward @yield + tried + + stopiter: + push_exc_info + load_const StopAsyncIteration + check_exc_match + pop_jump_if_false @propagate + pop_top + pop_except + return_const None + + propagate: + reraise 0 + """ + ) + + +elif PY >= (3, 11): + ASYNC_HEAD_ASSEMBLY = Assembly() + ASYNC_HEAD_ASSEMBLY.parse( + r""" + return_generator + pop_top + """ + ) + + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable 0 + load_const None + + presend: + send @send + yield_value + resume 3 + jump_backward_no_interrupt @presend + send: + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + try @stopiter + copy 1 + store_fast $__ddgen + load_attr $asend + store_fast 
$__ddgensend + load_fast $__ddgen + load_method $__anext__ + precall 0 + call 0 + + loop: + get_awaitable 0 + load_const None + presend0: + send @send0 + tried + + try @genexit lasti + yield_value + resume 3 + jump_backward_no_interrupt @loop + send0: + + yield: + async_gen_wrap + yield_value + resume 1 + push_null + swap 2 + load_fast $__ddgensend + swap 2 + precall 1 + call 1 + jump_backward @loop + tried + + genexit: + try @stopiter + push_exc_info + load_const GeneratorExit + check_exc_match + pop_jump_forward_if_false @exc + pop_top + load_fast $__ddgen + load_method $aclose + precall 0 + call 0 + get_awaitable 0 + load_const None + + presend1: + send @send1 + yield_value + resume 3 + jump_backward_no_interrupt @presend1 + send1: + pop_top + pop_except + load_const None + return_value + + exc: + pop_top + push_null + load_fast $__ddgen + load_attr $athrow + push_null + load_const sys.exc_info + precall 0 + call 0 + call_function_ex 0 + get_awaitable 0 + load_const None + + presend2: + send @send2 + yield_value + resume 3 + jump_backward_no_interrupt @presend2 + send2: + swap 2 + pop_except + jump_backward @yield + tried + + stopiter: + push_exc_info + load_const StopAsyncIteration + check_exc_match + pop_jump_forward_if_false @propagate + pop_top + pop_except + load_const None + return_value + + propagate: + reraise 0 + """ + ) + + +elif PY >= (3, 10): + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable + load_const None + yield_from + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $asend + store_fast $__ddgensend + load_fast $__ddgen + load_attr $__anext__ + call_function 0 + + loop: + get_awaitable + load_const None + yield_from + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + call_function 1 + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + jump_if_not_exc_match @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $aclose + call_function 0 + get_awaitable + load_const None + yield_from + pop_except + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $athrow + load_const sys.exc_info + call_function 0 + call_function_ex 0 + get_awaitable + load_const None + yield_from + rot_four + pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopAsyncIteration + jump_if_not_exc_match @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + reraise 0 + """ + ) + + +elif PY >= (3, 9): + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable + load_const None + yield_from + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $asend + store_fast $__ddgensend + load_fast $__ddgen + load_attr $__anext__ + call_function 0 + + loop: + get_awaitable + load_const None + yield_from + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + call_function 1 + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + jump_if_not_exc_match @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $aclose + call_function 0 + get_awaitable + load_const None + yield_from + pop_except + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $athrow + load_const sys.exc_info + call_function 0 + call_function_ex 0 + get_awaitable + load_const None + yield_from + rot_four + 
pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopAsyncIteration + jump_if_not_exc_match @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + reraise + """ + ) + +elif PY >= (3, 8): + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable + load_const None + yield_from + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $asend + store_fast $__ddgensend + load_fast $__ddgen + load_attr $__anext__ + call_function 0 + + loop: + get_awaitable + load_const None + yield_from + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + call_function 1 + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $aclose + call_function 0 + get_awaitable + load_const None + yield_from + pop_except + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $athrow + load_const sys.exc_info + call_function 0 + call_function_ex 0 + get_awaitable + load_const None + yield_from + rot_four + pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopAsyncIteration + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + end_finally + load_const None + return_value + """ + ) + +elif PY >= (3, 7): + COROUTINE_ASSEMBLY.parse( + r""" + get_awaitable + load_const None + yield_from + """ + ) + + ASYNC_GEN_ASSEMBLY.parse( + r""" + setup_except @stopiter + dup_top + store_fast $__ddgen + load_attr $asend + store_fast $__ddgensend + load_fast $__ddgen + load_attr $__anext__ + call_function 0 + + loop: + get_awaitable + load_const None + yield_from + + yield: + setup_except @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + call_function 1 + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $aclose + call_function 0 + get_awaitable + load_const None + yield_from + pop_except + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $athrow + load_const sys.exc_info + call_function 0 + call_function_ex 0 + get_awaitable + load_const None + yield_from + store_fast $__value + pop_except + load_fast $__value + jump_absolute @yield + + stopiter: + dup_top + load_const StopAsyncIteration + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + end_finally + load_const None + return_value + """ + ) + + +else: + msg = "No async wrapping support for Python %d.%d" % PY[:2] + raise RuntimeError(msg) + + +def wrap_async(instrs, code, lineno): + if (bc.CompilerFlags.ASYNC_GENERATOR | bc.CompilerFlags.COROUTINE) & code.co_flags: + if ASYNC_HEAD_ASSEMBLY is not None: + instrs[0:0] = ASYNC_HEAD_ASSEMBLY.bind() + + if bc.CompilerFlags.COROUTINE & code.co_flags: + # DEV: This is just + # >>> return await wrapper(wrapped, args, kwargs) + instrs[-1:-1] = COROUTINE_ASSEMBLY.bind() + + elif bc.CompilerFlags.ASYNC_GENERATOR & code.co_flags: + instrs[-1:] = ASYNC_GEN_ASSEMBLY.bind() diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/generators.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/generators.py new file mode 100644 index 0000000..9d12224 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/wrapping/generators.py @@ -0,0 +1,466 @@ +from ddtrace.internal.assembly import Assembly +from ddtrace.internal.compat import PYTHON_VERSION_INFO as PY + + +# ----------------------------------------------------------------------------- +# Generator Wrapping +# ----------------------------------------------------------------------------- +# DEV: This is roughly equivalent to +# +# __ddgen = wrapper(wrapped, args, kwargs) +# __ddgensend = __ddgen.send +# try: +# value = next(__ddgen) +# while True: +# try: +# tosend = yield value +# except GeneratorExit: +# return __ddgen.close() +# except: +# value = __ddgen.throw(*sys.exc_info()) +# else: +# value = __ddgensend(tosend) +# except StopIteration: +# return +# ----------------------------------------------------------------------------- +GENERATOR_ASSEMBLY = Assembly() +GENERATOR_HEAD_ASSEMBLY = None + +if PY >= (3, 12): + GENERATOR_HEAD_ASSEMBLY = Assembly() + GENERATOR_HEAD_ASSEMBLY.parse( + r""" + return_generator + pop_top + """ + ) + + GENERATOR_ASSEMBLY.parse( + r""" + try @stopiter + copy 1 + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + push_null + load_const next + load_fast $__ddgen + + loop: + call 1 + tried + + yield: + try @genexit lasti + yield_value 3 + resume 1 + push_null + swap 2 + load_fast $__ddgensend + swap 2 + jump_backward @loop + tried + + genexit: + try @stopiter + push_exc_info + load_const GeneratorExit + check_exc_match + pop_jump_if_false @exc + pop_top + load_fast $__ddgen + load_method $close + call 0 + swap 2 + pop_except + return_value + + exc: + pop_top + push_null + load_fast $__ddgen + load_attr $throw + push_null + load_const sys.exc_info + call 0 + call_function_ex 0 + swap 2 + pop_except + jump_backward @yield + tried + + stopiter: + push_exc_info + load_const StopIteration + check_exc_match + pop_jump_if_false @propagate + pop_top + pop_except + return_const None + + propagate: + reraise 0 + """ + ) + +elif PY >= (3, 11): + GENERATOR_HEAD_ASSEMBLY = Assembly() + GENERATOR_HEAD_ASSEMBLY.parse( + r""" + return_generator + pop_top + """ + ) + + GENERATOR_ASSEMBLY.parse( + r""" + try @stopiter + copy 1 + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + push_null + load_const next + load_fast $__ddgen + + loop: + precall 1 + call 1 + tried + + yield: + try @genexit lasti + yield_value + resume 1 + push_null + swap 2 + load_fast $__ddgensend + swap 2 + jump_backward @loop + tried + + genexit: + try @stopiter + push_exc_info + load_const GeneratorExit + check_exc_match + pop_jump_forward_if_false @exc + pop_top + load_fast $__ddgen + load_method $close + precall 0 + call 0 + swap 2 + pop_except + return_value + + exc: + pop_top + push_null + load_fast $__ddgen + load_attr $throw + push_null + load_const sys.exc_info + precall 0 + call 0 + call_function_ex 0 + swap 2 + pop_except + jump_backward @yield + tried + + stopiter: + push_exc_info + load_const StopIteration + check_exc_match + pop_jump_forward_if_false @propagate + pop_top + pop_except + load_const None + return_value + + propagate: + reraise 0 + """ + ) + +elif PY >= (3, 10): + GENERATOR_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + load_const next + load_fast $__ddgen + + 
loop: + call_function 1 + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + jump_if_not_exc_match @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $close + call_function 0 + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $throw + load_const sys.exc_info + call_function 0 + call_function_ex 0 + rot_four + pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopIteration + jump_if_not_exc_match @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + reraise 0 + """ + ) + +elif PY >= (3, 9): + GENERATOR_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + load_const next + load_fast $__ddgen + + loop: + call_function 1 + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + jump_if_not_exc_match @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $close + call_function 0 + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $throw + load_const sys.exc_info + call_function 0 + call_function_ex 0 + rot_four + pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopIteration + jump_if_not_exc_match @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + reraise + """ + ) + +elif PY >= (3, 8): + GENERATOR_ASSEMBLY.parse( + r""" + setup_finally @stopiter + dup_top + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + load_const next + load_fast $__ddgen + + loop: + call_function 1 + + yield: + setup_finally @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $close + call_function 0 + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $throw + load_const sys.exc_info + call_function 0 + call_function_ex 0 + rot_four + pop_except + jump_absolute @yield + + stopiter: + dup_top + load_const StopIteration + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + end_finally + load_const None + return_value + """ + ) + + +elif PY >= (3, 7): + GENERATOR_ASSEMBLY.parse( + r""" + setup_except @stopiter + dup_top + store_fast $__ddgen + load_attr $send + store_fast $__ddgensend + load_const next + load_fast $__ddgen + + loop: + call_function 1 + + yield: + setup_except @genexit + yield_value + pop_block + load_fast $__ddgensend + rot_two + jump_absolute @loop + + genexit: + dup_top + load_const GeneratorExit + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @exc + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $close + call_function 0 + return_value + + exc: + pop_top + pop_top + pop_top + pop_top + load_fast $__ddgen + load_attr $throw + load_const sys.exc_info + call_function 0 + call_function_ex 0 + store_fast $__value + pop_except + load_fast $__value + jump_absolute @yield + + stopiter: + dup_top + load_const 
StopIteration + compare_op asm.Compare.EXC_MATCH + pop_jump_if_false @propagate + pop_top + pop_top + pop_top + pop_except + load_const None + return_value + + propagate: + end_finally + load_const None + return_value + """ + ) + +else: + msg = "No generator wrapping support for Python %d.%d" % PY[:2] + raise RuntimeError(msg) + + +def wrap_generator(instrs, code, lineno): + if GENERATOR_HEAD_ASSEMBLY is not None: + instrs[0:0] = GENERATOR_HEAD_ASSEMBLY.bind(lineno=lineno) + + instrs[-1:] = GENERATOR_ASSEMBLY.bind(lineno=lineno) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/__init__.py new file mode 100644 index 0000000..3818832 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/__init__.py @@ -0,0 +1,9 @@ +from .writer import AgentResponse # noqa: I001,F401 +from .writer import AgentWriter # noqa:I001,F401 +from .writer import DEFAULT_SMA_WINDOW # noqa:F401 +from .writer import HTTPWriter # noqa:F401 +from .writer import LogWriter # noqa:F401 +from .writer import Response # noqa:F401 +from .writer import TraceWriter # noqa:F401 +from .writer import _human_size # noqa:F401 +from .writer_client import WriterClientBase # noqa:F401 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer.py new file mode 100644 index 0000000..e615286 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer.py @@ -0,0 +1,630 @@ +import abc +import binascii +from collections import defaultdict +import logging +import os +import sys +import threading +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import TextIO # noqa:F401 + +import ddtrace +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter +from ddtrace.settings import _config as config +from ddtrace.settings.asm import config as asm_config +from ddtrace.vendor.dogstatsd import DogStatsd + +from ...constants import KEEP_SPANS_RATE_KEY +from ...internal.utils.formats import parse_tags_str +from ...internal.utils.http import Response +from ...internal.utils.time import StopWatch +from .. import compat +from .. import periodic +from .. import service +from .._encoding import BufferFull +from .._encoding import BufferItemTooLarge +from ..agent import get_connection +from ..constants import _HTTPLIB_NO_TRACE_REQUEST +from ..encoding import JSONEncoderV2 +from ..logger import get_logger +from ..runtime import container +from ..serverless import in_azure_function_consumption_plan +from ..serverless import in_gcp_function +from ..sma import SimpleMovingAverage +from .writer_client import WRITER_CLIENTS +from .writer_client import AgentWriterClientV3 +from .writer_client import AgentWriterClientV4 +from .writer_client import WriterClientBase # noqa:F401 + + +if TYPE_CHECKING: # pragma: no cover + from typing import Callable # noqa:F401 + from typing import Tuple # noqa:F401 + + from ddtrace import Span # noqa:F401 + + from .agent import ConnectionType # noqa:F401 + + +log = get_logger(__name__) + +LOG_ERR_INTERVAL = 60 + + +class NoEncodableSpansError(Exception): + pass + + +# The window size should be chosen so that the look-back period is +# greater-equal to the agent API's timeout. 
Although most tracers have a +# 2s timeout, the java tracer has a 10s timeout, so we set the window size +# to 10 buckets of 1s duration. +DEFAULT_SMA_WINDOW = 10 + + +def _human_size(nbytes): + """Return a human-readable size.""" + i = 0 + suffixes = ["B", "KB", "MB", "GB", "TB"] + while nbytes >= 1000 and i < len(suffixes) - 1: + nbytes /= 1000.0 + i += 1 + f = ("%.2f" % nbytes).rstrip("0").rstrip(".") + return "%s%s" % (f, suffixes[i]) + + +class TraceWriter(metaclass=abc.ABCMeta): + @abc.abstractmethod + def recreate(self): + # type: () -> TraceWriter + pass + + @abc.abstractmethod + def stop(self, timeout=None): + # type: (Optional[float]) -> None + pass + + @abc.abstractmethod + def write(self, spans=None): + # type: (Optional[List[Span]]) -> None + pass + + @abc.abstractmethod + def flush_queue(self): + # type: () -> None + pass + + +class LogWriter(TraceWriter): + def __init__( + self, + out=sys.stdout, # type: TextIO + ): + # type: (...) -> None + self.encoder = JSONEncoderV2() + self.out = out + + def recreate(self): + # type: () -> LogWriter + """Create a new instance of :class:`LogWriter` using the same settings from this instance + + :rtype: :class:`LogWriter` + :returns: A new :class:`LogWriter` instance + """ + writer = self.__class__(out=self.out) + return writer + + def stop(self, timeout=None): + # type: (Optional[float]) -> None + return + + def write(self, spans=None): + # type: (Optional[List[Span]]) -> None + if not spans: + return + + encoded = self.encoder.encode_traces([spans]) + self.out.write(encoded + "\n") + self.out.flush() + + def flush_queue(self): + # type: () -> None + pass + + +class HTTPWriter(periodic.PeriodicService, TraceWriter): + """Writer to an arbitrary HTTP intake endpoint.""" + + RETRY_ATTEMPTS = 3 + HTTP_METHOD = "PUT" + STATSD_NAMESPACE = "tracer" + + def __init__( + self, + intake_url, # type: str + clients, # type: List[WriterClientBase] + processing_interval=None, # type: Optional[float] + # Match the payload size since there is no functionality + # to flush dynamically. + buffer_size=None, # type: Optional[int] + max_payload_size=None, # type: Optional[int] + timeout=None, # type: Optional[float] + dogstatsd=None, # type: Optional[DogStatsd] + sync_mode=False, # type: bool + reuse_connections=None, # type: Optional[bool] + headers=None, # type: Optional[Dict[str, str]] + ): + # type: (...) -> None + + if processing_interval is None: + processing_interval = config._trace_writer_interval_seconds + if timeout is None: + timeout = config._agent_timeout_seconds + super(HTTPWriter, self).__init__(interval=processing_interval) + self.intake_url = intake_url + self._buffer_size = buffer_size + self._max_payload_size = max_payload_size + self._headers = headers or {} + self._timeout = timeout + + self._clients = clients + self.dogstatsd = dogstatsd + self._metrics = defaultdict(int) # type: Dict[str, int] + self._drop_sma = SimpleMovingAverage(DEFAULT_SMA_WINDOW) + self._sync_mode = sync_mode + self._conn = None # type: Optional[ConnectionType] + # The connection has to be locked since there exists a race between + # the periodic thread of HTTPWriter and other threads that might + # force a flush with `flush_queue()`. 
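For reference, the `_human_size` helper above uses decimal (1000-based) units and trims trailing zeros; a few illustrative values:

```python
assert _human_size(512) == "512B"
assert _human_size(1000) == "1KB"
assert _human_size(123456) == "123.46KB"
assert _human_size(2_500_000) == "2.5MB"
```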
+ self._conn_lck = threading.RLock() # type: threading.RLock + + self._send_payload_with_backoff = fibonacci_backoff_with_jitter( # type ignore[assignment] + attempts=self.RETRY_ATTEMPTS, + initial_wait=0.618 * self.interval / (1.618**self.RETRY_ATTEMPTS) / 2, + until=lambda result: isinstance(result, Response), + )(self._send_payload) + + self._reuse_connections = ( + config._trace_writer_connection_reuse if reuse_connections is None else reuse_connections + ) + + def _intake_endpoint(self, client=None): + return "{}/{}".format(self._intake_url(client), client.ENDPOINT if client else self._endpoint) + + @property + def _endpoint(self): + return self._clients[0].ENDPOINT + + @property + def _encoder(self): + return self._clients[0].encoder + + def _intake_url(self, client=None): + if client and hasattr(client, "_intake_url"): + return client._intake_url + return self.intake_url + + def _metrics_dist(self, name, count=1, tags=None): + # type: (str, int, Optional[List]) -> None + if config.health_metrics_enabled and self.dogstatsd: + self.dogstatsd.distribution("datadog.%s.%s" % (self.STATSD_NAMESPACE, name), count, tags=tags) + + def _set_drop_rate(self): + # type: () -> None + accepted = self._metrics["accepted_traces"] + sent = self._metrics["sent_traces"] + encoded = sum([len(client.encoder) for client in self._clients]) + # The number of dropped traces is the number of accepted traces minus the number of traces in the encoder + # This calculation is a best effort. Due to race conditions it may result in a slight underestimate. + dropped = max(accepted - sent - encoded, 0) # dropped spans should never be negative + self._drop_sma.set(dropped, accepted) + self._metrics["sent_traces"] = 0 # reset sent traces for the next interval + self._metrics["accepted_traces"] = encoded # sets accepted traces to number of spans in encoders + + def _set_keep_rate(self, trace): + if trace: + trace[0].set_metric(KEEP_SPANS_RATE_KEY, 1.0 - self._drop_sma.get()) + + def _reset_connection(self): + # type: () -> None + with self._conn_lck: + if self._conn: + self._conn.close() + self._conn = None + + def _put(self, data, headers, client, no_trace): + # type: (bytes, Dict[str, str], WriterClientBase, bool) -> Response + sw = StopWatch() + sw.start() + with self._conn_lck: + if self._conn is None: + log.debug("creating new intake connection to %s with timeout %d", self.intake_url, self._timeout) + self._conn = get_connection(self._intake_url(client), self._timeout) + setattr(self._conn, _HTTPLIB_NO_TRACE_REQUEST, no_trace) + try: + log.debug("Sending request: %s %s %s", self.HTTP_METHOD, client.ENDPOINT, headers) + self._conn.request( + self.HTTP_METHOD, + client.ENDPOINT, + data, + headers, + ) + resp = compat.get_connection_response(self._conn) + log.debug("Got response: %s %s", resp.status, resp.reason) + t = sw.elapsed() + if t >= self.interval: + log_level = logging.WARNING + else: + log_level = logging.DEBUG + log.log(log_level, "sent %s in %.5fs to %s", _human_size(len(data)), t, self._intake_endpoint(client)) + except Exception: + # Always reset the connection when an exception occurs + self._reset_connection() + raise + else: + return Response.from_http_response(resp) + finally: + # Reset the connection if reusing connections is disabled. 
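The drop-rate bookkeeping in `_set_drop_rate` above comes down to a small piece of arithmetic; a worked example with made-up numbers:

```python
# Suppose that over one interval the writer accepted 100 traces,
# successfully sent 90, and 4 are still sitting in the encoders.
accepted, sent, encoded = 100, 90, 4
dropped = max(accepted - sent - encoded, 0)  # best-effort estimate
assert dropped == 6
# The SimpleMovingAverage then records 6 dropped out of 100 accepted for
# this window, and KEEP_SPANS_RATE_KEY is reported as 1.0 - drop rate.
```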
+ if not self._reuse_connections: + self._reset_connection() + + def _get_finalized_headers(self, count, client): + # type: (int, WriterClientBase) -> dict + headers = self._headers.copy() + headers.update({"Content-Type": client.encoder.content_type}) # type: ignore[attr-defined] + if hasattr(client, "_headers"): + headers.update(client._headers) + return headers + + def _send_payload(self, payload, count, client): + # type: (...) -> Response + headers = self._get_finalized_headers(count, client) + + self._metrics_dist("http.requests") + + response = self._put(payload, headers, client, no_trace=True) + + if response.status >= 400: + self._metrics_dist("http.errors", tags=["type:%s" % response.status]) + else: + self._metrics_dist("http.sent.bytes", len(payload)) + self._metrics["sent_traces"] += count + + if response.status not in (404, 415) and response.status >= 400: + msg = "failed to send traces to intake at %s: HTTP error status %s, reason %s" + log_args = ( + self._intake_endpoint(client), + response.status, + response.reason, + ) # type: Tuple[Any, Any, Any] + # Append the payload if requested + if config._trace_writer_log_err_payload: + msg += ", payload %s" + # If the payload is bytes then hex encode the value before logging + if isinstance(payload, bytes): + log_args += (binascii.hexlify(payload).decode(),) # type: ignore + else: + log_args += (payload,) # type: ignore + + log.error(msg, *log_args) + self._metrics_dist("http.dropped.bytes", len(payload)) + self._metrics_dist("http.dropped.traces", count) + return response + + def write(self, spans=None): + for client in self._clients: + self._write_with_client(client, spans=spans) + if self._sync_mode: + self.flush_queue() + + def _write_with_client(self, client, spans=None): + # type: (WriterClientBase, Optional[List[Span]]) -> None + if spans is None: + return + + if self._sync_mode is False: + # Start the HTTPWriter on first write. 
+ try: + if self.status != service.ServiceStatus.RUNNING: + self.start() + + except service.ServiceStatusError: + pass + + self._metrics_dist("writer.accepted.traces") + self._metrics["accepted_traces"] += 1 + self._set_keep_rate(spans) + + try: + client.encoder.put(spans) + except BufferItemTooLarge as e: + payload_size = e.args[0] + log.warning( + "trace (%db) larger than payload buffer item limit (%db), dropping", + payload_size, + client.encoder.max_item_size, + ) + self._metrics_dist("buffer.dropped.traces", 1, tags=["reason:t_too_big"]) + self._metrics_dist("buffer.dropped.bytes", payload_size, tags=["reason:t_too_big"]) + except BufferFull as e: + payload_size = e.args[0] + log.warning( + "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping (writer status: %s)", + len(client.encoder), + client.encoder.size, + client.encoder.max_size, + payload_size, + self.status.value, + ) + self._metrics_dist("buffer.dropped.traces", 1, tags=["reason:full"]) + self._metrics_dist("buffer.dropped.bytes", payload_size, tags=["reason:full"]) + except NoEncodableSpansError: + self._metrics_dist("buffer.dropped.traces", 1, tags=["reason:incompatible"]) + else: + self._metrics_dist("buffer.accepted.traces", 1) + self._metrics_dist("buffer.accepted.spans", len(spans)) + + def flush_queue(self, raise_exc=False): + try: + for client in self._clients: + self._flush_queue_with_client(client, raise_exc=raise_exc) + finally: + self._set_drop_rate() + + def _flush_queue_with_client(self, client, raise_exc=False): + # type: (WriterClientBase, bool) -> None + n_traces = len(client.encoder) + try: + encoded = client.encoder.encode() + if encoded is None: + return + except Exception: + log.error("failed to encode trace with encoder %r", client.encoder, exc_info=True) + self._metrics_dist("encoder.dropped.traces", n_traces) + return + + try: + self._send_payload_with_backoff(encoded, n_traces, client) + except Exception: + self._metrics_dist("http.errors", tags=["type:err"]) + self._metrics_dist("http.dropped.bytes", len(encoded)) + self._metrics_dist("http.dropped.traces", n_traces) + if raise_exc: + raise + else: + log.error( + "failed to send, dropping %d traces to intake at %s after %d retries", + n_traces, + self._intake_endpoint(client), + self.RETRY_ATTEMPTS, + ) + finally: + self._metrics_dist("http.sent.bytes", len(encoded)) + self._metrics_dist("http.sent.traces", n_traces) + + def periodic(self): + self.flush_queue(raise_exc=False) + + def _stop_service( + self, + timeout=None, # type: Optional[float] + ): + # type: (...) -> None + # FIXME: don't join() on stop(), let the caller handle this + super(HTTPWriter, self)._stop_service() + self.join(timeout=timeout) + + def on_shutdown(self): + try: + self.periodic() + finally: + self._reset_connection() + + +class AgentResponse(object): + def __init__(self, rate_by_service): + # type: (Dict[str, float]) -> None + self.rate_by_service = rate_by_service + + +class AgentWriter(HTTPWriter): + """ + The Datadog Agent supports (at the time of writing this) receiving trace + payloads up to 50MB. A trace payload is just a list of traces and the agent + expects a trace to be complete. That is, all spans with the same trace_id + should be in the same trace. 
+ """ + + RETRY_ATTEMPTS = 3 + HTTP_METHOD = "PUT" + STATSD_NAMESPACE = "tracer" + + def __init__( + self, + agent_url, # type: str + priority_sampling=False, # type: bool + processing_interval=None, # type: Optional[float] + # Match the payload size since there is no functionality + # to flush dynamically. + buffer_size=None, # type: Optional[int] + max_payload_size=None, # type: Optional[int] + timeout=None, # type: Optional[float] + dogstatsd: Optional[DogStatsd] = None, + report_metrics=False, # type: bool + sync_mode=False, # type: bool + api_version=None, # type: Optional[str] + reuse_connections=None, # type: Optional[bool] + headers=None, # type: Optional[Dict[str, str]] + response_callback=None, # type: Optional[Callable[[AgentResponse], None]] + ): + # type: (...) -> None + if processing_interval is None: + processing_interval = config._trace_writer_interval_seconds + if timeout is None: + timeout = config._agent_timeout_seconds + if buffer_size is not None and buffer_size <= 0: + raise ValueError("Writer buffer size must be positive") + if max_payload_size is not None and max_payload_size <= 0: + raise ValueError("Max payload size must be positive") + # Default to v0.4 if we are on Windows since there is a known compatibility issue + # https://github.com/DataDog/dd-trace-py/issues/4829 + # DEV: sys.platform on windows should be `win32` or `cygwin`, but using `startswith` + # as a safety precaution. + # https://docs.python.org/3/library/sys.html#sys.platform + is_windows = sys.platform.startswith("win") or sys.platform.startswith("cygwin") + + default_api_version = "v0.5" + if is_windows or in_gcp_function() or in_azure_function_consumption_plan(): + default_api_version = "v0.4" + + self._api_version = api_version or config._trace_api or default_api_version + if is_windows and self._api_version == "v0.5": + raise RuntimeError( + "There is a known compatibility issue with v0.5 API and Windows, " + "please see https://github.com/DataDog/dd-trace-py/issues/4829 for more details." + ) + + buffer_size = buffer_size or config._trace_writer_buffer_size + max_payload_size = max_payload_size or config._trace_writer_payload_size + try: + client = WRITER_CLIENTS[self._api_version](buffer_size, max_payload_size) + except KeyError: + raise ValueError( + "Unsupported api version: '%s'. 
The supported versions are: %r" + % (self._api_version, ", ".join(sorted(WRITER_CLIENTS.keys()))) + ) + + _headers = { + "Datadog-Meta-Lang": "python", + "Datadog-Meta-Lang-Version": compat.PYTHON_VERSION, + "Datadog-Meta-Lang-Interpreter": compat.PYTHON_INTERPRETER, + "Datadog-Meta-Tracer-Version": ddtrace.__version__, + "Datadog-Client-Computed-Top-Level": "yes", + } + if headers: + _headers.update(headers) + self._container_info = container.get_container_info() + if self._container_info and self._container_info.container_id: + _headers.update( + { + "Datadog-Container-Id": self._container_info.container_id, + } + ) + + _headers.update({"Content-Type": client.encoder.content_type}) # type: ignore[attr-defined] + additional_header_str = os.environ.get("_DD_TRACE_WRITER_ADDITIONAL_HEADERS") + if additional_header_str is not None: + _headers.update(parse_tags_str(additional_header_str)) + self._response_cb = response_callback + super(AgentWriter, self).__init__( + intake_url=agent_url, + clients=[client], + processing_interval=processing_interval, + buffer_size=buffer_size, + max_payload_size=max_payload_size, + timeout=timeout, + dogstatsd=dogstatsd, + sync_mode=sync_mode, + reuse_connections=reuse_connections, + headers=_headers, + ) + + def recreate(self): + # type: () -> HTTPWriter + return self.__class__( + agent_url=self.agent_url, + processing_interval=self._interval, + buffer_size=self._buffer_size, + max_payload_size=self._max_payload_size, + timeout=self._timeout, + dogstatsd=self.dogstatsd, + sync_mode=self._sync_mode, + api_version=self._api_version, + ) + + @property + def agent_url(self): + return self.intake_url + + @property + def _agent_endpoint(self): + return self._intake_endpoint(client=None) + + def _downgrade(self, payload, response, client): + if client.ENDPOINT == "v0.5/traces": + self._clients = [AgentWriterClientV4(self._buffer_size, self._max_payload_size)] + # Since we have to change the encoding in this case, the payload + # would need to be converted to the downgraded encoding before + # sending it, but we chuck it away instead. + log.warning( + "Dropping trace payload due to the downgrade to an incompatible API version (from v0.5 to v0.4). To " + "avoid this from happening in the future, either ensure that the Datadog agent has a v0.5/traces " + "endpoint available, or explicitly set the trace API version to, e.g., v0.4." + ) + return None + if client.ENDPOINT == "v0.4/traces": + self._clients = [AgentWriterClientV3(self._buffer_size, self._max_payload_size)] + # These endpoints share the same encoding, so we can try sending the + # same payload over the downgraded endpoint. + return payload + raise ValueError() + + def _send_payload(self, payload, count, client): + # type: (...) 
-> Response + response = super(AgentWriter, self)._send_payload(payload, count, client) + if response.status in [404, 415]: + log.debug("calling endpoint '%s' but received %s; downgrading API", client.ENDPOINT, response.status) + try: + payload = self._downgrade(payload, response, client) + except ValueError: + log.error( + "unsupported endpoint '%s': received response %s from intake (%s)", + client.ENDPOINT, + response.status, + self.intake_url, + ) + else: + if payload is not None: + self._send_payload(payload, count, client) + elif response.status < 400: + if self._response_cb: + raw_resp = response.get_json() + if raw_resp and "rate_by_service" in raw_resp: + self._response_cb( + AgentResponse( + rate_by_service=raw_resp["rate_by_service"], + ) + ) + return response + + def start(self): + super(AgentWriter, self).start() + try: + if config._telemetry_enabled: + from ...internal import telemetry + + if telemetry.telemetry_writer.started: + return + + telemetry.telemetry_writer._app_started_event() + + # appsec remote config should be enabled/started after the global tracer and configs + # are initialized + if os.getenv("AWS_LAMBDA_FUNCTION_NAME") is None and ( + asm_config._asm_enabled or config._remote_config_enabled + ): + from ddtrace.appsec._remoteconfiguration import enable_appsec_rc + + enable_appsec_rc() + except service.ServiceStatusError: + pass + + def _get_finalized_headers(self, count, client): + # type: (int, WriterClientBase) -> dict + headers = super(AgentWriter, self)._get_finalized_headers(count, client) + headers["X-Datadog-Trace-Count"] = str(count) + return headers diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer_client.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer_client.py new file mode 100644 index 0000000..f55b6e7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/internal/writer/writer_client.py @@ -0,0 +1,49 @@ +from .._encoding import BufferedEncoder +from ..encoding import MSGPACK_ENCODERS + + +class WriterClientBase(object): + """A class encapsulating an endpoint/encoder pair that a TraceWriter can send payloads to""" + + ENDPOINT = "" + + def __init__( + self, + encoder: BufferedEncoder, + ): + self.encoder = encoder + + +class AgentWriterClientV5(WriterClientBase): + ENDPOINT = "v0.5/traces" + + def __init__(self, buffer_size, max_payload_size): + super(AgentWriterClientV5, self).__init__( + MSGPACK_ENCODERS["v0.5"]( + max_size=buffer_size, + max_item_size=max_payload_size, + ) + ) + + +class AgentWriterClientV4(WriterClientBase): + ENDPOINT = "v0.4/traces" + + def __init__(self, buffer_size, max_payload_size): + super(AgentWriterClientV4, self).__init__( + MSGPACK_ENCODERS["v0.4"]( + max_size=buffer_size, + max_item_size=max_payload_size, + ) + ) + + +class AgentWriterClientV3(AgentWriterClientV4): + ENDPOINT = "v0.3/traces" + + +WRITER_CLIENTS = { + "v0.3": AgentWriterClientV3, + "v0.4": AgentWriterClientV4, + "v0.5": AgentWriterClientV5, +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/__init__.py new file mode 100644 index 0000000..816400e --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/__init__.py @@ -0,0 +1,114 @@ +""" +OpenTelemetry API +================= + +The dd-trace-py library provides an implementation of the +`OpenTelemetry API `_. 
+When ddtrace OpenTelemetry support is configured, all operations defined in the +OpenTelemetry trace api can be used to create, configure, and propagate a distributed trace. +All operations defined the opentelemetry trace api are configured to use the ddtrace global tracer (``ddtrace.tracer``) +and generate datadog compatible traces. By default all opentelemetry traces are submitted to a Datadog agent. + + +Configuration +------------- + +When using ``ddtrace-run``, OpenTelemetry support can be enabled by setting +the ``DD_TRACE_OTEL_ENABLED`` environment variable to True (the default value is ``False``). + +OpenTelemetry support can be enabled programmatically by setting ``DD_TRACE_OTEL_ENABLED=True`` +and setting the ``ddtrace.opentelemetry.TracerProvider``. These configurations +must be set before any OpenTelemetry Tracers are initialized:: + + import os + # Must be set before ddtrace is imported! + os.environ["DD_TRACE_OTEL_ENABLED"] = "true" + + from opentelemetry.trace import set_tracer_provider + from ddtrace.opentelemetry import TracerProvider + + set_tracer_provider(TracerProvider()) + + ... + + +Usage +----- + +Datadog and OpenTelemetry APIs can be used interchangeably:: + + # Sample Usage + import opentelemetry + import ddtrace + + oteltracer = opentelemetry.trace.get_tracer(__name__) + + with oteltracer.start_as_current_span("otel-span") as parent_span: + parent_span.set_attribute("otel_key", "otel_val") + with ddtrace.tracer.trace("ddtrace-span") as child_span: + child_span.set_tag("dd_key", "dd_val") + + @oteltracer.start_as_current_span("span_name") + def some_function(): + pass + + +Mapping +------- + +The OpenTelemetry API support implementation maps OpenTelemetry spans to Datadog spans. This mapping is described by the following table, using the protocol buffer field names used in `OpenTelemetry `_ and `Datadog `_. + + +.. list-table:: + :header-rows: 1 + :widths: 30, 30, 40 + + * - OpenTelemetry + - Datadog + - Description + * - ``trace_id`` + - ``traceID`` + - + * - ``span_id`` + - ``spanID`` + - + * - ``trace_state`` + - ``meta["tracestate"]`` + - Datadog vendor-specific data is set in trace state using the ``dd=`` prefix + * - ``parent_span_id`` + - ``parentID`` + - + * - ``name`` + - ``resource`` + - + * - ``kind`` + - ``meta["span.kind"]`` + - + * - ``start_time_unix_nano`` + - ``start`` + - + * - ``end_time_unix_nano`` + - ``duration`` + - Derived from start and end time + * - ``attributes[]`` + - ``meta[]`` + - Datadog tags (``meta``) are set for each OpenTelemetry attribute + * - ``links[]`` + - ``meta["_dd.span_links"]`` + - + * - ``status`` + - ``error`` + - Derived from status + * - ``events[]`` + - N/A + - Span events not supported on the Datadog platform + + +""" # noqa: E501 + +from ._trace import TracerProvider + + +__all__ = [ + "TracerProvider", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_context.py new file mode 100644 index 0000000..6148235 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_context.py @@ -0,0 +1,87 @@ +""" +Implementation details of parenting open telemetry spans should be kept internal. This will give us the flexibility +to support new features (ex: baggage) and refactor this module with out introducing a breaking change. 
+""" +from opentelemetry.context.context import Context as OtelContext +from opentelemetry.trace import NonRecordingSpan as OtelNonRecordingSpan +from opentelemetry.trace import Span as OtelSpan +from opentelemetry.trace import SpanContext as OtelSpanContext +from opentelemetry.trace import get_current_span +from opentelemetry.trace import set_span_in_context +from opentelemetry.trace.span import TraceFlags +from opentelemetry.trace.span import TraceState + +from ddtrace import tracer as ddtracer +from ddtrace.context import Context as DDContext +from ddtrace.internal.logger import get_logger +from ddtrace.opentelemetry._span import Span +from ddtrace.propagation.http import _TraceContext +from ddtrace.provider import BaseContextProvider as DDBaseContextProvider # noqa:F401 +from ddtrace.span import Span as DDSpan + + +log = get_logger(__name__) + + +class DDRuntimeContext: + def attach(self, otel_context): + # type: (OtelContext) -> object + """ + Converts an OpenTelemetry Span to a Datadog Span/Context then stores the + Datadog representation in the Global DDtrace Trace Context Provider. + """ + otel_span = get_current_span(otel_context) + if otel_span: + if isinstance(otel_span, Span): + self._ddcontext_provider.activate(otel_span._ddspan) + elif isinstance(otel_span, OtelSpan): + trace_id, span_id, _, tf, ts, _ = otel_span.get_span_context() + trace_state = ts.to_header() if ts else None + ddcontext = _TraceContext._get_context(trace_id, span_id, tf, trace_state) + self._ddcontext_provider.activate(ddcontext) + else: + log.error( + "Programming ERROR: ddtrace does not support activiting spans with the type: %s. Please open a " + "github issue at: https://github.com/Datadog/dd-trace-py and set DD_TRACE_OTEL_ENABLED=True.", + type(otel_span), + ) + + # A return value with the type `object` is required by the otel api to remove/deactivate spans. + # Since manually deactivating spans is not supported by ddtrace this object will never be used. + return object() + + def get_current(self): + # type: (...) -> OtelContext + """ + Converts the active datadog span to an Opentelemetry Span and then stores it + in a format that can be parsed by the OpenTelemetry API. + """ + ddactive = self._ddcontext_provider.active() + context = OtelContext() + if isinstance(ddactive, DDSpan): + span = Span(ddactive) + context = set_span_in_context(span, context) + elif isinstance(ddactive, DDContext): + tf = TraceFlags.SAMPLED if ddactive._traceflags == "01" else TraceFlags.DEFAULT + ts = TraceState.from_header([ddactive._tracestate]) + span_context = OtelSpanContext(ddactive.trace_id or 0, ddactive.span_id or 0, True, tf, ts) + span = OtelNonRecordingSpan(span_context) + context = set_span_in_context(span, context) + return context + + def detach(self, token): + # type: (object) -> None + """ + NOP, The otel api uses this method to deactivate spans but this operation is not supported by + the datadog context provider. + """ + pass + + @property + def _ddcontext_provider(self): + # type: () -> DDBaseContextProvider + """ + Get the ddtrace context provider from the global Datadog tracer. + This can reterive a default, gevent, or asyncio context provider. 
+ """ + return ddtracer.context_provider diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_span.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_span.py new file mode 100644 index 0000000..adbdee7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_span.py @@ -0,0 +1,308 @@ +from typing import TYPE_CHECKING + +from opentelemetry.trace import Span as OtelSpan +from opentelemetry.trace import SpanContext +from opentelemetry.trace import SpanKind +from opentelemetry.trace import Status +from opentelemetry.trace import StatusCode +from opentelemetry.trace.span import TraceFlags +from opentelemetry.trace.span import TraceState + +from ddtrace.constants import ERROR_MSG +from ddtrace.constants import SPAN_KIND +from ddtrace.internal import core +from ddtrace.internal.compat import time_ns +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.formats import flatten_key_value +from ddtrace.internal.utils.formats import is_sequence + + +if TYPE_CHECKING: + from typing import Mapping # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Union # noqa:F401 + + from opentelemetry.util.types import Attributes # noqa:F401 + from opentelemetry.util.types import AttributeValue # noqa:F401 + + from ddtrace.internal.compat import NumericType # noqa:F401 + from ddtrace.span import Span as DDSpan # noqa:F401 + + +log = get_logger(__name__) + + +def _ddmap(span, attribute, value): + # type: (DDSpan, str, Union[bytes, NumericType]) -> DDSpan + if attribute.startswith("meta") or attribute.startswith("metrics"): + meta_key = attribute.split("'")[1] if len(attribute.split("'")) == 3 else None + if meta_key: + span.set_tag(meta_key, value) + else: + setattr(span, attribute, value) + return span + + +_OTelDatadogMapping = { + "service.name": "service", + "resource.name": "resource", + "span.type": "span_type", + "analytics.event": "metrics['_dd1.sr.eausr']", +} + + +class Span(OtelSpan): + """Initializes an OpenTelemetry compatible shim for a Datadog span + + TODO: Add mapping table from otel to datadog + """ + + _RECORD_EXCEPTION_KEY = "_dd.otel.record_exception" + _SET_EXCEPTION_STATUS_KEY = "_dd.otel.set_status_on_exception" + + def __init__( + self, + datadog_span, # type: DDSpan + kind=SpanKind.INTERNAL, # type: SpanKind + attributes=None, # type: Optional[Mapping[str, AttributeValue]] + start_time=None, # type: Optional[int] + record_exception=None, # type: Optional[bool] + set_status_on_exception=None, # type: Optional[bool] + ): + # type: (...) 
-> None + if start_time is not None: + # start_time should be set in nanoseconds + datadog_span.start_ns = start_time + + self._ddspan = datadog_span + if record_exception is not None: + self._record_exception = record_exception + if set_status_on_exception is not None: + self._set_status_on_exception = set_status_on_exception + + if kind is not SpanKind.INTERNAL: + # Only set if it isn't "internal" to save on bytes + self.set_attribute(SPAN_KIND, kind.name.lower()) + + if attributes: + self.set_attributes(attributes) + + @property + def _record_exception(self): + # type: () -> bool + # default value is True, if record exception key is not set return True + return core.get_item(self._RECORD_EXCEPTION_KEY, span=self._ddspan) is not False + + @_record_exception.setter + def _record_exception(self, value): + # type: (bool) -> None + core.set_item(self._RECORD_EXCEPTION_KEY, value, span=self._ddspan) + + @property + def _set_status_on_exception(self): + # type: () -> bool + # default value is True, if set status on exception key is not set return True + return core.get_item(self._SET_EXCEPTION_STATUS_KEY, span=self._ddspan) is not False + + @_set_status_on_exception.setter + def _set_status_on_exception(self, value): + # type: (bool) -> None + core.set_item(self._SET_EXCEPTION_STATUS_KEY, value, span=self._ddspan) + + def end(self, end_time=None): + # type: (Optional[int]) -> None + """ + Marks the end time of a span. This method should be called once. + + :param end_time: The end time of the span, in nanoseconds. Defaults to ``now``. + """ + if end_time is None: + end_time = time_ns() + override_name = self._datadog_operation_name + if override_name: + self._ddspan.name = override_name + self._ddspan._finish_ns(end_time) + + @property + def kind(self): + """Gets span kind attribute""" + # BUG: Span.kind is required by the otel library instrumentation (ex: flask, asgi, django) but + # this property is only defined in the opentelemetry-sdk and NOT defined the opentelemetry-api. 
+ # TODO: Propose a fix in opentelemetry-python-contrib project + return self._ddspan._meta.get(SPAN_KIND, SpanKind.INTERNAL.name.lower()) + + def get_span_context(self): + # type: () -> SpanContext + """Returns an OpenTelemetry SpanContext""" + ts = None + tf = TraceFlags.DEFAULT + if self._ddspan.context: + ts = TraceState.from_header([self._ddspan.context._tracestate]) + if self._ddspan.context.sampling_priority and self._ddspan.context.sampling_priority > 0: + tf = TraceFlags.SAMPLED + + return SpanContext(self._ddspan.trace_id, self._ddspan.span_id, False, tf, ts) + + def set_attributes(self, attributes): + # type: (Mapping[str, AttributeValue]) -> None + """Sets attributes/tags""" + for k, v in attributes.items(): + self.set_attribute(k, v) + + def set_attribute(self, key, value): + # type: (str, AttributeValue) -> None + """Sets an attribute or service name on a tag""" + if not self.is_recording(): + return + + # Override reserved OTel span attributes + ddattribute = _OTelDatadogMapping.get(key) + if ddattribute is not None: + _ddmap(self._ddspan, ddattribute, value) + return + + if is_sequence(value): + for k, v in flatten_key_value(key, value).items(): + self._ddspan.set_tag(k, v) + return + self._ddspan.set_tag(key, value) + + def add_event(self, name, attributes=None, timestamp=None): + # type: (str, Optional[Attributes], Optional[int]) -> None + """NOOP - events are not yet supported""" + return + + def update_name(self, name): + # type: (str) -> None + """Updates the name of a span""" + if not self.is_recording(): + return + self._ddspan.resource = name + + def is_recording(self): + # type: () -> bool + """Returns False if Span.end() is called.""" + return not self._ddspan.finished + + def set_status(self, status, description=None): + # type: (Union[Status, StatusCode], Optional[str]) -> None + """ + Updates a Span from StatusCode.OK to StatusCode.ERROR. + Note - The default status is OK. Setting the status to StatusCode.UNSET or updating the + status from StatusCode.ERROR to StatusCode.OK is not supported. + """ + if not self.is_recording(): + return + + if isinstance(status, Status): + status_code = status.status_code + message = status.description + log.warning("Description %s ignored. Use either `Status` or `(StatusCode, Description)`", description) + else: + status_code = status + message = description + + if status_code is StatusCode.ERROR: + self._ddspan.error = 1 + if message: + self.set_attribute(ERROR_MSG, message) + + def record_exception(self, exception, attributes=None, timestamp=None, escaped=False): + # type: (BaseException, Optional[Attributes], Optional[int], bool) -> None + """ + Records the type, message, and traceback of an exception as Span attributes. + Note - Span Events are not yet supported. + """ + if not self.is_recording(): + return + self._ddspan._set_exc_tags(type(exception), exception, exception.__traceback__) + if attributes: + self.set_attributes(attributes) + + def __enter__(self): + # type: () -> Span + """Invoked when `Span` is used as a context manager. + Returns the `Span` itself. 
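        The context-manager protocol here pairs with ``__exit__`` below, which
        records any exception and ends the span. A minimal illustrative sketch
        (``oteltracer`` is assumed to be an OpenTelemetry tracer backed by this
        shim; the names are placeholders)::

            span = oteltracer.start_span("database-query")
            with span:
                # attributes set while the span is recording become Datadog tags
                span.set_attribute("db.system", "postgresql")
            # on exit the span is ended; an exception raised inside the block is
            # recorded and the status set to ERROR, per the flags on the span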
+ """ + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Ends Span context manager""" + if exc_val: + if self._record_exception: + self.record_exception(exc_val) + if self._set_status_on_exception: + # do not overwrite the status message set by record exception + self.set_status(StatusCode.ERROR) + self.end() + + @property + def _datadog_operation_name(self): + # Adapted from https://github.com/DataDog/dd-trace-java/blob/4131e509a94db430b47104769800ec14de5f0a0d/dd-java-agent/instrumentation/opentelemetry/opentelemetry-1.4/src/main/java/datadog/trace/instrumentation/opentelemetry14/trace/OtelConventions.java#L107 + ddspan = self._ddspan + span_kind = self.kind + + operation_name = ddspan.get_tag("operation.name") + if operation_name: + return operation_name + + if ddspan.get_tag("http.request.method"): + if span_kind == SpanKind.SERVER: + return "http.server.request" + if span_kind == SpanKind.CLIENT: + return "http.client.request" + + db_system = ddspan.get_tag("db.system") + if db_system and span_kind == SpanKind.CLIENT: + return f"{db_system}.query" + + messaging_system = ddspan.get_tag("messaging.system") + messaging_operation = ddspan.get_tag("messaging.operation") + if ( + messaging_system + and messaging_operation + and ( + span_kind == SpanKind.CONSUMER + or span_kind == SpanKind.PRODUCER + or span_kind == SpanKind.CLIENT + or span_kind == SpanKind.SERVER + ) + ): + return messaging_system + "." + messaging_operation + + rpc_system = ddspan.get_tag("rpc.system") + if span_kind == SpanKind.CLIENT and rpc_system == "aws-api": + rpc_service = ddspan.get_tag("rpc.service") + if not rpc_service: + return "aws.client.request" + return f"aws.{rpc_service}.request" + if span_kind == SpanKind.CLIENT and rpc_system: + return f"{rpc_system}.client.request" + if span_kind == SpanKind.SERVER and rpc_system: + return f"{rpc_system}.server.request" + + faas_invoked_provider = ddspan.get_tag("faas.invoked_provider") + faas_invoked_name = ddspan.get_tag("faas.invoked_name") + if span_kind == SpanKind.CLIENT and faas_invoked_provider and faas_invoked_name: + return f"{faas_invoked_provider}.{faas_invoked_name}.invoke" + faas_trigger = ddspan.get_tag("faas.trigger") + if span_kind == SpanKind.SERVER and faas_trigger: + return f"{faas_trigger}.invoke" + + graphql_operation_type = ddspan.get_tag("graphql.operation.type") + if graphql_operation_type: + return "graphql.server.request" + + network_protocol_name = ddspan.get_tag("network.protocol.name") + if span_kind == SpanKind.SERVER: + if network_protocol_name: + return f"{network_protocol_name}.server.request" + else: + return "server.request" + if span_kind == SpanKind.CLIENT: + if network_protocol_name: + return f"{network_protocol_name}.client.request" + else: + return "server.request" + + return span_kind diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_trace.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_trace.py new file mode 100644 index 0000000..579586a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentelemetry/_trace.py @@ -0,0 +1,154 @@ +from contextlib import contextmanager +from typing import TYPE_CHECKING + +from opentelemetry.context import Context as OtelContext # noqa:F401 +from opentelemetry.trace import SpanKind as OtelSpanKind +from opentelemetry.trace import Tracer as OtelTracer +from opentelemetry.trace import TracerProvider as OtelTracerProvider +from opentelemetry.trace import use_span +from opentelemetry.trace.propagation import get_current_span +from 
opentelemetry.trace.span import INVALID_SPAN +from opentelemetry.trace.span import Span as OtelSpan + +import ddtrace +from ddtrace.internal.constants import SPAN_API_OTEL +from ddtrace.internal.logger import get_logger +from ddtrace.opentelemetry._span import Span +from ddtrace.propagation.http import _TraceContext + + +if TYPE_CHECKING: + from typing import Iterator # noqa:F401 + from typing import Mapping # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Sequence # noqa:F401 + from typing import Union # noqa:F401 + + from opentelemetry.trace import Link as OtelLink # noqa:F401 + from opentelemetry.util.types import AttributeValue as OtelAttributeValue # noqa:F401 + + from ddtrace import Tracer as DDTracer # noqa:F401 + from ddtrace.span import _MetaDictType # noqa:F401 + + +log = get_logger(__name__) + + +class TracerProvider(OtelTracerProvider): + """ + Entry point of the OpenTelemetry API and provides access to OpenTelemetry compatible Tracers. + One TracerProvider should be initialized and set per application. + """ + + def __init__(self) -> None: + self._ddtracer = ddtrace.tracer + super().__init__() + + def get_tracer( + self, + instrumenting_module_name, + instrumenting_library_version=None, + schema_url=None, + ): + # type: (str, Optional[str], Optional[str]) -> OtelTracer + """Returns an opentelemetry compatible Tracer.""" + return Tracer(self._ddtracer) + + +class Tracer(OtelTracer): + """Starts and/or activates OpenTelemetry compatible Spans using the global Datadog Tracer.""" + + def __init__(self, datadog_tracer): + # type: (DDTracer) -> None + self._tracer = datadog_tracer + super(Tracer, self).__init__() + + def start_span( + self, + name, # type: str + context=None, # type: Optional[OtelContext] + kind=OtelSpanKind.INTERNAL, # type: OtelSpanKind + attributes=None, # type: Optional[Mapping[str, OtelAttributeValue]] + links=None, # type: Optional[Sequence[OtelLink]] + start_time=None, # type: Optional[int] + record_exception=True, # type: bool + set_status_on_exception=True, # type: bool + ): + # type: (...) -> OtelSpan + """Creates and starts an opentelemetry span.""" + # Get active otel span + curr_otel_span = get_current_span(context) + if curr_otel_span is INVALID_SPAN: + # There is no active datadog/otel span + dd_active = None # type: Optional[Union[ddtrace.context.Context, ddtrace.Span]] + elif isinstance(curr_otel_span, Span): + # Get underlying ddtrace span from the active otel span + dd_active = curr_otel_span._ddspan + elif isinstance(curr_otel_span, OtelSpan): + # Otel span was not generated by the ddtrace library and does not have an underlying ddtrace span. + # Convert otel span to a ddtrace context object. + trace_id, span_id, _, tf, ts, _ = curr_otel_span.get_span_context() + trace_state = ts.to_header() if ts else None + dd_active = _TraceContext._get_context(trace_id, span_id, tf, trace_state) + else: + log.error( + "Programming Error: The current active Span is not supported by ddtrace. The following span will not " + "have a parent: %s. 
Please open a github issue at: https://github.com/Datadog/dd-trace-py and avoid " + "setting the ddtrace OpenTelemetry TracerProvider.", + curr_otel_span, + ) + # Create a new Datadog span (not activated), then return a valid OTel span + dd_span = self._tracer.start_span(name, child_of=dd_active, activate=False, span_api=SPAN_API_OTEL) + + if links: + for link in links: + dd_span.set_link( + link.context.trace_id, + link.context.span_id, + link.context.trace_state.to_header(), + link.context.trace_flags, + link.attributes, + ) + return Span( + dd_span, + kind=kind, + attributes=attributes, + start_time=start_time, + record_exception=record_exception, + set_status_on_exception=set_status_on_exception, + ) + + @contextmanager + def start_as_current_span( + self, + name, # type: str + context=None, # type: Optional[OtelContext] + kind=OtelSpanKind.INTERNAL, # type: OtelSpanKind + attributes=None, # type: Optional[Mapping[str, OtelAttributeValue]] + links=None, # type: Optional[Sequence[OtelLink]] + start_time=None, # type: Optional[int] + record_exception=True, # type: bool + set_status_on_exception=True, # type: bool + end_on_exit=True, # type: bool + ): + # type: (...) -> Iterator[OtelSpan] + """Context manager for creating and activating a new opentelemetry span.""" + # Create a new non-active OTel span wrapper + span = self.start_span( + name, + context=context, + kind=kind, + attributes=attributes, + links=links, + start_time=start_time, + record_exception=record_exception, + set_status_on_exception=set_status_on_exception, + ) + + with use_span( + span, + end_on_exit=end_on_exit, + record_exception=record_exception, + set_status_on_exception=set_status_on_exception, + ) as span: + yield span diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/__init__.py new file mode 100644 index 0000000..9df526f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/__init__.py @@ -0,0 +1,8 @@ +from .helpers import set_global_tracer +from .tracer import Tracer + + +__all__ = [ + "Tracer", + "set_global_tracer", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/helpers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/helpers.py new file mode 100644 index 0000000..e8e6c48 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/helpers.py @@ -0,0 +1,25 @@ +from typing import TYPE_CHECKING + +import opentracing + +import ddtrace + + +if TYPE_CHECKING: # pragma: no cover + from ddtrace.opentracer import Tracer # noqa:F401 + + +""" +Helper routines for Datadog OpenTracing. 
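An illustrative usage of the helper defined below (the service name is a
placeholder)::

    from ddtrace.opentracer import Tracer, set_global_tracer

    tracer = Tracer(service_name="example-service")
    # becomes opentracing.tracer; ddtrace.tracer points at its underlying Datadog tracer
    set_global_tracer(tracer)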
+""" + + +def set_global_tracer(tracer): + # type: (Tracer) -> None + """Sets the global tracers to the given tracer.""" + + # overwrite the opentracer reference + opentracing.tracer = tracer + + # overwrite the Datadog tracer reference + ddtrace.tracer = tracer._dd_tracer diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/__init__.py new file mode 100644 index 0000000..04ddde7 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/__init__.py @@ -0,0 +1,6 @@ +from .http import HTTPPropagator + + +__all__ = [ + "HTTPPropagator", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/binary.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/binary.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/http.py new file mode 100644 index 0000000..539f8dc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/http.py @@ -0,0 +1,74 @@ +from typing import Dict # noqa:F401 + +from opentracing import InvalidCarrierException + +from ddtrace.propagation.http import HTTPPropagator as DDHTTPPropagator + +from ...internal.logger import get_logger +from ..span_context import SpanContext +from .propagator import Propagator + + +log = get_logger(__name__) + +_HTTP_BAGGAGE_PREFIX = "ot-baggage-" +_HTTP_BAGGAGE_PREFIX_LEN = len(_HTTP_BAGGAGE_PREFIX) + + +class HTTPPropagator(Propagator): + """OpenTracing compatible HTTP_HEADER and TEXT_MAP format propagator. + + `HTTPPropagator` provides compatibility by using existing OpenTracing + compatible methods from the ddtracer along with new logic supporting the + outstanding OpenTracing-defined functionality. + """ + + @staticmethod + def inject(span_context, carrier): + # type: (SpanContext, Dict[str, str]) -> None + """Inject a span context into a carrier. + + *span_context* is injected into the carrier by first using an + :class:`ddtrace.propagation.http.HTTPPropagator` to inject the ddtracer + specific fields. + + Then the baggage is injected into *carrier*. + + :param span_context: span context to inject. + + :param carrier: carrier to inject into. + """ + if not isinstance(carrier, dict): + raise InvalidCarrierException("propagator expects carrier to be a dict") + + DDHTTPPropagator.inject(span_context._dd_context, carrier) + + # Add the baggage + if span_context.baggage is not None: + for key in span_context.baggage: + carrier[_HTTP_BAGGAGE_PREFIX + key] = span_context.baggage[key] + + @staticmethod + def extract(carrier): + # type: (Dict[str, str]) -> SpanContext + """Extract a span context from a carrier. + + :class:`ddtrace.propagation.http.HTTPPropagator` is used to extract + ddtracer supported fields into a `ddtrace.Context` context which is + combined with new logic to extract the baggage which is returned in an + OpenTracing compatible span context. + + :param carrier: carrier to extract from. + + :return: extracted span context. 
+ """ + if not isinstance(carrier, dict): + raise InvalidCarrierException("propagator expects carrier to be a dict") + + ddspan_ctx = DDHTTPPropagator.extract(carrier) + baggage = {} + for key in carrier: + if key.startswith(_HTTP_BAGGAGE_PREFIX): + baggage[key[_HTTP_BAGGAGE_PREFIX_LEN:]] = carrier[key] + + return SpanContext(ddcontext=ddspan_ctx, baggage=baggage) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/propagator.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/propagator.py new file mode 100644 index 0000000..77eadf3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/propagator.py @@ -0,0 +1,13 @@ +import abc + + +class Propagator(metaclass=abc.ABCMeta): + @staticmethod + @abc.abstractmethod + def inject(span_context, carrier): + pass + + @staticmethod + @abc.abstractmethod + def extract(carrier): + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/text.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/propagation/text.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/settings.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/settings.py new file mode 100644 index 0000000..944df88 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/settings.py @@ -0,0 +1,41 @@ +from collections import namedtuple +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 + + +# Keys used for the configuration dict +ConfigKeyNames = namedtuple( + "ConfigKeyNames", + [ + "AGENT_HOSTNAME", + "AGENT_HTTPS", + "AGENT_PORT", + "DEBUG", + "ENABLED", + "GLOBAL_TAGS", + "SAMPLER", + "PRIORITY_SAMPLING", + "UDS_PATH", + "SETTINGS", + ], +) + +ConfigKeys = ConfigKeyNames( + AGENT_HOSTNAME="agent_hostname", + AGENT_HTTPS="agent_https", + AGENT_PORT="agent_port", + DEBUG="debug", + ENABLED="enabled", + GLOBAL_TAGS="global_tags", + SAMPLER="sampler", + PRIORITY_SAMPLING="priority_sampling", + UDS_PATH="uds_path", + SETTINGS="settings", +) + + +def config_invalid_keys(config): + # type: (Dict[str, Any]) -> List[str] + """Returns a list of keys that exist in *config* and not in KEYS.""" + return [key for key in config.keys() if key not in ConfigKeys] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span.py new file mode 100644 index 0000000..2dd35e1 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span.py @@ -0,0 +1,197 @@ +import threading +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Text # noqa:F401 +from typing import Union # noqa:F401 + +from opentracing import Span as OpenTracingSpan +from opentracing.ext import tags as OTTags + +from ddtrace.constants import ERROR_MSG +from ddtrace.constants import ERROR_STACK +from ddtrace.constants import ERROR_TYPE +from ddtrace.context import Context as DatadogContext # noqa:F401 +from ddtrace.internal.compat import NumericType # noqa:F401 +from ddtrace.internal.constants import SPAN_API_OPENTRACING +from ddtrace.span import Span as DatadogSpan + +from .span_context import SpanContext +from .tags import Tags + + +if TYPE_CHECKING: # pragma: no cover + from .tracer import Tracer # noqa:F401 + + +_TagNameType = Union[Text, bytes] + + +class Span(OpenTracingSpan): + """Datadog 
implementation of :class:`opentracing.Span`""" + + def __init__(self, tracer, context, operation_name): + # type: (Tracer, Optional[SpanContext], str) -> None + if context is not None: + context = SpanContext(ddcontext=context._dd_context, baggage=context.baggage) + else: + context = SpanContext() + + super(Span, self).__init__(tracer, context) + + self.finished = False + self._lock = threading.Lock() + # use a datadog span + self._dd_span = DatadogSpan(operation_name, context=context._dd_context, span_api=SPAN_API_OPENTRACING) + + def finish(self, finish_time=None): + # type: (Optional[float]) -> None + """Finish the span. + + This calls finish on the ddspan. + + :param finish_time: specify a custom finish time with a unix timestamp + per time.time() + :type timestamp: float + """ + if self.finished: + return + + # finish the datadog span + self._dd_span.finish(finish_time) + self.finished = True + + def set_baggage_item(self, key, value): + # type: (str, Any) -> Span + """Sets a baggage item in the span context of this span. + + Baggage is used to propagate state between spans. + + :param key: baggage item key + :type key: str + + :param value: baggage item value + :type value: a type that can be str'd + + :rtype: Span + :return: itself for chaining calls + """ + new_ctx = self.context.with_baggage_item(key, value) + with self._lock: + self._context = new_ctx + return self + + def get_baggage_item(self, key): + # type: (str) -> Optional[str] + """Gets a baggage item from the span context of this span. + + :param key: baggage item key + :type key: str + + :rtype: str + :return: the baggage value for the given key or ``None``. + """ + return self.context.get_baggage_item(key) + + def set_operation_name(self, operation_name): + # type: (str) -> Span + """Set the operation name.""" + self._dd_span.name = operation_name + return self + + def log_kv(self, key_values, timestamp=None): + # type: (Dict[_TagNameType, Any], Optional[float]) -> Span + """Add a log record to this span. + + Passes on relevant opentracing key values onto the datadog span. + + :param key_values: a dict of string keys and values of any type + :type key_values: dict + + :param timestamp: a unix timestamp per time.time() + :type timestamp: float + + :return: the span itself, for call chaining + :rtype: Span + """ + + # match opentracing defined keys to datadog functionality + # opentracing/specification/blob/1be630515dafd4d2a468d083300900f89f28e24d/semantic_conventions.md#log-fields-table # noqa: E501 + for key, val in key_values.items(): + if key == "event" and val == "error": + # TODO: not sure if it's actually necessary to set the error manually + self._dd_span.error = 1 + self.set_tag("error", 1) + elif key == "error" or key == "error.object": + self.set_tag(ERROR_TYPE, val) + elif key == "message": + self.set_tag(ERROR_MSG, val) + elif key == "stack": + self.set_tag(ERROR_STACK, val) + else: + pass + + return self + + def set_tag(self, key, value): + # type: (_TagNameType, Any) -> Span + """Set a tag on the span. + + This sets the tag on the underlying datadog span. 
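        A few reserved OpenTracing tag names are translated to Datadog span
        fields rather than stored as plain tags, as the branches below show.
        Illustrative sketch (``ot_span`` is assumed to be an instance of this
        class)::

            ot_span.set_tag("resource.name", "GET /users")   # sets the Datadog resource
            ot_span.set_tag("span.type", "web")               # sets the Datadog span type
            ot_span.set_tag("peer.hostname", "db.internal")   # mapped to the out.host tag
            ot_span.set_tag("user.id", "1234")                # stored as an ordinary tag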
+ """ + if key == Tags.SPAN_TYPE: + self._dd_span.span_type = value + elif key == Tags.SERVICE_NAME: + self._dd_span.service = value + elif key == Tags.RESOURCE_NAME or key == OTTags.DATABASE_STATEMENT: + self._dd_span.resource = value + elif key == OTTags.PEER_HOSTNAME: + self._dd_span.set_tag_str(Tags.TARGET_HOST, value) + elif key == OTTags.PEER_PORT: + self._dd_span.set_tag(Tags.TARGET_PORT, value) + elif key == Tags.SAMPLING_PRIORITY: + self._dd_span.context.sampling_priority = value + else: + self._dd_span.set_tag(key, value) + return self + + def _get_tag(self, key): + # type: (_TagNameType) -> Optional[Text] + """Gets a tag from the span. + + This method retrieves the tag from the underlying datadog span. + """ + return self._dd_span.get_tag(key) + + def _get_metric(self, key): + # type: (_TagNameType) -> Optional[NumericType] + """Gets a metric from the span. + + This method retrieves the metric from the underlying datadog span. + """ + return self._dd_span.get_metric(key) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type: + self._dd_span.set_exc_info(exc_type, exc_val, exc_tb) + + # note: self.finish() AND _dd_span.__exit__ will call _span.finish() but + # it is idempotent + self._dd_span.__exit__(exc_type, exc_val, exc_tb) + self.finish() + + def _associate_dd_span(self, ddspan): + # type: (DatadogSpan) -> None + """Associates a DD span with this span.""" + # get the datadog span context + self._dd_span = ddspan + self.context._dd_context = ddspan.context + + @property + def _dd_context(self): + # type: () -> DatadogContext + return self._dd_span.context diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span_context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span_context.py new file mode 100644 index 0000000..c04eb88 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/span_context.py @@ -0,0 +1,66 @@ +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 + +from opentracing import SpanContext as OpenTracingSpanContext + +from ddtrace.context import Context as DatadogContext +from ddtrace.internal.compat import NumericType # noqa:F401 + + +class SpanContext(OpenTracingSpanContext): + """Implementation of the OpenTracing span context.""" + + def __init__( + self, + trace_id=None, # type: Optional[int] + span_id=None, # type: Optional[int] + sampling_priority=None, # type: Optional[NumericType] + baggage=None, # type: Optional[Dict[str, Any]] + ddcontext=None, # type: Optional[DatadogContext] + ): + # type: (...) -> None + # create a new dict for the baggage if it is not provided + # NOTE: it would be preferable to use opentracing.SpanContext.EMPTY_BAGGAGE + # but it is mutable. + # see: opentracing-python/blob/8775c7bfc57fd66e1c8bcf9a54d3e434d37544f9/opentracing/span.py#L30 + baggage = baggage or {} + + if ddcontext is not None: + self._dd_context = ddcontext + else: + self._dd_context = DatadogContext( + trace_id=trace_id, + span_id=span_id, + sampling_priority=sampling_priority, + ) + + self._baggage = dict(baggage) + + @property + def baggage(self): + # type: () -> Dict[str, Any] + return self._baggage + + def set_baggage_item(self, key, value): + # type: (str, Any) -> None + """Sets a baggage item in this span context. 
+ + Note that this operation mutates the baggage of this span context + """ + self.baggage[key] = value + + def with_baggage_item(self, key, value): + # type: (str, Any) -> SpanContext + """Returns a copy of this span with a new baggage item. + + Useful for instantiating new child span contexts. + """ + baggage = dict(self._baggage) + baggage[key] = value + return SpanContext(ddcontext=self._dd_context, baggage=baggage) + + def get_baggage_item(self, key): + # type: (str) -> Optional[Any] + """Gets a baggage item in this span context.""" + return self.baggage.get(key, None) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tags.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tags.py new file mode 100644 index 0000000..ebc2d86 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tags.py @@ -0,0 +1,23 @@ +from collections import namedtuple + + +TagNames = namedtuple( + "TagNames", + [ + "RESOURCE_NAME", + "SAMPLING_PRIORITY", + "SERVICE_NAME", + "SPAN_TYPE", + "TARGET_HOST", + "TARGET_PORT", + ], +) + +Tags = TagNames( + RESOURCE_NAME="resource.name", + SAMPLING_PRIORITY="sampling.priority", + SERVICE_NAME="service.name", + TARGET_HOST="out.host", + TARGET_PORT="network.destination.port", + SPAN_TYPE="span.type", +) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tracer.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tracer.py new file mode 100644 index 0000000..e3addba --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/tracer.py @@ -0,0 +1,376 @@ +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +import opentracing +from opentracing import Format +from opentracing import Scope # noqa:F401 +from opentracing import ScopeManager # noqa:F401 +from opentracing.scope_managers import ThreadLocalScopeManager + +import ddtrace +from ddtrace import Span as DatadogSpan +from ddtrace import Tracer as DatadogTracer +from ddtrace.context import Context as DatadogContext # noqa:F401 +from ddtrace.internal.constants import SPAN_API_OPENTRACING +from ddtrace.internal.utils.config import get_application_name +from ddtrace.settings import ConfigException + +from ..internal.logger import get_logger +from .propagation import HTTPPropagator +from .settings import ConfigKeys as keys +from .settings import config_invalid_keys +from .span import Span +from .span_context import SpanContext +from .utils import get_context_provider_for_scope_manager + + +log = get_logger(__name__) + +DEFAULT_CONFIG = { + keys.AGENT_HOSTNAME: None, + keys.AGENT_HTTPS: None, + keys.AGENT_PORT: None, + keys.DEBUG: False, + keys.ENABLED: None, + keys.GLOBAL_TAGS: {}, + keys.SAMPLER: None, + keys.PRIORITY_SAMPLING: None, + keys.UDS_PATH: None, + keys.SETTINGS: { + "FILTERS": [], + }, +} # type: Dict[str, Any] + + +class Tracer(opentracing.Tracer): + """A wrapper providing an OpenTracing API for the Datadog tracer.""" + + def __init__( + self, + service_name=None, # type: Optional[str] + config=None, # type: Optional[Dict[str, Any]] + scope_manager=None, # type: Optional[ScopeManager] + dd_tracer=None, # type: Optional[DatadogTracer] + ): + # type: (...) -> None + """Initialize a new Datadog opentracer. + + :param service_name: (optional) the name of the service that this + tracer will be used with. Note if not provided, a service name will + try to be determined based off of ``sys.argv``. 
If this fails a + :class:`ddtrace.settings.ConfigException` will be raised. + :param config: (optional) a configuration object to specify additional + options. See the documentation for further information. + :param scope_manager: (optional) the scope manager for this tracer to + use. The available managers are listed in the Python OpenTracing repo + here: https://github.com/opentracing/opentracing-python#scope-managers. + If ``None`` is provided, defaults to + :class:`opentracing.scope_managers.ThreadLocalScopeManager`. + :param dd_tracer: (optional) the Datadog tracer for this tracer to use. This + should only be passed if a custom Datadog tracer is being used. Defaults + to the global ``ddtrace.tracer`` tracer. + """ + # Merge the given config with the default into a new dict + self._config = DEFAULT_CONFIG.copy() + if config is not None: + self._config.update(config) + # Pull out commonly used properties for performance + self._service_name = service_name or get_application_name() + self._debug = self._config.get(keys.DEBUG) + + if self._debug: + # Ensure there are no typos in any of the keys + invalid_keys = config_invalid_keys(self._config) + if invalid_keys: + str_invalid_keys = ",".join(invalid_keys) + raise ConfigException("invalid key(s) given ({})".format(str_invalid_keys)) + + if not self._service_name: + raise ConfigException( + """ Cannot detect the \'service_name\'. + Please set the \'service_name=\' + keyword argument. + """ + ) + + self._scope_manager = scope_manager or ThreadLocalScopeManager() + dd_context_provider = get_context_provider_for_scope_manager(self._scope_manager) + + self._dd_tracer = dd_tracer or ddtrace.tracer or DatadogTracer() + self._dd_tracer.set_tags(self._config.get(keys.GLOBAL_TAGS)) # type: ignore[arg-type] + self._dd_tracer.configure( + enabled=self._config.get(keys.ENABLED), + hostname=self._config.get(keys.AGENT_HOSTNAME), + https=self._config.get(keys.AGENT_HTTPS), + port=self._config.get(keys.AGENT_PORT), + sampler=self._config.get(keys.SAMPLER), + settings=self._config.get(keys.SETTINGS), + priority_sampling=self._config.get(keys.PRIORITY_SAMPLING), + uds_path=self._config.get(keys.UDS_PATH), + context_provider=dd_context_provider, # type: ignore[arg-type] + ) + self._propagators = { + Format.HTTP_HEADERS: HTTPPropagator, + Format.TEXT_MAP: HTTPPropagator, + } + + @property + def scope_manager(self): + # type: () -> ScopeManager + """Returns the scope manager being used by this tracer.""" + return self._scope_manager + + def start_active_span( + self, + operation_name, # type: str + child_of=None, # type: Optional[Union[Span, SpanContext]] + references=None, # type: Optional[List[Any]] + tags=None, # type: Optional[Dict[str, str]] + start_time=None, # type: Optional[int] + ignore_active_span=False, # type: bool + finish_on_close=True, # type: bool + ): + # type: (...) -> Scope + """Returns a newly started and activated `Scope`. + The returned `Scope` supports with-statement contexts. For example:: + + with tracer.start_active_span('...') as scope: + scope.span.set_tag('http.method', 'GET') + do_some_work() + # Span.finish() is called as part of Scope deactivation through + # the with statement. + + It's also possible to not finish the `Span` when the `Scope` context + expires:: + + with tracer.start_active_span('...', + finish_on_close=False) as scope: + scope.span.set_tag('http.method', 'GET') + do_some_work() + # Span.finish() is not called as part of Scope deactivation as + # `finish_on_close` is `False`. 
+ + :param operation_name: name of the operation represented by the new + span from the perspective of the current service. + :param child_of: (optional) a Span or SpanContext instance representing + the parent in a REFERENCE_CHILD_OF Reference. If specified, the + `references` parameter must be omitted. + :param references: (optional) a list of Reference objects that identify + one or more parent SpanContexts. (See the Reference documentation + for detail). + :param tags: an optional dictionary of Span Tags. The caller gives up + ownership of that dictionary, because the Tracer may use it as-is + to avoid extra data copying. + :param start_time: an explicit Span start time as a unix timestamp per + time.time(). + :param ignore_active_span: (optional) an explicit flag that ignores + the current active `Scope` and creates a root `Span`. + :param finish_on_close: whether span should automatically be finished + when `Scope.close()` is called. + :return: a `Scope`, already registered via the `ScopeManager`. + """ + otspan = self.start_span( + operation_name=operation_name, + child_of=child_of, + references=references, + tags=tags, + start_time=start_time, + ignore_active_span=ignore_active_span, + ) + + # activate this new span + scope = self._scope_manager.activate(otspan, finish_on_close) + self._dd_tracer.context_provider.activate(otspan._dd_span) + return scope + + def start_span( + self, + operation_name=None, # type: Optional[str] + child_of=None, # type: Optional[Union[Span, SpanContext]] + references=None, # type: Optional[List[Any]] + tags=None, # type: Optional[Dict[str, str]] + start_time=None, # type: Optional[int] + ignore_active_span=False, # type: bool + ): + # type: (...) -> Span + """Starts and returns a new Span representing a unit of work. + + Starting a root Span (a Span with no causal references):: + + tracer.start_span('...') + + Starting a child Span (see also start_child_span()):: + + tracer.start_span( + '...', + child_of=parent_span) + + Starting a child Span in a more verbose way:: + + tracer.start_span( + '...', + references=[opentracing.child_of(parent_span)]) + + Note: the precedence when defining a relationship is the following, from highest to lowest: + 1. *child_of* + 2. *references* + 3. `scope_manager.active` (unless *ignore_active_span* is True) + 4. None + + Currently Datadog only supports `child_of` references. + + :param operation_name: name of the operation represented by the new + span from the perspective of the current service. + :param child_of: (optional) a Span or SpanContext instance representing + the parent in a REFERENCE_CHILD_OF Reference. If specified, the + `references` parameter must be omitted. + :param references: (optional) a list of Reference objects that identify + one or more parent SpanContexts. (See the Reference documentation + for detail) + :param tags: an optional dictionary of Span Tags. The caller gives up + ownership of that dictionary, because the Tracer may use it as-is + to avoid extra data copying. + :param start_time: an explicit Span start time as a unix timestamp per + time.time() + :param ignore_active_span: an explicit flag that ignores the current + active `Scope` and creates a root `Span`. + :return: an already-started Span instance. 
+ """ + ot_parent = None # 'ot_parent' is more readable than 'child_of' + ot_parent_context = None # the parent span's context + # dd_parent: the child_of to pass to the ddtracer + dd_parent = None # type: Optional[Union[DatadogSpan, DatadogContext]] + + if child_of is not None: + ot_parent = child_of # 'ot_parent' is more readable than 'child_of' + elif references and isinstance(references, list): + # we currently only support child_of relations to one span + ot_parent = references[0].referenced_context + + # - whenever child_of is not None ddspans with parent-child + # relationships will share a ddcontext which maintains a hierarchy of + # ddspans for the execution flow + # - when child_of is a ddspan then the ddtracer uses this ddspan to + # create the child ddspan + # - when child_of is a ddcontext then the ddtracer uses the ddcontext to + # get_current_span() for the parent + if ot_parent is None and not ignore_active_span: + # attempt to get the parent span from the scope manager + scope = self._scope_manager.active + parent_span = getattr(scope, "span", None) + ot_parent_context = getattr(parent_span, "context", None) + + # Compare the active ot and dd spans. Using the one which + # was created later as the parent. + active_dd_parent = self._dd_tracer.context_provider.active() + if parent_span and isinstance(active_dd_parent, DatadogSpan): + dd_parent_span = parent_span._dd_span + if active_dd_parent.start_ns >= dd_parent_span.start_ns: + dd_parent = active_dd_parent + else: + dd_parent = dd_parent_span + else: + dd_parent = active_dd_parent + elif ot_parent is not None and isinstance(ot_parent, Span): + # a span is given to use as a parent + ot_parent_context = ot_parent.context + dd_parent = ot_parent._dd_span + elif ot_parent is not None and isinstance(ot_parent, SpanContext): + # a span context is given to use to find the parent ddspan + dd_parent = ot_parent._dd_context + elif ot_parent is None: + # user wants to create a new parent span we don't have to do + # anything + pass + else: + raise TypeError("invalid span configuration given") + + # create a new otspan and ddspan using the ddtracer and associate it + # with the new otspan + ddspan = self._dd_tracer.start_span( + name=operation_name, # type: ignore[arg-type] + child_of=dd_parent, + service=self._service_name, + activate=False, + span_api=SPAN_API_OPENTRACING, + ) + + # set the start time if one is specified + ddspan.start = start_time or ddspan.start + + otspan = Span(self, ot_parent_context, operation_name) # type: ignore[arg-type] + # sync up the OT span with the DD span + otspan._associate_dd_span(ddspan) + + if tags is not None: + for k in tags: + # Make sure we set the tags on the otspan to ensure that the special compatibility tags + # are handled correctly (resource name, span type, sampling priority, etc). + otspan.set_tag(k, tags[k]) + + return otspan + + @property + def active_span(self): + # type: () -> Optional[Span] + """Retrieves the active span from the opentracing scope manager + + Falls back to using the datadog active span if one is not found. This + allows opentracing users to use datadog instrumentation. 
+ """ + scope = self._scope_manager.active + if scope: + return scope.span + else: + dd_span = self._dd_tracer.current_span() + ot_span = None # type: Optional[Span] + if dd_span: + ot_span = Span(self, None, dd_span.name) + ot_span._associate_dd_span(dd_span) + return ot_span + + def inject(self, span_context, format, carrier): # noqa: A002 + # type: (SpanContext, str, Dict[str, str]) -> None + """Injects a span context into a carrier. + + :param span_context: span context to inject. + :param format: format to encode the span context with. + :param carrier: the carrier of the encoded span context. + """ + propagator = self._propagators.get(format, None) + + if propagator is None: + raise opentracing.UnsupportedFormatException + + propagator.inject(span_context, carrier) + + def extract(self, format, carrier): # noqa: A002 + # type: (str, Dict[str, str]) -> SpanContext + """Extracts a span context from a carrier. + + :param format: format that the carrier is encoded with. + :param carrier: the carrier to extract from. + """ + propagator = self._propagators.get(format, None) + + if propagator is None: + raise opentracing.UnsupportedFormatException + + # we have to manually activate the returned context from a distributed + # trace + ot_span_ctx = propagator.extract(carrier) + dd_span_ctx = ot_span_ctx._dd_context + self._dd_tracer.context_provider.activate(dd_span_ctx) + return ot_span_ctx + + def get_log_correlation_context(self): + # type: () -> Dict[str, str] + """Retrieves the data used to correlate a log with the current active trace. + Generates a dictionary for custom logging instrumentation including the trace id and + span id of the current active span, as well as the configured service, version, and environment names. + If there is no active span, a dictionary with an empty string for each value will be returned. 
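`inject()` and `extract()` delegate to per-format propagators, and `extract()` also activates the remote context so that spans started afterwards parent correctly. A sketch of round-tripping a context through HTTP headers, assuming ddtrace and opentracing are installed (the `carrier` dict stands in for real request headers):

```python
from opentracing.propagation import Format

from ddtrace.opentracer import Tracer

tracer = Tracer(service_name="example-service")

with tracer.start_active_span("client.request") as scope:
    carrier = {}  # e.g. outgoing HTTP headers
    tracer.inject(scope.span.context, Format.HTTP_HEADERS, carrier)

# On the receiving side, extract the context and continue the distributed trace.
remote_ctx = tracer.extract(Format.HTTP_HEADERS, carrier)
with tracer.start_active_span("server.request", child_of=remote_ctx):
    pass
```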
+ """ + return self._dd_tracer.get_log_correlation_context() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/utils.py new file mode 100644 index 0000000..9cce535 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/opentracer/utils.py @@ -0,0 +1,60 @@ +from opentracing import ScopeManager # noqa:F401 + +from ddtrace.provider import BaseContextProvider # noqa:F401 + + +# DEV: If `asyncio` or `gevent` are unavailable we do not throw an error, +# `context_provider` will just not be set and we'll get an `AttributeError` instead + + +def get_context_provider_for_scope_manager(scope_manager): + # type: (ScopeManager) -> BaseContextProvider + """Returns the context_provider to use with a given scope_manager.""" + + scope_manager_type = type(scope_manager).__name__ + + # avoid having to import scope managers which may not be compatible + # with the version of python being used + if scope_manager_type == "AsyncioScopeManager": + import ddtrace.contrib.asyncio + + dd_context_provider = ddtrace.contrib.asyncio.context_provider # type: BaseContextProvider + elif scope_manager_type == "GeventScopeManager": + import ddtrace.contrib.gevent + + dd_context_provider = ddtrace.contrib.gevent.context_provider + else: + from ddtrace.provider import DefaultContextProvider + + dd_context_provider = DefaultContextProvider() + + _patch_scope_manager(scope_manager, dd_context_provider) + + return dd_context_provider + + +def _patch_scope_manager(scope_manager, context_provider): + # type: (ScopeManager, BaseContextProvider) -> None + """ + Patches a scope manager so that any time a span is activated + it'll also activate the underlying ddcontext with the underlying + datadog context provider. + + This allows opentracing users to rely on ddtrace.contrib patches and + have them parent correctly. + + :param scope_manager: Something that implements `opentracing.ScopeManager` + :param context_provider: Something that implements `datadog.provider.BaseContextProvider` + """ + if getattr(scope_manager, "_datadog_patch", False): + return + scope_manager._datadog_patch = True + + old_method = scope_manager.activate + + def _patched_activate(*args, **kwargs): + otspan = kwargs.get("span", args[0]) + context_provider.activate(otspan._dd_context) + return old_method(*args, **kwargs) + + scope_manager.activate = _patched_activate diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/pin.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/pin.py new file mode 100644 index 0000000..f2ec40d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/pin.py @@ -0,0 +1,205 @@ +from typing import TYPE_CHECKING # noqa:F401 +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 +from typing import Optional # noqa:F401 + +import ddtrace + +from .internal.logger import get_logger +from .vendor import wrapt + + +log = get_logger(__name__) + + +# To set attributes on wrapt proxy objects use this prefix: +# http://wrapt.readthedocs.io/en/latest/wrappers.html +_DD_PIN_NAME = "_datadog_pin" +_DD_PIN_PROXY_NAME = "_self_" + _DD_PIN_NAME + + +class Pin(object): + """Pin (a.k.a Patch INfo) is a small class which is used to + set tracing metadata on a particular traced connection. + This is useful if you wanted to, say, trace two different + database clusters. 
+ + >>> conn = sqlite.connect('/tmp/user.db') + >>> # Override a pin for a specific connection + >>> pin = Pin.override(conn, service='user-db') + >>> conn = sqlite.connect('/tmp/image.db') + """ + + __slots__ = ["tags", "tracer", "_target", "_config", "_initialized"] + + def __init__( + self, + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + _config=None, # type: Optional[Dict[str, Any]] + ): + # type: (...) -> None + tracer = tracer or ddtrace.tracer + self.tags = tags + self.tracer = tracer + self._target = None # type: Optional[int] + # keep the configuration attribute internal because the + # public API to access it is not the Pin class + self._config = _config or {} # type: Dict[str, Any] + # [Backward compatibility]: service argument updates the `Pin` config + self._config["service_name"] = service + self._initialized = True + + @property + def service(self): + # type: () -> str + """Backward compatibility: accessing to `pin.service` returns the underlying + configuration value. + """ + return self._config["service_name"] + + def __setattr__(self, name, value): + if getattr(self, "_initialized", False) and name != "_target": + raise AttributeError("can't mutate a pin, use override() or clone() instead") + super(Pin, self).__setattr__(name, value) + + def __repr__(self): + return "Pin(service=%s, tags=%s, tracer=%s)" % (self.service, self.tags, self.tracer) + + @staticmethod + def _find(*objs): + # type: (Any) -> Optional[Pin] + """ + Return the first :class:`ddtrace.pin.Pin` found on any of the provided objects or `None` if none were found + + + >>> pin = Pin._find(wrapper, instance, conn) + + :param objs: The objects to search for a :class:`ddtrace.pin.Pin` on + :type objs: List of objects + :rtype: :class:`ddtrace.pin.Pin`, None + :returns: The first found :class:`ddtrace.pin.Pin` or `None` is none was found + """ + for obj in objs: + pin = Pin.get_from(obj) + if pin: + return pin + return None + + @staticmethod + def get_from(obj): + # type: (Any) -> Optional[Pin] + """Return the pin associated with the given object. If a pin is attached to + `obj` but the instance is not the owner of the pin, a new pin is cloned and + attached. This ensures that a pin inherited from a class is a copy for the new + instance, avoiding that a specific instance overrides other pins values. + + >>> pin = Pin.get_from(conn) + + :param obj: The object to look for a :class:`ddtrace.pin.Pin` on + :type obj: object + :rtype: :class:`ddtrace.pin.Pin`, None + :returns: :class:`ddtrace.pin.Pin` associated with the object, or None if none was found + """ + if hasattr(obj, "__getddpin__"): + return obj.__getddpin__() + + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + pin = getattr(obj, pin_name, None) + # detect if the PIN has been inherited from a class + if pin is not None and pin._target != id(obj): + pin = pin.clone() + pin.onto(obj) + return pin + + @classmethod + def override( + cls, + obj, # type: Any + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + # type: (...) -> None + """Override an object with the given attributes. + + That's the recommended way to customize an already instrumented client, without + losing existing attributes. 
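`Pin.override()` and `Pin.get_from()` are the public way to attach per-object tracing metadata, and pins are immutable once created, so changes always go through `override()` or `clone()`. A minimal sketch using a hypothetical stand-in object (assumes ddtrace is installed; `FakeClient` is not part of the library):

```python
from ddtrace.pin import Pin


class FakeClient(object):
    """Hypothetical stand-in for an instrumented client object."""


client = FakeClient()
Pin.override(client, service="user-db", tags={"cluster": "primary"})

pin = Pin.get_from(client)
assert pin is not None and pin.service == "user-db"

# Pins cannot be mutated in place; override() clones and re-attaches instead.
Pin.override(client, service="image-db")
assert Pin.get_from(client).service == "image-db"
```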
+ + >>> conn = sqlite.connect('/tmp/user.db') + >>> # Override a pin for a specific connection + >>> Pin.override(conn, service='user-db') + """ + if not obj: + return + + pin = cls.get_from(obj) + if pin is None: + Pin(service=service, tags=tags, tracer=tracer).onto(obj) + else: + pin.clone(service=service, tags=tags, tracer=tracer).onto(obj) + + def enabled(self): + # type: () -> bool + """Return true if this pin's tracer is enabled.""" + return bool(self.tracer) and self.tracer.enabled + + def onto(self, obj, send=True): + # type: (Any, bool) -> None + """Patch this pin onto the given object. If send is true, it will also + queue the metadata to be sent to the server. + """ + # Actually patch it on the object. + try: + if hasattr(obj, "__setddpin__"): + return obj.__setddpin__(self) + + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + + # set the target reference; any get_from, clones and retarget the new PIN + self._target = id(obj) + if self.service: + ddtrace.config._add_extra_service(self.service) + return setattr(obj, pin_name, self) + except AttributeError: + log.debug("can't pin onto object. skipping", exc_info=True) + + def remove_from(self, obj): + # type: (Any) -> None + # Remove pin from the object. + try: + pin_name = _DD_PIN_PROXY_NAME if isinstance(obj, wrapt.ObjectProxy) else _DD_PIN_NAME + + pin = Pin.get_from(obj) + if pin is not None: + delattr(obj, pin_name) + except AttributeError: + log.debug("can't remove pin from object. skipping", exc_info=True) + + def clone( + self, + service=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + tracer=None, + ): + # type: (...) -> Pin + """Return a clone of the pin with the given attributes replaced.""" + # do a shallow copy of Pin dicts + if not tags and self.tags: + tags = self.tags.copy() + + # we use a copy instead of a deepcopy because we expect configurations + # to have only a root level dictionary without nested objects. 
Using + # deepcopy introduces a big overhead: + # + # copy: 0.00654911994934082 + # deepcopy: 0.2787208557128906 + config = self._config.copy() + + return Pin( + service=service or self.service, + tags=tags, + tracer=tracer or self.tracer, # do not clone the Tracer + _config=config, + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/__init__.py new file mode 100644 index 0000000..3361b57 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/__init__.py @@ -0,0 +1,21 @@ +import sys + +from ddtrace.profiling import _build + +from .profiler import Profiler # noqa:F401 + + +def _not_compatible_abi(): + raise ImportError( + "Python ABI is not compatible, you need to recompile this module.\n" + "Reinstall it with the following command:\n" + " pip install --no-binary ddtrace ddtrace" + ) + + +if (3, 7) < _build.compiled_with <= (3, 7, 3): + if sys.version_info[:3] > (3, 7, 3): + _not_compatible_abi() +elif (3, 7, 3) < _build.compiled_with < (3, 8): + if (3, 7) < sys.version_info[:3] <= (3, 7, 3): + _not_compatible_abi() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_asyncio.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_asyncio.py new file mode 100644 index 0000000..eebcf2c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_asyncio.py @@ -0,0 +1,65 @@ +# -*- encoding: utf-8 -*- +from functools import partial +import sys +from types import ModuleType # noqa:F401 +import typing # noqa:F401 + +from ddtrace.internal.module import ModuleWatchdog +from ddtrace.internal.utils import get_argument_value +from ddtrace.internal.wrapping import wrap + +from . import _threading + + +THREAD_LINK = None # type: typing.Optional[_threading._ThreadLink] + + +def current_task(loop=None): + return None + + +def all_tasks(loop=None): + return [] + + +def _task_get_name(task): + return "Task-%d" % id(task) + + +@ModuleWatchdog.after_module_imported("asyncio") +def _(asyncio): + # type: (ModuleType) -> None + global THREAD_LINK + + if hasattr(asyncio, "current_task"): + globals()["current_task"] = asyncio.current_task + elif hasattr(asyncio.Task, "current_task"): + globals()["current_task"] = asyncio.Task.current_task + + if hasattr(asyncio, "all_tasks"): + globals()["all_tasks"] = asyncio.all_tasks + elif hasattr(asyncio.Task, "all_tasks"): + globals()["all_tasks"] = asyncio.Task.all_tasks + + if hasattr(asyncio.Task, "get_name"): + # `get_name` is only available in Python ≥ 3.8 + globals()["_task_get_name"] = lambda task: task.get_name() + + if THREAD_LINK is None: + THREAD_LINK = _threading._ThreadLink() + + @partial(wrap, sys.modules["asyncio.events"].BaseDefaultEventLoopPolicy.set_event_loop) + def _(f, args, kwargs): + try: + return f(*args, **kwargs) + finally: + THREAD_LINK.clear_threads(set(sys._current_frames().keys())) + loop = get_argument_value(args, kwargs, 1, "loop") + if loop is not None: + THREAD_LINK.link_object(loop) + + +def get_event_loop_for_thread(thread_id): + global THREAD_LINK + + return THREAD_LINK.get_object(thread_id) if THREAD_LINK is not None else None diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..6bcf2cd Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.cpython-311-x86_64-linux-gnu.so differ diff --git 
a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.pyi new file mode 100644 index 0000000..88f43fc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_build.pyi @@ -0,0 +1,3 @@ +import typing + +compiled_with: typing.Tuple[int, int, int] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..06c483e Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.pyi new file mode 100644 index 0000000..5cd331f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_threading.pyi @@ -0,0 +1,11 @@ +import typing + +def get_thread_name(thread_id: int) -> str: ... +def get_thread_native_id(thread_id: int) -> int: ... + +_T = typing.TypeVar("_T") + +class _ThreadLink(typing.Generic[_T]): + def link_object(self, obj: _T) -> None: ... + def clear_threads(self, existing_thread_ids: typing.Set[int]) -> None: ... + def get_object(self, thread_id: int) -> typing.Optional[_T]: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_traceback.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_traceback.py new file mode 100644 index 0000000..130df7c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/_traceback.py @@ -0,0 +1,5 @@ +import traceback + + +def format_exception(e): + return traceback.format_exception_only(type(e), e)[0].rstrip() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/auto.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/auto.py new file mode 100644 index 0000000..9801608 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/auto.py @@ -0,0 +1,9 @@ +"""Automatically starts a collector when imported.""" +from ddtrace.internal.logger import get_logger +from ddtrace.profiling.bootstrap import sitecustomize # noqa:F401 + + +log = get_logger(__name__) +log.debug("Enabling the profiler by auto import") + +start_profiler = sitecustomize.start_profiler diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/bootstrap/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/bootstrap/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/bootstrap/sitecustomize.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/bootstrap/sitecustomize.py new file mode 100644 index 0000000..c186df0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/bootstrap/sitecustomize.py @@ -0,0 +1,15 @@ +# -*- encoding: utf-8 -*- +"""Bootstrapping code that is run when using `ddtrace.profiling.auto`.""" +from ddtrace.profiling import bootstrap +from ddtrace.profiling import profiler + + +def start_profiler(): + if hasattr(bootstrap, "profiler"): + bootstrap.profiler.stop() + # Export the profiler so we can introspect it if needed + bootstrap.profiler = profiler.Profiler() + bootstrap.profiler.start() + + +start_profiler() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/__init__.py new file mode 100644 index 0000000..e9da783 --- /dev/null +++ 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/__init__.py @@ -0,0 +1,81 @@ +# -*- encoding: utf-8 -*- +import typing # noqa:F401 + +import attr + +from ddtrace.internal import periodic +from ddtrace.internal import service +from ddtrace.settings.profiling import config + +from .. import event # noqa:F401 + + +class CollectorError(Exception): + pass + + +class CollectorUnavailable(CollectorError): + pass + + +@attr.s +class Collector(service.Service): + """A profile collector.""" + + recorder = attr.ib() + + @staticmethod + def snapshot(): + """Take a snapshot of collected data. + + :return: A list of sample list to push in the recorder. + """ + + +@attr.s(slots=True) +class PeriodicCollector(Collector, periodic.PeriodicService): + """A collector that needs to run periodically.""" + + def periodic(self): + # type: (...) -> None + """Collect events and push them into the recorder.""" + for events in self.collect(): + self.recorder.push_events(events) + + def collect(self): + # type: (...) -> typing.Iterable[typing.Iterable[event.Event]] + """Collect the actual data. + + :return: A list of event list to push in the recorder. + """ + raise NotImplementedError + + +@attr.s +class CaptureSampler(object): + """Determine the events that should be captured based on a sampling percentage.""" + + capture_pct = attr.ib(default=100) + _counter = attr.ib(default=0, init=False) + + @capture_pct.validator + def capture_pct_validator(self, attribute, value): + if value < 0 or value > 100: + raise ValueError("Capture percentage should be between 0 and 100 included") + + def capture(self): + self._counter += self.capture_pct + if self._counter >= 100: + self._counter -= 100 + return True + return False + + +def _create_capture_sampler(collector): + return CaptureSampler(collector.capture_pct) + + +@attr.s +class CaptureSamplerCollector(Collector): + capture_pct = attr.ib(type=float, default=config.capture_pct) + _capture_sampler = attr.ib(default=attr.Factory(_create_capture_sampler, takes_self=True), init=False, repr=False) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_lock.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_lock.py new file mode 100644 index 0000000..519946f --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_lock.py @@ -0,0 +1,230 @@ +from __future__ import absolute_import + +import _thread +import abc +import os.path +import sys +import typing + +import attr + +from ddtrace.internal import compat +from ddtrace.profiling import _threading +from ddtrace.profiling import collector +from ddtrace.profiling import event +from ddtrace.profiling.collector import _task +from ddtrace.profiling.collector import _traceback +from ddtrace.settings.profiling import config +from ddtrace.vendor import wrapt + + +@event.event_class +class LockEventBase(event.StackBasedEvent): + """Base Lock event.""" + + lock_name = attr.ib(default="", type=str) + sampling_pct = attr.ib(default=0, type=int) + + +@event.event_class +class LockAcquireEvent(LockEventBase): + """A lock has been acquired.""" + + wait_time_ns = attr.ib(default=0, type=int) + + +@event.event_class +class LockReleaseEvent(LockEventBase): + """A lock has been released.""" + + locked_for_ns = attr.ib(default=0, type=int) + + +def _current_thread(): + # type: (...) -> typing.Tuple[int, str] + thread_id = _thread.get_ident() + return thread_id, _threading.get_thread_name(thread_id) + + +# We need to know if wrapt is compiled in C or not. 
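`CaptureSampler` above spreads captures deterministically: the counter gains `capture_pct` per call and a capture fires each time it crosses 100, so over many calls roughly `capture_pct`% of them return True. A standalone copy of that accumulator (repeated here only so it runs without importing the vendored package) to sanity-check the rate:

```python
class CaptureSampler(object):
    """Standalone copy of the accumulator shown above, for illustration only."""

    def __init__(self, capture_pct=100):
        if not 0 <= capture_pct <= 100:
            raise ValueError("Capture percentage should be between 0 and 100 included")
        self.capture_pct = capture_pct
        self._counter = 0

    def capture(self):
        self._counter += self.capture_pct
        if self._counter >= 100:
            self._counter -= 100
            return True
        return False


sampler = CaptureSampler(capture_pct=25)
captured = sum(sampler.capture() for _ in range(1000))
assert captured == 250  # exactly 25% of the calls are captured, evenly spread
```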
If it's not using the C module, then the wrappers function will +# appear in the stack trace and we need to hide it. +if os.environ.get("WRAPT_DISABLE_EXTENSIONS"): + WRAPT_C_EXT = False +else: + try: + import ddtrace.vendor.wrapt._wrappers as _w # noqa: F401 + except ImportError: + WRAPT_C_EXT = False + else: + WRAPT_C_EXT = True + del _w + + +class _ProfiledLock(wrapt.ObjectProxy): + ACQUIRE_EVENT_CLASS = LockAcquireEvent + RELEASE_EVENT_CLASS = LockReleaseEvent + + def __init__(self, wrapped, recorder, tracer, max_nframes, capture_sampler, endpoint_collection_enabled): + wrapt.ObjectProxy.__init__(self, wrapped) + self._self_recorder = recorder + self._self_tracer = tracer + self._self_max_nframes = max_nframes + self._self_capture_sampler = capture_sampler + self._self_endpoint_collection_enabled = endpoint_collection_enabled + frame = sys._getframe(2 if WRAPT_C_EXT else 3) + code = frame.f_code + self._self_name = "%s:%d" % (os.path.basename(code.co_filename), frame.f_lineno) + + def __aenter__(self): + return self.__wrapped__.__aenter__() + + def __aexit__(self, *args, **kwargs): + return self.__wrapped__.__aexit__(*args, **kwargs) + + def acquire(self, *args, **kwargs): + if not self._self_capture_sampler.capture(): + return self.__wrapped__.acquire(*args, **kwargs) + + start = compat.monotonic_ns() + try: + return self.__wrapped__.acquire(*args, **kwargs) + finally: + try: + end = self._self_acquired_at = compat.monotonic_ns() + thread_id, thread_name = _current_thread() + task_id, task_name, task_frame = _task.get_task(thread_id) + + if task_frame is None: + frame = sys._getframe(1) + else: + frame = task_frame + + frames, nframes = _traceback.pyframe_to_frames(frame, self._self_max_nframes) + + event = self.ACQUIRE_EVENT_CLASS( + lock_name=self._self_name, + frames=frames, + nframes=nframes, + thread_id=thread_id, + thread_name=thread_name, + task_id=task_id, + task_name=task_name, + wait_time_ns=end - start, + sampling_pct=self._self_capture_sampler.capture_pct, + ) + + if self._self_tracer is not None: + event.set_trace_info(self._self_tracer.current_span(), self._self_endpoint_collection_enabled) + + self._self_recorder.push_event(event) + except Exception: + pass # nosec + + def release(self, *args, **kwargs): + # type (typing.Any, typing.Any) -> None + try: + return self.__wrapped__.release(*args, **kwargs) + finally: + try: + if hasattr(self, "_self_acquired_at"): + try: + end = compat.monotonic_ns() + thread_id, thread_name = _current_thread() + task_id, task_name, task_frame = _task.get_task(thread_id) + + if task_frame is None: + frame = sys._getframe(1) + else: + frame = task_frame + + frames, nframes = _traceback.pyframe_to_frames(frame, self._self_max_nframes) + + event = self.RELEASE_EVENT_CLASS( + lock_name=self._self_name, + frames=frames, + nframes=nframes, + thread_id=thread_id, + thread_name=thread_name, + task_id=task_id, + task_name=task_name, + locked_for_ns=end - self._self_acquired_at, + sampling_pct=self._self_capture_sampler.capture_pct, + ) + + if self._self_tracer is not None: + event.set_trace_info( + self._self_tracer.current_span(), self._self_endpoint_collection_enabled + ) + + self._self_recorder.push_event(event) + finally: + del self._self_acquired_at + except Exception: + pass # nosec + + acquire_lock = acquire + + +class FunctionWrapper(wrapt.FunctionWrapper): + # Override the __get__ method: whatever happens, _allocate_lock is always considered by Python like a "static" + # method, even when used as a class attribute. 
Python never tried to "bind" it to a method, because it sees it is a + # builtin function. Override default wrapt behavior here that tries to detect bound method. + def __get__(self, instance, owner=None): + return self + + +@attr.s +class LockCollector(collector.CaptureSamplerCollector): + """Record lock usage.""" + + nframes = attr.ib(type=int, default=config.max_frames) + endpoint_collection_enabled = attr.ib(type=bool, default=config.endpoint_collection) + + tracer = attr.ib(default=None) + + _original = attr.ib(init=False, repr=False, type=typing.Any, cmp=False) + + @abc.abstractmethod + def _get_original(self): + # type: (...) -> typing.Any + pass + + @abc.abstractmethod + def _set_original( + self, + value, # type: typing.Any + ): + # type: (...) -> None + pass + + def _start_service(self): + # type: (...) -> None + """Start collecting lock usage.""" + self.patch() + super(LockCollector, self)._start_service() + + def _stop_service(self): + # type: (...) -> None + """Stop collecting lock usage.""" + super(LockCollector, self)._stop_service() + self.unpatch() + + def patch(self): + # type: (...) -> None + """Patch the module for tracking lock allocation.""" + # We only patch the lock from the `threading` module. + # Nobody should use locks from `_thread`; if they do so, then it's deliberate and we don't profile. + self.original = self._get_original() + + def _allocate_lock(wrapped, instance, args, kwargs): + lock = wrapped(*args, **kwargs) + return self.PROFILED_LOCK_CLASS( + lock, self.recorder, self.tracer, self.nframes, self._capture_sampler, self.endpoint_collection_enabled + ) + + self._set_original(FunctionWrapper(self.original, _allocate_lock)) + + def unpatch(self): + # type: (...) -> None + """Unpatch the threading module for tracking lock allocation.""" + self._set_original(self.original) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..5001a32 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.pyi new file mode 100644 index 0000000..c5864b0 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_memalloc.pyi @@ -0,0 +1,15 @@ +import typing + +from .. import event + +# (filename, line number, function name) +FrameType = event.DDFrame +StackType = event.StackTraceType + +# (stack, nframe, thread_id) +TracebackType = typing.Tuple[StackType, int, int] + +def start(max_nframe: int, max_events: int, heap_sample_size: int) -> None: ... +def stop() -> None: ... +def heap() -> typing.List[typing.Tuple[TracebackType, int]]: ... +def iter_events() -> typing.Iterator[typing.Tuple[TracebackType, int]]: ... 
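`LockCollector.patch()` above swaps the lock factory for one that returns wrapt proxies, so acquire/release timing can be recorded without touching user code. A stripped-down sketch of the same pattern, counting acquisitions instead of emitting profiling events (uses the standalone `wrapt` package rather than the vendored copy; `CountingLock` is hypothetical):

```python
import threading

import wrapt

_original_lock = threading.Lock


class CountingLock(wrapt.ObjectProxy):
    """Hypothetical proxy mirroring _ProfiledLock's shape, but only counting acquires."""

    def __init__(self, wrapped):
        wrapt.ObjectProxy.__init__(self, wrapped)
        self._self_acquisitions = 0  # the _self_ prefix keeps state on the proxy, not the lock

    def acquire(self, *args, **kwargs):
        self._self_acquisitions += 1
        return self.__wrapped__.acquire(*args, **kwargs)


def _allocate_lock(*args, **kwargs):
    return CountingLock(_original_lock(*args, **kwargs))


threading.Lock = _allocate_lock      # patch, as LockCollector.patch() does
try:
    lock = threading.Lock()
    lock.acquire()
    lock.release()
    assert lock._self_acquisitions == 1
finally:
    threading.Lock = _original_lock  # unpatch, as LockCollector.unpatch() does
```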
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..71af180 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.pyi new file mode 100644 index 0000000..f26c5d6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_task.pyi @@ -0,0 +1,7 @@ +import types +import typing + +def get_task( + thread_id: int, +) -> typing.Tuple[typing.Optional[int], typing.Optional[str], typing.Optional[types.FrameType]]: ... +def list_tasks() -> typing.List[typing.Tuple[int, str, types.FrameType]]: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..6d6036d Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.pyi new file mode 100644 index 0000000..d962ed5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/_traceback.pyi @@ -0,0 +1,9 @@ +import types +import typing + +from .. import event + +def traceback_to_frames( + traceback: types.TracebackType, max_nframes: int +) -> typing.Tuple[typing.List[event.DDFrame], int]: ... +def pyframe_to_frames(frame: types.FrameType, max_nframes: int) -> typing.Tuple[typing.List[event.DDFrame], int]: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/asyncio.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/asyncio.py new file mode 100644 index 0000000..dd7eba5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/asyncio.py @@ -0,0 +1,49 @@ +import typing # noqa:F401 + +import attr + +from .. import collector +from .. import event +from . import _lock + + +@event.event_class +class AsyncioLockAcquireEvent(_lock.LockAcquireEvent): + """An asyncio.Lock has been acquired.""" + + +@event.event_class +class AsyncioLockReleaseEvent(_lock.LockReleaseEvent): + """An asyncio.Lock has been released.""" + + +class _ProfiledAsyncioLock(_lock._ProfiledLock): + ACQUIRE_EVENT_CLASS = AsyncioLockAcquireEvent + RELEASE_EVENT_CLASS = AsyncioLockReleaseEvent + + +@attr.s +class AsyncioLockCollector(_lock.LockCollector): + """Record asyncio.Lock usage.""" + + PROFILED_LOCK_CLASS = _ProfiledAsyncioLock + + def _start_service(self): + # type: (...) -> None + """Start collecting lock usage.""" + try: + import asyncio + except ImportError as e: + raise collector.CollectorUnavailable(e) + self._asyncio_module = asyncio + return super(AsyncioLockCollector, self)._start_service() + + def _get_original(self): + # type: (...) -> typing.Any + return self._asyncio_module.Lock + + def _set_original( + self, value # type: typing.Any + ): + # type: (...) 
-> None + self._asyncio_module.Lock = value # type: ignore[misc] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/memalloc.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/memalloc.py new file mode 100644 index 0000000..5ab3bae --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/memalloc.py @@ -0,0 +1,206 @@ +# -*- encoding: utf-8 -*- +import logging +from math import ceil +import os +import threading +import typing # noqa:F401 + +import attr + + +try: + from ddtrace.profiling.collector import _memalloc +except ImportError: + _memalloc = None # type: ignore[assignment] + +from ddtrace.internal.datadog.profiling import ddup +from ddtrace.profiling import _threading +from ddtrace.profiling import collector +from ddtrace.profiling import event +from ddtrace.settings.profiling import config + + +LOG = logging.getLogger(__name__) + + +@event.event_class +class MemoryAllocSampleEvent(event.StackBasedEvent): + """A sample storing memory allocation tracked.""" + + size = attr.ib(default=0, type=int) + """Allocation size in bytes.""" + + capture_pct = attr.ib(default=None, type=float) + """The capture percentage.""" + + nevents = attr.ib(default=0, type=int) + """The total number of allocation events sampled.""" + + +@event.event_class +class MemoryHeapSampleEvent(event.StackBasedEvent): + """A sample storing memory allocation tracked.""" + + size = attr.ib(default=0, type=int) + """Allocation size in bytes.""" + + sample_size = attr.ib(default=0, type=int) + """The sampling size.""" + + +@attr.s +class MemoryCollector(collector.PeriodicCollector): + """Memory allocation collector.""" + + _DEFAULT_MAX_EVENTS = 16 + _DEFAULT_INTERVAL = 0.5 + + # Arbitrary interval to empty the _memalloc event buffer + _interval = attr.ib(default=_DEFAULT_INTERVAL, repr=False) + + # TODO make this dynamic based on the 1. interval and 2. the max number of events allowed in the Recorder + _max_events = attr.ib(type=int, default=config.memory.events_buffer) + max_nframe = attr.ib(default=config.max_frames, type=int) + heap_sample_size = attr.ib(type=int, default=config.heap.sample_size) + ignore_profiler = attr.ib(default=config.ignore_profiler, type=bool) + _export_libdd_enabled = attr.ib(type=bool, default=config.export.libdd_enabled) + _export_py_enabled = attr.ib(type=bool, default=config.export.py_enabled) + + def _start_service(self): + # type: (...) -> None + """Start collecting memory profiles.""" + if _memalloc is None: + raise collector.CollectorUnavailable + + try: + _memalloc.start(self.max_nframe, self._max_events, self.heap_sample_size) + except RuntimeError: + # This happens on fork because we don't call the shutdown hook since + # the thread responsible for doing so is not running in the child + # process. Therefore we stop and restart the collector instead. + _memalloc.stop() + _memalloc.start(self.max_nframe, self._max_events, self.heap_sample_size) + + super(MemoryCollector, self)._start_service() + + @staticmethod + def on_shutdown(): + # type: () -> None + if _memalloc is not None: + try: + _memalloc.stop() + except RuntimeError: + pass + + def _get_thread_id_ignore_set(self): + # type: () -> typing.Set[int] + # This method is not perfect and prone to race condition in theory, but very little in practice. + # Anyhow it's not a big deal — it's a best effort feature. 
+ return { + thread.ident + for thread in threading.enumerate() + if getattr(thread, "_ddtrace_profiling_ignore", False) and thread.ident is not None + } + + def snapshot(self): + thread_id_ignore_set = self._get_thread_id_ignore_set() + + try: + events = _memalloc.heap() + except RuntimeError: + # DEV: This can happen if either _memalloc has not been started or has been stopped. + LOG.debug("Unable to collect heap events from process %d", os.getpid(), exc_info=True) + return tuple() + + if self._export_libdd_enabled: + for (frames, nframes, thread_id), size in events: + if not self.ignore_profiler or thread_id not in thread_id_ignore_set: + ddup.start_sample(nframes) + ddup.push_heap(size) + ddup.push_threadinfo( + thread_id, _threading.get_thread_native_id(thread_id), _threading.get_thread_name(thread_id) + ) + try: + for frame in frames: + ddup.push_frame(frame.function_name, frame.file_name, 0, frame.lineno) + ddup.flush_sample() + except AttributeError: + # DEV: This might happen if the memalloc sofile is unlinked and relinked without module + # re-initialization. ddup re-initializes the sample on `start_sample()`, so no + # need to cleanup. + LOG.debug("Invalid state detected in memalloc module, suppressing profile") + + if self._export_py_enabled: + return ( + tuple( + MemoryHeapSampleEvent( + thread_id=thread_id, + thread_name=_threading.get_thread_name(thread_id), + thread_native_id=_threading.get_thread_native_id(thread_id), + frames=frames, + nframes=nframes, + size=size, + sample_size=self.heap_sample_size, + ) + for (frames, nframes, thread_id), size in events + if not self.ignore_profiler or thread_id not in thread_id_ignore_set + ), + ) + else: + return tuple() + + def collect(self): + # TODO: The event timestamp is slightly off since it's going to be the time we copy the data from the + # _memalloc buffer to our Recorder. This is fine for now, but we might want to store the nanoseconds + # timestamp in C and then return it via iter_events. + try: + events_iter, count, alloc_count = _memalloc.iter_events() + except RuntimeError: + # DEV: This can happen if either _memalloc has not been started or has been stopped. + LOG.debug("Unable to collect memory events from process %d", os.getpid(), exc_info=True) + return tuple() + + # `events_iter` is a consumable view into `iter_events()`; copy it so we can send it to both pyprof + # and libdatadog. This will be changed if/when we ever return to only a single possible exporter + events = list(events_iter) + capture_pct = 100 * count / alloc_count + thread_id_ignore_set = self._get_thread_id_ignore_set() + + if self._export_libdd_enabled: + for (frames, nframes, thread_id), size, _domain in events: + if thread_id in thread_id_ignore_set: + continue + ddup.start_sample(nframes) + ddup.push_alloc(int((ceil(size) * alloc_count) / count), count) # Roundup to help float precision + ddup.push_threadinfo( + thread_id, _threading.get_thread_native_id(thread_id), _threading.get_thread_name(thread_id) + ) + try: + for frame in frames: + ddup.push_frame(frame.function_name, frame.file_name, 0, frame.lineno) + ddup.flush_sample() + except AttributeError: + # DEV: This might happen if the memalloc sofile is unlinked and relinked without module + # re-initialization. ddup re-initializes the sample on `start_sample()`, so no + # need to cleanup. 
+ LOG.debug("Invalid state detected in memalloc module, suppressing profile") + + if self._export_py_enabled: + return ( + tuple( + MemoryAllocSampleEvent( + thread_id=thread_id, + thread_name=_threading.get_thread_name(thread_id), + thread_native_id=_threading.get_thread_native_id(thread_id), + frames=frames, + nframes=nframes, + size=size, + capture_pct=capture_pct, + nevents=alloc_count, + ) + for (frames, nframes, thread_id), size, domain in events + if not self.ignore_profiler or thread_id not in thread_id_ignore_set + ), + ) + else: + return [] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..9dfd382 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.pyi new file mode 100644 index 0000000..e1181c9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack.pyi @@ -0,0 +1,7 @@ +import typing + +import ddtrace +from ddtrace.profiling import collector + +class StackCollector(collector.PeriodicCollector): + tracer: typing.Optional[ddtrace.Tracer] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack_event.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack_event.py new file mode 100644 index 0000000..3ec24af --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/stack_event.py @@ -0,0 +1,22 @@ +import typing + +import attr + +from ddtrace.profiling import event + + +@event.event_class +class StackSampleEvent(event.StackBasedEvent): + """A sample storing executions frames for a thread.""" + + # Wall clock + wall_time_ns = attr.ib(default=0, type=int) + # CPU time in nanoseconds + cpu_time_ns = attr.ib(default=0, type=int) + + +@event.event_class +class StackExceptionSampleEvent(event.StackBasedEvent): + """A a sample storing raised exceptions and their stack frames.""" + + exc_type = attr.ib(default=None, type=typing.Optional[str]) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/threading.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/threading.py new file mode 100644 index 0000000..0e66589 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/collector/threading.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import + +import threading +import typing # noqa:F401 + +import attr + +from .. import event +from . import _lock + + +@event.event_class +class ThreadingLockAcquireEvent(_lock.LockAcquireEvent): + """A threading.Lock has been acquired.""" + + +@event.event_class +class ThreadingLockReleaseEvent(_lock.LockReleaseEvent): + """A threading.Lock has been released.""" + + +class _ProfiledThreadingLock(_lock._ProfiledLock): + ACQUIRE_EVENT_CLASS = ThreadingLockAcquireEvent + RELEASE_EVENT_CLASS = ThreadingLockReleaseEvent + + +@attr.s +class ThreadingLockCollector(_lock.LockCollector): + """Record threading.Lock usage.""" + + PROFILED_LOCK_CLASS = _ProfiledThreadingLock + + def _get_original(self): + # type: (...) -> typing.Any + return threading.Lock + + def _set_original( + self, value # type: typing.Any + ): + # type: (...) 
-> None + threading.Lock = value # type: ignore[misc] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/event.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/event.py new file mode 100644 index 0000000..12c63b3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/event.py @@ -0,0 +1,76 @@ +from collections import namedtuple +import typing + +import attr + +from ddtrace import span as ddspan # noqa:F401 +from ddtrace.internal import compat + + +_T = typing.TypeVar("_T") + +DDFrame = namedtuple("DDFrame", ["file_name", "lineno", "function_name", "class_name"]) +StackTraceType = typing.List[DDFrame] + + +def event_class( + klass, # type: typing.Type[_T] +): + # type: (...) -> typing.Type[_T] + return attr.s(slots=True)(klass) + + +@event_class +class Event(object): + """An event happening at a point in time.""" + + timestamp = attr.ib(factory=compat.time_ns) + + @property + def name(self): + # type: (...) -> str + """Name of the event.""" + return self.__class__.__name__ + + +@event_class +class TimedEvent(Event): + """An event that has a duration.""" + + duration = attr.ib(default=None) + + +@event_class +class SampleEvent(Event): + """An event representing a sample gathered from the system.""" + + sampling_period = attr.ib(default=None) + + +@event_class +class StackBasedEvent(SampleEvent): + thread_id = attr.ib(default=None, type=typing.Optional[int]) + thread_name = attr.ib(default=None, type=typing.Optional[str]) + thread_native_id = attr.ib(default=None, type=typing.Optional[int]) + task_id = attr.ib(default=None, type=typing.Optional[int]) + task_name = attr.ib(default=None, type=typing.Optional[str]) + frames = attr.ib(default=None, type=StackTraceType) + nframes = attr.ib(default=0, type=int) + local_root_span_id = attr.ib(default=None, type=typing.Optional[int]) + span_id = attr.ib(default=None, type=typing.Optional[int]) + trace_type = attr.ib(default=None, type=typing.Optional[str]) + trace_resource_container = attr.ib(default=None, type=typing.List[str]) + + def set_trace_info( + self, + span, # type: typing.Optional[ddspan.Span] + endpoint_collection_enabled, # type: bool + ): + # type: (...) -> None + if span: + self.span_id = span.span_id + if span._local_root is not None: + self.local_root_span_id = span._local_root.span_id + self.trace_type = span._local_root.span_type + if endpoint_collection_enabled: + self.trace_resource_container = span._local_root._resource diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/__init__.py new file mode 100644 index 0000000..c18bd20 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/__init__.py @@ -0,0 +1,46 @@ +import typing + +import attr + + +if typing.TYPE_CHECKING: # pragma: no cover + from .. import recorder # noqa:F401 + + +class ExportError(Exception): + pass + + +@attr.s +class Exporter(object): + """Exporter base class.""" + + def export( + self, + events, # type: recorder.EventsType + start_time_ns, # type: int + end_time_ns, # type: int + ): + # type: (...) -> typing.Any + """Export events. + + :param events: List of events to export. + :param start_time_ns: The start time of recording. + :param end_time_ns: The end time of recording. 
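An exporter receives the recorder's collected events plus the recording window; `NullExporter` below is the trivial implementation. As a sketch, a hypothetical exporter that only tallies events per type (assumes ddtrace and attrs are installed, and that `events` maps event classes to the samples collected for each class, as `recorder.EventsType` suggests):

```python
import attr

from ddtrace.profiling import exporter


@attr.s
class CountingExporter(exporter.Exporter):
    """Hypothetical exporter that counts events per type instead of sending them anywhere."""

    counts = attr.ib(factory=dict)

    def export(self, events, start_time_ns, end_time_ns):
        # `events` is assumed to map event classes to lists of collected samples.
        for event_class, samples in events.items():
            name = event_class.__name__
            self.counts[name] = self.counts.get(name, 0) + len(samples)
        return self.counts
```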
+ """ + raise NotImplementedError + + +@attr.s +class NullExporter(Exporter): + """Exporter that does nothing.""" + + def export( + self, + events, # type: recorder.EventsType + start_time_ns, # type: int + end_time_ns, # type: int + ): + # type: (...) -> None + """Discard events.""" + pass diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/file.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/file.py new file mode 100644 index 0000000..416aa25 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/file.py @@ -0,0 +1,39 @@ +import gzip +import os +import typing # noqa:F401 + +import attr + +from ddtrace.profiling.exporter import pprof + +from .. import recorder # noqa:F401 + + +@attr.s +class PprofFileExporter(pprof.PprofExporter): + """PProf file exporter.""" + + prefix = attr.ib(default="profile", type=str) + _increment = attr.ib(default=1, init=False, repr=False, type=int) + + def export( + self, + events, # type: recorder.EventsType + start_time_ns, # type: int + end_time_ns, # type: int + ): + # type: (...) -> typing.Tuple[pprof.pprof_ProfileType, typing.List[pprof.Package]] + """Export events to pprof file. + + The file name is based on the prefix passed to init. The process ID number and type of export is then added as a + suffix. + + :param events: The event dictionary from a `ddtrace.profiling.recorder.Recorder`. + :param start_time_ns: The start time of recording. + :param end_time_ns: The end time of recording. + """ + profile, libs = super(PprofFileExporter, self).export(events, start_time_ns, end_time_ns) + with gzip.open(self.prefix + (".%d.%d" % (os.getpid(), self._increment)), "wb") as f: + f.write(profile.SerializeToString()) + self._increment += 1 + return profile, libs diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/http.py new file mode 100644 index 0000000..2529a1c --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/http.py @@ -0,0 +1,261 @@ +# -*- encoding: utf-8 -*- +import binascii +import datetime +import gzip +from http import client as http_client +import io +import itertools +import json +import os +import platform +import typing +from typing import Any # noqa:F401 +from typing import Dict # noqa:F401 + +import attr + +import ddtrace +from ddtrace.ext.git import COMMIT_SHA +from ddtrace.ext.git import MAIN_PACKAGE +from ddtrace.ext.git import REPOSITORY_URL +from ddtrace.internal import agent +from ddtrace.internal import compat +from ddtrace.internal import gitmetadata +from ddtrace.internal import runtime +from ddtrace.internal.processor.endpoint_call_counter import EndpointCallCounterProcessor +from ddtrace.internal.runtime import container +from ddtrace.internal.utils.formats import parse_tags_str +from ddtrace.internal.utils.retry import fibonacci_backoff_with_jitter +from ddtrace.profiling import exporter +from ddtrace.profiling import recorder # noqa:F401 +from ddtrace.profiling.exporter import pprof +from ddtrace.settings.profiling import config + + +HOSTNAME = platform.node() +PYTHON_IMPLEMENTATION = platform.python_implementation() +PYTHON_VERSION = platform.python_version() + + +@attr.s +class PprofHTTPExporter(pprof.PprofExporter): + """PProf HTTP exporter.""" + + RETRY_ATTEMPTS = 3 + + # repeat this to please mypy + enable_code_provenance = attr.ib(default=True, type=bool) + + endpoint = attr.ib(type=str, factory=agent.get_trace_url) + api_key = attr.ib(default=None, 
type=typing.Optional[str]) + # Do not use the default agent timeout: it is too short, the agent is just a unbuffered proxy and the profiling + # backend is not as fast as the tracer one. + timeout = attr.ib(default=config.api_timeout, type=float) + service = attr.ib(default=None, type=typing.Optional[str]) + env = attr.ib(default=None, type=typing.Optional[str]) + version = attr.ib(default=None, type=typing.Optional[str]) + tags = attr.ib(factory=dict, type=typing.Dict[str, str]) + max_retry_delay = attr.ib(default=None) + _container_info = attr.ib(factory=container.get_container_info, repr=False) + endpoint_path = attr.ib(default="/profiling/v1/input") + + endpoint_call_counter_span_processor = attr.ib(default=None, type=EndpointCallCounterProcessor) + + def _update_git_metadata_tags(self, tags): + """ + Update profiler tags with git metadata + """ + # clean tags, because values will be combined and inserted back in the same way as for tracer + gitmetadata.clean_tags(tags) + repository_url, commit_sha, main_package = gitmetadata.get_git_tags() + if repository_url: + tags[REPOSITORY_URL] = repository_url + if commit_sha: + tags[COMMIT_SHA] = commit_sha + if main_package: + tags[MAIN_PACKAGE] = main_package + return tags + + def __attrs_post_init__(self): + if self.max_retry_delay is None: + self.max_retry_delay = self.timeout * 3 + + self._upload = fibonacci_backoff_with_jitter( + initial_wait=self.max_retry_delay / (1.618 ** (self.RETRY_ATTEMPTS - 1)), + attempts=self.RETRY_ATTEMPTS, + )(self._upload) + + tags = { + k: compat.ensure_text(v, "utf-8") + for k, v in itertools.chain( + self._update_git_metadata_tags(parse_tags_str(os.environ.get("DD_TAGS"))).items(), + config.tags.items(), + ) + } + tags.update(self.tags) + tags.update( + { + "host": HOSTNAME, + "language": "python", + "runtime": PYTHON_IMPLEMENTATION, + "runtime_version": PYTHON_VERSION, + "profiler_version": ddtrace.__version__, + } + ) + if self.version: + tags["version"] = self.version + + if self.env: + tags["env"] = self.env + + self.tags = tags + + @staticmethod + def _encode_multipart_formdata( + event, # type: bytes + data, # type: typing.List[typing.Dict[str, bytes]] + ): + # type: (...) -> typing.Tuple[bytes, bytes] + boundary = binascii.hexlify(os.urandom(16)) + + # The body that is generated is very sensitive and must perfectly match what the server expects. + body = ( + (b"--%s\r\n" % boundary) + + b'Content-Disposition: form-data; name="event"; filename="event.json"\r\n' + + b"Content-Type: application/json\r\n\r\n" + + event + + b"\r\n" + + b"".join( + (b"--%s\r\n" % boundary) + + (b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (item["name"], item["filename"])) + + (b"Content-Type: %s\r\n\r\n" % (item["content-type"])) + + item["data"] + + b"\r\n" + for item in data + ) + + b"--%s--\r\n" % boundary + ) + + content_type = b"multipart/form-data; boundary=%s" % boundary + + return content_type, body + + def _get_tags( + self, + service, # type: str + ): + # type: (...) -> str + tags = { + "service": service, + "runtime-id": runtime.get_runtime_id(), + } + + tags.update(self.tags) + + return ",".join(tag + ":" + value for tag, value in tags.items()) + + def export( + self, + events, # type: recorder.EventsType + start_time_ns, # type: int + end_time_ns, # type: int + ): + # type: (...) -> typing.Tuple[pprof.pprof_ProfileType, typing.List[pprof.Package]] + """Export events to an HTTP endpoint. + + :param events: The event dictionary from a `ddtrace.profiling.recorder.Recorder`. 
+ :param start_time_ns: The start time of recording. + :param end_time_ns: The end time of recording. + """ + if self.api_key: + headers = { + "DD-API-KEY": self.api_key.encode(), + } + else: + headers = {} + + if self._container_info and self._container_info.container_id: + headers["Datadog-Container-Id"] = self._container_info.container_id + + profile, libs = super(PprofHTTPExporter, self).export(events, start_time_ns, end_time_ns) + pprof = io.BytesIO() + with gzip.GzipFile(fileobj=pprof, mode="wb") as gz: + gz.write(profile.SerializeToString()) + + data = [ + { + "name": b"auto", + "filename": b"auto.pprof", + "content-type": b"application/octet-stream", + "data": pprof.getvalue(), + } + ] + + if self.enable_code_provenance: + code_provenance = io.BytesIO() + with gzip.GzipFile(fileobj=code_provenance, mode="wb") as gz: + gz.write( + json.dumps( + { + "v1": libs, + } + ).encode("utf-8") + ) + data.append( + { + "name": b"code-provenance", + "filename": b"code-provenance.json", + "content-type": b"application/json", + "data": code_provenance.getvalue(), + } + ) + + service = self.service or os.path.basename(profile.string_table[profile.mapping[0].filename]) + event = { + "version": "4", + "family": "python", + "attachments": [item["filename"].decode("utf-8") for item in data], + "tags_profiler": self._get_tags(service), + "start": (datetime.datetime.utcfromtimestamp(start_time_ns / 1e9).replace(microsecond=0).isoformat() + "Z"), + "end": (datetime.datetime.utcfromtimestamp(end_time_ns / 1e9).replace(microsecond=0).isoformat() + "Z"), + } # type: Dict[str, Any] + + if self.endpoint_call_counter_span_processor is not None: + event["endpoint_counts"] = self.endpoint_call_counter_span_processor.reset() + + content_type, body = self._encode_multipart_formdata( + event=json.dumps(event).encode("utf-8"), + data=data, + ) + headers["Content-Type"] = content_type + + client = agent.get_connection(self.endpoint, self.timeout) + self._upload(client, self.endpoint_path, body, headers) + + return profile, libs + + def _upload(self, client, path, body, headers): + try: + client.request("POST", path, body=body, headers=headers) + response = client.getresponse() + response.read() # reading is mandatory + except (http_client.HTTPException, EnvironmentError) as e: + raise exporter.ExportError("HTTP upload request failed: %s" % e) + finally: + client.close() + + if 200 <= response.status < 300: + return + + if 500 <= response.status < 600: + raise RuntimeError("Server returned %d" % response.status) + + if response.status == 400: + raise exporter.ExportError("Server returned 400, check your API key") + elif response.status == 404 and not self.api_key: + raise exporter.ExportError( + "Datadog Agent is not accepting profiles. 
" + "Agent-based profiling deployments require Datadog Agent >= 7.20" + ) + + raise exporter.ExportError("HTTP Error %d" % response.status) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.cpython-311-x86_64-linux-gnu.so new file mode 100755 index 0000000..39bba34 Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.cpython-311-x86_64-linux-gnu.so differ diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.proto b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.proto new file mode 100644 index 0000000..2659026 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.proto @@ -0,0 +1,212 @@ +// Copyright 2016 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Profile is a common stacktrace profile format. +// +// Measurements represented with this format should follow the +// following conventions: +// +// - Consumers should treat unset optional fields as if they had been +// set with their default value. +// +// - When possible, measurements should be stored in "unsampled" form +// that is most useful to humans. There should be enough +// information present to determine the original sampled values. +// +// - On-disk, the serialized proto must be gzip-compressed. +// +// - The profile is represented as a set of samples, where each sample +// references a sequence of locations, and where each location belongs +// to a mapping. +// - There is a N->1 relationship from sample.location_id entries to +// locations. For every sample.location_id entry there must be a +// unique Location with that id. +// - There is an optional N->1 relationship from locations to +// mappings. For every nonzero Location.mapping_id there must be a +// unique Mapping with that id. + +syntax = "proto3"; + +package perftools.profiles; + +option java_package = "com.google.perftools.profiles"; +option java_outer_classname = "ProfileProto"; + +message Profile { + // A description of the samples associated with each Sample.value. + // For a cpu profile this might be: + // [["cpu","nanoseconds"]] or [["wall","seconds"]] or [["syscall","count"]] + // For a heap profile, this might be: + // [["allocations","count"], ["space","bytes"]], + // If one of the values represents the number of events represented + // by the sample, by convention it should be at index 0 and use + // sample_type.unit == "count". + repeated ValueType sample_type = 1; + // The set of samples recorded in this profile. + repeated Sample sample = 2; + // Mapping from address ranges to the image/binary/library mapped + // into that address range. mapping[0] will be the main binary. 
+ repeated Mapping mapping = 3; + // Useful program location + repeated Location location = 4; + // Functions referenced by locations + repeated Function function = 5; + // A common table for strings referenced by various messages. + // string_table[0] must always be "". + repeated string string_table = 6; + // frames with Function.function_name fully matching the following + // regexp will be dropped from the samples, along with their successors. + int64 drop_frames = 7; // Index into string table. + // frames with Function.function_name fully matching the following + // regexp will be kept, even if it matches drop_functions. + int64 keep_frames = 8; // Index into string table. + + // The following fields are informational, do not affect + // interpretation of results. + + // Time of collection (UTC) represented as nanoseconds past the epoch. + int64 time_nanos = 9; + // Duration of the profile, if a duration makes sense. + int64 duration_nanos = 10; + // The kind of events between sampled occurrences. + // e.g [ "cpu","cycles" ] or [ "heap","bytes" ] + ValueType period_type = 11; + // The number of events between sampled occurrences. + int64 period = 12; + // Freeform text associated to the profile. + repeated int64 comment = 13; // Indices into string table. + // Index into the string table of the type of the preferred sample + // value. If unset, clients should default to the last sample value. + int64 default_sample_type = 14; +} + +// ValueType describes the semantics and measurement units of a value. +message ValueType { + int64 type = 1; // Index into string table. + int64 unit = 2; // Index into string table. +} + +// Each Sample records values encountered in some program +// context. The program context is typically a stack trace, perhaps +// augmented with auxiliary information like the thread-id, some +// indicator of a higher level request being handled etc. +message Sample { + // The ids recorded here correspond to a Profile.location.id. + // The leaf is at location_id[0]. + repeated uint64 location_id = 1; + // The type and unit of each value is defined by the corresponding + // entry in Profile.sample_type. All samples must have the same + // number of values, the same as the length of Profile.sample_type. + // When aggregating multiple samples into a single sample, the + // result has a list of values that is the elemntwise sum of the + // lists of the originals. + repeated int64 value = 2; + // label includes additional context for this sample. It can include + // things like a thread id, allocation size, etc + repeated Label label = 3; +} + +message Label { + int64 key = 1; // Index into string table + + // At most one of the following must be present + int64 str = 2; // Index into string table + int64 num = 3; + + // Should only be present when num is present. + // Specifies the units of num. + // Use arbitrary string (for example, "requests") as a custom count unit. + // If no unit is specified, consumer may apply heuristic to deduce the unit. + // Consumers may also interpret units like "bytes" and "kilobytes" as memory + // units and units like "seconds" and "nanoseconds" as time units, + // and apply appropriate unit conversions to these. + int64 num_unit = 4; // Index into string table +} + +message Mapping { + // Unique nonzero id for the mapping. + uint64 id = 1; + // Address at which the binary (or DLL) is loaded into memory. + uint64 memory_start = 2; + // The limit of the address range occupied by this mapping. 
+ uint64 memory_limit = 3; + // Offset in the binary that corresponds to the first mapped address. + uint64 file_offset = 4; + // The object this entry is loaded from. This can be a filename on + // disk for the main binary and shared libraries, or virtual + // abstractions like "[vdso]". + int64 filename = 5; // Index into string table + // A string that uniquely identifies a particular program version + // with high probability. E.g., for binaries generated by GNU tools, + // it could be the contents of the .note.gnu.build-id field. + int64 build_id = 6; // Index into string table + + // The following fields indicate the resolution of symbolic info. + bool has_functions = 7; + bool has_filenames = 8; + bool has_line_numbers = 9; + bool has_inline_frames = 10; +} + +// Describes function and line table debug information. +message Location { + // Unique nonzero id for the location. A profile could use + // instruction addresses or any integer sequence as ids. + uint64 id = 1; + // The id of the corresponding profile.Mapping for this location. + // It can be unset if the mapping is unknown or not applicable for + // this profile type. + uint64 mapping_id = 2; + // The instruction address for this location, if available. It + // should be within [Mapping.memory_start...Mapping.memory_limit] + // for the corresponding mapping. A non-leaf address may be in the + // middle of a call instruction. It is up to display tools to find + // the beginning of the instruction if necessary. + uint64 address = 3; + // Multiple line indicates this location has inlined functions, + // where the last entry represents the caller into which the + // preceding entries were inlined. + // + // E.g., if memcpy() is inlined into printf: + // line[0].function_name == "memcpy" + // line[1].function_name == "printf" + repeated Line line = 4; + // Provides an indication that multiple symbols map to this location's + // address, for example due to identical code folding by the linker. In that + // case the line information above represents one of the multiple + // symbols. This field must be recomputed when the symbolization state of the + // profile changes. + bool is_folded = 5; +} + +message Line { + // The id of the corresponding profile.Function for this line. + uint64 function_id = 1; + // Line number in source code. + int64 line = 2; +} + +message Function { + // Unique nonzero id for the function. + uint64 id = 1; + // Name of the function, in human-readable form if available. + int64 name = 2; // Index into string table + // Name of the function, as identified by the system. + // For instance, it can be a C++ mangled name. + int64 system_name = 3; // Index into string table + // Source file containing the function. + int64 filename = 4; // Index into string table + // Line number in source file. 
+ int64 start_line = 5; +} diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.pyi b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.pyi new file mode 100644 index 0000000..d9f58af --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof.pyi @@ -0,0 +1,138 @@ +import typing +from typing import Any + +from ddtrace.profiling import exporter +from ddtrace.profiling import recorder as recorder +from ddtrace.profiling.collector import _lock +from ddtrace.profiling.collector import memalloc +from ddtrace.profiling.collector import stack_event +from ddtrace.profiling.collector import threading as threading + +stdlib_path: Any +platstdlib_path: Any +purelib_path: Any +platlib_path: Any +STDLIB: Any + +class Package(typing.TypedDict): + name: str + version: str + kind: typing.Literal["library"] + paths: typing.List[str] + +class pprof_LocationType: + id: int + +class pprof_Mapping: + filename: int + +class pprof_ProfileType: + id: int + string_table: typing.Dict[int, str] + mapping: typing.List[pprof_Mapping] + def SerializeToString(self) -> bytes: ... + +class pprof_FunctionType: + id: int + +HashableStackTraceType: Any + +class _PprofConverter: + def convert_stack_event( + self, + thread_id: str, + thread_native_id: str, + thread_name: str, + task_id: str, + task_name: str, + local_root_span_id: str, + span_id: str, + trace_resource: str, + trace_type: str, + frames: HashableStackTraceType, # noqa + nframes: int, + samples: typing.List[stack_event.StackSampleEvent], + ) -> None: ... + def convert_memalloc_event( + self, + thread_id: str, + thread_native_id: str, + thread_name: str, + frames: HashableStackTraceType, # noqa + nframes: int, + events: typing.List[memalloc.MemoryAllocSampleEvent], + ) -> None: ... + def convert_memalloc_heap_event(self, event: memalloc.MemoryHeapSampleEvent) -> None: ... + def convert_lock_acquire_event( + self, + lock_name: str, + thread_id: str, + thread_name: str, + task_id: str, + task_name: str, + local_root_span_id: str, + span_id: str, + trace_resource: str, + trace_type: str, + frames: HashableStackTraceType, # noqa + nframes: int, + events: typing.List[_lock.LockAcquireEvent], + sampling_ratio: float, + ) -> None: ... + def convert_lock_release_event( + self, + lock_name: str, + thread_id: str, + thread_name: str, + task_id: str, + task_name: str, + local_root_span_id: str, + span_id: str, + trace_resource: str, + trace_type: str, + frames: HashableStackTraceType, # noqa + nframes: int, + events: typing.List[_lock.LockReleaseEvent], + sampling_ratio: float, + ) -> None: ... + def convert_stack_exception_event( + self, + thread_id: str, + thread_native_id: str, + thread_name: str, + local_root_span_id: str, + span_id: str, + trace_resource: str, + trace_type: str, + frames: HashableStackTraceType, # noqa + nframes: int, + exc_type_name: str, + events: typing.List[stack_event.StackExceptionSampleEvent], + ) -> None: ... + def __init__( + self, + functions: Any, + locations: Any, + string_table: Any, + last_location_id: Any, + last_func_id: Any, + location_values: Any, + ) -> None: ... + def __lt__(self, other: Any) -> Any: ... + def __le__(self, other: Any) -> Any: ... + def __gt__(self, other: Any) -> Any: ... + def __ge__(self, other: Any) -> Any: ... 
+ +StackEventGroupKey: Any +LockEventGroupKey: Any +StackExceptionEventGroupKey: Any + +class PprofExporter(exporter.Exporter): + def export( + self, events: recorder.EventsType, start_time_ns: int, end_time_ns: int + ) -> typing.Tuple[pprof_ProfileType, typing.List[Package]]: ... + def __init__(self) -> None: ... + def __lt__(self, other: Any) -> Any: ... + def __le__(self, other: Any) -> Any: ... + def __gt__(self, other: Any) -> Any: ... + def __ge__(self, other: Any) -> Any: ... diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_312_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_312_pb2.py new file mode 100644 index 0000000..6de31a5 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_312_pb2.py @@ -0,0 +1,620 @@ +# type: ignore + +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: ddtrace/profiling/exporter/pprof.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='ddtrace/profiling/exporter/pprof.proto', + package='perftools.profiles', + syntax='proto3', + serialized_options=b'\n\035com.google.perftools.profilesB\014ProfileProto', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n&ddtrace/profiling/exporter/pprof.proto\x12\x12perftools.profiles\"\xd5\x03\n\x07Profile\x12\x32\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x1d.perftools.profiles.ValueType\x12*\n\x06sample\x18\x02 \x03(\x0b\x32\x1a.perftools.profiles.Sample\x12,\n\x07mapping\x18\x03 \x03(\x0b\x32\x1b.perftools.profiles.Mapping\x12.\n\x08location\x18\x04 \x03(\x0b\x32\x1c.perftools.profiles.Location\x12.\n\x08\x66unction\x18\x05 \x03(\x0b\x32\x1c.perftools.profiles.Function\x12\x14\n\x0cstring_table\x18\x06 \x03(\t\x12\x13\n\x0b\x64rop_frames\x18\x07 \x01(\x03\x12\x13\n\x0bkeep_frames\x18\x08 \x01(\x03\x12\x12\n\ntime_nanos\x18\t \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\n \x01(\x03\x12\x32\n\x0bperiod_type\x18\x0b \x01(\x0b\x32\x1d.perftools.profiles.ValueType\x12\x0e\n\x06period\x18\x0c \x01(\x03\x12\x0f\n\x07\x63omment\x18\r \x03(\x03\x12\x1b\n\x13\x64\x65\x66\x61ult_sample_type\x18\x0e \x01(\x03\"\'\n\tValueType\x12\x0c\n\x04type\x18\x01 \x01(\x03\x12\x0c\n\x04unit\x18\x02 \x01(\x03\"V\n\x06Sample\x12\x13\n\x0blocation_id\x18\x01 \x03(\x04\x12\r\n\x05value\x18\x02 \x03(\x03\x12(\n\x05label\x18\x03 \x03(\x0b\x32\x19.perftools.profiles.Label\"@\n\x05Label\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0b\n\x03str\x18\x02 \x01(\x03\x12\x0b\n\x03num\x18\x03 \x01(\x03\x12\x10\n\x08num_unit\x18\x04 \x01(\x03\"\xdd\x01\n\x07Mapping\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_start\x18\x02 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x03 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x04 \x01(\x04\x12\x10\n\x08\x66ilename\x18\x05 \x01(\x03\x12\x10\n\x08\x62uild_id\x18\x06 \x01(\x03\x12\x15\n\rhas_functions\x18\x07 \x01(\x08\x12\x15\n\rhas_filenames\x18\x08 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\t \x01(\x08\x12\x19\n\x11has_inline_frames\x18\n \x01(\x08\"v\n\x08Location\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x12\n\nmapping_id\x18\x02 \x01(\x04\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\x04\x12&\n\x04line\x18\x04 
\x03(\x0b\x32\x18.perftools.profiles.Line\x12\x11\n\tis_folded\x18\x05 \x01(\x08\")\n\x04Line\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\x04\x12\x0c\n\x04line\x18\x02 \x01(\x03\"_\n\x08\x46unction\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\x03\x12\x13\n\x0bsystem_name\x18\x03 \x01(\x03\x12\x10\n\x08\x66ilename\x18\x04 \x01(\x03\x12\x12\n\nstart_line\x18\x05 \x01(\x03\x42-\n\x1d\x63om.google.perftools.profilesB\x0cProfileProtob\x06proto3' +) + + + + +_PROFILE = _descriptor.Descriptor( + name='Profile', + full_name='perftools.profiles.Profile', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='sample_type', full_name='perftools.profiles.Profile.sample_type', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='sample', full_name='perftools.profiles.Profile.sample', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='mapping', full_name='perftools.profiles.Profile.mapping', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='location', full_name='perftools.profiles.Profile.location', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='function', full_name='perftools.profiles.Profile.function', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='string_table', full_name='perftools.profiles.Profile.string_table', index=5, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='drop_frames', full_name='perftools.profiles.Profile.drop_frames', index=6, + number=7, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='keep_frames', full_name='perftools.profiles.Profile.keep_frames', index=7, + number=8, type=3, 
cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='time_nanos', full_name='perftools.profiles.Profile.time_nanos', index=8, + number=9, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='duration_nanos', full_name='perftools.profiles.Profile.duration_nanos', index=9, + number=10, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='period_type', full_name='perftools.profiles.Profile.period_type', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='period', full_name='perftools.profiles.Profile.period', index=11, + number=12, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='comment', full_name='perftools.profiles.Profile.comment', index=12, + number=13, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='default_sample_type', full_name='perftools.profiles.Profile.default_sample_type', index=13, + number=14, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=63, + serialized_end=532, +) + + +_VALUETYPE = _descriptor.Descriptor( + name='ValueType', + full_name='perftools.profiles.ValueType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='perftools.profiles.ValueType.type', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='unit', full_name='perftools.profiles.ValueType.unit', index=1, + 
number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=534, + serialized_end=573, +) + + +_SAMPLE = _descriptor.Descriptor( + name='Sample', + full_name='perftools.profiles.Sample', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='location_id', full_name='perftools.profiles.Sample.location_id', index=0, + number=1, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='value', full_name='perftools.profiles.Sample.value', index=1, + number=2, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='label', full_name='perftools.profiles.Sample.label', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=575, + serialized_end=661, +) + + +_LABEL = _descriptor.Descriptor( + name='Label', + full_name='perftools.profiles.Label', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='perftools.profiles.Label.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='str', full_name='perftools.profiles.Label.str', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='num', full_name='perftools.profiles.Label.num', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='num_unit', full_name='perftools.profiles.Label.num_unit', index=3, + number=4, type=3, 
cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=663, + serialized_end=727, +) + + +_MAPPING = _descriptor.Descriptor( + name='Mapping', + full_name='perftools.profiles.Mapping', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Mapping.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='memory_start', full_name='perftools.profiles.Mapping.memory_start', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='memory_limit', full_name='perftools.profiles.Mapping.memory_limit', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='file_offset', full_name='perftools.profiles.Mapping.file_offset', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='filename', full_name='perftools.profiles.Mapping.filename', index=4, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='build_id', full_name='perftools.profiles.Mapping.build_id', index=5, + number=6, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='has_functions', full_name='perftools.profiles.Mapping.has_functions', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='has_filenames', full_name='perftools.profiles.Mapping.has_filenames', index=7, + number=8, 
type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='has_line_numbers', full_name='perftools.profiles.Mapping.has_line_numbers', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='has_inline_frames', full_name='perftools.profiles.Mapping.has_inline_frames', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=730, + serialized_end=951, +) + + +_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='perftools.profiles.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Location.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='mapping_id', full_name='perftools.profiles.Location.mapping_id', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='address', full_name='perftools.profiles.Location.address', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='line', full_name='perftools.profiles.Location.line', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_folded', full_name='perftools.profiles.Location.is_folded', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=953, + serialized_end=1071, +) + + +_LINE = _descriptor.Descriptor( + name='Line', + full_name='perftools.profiles.Line', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='function_id', full_name='perftools.profiles.Line.function_id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='line', full_name='perftools.profiles.Line.line', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1073, + serialized_end=1114, +) + + +_FUNCTION = _descriptor.Descriptor( + name='Function', + full_name='perftools.profiles.Function', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Function.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='name', full_name='perftools.profiles.Function.name', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='system_name', full_name='perftools.profiles.Function.system_name', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='filename', full_name='perftools.profiles.Function.filename', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='start_line', full_name='perftools.profiles.Function.start_line', index=4, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1116, + serialized_end=1211, +) + +_PROFILE.fields_by_name['sample_type'].message_type = _VALUETYPE +_PROFILE.fields_by_name['sample'].message_type = _SAMPLE +_PROFILE.fields_by_name['mapping'].message_type = _MAPPING +_PROFILE.fields_by_name['location'].message_type = _LOCATION +_PROFILE.fields_by_name['function'].message_type = _FUNCTION +_PROFILE.fields_by_name['period_type'].message_type = _VALUETYPE +_SAMPLE.fields_by_name['label'].message_type = _LABEL +_LOCATION.fields_by_name['line'].message_type = _LINE +DESCRIPTOR.message_types_by_name['Profile'] = _PROFILE +DESCRIPTOR.message_types_by_name['ValueType'] = _VALUETYPE +DESCRIPTOR.message_types_by_name['Sample'] = _SAMPLE +DESCRIPTOR.message_types_by_name['Label'] = _LABEL +DESCRIPTOR.message_types_by_name['Mapping'] = _MAPPING +DESCRIPTOR.message_types_by_name['Location'] = _LOCATION +DESCRIPTOR.message_types_by_name['Line'] = _LINE +DESCRIPTOR.message_types_by_name['Function'] = _FUNCTION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Profile = _reflection.GeneratedProtocolMessageType('Profile', (_message.Message,), { + 'DESCRIPTOR' : _PROFILE, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Profile) + }) +_sym_db.RegisterMessage(Profile) + +ValueType = _reflection.GeneratedProtocolMessageType('ValueType', (_message.Message,), { + 'DESCRIPTOR' : _VALUETYPE, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.ValueType) + }) +_sym_db.RegisterMessage(ValueType) + +Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), { + 'DESCRIPTOR' : _SAMPLE, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Sample) + }) +_sym_db.RegisterMessage(Sample) + +Label = _reflection.GeneratedProtocolMessageType('Label', (_message.Message,), { + 'DESCRIPTOR' : _LABEL, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Label) + }) +_sym_db.RegisterMessage(Label) + +Mapping = _reflection.GeneratedProtocolMessageType('Mapping', (_message.Message,), { + 'DESCRIPTOR' : _MAPPING, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Mapping) + }) +_sym_db.RegisterMessage(Mapping) + +Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), { + 'DESCRIPTOR' : _LOCATION, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Location) + }) +_sym_db.RegisterMessage(Location) + +Line = _reflection.GeneratedProtocolMessageType('Line', (_message.Message,), { + 'DESCRIPTOR' : _LINE, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Line) + }) +_sym_db.RegisterMessage(Line) + +Function = _reflection.GeneratedProtocolMessageType('Function', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTION, + '__module__' : 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Function) + }) +_sym_db.RegisterMessage(Function) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_319_pb2.py 
b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_319_pb2.py new file mode 100644 index 0000000..c86b46a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_319_pb2.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: pprof.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0bpprof.proto\x12\x12perftools.profiles\"\xd5\x03\n\x07Profile\x12\x32\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x1d.perftools.profiles.ValueType\x12*\n\x06sample\x18\x02 \x03(\x0b\x32\x1a.perftools.profiles.Sample\x12,\n\x07mapping\x18\x03 \x03(\x0b\x32\x1b.perftools.profiles.Mapping\x12.\n\x08location\x18\x04 \x03(\x0b\x32\x1c.perftools.profiles.Location\x12.\n\x08\x66unction\x18\x05 \x03(\x0b\x32\x1c.perftools.profiles.Function\x12\x14\n\x0cstring_table\x18\x06 \x03(\t\x12\x13\n\x0b\x64rop_frames\x18\x07 \x01(\x03\x12\x13\n\x0bkeep_frames\x18\x08 \x01(\x03\x12\x12\n\ntime_nanos\x18\t \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\n \x01(\x03\x12\x32\n\x0bperiod_type\x18\x0b \x01(\x0b\x32\x1d.perftools.profiles.ValueType\x12\x0e\n\x06period\x18\x0c \x01(\x03\x12\x0f\n\x07\x63omment\x18\r \x03(\x03\x12\x1b\n\x13\x64\x65\x66\x61ult_sample_type\x18\x0e \x01(\x03\"\'\n\tValueType\x12\x0c\n\x04type\x18\x01 \x01(\x03\x12\x0c\n\x04unit\x18\x02 \x01(\x03\"V\n\x06Sample\x12\x13\n\x0blocation_id\x18\x01 \x03(\x04\x12\r\n\x05value\x18\x02 \x03(\x03\x12(\n\x05label\x18\x03 \x03(\x0b\x32\x19.perftools.profiles.Label\"@\n\x05Label\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0b\n\x03str\x18\x02 \x01(\x03\x12\x0b\n\x03num\x18\x03 \x01(\x03\x12\x10\n\x08num_unit\x18\x04 \x01(\x03\"\xdd\x01\n\x07Mapping\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_start\x18\x02 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x03 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x04 \x01(\x04\x12\x10\n\x08\x66ilename\x18\x05 \x01(\x03\x12\x10\n\x08\x62uild_id\x18\x06 \x01(\x03\x12\x15\n\rhas_functions\x18\x07 \x01(\x08\x12\x15\n\rhas_filenames\x18\x08 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\t \x01(\x08\x12\x19\n\x11has_inline_frames\x18\n \x01(\x08\"v\n\x08Location\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x12\n\nmapping_id\x18\x02 \x01(\x04\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\x04\x12&\n\x04line\x18\x04 \x03(\x0b\x32\x18.perftools.profiles.Line\x12\x11\n\tis_folded\x18\x05 \x01(\x08\")\n\x04Line\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\x04\x12\x0c\n\x04line\x18\x02 \x01(\x03\"_\n\x08\x46unction\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\x03\x12\x13\n\x0bsystem_name\x18\x03 \x01(\x03\x12\x10\n\x08\x66ilename\x18\x04 \x01(\x03\x12\x12\n\nstart_line\x18\x05 \x01(\x03\x42-\n\x1d\x63om.google.perftools.profilesB\x0cProfileProtob\x06proto3') + + + +_PROFILE = DESCRIPTOR.message_types_by_name['Profile'] +_VALUETYPE = DESCRIPTOR.message_types_by_name['ValueType'] +_SAMPLE = DESCRIPTOR.message_types_by_name['Sample'] +_LABEL = DESCRIPTOR.message_types_by_name['Label'] +_MAPPING = DESCRIPTOR.message_types_by_name['Mapping'] +_LOCATION = DESCRIPTOR.message_types_by_name['Location'] +_LINE = 
DESCRIPTOR.message_types_by_name['Line'] +_FUNCTION = DESCRIPTOR.message_types_by_name['Function'] +Profile = _reflection.GeneratedProtocolMessageType('Profile', (_message.Message,), { + 'DESCRIPTOR' : _PROFILE, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Profile) + }) +_sym_db.RegisterMessage(Profile) + +ValueType = _reflection.GeneratedProtocolMessageType('ValueType', (_message.Message,), { + 'DESCRIPTOR' : _VALUETYPE, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.ValueType) + }) +_sym_db.RegisterMessage(ValueType) + +Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), { + 'DESCRIPTOR' : _SAMPLE, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Sample) + }) +_sym_db.RegisterMessage(Sample) + +Label = _reflection.GeneratedProtocolMessageType('Label', (_message.Message,), { + 'DESCRIPTOR' : _LABEL, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Label) + }) +_sym_db.RegisterMessage(Label) + +Mapping = _reflection.GeneratedProtocolMessageType('Mapping', (_message.Message,), { + 'DESCRIPTOR' : _MAPPING, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Mapping) + }) +_sym_db.RegisterMessage(Mapping) + +Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), { + 'DESCRIPTOR' : _LOCATION, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Location) + }) +_sym_db.RegisterMessage(Location) + +Line = _reflection.GeneratedProtocolMessageType('Line', (_message.Message,), { + 'DESCRIPTOR' : _LINE, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Line) + }) +_sym_db.RegisterMessage(Line) + +Function = _reflection.GeneratedProtocolMessageType('Function', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTION, + '__module__' : 'pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Function) + }) +_sym_db.RegisterMessage(Function) + +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\035com.google.perftools.profilesB\014ProfileProto' + _PROFILE._serialized_start=36 + _PROFILE._serialized_end=505 + _VALUETYPE._serialized_start=507 + _VALUETYPE._serialized_end=546 + _SAMPLE._serialized_start=548 + _SAMPLE._serialized_end=634 + _LABEL._serialized_start=636 + _LABEL._serialized_end=700 + _MAPPING._serialized_start=703 + _MAPPING._serialized_end=924 + _LOCATION._serialized_start=926 + _LOCATION._serialized_end=1044 + _LINE._serialized_start=1046 + _LINE._serialized_end=1087 + _FUNCTION._serialized_start=1089 + _FUNCTION._serialized_end=1184 +# @@protoc_insertion_point(module_scope) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_3_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_3_pb2.py new file mode 100644 index 0000000..1591174 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_3_pb2.py @@ -0,0 +1,615 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: ddtrace/profiling/exporter/pprof.proto + +import sys + + +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pb2 +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='ddtrace.profiling/exporter/pprof.proto', + package='perftools.profiles', + syntax='proto3', + serialized_pb=_b('\n\x1e\x64\x64profile/exporter/pprof.proto\x12\x12perftools.profiles\"\xd5\x03\n\x07Profile\x12\x32\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x1d.perftools.profiles.ValueType\x12*\n\x06sample\x18\x02 \x03(\x0b\x32\x1a.perftools.profiles.Sample\x12,\n\x07mapping\x18\x03 \x03(\x0b\x32\x1b.perftools.profiles.Mapping\x12.\n\x08location\x18\x04 \x03(\x0b\x32\x1c.perftools.profiles.Location\x12.\n\x08\x66unction\x18\x05 \x03(\x0b\x32\x1c.perftools.profiles.Function\x12\x14\n\x0cstring_table\x18\x06 \x03(\t\x12\x13\n\x0b\x64rop_frames\x18\x07 \x01(\x03\x12\x13\n\x0bkeep_frames\x18\x08 \x01(\x03\x12\x12\n\ntime_nanos\x18\t \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\n \x01(\x03\x12\x32\n\x0bperiod_type\x18\x0b \x01(\x0b\x32\x1d.perftools.profiles.ValueType\x12\x0e\n\x06period\x18\x0c \x01(\x03\x12\x0f\n\x07\x63omment\x18\r \x03(\x03\x12\x1b\n\x13\x64\x65\x66\x61ult_sample_type\x18\x0e \x01(\x03\"\'\n\tValueType\x12\x0c\n\x04type\x18\x01 \x01(\x03\x12\x0c\n\x04unit\x18\x02 \x01(\x03\"V\n\x06Sample\x12\x13\n\x0blocation_id\x18\x01 \x03(\x04\x12\r\n\x05value\x18\x02 \x03(\x03\x12(\n\x05label\x18\x03 \x03(\x0b\x32\x19.perftools.profiles.Label\"@\n\x05Label\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0b\n\x03str\x18\x02 \x01(\x03\x12\x0b\n\x03num\x18\x03 \x01(\x03\x12\x10\n\x08num_unit\x18\x04 \x01(\x03\"\xdd\x01\n\x07Mapping\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_start\x18\x02 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x03 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x04 \x01(\x04\x12\x10\n\x08\x66ilename\x18\x05 \x01(\x03\x12\x10\n\x08\x62uild_id\x18\x06 \x01(\x03\x12\x15\n\rhas_functions\x18\x07 \x01(\x08\x12\x15\n\rhas_filenames\x18\x08 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\t \x01(\x08\x12\x19\n\x11has_inline_frames\x18\n \x01(\x08\"v\n\x08Location\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x12\n\nmapping_id\x18\x02 \x01(\x04\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\x04\x12&\n\x04line\x18\x04 \x03(\x0b\x32\x18.perftools.profiles.Line\x12\x11\n\tis_folded\x18\x05 \x01(\x08\")\n\x04Line\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\x04\x12\x0c\n\x04line\x18\x02 \x01(\x03\"_\n\x08\x46unction\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\x03\x12\x13\n\x0bsystem_name\x18\x03 \x01(\x03\x12\x10\n\x08\x66ilename\x18\x04 \x01(\x03\x12\x12\n\nstart_line\x18\x05 \x01(\x03\x42-\n\x1d\x63om.google.perftools.profilesB\x0cProfileProtob\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_PROFILE = _descriptor.Descriptor( + name='Profile', + full_name='perftools.profiles.Profile', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sample_type', full_name='perftools.profiles.Profile.sample_type', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sample', full_name='perftools.profiles.Profile.sample', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mapping', full_name='perftools.profiles.Profile.mapping', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='location', full_name='perftools.profiles.Profile.location', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='function', full_name='perftools.profiles.Profile.function', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_table', full_name='perftools.profiles.Profile.string_table', index=5, + number=6, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='drop_frames', full_name='perftools.profiles.Profile.drop_frames', index=6, + number=7, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keep_frames', full_name='perftools.profiles.Profile.keep_frames', index=7, + number=8, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time_nanos', full_name='perftools.profiles.Profile.time_nanos', index=8, + number=9, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='duration_nanos', full_name='perftools.profiles.Profile.duration_nanos', index=9, + number=10, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='period_type', full_name='perftools.profiles.Profile.period_type', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='period', full_name='perftools.profiles.Profile.period', index=11, + number=12, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='comment', full_name='perftools.profiles.Profile.comment', index=12, + number=13, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sample_type', full_name='perftools.profiles.Profile.default_sample_type', index=13, + number=14, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=55, + serialized_end=524, +) + + +_VALUETYPE = _descriptor.Descriptor( + name='ValueType', + full_name='perftools.profiles.ValueType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='perftools.profiles.ValueType.type', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unit', full_name='perftools.profiles.ValueType.unit', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=526, + serialized_end=565, +) + + +_SAMPLE = _descriptor.Descriptor( + name='Sample', + full_name='perftools.profiles.Sample', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='location_id', full_name='perftools.profiles.Sample.location_id', index=0, + number=1, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='perftools.profiles.Sample.value', index=1, + number=2, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label', full_name='perftools.profiles.Sample.label', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=567, + serialized_end=653, +) + + +_LABEL = _descriptor.Descriptor( + name='Label', + full_name='perftools.profiles.Label', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='perftools.profiles.Label.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='str', full_name='perftools.profiles.Label.str', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='num', full_name='perftools.profiles.Label.num', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='num_unit', full_name='perftools.profiles.Label.num_unit', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=655, + serialized_end=719, +) + + +_MAPPING = _descriptor.Descriptor( + name='Mapping', + full_name='perftools.profiles.Mapping', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Mapping.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='memory_start', full_name='perftools.profiles.Mapping.memory_start', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='memory_limit', full_name='perftools.profiles.Mapping.memory_limit', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='file_offset', full_name='perftools.profiles.Mapping.file_offset', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filename', full_name='perftools.profiles.Mapping.filename', index=4, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='build_id', full_name='perftools.profiles.Mapping.build_id', index=5, + number=6, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_functions', full_name='perftools.profiles.Mapping.has_functions', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_filenames', full_name='perftools.profiles.Mapping.has_filenames', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_line_numbers', full_name='perftools.profiles.Mapping.has_line_numbers', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_inline_frames', full_name='perftools.profiles.Mapping.has_inline_frames', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=722, + serialized_end=943, +) + + +_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='perftools.profiles.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Location.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mapping_id', full_name='perftools.profiles.Location.mapping_id', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='address', full_name='perftools.profiles.Location.address', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='line', full_name='perftools.profiles.Location.line', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='is_folded', full_name='perftools.profiles.Location.is_folded', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=945, + serialized_end=1063, +) + + +_LINE = _descriptor.Descriptor( + name='Line', + full_name='perftools.profiles.Line', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='function_id', full_name='perftools.profiles.Line.function_id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='line', full_name='perftools.profiles.Line.line', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1065, + serialized_end=1106, +) + + +_FUNCTION = _descriptor.Descriptor( + name='Function', + full_name='perftools.profiles.Function', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='perftools.profiles.Function.id', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='perftools.profiles.Function.name', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='system_name', full_name='perftools.profiles.Function.system_name', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filename', full_name='perftools.profiles.Function.filename', index=3, + number=4, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_line', full_name='perftools.profiles.Function.start_line', index=4, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1108, + serialized_end=1203, +) + +_PROFILE.fields_by_name['sample_type'].message_type = _VALUETYPE +_PROFILE.fields_by_name['sample'].message_type = _SAMPLE +_PROFILE.fields_by_name['mapping'].message_type = _MAPPING +_PROFILE.fields_by_name['location'].message_type = _LOCATION +_PROFILE.fields_by_name['function'].message_type = _FUNCTION +_PROFILE.fields_by_name['period_type'].message_type = _VALUETYPE +_SAMPLE.fields_by_name['label'].message_type = _LABEL +_LOCATION.fields_by_name['line'].message_type = _LINE +DESCRIPTOR.message_types_by_name['Profile'] = _PROFILE +DESCRIPTOR.message_types_by_name['ValueType'] = _VALUETYPE +DESCRIPTOR.message_types_by_name['Sample'] = _SAMPLE +DESCRIPTOR.message_types_by_name['Label'] = _LABEL +DESCRIPTOR.message_types_by_name['Mapping'] = _MAPPING +DESCRIPTOR.message_types_by_name['Location'] = _LOCATION +DESCRIPTOR.message_types_by_name['Line'] = _LINE +DESCRIPTOR.message_types_by_name['Function'] = _FUNCTION + +Profile = 
_reflection.GeneratedProtocolMessageType('Profile', (_message.Message,), dict( + DESCRIPTOR = _PROFILE, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Profile) + )) +_sym_db.RegisterMessage(Profile) + +ValueType = _reflection.GeneratedProtocolMessageType('ValueType', (_message.Message,), dict( + DESCRIPTOR = _VALUETYPE, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.ValueType) + )) +_sym_db.RegisterMessage(ValueType) + +Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), dict( + DESCRIPTOR = _SAMPLE, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Sample) + )) +_sym_db.RegisterMessage(Sample) + +Label = _reflection.GeneratedProtocolMessageType('Label', (_message.Message,), dict( + DESCRIPTOR = _LABEL, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Label) + )) +_sym_db.RegisterMessage(Label) + +Mapping = _reflection.GeneratedProtocolMessageType('Mapping', (_message.Message,), dict( + DESCRIPTOR = _MAPPING, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Mapping) + )) +_sym_db.RegisterMessage(Mapping) + +Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict( + DESCRIPTOR = _LOCATION, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Location) + )) +_sym_db.RegisterMessage(Location) + +Line = _reflection.GeneratedProtocolMessageType('Line', (_message.Message,), dict( + DESCRIPTOR = _LINE, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Line) + )) +_sym_db.RegisterMessage(Line) + +Function = _reflection.GeneratedProtocolMessageType('Function', (_message.Message,), dict( + DESCRIPTOR = _FUNCTION, + __module__ = 'ddtrace.profiling.exporter.pprof_pb2' + # @@protoc_insertion_point(class_scope:perftools.profiles.Function) + )) +_sym_db.RegisterMessage(Function) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\035com.google.perftools.profilesB\014ProfileProto')) +# @@protoc_insertion_point(module_scope) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_421_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_421_pb2.py new file mode 100644 index 0000000..d556db8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/exporter/pprof_421_pb2.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
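
> Editorial aside, not part of the vendored file: the descriptor boilerplate above ultimately produces ordinary protobuf message classes. A minimal sketch of building a pprof `Profile` with them, assuming the vendored `ddtrace.profiling.exporter.pprof_pb2` module (the import path implied by `__module__` above) is importable under a matching protobuf runtime:

```python
# Illustrative only; field names come from the descriptors above.
from ddtrace.profiling.exporter import pprof_pb2  # assumed import path

profile = pprof_pb2.Profile()
# pprof convention: string_table[0] must be the empty string
profile.string_table.extend(["", "cpu", "nanoseconds"])

vt = profile.sample_type.add()
vt.type = 1            # index of "cpu" in string_table
vt.unit = 2            # index of "nanoseconds" in string_table

sample = profile.sample.add()
sample.location_id.append(1)
sample.value.append(10_000_000)   # 10 ms of CPU time, in nanoseconds

payload = profile.SerializeToString()  # bytes ready to compress and upload
```
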
+# source: pprof.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0bpprof.proto\x12\x12perftools.profiles\"\xd5\x03\n\x07Profile\x12\x32\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x1d.perftools.profiles.ValueType\x12*\n\x06sample\x18\x02 \x03(\x0b\x32\x1a.perftools.profiles.Sample\x12,\n\x07mapping\x18\x03 \x03(\x0b\x32\x1b.perftools.profiles.Mapping\x12.\n\x08location\x18\x04 \x03(\x0b\x32\x1c.perftools.profiles.Location\x12.\n\x08\x66unction\x18\x05 \x03(\x0b\x32\x1c.perftools.profiles.Function\x12\x14\n\x0cstring_table\x18\x06 \x03(\t\x12\x13\n\x0b\x64rop_frames\x18\x07 \x01(\x03\x12\x13\n\x0bkeep_frames\x18\x08 \x01(\x03\x12\x12\n\ntime_nanos\x18\t \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\n \x01(\x03\x12\x32\n\x0bperiod_type\x18\x0b \x01(\x0b\x32\x1d.perftools.profiles.ValueType\x12\x0e\n\x06period\x18\x0c \x01(\x03\x12\x0f\n\x07\x63omment\x18\r \x03(\x03\x12\x1b\n\x13\x64\x65\x66\x61ult_sample_type\x18\x0e \x01(\x03\"\'\n\tValueType\x12\x0c\n\x04type\x18\x01 \x01(\x03\x12\x0c\n\x04unit\x18\x02 \x01(\x03\"V\n\x06Sample\x12\x13\n\x0blocation_id\x18\x01 \x03(\x04\x12\r\n\x05value\x18\x02 \x03(\x03\x12(\n\x05label\x18\x03 \x03(\x0b\x32\x19.perftools.profiles.Label\"@\n\x05Label\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\x0b\n\x03str\x18\x02 \x01(\x03\x12\x0b\n\x03num\x18\x03 \x01(\x03\x12\x10\n\x08num_unit\x18\x04 \x01(\x03\"\xdd\x01\n\x07Mapping\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_start\x18\x02 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x03 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x04 \x01(\x04\x12\x10\n\x08\x66ilename\x18\x05 \x01(\x03\x12\x10\n\x08\x62uild_id\x18\x06 \x01(\x03\x12\x15\n\rhas_functions\x18\x07 \x01(\x08\x12\x15\n\rhas_filenames\x18\x08 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\t \x01(\x08\x12\x19\n\x11has_inline_frames\x18\n \x01(\x08\"v\n\x08Location\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x12\n\nmapping_id\x18\x02 \x01(\x04\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x01(\x04\x12&\n\x04line\x18\x04 \x03(\x0b\x32\x18.perftools.profiles.Line\x12\x11\n\tis_folded\x18\x05 \x01(\x08\")\n\x04Line\x12\x13\n\x0b\x66unction_id\x18\x01 \x01(\x04\x12\x0c\n\x04line\x18\x02 \x01(\x03\"_\n\x08\x46unction\x12\n\n\x02id\x18\x01 \x01(\x04\x12\x0c\n\x04name\x18\x02 \x01(\x03\x12\x13\n\x0bsystem_name\x18\x03 \x01(\x03\x12\x10\n\x08\x66ilename\x18\x04 \x01(\x03\x12\x12\n\nstart_line\x18\x05 \x01(\x03\x42-\n\x1d\x63om.google.perftools.profilesB\x0cProfileProtob\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'pprof_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\035com.google.perftools.profilesB\014ProfileProto' + _PROFILE._serialized_start=36 + _PROFILE._serialized_end=505 + _VALUETYPE._serialized_start=507 + _VALUETYPE._serialized_end=546 + _SAMPLE._serialized_start=548 + _SAMPLE._serialized_end=634 + _LABEL._serialized_start=636 + _LABEL._serialized_end=700 + _MAPPING._serialized_start=703 + _MAPPING._serialized_end=924 + _LOCATION._serialized_start=926 + _LOCATION._serialized_end=1044 + _LINE._serialized_start=1046 + 
_LINE._serialized_end=1087 + _FUNCTION._serialized_start=1089 + _FUNCTION._serialized_end=1184 +# @@protoc_insertion_point(module_scope) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/profiler.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/profiler.py new file mode 100644 index 0000000..0b5d9b8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/profiler.py @@ -0,0 +1,383 @@ +# -*- encoding: utf-8 -*- +import logging +import os +import typing +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Type # noqa:F401 +from typing import Union # noqa:F401 + +import attr + +import ddtrace +from ddtrace.internal import agent +from ddtrace.internal import atexit +from ddtrace.internal import forksafe +from ddtrace.internal import service +from ddtrace.internal import uwsgi +from ddtrace.internal import writer +from ddtrace.internal.datadog.profiling import ddup +from ddtrace.internal.module import ModuleWatchdog +from ddtrace.profiling import collector +from ddtrace.profiling import exporter # noqa:F401 +from ddtrace.profiling import recorder +from ddtrace.profiling import scheduler +from ddtrace.profiling.collector import asyncio +from ddtrace.profiling.collector import memalloc +from ddtrace.profiling.collector import stack +from ddtrace.profiling.collector import stack_event +from ddtrace.profiling.collector import threading +from ddtrace.settings.profiling import config + + +LOG = logging.getLogger(__name__) + + +class Profiler(object): + """Run profiling while code is executed. + + Note that the whole Python process is profiled, not only the code executed. Data from all running threads are + caught. + + """ + + def __init__(self, *args, **kwargs): + self._profiler = _ProfilerInstance(*args, **kwargs) + + def start(self, stop_on_exit=True, profile_children=True): + """Start the profiler. + + :param stop_on_exit: Whether to stop the profiler and flush the profile on exit. + :param profile_children: Whether to start a profiler in child processes. + """ + + if profile_children: + try: + uwsgi.check_uwsgi(self._restart_on_fork, atexit=self.stop if stop_on_exit else None) + except uwsgi.uWSGIMasterProcess: + # Do nothing, the start() method will be called in each worker subprocess + return + + self._profiler.start() + + if stop_on_exit: + atexit.register(self.stop) + + if profile_children: + forksafe.register(self._restart_on_fork) + + def stop(self, flush=True): + """Stop the profiler. + + :param flush: Flush last profile. + """ + atexit.unregister(self.stop) + try: + self._profiler.stop(flush) + except service.ServiceStatusError: + # Not a best practice, but for backward API compatibility that allowed to call `stop` multiple times. + pass + + def _restart_on_fork(self): + # Be sure to stop the parent first, since it might have to e.g. unpatch functions + # Do not flush data as we don't want to have multiple copies of the parent profile exported. + try: + self._profiler.stop(flush=False, join=False) + except service.ServiceStatusError: + # This can happen in uWSGI mode: the children won't have the _profiler started from the master process + pass + self._profiler = self._profiler.copy() + self._profiler.start() + + def __getattr__( + self, + key, # type: str + ): + # type: (...) -> typing.Any + return getattr(self._profiler, key) + + +@attr.s +class _ProfilerInstance(service.Service): + """A instance of the profiler. + + Each process must manage its own instance. 
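
> Editorial aside, not part of the vendored file: the public `Profiler` wrapper above is what application code interacts with. A minimal usage sketch, assuming export is configured through the usual `DD_*` environment variables (agent URL or API key):

```python
from ddtrace.profiling import Profiler

prof = Profiler()      # service/env/version default to DD_SERVICE / DD_ENV / DD_VERSION
prof.start()           # stop_on_exit=True registers an atexit hook; profile_children=True handles fork()

total = sum(i * i for i in range(1_000_000))   # some CPU work for the stack collector to sample

prof.stop(flush=True)  # flush the last profile; calling stop() twice is tolerated
print(total)
```
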
+ + """ + + # User-supplied values + url = attr.ib(default=None) + service = attr.ib(factory=lambda: os.environ.get("DD_SERVICE")) + tags = attr.ib(factory=dict, type=typing.Dict[str, str]) + env = attr.ib(factory=lambda: os.environ.get("DD_ENV")) + version = attr.ib(factory=lambda: os.environ.get("DD_VERSION")) + tracer = attr.ib(default=ddtrace.tracer) + api_key = attr.ib(factory=lambda: os.environ.get("DD_API_KEY"), type=Optional[str]) + agentless = attr.ib(type=bool, default=config.agentless) + _memory_collector_enabled = attr.ib(type=bool, default=config.memory.enabled) + _stack_collector_enabled = attr.ib(type=bool, default=config.stack.enabled) + _lock_collector_enabled = attr.ib(type=bool, default=config.lock.enabled) + enable_code_provenance = attr.ib(type=bool, default=config.code_provenance) + endpoint_collection_enabled = attr.ib(type=bool, default=config.endpoint_collection) + + _recorder = attr.ib(init=False, default=None) + _collectors = attr.ib(init=False, default=None) + _collectors_on_import = attr.ib(init=False, default=None, eq=False) + _scheduler = attr.ib(init=False, default=None, type=Union[scheduler.Scheduler, scheduler.ServerlessScheduler]) + _lambda_function_name = attr.ib( + init=False, factory=lambda: os.environ.get("AWS_LAMBDA_FUNCTION_NAME"), type=Optional[str] + ) + _export_libdd_enabled = attr.ib(type=bool, default=config.export.libdd_enabled) + _export_py_enabled = attr.ib(type=bool, default=config.export.py_enabled) + + ENDPOINT_TEMPLATE = "https://intake.profile.{}" + + def _build_default_exporters(self): + # type: (...) -> List[exporter.Exporter] + _OUTPUT_PPROF = config.output_pprof + if _OUTPUT_PPROF: + # DEV: Import this only if needed to avoid importing protobuf + # unnecessarily + from ddtrace.profiling.exporter import file + + return [ + file.PprofFileExporter(prefix=_OUTPUT_PPROF), + ] + + if self.url is not None: + endpoint = self.url + elif self.agentless: + LOG.warning( + "Agentless uploading is currently for internal usage only and not officially supported. " + "You should not enable it unless somebody at Datadog instructed you to do so." + ) + endpoint = self.ENDPOINT_TEMPLATE.format(os.environ.get("DD_SITE", "datadoghq.com")) + else: + if isinstance(self.tracer._writer, writer.AgentWriter): + endpoint = self.tracer._writer.agent_url + else: + endpoint = agent.get_trace_url() + + if self.agentless: + endpoint_path = "/api/v2/profile" + else: + # Agent mode + # path is relative because it is appended + # to the agent base path. 
+ endpoint_path = "profiling/v1/input" + + if self._lambda_function_name is not None: + self.tags.update({"functionname": self._lambda_function_name}) + + # Build the list of enabled Profiling features and send along as a tag + configured_features = [] + if self._stack_collector_enabled: + configured_features.append("stack") + if self._lock_collector_enabled: + configured_features.append("lock") + if self._memory_collector_enabled: + configured_features.append("mem") + if config.heap.sample_size > 0: + configured_features.append("heap") + if self._export_libdd_enabled: + configured_features.append("exp_dd") + if self._export_py_enabled: + configured_features.append("exp_py") + configured_features.append("CAP" + str(config.capture_pct)) + configured_features.append("MAXF" + str(config.max_frames)) + self.tags.update({"profiler_config": "_".join(configured_features)}) + + endpoint_call_counter_span_processor = self.tracer._endpoint_call_counter_span_processor + if self.endpoint_collection_enabled: + endpoint_call_counter_span_processor.enable() + + if self._export_libdd_enabled: + versionname = ( + "{}.libdd".format(self.version) + if self._export_py_enabled and self.version is not None + else self.version + ) + ddup.init( + env=self.env, + service=self.service, + version=versionname, + tags=self.tags, + max_nframes=config.max_frames, + url=endpoint, + ) + + if self._export_py_enabled: + # DEV: Import this only if needed to avoid importing protobuf + # unnecessarily + from ddtrace.profiling.exporter import http + + return [ + http.PprofHTTPExporter( + service=self.service, + env=self.env, + tags=self.tags, + version=self.version, + api_key=self.api_key, + endpoint=endpoint, + endpoint_path=endpoint_path, + enable_code_provenance=self.enable_code_provenance, + endpoint_call_counter_span_processor=endpoint_call_counter_span_processor, + ) + ] + return [] + + def __attrs_post_init__(self): + # type: (...) -> None + # Allow to store up to 10 threads for 60 seconds at 50 Hz + max_stack_events = 10 * 60 * 50 + r = self._recorder = recorder.Recorder( + max_events={ + stack_event.StackSampleEvent: max_stack_events, + stack_event.StackExceptionSampleEvent: int(max_stack_events / 2), + # (default buffer size / interval) * export interval + memalloc.MemoryAllocSampleEvent: int( + (memalloc.MemoryCollector._DEFAULT_MAX_EVENTS / memalloc.MemoryCollector._DEFAULT_INTERVAL) * 60 + ), + # Do not limit the heap sample size as the number of events is relative to allocated memory anyway + memalloc.MemoryHeapSampleEvent: None, + }, + default_max_events=config.max_events, + ) + + self._collectors = [] + + if self._stack_collector_enabled: + LOG.debug("Profiling collector (stack) enabled") + try: + self._collectors.append( + stack.StackCollector( + r, + tracer=self.tracer, + endpoint_collection_enabled=self.endpoint_collection_enabled, + ) # type: ignore[call-arg] + ) + LOG.debug("Profiling collector (stack) initialized") + except Exception: + LOG.error("Failed to start stack collector, disabling.", exc_info=True) + + if self._lock_collector_enabled: + # These collectors require the import of modules, so we create them + # if their import is detected at runtime. 
+ def start_collector(collector_class: Type) -> None: + with self._service_lock: + col = collector_class(r, tracer=self.tracer) + + if self.status == service.ServiceStatus.RUNNING: + # The profiler is already running so we need to start the collector + try: + col.start() + LOG.debug("Started collector %r", col) + except collector.CollectorUnavailable: + LOG.debug("Collector %r is unavailable, disabling", col) + return + except Exception: + LOG.error("Failed to start collector %r, disabling.", col, exc_info=True) + return + + self._collectors.append(col) + + self._collectors_on_import = [ + ("threading", lambda _: start_collector(threading.ThreadingLockCollector)), + ("asyncio", lambda _: start_collector(asyncio.AsyncioLockCollector)), + ] + + for module, hook in self._collectors_on_import: + ModuleWatchdog.register_module_hook(module, hook) + + if self._memory_collector_enabled: + self._collectors.append(memalloc.MemoryCollector(r)) + + exporters = self._build_default_exporters() + + if exporters or self._export_libdd_enabled: + scheduler_class = ( + scheduler.ServerlessScheduler if self._lambda_function_name else scheduler.Scheduler + ) # type: (Type[Union[scheduler.Scheduler, scheduler.ServerlessScheduler]]) + + self._scheduler = scheduler_class( + recorder=r, + exporters=exporters, + before_flush=self._collectors_snapshot, + ) + + def _collectors_snapshot(self): + for c in self._collectors: + try: + snapshot = c.snapshot() + if snapshot: + for events in snapshot: + self._recorder.push_events(events) + except Exception: + LOG.error("Error while snapshoting collector %r", c, exc_info=True) + + _COPY_IGNORE_ATTRIBUTES = {"status"} + + def copy(self): + return self.__class__( + **{ + a.name: getattr(self, a.name) + for a in attr.fields(self.__class__) + if a.name[0] != "_" and a.name not in self._COPY_IGNORE_ATTRIBUTES + } + ) + + def _start_service(self): + # type: (...) -> None + """Start the profiler.""" + collectors = [] + for col in self._collectors: + try: + col.start() + except collector.CollectorUnavailable: + LOG.debug("Collector %r is unavailable, disabling", col) + except Exception: + LOG.error("Failed to start collector %r, disabling.", col, exc_info=True) + else: + collectors.append(col) + self._collectors = collectors + + if self._scheduler is not None: + self._scheduler.start() + + def _stop_service(self, flush=True, join=True): + # type: (bool, bool) -> None + """Stop the profiler. + + :param flush: Flush a last profile. + """ + # Prevent doing more initialisation now that we are shutting down. + if self._lock_collector_enabled: + for module, hook in self._collectors_on_import: + try: + ModuleWatchdog.unregister_module_hook(module, hook) + except ValueError: + pass + + if self._scheduler is not None: + self._scheduler.stop() + # Wait for the export to be over: export might need collectors (e.g., for snapshot) so we can't stop + # collectors before the possibly running flush is finished. 
+ if join: + self._scheduler.join() + if flush: + # Do not stop the collectors before flushing, they might be needed (snapshot) + self._scheduler.flush() + + for col in reversed(self._collectors): + try: + col.stop() + except service.ServiceStatusError: + # It's possible some collector failed to start, ignore failure to stop + pass + + if join: + for col in reversed(self._collectors): + col.join() + + def visible_events(self): + return self._export_py_enabled diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/recorder.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/recorder.py new file mode 100644 index 0000000..6f75aca --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/recorder.py @@ -0,0 +1,95 @@ +# -*- encoding: utf-8 -*- +import collections +import threading +import typing + +import attr + +from ddtrace.internal import forksafe +from ddtrace.settings.profiling import config + +from . import event + + +class _defaultdictkey(dict): + """A variant of defaultdict that calls default_factory with the missing key as argument.""" + + def __init__(self, default_factory=None): + self.default_factory = default_factory + + def __missing__(self, key): + if self.default_factory: + v = self[key] = self.default_factory(key) + return v + raise KeyError(key) + + +EventsType = typing.Dict[event.Event, typing.Sequence[event.Event]] + + +@attr.s +class Recorder(object): + """An object that records program activity.""" + + default_max_events = attr.ib(default=config.spec.max_events.default, type=int) + """The maximum number of events for an event type if one is not specified.""" + + max_events = attr.ib(factory=dict, type=typing.Dict[typing.Type[event.Event], typing.Optional[int]]) + """A dict of {event_type_class: max events} to limit the number of events to record.""" + + events = attr.ib(init=False, repr=False, eq=False, type=EventsType) + _events_lock = attr.ib(init=False, repr=False, factory=threading.RLock, eq=False) + + def __attrs_post_init__(self): + # type: (...) -> None + self._reset_events() + forksafe.register(self._after_fork) + + def _after_fork(self): + # type: (...) -> None + # NOTE: do not try to push events if the process forked + # This means we don't know the state of _events_lock and it might be unusable — we'd deadlock + self.push_events = self._push_events_noop # type: ignore[assignment] + + def _push_events_noop(self, events): + pass + + def push_event(self, event): + """Push an event in the recorder. + + :param event: The `ddtrace.profiling.event.Event` to push. + """ + return self.push_events([event]) + + def push_events(self, events): + """Push multiple events in the recorder. + + All the events MUST be of the same type. + There is no sanity check as whether all the events are from the same class for performance reasons. + + :param events: The event list to push. + """ + if events: + event_type = events[0].__class__ + with self._events_lock: + q = self.events[event_type] + q.extend(events) + + def _get_deque_for_event_type(self, event_type): + return collections.deque(maxlen=self.max_events.get(event_type, self.default_max_events)) + + def _reset_events(self): + self.events = _defaultdictkey(self._get_deque_for_event_type) + + def reset(self): + """Reset the recorder. + + This is useful when e.g. exporting data. Once the event queue is retrieved, a new one can be created by calling + the reset method, avoiding iterating on a mutating event list. + + :return: The list of events that has been removed. 
+ """ + with self._events_lock: + events = self.events + self._reset_events() + return events diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/scheduler.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/scheduler.py new file mode 100644 index 0000000..b47c73a --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/profiling/scheduler.py @@ -0,0 +1,111 @@ +# -*- encoding: utf-8 -*- +import logging + +import attr + +from ddtrace.internal import compat +from ddtrace.internal import periodic +from ddtrace.internal.datadog.profiling import ddup +from ddtrace.profiling import _traceback +from ddtrace.profiling import exporter +from ddtrace.settings.profiling import config + + +LOG = logging.getLogger(__name__) + + +@attr.s +class Scheduler(periodic.PeriodicService): + """Schedule export of recorded data.""" + + recorder = attr.ib() + exporters = attr.ib() + before_flush = attr.ib(default=None, eq=False) + _interval = attr.ib(type=float, default=config.upload_interval) + _configured_interval = attr.ib(init=False) + _last_export = attr.ib(init=False, default=None, eq=False) + _export_libdd_enabled = attr.ib(type=bool, default=config.export.libdd_enabled) + _export_py_enabled = attr.ib(type=bool, default=config.export.py_enabled) + + def __attrs_post_init__(self): + # Copy the value to use it later since we're going to adjust the real interval + self._configured_interval = self.interval + + def _start_service(self): + # type: (...) -> None + """Start the scheduler.""" + LOG.debug("Starting scheduler") + super(Scheduler, self)._start_service() + self._last_export = compat.time_ns() + LOG.debug("Scheduler started") + + def flush(self): + """Flush events from recorder to exporters.""" + LOG.debug("Flushing events") + if self._export_libdd_enabled: + ddup.upload() + + if not self._export_py_enabled: + # If we're not using the Python profiler, then stop now + # But set these fields for compatibility + start = self._last_export + self._last_export = compat.time_ns() + return + + if self.before_flush is not None: + try: + self.before_flush() + except Exception: + LOG.error("Scheduler before_flush hook failed", exc_info=True) + events = self.recorder.reset() + start = self._last_export + self._last_export = compat.time_ns() + for exp in self.exporters: + try: + exp.export(events, start, self._last_export) + except exporter.ExportError as e: + LOG.warning("Unable to export profile: %s. Ignoring.", _traceback.format_exception(e)) + except Exception: + LOG.exception( + "Unexpected error while exporting events. " + "Please report this bug to https://github.com/DataDog/dd-trace-py/issues" + ) + + def periodic(self): + start_time = compat.monotonic() + try: + self.flush() + finally: + self.interval = max(0, self._configured_interval - (compat.monotonic() - start_time)) + + +@attr.s +class ServerlessScheduler(Scheduler): + """Serverless scheduler that works on, e.g., AWS Lambda. + + The idea with this scheduler is to not sleep 60s, but to sleep 1s and flush out profiles after 60 sleeping period. + As the service can be frozen a few seconds after flushing out a profile, we want to make sure the next flush is not + > 60s later, but after at least 60 periods of 1s. 
+ + """ + + # We force this interval everywhere + FORCED_INTERVAL = 1.0 + FLUSH_AFTER_INTERVALS = 60.0 + + _interval = attr.ib(default=FORCED_INTERVAL, type=float) + _profiled_intervals = attr.ib(init=False, default=0) + + def periodic(self): + # Check both the number of intervals and time frame to be sure we don't flush, e.g., empty profiles + if self._profiled_intervals >= self.FLUSH_AFTER_INTERVALS and (compat.time_ns() - self._last_export) >= ( + self.FORCED_INTERVAL * self.FLUSH_AFTER_INTERVALS + ): + try: + super(ServerlessScheduler, self).periodic() + finally: + # Override interval so it's always back to the value we n + self.interval = self.FORCED_INTERVAL + self._profiled_intervals = 0 + else: + self._profiled_intervals += 1 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_database_monitoring.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_database_monitoring.py new file mode 100644 index 0000000..9f3163d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_database_monitoring.py @@ -0,0 +1,97 @@ +from typing import TYPE_CHECKING # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal.logger import get_logger +from ddtrace.settings.peer_service import PeerServiceConfig +from ddtrace.vendor.sqlcommenter import generate_sql_comment as _generate_sql_comment + +from ..internal import compat +from ..internal.utils import get_argument_value +from ..internal.utils import set_argument_value +from ..settings import _config as dd_config +from ..settings._database_monitoring import dbm_config + + +if TYPE_CHECKING: + from typing import Optional # noqa:F401 + + from ddtrace import Span # noqa:F401 + +DBM_PARENT_SERVICE_NAME_KEY = "ddps" +DBM_DATABASE_SERVICE_NAME_KEY = "dddbs" +DBM_ENVIRONMENT_KEY = "dde" +DBM_VERSION_KEY = "ddpv" +DBM_TRACE_PARENT_KEY = "traceparent" +DBM_TRACE_INJECTED_TAG = "_dd.dbm_trace_injected" + + +log = get_logger(__name__) + + +def default_sql_injector(dbm_comment, sql_statement): + # type: (str, Union[str, bytes]) -> Union[str, bytes] + try: + if isinstance(sql_statement, bytes): + return dbm_comment.encode("utf-8", errors="strict") + sql_statement + return dbm_comment + sql_statement + except (TypeError, ValueError): + log.warning( + "Linking Database Monitoring profiles to spans is not supported for the following query type: %s. 
" + "To disable this feature please set the following environment variable: " + "DD_DBM_PROPAGATION_MODE=disabled", + type(sql_statement), + ) + return sql_statement + + +class _DBM_Propagator(object): + def __init__(self, sql_pos, sql_kw, sql_injector=default_sql_injector): + self.sql_pos = sql_pos + self.sql_kw = sql_kw + self.sql_injector = sql_injector + + def inject(self, dbspan, args, kwargs): + dbm_comment = self._get_dbm_comment(dbspan) + if dbm_comment is None: + # injection_mode is disabled + return args, kwargs + + original_sql_statement = get_argument_value(args, kwargs, self.sql_pos, self.sql_kw) + # add dbm comment to original_sql_statement + sql_with_dbm_tags = self.sql_injector(dbm_comment, original_sql_statement) + # replace the original query or procedure with sql_with_dbm_tags + args, kwargs = set_argument_value(args, kwargs, self.sql_pos, self.sql_kw, sql_with_dbm_tags) + return args, kwargs + + def _get_dbm_comment(self, db_span): + # type: (Span) -> Optional[str] + """Generate DBM trace injection comment and updates span tags + This method will set the ``_dd.dbm_trace_injected: "true"`` tag + on ``db_span`` if the configured injection mode is ``"full"``. + """ + if dbm_config.propagation_mode == "disabled": + return None + + # set the following tags if DBM injection mode is full or service + peer_service_enabled = PeerServiceConfig().set_defaults_enabled + service_name_key = db_span.service + if peer_service_enabled: + db_name = db_span.get_tags().get("db.name") + service_name_key = compat.ensure_text(db_name) if db_name else db_span.service + + dbm_tags = { + DBM_PARENT_SERVICE_NAME_KEY: dd_config.service, + DBM_ENVIRONMENT_KEY: dd_config.env, + DBM_VERSION_KEY: dd_config.version, + DBM_DATABASE_SERVICE_NAME_KEY: service_name_key, + } + + if dbm_config.propagation_mode == "full": + db_span.set_tag_str(DBM_TRACE_INJECTED_TAG, "true") + dbm_tags[DBM_TRACE_PARENT_KEY] = db_span.context._traceparent + + sql_comment = _generate_sql_comment(**dbm_tags) + if sql_comment: + # replace leading whitespace with trailing whitespace + return sql_comment.strip() + " " + return "" diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_utils.py new file mode 100644 index 0000000..de08b34 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/_utils.py @@ -0,0 +1,31 @@ +from typing import Optional # noqa:F401 + +from ddtrace.internal.utils.cache import cached + + +@cached() +def get_wsgi_header(header): + # type: (str) -> str + """Returns a WSGI compliant HTTP header. + See https://www.python.org/dev/peps/pep-3333/#environ-variables for + information from the spec. + """ + return "HTTP_{}".format(header.upper().replace("-", "_")) + + +@cached() +def from_wsgi_header(header): + # type: (str) -> Optional[str] + """Convert a WSGI compliant HTTP header into the original header. + See https://www.python.org/dev/peps/pep-3333/#environ-variables for + information from the spec. + """ + HTTP_PREFIX = "HTTP_" + # PEP 333 gives two headers which aren't prepended with HTTP_. 
+ UNPREFIXED_HEADERS = {"CONTENT_TYPE", "CONTENT_LENGTH"} + + if header.startswith(HTTP_PREFIX): + header = header[len(HTTP_PREFIX) :] + elif header not in UNPREFIXED_HEADERS: + return None + return header.replace("_", "-").title() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/http.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/http.py new file mode 100644 index 0000000..24e9ae3 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/propagation/http.py @@ -0,0 +1,981 @@ +import re +import sys +from typing import Dict # noqa:F401 +from typing import FrozenSet # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Text # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import cast # noqa:F401 + + +if sys.version_info >= (3, 8): + from typing import Literal # noqa:F401 +else: + from typing_extensions import Literal # noqa:F401 + + +from ddtrace import config +from ddtrace.tracing._span_link import SpanLink + +from ..constants import AUTO_KEEP +from ..constants import AUTO_REJECT +from ..constants import USER_KEEP +from ..context import Context +from ..internal._tagset import TagsetDecodeError +from ..internal._tagset import TagsetEncodeError +from ..internal._tagset import TagsetMaxSizeDecodeError +from ..internal._tagset import TagsetMaxSizeEncodeError +from ..internal._tagset import decode_tagset_string +from ..internal._tagset import encode_tagset_values +from ..internal.compat import ensure_text +from ..internal.constants import _PROPAGATION_STYLE_NONE +from ..internal.constants import _PROPAGATION_STYLE_W3C_TRACECONTEXT +from ..internal.constants import HIGHER_ORDER_TRACE_ID_BITS as _HIGHER_ORDER_TRACE_ID_BITS +from ..internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS +from ..internal.constants import PROPAGATION_STYLE_B3_MULTI +from ..internal.constants import PROPAGATION_STYLE_B3_SINGLE +from ..internal.constants import PROPAGATION_STYLE_DATADOG +from ..internal.constants import W3C_TRACEPARENT_KEY +from ..internal.constants import W3C_TRACESTATE_KEY +from ..internal.logger import get_logger +from ..internal.sampling import validate_sampling_decision +from ..span import _get_64_highest_order_bits_as_hex +from ..span import _get_64_lowest_order_bits_as_int +from ..span import _MetaDictType +from ._utils import get_wsgi_header + + +log = get_logger(__name__) + + +# HTTP headers one should set for distributed tracing. 
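
> Editorial aside, not part of the vendored file: a quick round-trip of the WSGI helpers defined in `_utils.py` above (assumes the vendored `ddtrace` package is importable). This is why the possible-header sets built below also include `HTTP_`-prefixed variants of each header name:

```python
from ddtrace.propagation._utils import from_wsgi_header, get_wsgi_header

assert get_wsgi_header("x-datadog-trace-id") == "HTTP_X_DATADOG_TRACE_ID"
assert from_wsgi_header("HTTP_X_DATADOG_TRACE_ID") == "X-Datadog-Trace-Id"
assert from_wsgi_header("CONTENT_TYPE") == "Content-Type"   # PEP 3333 header without the HTTP_ prefix
assert from_wsgi_header("SERVER_NAME") is None              # not an HTTP header at all
```
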
+# These are cross-language (eg: Python, Go and other implementations should honor these) +_HTTP_BAGGAGE_PREFIX = "ot-baggage-" +HTTP_HEADER_TRACE_ID = "x-datadog-trace-id" +HTTP_HEADER_PARENT_ID = "x-datadog-parent-id" +HTTP_HEADER_SAMPLING_PRIORITY = "x-datadog-sampling-priority" +HTTP_HEADER_ORIGIN = "x-datadog-origin" +_HTTP_HEADER_B3_SINGLE = "b3" +_HTTP_HEADER_B3_TRACE_ID = "x-b3-traceid" +_HTTP_HEADER_B3_SPAN_ID = "x-b3-spanid" +_HTTP_HEADER_B3_SAMPLED = "x-b3-sampled" +_HTTP_HEADER_B3_FLAGS = "x-b3-flags" +_HTTP_HEADER_TAGS = "x-datadog-tags" +_HTTP_HEADER_TRACEPARENT = "traceparent" +_HTTP_HEADER_TRACESTATE = "tracestate" + + +def _possible_header(header): + # type: (str) -> FrozenSet[str] + return frozenset([header, get_wsgi_header(header).lower()]) + + +# Note that due to WSGI spec we have to also check for uppercased and prefixed +# versions of these headers +POSSIBLE_HTTP_HEADER_TRACE_IDS = _possible_header(HTTP_HEADER_TRACE_ID) +POSSIBLE_HTTP_HEADER_PARENT_IDS = _possible_header(HTTP_HEADER_PARENT_ID) +POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES = _possible_header(HTTP_HEADER_SAMPLING_PRIORITY) +POSSIBLE_HTTP_HEADER_ORIGIN = _possible_header(HTTP_HEADER_ORIGIN) +_POSSIBLE_HTTP_HEADER_TAGS = frozenset([_HTTP_HEADER_TAGS, get_wsgi_header(_HTTP_HEADER_TAGS).lower()]) +_POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER = _possible_header(_HTTP_HEADER_B3_SINGLE) +_POSSIBLE_HTTP_HEADER_B3_TRACE_IDS = _possible_header(_HTTP_HEADER_B3_TRACE_ID) +_POSSIBLE_HTTP_HEADER_B3_SPAN_IDS = _possible_header(_HTTP_HEADER_B3_SPAN_ID) +_POSSIBLE_HTTP_HEADER_B3_SAMPLEDS = _possible_header(_HTTP_HEADER_B3_SAMPLED) +_POSSIBLE_HTTP_HEADER_B3_FLAGS = _possible_header(_HTTP_HEADER_B3_FLAGS) +_POSSIBLE_HTTP_HEADER_TRACEPARENT = _possible_header(_HTTP_HEADER_TRACEPARENT) +_POSSIBLE_HTTP_HEADER_TRACESTATE = _possible_header(_HTTP_HEADER_TRACESTATE) + + +# https://www.w3.org/TR/trace-context/#traceparent-header-field-values +# Future proofing: The traceparent spec is additive, future traceparent versions may contain more than 4 values +# The regex below matches the version, trace id, span id, sample flag, and end-string/future values (if version>00) +_TRACEPARENT_HEX_REGEX = re.compile( + r""" + ^ # Start of string + ([a-f0-9]{2})- # 2 character hex version + ([a-f0-9]{32})- # 32 character hex trace id + ([a-f0-9]{16})- # 16 character hex span id + ([a-f0-9]{2}) # 2 character hex sample flag + (-.+)? 
# optional, start of any additional values + $ # end of string + """, + re.VERBOSE, +) + + +def _extract_header_value(possible_header_names, headers, default=None): + # type: (FrozenSet[str], Dict[str, str], Optional[str]) -> Optional[str] + for header in possible_header_names: + if header in headers: + return ensure_text(headers[header], errors="backslashreplace") + + return default + + +def _attach_baggage_to_context(headers: Dict[str, str], context: Context): + if context is not None: + for key, value in headers.items(): + if key[: len(_HTTP_BAGGAGE_PREFIX)] == _HTTP_BAGGAGE_PREFIX: + context._set_baggage_item(key[len(_HTTP_BAGGAGE_PREFIX) :], value) + + +def _hex_id_to_dd_id(hex_id): + # type: (str) -> int + """Helper to convert hex ids into Datadog compatible ints.""" + return int(hex_id, 16) + + +_b3_id_to_dd_id = _hex_id_to_dd_id + + +def _dd_id_to_b3_id(dd_id): + # type: (int) -> str + """Helper to convert Datadog trace/span int ids into lower case hex values""" + if dd_id > _MAX_UINT_64BITS: + # b3 trace ids can have the length of 16 or 32 characters: + # https://github.com/openzipkin/b3-propagation#traceid + return "{:032x}".format(dd_id) + return "{:016x}".format(dd_id) + + +class _DatadogMultiHeader: + """Helper class for injecting/extract Datadog multi header format + + Headers: + + - ``x-datadog-trace-id`` the context trace id as a uint64 integer + - ``x-datadog-parent-id`` the context current span id as a uint64 integer + - ``x-datadog-sampling-priority`` integer representing the sampling decision. + ``<= 0`` (Reject) or ``> 1`` (Keep) + - ``x-datadog-origin`` optional name of origin Datadog product which initiated the request + - ``x-datadog-tags`` optional tracer tags + + Restrictions: + + - Trace tag key-value pairs in ``x-datadog-tags`` are extracted from incoming requests. + - Only trace tags with keys prefixed with ``_dd.p.`` are propagated. + - The trace tag keys must be printable ASCII characters excluding space, comma, and equals. + - The trace tag values must be printable ASCII characters excluding comma. Leading and + trailing spaces are trimmed. 
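
> Editorial aside, not part of the vendored file: to make the header layout above concrete, a propagated 64-bit context travels roughly as the carrier below (example values, and the `x-datadog-tags` encoding is sketched from the docstring rather than copied from the patch):

```python
# Shape of the x-datadog-* headers described above (illustrative values).
carrier = {
    "x-datadog-trace-id": "1234567890123456789",          # uint64, base 10
    "x-datadog-parent-id": "987654321987654321",          # uint64, base 10
    "x-datadog-sampling-priority": "1",                    # <= 0 reject, >= 1 keep
    "x-datadog-origin": "rum",                             # optional originating product
    "x-datadog-tags": "_dd.p.dm=-4,_dd.p.usr.id=baz64",    # only _dd.p.* trace tags propagate
}
```
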
+ """ + + _X_DATADOG_TAGS_EXTRACT_REJECT = frozenset(["_dd.p.upstream_services"]) + + @staticmethod + def _is_valid_datadog_trace_tag_key(key): + return key.startswith("_dd.p.") + + @staticmethod + def _get_tags_value(headers): + # type: (Dict[str, str]) -> Optional[str] + return _extract_header_value( + _POSSIBLE_HTTP_HEADER_TAGS, + headers, + default="", + ) + + @staticmethod + def _extract_meta(tags_value): + # Do not fail if the tags are malformed + try: + meta = { + k: v + for (k, v) in decode_tagset_string(tags_value).items() + if ( + k not in _DatadogMultiHeader._X_DATADOG_TAGS_EXTRACT_REJECT + and _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) + ) + } + except TagsetMaxSizeDecodeError: + meta = { + "_dd.propagation_error": "extract_max_size", + } + log.warning("failed to decode x-datadog-tags", exc_info=True) + except TagsetDecodeError: + meta = { + "_dd.propagation_error": "decoding_error", + } + log.debug("failed to decode x-datadog-tags: %r", tags_value, exc_info=True) + return meta + + @staticmethod + def _put_together_trace_id(trace_id_hob_hex: str, low_64_bits: int) -> int: + # combine highest and lowest order hex values to create a 128 bit trace_id + return int(trace_id_hob_hex + "{:016x}".format(low_64_bits), 16) + + @staticmethod + def _higher_order_is_valid(upper_64_bits: str) -> bool: + try: + if len(upper_64_bits) != 16 or not (int(upper_64_bits, 16) or (upper_64_bits.islower())): + raise ValueError + except ValueError: + return False + + return True + + @staticmethod + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None + if span_context.trace_id is None or span_context.span_id is None: + log.debug("tried to inject invalid context %r", span_context) + return + + if span_context.trace_id > _MAX_UINT_64BITS: + # set lower order 64 bits in `x-datadog-trace-id` header. For backwards compatibility these + # bits should be converted to a base 10 integer. + headers[HTTP_HEADER_TRACE_ID] = str(_get_64_lowest_order_bits_as_int(span_context.trace_id)) + # set higher order 64 bits in `_dd.p.tid` to propagate the full 128 bit trace id. 
+ # Note - The higher order bits must be encoded in hex + span_context._meta[_HIGHER_ORDER_TRACE_ID_BITS] = _get_64_highest_order_bits_as_hex(span_context.trace_id) + else: + headers[HTTP_HEADER_TRACE_ID] = str(span_context.trace_id) + + headers[HTTP_HEADER_PARENT_ID] = str(span_context.span_id) + sampling_priority = span_context.sampling_priority + # Propagate priority only if defined + if sampling_priority is not None: + headers[HTTP_HEADER_SAMPLING_PRIORITY] = str(span_context.sampling_priority) + # Propagate origin only if defined + if span_context.dd_origin is not None: + headers[HTTP_HEADER_ORIGIN] = ensure_text(span_context.dd_origin) + + if not config._x_datadog_tags_enabled: + span_context._meta["_dd.propagation_error"] = "disabled" + return + + # Do not try to encode tags if we have already tried and received an error + if "_dd.propagation_error" in span_context._meta: + return + + # Only propagate trace tags which means ignoring the _dd.origin + tags_to_encode = { + # DEV: Context._meta is a _MetaDictType but we need Dict[str, str] + ensure_text(k): ensure_text(v) + for k, v in span_context._meta.items() + if _DatadogMultiHeader._is_valid_datadog_trace_tag_key(k) + } # type: Dict[Text, Text] + + if tags_to_encode: + try: + headers[_HTTP_HEADER_TAGS] = encode_tagset_values( + tags_to_encode, max_size=config._x_datadog_tags_max_length + ) + + except TagsetMaxSizeEncodeError: + # We hit the max size allowed, add a tag to the context to indicate this happened + span_context._meta["_dd.propagation_error"] = "inject_max_size" + log.warning("failed to encode x-datadog-tags", exc_info=True) + except TagsetEncodeError: + # We hit an encoding error, add a tag to the context to indicate this happened + span_context._meta["_dd.propagation_error"] = "encoding_error" + log.warning("failed to encode x-datadog-tags", exc_info=True) + + @staticmethod + def _extract(headers): + # type: (Dict[str, str]) -> Optional[Context] + trace_id_str = _extract_header_value(POSSIBLE_HTTP_HEADER_TRACE_IDS, headers) + if trace_id_str is None: + return None + try: + trace_id = int(trace_id_str) + except ValueError: + trace_id = 0 + + if trace_id <= 0 or trace_id > _MAX_UINT_64BITS: + log.warning( + "Invalid trace id: %r. `x-datadog-trace-id` must be greater than zero and less than 2**64", trace_id_str + ) + return None + + parent_span_id = _extract_header_value( + POSSIBLE_HTTP_HEADER_PARENT_IDS, + headers, + default="0", + ) + sampling_priority = _extract_header_value( + POSSIBLE_HTTP_HEADER_SAMPLING_PRIORITIES, + headers, + ) + origin = _extract_header_value( + POSSIBLE_HTTP_HEADER_ORIGIN, + headers, + ) + + meta = None + + tags_value = _DatadogMultiHeader._get_tags_value(headers) + if tags_value: + meta = _DatadogMultiHeader._extract_meta(tags_value) + + # When 128 bit trace ids are propagated the 64 lowest order bits are set in the `x-datadog-trace-id` + # header. The 64 highest order bits are encoded in base 16 and store in the `_dd.p.tid` tag. + # Here we reconstruct the full 128 bit trace_id if 128-bit trace id generation is enabled. + if meta and _HIGHER_ORDER_TRACE_ID_BITS in meta: + trace_id_hob_hex = meta[_HIGHER_ORDER_TRACE_ID_BITS] + if _DatadogMultiHeader._higher_order_is_valid(trace_id_hob_hex): + if config._128_bit_trace_id_enabled: + trace_id = _DatadogMultiHeader._put_together_trace_id(trace_id_hob_hex, trace_id) + else: + meta["_dd.propagation_error"] = "malformed_tid {}".format(trace_id_hob_hex) + del meta[_HIGHER_ORDER_TRACE_ID_BITS] + log.warning("malformed_tid: %s. 
Failed to decode trace id from http headers", trace_id_hob_hex) + + # Try to parse values into their expected types + try: + if sampling_priority is not None: + sampling_priority = int(sampling_priority) # type: ignore[assignment] + else: + sampling_priority = sampling_priority + + if meta: + meta = validate_sampling_decision(meta) + + return Context( + # DEV: Do not allow `0` for trace id or span id, use None instead + trace_id=trace_id or None, + span_id=int(parent_span_id) or None, # type: ignore[arg-type] + sampling_priority=sampling_priority, # type: ignore[arg-type] + dd_origin=origin, + # DEV: This cast is needed because of the type requirements of + # span tags and trace tags which are currently implemented using + # the same type internally (_MetaDictType). + meta=cast(_MetaDictType, meta), + ) + except (TypeError, ValueError): + log.debug( + ( + "received invalid x-datadog-* headers, " + "trace-id: %r, parent-id: %r, priority: %r, origin: %r, tags:%r" + ), + trace_id, + parent_span_id, + sampling_priority, + origin, + tags_value, + ) + return None + + +class _B3MultiHeader: + """Helper class to inject/extract B3 Multi-Headers + + https://github.com/openzipkin/b3-propagation/blob/3e54cda11620a773d53c7f64d2ebb10d3a01794c/README.md#multiple-headers + + Example:: + + X-B3-TraceId: 80f198ee56343ba864fe8b2a57d3eff7 + X-B3-ParentSpanId: 05e3ac9a4f6e3b90 + X-B3-SpanId: e457b5a2e4d86bd1 + X-B3-Sampled: 1 + + + Headers: + + - ``X-B3-TraceId`` header is encoded as 32 or 16 lower-hex characters. + - ``X-B3-SpanId`` header is encoded as 16 lower-hex characters. + - ``X-B3-Sampled`` header value of ``0`` means Deny, ``1`` means Accept, and absent means to defer. + - ``X-B3-Flags`` header is used to set ``1`` meaning Debug or an Accept. + + Restrictions: + + - ``X-B3-Sampled`` and ``X-B3-Flags`` should never both be set + + Implementation details: + + - Sampling priority gets encoded as: + - ``sampling_priority <= 0`` -> ``X-B3-Sampled: 0`` + - ``sampling_priority == 1`` -> ``X-B3-Sampled: 1`` + - ``sampling_priority > 1`` -> ``X-B3-Flags: 1`` + - Sampling priority gets decoded as: + - ``X-B3-Sampled: 0`` -> ``sampling_priority = 0`` + - ``X-B3-Sampled: 1`` -> ``sampling_priority = 1`` + - ``X-B3-Flags: 1`` -> ``sampling_priority = 2`` + - ``X-B3-TraceId`` is not required, will use ``None`` when not present + - ``X-B3-SpanId`` is not required, will use ``None`` when not present + """ + + @staticmethod + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None + if span_context.trace_id is None or span_context.span_id is None: + log.debug("tried to inject invalid context %r", span_context) + return + + headers[_HTTP_HEADER_B3_TRACE_ID] = _dd_id_to_b3_id(span_context.trace_id) + headers[_HTTP_HEADER_B3_SPAN_ID] = _dd_id_to_b3_id(span_context.span_id) + sampling_priority = span_context.sampling_priority + # Propagate priority only if defined + if sampling_priority is not None: + if sampling_priority <= 0: + headers[_HTTP_HEADER_B3_SAMPLED] = "0" + elif sampling_priority == 1: + headers[_HTTP_HEADER_B3_SAMPLED] = "1" + elif sampling_priority > 1: + headers[_HTTP_HEADER_B3_FLAGS] = "1" + + @staticmethod + def _extract(headers): + # type: (Dict[str, str]) -> Optional[Context] + trace_id_val = _extract_header_value( + _POSSIBLE_HTTP_HEADER_B3_TRACE_IDS, + headers, + ) + if trace_id_val is None: + return None + + span_id_val = _extract_header_value( + _POSSIBLE_HTTP_HEADER_B3_SPAN_IDS, + headers, + ) + sampled = _extract_header_value( + _POSSIBLE_HTTP_HEADER_B3_SAMPLEDS, + 
headers, + ) + flags = _extract_header_value( + _POSSIBLE_HTTP_HEADER_B3_FLAGS, + headers, + ) + + # Try to parse values into their expected types + try: + # DEV: We are allowed to have only x-b3-sampled/flags + # DEV: Do not allow `0` for trace id or span id, use None instead + trace_id = None + span_id = None + if trace_id_val is not None: + trace_id = _b3_id_to_dd_id(trace_id_val) or None + if span_id_val is not None: + span_id = _b3_id_to_dd_id(span_id_val) or None + + sampling_priority = None + if sampled is not None: + if sampled == "0": + sampling_priority = AUTO_REJECT + elif sampled == "1": + sampling_priority = AUTO_KEEP + if flags == "1": + sampling_priority = USER_KEEP + + return Context( + trace_id=trace_id, + span_id=span_id, + sampling_priority=sampling_priority, + ) + except (TypeError, ValueError): + log.debug( + "received invalid x-b3-* headers, " "trace-id: %r, span-id: %r, sampled: %r, flags: %r", + trace_id_val, + span_id_val, + sampled, + flags, + ) + return None + + +class _B3SingleHeader: + """Helper class to inject/extract B3 Single Header + + https://github.com/openzipkin/b3-propagation/blob/3e54cda11620a773d53c7f64d2ebb10d3a01794c/README.md#single-header + + Format:: + + b3={TraceId}-{SpanId}-{SamplingState}-{ParentSpanId} + + Example:: + + b3: 80f198ee56343ba864fe8b2a57d3eff7-e457b5a2e4d86bd1-1-05e3ac9a4f6e3b90 + + + Values: + + - ``TraceId`` header is encoded as 32 or 16 lower-hex characters. + - ``SpanId`` header is encoded as 16 lower-hex characters. + - ``SamplingState`` header value of ``0`` means Deny, ``1`` means Accept, and ``d`` means Debug + - ``ParentSpanId`` header is not used/ignored if sent + + Restrictions: + + - ``ParentSpanId`` value is ignored/not used + + Implementation details: + + - Sampling priority gets encoded as: + - ``sampling_priority <= 0`` -> ``SamplingState: 0`` + - ``sampling_priority == 1`` -> ``SamplingState: 1`` + - ``sampling_priority > 1`` -> ``SamplingState: d`` + - Sampling priority gets decoded as: + - ``SamplingState: 0`` -> ``sampling_priority = 0`` + - ``SamplingState: 1`` -> ``sampling_priority = 1`` + - ``SamplingState: d`` -> ``sampling_priority = 2`` + - ``TraceId`` is not required, will use ``None`` when not present + - ``SpanId`` is not required, will use ``None`` when not present + """ + + @staticmethod + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None + if span_context.trace_id is None or span_context.span_id is None: + log.debug("tried to inject invalid context %r", span_context) + return + + single_header = "{}-{}".format(_dd_id_to_b3_id(span_context.trace_id), _dd_id_to_b3_id(span_context.span_id)) + sampling_priority = span_context.sampling_priority + if sampling_priority is not None: + if sampling_priority <= 0: + single_header += "-0" + elif sampling_priority == 1: + single_header += "-1" + elif sampling_priority > 1: + single_header += "-d" + headers[_HTTP_HEADER_B3_SINGLE] = single_header + + @staticmethod + def _extract(headers): + # type: (Dict[str, str]) -> Optional[Context] + single_header = _extract_header_value(_POSSIBLE_HTTP_HEADER_B3_SINGLE_HEADER, headers) + if not single_header: + return None + + trace_id = None + span_id = None + sampled = None + + parts = single_header.split("-") + trace_id_val = None + span_id_val = None + + # Only SamplingState is provided + if len(parts) == 1: + (sampled,) = parts + + # Only TraceId and SpanId are provided + elif len(parts) == 2: + trace_id_val, span_id_val = parts + + # Full header, ignore any ParentSpanId present + elif 
len(parts) >= 3: + trace_id_val, span_id_val, sampled = parts[:3] + + # Try to parse values into their expected types + try: + # DEV: We are allowed to have only x-b3-sampled/flags + # DEV: Do not allow `0` for trace id or span id, use None instead + if trace_id_val is not None: + trace_id = _b3_id_to_dd_id(trace_id_val) or None + if span_id_val is not None: + span_id = _b3_id_to_dd_id(span_id_val) or None + + sampling_priority = None + if sampled is not None: + if sampled == "0": + sampling_priority = AUTO_REJECT + elif sampled == "1": + sampling_priority = AUTO_KEEP + elif sampled == "d": + sampling_priority = USER_KEEP + + return Context( + trace_id=trace_id, + span_id=span_id, + sampling_priority=sampling_priority, + ) + except (TypeError, ValueError): + log.debug( + "received invalid b3 header, b3: %r", + single_header, + ) + return None + + +class _TraceContext: + """Helper class to inject/extract W3C Trace Context + https://www.w3.org/TR/trace-context/ + Overview: + - ``traceparent`` header describes the position of the incoming request in its + trace graph in a portable, fixed-length format. Its design focuses on + fast parsing. Every tracing tool MUST properly set traceparent even when + it only relies on vendor-specific information in tracestate + - ``tracestate`` header extends traceparent with vendor-specific data represented + by a set of name/value pairs. Storing information in tracestate is + optional. + + The format for ``traceparent`` is:: + HEXDIGLC = DIGIT / "a" / "b" / "c" / "d" / "e" / "f" + value = version "-" version-format + version = 2HEXDIGLC + version-format = trace-id "-" parent-id "-" trace-flags + trace-id = 32HEXDIGLC + parent-id = 16HEXDIGLC + trace-flags = 2HEXDIGLC + + Example value of HTTP ``traceparent`` header:: + value = 00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01 + base16(version) = 00 + base16(trace-id) = 4bf92f3577b34da6a3ce929d0e0e4736 + base16(parent-id) = 00f067aa0ba902b7 + base16(trace-flags) = 01 // sampled + + The format for ``tracestate`` is key value pairs with each entry limited to 256 characters. + An example of the ``dd`` list member we would add is:: + "dd=s:2;o:rum;t.dm:-4;t.usr.id:baz64" + + Implementation details: + - Datadog Trace and Span IDs are 64-bit unsigned integers. + - The W3C Trace Context Trace ID is a 16-byte hexadecimal string. + - If the incoming traceparent is invalid we DO NOT use the tracecontext headers. + Otherwise, the trace-id value is set to the hex-encoded value of the trace-id. + If the trace-id is a 64-bit value (i.e. a Datadog trace-id), + then the upper half of the hex-encoded value will be all zeroes. + + - The tracestate header will have one list member added to it, ``dd``, which contains + values that would be in x-datadog-tags as well as those needed for propagation information. + The keys to the ``dd`` values have been shortened as follows to save space: + ``sampling_priority`` = ``s`` + ``origin`` = ``o`` + ``_dd.p.`` prefix = ``t.`` + """ + + @staticmethod + def decode_tag_val(tag_val): + # type str -> str + return tag_val.replace("~", "=") + + @staticmethod + def _get_traceparent_values(tp): + # type: (str) -> Tuple[int, int, Literal[0,1]] + """If there is no traceparent, or if the traceparent value is invalid raise a ValueError. + Otherwise we extract the trace-id, span-id, and sampling priority from the + traceparent header. 
+ """ + valid_tp_values = _TRACEPARENT_HEX_REGEX.match(tp.strip()) + if valid_tp_values is None: + raise ValueError("Invalid traceparent version: %s" % tp) + + ( + version, + trace_id_hex, + span_id_hex, + trace_flags_hex, + future_vals, + ) = valid_tp_values.groups() # type: Tuple[str, str, str, str, Optional[str]] + + if version == "ff": + # https://www.w3.org/TR/trace-context/#version + raise ValueError("ff is an invalid traceparent version: %s" % tp) + elif version != "00": + # currently 00 is the only version format, but if future versions come up we may need to add changes + log.warning("unsupported traceparent version:%r, still attempting to parse", version) + elif version == "00" and future_vals is not None: + raise ValueError("Traceparents with the version `00` should contain 4 values delimited by a dash: %s" % tp) + + trace_id = _hex_id_to_dd_id(trace_id_hex) + span_id = _hex_id_to_dd_id(span_id_hex) + + # All 0s are invalid values + if trace_id == 0: + raise ValueError("0 value for trace_id is invalid") + if span_id == 0: + raise ValueError("0 value for span_id is invalid") + + trace_flags = _hex_id_to_dd_id(trace_flags_hex) + # there's currently only one trace flag, which denotes sampling priority + # was set to keep "01" or drop "00" + # trace flags is a bit field: https://www.w3.org/TR/trace-context/#trace-flags + # if statement is required to cast traceflags to a Literal + sampling_priority = 1 if trace_flags & 0x1 else 0 # type: Literal[0, 1] + + return trace_id, span_id, sampling_priority + + @staticmethod + def _get_tracestate_values(ts_l): + # type: (List[str]) -> Tuple[Optional[int], Dict[str, str], Optional[str]] + + # tracestate list parsing example: ["dd=s:2;o:rum;t.dm:-4;t.usr.id:baz64","congo=t61rcWkgMzE"] + # -> 2, {"_dd.p.dm":"-4","_dd.p.usr.id":"baz64"}, "rum" + + dd = None + for list_mem in ts_l: + if list_mem.startswith("dd="): + # cut out dd= before turning into dict + list_mem = list_mem[3:] + # since tags can have a value with a :, we need to only split on the first instance of : + dd = dict(item.split(":", 1) for item in list_mem.split(";")) + + # parse out values + if dd: + sampling_priority_ts = dd.get("s") + if sampling_priority_ts is not None: + sampling_priority_ts_int = int(sampling_priority_ts) + else: + sampling_priority_ts_int = None + + origin = dd.get("o") + if origin: + # we encode "=" as "~" in tracestate so need to decode here + origin = _TraceContext.decode_tag_val(origin) + # need to convert from t. to _dd.p. + other_propagated_tags = { + "_dd.p.%s" % k[2:]: _TraceContext.decode_tag_val(v) for (k, v) in dd.items() if k.startswith("t.") + } + + return sampling_priority_ts_int, other_propagated_tags, origin + else: + return None, {}, None + + @staticmethod + def _get_sampling_priority(traceparent_sampled, tracestate_sampling_priority): + # type: (int, Optional[int]) -> int + """ + When the traceparent sampled flag is set, the Datadog sampling priority is either + 1 or a positive value of sampling priority if propagated in tracestate. + + When the traceparent sampled flag is not set, the Datadog sampling priority is either + 0 or a negative value of sampling priority if propagated in tracestate. 
+ """ + + if traceparent_sampled == 0 and (not tracestate_sampling_priority or tracestate_sampling_priority >= 0): + sampling_priority = 0 + + elif traceparent_sampled == 1 and (not tracestate_sampling_priority or tracestate_sampling_priority < 0): + sampling_priority = 1 + else: + # The two other options provided for clarity: + # elif traceparent_sampled == 1 and tracestate_sampling_priority > 0: + # elif traceparent_sampled == 0 and tracestate_sampling_priority <= 0: + sampling_priority = tracestate_sampling_priority # type: ignore + + return sampling_priority + + @staticmethod + def _extract(headers): + # type: (Dict[str, str]) -> Optional[Context] + + try: + tp = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACEPARENT, headers) + if tp is None: + log.debug("no traceparent header") + return None + trace_id, span_id, trace_flag = _TraceContext._get_traceparent_values(tp) + except (ValueError, AssertionError): + log.exception("received invalid w3c traceparent: %s ", tp) + return None + + meta = {W3C_TRACEPARENT_KEY: tp} # type: _MetaDictType + + ts = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACESTATE, headers) + return _TraceContext._get_context(trace_id, span_id, trace_flag, ts, meta) + + @staticmethod + def _get_context(trace_id, span_id, trace_flag, ts, meta=None): + # type: (int, int, Literal[0,1], Optional[str], Optional[_MetaDictType]) -> Context + if meta is None: + meta = {} + origin = None + sampling_priority = trace_flag # type: int + if ts: + # whitespace is allowed, but whitespace to start or end values should be trimmed + # e.g. "foo=1 \t , \t bar=2, \t baz=3" -> "foo=1,bar=2,baz=3" + ts_l = [member.strip() for member in ts.split(",")] + ts = ",".join(ts_l) + # the value MUST contain only ASCII characters in the + # range of 0x20 to 0x7E + if re.search(r"[^\x20-\x7E]+", ts): + log.debug("received invalid tracestate header: %r", ts) + else: + # store tracestate so we keep other vendor data for injection, even if dd ends up being invalid + meta[W3C_TRACESTATE_KEY] = ts + try: + tracestate_values = _TraceContext._get_tracestate_values(ts_l) + except (TypeError, ValueError): + log.debug("received invalid dd header value in tracestate: %r ", ts) + tracestate_values = None + + if tracestate_values: + sampling_priority_ts, other_propagated_tags, origin = tracestate_values + meta.update(other_propagated_tags.items()) + + sampling_priority = _TraceContext._get_sampling_priority(trace_flag, sampling_priority_ts) + else: + log.debug("no dd list member in tracestate from incoming request: %r", ts) + + return Context( + trace_id=trace_id, + span_id=span_id, + sampling_priority=sampling_priority, + dd_origin=origin, + meta=meta, + ) + + @staticmethod + def _inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None + tp = span_context._traceparent + if tp: + headers[_HTTP_HEADER_TRACEPARENT] = tp + # only inject tracestate if traceparent injected: https://www.w3.org/TR/trace-context/#tracestate-header + ts = span_context._tracestate + if ts: + headers[_HTTP_HEADER_TRACESTATE] = ts + + +class _NOP_Propagator: + @staticmethod + def _extract(headers): + # type: (Dict[str, str]) -> None + return None + + # this method technically isn't needed with the current way we have HTTPPropagator.inject setup + # but if it changes then we might want it + @staticmethod + def _inject(span_context, headers): + # type: (Context , Dict[str, str]) -> Dict[str, str] + return headers + + +_PROP_STYLES = { + PROPAGATION_STYLE_DATADOG: _DatadogMultiHeader, + PROPAGATION_STYLE_B3_MULTI: 
_B3MultiHeader, + PROPAGATION_STYLE_B3_SINGLE: _B3SingleHeader, + _PROPAGATION_STYLE_W3C_TRACECONTEXT: _TraceContext, + _PROPAGATION_STYLE_NONE: _NOP_Propagator, +} + + +class HTTPPropagator(object): + """A HTTP Propagator using HTTP headers as carrier.""" + + @staticmethod + def _extract_configured_contexts_avail(normalized_headers): + contexts = [] + styles_w_ctx = [] + for prop_style in config._propagation_style_extract: + propagator = _PROP_STYLES[prop_style] + context = propagator._extract(normalized_headers) + if context: + contexts.append(context) + styles_w_ctx.append(prop_style) + return contexts, styles_w_ctx + + @staticmethod + def _resolve_contexts(contexts, styles_w_ctx, normalized_headers): + primary_context = contexts[0] + links = [] + for context in contexts[1:]: + style_w_ctx = styles_w_ctx[contexts.index(context)] + # encoding expects at least trace_id and span_id + if context.span_id and context.trace_id and context.trace_id != primary_context.trace_id: + links.append( + SpanLink( + context.trace_id, + context.span_id, + flags=1 if context.sampling_priority else 0, + tracestate=context._meta.get(W3C_TRACESTATE_KEY, "") + if style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT + else None, + attributes={ + "reason": "terminated_context", + "context_headers": style_w_ctx, + }, + ) + ) + # if trace_id matches and the propagation style is tracecontext + # add the tracestate to the primary context + elif style_w_ctx == _PROPAGATION_STYLE_W3C_TRACECONTEXT: + # extract and add the raw ts value to the primary_context + ts = _extract_header_value(_POSSIBLE_HTTP_HEADER_TRACESTATE, normalized_headers) + if ts: + primary_context._meta[W3C_TRACESTATE_KEY] = ts + primary_context._span_links = links + return primary_context + + @staticmethod + def inject(span_context, headers): + # type: (Context, Dict[str, str]) -> None + """Inject Context attributes that have to be propagated as HTTP headers. + + Here is an example using `requests`:: + + import requests + + from ddtrace.propagation.http import HTTPPropagator + + def parent_call(): + with tracer.trace('parent_span') as span: + headers = {} + HTTPPropagator.inject(span.context, headers) + url = '' + r = requests.get(url, headers=headers) + + :param Context span_context: Span context to propagate. + :param dict headers: HTTP headers to extend with tracing attributes. + """ + # Not a valid context to propagate + if span_context.trace_id is None or span_context.span_id is None: + log.debug("tried to inject invalid context %r", span_context) + return + + if config.propagation_http_baggage_enabled is True and span_context._baggage is not None: + for key in span_context._baggage: + headers[_HTTP_BAGGAGE_PREFIX + key] = span_context._baggage[key] + + if PROPAGATION_STYLE_DATADOG in config._propagation_style_inject: + _DatadogMultiHeader._inject(span_context, headers) + if PROPAGATION_STYLE_B3_MULTI in config._propagation_style_inject: + _B3MultiHeader._inject(span_context, headers) + if PROPAGATION_STYLE_B3_SINGLE in config._propagation_style_inject: + _B3SingleHeader._inject(span_context, headers) + if _PROPAGATION_STYLE_W3C_TRACECONTEXT in config._propagation_style_inject: + _TraceContext._inject(span_context, headers) + + @staticmethod + def extract(headers): + # type: (Dict[str,str]) -> Context + """Extract a Context from HTTP headers into a new Context. 
+ For tracecontext propagation we extract tracestate headers for + propagation even if another propagation style is specified before tracecontext, + so as to always propagate other vendor's tracestate values by default. + This is skipped if the tracer is configured to take the first style it matches. + + Here is an example from a web endpoint:: + + from ddtrace.propagation.http import HTTPPropagator + + def my_controller(url, headers): + context = HTTPPropagator.extract(headers) + if context: + tracer.context_provider.activate(context) + + with tracer.trace('my_controller') as span: + span.set_tag('http.url', url) + + :param dict headers: HTTP headers to extract tracing attributes. + :return: New `Context` with propagated attributes. + """ + if not headers: + return Context() + try: + normalized_headers = {name.lower(): v for name, v in headers.items()} + + # tracer configured to extract first only + if config._propagation_extract_first: + # loop through the extract propagation styles specified in order, return whatever context we get first + for prop_style in config._propagation_style_extract: + propagator = _PROP_STYLES[prop_style] + context = propagator._extract(normalized_headers) # type: ignore + if config.propagation_http_baggage_enabled is True: + _attach_baggage_to_context(normalized_headers, context) + return context + # loop through all extract propagation styles + else: + contexts, styles_w_ctx = HTTPPropagator._extract_configured_contexts_avail(normalized_headers) + + if contexts: + context = HTTPPropagator._resolve_contexts(contexts, styles_w_ctx, normalized_headers) + if config.propagation_http_baggage_enabled is True: + _attach_baggage_to_context(normalized_headers, context) + return context + + except Exception: + log.debug("error while extracting context propagation headers", exc_info=True) + return Context() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/provider.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/provider.py new file mode 100644 index 0000000..06dc1a8 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/provider.py @@ -0,0 +1,173 @@ +import abc +import contextvars +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Union # noqa:F401 + +from . import _hooks +from .context import Context # noqa:F401 +from .internal.logger import get_logger +from .span import Span + + +log = get_logger(__name__) + + +_DD_CONTEXTVAR = contextvars.ContextVar( + "datadog_contextvar", default=None +) # type: contextvars.ContextVar[Optional[Union[Context, Span]]] + +_DD_CI_CONTEXTVAR = contextvars.ContextVar( + "datadog_civisibility_contextvar", default=None +) # type: contextvars.ContextVar[Optional[Union[Context, Span]]] + + +class BaseContextProvider(metaclass=abc.ABCMeta): + """ + A ``ContextProvider`` is an interface that provides the blueprint + for a callable class, capable to retrieve the current active + ``Context`` instance. Context providers must inherit this class + and implement: + * the ``active`` method, that returns the current active ``Context`` + * the ``activate`` method, that sets the current active ``Context`` + """ + + def __init__(self): + # type: (...) 
-> None + self._hooks = _hooks.Hooks() + + @abc.abstractmethod + def _has_active_context(self): + pass + + @abc.abstractmethod + def activate(self, ctx): + # type: (Optional[Union[Context, Span]]) -> None + self._hooks.emit(self.activate, ctx) + + @abc.abstractmethod + def active(self): + # type: () -> Optional[Union[Context, Span]] + pass + + def _on_activate(self, func): + # type: (Callable[[Optional[Union[Span, Context]]], Any]) -> Callable[[Optional[Union[Span, Context]]], Any] + """Register a function to execute when a span is activated. + + Can be used as a decorator. + + :param func: The function to call when a span is activated. + The activated span will be passed as argument. + """ + self._hooks.register(self.activate, func) + return func + + def _deregister_on_activate(self, func): + # type: (Callable[[Optional[Union[Span, Context]]], Any]) -> Callable[[Optional[Union[Span, Context]]], Any] + """Unregister a function registered to execute when a span is activated. + + Can be used as a decorator. + + :param func: The function to stop calling when a span is activated. + """ + + self._hooks.deregister(self.activate, func) + return func + + def __call__(self, *args, **kwargs): + """Method available for backward-compatibility. It proxies the call to + ``self.active()`` and must not do anything more. + """ + return self.active() + + +class DatadogContextMixin(object): + """Mixin that provides active span updating suitable for synchronous + and asynchronous executions. + """ + + def activate(self, ctx): + # type: (Optional[Union[Context, Span]]) -> None + raise NotImplementedError + + def _update_active(self, span): + # type: (Span) -> Optional[Span] + """Updates the active span in an executor. + + The active span is updated to be the span's parent if the span has + finished until an unfinished span is found. + """ + if span.finished: + new_active = span # type: Optional[Span] + while new_active and new_active.finished: + new_active = new_active._parent + self.activate(new_active) + return new_active + return span + + +class DefaultContextProvider(BaseContextProvider, DatadogContextMixin): + """Context provider that retrieves contexts from a context variable. + + It is suitable for synchronous programming and for asynchronous executors + that support contextvars. + """ + + def __init__(self): + # type: () -> None + super(DefaultContextProvider, self).__init__() + _DD_CONTEXTVAR.set(None) + + def _has_active_context(self): + # type: () -> bool + """Returns whether there is an active context in the current execution.""" + ctx = _DD_CONTEXTVAR.get() + return ctx is not None + + def activate(self, ctx): + # type: (Optional[Union[Span, Context]]) -> None + """Makes the given context active in the current execution.""" + _DD_CONTEXTVAR.set(ctx) + super(DefaultContextProvider, self).activate(ctx) + + def active(self): + # type: () -> Optional[Union[Context, Span]] + """Returns the active span or context for the current execution.""" + item = _DD_CONTEXTVAR.get() + if isinstance(item, Span): + return self._update_active(item) + return item + + +class CIContextProvider(DefaultContextProvider): + """Context provider that retrieves contexts from a context variable. + + It is suitable for synchronous programming and for asynchronous executors + that support contextvars. 
+ """ + + def __init__(self): + # type: () -> None + super(DefaultContextProvider, self).__init__() + _DD_CI_CONTEXTVAR.set(None) + + def _has_active_context(self): + # type: () -> bool + """Returns whether there is an active context in the current execution.""" + ctx = _DD_CI_CONTEXTVAR.get() + return ctx is not None + + def activate(self, ctx): + # type: (Optional[Union[Span, Context]]) -> None + """Makes the given context active in the current execution.""" + _DD_CI_CONTEXTVAR.set(ctx) + super(DefaultContextProvider, self).activate(ctx) + + def active(self): + # type: () -> Optional[Union[Context, Span]] + """Returns the active span or context for the current execution.""" + item = _DD_CI_CONTEXTVAR.get() + if isinstance(item, Span): + return self._update_active(item) + return item diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/py.typed b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/runtime/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/runtime/__init__.py new file mode 100644 index 0000000..dc6a3ae --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/runtime/__init__.py @@ -0,0 +1,58 @@ +from typing import Optional # noqa:F401 + +import ddtrace.internal.runtime.runtime_metrics + + +class _RuntimeMetricsStatus(type): + @property + def _enabled(_): + # type: () -> bool + """Runtime metrics enabled status.""" + return ddtrace.internal.runtime.runtime_metrics.RuntimeWorker.enabled + + +class RuntimeMetrics(metaclass=_RuntimeMetricsStatus): + """ + Runtime metrics service API. + + This is normally started automatically by ``ddtrace-run`` when the + ``DD_RUNTIME_METRICS_ENABLED`` variable is set. + + To start the service manually, invoke the ``enable`` static method:: + + from ddtrace.runtime import RuntimeMetrics + RuntimeMetrics.enable() + """ + + @staticmethod + def enable(tracer=None, dogstatsd_url=None, flush_interval=None): + # type: (Optional[ddtrace.Tracer], Optional[str], Optional[float]) -> None + """ + Enable the runtime metrics collection service. + + If the service has already been activated before, this method does + nothing. Use ``disable`` to turn off the runtime metric collection + service. + + :param tracer: The tracer instance to correlate with. + :param dogstatsd_url: The DogStatsD URL. + :param flush_interval: The flush interval. + """ + + ddtrace.internal.runtime.runtime_metrics.RuntimeWorker.enable( + tracer=tracer, dogstatsd_url=dogstatsd_url, flush_interval=flush_interval + ) + + @staticmethod + def disable(): + # type: () -> None + """ + Disable the runtime metrics collection service. + + Once disabled, runtime metrics can be re-enabled by calling ``enable`` + again. + """ + ddtrace.internal.runtime.runtime_metrics.RuntimeWorker.disable() + + +__all__ = ["RuntimeMetrics"] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampler.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampler.py new file mode 100644 index 0000000..477a326 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampler.py @@ -0,0 +1,337 @@ +"""Samplers manage the client-side trace sampling + +Any `sampled = False` trace won't be written, and can be ignored by the instrumentation. 
+""" +import abc +import json +from typing import TYPE_CHECKING # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 + +from .constants import ENV_KEY +from .constants import SAMPLE_RATE_METRIC_KEY +from .internal.constants import _PRIORITY_CATEGORY +from .internal.constants import DEFAULT_SAMPLING_RATE_LIMIT +from .internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS +from .internal.logger import get_logger +from .internal.rate_limiter import RateLimiter +from .internal.sampling import _apply_rate_limit +from .internal.sampling import _get_highest_precedence_rule_matching +from .internal.sampling import _set_sampling_tags +from .sampling_rule import SamplingRule +from .settings import _config as ddconfig + + +try: + from json.decoder import JSONDecodeError +except ImportError: + # handling python 2.X import error + JSONDecodeError = ValueError # type: ignore + +if TYPE_CHECKING: # pragma: no cover + from .span import Span # noqa:F401 + + +log = get_logger(__name__) + +# All references to MAX_TRACE_ID were replaced with _MAX_UINT_64BITS. +# Now that ddtrace supports generating 128bit trace_ids, +# the max trace id should be 2**128 - 1 (not 2**64 -1) +# MAX_TRACE_ID is no longer used and should be removed. +MAX_TRACE_ID = _MAX_UINT_64BITS +# Has to be the same factor and key as the Agent to allow chained sampling +KNUTH_FACTOR = 1111111111111111111 + + +class SamplingError(Exception): + pass + + +class BaseSampler(metaclass=abc.ABCMeta): + __slots__ = () + + @abc.abstractmethod + def sample(self, span, allow_false=True): + pass + + +class BasePrioritySampler(BaseSampler): + __slots__ = () + + @abc.abstractmethod + def update_rate_by_service_sample_rates(self, sample_rates): + pass + + +class AllSampler(BaseSampler): + """Sampler sampling all the traces""" + + def sample(self, span, allow_false=True): + # type: (Span, bool) -> bool + return not allow_false or True + + +class RateSampler(BaseSampler): + """Sampler based on a rate + + Keep (100 * `sample_rate`)% of the traces. + It samples randomly, its main purpose is to reduce the instrumentation footprint. + """ + + def __init__(self, sample_rate=1.0): + # type: (float) -> None + if sample_rate < 0.0: + raise ValueError("sample_rate of {} is negative".format(sample_rate)) + elif sample_rate > 1.0: + sample_rate = 1.0 + + self.set_sample_rate(sample_rate) + + log.debug("initialized RateSampler, sample %s%% of traces", 100 * sample_rate) + + def set_sample_rate(self, sample_rate): + # type: (float) -> None + self.sample_rate = float(sample_rate) + self.sampling_id_threshold = self.sample_rate * _MAX_UINT_64BITS + + def sample(self, span, allow_false=True): + # type: (Span, bool) -> bool + sampled = ((span._trace_id_64bits * KNUTH_FACTOR) % _MAX_UINT_64BITS) <= self.sampling_id_threshold + # NB allow_false has weird functionality here, doing something other than "allowing false" to be returned + # this is an artifact of this library's sampler abstractions having fallen out of alignment + # with the functional specification over time. + if sampled and allow_false: + span.set_metric(SAMPLE_RATE_METRIC_KEY, self.sample_rate) + return sampled + + +class _AgentRateSampler(RateSampler): + pass + + +class RateByServiceSampler(BasePrioritySampler): + """Sampler based on a rate, by service + + Keep (100 * `sample_rate`)% of the traces. + The sample rate is kept independently for each service/env tuple. 
+ """ + + __slots__ = ("sample_rate", "_by_service_samplers", "_default_sampler") + + _default_key = "service:,env:" + + @staticmethod + def _key( + service=None, # type: Optional[str] + env=None, # type: Optional[str] + ): + # type: (...) -> str + """Compute a key with the same format used by the Datadog agent API.""" + service = service or "" + env = env or "" + return "service:" + service + ",env:" + env + + def __init__(self, sample_rate=1.0): + # type: (float) -> None + self.sample_rate = sample_rate + self._default_sampler = RateSampler(self.sample_rate) + self._by_service_samplers = {} # type: Dict[str, RateSampler] + + def set_sample_rate( + self, + sample_rate, # type: float + service="", # type: str + env="", # type: str + ): + # type: (...) -> None + self._by_service_samplers[self._key(service, env)] = _AgentRateSampler(sample_rate) + + def sample(self, span, allow_false=True): + sampled, sampler = self._make_sampling_decision(span) + _set_sampling_tags( + span, + sampled, + sampler.sample_rate, + self._choose_priority_category(sampler), + ) + return not allow_false or sampled + + def _choose_priority_category(self, sampler): + # type: (BaseSampler) -> str + if sampler is self._default_sampler: + return _PRIORITY_CATEGORY.DEFAULT + elif isinstance(sampler, _AgentRateSampler): + return _PRIORITY_CATEGORY.AUTO + else: + return _PRIORITY_CATEGORY.RULE + + def _make_sampling_decision(self, span): + # type: (Span) -> Tuple[bool, BaseSampler] + env = span.get_tag(ENV_KEY) + key = self._key(span.service, env) + sampler = self._by_service_samplers.get(key) or self._default_sampler + sampled = sampler.sample(span, allow_false=False) + return sampled, sampler + + def update_rate_by_service_sample_rates(self, rate_by_service): + # type: (Dict[str, float]) -> None + samplers = {} # type: Dict[str, RateSampler] + for key, sample_rate in rate_by_service.items(): + samplers[key] = _AgentRateSampler(sample_rate) + + self._by_service_samplers = samplers + + +class DatadogSampler(RateByServiceSampler): + """ + By default, this sampler relies on dynamic sample rates provided by the trace agent + to determine which traces are kept or dropped. + + You can also configure a static sample rate via ``default_sample_rate`` to use for sampling. + When a ``default_sample_rate`` is configured, that is the only sample rate used, and the agent + provided rates are ignored. + + You may also supply a list of ``SamplingRule`` instances to set sample rates for specific + services. + + Example rules:: + + DatadogSampler(rules=[ + SamplingRule(sample_rate=1.0, service="my-svc"), + SamplingRule(sample_rate=0.0, service="less-important"), + ]) + + Rules are evaluated in the order they are provided, and the first rule that matches is used. + If no rule matches, then the agent sample rates are used. + + This sampler can be configured with a rate limit. This will ensure the max number of + sampled traces per second does not exceed the supplied limit. The default is 100 traces kept + per second. + """ + + __slots__ = ("limiter", "rules") + + NO_RATE_LIMIT = -1 + # deprecate and remove the DEFAULT_RATE_LIMIT field from DatadogSampler + DEFAULT_RATE_LIMIT = DEFAULT_SAMPLING_RATE_LIMIT + + def __init__( + self, + rules=None, # type: Optional[List[SamplingRule]] + default_sample_rate=None, # type: Optional[float] + rate_limit=None, # type: Optional[int] + ): + # type: (...) 
-> None + """ + Constructor for DatadogSampler sampler + + :param rules: List of :class:`SamplingRule` rules to apply to the root span of every trace, default no rules + :param default_sample_rate: The default sample rate to apply if no rules matched (default: ``None`` / + Use :class:`RateByServiceSampler` only) + :param rate_limit: Global rate limit (traces per second) to apply to all traces regardless of the rules + applied to them, (default: ``100``) + """ + # Use default sample rate of 1.0 + super(DatadogSampler, self).__init__() + + if default_sample_rate is None: + if ddconfig._get_source("_trace_sample_rate") != "default": + default_sample_rate = float(ddconfig._trace_sample_rate) + + if rate_limit is None: + rate_limit = int(ddconfig._trace_rate_limit) + + if rules is None: + env_sampling_rules = ddconfig._trace_sampling_rules + if env_sampling_rules: + rules = self._parse_rules_from_env_variable(env_sampling_rules) + else: + rules = [] + self.rules = rules + else: + self.rules = [] + # Validate that rules is a list of SampleRules + for rule in rules: + if not isinstance(rule, SamplingRule): + raise TypeError("Rule {!r} must be a sub-class of type ddtrace.sampler.SamplingRules".format(rule)) + self.rules.append(rule) + + # DEV: Default sampling rule must come last + if default_sample_rate is not None: + self.rules.append(SamplingRule(sample_rate=default_sample_rate)) + + # Configure rate limiter + self.limiter = RateLimiter(rate_limit) + + log.debug("initialized %r", self) + + def __str__(self): + rates = {key: sampler.sample_rate for key, sampler in self._by_service_samplers.items()} + return "{}(agent_rates={!r}, limiter={!r}, rules={!r})".format( + self.__class__.__name__, rates, self.limiter, self.rules + ) + + __repr__ = __str__ + + def _parse_rules_from_env_variable(self, rules): + # type: (str) -> List[SamplingRule] + sampling_rules = [] + try: + json_rules = json.loads(rules) + except JSONDecodeError: + raise ValueError("Unable to parse DD_TRACE_SAMPLING_RULES={}".format(rules)) + for rule in json_rules: + if "sample_rate" not in rule: + raise KeyError("No sample_rate provided for sampling rule: {}".format(json.dumps(rule))) + sample_rate = float(rule["sample_rate"]) + service = rule.get("service", SamplingRule.NO_RULE) + name = rule.get("name", SamplingRule.NO_RULE) + resource = rule.get("resource", SamplingRule.NO_RULE) + tags = rule.get("tags", SamplingRule.NO_RULE) + try: + sampling_rule = SamplingRule( + sample_rate=sample_rate, service=service, name=name, resource=resource, tags=tags + ) + except ValueError as e: + raise ValueError("Error creating sampling rule {}: {}".format(json.dumps(rule), e)) + sampling_rules.append(sampling_rule) + return sampling_rules + + def sample(self, span, allow_false=True): + # type: (Span, bool) -> bool + """ + If allow_false is False, this function will return True regardless of the sampling decision + """ + matched_rule = _get_highest_precedence_rule_matching(span, self.rules) + + sampler = self._default_sampler # type: BaseSampler + sample_rate = self.sample_rate + if matched_rule: + sampled = matched_rule.sample(span) + sample_rate = matched_rule.sample_rate + else: + sampled, sampler = super(DatadogSampler, self)._make_sampling_decision(span) + if isinstance(sampler, RateSampler): + sample_rate = sampler.sample_rate + + _set_sampling_tags( + span, + sampled, + sample_rate, + self._choose_priority_category_with_rule(matched_rule, sampler), + ) + cleared_rate_limit = _apply_rate_limit(span, sampled, self.limiter) + + if not 
allow_false: + return True + return not cleared_rate_limit or sampled + + def _choose_priority_category_with_rule(self, rule, sampler): + # type: (Optional[SamplingRule], BaseSampler) -> str + if rule: + return _PRIORITY_CATEGORY.RULE + if self.limiter._has_been_configured: + return _PRIORITY_CATEGORY.USER + return super(DatadogSampler, self)._choose_priority_category(sampler) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampling_rule.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampling_rule.py new file mode 100644 index 0000000..3b2acbc --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/sampling_rule.py @@ -0,0 +1,214 @@ +from typing import TYPE_CHECKING # noqa:F401 + +from ddtrace.internal.compat import pattern_type +from ddtrace.internal.constants import MAX_UINT_64BITS as _MAX_UINT_64BITS +from ddtrace.internal.glob_matching import GlobMatcher +from ddtrace.internal.logger import get_logger +from ddtrace.internal.utils.cache import cachedmethod + + +if TYPE_CHECKING: # pragma: no cover + from typing import Any # noqa:F401 + from typing import Optional # noqa:F401 + from typing import Tuple # noqa:F401 + + from .span import Span # noqa:F401 + +log = get_logger(__name__) +KNUTH_FACTOR = 1111111111111111111 + + +class SamplingRule(object): + """ + Definition of a sampling rule used by :class:`DatadogSampler` for applying a sample rate on a span + """ + + NO_RULE = object() + + def __init__( + self, + sample_rate, # type: float + service=NO_RULE, # type: Any + name=NO_RULE, # type: Any + resource=NO_RULE, # type: Any + tags=NO_RULE, # type: Any + ): + # type: (...) -> None + """ + Configure a new :class:`SamplingRule` + + .. code:: python + + DatadogSampler([ + # Sample 100% of any trace + SamplingRule(sample_rate=1.0), + + # Sample no healthcheck traces + SamplingRule(sample_rate=0, name='flask.request'), + + # Sample all services ending in `-db` based on a regular expression + SamplingRule(sample_rate=0.5, service=re.compile('-db$')), + + # Sample based on service name using custom function + SamplingRule(sample_rate=0.75, service=lambda service: 'my-app' in service), + ]) + + :param sample_rate: The sample rate to apply to any matching spans + :type sample_rate: :obj:`float` greater than or equal to 0.0 and less than or equal to 1.0 + :param service: Rule to match the `span.service` on, default no rule defined + :type service: :obj:`object` to directly compare, :obj:`function` to evaluate, or :class:`re.Pattern` to match + :param name: Rule to match the `span.name` on, default no rule defined + :type name: :obj:`object` to directly compare, :obj:`function` to evaluate, or :class:`re.Pattern` to match + :param tags: A dictionary whose keys exactly match the names of tags expected to appear on spans, and whose + values are glob-matches with the expected span tag values. Glob matching supports "*" meaning any + number of characters, and "?" meaning any one character. If all tags specified in a SamplingRule are + matches with a given span, that span is considered to have matching tags with the rule. 
+ """ + # Enforce sample rate constraints + if not 0.0 <= sample_rate <= 1.0: + raise ValueError( + ( + "SamplingRule(sample_rate={}) must be greater than or equal to 0.0 and less than or equal to 1.0" + ).format(sample_rate) + ) + + self._tag_value_matchers = {k: GlobMatcher(v) for k, v in tags.items()} if tags != SamplingRule.NO_RULE else {} + + self.sample_rate = sample_rate + self.service = service + self.name = name + self.resource = resource + self.tags = tags + + @property + def sample_rate(self): + # type: () -> float + return self._sample_rate + + @sample_rate.setter + def sample_rate(self, sample_rate): + # type: (float) -> None + self._sample_rate = sample_rate + self._sampling_id_threshold = sample_rate * _MAX_UINT_64BITS + + def _pattern_matches(self, prop, pattern): + # If the rule is not set, then assume it matches + # DEV: Having no rule and being `None` are different things + # e.g. ignoring `span.service` vs `span.service == None` + if pattern is self.NO_RULE: + return True + + # If the pattern is callable (e.g. a function) then call it passing the prop + # The expected return value is a boolean so cast the response in case it isn't + if callable(pattern): + try: + return bool(pattern(prop)) + except Exception: + log.warning("%r pattern %r failed with %r", self, pattern, prop, exc_info=True) + # Their function failed to validate, assume it is a False + return False + + # The pattern is a regular expression and the prop is a string + if isinstance(pattern, pattern_type): + try: + return bool(pattern.match(str(prop))) + except (ValueError, TypeError): + # This is to guard us against the casting to a string (shouldn't happen, but still) + log.warning("%r pattern %r failed with %r", self, pattern, prop, exc_info=True) + return False + + # Exact match on the values + return prop == pattern + + @cachedmethod() + def _matches(self, key): + # type: (Tuple[Optional[str], str, Optional[str]]) -> bool + # self._matches exists to maintain legacy pattern values such as regex and functions + service, name, resource = key + for prop, pattern in [(service, self.service), (name, self.name), (resource, self.resource)]: + if not self._pattern_matches(prop, pattern): + return False + else: + return True + + def matches(self, span): + # type: (Span) -> bool + """ + Return if this span matches this rule + + :param span: The span to match against + :type span: :class:`ddtrace.span.Span` + :returns: Whether this span matches or not + :rtype: :obj:`bool` + """ + glob_match = self.glob_matches(span) + return glob_match and self._matches((span.service, span.name, span.resource)) + + def glob_matches(self, span): + # type: (Span) -> bool + tag_match = True + if self._tag_value_matchers: + tag_match = self.tag_match(span.get_tags()) + return tag_match + + def tag_match(self, tags): + if tags is None: + return False + + tag_match = False + for tag_key in self._tag_value_matchers.keys(): + value = tags.get(tag_key) + if value is not None: + tag_match = self._tag_value_matchers[tag_key].match(value) + else: + # if we don't match with all specified tags for a rule, it's not a match + return False + return tag_match + + def sample(self, span, allow_false=True): + # type: (Span, bool) -> bool + """ + Return if this rule chooses to sample the span + + :param span: The span to sample against + :type span: :class:`ddtrace.span.Span` + :returns: Whether this span was sampled + :rtype: :obj:`bool` + """ + if self.sample_rate == 1: + return True + elif self.sample_rate == 0: + return False + + return ( + not 
allow_false + or ((span._trace_id_64bits * KNUTH_FACTOR) % _MAX_UINT_64BITS) <= self._sampling_id_threshold + ) + + def _no_rule_or_self(self, val): + return "NO_RULE" if val is self.NO_RULE else val + + def __repr__(self): + return "{}(sample_rate={!r}, service={!r}, name={!r}, resource={!r}, tags={!r})".format( + self.__class__.__name__, + self.sample_rate, + self._no_rule_or_self(self.service), + self._no_rule_or_self(self.name), + self._no_rule_or_self(self.resource), + self._no_rule_or_self(self.tags), + ) + + __str__ = __repr__ + + def __eq__(self, other): + # type: (Any) -> bool + if not isinstance(other, SamplingRule): + raise TypeError("Cannot compare SamplingRule to {}".format(type(other))) + + return ( + self.sample_rate == other.sample_rate + and self.service == other.service + and self.name == other.name + and self.resource == other.resource + and self.tags == other.tags + ) diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/__init__.py new file mode 100644 index 0000000..2c3a0bf --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/__init__.py @@ -0,0 +1,17 @@ +from .._hooks import Hooks +from .config import Config +from .exceptions import ConfigException +from .http import HttpConfig +from .integration import IntegrationConfig + + +# Default global config +_config = Config() + +__all__ = [ + "Config", + "ConfigException", + "HttpConfig", + "Hooks", + "IntegrationConfig", +] diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/_database_monitoring.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/_database_monitoring.py new file mode 100644 index 0000000..e37a7c6 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/_database_monitoring.py @@ -0,0 +1,17 @@ +from envier import En +from envier import validators + + +class DatabaseMonitoringConfig(En): + __prefix__ = "dd_dbm" + + propagation_mode = En.v( + str, + "propagation_mode", + default="disabled", + help="Valid Injection Modes: disabled, service, and full", + validator=validators.choice(["disabled", "full", "service"]), + ) + + +dbm_config = DatabaseMonitoringConfig() diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/asm.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/asm.py new file mode 100644 index 0000000..d51a52d --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/asm.py @@ -0,0 +1,101 @@ +import os.path +from platform import machine +from platform import system + +from envier import Env + +from ddtrace.appsec._constants import API_SECURITY +from ddtrace.appsec._constants import APPSEC +from ddtrace.appsec._constants import DEFAULT +from ddtrace.appsec._constants import IAST +from ddtrace.constants import APPSEC_ENV +from ddtrace.constants import IAST_ENV + + +def _validate_sample_rate(r: float) -> None: + if r < 0.0 or r > 1.0: + raise ValueError("sample rate value must be between 0.0 and 1.0") + + +def build_libddwaf_filename() -> str: + """ + Build the filename of the libddwaf library to load. 
+ """ + _DIRNAME = os.path.dirname(os.path.dirname(__file__)) + FILE_EXTENSION = {"Linux": "so", "Darwin": "dylib", "Windows": "dll"}[system()] + ARCHI = machine().lower() + # 32-bit-Python on 64-bit-Windows + if system() == "Windows" and ARCHI == "amd64": + from sys import maxsize + + if maxsize <= (1 << 32): + ARCHI = "x86" + TRANSLATE_ARCH = {"amd64": "x64", "i686": "x86_64", "x86": "win32"} + ARCHITECTURE = TRANSLATE_ARCH.get(ARCHI, ARCHI) + return os.path.join(_DIRNAME, "appsec", "_ddwaf", "libddwaf", ARCHITECTURE, "lib", "libddwaf." + FILE_EXTENSION) + + +class ASMConfig(Env): + _asm_enabled = Env.var(bool, APPSEC_ENV, default=False) + _iast_enabled = Env.var(bool, IAST_ENV, default=False) + + _automatic_login_events_mode = Env.var(str, APPSEC.AUTOMATIC_USER_EVENTS_TRACKING, default="safe") + _user_model_login_field = Env.var(str, APPSEC.USER_MODEL_LOGIN_FIELD, default="") + _user_model_email_field = Env.var(str, APPSEC.USER_MODEL_EMAIL_FIELD, default="") + _user_model_name_field = Env.var(str, APPSEC.USER_MODEL_NAME_FIELD, default="") + _api_security_enabled = Env.var(bool, API_SECURITY.ENV_VAR_ENABLED, default=True) + _api_security_sample_rate = Env.var(float, API_SECURITY.SAMPLE_RATE, validator=_validate_sample_rate, default=0.1) + _api_security_parse_response_body = Env.var(bool, API_SECURITY.PARSE_RESPONSE_BODY, default=True) + _asm_libddwaf = build_libddwaf_filename() + _asm_libddwaf_available = os.path.exists(_asm_libddwaf) + + _waf_timeout = Env.var( + float, + "DD_APPSEC_WAF_TIMEOUT", + default=DEFAULT.WAF_TIMEOUT, + help_type=float, + help="Timeout in microseconds for WAF computations", + ) + + _iast_redaction_enabled = Env.var(bool, "DD_IAST_REDACTION_ENABLED", default=True) + _iast_redaction_name_pattern = Env.var( + str, + "DD_IAST_REDACTION_NAME_PATTERN", + default=r"(?i)^.*(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|" + + r"public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|" + + r"sign(?:ed|ature)?|auth(?:entication|orization)?)", + ) + _iast_redaction_value_pattern = Env.var( + str, + "DD_IAST_REDACTION_VALUE_PATTERN", + default=r"(?i)bearer\s+[a-z0-9\._\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|" + + r"ey[I-L][\w=-]+\.ey[I-L][\w=-]+(\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY" + + r"[\-]{5}[^\-]+[\-]{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}", + ) + _iast_lazy_taint = Env.var(bool, IAST.LAZY_TAINT, default=False) + + # for tests purposes + _asm_config_keys = [ + "_asm_enabled", + "_iast_enabled", + "_automatic_login_events_mode", + "_user_model_login_field", + "_user_model_email_field", + "_user_model_name_field", + "_api_security_enabled", + "_api_security_sample_rate", + "_api_security_parse_response_body", + "_waf_timeout", + "_iast_redaction_enabled", + "_iast_redaction_name_pattern", + "_iast_redaction_value_pattern", + "_iast_lazy_taint", + "_asm_config_keys", + ] + + +config = ASMConfig() + +if not config._asm_libddwaf_available: + config._asm_enabled = False + config._iast_enabled = False diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/config.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/config.py new file mode 100644 index 0000000..022f7d9 --- /dev/null +++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/settings/config.py @@ -0,0 +1,797 @@ +from copy import deepcopy +import multiprocessing +import os +import re +import sys +from typing import Any # noqa:F401 +from typing import Callable # noqa:F401 +from typing import Dict # noqa:F401 +from typing import List # 
noqa:F401 +from typing import Optional # noqa:F401 +from typing import Tuple # noqa:F401 +from typing import Union # noqa:F401 + +from ddtrace.internal.serverless import in_azure_function_consumption_plan +from ddtrace.internal.serverless import in_gcp_function +from ddtrace.internal.utils.cache import cachedmethod +from ddtrace.internal.utils.deprecations import DDTraceDeprecationWarning +from ddtrace.vendor.debtcollector import deprecate + +from ..internal import gitmetadata +from ..internal.constants import _PROPAGATION_STYLE_DEFAULT +from ..internal.constants import DEFAULT_BUFFER_SIZE +from ..internal.constants import DEFAULT_MAX_PAYLOAD_SIZE +from ..internal.constants import DEFAULT_PROCESSING_INTERVAL +from ..internal.constants import DEFAULT_REUSE_CONNECTIONS +from ..internal.constants import DEFAULT_SAMPLING_RATE_LIMIT +from ..internal.constants import DEFAULT_TIMEOUT +from ..internal.constants import PROPAGATION_STYLE_ALL +from ..internal.constants import PROPAGATION_STYLE_B3_SINGLE +from ..internal.logger import get_logger +from ..internal.schema import DEFAULT_SPAN_SERVICE_NAME +from ..internal.serverless import in_aws_lambda +from ..internal.utils.formats import asbool +from ..internal.utils.formats import parse_tags_str +from ..pin import Pin +from .http import HttpConfig +from .integration import IntegrationConfig + + +if sys.version_info >= (3, 8): + from typing import Literal # noqa:F401 +else: + from typing_extensions import Literal + + +log = get_logger(__name__) + + +DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP_DEFAULT = ( + r"(?ix)" + r"(?:" # JSON-ish leading quote + r'(?:"|%22)?' + r")" + r"(?:" # common keys" + r"(?:old[-_]?|new[-_]?)?p(?:ass)?w(?:or)?d(?:1|2)?" # pw, password variants + r"|pass(?:[-_]?phrase)?" # pass, passphrase variants + r"|secret" + r"|(?:" # key, key_id variants + r"api[-_]?" + r"|private[-_]?" + r"|public[-_]?" + r"|access[-_]?" + r"|secret[-_]?" + r"|app(?:lica" + r"tion)?[-_]?" + r")key(?:[-_]?id)?" + r"|token" + r"|consumer[-_]?(?:id|key|secret)" + r"|sign(?:ed|ature)?" + r"|auth(?:entication|orization)?" + r")" + r"(?:" + # '=' query string separator, plus value til next '&' separator + r"(?:\s|%20)*(?:=|%3D)[^&]+" + # JSON-ish '": "somevalue"', key being handled with case above, without the opening '"' + r'|(?:"|%22)' # closing '"' at end of key + r"(?:\s|%20)*(?::|%3A)(?:\s|%20)*" # ':' key-value separator, with surrounding spaces + r'(?:"|%22)' # opening '"' at start of value + r'(?:%2[^2]|%[^2]|[^"%])+' # value + r'(?:"|%22)' # closing '"' at end of value + r")" + r"|(?:" # other common secret values + r" bearer(?:\s|%20)+[a-z0-9._\-]+" + r"|token(?::|%3A)[a-z0-9]{13}" + r"|gh[opsu]_[0-9a-zA-Z]{36}" + r"|ey[I-L](?:[\w=-]|%3D)+\.ey[I-L](?:[\w=-]|%3D)+(?:\.(?:[\w.+/=-]|%3D|%2F|%2B)+)?" + r"|-{5}BEGIN(?:[a-z\s]|%20)+PRIVATE(?:\s|%20)KEY-{5}[^\-]+-{5}END" + r"(?:[a-z\s]|%20)+PRIVATE(?:\s|%20)KEY(?:-{5})?(?:\n|%0A)?" + r"|(?:ssh-(?:rsa|dss)|ecdsa-[a-z0-9]+-[a-z0-9]+)(?:\s|%20|%09)+(?:[a-z0-9/.+]" + r"|%2F|%5C|%2B){100,}(?:=|%3D)*(?:(?:\s|%20|%09)+[a-z0-9._-]+)?" + r")" +) + + +def _parse_propagation_styles(name, default): + # type: (str, Optional[str]) -> Optional[List[str]] + """Helper to parse http propagation extract/inject styles via env variables. + + The expected format is:: + +