Add a DeadLetter Config to AWS lambda functions
Some AWS services invoke Lambda functions asynchronously. In that case, the
invoking service only ensures delivery of the event to the function; runtime
and timeout errors cannot be handled by the service.

A dead letter config is added to the `e3.aws.troposphere.awslambda` function
attributes to handle events that fail processing. The target can be an SQS
queue or an SNS topic (see the usage sketch below).
This setting requires adding the `sqs:SendMessage` or `sns:Publish` statement
to the Lambda policy, depending on the dead letter target.

ref it/aws-costs#8
ref it/org/operation_support/iaas/projects#111
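
For illustration, a minimal usage sketch of the new `dl_config` parameter,
adapted from the test added in this commit. It assumes an existing `stack`
object (as provided by the test fixture); the function name, queue name, and
role ARN are hypothetical, and the import paths follow the troposphere and
e3.aws modules referenced in the diff below.

```python
from troposphere.awslambda import DeadLetterConfig

from e3.aws.troposphere.awslambda import PyFunction
from e3.aws.troposphere.sqs import Queue

# SQS queue that receives events the function fails to process asynchronously.
dlq = Queue(name="my-function-dlq")
stack.add(dlq)

stack.add(
    PyFunction(
        name="my-function",
        description="lambda with a dead letter queue",
        role="arn:aws:iam::123456789012:role/my-lambda-role",  # hypothetical role
        runtime="python3.12",
        code_dir="my_code_dir",
        handler="app.main",
        # Route failed asynchronous invocations to the queue above.
        dl_config=DeadLetterConfig(TargetArn=dlq.arn),
    )
)

# Note: the function's policy must also allow sqs:SendMessage on the queue
# (or sns:Publish when the target is an SNS topic).
```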
jeromef853 committed Dec 10, 2024
1 parent 2e4d227 commit 49e1da9
Showing 2 changed files with 96 additions and 0 deletions.
17 changes: 17 additions & 0 deletions src/e3/aws/troposphere/awslambda/__init__.py
@@ -47,6 +47,7 @@ def __init__(
reserved_concurrent_executions: int | None = None,
environment: dict[str, str] | None = None,
logging_config: awslambda.LoggingConfig | None = None,
dl_config: awslambda.DeadLetterConfig | None = None,
):
"""Initialize an AWS lambda function.
@@ -73,6 +74,8 @@ def __init__(
:param environment: Environment variables that are accessible from function
code during execution
:param logging_config: The function's Amazon CloudWatch Logs settings
:param dl_config: The dead letter config that specifies the topic or queue where
lambda sends asynchronous events when they fail processing
"""
self.name = name
self.description = description
@@ -90,6 +93,7 @@ def __init__(
self.reserved_concurrent_executions = reserved_concurrent_executions
self.environment = environment
self.logging_config = logging_config
self.dl_config = dl_config

def cfn_policy_document(self, stack: Stack) -> PolicyDocument:
statements = [
@@ -202,6 +206,9 @@ def lambda_resources(
if self.logging_config is not None:
params["LoggingConfig"] = self.logging_config

if self.dl_config is not None:
params["DeadLetterConfig"] = self.dl_config

result = [awslambda.Function(name_to_id(self.name), **params)]
# If retention duration is given provide a log group.
# If not provided the lambda creates a log group with
@@ -285,6 +292,7 @@ def __init__(
timeout: int = 3,
memory_size: int | None = None,
logging_config: awslambda.LoggingConfig | None = None,
dl_config: awslambda.DeadLetterConfig | None = None,
):
"""Initialize an AWS lambda function using a Docker image.
@@ -298,6 +306,8 @@ def __init__(
:param memory_size: the amount of memory available to the function at
runtime. The value can be any multiple of 1 MB.
:param logging_config: The function's Amazon CloudWatch Logs settings
:param dl_config: The dead letter config that specifies the topic or queue where
lambda sends asynchronous events when they fail processing
"""
super().__init__(
name=name,
@@ -306,6 +316,7 @@ def __init__(
timeout=timeout,
memory_size=memory_size,
logging_config=logging_config,
dl_config=dl_config,
)
self.source_dir: str = source_dir
self.repository_name: str = repository_name
@@ -364,6 +375,7 @@ def __init__(
reserved_concurrent_executions: int | None = None,
environment: dict[str, str] | None = None,
logging_config: awslambda.LoggingConfig | None = None,
dl_config: awslambda.DeadLetterConfig | None = None,
):
"""Initialize an AWS lambda function with a Python runtime.
@@ -390,6 +402,8 @@ def __init__(
:param environment: Environment variables that are accessible from function
code during execution
:param logging_config: The function's Amazon CloudWatch Logs settings
:param dl_config: The dead letter config that specifies the topic or queue where
lambda sends asynchronous events when they fail processing
"""
assert runtime.startswith("python"), "PyFunction only accept Python runtimes"
super().__init__(
@@ -408,6 +422,7 @@ def __init__(
reserved_concurrent_executions=reserved_concurrent_executions,
environment=environment,
logging_config=logging_config,
dl_config=dl_config,
)
self.code_dir = code_dir
self.requirement_file = requirement_file
@@ -491,6 +506,7 @@ def __init__(
logs_retention_in_days: int | None = None,
reserved_concurrent_executions: int | None = None,
logging_config: awslambda.LoggingConfig | None = None,
dl_config: awslambda.DeadLetterConfig | None = None,
):
"""Initialize an AWS lambda function using Python 3.8 runtime.
@@ -511,6 +527,7 @@ def __init__(
logs_retention_in_days=logs_retention_in_days,
reserved_concurrent_executions=reserved_concurrent_executions,
logging_config=logging_config,
dl_config=dl_config,
)


79 changes: 79 additions & 0 deletions tests/tests_e3_aws/troposphere/awslambda/awslambda_test.py
@@ -15,6 +15,7 @@
AliasRoutingConfiguration,
VersionWeight,
LoggingConfig,
DeadLetterConfig,
)

from e3.aws import AWSEnv
@@ -33,6 +34,8 @@

from e3.pytest import require_tool

from e3.aws.troposphere.sqs import Queue

if TYPE_CHECKING:
from typing import Iterable, Callable
from flask import Application, Response
@@ -130,6 +133,52 @@
},
}

EXPECTED_PYFUNCTION_WITH_DLQ_TEMPLATE = {
"Mypylambda": {
"Properties": {
"Code": {
"S3Bucket": "cfn_bucket",
"S3Key": "templates/mypylambda_lambda.zip",
},
"DeadLetterConfig": {
"TargetArn": {"Fn::GetAtt": ["PyFunctionDLQ", "Arn"]},
},
"Description": "this is a test with dlconfig",
"FunctionName": "mypylambda",
"Handler": "app.main",
"Role": "somearn",
"Runtime": "python3.12",
"Timeout": 3,
"MemorySize": 128,
"EphemeralStorage": {"Size": 1024},
"ReservedConcurrentExecutions": 1,
"Environment": {
"Variables": {"env_key_1": "env_value_1", "env_key_2": "env_value2"}
},
"LoggingConfig": {
"ApplicationLogLevel": "INFO",
"LogFormat": "JSON",
"SystemLogLevel": "WARN",
},
},
"Type": "AWS::Lambda::Function",
},
"MypylambdaLogGroup": {
"DeletionPolicy": "Retain",
"Properties": {
"LogGroupName": "/aws/lambda/mypylambda",
"RetentionInDays": 7,
},
"Type": "AWS::Logs::LogGroup",
},
"PyFunctionDLQ": {
"Properties": {
"QueueName": "PyFunctionDLQ",
"VisibilityTimeout": 30,
},
"Type": "AWS::SQS::Queue",
},
}

EXPECTED_PYFUNCTION_POLICY_DOCUMENT = {
"Statement": [
@@ -415,6 +464,36 @@ def test_pyfunction(stack: Stack) -> None:
assert stack.export()["Resources"] == EXPECTED_PYFUNCTION_TEMPLATE


def test_pyfunction_with_dlconfig(stack: Stack) -> None:
stack.s3_bucket = "cfn_bucket"
stack.s3_key = "templates/"
dlq = Queue(name="PyFunctionDLQ")
stack.add(dlq)
stack.add(
PyFunction(
name="mypylambda",
description="this is a test with dlconfig",
role="somearn",
runtime="python3.12",
code_dir="my_code_dir",
handler="app.main",
memory_size=128,
ephemeral_storage_size=1024,
logs_retention_in_days=7,
reserved_concurrent_executions=1,
environment={"env_key_1": "env_value_1", "env_key_2": "env_value2"},
logging_config=LoggingConfig(
ApplicationLogLevel="INFO",
LogFormat="JSON",
SystemLogLevel="WARN",
),
dl_config=DeadLetterConfig(TargetArn=dlq.arn),
)
)
print(stack.export()["Resources"])
assert stack.export()["Resources"] == EXPECTED_PYFUNCTION_WITH_DLQ_TEMPLATE


def test_pyfunction_with_requirements(tmp_path: Path, stack: Stack) -> None:
"""Test PyFunction creation."""
stack.s3_bucket = "cfn_bucket"
