From ab05311ea1ebf2f7572c26effc152efbfa2510b3 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 20 Oct 2023 10:43:41 -0500 Subject: [PATCH] DSL sample Fixes #7 --- .github/workflows/ci.yml | 2 +- README.md | 1 + dsl/README.md | 29 +++++++++++++ dsl/__init__.py | 0 dsl/activities.py | 28 +++++++++++++ dsl/starter.py | 46 +++++++++++++++++++++ dsl/worker.py | 44 ++++++++++++++++++++ dsl/workflow.py | 85 ++++++++++++++++++++++++++++++++++++++ dsl/workflow1.yaml | 28 +++++++++++++ dsl/workflow2.yaml | 58 ++++++++++++++++++++++++++ poetry.lock | 88 +++++++++++++++++++++++++++++++++++++++- pyproject.toml | 4 ++ 12 files changed, 411 insertions(+), 2 deletions(-) create mode 100644 dsl/README.md create mode 100644 dsl/__init__.py create mode 100644 dsl/activities.py create mode 100644 dsl/starter.py create mode 100644 dsl/worker.py create mode 100644 dsl/workflow.py create mode 100644 dsl/workflow1.yaml create mode 100644 dsl/workflow2.yaml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 03a3826b..1bac467d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,7 +27,7 @@ jobs: # Using fixed Poetry version until # https://github.com/python-poetry/poetry/pull/7694 is fixed - run: python -m pip install --upgrade wheel "poetry==1.4.0" poethepoet - - run: poetry install --with pydantic + - run: poetry install --with pydantic --with dsl - run: poe lint - run: poe test -s -o log_cli_level=DEBUG - run: poe test -s -o log_cli_level=DEBUG --workflow-environment time-skipping diff --git a/README.md b/README.md index 0ef72176..cffcaeeb 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,7 @@ Some examples require extra dependencies. See each sample's directory for specif * [activity_worker](activity_worker) - Use Python activities from a workflow in another language. * [custom_converter](custom_converter) - Use a custom payload converter to handle custom types. * [custom_decorator](custom_decorator) - Custom decorator to auto-heartbeat a long-running activity. +* [dsl](dsl) - DSL workflow that executes steps defined in a YAML file. * [encryption](encryption) - Apply end-to-end encryption for all input/output. * [gevent_async](gevent_async) - Combine gevent and Temporal. * [open_telemetry](open_telemetry) - Trace workflows with OpenTelemetry. diff --git a/dsl/README.md b/dsl/README.md new file mode 100644 index 00000000..4000b3a3 --- /dev/null +++ b/dsl/README.md @@ -0,0 +1,29 @@ +# DSL Sample + +This sample shows how to have a workflow interpret/invoke arbitrary steps defined in a DSL. It is similar to the DSL +samples [in TypeScript](https://github.com/temporalio/samples-typescript/tree/main/dsl-interpreter) and +[in Go](https://github.com/temporalio/samples-go/tree/main/dsl). + +For this sample, the optional `dsl` dependency group must be included. To include, run: + + poetry install --with dsl + +To run, first see [README.md](../README.md) for prerequisites. Then, run the following from this directory to start the +worker: + + poetry run python worker.py + +This will start the worker. Then, in another terminal, run the following to execute a workflow of steps defined in +[workflow1.yaml](workflow1.yaml): + + poetry run python starter.py workflow1.yaml + +This will run the workflow and show the final variables that the workflow returns. Looking in the worker terminal, each +step executed will be visible. 
+
+The same can be done for the more advanced [workflow2.yaml](workflow2.yaml) file:
+
+    poetry run python starter.py workflow2.yaml
+
+This sample shows how one can write a workflow that interprets arbitrary steps from a user-provided DSL. Real-world
+DSL models are often more advanced and more tailored to specific business logic needs.
\ No newline at end of file
diff --git a/dsl/__init__.py b/dsl/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/dsl/activities.py b/dsl/activities.py
new file mode 100644
index 00000000..cba79253
--- /dev/null
+++ b/dsl/activities.py
@@ -0,0 +1,28 @@
+from temporalio import activity
+
+
+class DSLActivities:
+    @activity.defn
+    async def activity1(self, arg: str) -> str:
+        activity.logger.info(f"Executing activity1 with arg: {arg}")
+        return f"[result from activity1: {arg}]"
+
+    @activity.defn
+    async def activity2(self, arg: str) -> str:
+        activity.logger.info(f"Executing activity2 with arg: {arg}")
+        return f"[result from activity2: {arg}]"
+
+    @activity.defn
+    async def activity3(self, arg1: str, arg2: str) -> str:
+        activity.logger.info(f"Executing activity3 with args: {arg1} and {arg2}")
+        return f"[result from activity3: {arg1} {arg2}]"
+
+    @activity.defn
+    async def activity4(self, arg: str) -> str:
+        activity.logger.info(f"Executing activity4 with arg: {arg}")
+        return f"[result from activity4: {arg}]"
+
+    @activity.defn
+    async def activity5(self, arg1: str, arg2: str) -> str:
+        activity.logger.info(f"Executing activity5 with args: {arg1} and {arg2}")
+        return f"[result from activity5: {arg1} {arg2}]"
diff --git a/dsl/starter.py b/dsl/starter.py
new file mode 100644
index 00000000..e530b10e
--- /dev/null
+++ b/dsl/starter.py
@@ -0,0 +1,46 @@
+import asyncio
+import logging
+import sys
+import uuid
+
+import dacite
+import yaml
+from temporalio.client import Client
+
+from dsl.workflow import DSLInput, DSLWorkflow
+
+
+async def main(dsl_yaml: str) -> None:
+    # Convert the YAML to our dataclass structure. We use PyYAML + dacite to do
+    # this but it can be done any number of ways.
+    dsl_input = dacite.from_dict(DSLInput, yaml.safe_load(dsl_yaml))
+
+    # Connect client
+    client = await Client.connect("localhost:7233")
+
+    # Run workflow
+    result = await client.execute_workflow(
+        DSLWorkflow.run,
+        dsl_input,
+        id=f"dsl-workflow-id-{uuid.uuid4()}",
+        task_queue="dsl-task-queue",
+    )
+    logging.info(
+        f"Final variables:\n    "
+        + "\n    ".join((f"{k}: {v}" for k, v in result.items()))
+    )
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+
+    # Require the YAML file as an argument. We read this _outside_ of the async
+    # def function because thread-blocking IO should never happen in async def
+    # functions.
+ if len(sys.argv) != 2: + raise RuntimeError("Expected single argument for YAML file") + with open(sys.argv[1], "r") as yaml_file: + dsl_yaml = yaml_file.read() + + # Run + asyncio.run(main(dsl_yaml)) diff --git a/dsl/worker.py b/dsl/worker.py new file mode 100644 index 00000000..9945492e --- /dev/null +++ b/dsl/worker.py @@ -0,0 +1,44 @@ +import asyncio +import logging + +from temporalio.client import Client +from temporalio.worker import Worker + +from dsl.activities import DSLActivities +from dsl.workflow import DSLWorkflow + +interrupt_event = asyncio.Event() + + +async def main(): + # Connect client + client = await Client.connect("localhost:7233") + + # Run a worker for the activities and workflow + activities = DSLActivities() + async with Worker( + client, + task_queue="dsl-task-queue", + activities=[ + activities.activity1, + activities.activity2, + activities.activity3, + activities.activity4, + activities.activity5, + ], + workflows=[DSLWorkflow], + ): + # Wait until interrupted + logging.info("Worker started, ctrl+c to exit") + await interrupt_event.wait() + logging.info("Shutting down") + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + loop = asyncio.new_event_loop() + try: + loop.run_until_complete(main()) + except KeyboardInterrupt: + interrupt_event.set() + loop.run_until_complete(loop.shutdown_asyncgens()) diff --git a/dsl/workflow.py b/dsl/workflow.py new file mode 100644 index 00000000..53cf3ec2 --- /dev/null +++ b/dsl/workflow.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +import asyncio +import dataclasses +from dataclasses import dataclass +from datetime import timedelta +from typing import Any, Dict, List, Optional, Union + +from temporalio import workflow + + +@dataclass +class DSLInput: + root: Statement + variables: Dict[str, Any] = dataclasses.field(default_factory=dict) + + +@dataclass +class ActivityStatement: + activity: ActivityInvocation + + +@dataclass +class ActivityInvocation: + name: str + arguments: List[str] = dataclasses.field(default_factory=list) + result: Optional[str] = None + + +@dataclass +class SequenceStatement: + sequence: Sequence + + +@dataclass +class Sequence: + elements: List[Statement] + + +@dataclass +class ParallelStatement: + parallel: Parallel + + +@dataclass +class Parallel: + branches: List[Statement] + + +Statement = Union[ActivityStatement, SequenceStatement, ParallelStatement] + + +@workflow.defn +class DSLWorkflow: + @workflow.run + async def run(self, input: DSLInput) -> Dict[str, Any]: + self.variables = dict(input.variables) + workflow.logger.info("Running DSL workflow") + await self.execute_statement(input.root) + workflow.logger.info("DSL workflow completed") + return self.variables + + async def execute_statement(self, stmt: Statement) -> None: + if isinstance(stmt, ActivityStatement): + # Invoke activity loading arguments from variables and optionally + # storing result as a variable + result = await workflow.execute_activity( + stmt.activity.name, + args=[self.variables.get(arg, "") for arg in stmt.activity.arguments], + start_to_close_timeout=timedelta(minutes=1), + ) + if stmt.activity.result: + self.variables[stmt.activity.result] = result + elif isinstance(stmt, SequenceStatement): + # Execute each statement in order + for elem in stmt.sequence.elements: + await self.execute_statement(elem) + elif isinstance(stmt, ParallelStatement): + # Execute all in parallel. Note, this will raise an exception when + # the first activity fails and will not cancel the others. 
We could
+            # store tasks and cancel if we wanted. In newer Python versions this
+            # would use a TaskGroup instead.
+            await asyncio.gather(
+                *[self.execute_statement(branch) for branch in stmt.parallel.branches]
+            )
diff --git a/dsl/workflow1.yaml b/dsl/workflow1.yaml
new file mode 100644
index 00000000..85da5236
--- /dev/null
+++ b/dsl/workflow1.yaml
@@ -0,0 +1,28 @@
+# This sample workflow executes 3 steps in sequence.
+# 1) activity1, takes arg1 as input, and puts its result in result1.
+# 2) activity2, takes result1 as input, and puts its result in result2.
+# 3) activity3, takes arg2 and result2 as input, and puts its result in result3.
+
+variables:
+  arg1: value1
+  arg2: value2
+
+root:
+  sequence:
+    elements:
+      - activity:
+          name: activity1
+          arguments:
+            - arg1
+          result: result1
+      - activity:
+          name: activity2
+          arguments:
+            - result1
+          result: result2
+      - activity:
+          name: activity3
+          arguments:
+            - arg2
+            - result2
+          result: result3
\ No newline at end of file
diff --git a/dsl/workflow2.yaml b/dsl/workflow2.yaml
new file mode 100644
index 00000000..cf19fdd6
--- /dev/null
+++ b/dsl/workflow2.yaml
@@ -0,0 +1,58 @@
+# This sample workflow executes 3 steps in sequence.
+# 1) activity1, takes arg1 as input, and puts its result in result1.
+# 2) a parallel block runs the two sequence branches below in parallel:
+#    2.1) sequence 1
+#         2.1.1) activity2, takes result1 as input, and puts its result in result2
+#         2.1.2) activity3, takes arg2 and result2 as input, and puts its result in result3
+#    2.2) sequence 2
+#         2.2.1) activity4, takes result1 as input, and puts its result in result4
+#         2.2.2) activity5, takes arg3 and result4 as input, and puts its result in result5
+# 3) activity3, takes result3 and result5 as input, and puts its result in result6.
+
+variables:
+  arg1: value1
+  arg2: value2
+  arg3: value3
+
+root:
+  sequence:
+    elements:
+      - activity:
+          name: activity1
+          arguments:
+            - arg1
+          result: result1
+      - parallel:
+          branches:
+            - sequence:
+                elements:
+                  - activity:
+                      name: activity2
+                      arguments:
+                        - result1
+                      result: result2
+                  - activity:
+                      name: activity3
+                      arguments:
+                        - arg2
+                        - result2
+                      result: result3
+            - sequence:
+                elements:
+                  - activity:
+                      name: activity4
+                      arguments:
+                        - result1
+                      result: result4
+                  - activity:
+                      name: activity5
+                      arguments:
+                        - arg3
+                        - result4
+                      result: result5
+      - activity:
+          name: activity3
+          arguments:
+            - result3
+            - result5
+          result: result6
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index d651d8e9..9e75e4ee 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -473,6 +473,20 @@ sdist = ["setuptools-rust (>=0.11.4)"]
 ssh = ["bcrypt (>=3.1.5)"]
 test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
+
+[[package]]
+name = "dacite"
+version = "1.8.1"
+description = "Simple creation of data classes from dictionaries."
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"}, +] + +[package.extras] +dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] + [[package]] name = "deprecated" version = "1.2.14" @@ -1322,6 +1336,66 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file 
= "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "sentry-sdk" version = "1.25.1" @@ -1480,6 +1554,18 @@ files = [ {file = "types_protobuf-4.23.0.1-py3-none-any.whl", hash = "sha256:c926104f69ea62103846681b35b690d8d100ecf86c6cdda16c850a1313a272e4"}, ] +[[package]] +name = "types-pyyaml" +version = "6.0.12.12" +description = "Typing stubs for PyYAML" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, +] + [[package]] name = "typing-extensions" version = "4.6.3" @@ -1776,4 +1862,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "894f811830ab64c86fffe2388f80b61c2fce21b0cb6aa76fbb4e4427eb7d33c6" +content-hash = "218ab0129e03d31c374fc63d3bbb091e414defc98dfc7ad047e13a0653241d38" diff --git a/pyproject.toml b/pyproject.toml index ca854e4d..402e4f19 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,10 @@ pytest-asyncio = "^0.18.3" # All sample-specific dependencies are in optional groups below, named after the # sample they apply to +[tool.poetry.group.dsl] +optional = true +dependencies = { pyyaml = "^6.0.1", types-pyyaml = "^6.0.12", dacite = "^1.8.1" } + [tool.poetry.group.encryption] optional = true dependencies = { cryptography = "^38.0.1", aiohttp = "^3.8.1" }