From 70e980affb77724461cf048dda1925c4842e2a64 Mon Sep 17 00:00:00 2001 From: Eduardo Martin Rojo Date: Fri, 8 Jul 2022 16:26:55 +0100 Subject: [PATCH 1/3] First commit --- Dockerfile | 44 + Pipfile | 39 + Pipfile.lock | 801 ++++++++++++++++++ README.md | 60 +- docker-compose-standalone.yml | 9 + docker-compose.override.yml | 9 + docker-compose.yml | 57 ++ entrypoint.sh | 17 + forlint.sh | 20 + lab_share_lib/__init__.py | 0 lab_share_lib/config/test.py | 0 lab_share_lib/constants.py | 2 + lab_share_lib/exceptions.py | 19 + lab_share_lib/processing/__init__.py | 0 lab_share_lib/processing/base_processor.py | 7 + lab_share_lib/processing/rabbit_message.py | 36 + .../processing/rabbit_message_processor.py | 53 ++ lab_share_lib/rabbit/__init__.py | 0 lab_share_lib/rabbit/async_consumer.py | 352 ++++++++ lab_share_lib/rabbit/avro_encoder.py | 46 + lab_share_lib/rabbit/background_consumer.py | 67 ++ lab_share_lib/rabbit/basic_publisher.py | 43 + lab_share_lib/rabbit/messages/__init__.py | 0 lab_share_lib/rabbit/messages/base_message.py | 23 + lab_share_lib/rabbit/rabbit_stack.py | 44 + lab_share_lib/rabbit/schema_registry.py | 25 + lab_share_lib/types.py | 20 + lefthook.yml | 19 + mypy.ini | 47 + pyproject.toml | 6 + setup.cfg | 10 + setup.py | 14 + tests/__init__.py | 0 tests/conftest.py | 6 + tests/processing/test_rabbit_message.py | 68 ++ .../test_rabbit_message_processor.py | 133 +++ tests/rabbit/messages/test_base_message.py | 36 + tests/rabbit/test_async_consumer.py | 393 +++++++++ tests/rabbit/test_avro_encoder.py | 91 ++ tests/rabbit/test_background_consumer.py | 107 +++ tests/rabbit/test_basic_publisher.py | 83 ++ tests/rabbit/test_schema_registry.py | 110 +++ wait_for_connection.sh | 29 + 43 files changed, 2943 insertions(+), 2 deletions(-) create mode 100644 Dockerfile create mode 100644 Pipfile create mode 100644 Pipfile.lock create mode 100644 docker-compose-standalone.yml create mode 100644 docker-compose.override.yml create mode 100644 docker-compose.yml create mode 100755 entrypoint.sh create mode 100755 forlint.sh create mode 100644 lab_share_lib/__init__.py create mode 100644 lab_share_lib/config/test.py create mode 100644 lab_share_lib/constants.py create mode 100644 lab_share_lib/exceptions.py create mode 100644 lab_share_lib/processing/__init__.py create mode 100644 lab_share_lib/processing/base_processor.py create mode 100644 lab_share_lib/processing/rabbit_message.py create mode 100644 lab_share_lib/processing/rabbit_message_processor.py create mode 100644 lab_share_lib/rabbit/__init__.py create mode 100644 lab_share_lib/rabbit/async_consumer.py create mode 100644 lab_share_lib/rabbit/avro_encoder.py create mode 100644 lab_share_lib/rabbit/background_consumer.py create mode 100644 lab_share_lib/rabbit/basic_publisher.py create mode 100644 lab_share_lib/rabbit/messages/__init__.py create mode 100644 lab_share_lib/rabbit/messages/base_message.py create mode 100644 lab_share_lib/rabbit/rabbit_stack.py create mode 100644 lab_share_lib/rabbit/schema_registry.py create mode 100644 lab_share_lib/types.py create mode 100644 lefthook.yml create mode 100644 mypy.ini create mode 100644 pyproject.toml create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/processing/test_rabbit_message.py create mode 100644 tests/processing/test_rabbit_message_processor.py create mode 100644 tests/rabbit/messages/test_base_message.py create mode 100644 tests/rabbit/test_async_consumer.py create mode 
100644 tests/rabbit/test_avro_encoder.py create mode 100644 tests/rabbit/test_background_consumer.py create mode 100644 tests/rabbit/test_basic_publisher.py create mode 100644 tests/rabbit/test_schema_registry.py create mode 100755 wait_for_connection.sh diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..1fbf30f --- /dev/null +++ b/Dockerfile @@ -0,0 +1,44 @@ +# Use a slim base image for a smaller image size and install only the required packages +FROM python:3.8-slim-buster + +# > Setting PYTHONUNBUFFERED to a non empty value ensures that the python output is sent straight to +# > terminal (e.g. your container log) without being first buffered and that you can see the output +# > of your application (e.g. django logs) in real time. +# https://stackoverflow.com/a/59812588 +ENV PYTHONUNBUFFERED 1 + +# Install required libs +RUN apt-get update && \ + apt-get install -y \ + build-essential \ + curl \ + netcat \ + unixodbc-dev + +# Install the package manager - pipenv +RUN pip install --upgrade pip && \ + pip install --no-cache-dir pipenv + +# Change the working directory for all subsequent operations +# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#workdir +WORKDIR /code + +# "items (files, directories) that do not require ADD’s tar auto-extraction capability, you should always use COPY." +# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#add-or-copy +COPY Pipfile . +COPY Pipfile.lock . + +# Install both default and dev packages so that we can run the tests against this image +RUN pipenv sync --dev --system && \ + pipenv --clear + +# Copy all the source to the image +COPY . . + +# "The best use for ENTRYPOINT is to set the image’s main command, allowing that image to be run as though it was that +# command (and then use CMD as the default flags)."
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#entrypoint + +# https://docs.docker.com/engine/reference/builder/#healthcheck +#HEALTHCHECK --interval=1m --timeout=3s \ +# CMD curl -f http://localhost:8000/health || exit 1 diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..683e8fe --- /dev/null +++ b/Pipfile @@ -0,0 +1,39 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +black = "*" +coverage = {extras = ["toml"],version = "*"} +flake8 = "*" +flake8-bugbear = "*" +mypy = "*" +pika-stubs = "*" +pytest = "*" +pytest-cov = "*" +pytest-freezegun = "*" +types-python-dateutil = "*" +responses = "*" +types-requests = "*" + +[packages] +colorlog = "~=6.6" +fastavro = "~=1.5" +more-itertools = "~=8.13" +pika = "~=1.3" +python-dotenv = "~=0.20" +requests = "~=2.28" +slackclient = "~=2.9" + +[requires] +python_version = "3.8" + +[pipenv] +allow_prereleases = true + +[scripts] +black = 'black' +mypy = 'mypy' +flake8 = 'flake8' +test = 'python -m pytest -vx' diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..9a5133d --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,801 @@ +{ + "_meta": { + "hash": { + "sha256": "02b7ecf2675009cad3c3bdf238d943eac213507d8ce9d4ca35532e49e1278337" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.8" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "aiohttp": { + "hashes": [ + "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3", + "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782", + "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75", + "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf", + "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7", + "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675", + "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1", + "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785", + "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4", + "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf", + "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5", + "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15", + "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca", + "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8", + "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac", + "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8", + "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef", + "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516", + "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700", + "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2", + "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8", + "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0", + "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676", + "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad", + "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155", + "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db", + 
"sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd", + "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091", + "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602", + "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411", + "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93", + "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd", + "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec", + "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51", + "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7", + "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17", + "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d", + "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00", + "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923", + "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440", + "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32", + "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e", + "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1", + "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724", + "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a", + "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8", + "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2", + "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33", + "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b", + "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2", + "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632", + "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b", + "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2", + "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316", + "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74", + "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96", + "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866", + "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44", + "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950", + "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa", + "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c", + "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a", + "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd", + "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd", + "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9", + "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421", + "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2", + "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922", + "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4", + "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237", + "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642", + "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578" + ], + 
"markers": "python_full_version >= '3.6.0'", + "version": "==3.8.1" + }, + "aiosignal": { + "hashes": [ + "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a", + "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==1.2.0" + }, + "async-timeout": { + "hashes": [ + "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15", + "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==4.0.2" + }, + "attrs": { + "hashes": [ + "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", + "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==21.4.0" + }, + "certifi": { + "hashes": [ + "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", + "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==2022.6.15" + }, + "charset-normalizer": { + "hashes": [ + "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", + "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==2.1.0" + }, + "colorlog": { + "hashes": [ + "sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8", + "sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e" + ], + "index": "pypi", + "version": "==6.6.0" + }, + "fastavro": { + "hashes": [ + "sha256:2c4777ebd022aecb05829b52359351746fb8ac1f3958385e524addde22d212c4", + "sha256:3600c5b8ccc0c5dfab5927486d71ba79ac155b0eeecce517d86e191a5186110c", + "sha256:45b7bfabf42bb3882071599eef49228039febbb1b8801b3a65bf32d717eea075", + "sha256:49398a773fab076725abf8926525092c455f2cb2ae58127d69fcc132d970fd6e", + "sha256:4c447c81bf9002aa0258bb8e208698744b4e22362548d5423f718c92c22ce572", + "sha256:5de3473c759c884f88c0e4e078538b9199416183bd4f49a6baf39468d7c0900c", + "sha256:746d97e41d24c838c70a44cb4e6a7e4109f20923fda5a6de18482e31db06ccdd", + "sha256:7699d1283062f11a3bbfb705f1f2d3fae0f7a1238ae3ca6234c7c9208b0a11a6", + "sha256:778d134d144d025f72ed98f60b5a63d363f473c5b19a32f4173e2e11e7f30be9", + "sha256:7e4ccdf1142a01b5a10b9451cc7f2e82b0bf4d2593921b9aa8d66a4758f42ee7", + "sha256:8bfd03691fb57771b0f4d4a241bd5d5f2e622cbd55e88ab3718a58b7e8e00bdb", + "sha256:90517b77f9d8362efe8deaa3da06673f0ef961e973bd9daa49aba998d8ca849e", + "sha256:cc162420a6619debb8aabffc961e8f9aa442dfb0246115c421a0fa108c5e0796", + "sha256:db4f1a65dfa6d26b4be74d9aacd13467a17928ada2c15096b401288671689e44", + "sha256:e6f23f1f4e2726408a35d2f2b279c29da41a989351a0f8c28ec62348f01be977", + "sha256:ee7ffc6882834499fd53877a08cdadbb107821a559bb78278dbbb3685ae92788", + "sha256:faa8c9cd4920c2e4e4131dae2e5fb050cc64533c22384d5dbba55c6a6463771e" + ], + "index": "pypi", + "version": "==1.5.2" + }, + "frozenlist": { + "hashes": [ + "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e", + "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08", + "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b", + "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486", + "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78", + 
"sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468", + "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1", + "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953", + "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3", + "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d", + "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a", + "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141", + "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08", + "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07", + "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa", + "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa", + "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868", + "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f", + "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b", + "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b", + "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1", + "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f", + "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478", + "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58", + "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01", + "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8", + "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d", + "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676", + "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274", + "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab", + "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8", + "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24", + "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a", + "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2", + "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f", + "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f", + "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93", + "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1", + "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51", + "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846", + "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5", + "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d", + "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c", + "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e", + "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae", + "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02", + "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0", + "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b", + "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3", + "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b", + "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa", + 
"sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a", + "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d", + "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed", + "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148", + "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9", + "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c", + "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2", + "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.0" + }, + "idna": { + "hashes": [ + "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", + "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" + ], + "markers": "python_version >= '3.5'", + "version": "==3.3" + }, + "more-itertools": { + "hashes": [ + "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f", + "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb" + ], + "index": "pypi", + "version": "==8.13.0" + }, + "multidict": { + "hashes": [ + "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60", + "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c", + "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672", + "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51", + "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032", + "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2", + "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b", + "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80", + "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88", + "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a", + "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d", + "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389", + "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c", + "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9", + "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c", + "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516", + "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b", + "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43", + "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee", + "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227", + "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d", + "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae", + "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7", + "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4", + "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9", + "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f", + "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013", + "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9", + "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e", + "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693", + 
"sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a", + "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15", + "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb", + "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96", + "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87", + "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376", + "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658", + "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0", + "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071", + "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360", + "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc", + "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3", + "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba", + "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8", + "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9", + "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2", + "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3", + "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68", + "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8", + "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d", + "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49", + "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608", + "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57", + "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86", + "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20", + "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293", + "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849", + "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937", + "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d" + ], + "markers": "python_version >= '3.7'", + "version": "==6.0.2" + }, + "pika": { + "hashes": [ + "sha256:15357ddc47a5c28f0b07d80e93d504cbbf7a1ad5e1cd129ecd27afe76472c529", + "sha256:9195f37aed089862b205fd8f8ce1cc6ea0a7ee3cd80f58e6eea6cb9d8411a647" + ], + "index": "pypi", + "version": "==1.3.0" + }, + "python-dotenv": { + "hashes": [ + "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f", + "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938" + ], + "index": "pypi", + "version": "==0.20.0" + }, + "requests": { + "hashes": [ + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" + ], + "index": "pypi", + "version": "==2.28.1" + }, + "slackclient": { + "hashes": [ + "sha256:a8cab9146795e23d66a03473b80dd23df8c500829dfa9d06b3e3d5aec0a2b293", + "sha256:ab79fefb5412d0595bc01d2f195a787597f2a617b6766562932ab9ffbe5cb173" + ], + "index": "pypi", + "version": "==2.9.4" + }, + "urllib3": { + "hashes": [ + "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec", + "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < 
'4'", + "version": "==1.26.10" + }, + "yarl": { + "hashes": [ + "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac", + "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8", + "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e", + "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746", + "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98", + "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125", + "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d", + "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d", + "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986", + "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d", + "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec", + "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8", + "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee", + "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3", + "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1", + "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd", + "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b", + "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de", + "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0", + "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8", + "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6", + "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245", + "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23", + "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332", + "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1", + "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c", + "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4", + "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0", + "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8", + "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832", + "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58", + "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6", + "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1", + "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52", + "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92", + "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185", + "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d", + "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d", + "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b", + "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739", + "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05", + "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63", + "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d", + "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa", + "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913", + 
"sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe", + "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b", + "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b", + "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656", + "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1", + "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4", + "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e", + "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63", + "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271", + "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed", + "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d", + "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda", + "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265", + "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f", + "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c", + "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba", + "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c", + "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b", + "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523", + "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a", + "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef", + "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95", + "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72", + "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794", + "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41", + "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576", + "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==1.7.2" + } + }, + "develop": { + "attrs": { + "hashes": [ + "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", + "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==21.4.0" + }, + "black": { + "hashes": [ + "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90", + "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c", + "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78", + "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4", + "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee", + "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e", + "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e", + "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6", + "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9", + "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c", + "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256", + "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f", + "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2", + 
"sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c", + "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b", + "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807", + "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf", + "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def", + "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad", + "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d", + "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849", + "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69", + "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666" + ], + "index": "pypi", + "version": "==22.6.0" + }, + "certifi": { + "hashes": [ + "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", + "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==2022.6.15" + }, + "charset-normalizer": { + "hashes": [ + "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", + "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" + ], + "markers": "python_full_version >= '3.6.0'", + "version": "==2.1.0" + }, + "click": { + "hashes": [ + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.3" + }, + "coverage": { + "extras": [ + "toml" + ], + "hashes": [ + "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749", + "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982", + "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3", + "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9", + "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428", + "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e", + "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c", + "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9", + "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264", + "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605", + "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397", + "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d", + "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c", + "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815", + "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068", + "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b", + "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4", + "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4", + "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3", + "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84", + "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83", + "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4", + "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8", + "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb", + 
"sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d", + "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df", + "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6", + "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b", + "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72", + "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13", + "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df", + "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc", + "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6", + "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28", + "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b", + "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4", + "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad", + "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46", + "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3", + "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9", + "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54" + ], + "index": "pypi", + "version": "==6.4.1" + }, + "flake8": { + "hashes": [ + "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d", + "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d" + ], + "index": "pypi", + "version": "==4.0.1" + }, + "flake8-bugbear": { + "hashes": [ + "sha256:db5d7a831ef4412a224b26c708967ff816818cabae415e76b8c58df156c4b8e5", + "sha256:e450976a07e4f9d6c043d4f72b17ec1baf717fe37f7997009c8ae58064f88305" + ], + "index": "pypi", + "version": "==22.7.1" + }, + "freezegun": { + "hashes": [ + "sha256:15103a67dfa868ad809a8f508146e396be2995172d25f927e48ce51c0bf5cb09", + "sha256:b4c64efb275e6bc68dc6e771b17ffe0ff0f90b81a2a5189043550b6519926ba4" + ], + "markers": "python_version >= '3.6'", + "version": "==1.2.1" + }, + "idna": { + "hashes": [ + "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", + "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" + ], + "markers": "python_version >= '3.5'", + "version": "==3.3" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "mypy": { + "hashes": [ + "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5", + "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66", + "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e", + "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56", + "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e", + "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d", + "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813", + "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932", + "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569", + 
"sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b", + "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0", + "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648", + "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6", + "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950", + "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15", + "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723", + "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a", + "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3", + "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6", + "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24", + "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b", + "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d", + "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492" + ], + "index": "pypi", + "version": "==0.961" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "packaging": { + "hashes": [ + "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", + "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" + ], + "markers": "python_version >= '3.6'", + "version": "==21.3" + }, + "pathspec": { + "hashes": [ + "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a", + "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1" + ], + "version": "==0.9.0" + }, + "pika-stubs": { + "hashes": [ + "sha256:aaa78fa9f52eb3591b6073fbbe2607567405d1857be268d447bea252e22dd6cf" + ], + "index": "pypi", + "version": "==0.1.3" + }, + "platformdirs": { + "hashes": [ + "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", + "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" + ], + "markers": "python_version >= '3.7'", + "version": "==2.5.2" + }, + "pluggy": { + "hashes": [ + "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", + "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" + ], + "markers": "python_version >= '3.6'", + "version": "==1.0.0" + }, + "py": { + "hashes": [ + "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", + "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.11.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20", + "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==2.8.0" + }, + "pyflakes": { + "hashes": [ + "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c", + "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.0" + }, + "pyparsing": { + "hashes": [ + "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", + 
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" + ], + "markers": "python_full_version >= '3.6.8'", + "version": "==3.0.9" + }, + "pytest": { + "hashes": [ + "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c", + "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45" + ], + "index": "pypi", + "version": "==7.1.2" + }, + "pytest-cov": { + "hashes": [ + "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6", + "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470" + ], + "index": "pypi", + "version": "==3.0.0" + }, + "pytest-freezegun": { + "hashes": [ + "sha256:19c82d5633751bf3ec92caa481fb5cffaac1787bd485f0df6436fd6242176949", + "sha256:5318a6bfb8ba4b709c8471c94d0033113877b3ee02da5bfcd917c1889cde99a7" + ], + "index": "pypi", + "version": "==0.4.2" + }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.2" + }, + "requests": { + "hashes": [ + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" + ], + "index": "pypi", + "version": "==2.28.1" + }, + "responses": { + "hashes": [ + "sha256:2dcc863ba63963c0c3d9ee3fa9507cbe36b7d7b0fccb4f0bdfd9e96c539b1487", + "sha256:b82502eb5f09a0289d8e209e7bad71ef3978334f56d09b444253d5ad67bf5253" + ], + "index": "pypi", + "version": "==0.21.0" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + }, + "types-python-dateutil": { + "hashes": [ + "sha256:8695c7d7a5b1aef4002f3ab4e1247e23b1d41cd7cc1286d4594c2d8c5593c991", + "sha256:fd5ed97262b76ae684695ea38ace8dd7c1bc9491aba7eb4edf6654b7ecabc870" + ], + "index": "pypi", + "version": "==2.8.18" + }, + "types-requests": { + "hashes": [ + "sha256:85383b4ef0535f639c3f06c5bbb6494bbf59570c4cd88bbcf540f0b2ac1b49ab", + "sha256:9863d16dfbb3fa55dcda64fa3b989e76e8859033b26c1e1623e30465cfe294d3" + ], + "index": "pypi", + "version": "==2.28.0" + }, + "types-urllib3": { + "hashes": [ + "sha256:20588c285e5ca336d908d2705994830a83cfb6bda40fc356bbafaf430a262013", + "sha256:8bb3832c684c30cbed40b96e28bc04703becb2b97d82ac65ba4b968783453b0e" + ], + "version": "==1.26.16" + }, + "typing-extensions": { + "hashes": [ + "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02", + "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6" + ], + "markers": "python_version >= '3.7'", + "version": "==4.3.0" + }, + "urllib3": { + "hashes": [ + "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec", + "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", + "version": "==1.26.10" + } + } +} diff --git a/README.md 
b/README.md index 0b1cf66..4697679 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,58 @@ -# lab-share-lib -Python library to interact with lab-share framework +# tol-lab-share + +RabbitMQ consumer for TOL data input + + +## Getting Started + +The following tools are required for development: + +- python (the version specified in the `Pipfile`) + +Use pyenv or something similar to install the version of Python +defined in the `Pipfile`: + +```bash + brew install pyenv + pyenv install +``` + +Use pipenv to install the required Python packages for the application and development: + +```bash + pipenv install --dev +``` + + +### Setting up with Docker + +If you want to set up a local development environment with Docker, please check +the instructions in [SETUP_DOCKER.md](SETUP_DOCKER.md). + + +## Running + +1. Enter the Python virtual environment using: +```bash + pipenv shell +``` + +1. Run the app using: + +```bash + python tol-lab-share +``` + +## Testing + +Run the tests using pytest (the flags give verbose output and stop at the first failure): + +```bash + python -m pytest -vx +``` + +## Deployment + +This project uses a Docker image as the unit of deployment. Update `.release-version` with +major/minor/patch. On merging a pull request into *develop* or *master*, a release will be created +along with the Docker image associated with that release. diff --git a/docker-compose-standalone.yml b/docker-compose-standalone.yml new file mode 100644 index 0000000..809ce80 --- /dev/null +++ b/docker-compose-standalone.yml @@ -0,0 +1,9 @@ +# +# Basic version of the stack, for GitHub +# +version: "3.3" +services: + lab-share-lib: + build: . + image: lab-share-lib + entrypoint: "bash -c ./entrypoint.sh" \ No newline at end of file diff --git a/docker-compose.override.yml b/docker-compose.override.yml new file mode 100644 index 0000000..4ef075e --- /dev/null +++ b/docker-compose.override.yml @@ -0,0 +1,9 @@ +# +# Extension of the stack for running a local development environment. +# Mounts the code from the current folder and +# connects to the web and RabbitMQ UIs +version: "3.3" +services: + lab-share-lib: + volumes: + - ".:/code" diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..b6f701b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,57 @@ +# +# Basic version of the stack, for GitHub +# +version: "3.3" +services: + lab-share-lib: + build: .
+ image: lab-share-lib + depends_on: + - rabbitmq + - schema_registry + environment: + REDPANDA_HOST: host.docker.internal + REDPANDA_PORT: 8081 + RABBITMQ_HOST: host.docker.internal + RABBITMQ_PORT: 5672 + + entrypoint: "bash -c ./entrypoint.sh" + + # An instance of RedPanda Schema Registry + # To run it as a standalone container: + # docker network create redpanda-network + # docker volume create redpanda-volume + # docker run --name=schema_registry --net=redpanda-network -v "redpanda-volume:/var/lib/redpanda/data" -p 8081:8081 \ + # docker.vectorized.io/vectorized/redpanda start --overprovisioned --smp 1 --memory 250M + schema_registry: + image: docker.vectorized.io/vectorized/redpanda + networks: + - redpanda-network + ports: + - "8081:8081" + volumes: + - redpanda-volume:/var/lib/redpanda/data + #mem_limit: 250m + command: + - start + - --overprovisioned + - --smp 1 + - --memory 250M + + rabbitmq: + image: rabbitmq:3-management + hostname: heron-rabbitmq + ports: + - "5672:5672" + - "8080:15672" + volumes: + - ./rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf:ro + - rabbitmq-volume:/var/lib/rabbitmq/mnesia + +volumes: + redpanda-volume: + rabbitmq-volume: + +networks: + redpanda-network: + external: true diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 0000000..ea72943 --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +# +# This script waits for connections to the RedPanda schema registry +# and to RabbitMQ (for up to TIMEOUT seconds each) +# and after that will start the consumer service +set -o errexit +set -o pipefail +set -o nounset + +TIMEOUT=120 + +./wait_for_connection.sh "${REDPANDA_HOST}" "${REDPANDA_PORT}" "${TIMEOUT}" +./wait_for_connection.sh "${RABBITMQ_HOST}" "${RABBITMQ_PORT}" "${TIMEOUT}" + +echo "Starting service" +exec python tol-lab-share diff --git a/forlint.sh b/forlint.sh new file mode 100755 index 0000000..1965c3a --- /dev/null +++ b/forlint.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +### +# A little bash script to ease the formatting and linting burden +# +# run by: ./forlint.sh +### +echo "Running 'black .' on all files using the config in pyproject.toml ..." +black . +echo "Black complete." +echo "---------------" + +echo "Running 'mypy .' on all the files using the config in mypy.ini ..." +mypy . +echo "mypy complete." +echo "---------------" + +echo "Running 'flake8' on all the files using the config in setup.cfg ..." +flake8 +echo "flake8 complete." diff --git a/lab_share_lib/__init__.py b/lab_share_lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lab_share_lib/config/test.py b/lab_share_lib/config/test.py new file mode 100644 index 0000000..e69de29 diff --git a/lab_share_lib/constants.py b/lab_share_lib/constants.py new file mode 100644 index 0000000..57d0449 --- /dev/null +++ b/lab_share_lib/constants.py @@ -0,0 +1,2 @@ +RABBITMQ_HEADER_KEY_SUBJECT = "subject" +RABBITMQ_HEADER_KEY_VERSION = "version" diff --git a/lab_share_lib/exceptions.py b/lab_share_lib/exceptions.py new file mode 100644 index 0000000..430e4f3 --- /dev/null +++ b/lab_share_lib/exceptions.py @@ -0,0 +1,19 @@ +class Error(Exception): + """Base class for exceptions in this module.""" + + pass + + +class TransientRabbitError(Error): + """ + Raised during processing of a RabbitMQ message when a transient issue occurs. + For example, this might be a database being inaccessible. The message should be reprocessed. + """ + + def __init__(self, message): + """Constructs a new processing error message.
+ + Arguments: + message {str} -- A message to log and possibly show to the user/caller. + """ + self.message = message diff --git a/lab_share_lib/processing/__init__.py b/lab_share_lib/processing/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lab_share_lib/processing/base_processor.py b/lab_share_lib/processing/base_processor.py new file mode 100644 index 0000000..6cbba80 --- /dev/null +++ b/lab_share_lib/processing/base_processor.py @@ -0,0 +1,7 @@ +from typing import Callable + +from lab_share_lib.processing.rabbit_message import RabbitMessage + + +class BaseProcessor: + process: Callable[["BaseProcessor", RabbitMessage], bool] diff --git a/lab_share_lib/processing/rabbit_message.py b/lab_share_lib/processing/rabbit_message.py new file mode 100644 index 0000000..1573295 --- /dev/null +++ b/lab_share_lib/processing/rabbit_message.py @@ -0,0 +1,36 @@ +from lab_share_lib.constants import RABBITMQ_HEADER_KEY_SUBJECT, RABBITMQ_HEADER_KEY_VERSION + + +class RabbitMessage: + def __init__(self, headers, encoded_body): + self.headers = headers + self.encoded_body = encoded_body + + self._subject = None + self._schema_version = None + self._decoded_list = None + self._message = None + + @property + def subject(self): + if self._subject is None: + self._subject = self.headers[RABBITMQ_HEADER_KEY_SUBJECT] + return self._subject + + @property + def schema_version(self): + if self._schema_version is None: + self._schema_version = self.headers[RABBITMQ_HEADER_KEY_VERSION] + return self._schema_version + + def decode(self, encoder): + self._decoded_list = list(encoder.decode(self.encoded_body, self.schema_version)) + + @property + def contains_single_message(self): + return self._decoded_list is not None and len(self._decoded_list) == 1 + + @property + def message(self): + if self._decoded_list: + return self._decoded_list[0] diff --git a/lab_share_lib/processing/rabbit_message_processor.py b/lab_share_lib/processing/rabbit_message_processor.py new file mode 100644 index 0000000..762e36c --- /dev/null +++ b/lab_share_lib/processing/rabbit_message_processor.py @@ -0,0 +1,53 @@ +import logging +from typing import cast + +from lab_share_lib.exceptions import TransientRabbitError +from lab_share_lib.processing.base_processor import BaseProcessor +from lab_share_lib.processing.rabbit_message import RabbitMessage +from lab_share_lib.rabbit.avro_encoder import AvroEncoder + +LOGGER = logging.getLogger(__name__) + + +class RabbitMessageProcessor: + def __init__(self, schema_registry, basic_publisher, config): + self._schema_registry = schema_registry + self._basic_publisher = basic_publisher + self._config = config + + self._build_processors() + + def _build_processors(self): + self._processors = {} + for subject in self._config.PROCESSORS.keys(): + self._processors[subject] = self._build_processor_for_subject(subject) + + def _build_processor_for_subject(self, subject: str) -> BaseProcessor: + processor_instance_builder = self._config.PROCESSORS[subject] + return cast( + BaseProcessor, processor_instance_builder(self._schema_registry, self._basic_publisher, self._config) + ) + + def process_message(self, headers, body): + message = RabbitMessage(headers, body) + try: + message.decode(AvroEncoder(self._schema_registry, message.subject)) + except TransientRabbitError as ex: + LOGGER.error(f"Transient error while processing message: {ex.message}") + raise # Cause the consumer to restart and try this message again. Ideally we will delay the consumer. 
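For context on the dispatch above: the `PROCESSORS` mapping is supplied by the calling application's config, and the only contract `RabbitMessageProcessor` relies on is a builder callable invoked with `(schema_registry, basic_publisher, config)` that returns an object whose `process(message)` returns a boolean. A minimal sketch of such a processor and its registration follows; the subject name `create-labware`, the `CreateLabwareProcessor` class and the `Config` holder are hypothetical examples, and only the builder signature and the boolean return are taken from the code in this patch.

```python
# Hypothetical consumer-side wiring, for illustration only.
from lab_share_lib.processing.base_processor import BaseProcessor
from lab_share_lib.processing.rabbit_message import RabbitMessage


class CreateLabwareProcessor(BaseProcessor):
    """Handles messages whose "subject" header is "create-labware"."""

    def __init__(self, schema_registry, basic_publisher, config):
        self._schema_registry = schema_registry
        self._basic_publisher = basic_publisher
        self._config = config

    def process(self, message: RabbitMessage) -> bool:
        decoded = message.message  # the single decoded AVRO payload
        # ... apply the application's business logic to `decoded` here ...
        return True  # True acknowledges the message; False sends it to dead letters


class Config:
    # Maps the message's "subject" header to a builder callable.
    PROCESSORS = {
        "create-labware": CreateLabwareProcessor,
    }
```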
+ except Exception as ex: + LOGGER.error(f"Unrecoverable error while decoding RabbitMQ message: {type(ex)} {str(ex)}") + return False # Send the message to dead letters. + + if not message.contains_single_message: + LOGGER.error("RabbitMQ message received containing multiple AVRO encoded messages.") + return False # Send the message to dead letters. + + try: + return self._processors[message.subject].process(message) + except KeyError: + LOGGER.error( + f"Received message has subject '{message.subject}'" + " but there is no implemented processor for this subject." + ) + return False # Send the message to dead letters. diff --git a/lab_share_lib/rabbit/__init__.py b/lab_share_lib/rabbit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lab_share_lib/rabbit/async_consumer.py b/lab_share_lib/rabbit/async_consumer.py new file mode 100644 index 0000000..2342aee --- /dev/null +++ b/lab_share_lib/rabbit/async_consumer.py @@ -0,0 +1,352 @@ +import functools +import logging +import os +import ssl + +from pika import ConnectionParameters, PlainCredentials, SelectConnection, SSLOptions +from pika.adapters.utils.connection_workflow import ( + AMQPConnectionWorkflowFailed, + AMQPConnectorPhaseErrorBase, + AMQPConnectorSocketConnectError, +) +from pika.exceptions import AMQPConnectionError + +from lab_share_lib.exceptions import TransientRabbitError + +LOGGER = logging.getLogger(__name__) + + +class AsyncConsumer(object): + """This is an async consumer that will handle unexpected interactions + with RabbitMQ such as channel and connection closures. + If RabbitMQ closes the connection, this class will stop and indicate + that reconnection is necessary. You should look at the output, as + there are limited reasons why the connection may be closed, which + usually are tied to permission related issues or socket timeouts. + If the channel is closed, it will indicate a problem with one of the + commands that were issued and that should surface in the output as well. + """ + + def __init__(self, server_details, queue, process_message): + """Create a new instance of the consumer class, passing in the AMQP + URL used to connect to RabbitMQ. + :param RabbitServerDetails server_details: The RabbitMQ server connection details. + :param str queue: The AMQP queue to consume from. + :param func process_message: A function to call with details of any messages consumed from the queue. + This function will be passed the message headers and the message body and should + return a boolean indicating whether the message was processed successfully (True) + or failed to be processed and should be dead-lettered (False). + """ + self.should_reconnect = False + self.was_consuming = False + self.had_transient_error = False + + self._connection = None + self._channel = None + self._closing = False + self._consumer_tag = None + self._server_details = server_details + self._queue = queue + self._process_message = process_message + self._consuming = False + # In production, experiment with higher prefetch values + # for higher consumer throughput + self._prefetch_count = 1 + + @staticmethod + def _reap_last_connection_workflow_error(error): + """Extract exception value from the last connection attempt + + :param Exception error: error passed by the `AMQPConnectionWorkflow` + completion callback. 
+ + :returns: Exception value from the last connection attempt + :rtype: Exception + """ + if isinstance(error, AMQPConnectionWorkflowFailed): + # Extract exception value from the last connection attempt + error = error.exceptions[-1] + if isinstance(error, AMQPConnectorSocketConnectError): + error = AMQPConnectionError(error) + elif isinstance(error, AMQPConnectorPhaseErrorBase): + error = error.exception + + return error + + def connect(self): + """This method connects to RabbitMQ, returning the connection handle. + When the connection is established, the on_connection_open method + will be invoked by pika. + :rtype: pika.SelectConnection + """ + LOGGER.info("Connecting to %s", self._server_details.host) + credentials = PlainCredentials(self._server_details.username, self._server_details.password) + connection_params = ConnectionParameters( + host=self._server_details.host, + port=self._server_details.port, + virtual_host=self._server_details.vhost, + credentials=credentials, + ) + if self._server_details.uses_ssl: + cafile = os.getenv("REQUESTS_CA_BUNDLE") + ssl_context = ssl.create_default_context(cafile=cafile) + connection_params.ssl_options = SSLOptions(ssl_context) + + return SelectConnection( + parameters=connection_params, + on_open_callback=self.on_connection_open, + on_open_error_callback=self.on_connection_open_error, + on_close_callback=self.on_connection_closed, + ) + + def close_connection(self): + self._consuming = False + if not self._connection or self._connection.is_closing or self._connection.is_closed: + LOGGER.info("Connection is closing or already closed") + else: + LOGGER.info("Closing connection") + self._connection.close() + + def on_connection_open(self, _unused_connection): + """This method is called by pika once the connection to RabbitMQ has + been established. It passes the handle to the connection object in + case we need it, but in this case, we'll just mark it unused. + :param pika.SelectConnection _unused_connection: The connection + """ + LOGGER.info("Connection opened") + self.open_channel() + + def on_connection_open_error(self, _unused_connection, err): + """This method is called by pika if the connection to RabbitMQ + can't be established. + :param pika.SelectConnection _unused_connection: The connection + :param Exception err: The error + """ + # Note that err is likely to be an AMQPConnectionWorkflowFailed error. Unfortunately this means the actual + # cause of the error is wrapped a few layers deep and the type of err does not generate a useful string + # description. As such, we'll use a static method pulled from Pika source code to extract the description. + LOGGER.error("Connection open failed: %s", AsyncConsumer._reap_last_connection_workflow_error(err)) + self.reconnect() + + def on_connection_closed(self, _unused_connection, reason): + """This method is invoked by pika when the connection to RabbitMQ is + closed unexpectedly. Since it is unexpected, we will reconnect to + RabbitMQ if it disconnects. + :param pika.connection.Connection connection: The closed connection obj + :param Exception reason: exception representing reason for loss of + connection. + """ + self._channel = None + if self._connection and self._closing: + LOGGER.debug("Connection closed, already closing -- stopping the IOLoop") + self._connection.ioloop.stop() + else: + LOGGER.warning("Connection closed, reconnect necessary: %s", reason) + self.reconnect() + + def reconnect(self): + """Will be invoked if the connection can't be opened or is + closed. 
Indicates that a reconnect is necessary then stops the + ioloop. + """ + self.should_reconnect = True + self.stop() + + def open_channel(self): + """Open a new channel with RabbitMQ by issuing the Channel.Open RPC + command. When RabbitMQ responds that the channel is open, the + on_channel_open callback will be invoked by pika. + """ + if self._connection: + LOGGER.info("Creating a new channel") + self._connection.channel(on_open_callback=self.on_channel_open) + else: + LOGGER.error("No connection to open channel with") + + def on_channel_open(self, channel): + """This method is invoked by pika when the channel has been opened. + The channel object is passed in so we can make use of it. + Since the channel is now open, we'll set the QoS before starting consuming. + :param pika.channel.Channel channel: The channel object + """ + LOGGER.info("Channel opened") + self._channel = channel + self.add_on_channel_close_callback() + self.set_qos() + + def add_on_channel_close_callback(self): + """This method tells pika to call the on_channel_closed method if + RabbitMQ unexpectedly closes the channel. + """ + if self._channel: + LOGGER.info("Adding channel close callback") + self._channel.add_on_close_callback(self.on_channel_closed) + else: + LOGGER.error("No channel to add close callback to") + + def on_channel_closed(self, channel, reason): + """Invoked by pika when RabbitMQ unexpectedly closes the channel. + Channels are usually closed if you attempt to do something that + violates the protocol, such as re-declare an exchange or queue with + different parameters. In this case, we'll close the connection + to shutdown the object. + :param pika.channel.Channel: The closed channel + :param Exception reason: why the channel was closed + """ + LOGGER.warning("Channel %i was closed: %s", channel, reason) + self.close_connection() + + def set_qos(self): + """This method sets up the consumer prefetch to only be delivered + one message at a time. The consumer must acknowledge this message + before RabbitMQ will deliver another one. You should experiment + with different prefetch values to achieve desired performance. + """ + if self._channel: + self._channel.basic_qos(prefetch_count=self._prefetch_count, callback=self.on_basic_qos_ok) + + def on_basic_qos_ok(self, _unused_frame): + """Invoked by pika when the Basic.QoS method has completed. At this + point we will start consuming messages by calling start_consuming + which will invoke the needed RPC commands to start the process. + :param pika.frame.Method _unused_frame: The Basic.QosOk response frame + """ + LOGGER.info("QOS set to: %d", self._prefetch_count) + self.start_consuming() + + def start_consuming(self): + """This method sets up the consumer by first calling + add_on_cancel_callback so that the object is notified if RabbitMQ + cancels the consumer. It then issues the Basic.Consume RPC command + which returns the consumer tag that is used to uniquely identify the + consumer with RabbitMQ. We keep the value to use it when we want to + cancel consuming. The on_message method is passed in as a callback pika + will invoke when a message is fully received. 
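+        For orientation, the consumer is typically driven end to end like this (the values
+        below are purely illustrative):
+
+            details = RabbitServerDetails(uses_ssl=False, host="localhost", port=5672,
+                                          username="guest", password="guest", vhost="")
+            consumer = AsyncConsumer(details, "example.queue", process_message)
+            consumer.run()  # blocks on the IOLoop until stop() is called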
+ """ + if self._channel: + LOGGER.info("Issuing consumer related RPC commands") + self.add_on_cancel_callback() + self._consumer_tag = self._channel.basic_consume(self._queue, self.on_message) + self.was_consuming = True + self.had_transient_error = False + self._consuming = True + else: + LOGGER.error("No channel to consume from") + + def add_on_cancel_callback(self): + """Add a callback that will be invoked if RabbitMQ cancels the consumer + for some reason. If RabbitMQ does cancel the consumer, + on_consumer_cancelled will be invoked by pika. + """ + if self._channel: + LOGGER.info("Adding consumer cancellation callback") + self._channel.add_on_cancel_callback(self.on_consumer_cancelled) + else: + LOGGER.error("No channel to add cancel callback to") + + def on_consumer_cancelled(self, method_frame): + """Invoked by pika when RabbitMQ sends a Basic.Cancel for a consumer + receiving messages. + :param pika.frame.Method method_frame: The Basic.Cancel frame + """ + LOGGER.info("Consumer was cancelled remotely, shutting down: %r", method_frame) + if self._channel: + self._channel.close() + + def on_message(self, channel, basic_deliver, properties, body): + """Invoked by pika when a message is delivered from RabbitMQ. The + channel is passed for your convenience. The basic_deliver object that + is passed in carries the exchange, routing key, delivery tag and + a redelivered flag for the message. The properties passed in is an + instance of BasicProperties with the message properties and the body + is the message that was sent. + :param pika.channel.Channel channel: The channel object + :param pika.Spec.Basic.Deliver: basic_deliver method + :param pika.Spec.BasicProperties: properties + :param bytes body: The message body + """ + LOGGER.info("Received message # %s from %s", basic_deliver.delivery_tag, properties.app_id) + delivery_tag = basic_deliver.delivery_tag + + try: + should_ack_message = self._process_message(properties.headers, body) + except TransientRabbitError: + self.had_transient_error = True + raise + + if should_ack_message: + LOGGER.info("Acknowledging message %s", delivery_tag) + channel.basic_ack(delivery_tag) + else: + LOGGER.info("Rejecting message %s", delivery_tag) + channel.basic_nack(delivery_tag, requeue=False) + + def stop_consuming(self): + """Tell RabbitMQ that you would like to stop consuming by sending the + Basic.Cancel RPC command. + """ + if self._channel: + LOGGER.info("Sending a Basic.Cancel RPC command to RabbitMQ") + cb = functools.partial(self.on_cancelok, userdata=self._consumer_tag) + self._channel.basic_cancel(self._consumer_tag, cb) + + def on_cancelok(self, _unused_frame, userdata): + """This method is invoked by pika when RabbitMQ acknowledges the + cancellation of a consumer. At this point we will close the channel. + This will invoke the on_channel_closed method once the channel has been + closed, which will in-turn close the connection. + :param pika.frame.Method _unused_frame: The Basic.CancelOk frame + :param str|unicode userdata: Extra user data (consumer tag) + """ + self._consuming = False + LOGGER.info("RabbitMQ acknowledged the cancellation of the consumer: %s", userdata) + self.close_channel() + + def close_channel(self): + """Call to close the channel with RabbitMQ cleanly by issuing the + Channel.Close RPC command. 
+ """ + if self._channel: + LOGGER.info("Closing the channel") + self._channel.close() + else: + LOGGER.error("No channel to close") + + def run(self): + """Run the example consumer by connecting to RabbitMQ and then + starting the IOLoop to block and allow the SelectConnection to operate. + """ + self._connection = self.connect() + if self._connection: + LOGGER.debug("Connection made -- starting the IOLoop") + self._connection.ioloop.start() + else: + LOGGER.error("Connection was not established to start the IOLoop on") + + def stop(self): + """Cleanly shutdown the connection to RabbitMQ by stopping the consumer + with RabbitMQ. When RabbitMQ confirms the cancellation, on_cancelok + will be invoked by pika, which will then closing the channel and + connection. The IOLoop is started again because this method is invoked + when CTRL-C is pressed raising a KeyboardInterrupt exception. This + exception stops the IOLoop which needs to be running for pika to + communicate with RabbitMQ. All of the commands issued prior to starting + the IOLoop will be buffered but not processed. + """ + LOGGER.debug("Stop requested") + if self._closing: + LOGGER.debug("Consumer is already closing -- stop request ignored") + else: + LOGGER.info("Stopping") + LOGGER.debug("Closing the consumer") + self._closing = True + if self._consuming: + LOGGER.debug("Consumer was consuming -- stopping consumption") + self.stop_consuming() + if self._connection: + LOGGER.debug("Connection still exists -- starting the IOLoop") + self._connection.ioloop.start() + elif self._connection: + LOGGER.debug("Consumer was not consuming -- stopping the IOLoop") + self._connection.ioloop.stop() + LOGGER.info("Stopped") diff --git a/lab_share_lib/rabbit/avro_encoder.py b/lab_share_lib/rabbit/avro_encoder.py new file mode 100644 index 0000000..54dfb49 --- /dev/null +++ b/lab_share_lib/rabbit/avro_encoder.py @@ -0,0 +1,46 @@ +import json +from io import StringIO +from typing import Any, List, NamedTuple + +import fastavro + +from lab_share_lib.rabbit.schema_registry import RESPONSE_KEY_SCHEMA, RESPONSE_KEY_VERSION + + +class EncodedMessage(NamedTuple): + body: bytes + version: str + + +class AvroEncoder: + def __init__(self, schema_registry, subject): + self._schema_registry = schema_registry + self._subject = subject + + def _schema_response(self, version): + if version is None: + return self._schema_registry.get_schema(self._subject) + else: + return self._schema_registry.get_schema(self._subject, version) + + def _schema(self, schema_response): + schema_obj = json.loads(schema_response[RESPONSE_KEY_SCHEMA]) + return fastavro.parse_schema(schema_obj) + + def _schema_version(self, schema_response): + return schema_response[RESPONSE_KEY_VERSION] + + def encode(self, records: List, version: str = None) -> EncodedMessage: + schema_response = self._schema_response(version) + string_writer = StringIO() + fastavro.json_writer(string_writer, self._schema(schema_response), records) + + return EncodedMessage( + body=string_writer.getvalue().encode(), version=str(self._schema_version(schema_response)) + ) + + def decode(self, message: bytes, version: str) -> Any: + schema_response = self._schema_response(version) + string_reader = StringIO(message.decode("utf-8")) + + return fastavro.json_reader(string_reader, self._schema(schema_response)) diff --git a/lab_share_lib/rabbit/background_consumer.py b/lab_share_lib/rabbit/background_consumer.py new file mode 100644 index 0000000..3c081f5 --- /dev/null +++ b/lab_share_lib/rabbit/background_consumer.py @@ 
-0,0 +1,67 @@ +import logging +import time +from threading import Thread + +from lab_share_lib.rabbit.async_consumer import AsyncConsumer + +LOGGER = logging.getLogger(__name__) + + +class BackgroundConsumer(Thread): + """This is a RabbitMQ consumer that runs in a background thread and will reconnect + after a time delay if the AsyncConsumer indicates that a reconnect is necessary. + """ + + def __init__(self, server_details, queue, process_message): + super().__init__() + self.name = type(self).__name__ + self.daemon = True + self._running = False + self._reconnect_delay = 0 + self._server_details = server_details + self._queue = queue + self._process_message = process_message + self._consumer_var = None + + def run(self): + self._running = True + try: + while True: + try: + self._consumer.run() + except KeyboardInterrupt: + self._consumer.stop() + break + self._maybe_reconnect() + finally: + self._running = False + + @property + def is_healthy(self): + return self._running + + @property + def _consumer(self): + if self._consumer_var is None: + self._consumer_var = AsyncConsumer(self._server_details, self._queue, self._process_message) + + return self._consumer_var + + def _maybe_reconnect(self): + if self._consumer.should_reconnect: + self._consumer.stop() + reconnect_delay = self._get_reconnect_delay() + LOGGER.info("Reconnecting after %d seconds", reconnect_delay) + time.sleep(reconnect_delay) + self._consumer_var = None + + def _get_reconnect_delay(self): + if self._consumer.had_transient_error: + self._reconnect_delay = 30 + elif self._consumer.was_consuming: + self._reconnect_delay = 0 + else: + self._reconnect_delay += 1 + if self._reconnect_delay > 30: + self._reconnect_delay = 30 + return self._reconnect_delay diff --git a/lab_share_lib/rabbit/basic_publisher.py b/lab_share_lib/rabbit/basic_publisher.py new file mode 100644 index 0000000..2767e49 --- /dev/null +++ b/lab_share_lib/rabbit/basic_publisher.py @@ -0,0 +1,43 @@ +import os +import ssl + +from pika import BasicProperties, BlockingConnection, ConnectionParameters, PlainCredentials, SSLOptions +from pika.spec import PERSISTENT_DELIVERY_MODE + +from lab_share_lib.constants import RABBITMQ_HEADER_KEY_SUBJECT, RABBITMQ_HEADER_KEY_VERSION +from lab_share_lib.types import RabbitServerDetails + + +class BasicPublisher: + def __init__(self, server_details: RabbitServerDetails): + credentials = PlainCredentials(server_details.username, server_details.password) + self._connection_params = ConnectionParameters( + host=server_details.host, + port=server_details.port, + virtual_host=server_details.vhost, + credentials=credentials, + ) + + if server_details.uses_ssl: + cafile = os.getenv("REQUESTS_CA_BUNDLE") + ssl_context = ssl.create_default_context(cafile=cafile) + self._connection_params.ssl_options = SSLOptions(ssl_context) + + def publish_message(self, exchange, routing_key, body, subject, schema_version): + properties = BasicProperties( + delivery_mode=PERSISTENT_DELIVERY_MODE, + headers={ + RABBITMQ_HEADER_KEY_SUBJECT: subject, + RABBITMQ_HEADER_KEY_VERSION: schema_version, + }, + ) + + connection = BlockingConnection(self._connection_params) + channel = connection.channel() + channel.basic_publish( + exchange=exchange, + routing_key=routing_key, + properties=properties, + body=body, + ) + connection.close() diff --git a/lab_share_lib/rabbit/messages/__init__.py b/lab_share_lib/rabbit/messages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lab_share_lib/rabbit/messages/base_message.py 
b/lab_share_lib/rabbit/messages/base_message.py new file mode 100644 index 0000000..2170db8 --- /dev/null +++ b/lab_share_lib/rabbit/messages/base_message.py @@ -0,0 +1,23 @@ +class BaseMessage: + def __init__(self): + self._textual_errors = [] + + @property + def textual_errors_summary(self): + error_count = len(self._textual_errors) + + if error_count == 0: + errors_label = "No errors were" + elif error_count == 1: + errors_label = "1 error was" + else: + errors_label = f"{error_count} errors were" + + additional_text = " Only the first 5 are shown." if error_count > 5 else "" + + error_list = [f"{errors_label} reported during processing.{additional_text}"] + self._textual_errors[:5] + + return error_list + + def add_textual_error(self, description): + self._textual_errors.append(description) diff --git a/lab_share_lib/rabbit/rabbit_stack.py b/lab_share_lib/rabbit/rabbit_stack.py new file mode 100644 index 0000000..a3357ba --- /dev/null +++ b/lab_share_lib/rabbit/rabbit_stack.py @@ -0,0 +1,44 @@ +from lab_share_lib.processing.rabbit_message_processor import RabbitMessageProcessor +from lab_share_lib.rabbit.background_consumer import BackgroundConsumer +from lab_share_lib.rabbit.basic_publisher import BasicPublisher +from lab_share_lib.rabbit.schema_registry import SchemaRegistry +from lab_share_lib.types import RabbitServerDetails + + +class RabbitStack: + def __init__(self, config): + self._config = config + + rabbit_crud_queue = self._config.RABBITMQ_CRUD_QUEUE + self._background_consumer = BackgroundConsumer( + self._rabbit_server_details(), rabbit_crud_queue, self._rabbit_message_processor().process_message + ) + + @property + def is_healthy(self): + return self._background_consumer.is_healthy + + def _rabbit_server_details(self): + return RabbitServerDetails( + uses_ssl=self._config.RABBITMQ_SSL, + host=self._config.RABBITMQ_HOST, + port=self._config.RABBITMQ_PORT, + username=self._config.RABBITMQ_USERNAME, + password=self._config.RABBITMQ_PASSWORD, + vhost=self._config.RABBITMQ_VHOST, + ) + + def _schema_registry(self): + redpanda_url = self._config.REDPANDA_BASE_URI + redpanda_api_key = self._config.REDPANDA_API_KEY + return SchemaRegistry(redpanda_url, redpanda_api_key) + + def _rabbit_message_processor(self): + basic_publisher = BasicPublisher(self._rabbit_server_details()) + return RabbitMessageProcessor(self._schema_registry(), basic_publisher, self._config) + + def bring_stack_up(self): + if self._background_consumer.is_healthy: + return + + self._background_consumer.start() diff --git a/lab_share_lib/rabbit/schema_registry.py b/lab_share_lib/rabbit/schema_registry.py new file mode 100644 index 0000000..f100a50 --- /dev/null +++ b/lab_share_lib/rabbit/schema_registry.py @@ -0,0 +1,25 @@ +from functools import lru_cache + +from requests import get + +from lab_share_lib.exceptions import TransientRabbitError + +RESPONSE_KEY_VERSION = "version" +RESPONSE_KEY_SCHEMA = "schema" + + +@lru_cache +def get_json_from_url(url: str, api_key: str) -> dict: + try: + return (dict)(get(url, headers={"X-API-KEY": api_key}).json()) + except Exception: + raise TransientRabbitError(f"Unable to connect to schema registry at {url}") + + +class SchemaRegistry: + def __init__(self, base_uri: str, api_key: str): + self._base_uri = base_uri + self._api_key = api_key + + def get_schema(self, subject: str, version: str = "latest") -> dict: + return get_json_from_url(f"{self._base_uri}/subjects/{subject}/versions/{version}", self._api_key) diff --git a/lab_share_lib/types.py b/lab_share_lib/types.py new 
file mode 100644 index 0000000..d327f76 --- /dev/null +++ b/lab_share_lib/types.py @@ -0,0 +1,20 @@ +from types import ModuleType + + +class RabbitServerDetails(ModuleType): + """ModuleType class for details to connect to a RabbitMQ server.""" + + uses_ssl: bool + host: str + port: int + username: str + password: str + vhost: str + + def __init__(self, uses_ssl, host, port, username, password, vhost): + self.uses_ssl = uses_ssl + self.host = host + self.port = port + self.username = username + self.password = password + self.vhost = vhost or "/" diff --git a/lefthook.yml b/lefthook.yml new file mode 100644 index 0000000..6841163 --- /dev/null +++ b/lefthook.yml @@ -0,0 +1,19 @@ +# https://github.com/Arkweid/lefthook/blob/master/docs/full_guide.md +pre-push: + commands: + packages-check: + tags: security + run: pipenv check + +pre-commit: + parallel: true + commands: + black: + glob: "*.{py}" + run: pipenv run black {staged_files} + flake8: + glob: "*.{py}" + run: pipenv run flake8 {staged_files} + mypy: + glob: "*.{py}" + run: pipenv run mypy {staged_files} diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..72c36e6 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,47 @@ +# Global options: + +[mypy] +warn_unused_configs = True + +disallow_incomplete_defs = True + +check_untyped_defs = True + +warn_redundant_casts = True +warn_unused_ignores = True +warn_return_any = True +warn_unreachable = True + +pretty = True + +# Per-module options: + +[mypy-pymongo.*] +ignore_missing_imports = True + +[mypy-mysql.*] +ignore_missing_imports = True + +[mypy-pandas] +ignore_missing_imports = True + +[mypy-sqlalchemy.*] +ignore_missing_imports = True + +[mypy-pyodbc] +ignore_missing_imports = True + +[mypy-bson.*] +ignore_missing_imports = True + +[mypy-pysftp] +ignore_missing_imports = True + +[mypy-numpy] +ignore_missing_imports = True + +[mypy-flask_apscheduler] +ignore_missing_imports = True + +[mypy-requests] +ignore_missing_imports = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..85a28bd --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,6 @@ +[tool.black] +line-length = 120 + +[tool.coverage.run] +branch = true +source = ['lab_share_lib'] diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..985cac5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,10 @@ +[flake8] +max-line-length = 120 +max-complexity = 13 +statistics = True +count = True +show-source = True + +[tool:pytest] +testpaths = tests +addopts = --cov=lab_share_lib --cov-report html --cov-report xml diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..3977455 --- /dev/null +++ b/setup.py @@ -0,0 +1,14 @@ +from setuptools import find_packages, setup # type: ignore + +setup( + name="lab_share_lib", + packages=find_packages(include=["lab_share_lib"]), + version="0.1.0", + description="Library to allow creating consumers to interact with lab-share framework", + author="Stuart McHattie", + license="MIT", + install_requires=["pika", "fastavro~=1.5", "requests"], + setup_requires=["pytest-runner"], + tests_require=["pytest"], + test_suite="tests", +) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..948d01e --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def config(): + return type("Config", (object,), {"PROCESSORS": {}}) diff --git a/tests/processing/test_rabbit_message.py 
b/tests/processing/test_rabbit_message.py new file mode 100644 index 0000000..5ab068a --- /dev/null +++ b/tests/processing/test_rabbit_message.py @@ -0,0 +1,68 @@ +from unittest.mock import MagicMock + +import pytest + +from lab_share_lib.constants import RABBITMQ_HEADER_KEY_SUBJECT, RABBITMQ_HEADER_KEY_VERSION +from lab_share_lib.processing.rabbit_message import RabbitMessage + +HEADERS = { + RABBITMQ_HEADER_KEY_SUBJECT: "a-subject", + RABBITMQ_HEADER_KEY_VERSION: "3", +} + +ENCODED_BODY = "Encoded body" +DECODED_LIST = ["Decoded body"] + + +@pytest.fixture +def subject(): + return RabbitMessage(HEADERS, ENCODED_BODY) + + +@pytest.fixture +def decoder(): + decoder = MagicMock() + decoder.decode.return_value = DECODED_LIST + + return decoder + + +def test_subject_extracts_the_header_correctly(subject): + assert subject.subject == HEADERS[RABBITMQ_HEADER_KEY_SUBJECT] + + +def test_schema_version_extracts_the_header_correctly(subject): + assert subject.schema_version == HEADERS[RABBITMQ_HEADER_KEY_VERSION] + + +def test_decode_populates_decoded_list(subject, decoder): + subject.decode(decoder) + + decoder.decode.assert_called_once_with(ENCODED_BODY, HEADERS[RABBITMQ_HEADER_KEY_VERSION]) + assert subject._decoded_list == DECODED_LIST + + +@pytest.mark.parametrize( + "decoded_list,expected", + [ + ([], False), + (["decoded_1"], True), + (["decoded_1", "decoded_2"], False), + ], +) +def test_contains_single_message_gives_correct_response(subject, decoded_list, expected): + subject._decoded_list = decoded_list + assert subject.contains_single_message is expected + + +@pytest.mark.parametrize( + "decoded_list,expected", + [ + (["decoded_1"], "decoded_1"), + # Realistically, you wouldn't be calling `.message` unless `.contains_single_message` returns True. But anyway! 
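+        # (And when nothing has been decoded yet, `.message` falls through and returns None.)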
+ (["decoded_1", "decoded_2"], "decoded_1"), + ], +) +def test_message_returns_first_decoded_list_item(subject, decoded_list, expected): + subject._decoded_list = decoded_list + assert subject.message == expected diff --git a/tests/processing/test_rabbit_message_processor.py b/tests/processing/test_rabbit_message_processor.py new file mode 100644 index 0000000..3d4eee1 --- /dev/null +++ b/tests/processing/test_rabbit_message_processor.py @@ -0,0 +1,133 @@ +from unittest.mock import MagicMock, patch +import pytest + +from lab_share_lib.constants import RABBITMQ_HEADER_KEY_SUBJECT, RABBITMQ_HEADER_KEY_VERSION +from lab_share_lib.exceptions import TransientRabbitError +from lab_share_lib.processing.rabbit_message_processor import RabbitMessageProcessor +from lab_share_lib.processing.base_processor import BaseProcessor + +RABBITMQ_SUBJECT_EXAMPLE = "MyOwnSubject" + +SCHEMA_REGISTRY = MagicMock() +BASIC_PUBLISHER = MagicMock() +PROCESSOR = MagicMock(BaseProcessor) + +HEADERS = { + RABBITMQ_HEADER_KEY_SUBJECT: RABBITMQ_SUBJECT_EXAMPLE, + RABBITMQ_HEADER_KEY_VERSION: "3", +} +MESSAGE_BODY = "Body" + + +@pytest.fixture +def logger(): + with patch("lab_share_lib.processing.rabbit_message_processor.LOGGER") as logger: + yield logger + + +@pytest.fixture +def rabbit_message(): + with patch("lab_share_lib.processing.rabbit_message_processor.RabbitMessage") as rabbit_message: + rabbit_message.return_value.subject = HEADERS[RABBITMQ_HEADER_KEY_SUBJECT] + yield rabbit_message + + +@pytest.fixture +def avro_encoder(): + with patch("lab_share_lib.processing.rabbit_message_processor.AvroEncoder") as avro_encoder: + yield avro_encoder + + +@pytest.fixture +def create_plate_processor(): + yield MagicMock(BaseProcessor) + + +@pytest.fixture +def subject(config, create_plate_processor, rabbit_message, avro_encoder): + subject = RabbitMessageProcessor(SCHEMA_REGISTRY, BASIC_PUBLISHER, config) + subject._processors = {RABBITMQ_SUBJECT_EXAMPLE: create_plate_processor.return_value} + yield subject + + +def test_constructor_stored_passed_values(subject, config): + assert subject._schema_registry == SCHEMA_REGISTRY + assert subject._basic_publisher == BASIC_PUBLISHER + assert subject._config == config + + +def test_constructor_populated_processors_correctly(subject, create_plate_processor): + assert list(subject._processors.keys()) == [RABBITMQ_SUBJECT_EXAMPLE] + assert subject._processors[RABBITMQ_SUBJECT_EXAMPLE] == create_plate_processor.return_value + + +def test_process_message_decodes_the_message(subject, rabbit_message, avro_encoder): + subject.process_message(HEADERS, MESSAGE_BODY) + + rabbit_message.assert_called_once_with(HEADERS, MESSAGE_BODY) + avro_encoder.assert_called_once_with(SCHEMA_REGISTRY, HEADERS[RABBITMQ_HEADER_KEY_SUBJECT]) + rabbit_message.return_value.decode.assert_called_once_with(avro_encoder.return_value) + + +def test_process_message_handles_exception_during_decode(subject, logger, rabbit_message): + rabbit_message.return_value.decode.side_effect = KeyError() + result = subject.process_message(HEADERS, MESSAGE_BODY) + + assert result is False + logger.error.assert_called_once() + error_log = logger.error.call_args.args[0] + assert "unrecoverable" in error_log.lower() + + +def test_process_message_handles_transient_error_from_schema_registry(subject, logger, rabbit_message): + # We have mocked out the decode method. The AvroEncoder speaks to the schema registry + # which could raise this error type so we'll just mock it on the decode method. 
+ error_message = "Schema registry unreachable" + rabbit_message.return_value.decode.side_effect = TransientRabbitError(error_message) + + with pytest.raises(TransientRabbitError): + subject.process_message(HEADERS, MESSAGE_BODY) + + logger.error.assert_called_once() + error_log = logger.error.call_args.args[0] + assert "transient" in error_log.lower() + assert error_message in error_log + + +def test_process_message_rejects_rabbit_message_with_multiple_messages(subject, logger, rabbit_message): + rabbit_message.return_value.contains_single_message = False + result = subject.process_message(HEADERS, MESSAGE_BODY) + + assert result is False + logger.error.assert_called_once() + error_log = logger.error.call_args.args[0] + assert "multiple" in error_log.lower() + + +def test_process_message_rejects_rabbit_message_with_unrecognised_subject(subject, logger, rabbit_message): + wrong_subject = "random-subject" + rabbit_message.return_value.subject = wrong_subject + result = subject.process_message(HEADERS, MESSAGE_BODY) + + assert result is False + logger.error.assert_called_once() + error_log = logger.error.call_args.args[0] + assert wrong_subject in error_log + + +@pytest.mark.parametrize("return_value", [True, False]) +def test_process_message_returns_value_returned_by_processor(subject, create_plate_processor, return_value): + create_plate_processor.return_value.process.return_value = return_value + result = subject.process_message(HEADERS, MESSAGE_BODY) + + assert result is return_value + + +def test_process_message_raises_error_generated_by_processor(subject, create_plate_processor): + raised_error = TransientRabbitError("Test") + create_plate_processor.return_value.process.side_effect = raised_error + + with pytest.raises(TransientRabbitError) as ex_info: + subject.process_message(HEADERS, MESSAGE_BODY) + + assert ex_info.value == raised_error diff --git a/tests/rabbit/messages/test_base_message.py b/tests/rabbit/messages/test_base_message.py new file mode 100644 index 0000000..d93e432 --- /dev/null +++ b/tests/rabbit/messages/test_base_message.py @@ -0,0 +1,36 @@ +import pytest + +from lab_share_lib.rabbit.messages.base_message import BaseMessage + + +@pytest.fixture +def subject(): + return BaseMessage() + + +@pytest.mark.parametrize( + "errors, headline", + [ + [[], "No errors were reported during processing."], + [["Error 1"], "1 error was reported during processing."], + [["Error 1", "Error 2"], "2 errors were reported during processing."], + [["Error 1", "Error 2", "Error 3"], "3 errors were reported during processing."], + [["Error 1", "Error 2", "Error 3", "Error 4"], "4 errors were reported during processing."], + [["Error 1", "Error 2", "Error 3", "Error 4", "Error 5"], "5 errors were reported during processing."], + ], +) +def test_textual_errors_summary_is_accurate_for_up_to_5_errors(subject, errors, headline): + subject._textual_errors = errors + assert subject.textual_errors_summary == [headline] + errors + + +def test_textual_errors_summary_is_accurate_for_6_errors(subject): + subject._textual_errors = ["Error 1", "Error 2", "Error 3", "Error 4", "Error 5", "Error 6"] + assert subject.textual_errors_summary == [ + "6 errors were reported during processing. 
Only the first 5 are shown.", + "Error 1", + "Error 2", + "Error 3", + "Error 4", + "Error 5", + ] diff --git a/tests/rabbit/test_async_consumer.py b/tests/rabbit/test_async_consumer.py new file mode 100644 index 0000000..3532666 --- /dev/null +++ b/tests/rabbit/test_async_consumer.py @@ -0,0 +1,393 @@ +from unittest.mock import ANY, MagicMock, Mock, call, patch + +import pytest + +from lab_share_lib.exceptions import TransientRabbitError +from lab_share_lib.rabbit.async_consumer import AsyncConsumer +from lab_share_lib.types import RabbitServerDetails + +DEFAULT_SERVER_DETAILS = RabbitServerDetails( + uses_ssl=False, host="host", port=5672, username="username", password="password", vhost="vhost" +) + + +@pytest.fixture +def mock_logger(): + with patch("lab_share_lib.rabbit.async_consumer.LOGGER") as logger: + yield logger + + +@pytest.fixture +def subject(): + return AsyncConsumer(DEFAULT_SERVER_DETAILS, "queue", Mock()) + + +@pytest.mark.parametrize("uses_ssl", [True, False]) +def test_connect_provides_correct_parameters(mock_logger, uses_ssl): + server_details = RabbitServerDetails( + uses_ssl=uses_ssl, host="host", port=5672, username="username", password="password", vhost="vhost" + ) + subject = AsyncConsumer(server_details, "queue", Mock()) + select_connection = subject.connect() + select_connection.close() # Don't want async callbacks that will log during other tests + + parameters = select_connection.params + if server_details.uses_ssl: + assert parameters.ssl_options is not None + else: + assert parameters.ssl_options is None + + assert parameters.host == server_details.host + assert parameters.port == server_details.port + assert parameters.credentials.username == server_details.username + assert parameters.credentials.password == server_details.password + assert parameters.virtual_host == server_details.vhost + mock_logger.info.assert_called_once() + + +def test_close_connection_sets_consuming_false(subject, mock_logger): + subject._consuming = True + subject.close_connection() + + assert subject._consuming is False + mock_logger.info.assert_called_once() + + +def test_close_connection_calls_close_on_connection(subject, mock_logger): + subject._connection = MagicMock() + subject._connection.is_closing = False + subject._connection.is_closed = False + subject.close_connection() + + subject._connection.close.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_on_connection_open_calls_open_channel(subject, mock_logger): + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.open_channel") as open_channel: + subject.on_connection_open(Mock()) + + open_channel.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_on_connection_open_error_calls_reconnect(subject, mock_logger): + error = Exception("An error") + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.reconnect") as reconnect: + subject.on_connection_open_error(Mock(), error) + + reconnect.assert_called_once() + mock_logger.error.assert_called_once_with(ANY, error) + + +def test_on_connection_closed_sets_channel_to_none(subject): + subject._connection = MagicMock() + subject._channel = "Not none" + subject.on_connection_closed(Mock(), "A reason") + + assert subject._channel is None + + +def test_on_connection_closed_stops_the_ioloop(subject): + subject._connection = MagicMock() + subject._closing = True + subject.on_connection_closed(Mock(), "A reason") + + subject._connection.ioloop.stop.assert_called_once() + + +def 
test_on_connection_closed_reconnects_when_not_in_closing_state(subject, mock_logger): + subject._connection = MagicMock() + subject._closing = False + reason = "A reason" + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.reconnect") as reconnect: + subject.on_connection_closed(Mock(), reason) + + reconnect.assert_called_once() + mock_logger.warning.assert_called_once_with(ANY, reason) + + +def test_reconnect_prepares_for_reconnection(subject): + subject.should_reconnect = False + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.stop") as stop: + subject.reconnect() + + assert subject.should_reconnect is True + stop.assert_called_once() + + +def test_open_channel_calls_the_connection_method(subject, mock_logger): + subject._connection = MagicMock() + subject.open_channel() + + subject._connection.channel.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_open_channel_logs_when_no_connection(subject, mock_logger): + subject._connection = None + subject.open_channel() + + mock_logger.error.assert_called_once() + + +def test_on_channel_open_sets_the_channel_and_calls_follow_up_methods(subject, mock_logger): + subject._channel = None + fake_channel = Mock() + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.add_on_channel_close_callback") as add_callback: + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.set_qos") as set_qos: + subject.on_channel_open(fake_channel) + + mock_logger.info.assert_called_once() + assert subject._channel == fake_channel + add_callback.assert_called_once() + set_qos.assert_called_once() + + +def test_add_on_channel_close_callback_calls_the_channel_method(subject, mock_logger): + subject._channel = MagicMock() + subject.add_on_channel_close_callback() + + subject._channel.add_on_close_callback.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_add_on_channel_close_callback_logs_when_no_channel(subject, mock_logger): + subject._channel = None + subject.add_on_channel_close_callback() + + mock_logger.error.assert_called_once() + + +def test_on_channel_closed_calls_close_connection(subject, mock_logger): + channel = "A channel" + reason = "A reason" + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.close_connection") as close_connection: + subject.on_channel_closed(channel, reason) + + close_connection.assert_called_once() + mock_logger.warning.assert_called_once_with(ANY, channel, reason) + + +@pytest.mark.parametrize("prefetch_count", [1, 5, 10]) +def test_set_qos_applies_prefetch_count_to_channel(subject, prefetch_count): + subject._prefetch_count = prefetch_count + subject._channel = MagicMock() + subject.set_qos() + + subject._channel.basic_qos.assert_called_once_with(prefetch_count=prefetch_count, callback=ANY) + + +@pytest.mark.parametrize("prefetch_count", [1, 5, 10]) +def test_on_basic_qos_ok_calls_start_consuming(subject, mock_logger, prefetch_count): + subject._prefetch_count = prefetch_count + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.start_consuming") as start_consuming: + subject.on_basic_qos_ok(Mock()) + + start_consuming.assert_called_once() + mock_logger.info.assert_called_once_with(ANY, prefetch_count) + + +def test_start_consuming_logs_when_no_channel(subject, mock_logger): + subject._channel = None + subject.start_consuming() + + mock_logger.error.assert_called_once() + + +def test_start_consuming_takes_necessary_actions(subject, mock_logger): + # Test objects + test_tag = "Test tag" + test_queue = "queue.name" + + # 
Arrange + subject._queue = test_queue + subject._channel = MagicMock() + subject._channel.basic_consume = Mock(return_value=test_tag) + subject._consumer_tag = None + subject.was_consuming = False + subject._consuming = False + + # Act + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.add_on_cancel_callback") as add_callback: + subject.start_consuming() + + # Assert + mock_logger.info.assert_called_once() + add_callback.assert_called_once() + subject._channel.basic_consume.assert_called_once_with(test_queue, ANY) + assert subject._consumer_tag == test_tag + assert subject.was_consuming is True + assert subject._consuming is True + + +def test_add_on_cancel_callback_calls_the_channel_method(subject, mock_logger): + subject._channel = MagicMock() + subject.add_on_cancel_callback() + + subject._channel.add_on_cancel_callback.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_add_on_cancel_callback_logs_when_no_channel(subject, mock_logger): + subject._channel = None + subject.add_on_cancel_callback() + + mock_logger.error.assert_called_once() + + +def test_on_consumer_cancelled_logs(subject, mock_logger): + method_frame = Mock() + subject.on_consumer_cancelled(method_frame) + + mock_logger.info.assert_called_once_with(ANY, method_frame) + + +def test_on_consumer_cancelled_calls_channel_close(subject, mock_logger): + subject._channel = MagicMock() + subject.on_consumer_cancelled(Mock()) + + subject._channel.close.assert_called_once() + + +@pytest.mark.parametrize( + "return_value,ack_calls,nack_calls", + [[False, [], [call("Test tag", requeue=False)]], [True, [call("Test tag")], []]], +) +def test_on_message_passes_relevant_info_to_process_message(subject, mock_logger, return_value, ack_calls, nack_calls): + subject._process_message = Mock(return_value=return_value) + + # Arrange arguments + channel = MagicMock() + + delivery_tag = "Test tag" + basic_deliver = MagicMock() + basic_deliver.delivery_tag = delivery_tag + + app_id = "Test app ID" + headers = {"header1": "value1"} + properties = MagicMock() + properties.app_id = app_id + properties.headers = headers + + body = "A message body" + + # Act + subject.on_message(channel, basic_deliver, properties, body) + + # Assert + mock_logger.info.assert_has_calls([call(ANY, delivery_tag, app_id), call(ANY, delivery_tag)]) + subject._process_message.assert_called_once_with(headers, body) + + channel.basic_ack.assert_has_calls(ack_calls) + channel.basic_nack.assert_has_calls(nack_calls) + + +def test_on_message_handles_transient_rabbit_error(subject): + subject._process_message = Mock(side_effect=TransientRabbitError("Boom!")) + channel = MagicMock() + + assert subject.had_transient_error is False + + with pytest.raises(TransientRabbitError): + subject.on_message(channel, MagicMock(), MagicMock(), "") + + channel.basic_ack.assert_not_called() + channel.basic_nack.assert_not_called() + assert subject.had_transient_error is True + + +def test_stop_consuming_calls_the_channel_method(subject, mock_logger): + subject._channel = MagicMock() + subject._consumer_tag = Mock() + subject.stop_consuming() + + subject._channel.basic_cancel.assert_called_once_with(subject._consumer_tag, ANY) + mock_logger.info.assert_called_once() + + +def test_on_cancelok_calls_close_channel_method(subject, mock_logger): + subject._channel = MagicMock() + subject._consuming = True + userdata = Mock() + + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.close_channel") as close_channel: + subject.on_cancelok(Mock(), userdata) + + 
assert subject._consuming is False + mock_logger.info.assert_called_once_with(ANY, userdata) + close_channel.assert_called_once() + + +def test_close_channel_calls_the_channel_method(subject, mock_logger): + subject._channel = MagicMock() + subject.close_channel() + + subject._channel.close.assert_called_once() + mock_logger.info.assert_called_once() + + +def test_close_channel_logs_when_no_channel(subject, mock_logger): + subject._channel = None + subject.close_channel() + + mock_logger.error.assert_called_once() + + +def test_run_starts_the_ioloop_when_connection_created(subject): + subject._connection = None + test_connection = MagicMock() + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.connect", return_value=test_connection): + subject.run() + + assert subject._connection == test_connection + test_connection.ioloop.start.assert_called_once() + + +def test_run_logs_error_when_connection_not_created(subject, mock_logger): + subject._connection = None + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.connect", return_value=None): + subject.run() + + mock_logger.error.assert_called_once() + + +def test_stop_logs_process(subject, mock_logger): + subject._closing = False + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.stop_consuming"): + subject.stop() + + mock_logger.info.assert_has_calls([call("Stopping"), call("Stopped")]) + + +def test_stop_takes_correct_actions_when_consuming(subject): + subject._closing = False + subject._consuming = True + subject._connection = MagicMock() + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.stop_consuming") as stop_consuming: + subject.stop() + + stop_consuming.assert_called_once() + subject._connection.ioloop.stop.assert_not_called() + subject._connection.ioloop.start.assert_called_once() + + +def test_stop_takes_correct_actions_when_not_consuming(subject): + subject._closing = False + subject._consuming = False + subject._connection = MagicMock() + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.stop_consuming") as stop_consuming: + subject.stop() + + stop_consuming.assert_not_called() + subject._connection.ioloop.stop.assert_called_once() + subject._connection.ioloop.start.assert_not_called() + + +def test_stop_does_nothing_if_already_closing(subject, mock_logger): + subject._closing = True + with patch("lab_share_lib.rabbit.async_consumer.AsyncConsumer.stop_consuming") as stop_consuming: + subject.stop() + + stop_consuming.assert_not_called() + mock_logger.info.assert_not_called() diff --git a/tests/rabbit/test_avro_encoder.py b/tests/rabbit/test_avro_encoder.py new file mode 100644 index 0000000..64c4689 --- /dev/null +++ b/tests/rabbit/test_avro_encoder.py @@ -0,0 +1,91 @@ +from unittest.mock import ANY, MagicMock, Mock, patch + +import pytest + +from lab_share_lib.rabbit.avro_encoder import AvroEncoder +from lab_share_lib.rabbit.schema_registry import RESPONSE_KEY_SCHEMA, RESPONSE_KEY_VERSION + +SUBJECT = "create-plate-map" +SCHEMA_RESPONSE = {RESPONSE_KEY_SCHEMA: '{ "key": "value" }', RESPONSE_KEY_VERSION: 7} +SCHEMA_OBJECT = {"key": "value"} +MESSAGE_BODY = "The written message." 
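+# For orientation, a minimal, self-contained sketch of the JSON round trip that AvroEncoder
+# delegates to fastavro (the record schema below is hypothetical and only for illustration):
+def _fastavro_json_round_trip_sketch():
+    from io import StringIO
+
+    import fastavro
+
+    schema = fastavro.parse_schema(
+        {"type": "record", "name": "Sample", "fields": [{"name": "key", "type": "string"}]}
+    )
+    buffer = StringIO()
+    fastavro.json_writer(buffer, schema, [{"key": "value"}])  # write records as Avro JSON
+    return list(fastavro.json_reader(StringIO(buffer.getvalue()), schema))  # -> [{"key": "value"}]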
+ + +@pytest.fixture +def schema_registry(): + schema_registry = MagicMock() + schema_registry.get_schema.return_value = SCHEMA_RESPONSE + + yield schema_registry + + +@pytest.fixture +def fastavro(): + with patch("lab_share_lib.rabbit.avro_encoder.fastavro") as fastavro: + yield fastavro + + +@pytest.fixture +def subject(schema_registry): + return AvroEncoder(schema_registry, SUBJECT) + + +def test_constructor_stores_passed_values(subject, schema_registry): + assert subject._schema_registry == schema_registry + assert subject._subject == SUBJECT + + +@pytest.mark.parametrize("schema_version", [None, "5"]) +def test_schema_response_calls_the_schema_registry(subject, schema_registry, schema_version): + response = subject._schema_response(schema_version) + + if schema_version is None: + schema_registry.get_schema.assert_called_once_with(SUBJECT) + else: + schema_registry.get_schema.assert_called_once_with(SUBJECT, schema_version) + + assert response == SCHEMA_RESPONSE + + +def test_schema_parses_the_returned_schema(subject, fastavro): + avro_schema = Mock() + fastavro.parse_schema.return_value = avro_schema + + parsed_schema = subject._schema(SCHEMA_RESPONSE) + + fastavro.parse_schema.assert_called_once_with(SCHEMA_OBJECT) + assert parsed_schema == avro_schema + + +def test_schema_version_extracts_the_version(subject): + assert subject._schema_version(SCHEMA_RESPONSE) == 7 + + +@pytest.mark.parametrize("schema_version", [None, "5"]) +def test_encode_encodes_the_message(subject, fastavro, schema_version): + records = [{"key": "value"}] + + def json_writer(string_writer, schema, record_list): + assert schema == fastavro.parse_schema.return_value + assert record_list == records + string_writer.write(MESSAGE_BODY) + + fastavro.json_writer.side_effect = json_writer + + result = subject.encode(records, schema_version) + + assert result.body == MESSAGE_BODY.encode() + assert result.version == "7" + + +@pytest.mark.parametrize("schema_version", ["5", "42"]) +def test_decode_decodes_the_message(subject, fastavro, schema_version): + fastavro.json_reader.return_value = SCHEMA_OBJECT + + result = subject.decode(MESSAGE_BODY.encode(), schema_version) + + fastavro.json_reader.assert_called_once_with(ANY, fastavro.parse_schema.return_value) + string_reader = fastavro.json_reader.call_args.args[0] + assert string_reader.read() == MESSAGE_BODY + + assert result == SCHEMA_OBJECT diff --git a/tests/rabbit/test_background_consumer.py b/tests/rabbit/test_background_consumer.py new file mode 100644 index 0000000..8f95b4e --- /dev/null +++ b/tests/rabbit/test_background_consumer.py @@ -0,0 +1,107 @@ +from unittest.mock import Mock, patch + +import pytest + +from lab_share_lib.rabbit.background_consumer import BackgroundConsumer +from lab_share_lib.types import RabbitServerDetails + +DEFAULT_SERVER_DETAILS = RabbitServerDetails( + uses_ssl=False, host="host", port=5672, username="username", password="password", vhost="vhost" +) + + +@pytest.fixture +def subject(): + return BackgroundConsumer(DEFAULT_SERVER_DETAILS, "queue", Mock()) + + +def test_init_sets_the_correct_name(subject): + assert subject.name == "BackgroundConsumer" + + +def test_init_sets_daemon_thread_true(subject): + assert subject.daemon is True + + +@pytest.mark.parametrize("uses_ssl", [True, False]) +@pytest.mark.parametrize("host", ["", "host"]) +@pytest.mark.parametrize("port", [8080, 5672]) +@pytest.mark.parametrize("username", ["", "username"]) +@pytest.mark.parametrize("password", ["", "password"]) +@pytest.mark.parametrize("vhost", ["", 
"vhost"]) +@pytest.mark.parametrize("queue", ["", "queue"]) +def test_consumer_is_passed_correct_parameters(uses_ssl, host, port, username, password, vhost, queue): + server_details = RabbitServerDetails( + uses_ssl=uses_ssl, host=host, port=port, username=username, password=password, vhost=vhost + ) + process_message = Mock() + subject = BackgroundConsumer(server_details, queue, process_message) + + with patch( + "lab_share_lib.rabbit.background_consumer.AsyncConsumer.__init__", return_value=None + ) as async_consumer_init: + # Initiate creation of the AsyncConsumer + subject._consumer + + async_consumer_init.assert_called_once_with(server_details, queue, process_message) + + +def test_run_starts_consumer_and_stops_on_keyboard_interrupt(subject): + with patch("lab_share_lib.rabbit.background_consumer.AsyncConsumer") as consumer: + consumer.return_value.run.side_effect = KeyboardInterrupt() + subject.run() + + consumer.return_value.run.assert_called_once() + consumer.return_value.stop.assert_called_once() + + +def test_maybe_reconnect_sleeps_longer_each_time(subject): + with patch("lab_share_lib.rabbit.background_consumer.time.sleep") as sleep_func: + with patch("lab_share_lib.rabbit.background_consumer.AsyncConsumer") as consumer: + consumer.return_value.should_reconnect = True + consumer.return_value.had_transient_error = False + consumer.return_value.was_consuming = False + + subject._maybe_reconnect() + sleep_func.assert_called_with(1) + subject._maybe_reconnect() + sleep_func.assert_called_with(2) + subject._maybe_reconnect() + sleep_func.assert_called_with(3) + + subject._reconnect_delay = 28 + subject._maybe_reconnect() + sleep_func.assert_called_with(29) + subject._maybe_reconnect() + sleep_func.assert_called_with(30) + subject._maybe_reconnect() # Maximum delay is 30 seconds + sleep_func.assert_called_with(30) + + assert consumer.return_value.stop.call_count == 6 + + +def test_maybe_reconnect_sleeps_zero_seconds_if_consumer_was_consuming(subject): + with patch("lab_share_lib.rabbit.background_consumer.time.sleep") as sleep_func: + with patch("lab_share_lib.rabbit.background_consumer.AsyncConsumer") as consumer: + consumer.return_value.should_reconnect = True + consumer.return_value.had_transient_error = False + consumer.return_value.was_consuming = True + + for _ in range(5): + subject._maybe_reconnect() + sleep_func.assert_called_with(0) + + assert consumer.return_value.stop.call_count == 5 + + +def test_maybe_reconnect_sleeps_30_seconds_if_consumer_had_transient_error(subject): + with patch("lab_share_lib.rabbit.background_consumer.time.sleep") as sleep_func: + with patch("lab_share_lib.rabbit.background_consumer.AsyncConsumer") as consumer: + consumer.return_value.should_reconnect = True + consumer.return_value.had_transient_error = True + + for _ in range(5): + subject._maybe_reconnect() + sleep_func.assert_called_with(30) + + assert consumer.return_value.stop.call_count == 5 diff --git a/tests/rabbit/test_basic_publisher.py b/tests/rabbit/test_basic_publisher.py new file mode 100644 index 0000000..e336b64 --- /dev/null +++ b/tests/rabbit/test_basic_publisher.py @@ -0,0 +1,83 @@ +from typing import cast +from unittest.mock import MagicMock, patch + +import pytest +from pika import PlainCredentials +from pika.spec import PERSISTENT_DELIVERY_MODE + +from lab_share_lib.constants import RABBITMQ_HEADER_KEY_SUBJECT, RABBITMQ_HEADER_KEY_VERSION +from lab_share_lib.rabbit.basic_publisher import BasicPublisher +from lab_share_lib.types import RabbitServerDetails + 
+DEFAULT_SERVER_DETAILS = RabbitServerDetails( + uses_ssl=False, host="host", port=5672, username="username", password="password", vhost="vhost" +) + + +@pytest.fixture +def channel(): + yield MagicMock() + + +@pytest.fixture +def blocking_connection(channel): + with patch("lab_share_lib.rabbit.basic_publisher.BlockingConnection") as blocking_connection: + blocking_connection.return_value.channel.return_value = channel + yield blocking_connection + + +@pytest.fixture +def subject(): + return BasicPublisher(DEFAULT_SERVER_DETAILS) + + +@pytest.mark.parametrize("uses_ssl", [True, False]) +@pytest.mark.parametrize("host", ["", "host"]) +@pytest.mark.parametrize("port", [8080, 5672]) +@pytest.mark.parametrize("username", ["", "username"]) +@pytest.mark.parametrize("password", ["", "password"]) +@pytest.mark.parametrize("vhost", ["", "vhost"]) +def test_constructor_creates_correct_connection_parameters(uses_ssl, host, port, username, password, vhost): + server_details = RabbitServerDetails( + uses_ssl=uses_ssl, host=host, port=port, username=username, password=password, vhost=vhost + ) + + subject = BasicPublisher(server_details) + + if server_details.uses_ssl: + assert subject._connection_params.ssl_options is not None + else: + assert subject._connection_params.ssl_options is None + + assert subject._connection_params.host == server_details.host + assert subject._connection_params.port == server_details.port + assert subject._connection_params.virtual_host == server_details.vhost + + credentials = cast(PlainCredentials, subject._connection_params.credentials) + assert credentials.username == server_details.username + assert credentials.password == server_details.password + + +@pytest.mark.parametrize("exchange", ["", "exchange"]) +@pytest.mark.parametrize("routing_key", ["", "routing_key"]) +@pytest.mark.parametrize("body", ["", "body"]) +@pytest.mark.parametrize("schema_subject", ["", "subject"]) +@pytest.mark.parametrize("schema_version", ["", "schema_version"]) +def test_publish_message_publishes_the_message( + subject, blocking_connection, channel, exchange, routing_key, body, schema_subject, schema_version +): + subject.publish_message(exchange, routing_key, body, schema_subject, schema_version) + + blocking_connection.assert_called_once_with(subject._connection_params) + blocking_connection.return_value.channel.assert_called_once() + channel.basic_publish.assert_called_once() + blocking_connection.return_value.close.assert_called_once() + + assert channel.basic_publish.call_args.kwargs["exchange"] == exchange + assert channel.basic_publish.call_args.kwargs["routing_key"] == routing_key + assert channel.basic_publish.call_args.kwargs["body"] == body + + message_properties = channel.basic_publish.call_args.kwargs["properties"] + assert message_properties.delivery_mode == PERSISTENT_DELIVERY_MODE + assert message_properties.headers[RABBITMQ_HEADER_KEY_SUBJECT] == schema_subject + assert message_properties.headers[RABBITMQ_HEADER_KEY_VERSION] == schema_version diff --git a/tests/rabbit/test_schema_registry.py b/tests/rabbit/test_schema_registry.py new file mode 100644 index 0000000..ba74610 --- /dev/null +++ b/tests/rabbit/test_schema_registry.py @@ -0,0 +1,110 @@ +import pytest +import responses + +from lab_share_lib.exceptions import TransientRabbitError +from lab_share_lib.rabbit.schema_registry import SchemaRegistry + +BASE_URI = "http://schema_registry.com" +API_KEY = "EA7G00DF00D" + + +@pytest.fixture +def subject(): + subject = SchemaRegistry(BASE_URI, API_KEY) + yield subject + + 
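
The BasicPublisher tests above describe the public calling convention: the publisher is constructed from a RabbitServerDetails, and publish_message sends a persistent message with the schema subject and version carried in its headers. A usage sketch inferred from those tests; the broker details, exchange, routing key and payload below are placeholders, and a reachable RabbitMQ broker is assumed since publish_message opens a BlockingConnection per call.

from lab_share_lib.rabbit.basic_publisher import BasicPublisher
from lab_share_lib.types import RabbitServerDetails

# Placeholder connection details for illustration only.
server_details = RabbitServerDetails(
    uses_ssl=False, host="localhost", port=5672, username="guest", password="guest", vhost="/"
)
publisher = BasicPublisher(server_details)

publisher.publish_message(
    "example-exchange",         # exchange (placeholder)
    "example.routing.key",      # routing key (placeholder)
    b"<avro-encoded payload>",  # body, e.g. the .body of an AvroEncoder.encode() result
    "create-plate-map",         # schema subject, set in the RABBITMQ_HEADER_KEY_SUBJECT header
    "7",                        # schema version, set in the RABBITMQ_HEADER_KEY_VERSION header
)
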
+def test_constructor_stores_values_correctly(subject): + assert subject._base_uri == BASE_URI + assert subject._api_key == API_KEY + + +@pytest.mark.parametrize( + "schema_subject, schema_version", + [ + ["test-subject-1", "1"], + ["test-subject-2", "7"], + ["test-subject-3", "latest"], + ], +) +@responses.activate +def test_get_schema_generates_the_correct_request(subject, schema_subject, schema_version): + expected_url = f"{BASE_URI}/subjects/{schema_subject}/versions/{schema_version}" + + responses.add( + responses.GET, + expected_url, + json={}, + status=200, + ) + + subject.get_schema(schema_subject, schema_version) + + assert len(responses.calls) == 1 + assert responses.calls[0].request.url == expected_url + assert "X-API-KEY" in responses.calls[0].request.headers + assert responses.calls[0].request.headers["X-API-KEY"] == API_KEY + + +@responses.activate +def test_get_schema_returns_the_response_json(subject): + schema_subject = "create-plate-map" + schema_version = "7" + response_json = {"schema": "Some schema"} + + responses.add( + responses.GET, + f"{BASE_URI}/subjects/{schema_subject}/versions/{schema_version}", + json=response_json, + status=200, + ) + + result = subject.get_schema(schema_subject, schema_version) + + assert result == response_json + + +@responses.activate +def test_get_schema_without_a_version_gets_latest(subject): + expected_url = f"{BASE_URI}/subjects/create-plate-map/versions/latest" + + responses.add( + responses.GET, + expected_url, + json={}, + status=200, + ) + + subject.get_schema("create-plate-map") + + assert len(responses.calls) == 1 + assert responses.calls[0].request.url == expected_url + + +@responses.activate +def test_get_schema_caches_responses(subject): + schema_subject = "create-plate-map" + schema_version = "7" + response_json = {"schema": "Some schema"} + + responses.add( + responses.GET, + f"{BASE_URI}/subjects/{schema_subject}/versions/{schema_version}", + json=response_json, + status=200, + ) + + result1 = subject.get_schema(schema_subject, schema_version) + assert result1 == response_json + + responses.reset() # Stop responding to requests + + result2 = subject.get_schema(schema_subject, schema_version) + assert result2 == response_json # Note the result was the same after resetting the responses + + +def test_get_schema_raises_transient_rabbit_error_on_exception(subject): + with pytest.raises(TransientRabbitError) as ex_info: + subject.get_schema("no-schema-here", "42") + + assert BASE_URI in ex_info.value.message diff --git a/wait_for_connection.sh b/wait_for_connection.sh new file mode 100755 index 0000000..211a42f --- /dev/null +++ b/wait_for_connection.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +# +# This script waits TIMEOUT seconds for connection to HOST:PORT +# to be stablished and exit with 0 if success or 1 if error +set -o pipefail +set -o nounset + +HOST=$1 +PORT=$2 +TIMEOUT=$3 + +TIMEOUT_END=$(($(date +%s) + TIMEOUT)) +result=1 +while [ $result -ne 0 ]; do + echo "Waiting for connection to ${HOST}:${PORT}..." + nc -w 1 -z "${HOST}" "${PORT}" > /dev/null 2>&1 + result=$? + if [ $result -eq 0 ]; then + echo "Connected to ${HOST}:${PORT}." 
+ exit 0 + else + if [ $(date +%s) -ge $TIMEOUT_END ]; then + echo "Operation timed out" >&2 + exit 1 + fi + sleep 1 + fi +done \ No newline at end of file From 42d47598d33902230ab138bc4e5e236d19c581a4 Mon Sep 17 00:00:00 2001 From: Eduardo Martin Rojo Date: Mon, 11 Jul 2022 15:13:10 +0100 Subject: [PATCH 2/3] Some more workflows --- .github/workflows/automated_release.yml | 36 +++++++ .github/workflows/check_release_version.yml | 27 +++++ .github/workflows/ci.yml | 111 ++++++++++++++++++++ .github/workflows/codeql-analysis.yml | 72 +++++++++++++ Pipfile | 2 + Pipfile.lock | 61 ++++++----- pyproject.toml | 29 +++++ 7 files changed, 309 insertions(+), 29 deletions(-) create mode 100644 .github/workflows/automated_release.yml create mode 100644 .github/workflows/check_release_version.yml create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/automated_release.yml b/.github/workflows/automated_release.yml new file mode 100644 index 0000000..4781143 --- /dev/null +++ b/.github/workflows/automated_release.yml @@ -0,0 +1,36 @@ +name: Automated build, test, release and push + +on: + push: + branches: + - develop + - master + +env: + IMAGE_NAME: ${{ github.repository }}/${{ github.event.repository.name }} + +jobs: + build_test_release_push: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: nelonoel/branch-name@v1.0.1 + + # Create a release tag based on the branch name and .release-version file + - name: Set release tag + # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable + # On the develop branch this might create RELEASE_VERSION=2.4.6-987654321-develop + # On the master branch this would then only create RELEASE_VERSION=2.4.6 + run: echo "RELEASE_VERSION=$(printf -- '%s%s\n' $(cat .release-version) $([ ${BRANCH_NAME} = "develop" ] && printf -- '-%s-develop' ${GITHUB_RUN_ID} || echo ""))" >> $GITHUB_ENV + + # Create a GitHub release with the release asset as an artifact + - name: Create release and upload release.tar.gz + uses: ncipollo/release-action@v1.8.8 + with: + name: ${{ env.RELEASE_VERSION }} + tag: v${{ env.RELEASE_VERSION }} + prerelease: ${{ !(github.ref == 'refs/heads/master') }} + commit: ${{ github.sha }} + artifacts: dist/*.tar.gz + artifactErrorsFailBuild: true diff --git a/.github/workflows/check_release_version.yml b/.github/workflows/check_release_version.yml new file mode 100644 index 0000000..e5db52d --- /dev/null +++ b/.github/workflows/check_release_version.yml @@ -0,0 +1,27 @@ +# Checks that the .release-version file has been updated +name: Check release version + +on: + pull_request: + branches: + - master + +jobs: + check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Get specific changed files + id: changed-files-specific + uses: tj-actions/changed-files@v23 + with: + files: | + .release-version + + - name: Run step looking for change in the release version + run: >- + if ! 
"${{ steps.changed-files-specific.outputs.any_changed }}"; then + echo "Please change the release version number" + exit 1; + fi diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..49e92fb --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,111 @@ +name: CI + +on: + push: + branches: + - develop-* + - develop + - master + pull_request: + branches: + - develop-* + - develop + - master + +jobs: + black: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/Pipfile') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install pipenv + run: | + pip install pipenv + - name: Install dependencies + run: | + pipenv sync --dev --system + - name: Check format with black + run: | + # stop the build if there are black formatting errors + python -m black --check . + flake8: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/Pipfile') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install pipenv + run: | + pip install pipenv + - name: Install dependencies + run: | + pipenv sync --dev --system + - name: Lint with flake8 + run: | + # stop the build if there are Python syntax errors or undefined names + flake8 + mypy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/Pipfile') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install pipenv + run: | + pip install pipenv + - name: Install dependencies + run: | + pipenv sync --dev --system + - name: Run mypy + run: | + python -m mypy . + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - uses: actions/cache@v1 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/Pipfile') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install pipenv + run: | + pip install pipenv + - name: Install dependencies + run: | + pipenv sync --dev --system + - name: Test with pytest + run: | + python -m pytest -vx + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000..76b7aad --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,72 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. 
+# ******** NOTE ******** + +name: "CodeQL" + +on: + push: + branches: [ develop-*, develop, master ] + paths-ignore: + - "README.md" + pull_request: + # The branches below must be a subset of the branches above + branches: [ develop-*, develop ] + paths-ignore: + - "README.md" + schedule: + - cron: '40 23 * * 6' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + # Learn more... + # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + # - name: Autobuild + # uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 diff --git a/Pipfile b/Pipfile index 683e8fe..59511f4 100644 --- a/Pipfile +++ b/Pipfile @@ -16,6 +16,8 @@ pytest-freezegun = "*" types-python-dateutil = "*" responses = "*" types-requests = "*" +build = "*" +hatchling = "*" [packages] colorlog = "~=6.6" diff --git a/Pipfile.lock b/Pipfile.lock index 9a5133d..ea0c445 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "02b7ecf2675009cad3c3bdf238d943eac213507d8ce9d4ca35532e49e1278337" + "sha256": "2a39c79f23fb2a69369809420b9b636b04beac1312fd0cc846795aa1f6568b2f" }, "pipfile-spec": 6, "requires": { @@ -91,7 +91,6 @@ "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642", "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578" ], - "markers": "python_full_version >= '3.6.0'", "version": "==3.8.1" }, "aiosignal": { @@ -99,7 +98,6 @@ "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a", "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2" ], - "markers": "python_full_version >= '3.6.0'", "version": "==1.2.0" }, "async-timeout": { @@ -107,7 +105,6 @@ "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15", "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c" ], - "markers": "python_full_version >= '3.6.0'", "version": "==4.0.2" }, "attrs": { @@ -115,7 +112,6 @@ "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==21.4.0" }, "certifi": { @@ 
-123,7 +119,6 @@ "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" ], - "markers": "python_full_version >= '3.6.0'", "version": "==2022.6.15" }, "charset-normalizer": { @@ -131,7 +126,6 @@ "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" ], - "markers": "python_full_version >= '3.6.0'", "version": "==2.1.0" }, "colorlog": { @@ -227,7 +221,6 @@ "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2", "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951" ], - "markers": "python_version >= '3.7'", "version": "==1.3.0" }, "idna": { @@ -235,7 +228,6 @@ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '3.5'", "version": "==3.3" }, "more-itertools": { @@ -308,7 +300,6 @@ "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937", "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d" ], - "markers": "python_version >= '3.7'", "version": "==6.0.2" }, "pika": { @@ -348,7 +339,6 @@ "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec", "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", "version": "==1.26.10" }, "yarl": { @@ -426,7 +416,6 @@ "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576", "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59" ], - "markers": "python_full_version >= '3.6.0'", "version": "==1.7.2" } }, @@ -436,7 +425,6 @@ "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4", "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==21.4.0" }, "black": { @@ -468,12 +456,19 @@ "index": "pypi", "version": "==22.6.0" }, + "build": { + "hashes": [ + "sha256:19b0ed489f92ace6947698c3ca8436cb0556a66e2aa2d34cd70e2a5d27cd0437", + "sha256:887a6d471c901b1a6e6574ebaeeebb45e5269a79d095fe9a8f88d6614ed2e5f0" + ], + "index": "pypi", + "version": "==0.8.0" + }, "certifi": { "hashes": [ "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" ], - "markers": "python_full_version >= '3.6.0'", "version": "==2022.6.15" }, "charset-normalizer": { @@ -481,7 +476,6 @@ "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" ], - "markers": "python_full_version >= '3.6.0'", "version": "==2.1.0" }, "click": { @@ -489,7 +483,6 @@ "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" ], - "markers": "python_version >= '3.7'", "version": "==8.1.3" }, "coverage": { @@ -542,6 +535,13 @@ "index": "pypi", "version": "==6.4.1" }, + "editables": { + "hashes": [ + "sha256:167524e377358ed1f1374e61c268f0d7a4bf7dbd046c656f7b410cde16161b1a", + "sha256:ee686a8db9f5d91da39849f175ffeef094dd0e9c36d6a59a2e8c7f92a3b80020" + ], + "version": "==0.3" + }, "flake8": { "hashes": [ 
"sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d", @@ -563,15 +563,21 @@ "sha256:15103a67dfa868ad809a8f508146e396be2995172d25f927e48ce51c0bf5cb09", "sha256:b4c64efb275e6bc68dc6e771b17ffe0ff0f90b81a2a5189043550b6519926ba4" ], - "markers": "python_version >= '3.6'", "version": "==1.2.1" }, + "hatchling": { + "hashes": [ + "sha256:9ca409a41140cc662164680b78adcd3ba77b23a945984bd9970d11fe38e86a38", + "sha256:f5b01f47d5c6b055aeb0cf05ab578ee43148db40d9dc80315b14a5b528d526a9" + ], + "index": "pypi", + "version": "==1.5.0" + }, "idna": { "hashes": [ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '3.5'", "version": "==3.3" }, "iniconfig": { @@ -629,7 +635,6 @@ "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb", "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522" ], - "markers": "python_version >= '3.6'", "version": "==21.3" }, "pathspec": { @@ -639,6 +644,13 @@ ], "version": "==0.9.0" }, + "pep517": { + "hashes": [ + "sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0", + "sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161" + ], + "version": "==0.12.0" + }, "pika-stubs": { "hashes": [ "sha256:aaa78fa9f52eb3591b6073fbbe2607567405d1857be268d447bea252e22dd6cf" @@ -651,7 +663,6 @@ "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" ], - "markers": "python_version >= '3.7'", "version": "==2.5.2" }, "pluggy": { @@ -659,7 +670,6 @@ "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" ], - "markers": "python_version >= '3.6'", "version": "==1.0.0" }, "py": { @@ -667,7 +677,6 @@ "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==1.11.0" }, "pycodestyle": { @@ -675,7 +684,6 @@ "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20", "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", "version": "==2.8.0" }, "pyflakes": { @@ -683,7 +691,6 @@ "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c", "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.4.0" }, "pyparsing": { @@ -691,7 +698,6 @@ "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" ], - "markers": "python_full_version >= '3.6.8'", "version": "==3.0.9" }, "pytest": { @@ -723,7 +729,6 @@ "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.8.2" }, "requests": { @@ -747,7 +752,6 @@ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.16.0" }, "tomli": { @@ -786,7 +790,7 @@ "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02", "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6" ], - "markers": "python_version >= '3.7'", + "markers": "python_version < '3.10'", "version": "==4.3.0" }, "urllib3": { @@ -794,7 +798,6 @@ "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec", "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", "version": "==1.26.10" } } diff --git a/pyproject.toml b/pyproject.toml index 85a28bd..87e9a51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,32 @@ +[project] +name = "lab-share-lib" +version = "0.0.1" +authors = [ + { name="Stuart McHattie", email="sm49@sanger.ac.uk" }, +] +maintainers = [ + { name="PSD", email="psd-help@sanger.ac.uk" }, +] +description = "Library to allow building consumers to interact with lab-share framework created as part of the Heron handover OKR" +readme = "README.md" +license = { file="LICENSE" } +requires-python = ">=3.7" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +[project.urls] +"Homepage" = "https://github.com/sanger/lab-share-lib" +"Bug Tracker" = "https://github.com/sanger/lab-share-lib/issues" + + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + + [tool.black] line-length = 120 From ae51711151cc9c08227e914361fb42bff01e6091 Mon Sep 17 00:00:00 2001 From: Eduardo Martin Rojo Date: Mon, 11 Jul 2022 15:14:42 +0100 Subject: [PATCH 3/3] New release version --- .release-version | 1 + 1 file changed, 1 insertion(+) create mode 100644 .release-version diff --git a/.release-version b/.release-version new file mode 100644 index 0000000..6e8bf73 --- /dev/null +++ b/.release-version @@ -0,0 +1 @@ +0.1.0