diff --git a/.dockerignore b/.dockerignore index 24e9c941c6..c61456fc36 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,3 +3,4 @@ htmlcov staticfiles .coverage +care/media/ diff --git a/.github/workflows/test-base.yml b/.github/workflows/test-base.yml index 87a611f4a0..9246ad80e9 100644 --- a/.github/workflows/test-base.yml +++ b/.github/workflows/test-base.yml @@ -16,7 +16,7 @@ jobs: uses: actions/cache@v3 with: path: /tmp/.buildx-cache - key: ${{ runner.os }}-buildx-${{ hashFiles('Pipfile.lock', 'docker/prod.Dockerfile') }} + key: ${{ runner.os }}-buildx-${{ hashFiles('Pipfile.lock', 'docker/dev.Dockerfile') }} restore-keys: | ${{ runner.os }}-buildx- @@ -30,7 +30,7 @@ jobs: files: docker-compose.yaml,docker-compose.local.yaml - name: Start services - run: docker compose -f docker-compose.yaml -f docker-compose.local.yaml up -d --no-build + run: docker compose -f docker-compose.yaml -f docker-compose.local.yaml up -d --wait --no-build - name: Check migrations run: make checkmigration diff --git a/Makefile b/Makefile index 2e761cd334..bbd32d5c3d 100644 --- a/Makefile +++ b/Makefile @@ -20,7 +20,7 @@ build: docker compose -f docker-compose.yaml -f $(docker_config_file) build up: - docker compose -f docker-compose.yaml -f $(docker_config_file) up -d + docker compose -f docker-compose.yaml -f $(docker_config_file) up -d --wait down: docker compose -f docker-compose.yaml -f $(docker_config_file) down @@ -34,16 +34,16 @@ list: logs: docker compose -f docker-compose.yaml -f $(docker_config_file) logs -checkmigration: up +checkmigration: docker compose exec backend bash -c "python manage.py makemigrations --check --dry-run" -makemigrations: up +makemigrations: docker compose exec backend bash -c "python manage.py makemigrations" -test: up +test: docker compose exec backend bash -c "python manage.py test --keepdb --parallel" -test-coverage: up +test-coverage: docker compose exec backend bash -c "coverage run manage.py test --settings=config.settings.test --keepdb 
--parallel" docker compose exec backend bash -c "coverage combine || true; coverage xml" docker compose cp backend:/app/coverage.xml coverage.xml diff --git a/Pipfile b/Pipfile index d018e3a8ab..6714aa7153 100644 --- a/Pipfile +++ b/Pipfile @@ -32,7 +32,6 @@ gunicorn = "==21.2.0" healthy-django = "==0.1.0" jsonschema = "==4.20.0" jwcrypto = "==1.5.1" -littletable = "==2.2.3" newrelic = "==9.3.0" pillow = "==10.1.0" psycopg = "==3.1.14" @@ -41,10 +40,11 @@ pydantic = "==1.10.12" # fix for fhir.resources < 7.0.2 pyjwt = "==2.8.0" python-slugify = "==8.0.1" pywebpush = "==1.14.0" -redis = {extras = ["hiredis"], version = "==5.0.0"} +redis = {extras = ["hiredis"], version = "<5.0.0"} # constraint for redis-om requests = "==2.31.0" sentry-sdk = "==1.30.0" whitenoise = "==6.5.0" +redis-om = "==0.2.1" [dev-packages] black = "==23.9.1" diff --git a/Pipfile.lock b/Pipfile.lock index f8c7f1fc8a..336c2677d3 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "295d0042e5bcc49505f0e031c73abc0115fbef956004c95aef86c8c7f184e275" + "sha256": "3c54a8dd5717e310c8371fdf3e6e4899df3af066f4a09de240d672edd4d2e9c4" }, "pipfile-spec": 6, "requires": { @@ -663,6 +663,7 @@ "sha256:f9f606e810858207d4b4287b4ef0dc622c2aa469548bf02b59dcc616f134f811", "sha256:fa45f7d771094b8145af10db74704ab0f698adb682fbf3721d8090f90e42cc49" ], + "markers": "python_version >= '3.7'", "version": "==2.3.2" }, "http-ece": { @@ -728,14 +729,13 @@ "markers": "python_version >= '3.8'", "version": "==5.3.4" }, - "littletable": { + "more-itertools": { "hashes": [ - "sha256:0e48d8bcdfaf6610f41e2d702a4d384e80af83fe737a2152e6a9b8d40c5a8998", - "sha256:15671dd79c24102b247e9acda33b1eb5940c6905ef97a9997b222464e1a7f870" + "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d", + "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3" ], - "index": "pypi", - "markers": "python_version >= '3.9'", - "version": "==2.2.3" + "markers": "python_version >= 
'3.7'", + "version": "==9.1.0" }, "newrelic": { "hashes": [ @@ -983,6 +983,14 @@ "markers": "python_version >= '3.7'", "version": "==8.0.1" }, + "python-ulid": { + "hashes": [ + "sha256:5fb5e4a91db8ca93e8938a613360b3def299b60d41f847279a8c39c9b2e9c65e", + "sha256:88c952f6be133dbede19c907d72d26717d2691ec8421512b573144794d891e24" + ], + "markers": "python_version >= '3.7'", + "version": "==1.1.0" + }, "pytz": { "hashes": [ "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b", @@ -1058,19 +1066,28 @@ "hiredis" ], "hashes": [ - "sha256:06570d0b2d84d46c21defc550afbaada381af82f5b83e5b3777600e05d8e2ed0", - "sha256:5cea6c0d335c9a7332a460ed8729ceabb4d0c489c7285b0a86dbbf8a017bd120" + "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d", + "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c" ], "markers": "python_version >= '3.7'", - "version": "==5.0.0" + "version": "==4.6.0" + }, + "redis-om": { + "hashes": [ + "sha256:150c9cb5238d6003f35e9b6394aab30a0df35b00e955eb7dc508f4345e0a0ccc", + "sha256:31313a3027a014608b3a4d44ecd1d3000c7d0fe3a25060db19b42225e636cd53" + ], + "index": "pypi", + "markers": "python_version >= '3.7' and python_version < '4.0'", + "version": "==0.2.1" }, "referencing": { "hashes": [ - "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161", - "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99" + "sha256:3c57da0513e9563eb7e203ebe9bb3a1b509b042016433bd1e45a2853466c3dd3", + "sha256:7e4dc12271d8e15612bfe35792f5ea1c40970dadf8624602e33db2758f7ee554" ], "markers": "python_version >= '3.8'", - "version": "==0.32.0" + "version": "==0.32.1" }, "requests": { "hashes": [ @@ -1225,6 +1242,22 @@ ], "version": "==1.3" }, + "types-pyopenssl": { + "hashes": [ + "sha256:3d6f3462bec0c260caadf93fbb377225c126661b779c7d9ab99b6dad5ca10db9", + "sha256:47a7eedbd18b7bcad17efebf1c53416148f5a173918a6d75027e75e32fe039ae" + ], + "markers": "python_version >= '3.8'", + "version": 
"==23.3.0.20240106" + }, + "types-redis": { + "hashes": [ + "sha256:2b2fa3a78f84559616242d23f86de5f4130dfd6c3b83fb2d8ce3329e503f756e", + "sha256:912de6507b631934bd225cdac310b04a58def94391003ba83939e5a10e99568d" + ], + "markers": "python_version >= '3.8'", + "version": "==4.6.0.20240106" + }, "typing-extensions": { "hashes": [ "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783", @@ -1273,10 +1306,10 @@ }, "wcwidth": { "hashes": [ - "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02", - "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c" + "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", + "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" ], - "version": "==0.2.12" + "version": "==0.2.13" }, "whitenoise": { "hashes": [ @@ -1434,12 +1467,12 @@ }, "boto3": { "hashes": [ - "sha256:970fd9f9f522eb48f3cd5574e927b369279ebf5bcf0f2fae5ed9cc6306e58558", - "sha256:aad3f305fe3cd4f2bba545c9580cd460c366af56a8aabb6094528dd32317f8d2" + "sha256:2b74c58f475ff0dcf2f3637da9367a9465d29fad971ff5d8dc54ac39554e9022", + "sha256:f8f16c2d0ec1dca291857f1c138d5c30e01e40f653443cc2679e2f6ae71b05a6" ], "index": "pypi", "markers": "python_version >= '3.8'", - "version": "==1.34.2" + "version": "==1.34.15" }, "boto3-stubs": { "extras": [ @@ -1450,17 +1483,16 @@ "sha256:93321cfd3b2f161ed3b9fdf4176e5aed4a6f5a0d124cb9661cc238843180e6e0", "sha256:d9b21c2771e2f067097400ee34d92e08e71986e4bc20f1f4f3b1be0fc3f7f61b" ], - "index": "pypi", "markers": "python_version >= '3.8'", "version": "==1.34.15" }, "botocore": { "hashes": [ - "sha256:1ff1398b6ea670e1c01ac67a33af3da854f8e700d3528289c04f319c330d8250", - "sha256:51905c3d623c60df5dc5794387de7caf886d350180a01a3dfa762e903edb45a9" + "sha256:16bcf871e67ef0177593f06e9e5bae4db51c9a9a2e953cb14feeb42d53441a85", + "sha256:c3c3404962a6d9d5e1634bd70ed53b8eff1ff17ee9d7a6240e9e8c94db48ad6f" ], "markers": "python_version >= '3.8'", - "version": "==1.34.11" + 
"version": "==1.34.15" }, "botocore-stubs": { "hashes": [ @@ -1782,11 +1814,11 @@ }, "faker": { "hashes": [ - "sha256:1d5dc0a75da7bc40741ee4c84d99dc087b97bd086d4222ad06ac4dd2219bcf3f", - "sha256:9c22c0a734ca01c6e4f2259eab5dab9081905a9d67b27272aea5c9feeb5a3789" + "sha256:3cd0e04ed7da1bb8037afc40d1127d19e0ac4afac247a1fe1d8dde9b5c6d6e5b", + "sha256:d1b8fe8e8fc96d816294a301741940c2229dcf1f5dd1231805666e4005cc6353" ], "markers": "python_version >= '3.8'", - "version": "==22.0.0" + "version": "==22.1.0" }, "filelock": { "hashes": [ @@ -1990,10 +2022,10 @@ }, "mypy-boto3-s3": { "hashes": [ - "sha256:633876d2a96dbb924f9667084316c1759bff40c19a9a38313d5a4e825c5fc641", - "sha256:7644a00e096ebb1c3292551059f64ff8329625dacd40827ced9481b14d64c733" + "sha256:71c39ab0623cdb442d225b71c1783f6a513cff4c4a13505a2efbb2e3aff2e965", + "sha256:f9669ecd182d5bf3532f5f2dcc5e5237776afe157ad5a0b37b26d6bec5fcc432" ], - "version": "==1.34.0" + "version": "==1.34.14" }, "mypy-extensions": { "hashes": [ @@ -2188,11 +2220,11 @@ }, "s3transfer": { "hashes": [ - "sha256:01d4d2c35a016db8cb14f9a4d5e84c1f8c96e7ffc211422555eed45c11fa7eb1", - "sha256:9e1b186ec8bb5907a1e82b51237091889a9973a2bb799a924bcd9f301ff79d3d" + "sha256:3cdb40f5cfa6966e812209d0994f2a4709b561c88e90cf00c2696d2df4e56b2e", + "sha256:d0c8bbf672d5eebbe4e57945e23b972d963f07d82f661cabf678a5c88831595b" ], "markers": "python_version >= '3.8'", - "version": "==0.9.0" + "version": "==0.10.0" }, "setuptools": { "hashes": [ @@ -2266,11 +2298,11 @@ }, "types-requests": { "hashes": [ - "sha256:0f8c0c9764773384122813548d9eea92a5c4e1f33ed54556b508968ec5065cee", - "sha256:2e2230c7bc8dd63fa3153c1c0ae335f8a368447f0582fc332f17d54f88e69027" + "sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612", + "sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354" ], - "markers": "python_version >= '3.7'", - "version": "==2.31.0.20231231" + "markers": "python_version >= '3.8'", + "version": "==2.31.0.20240106" }, 
"types-s3transfer": { "hashes": [ @@ -2293,7 +2325,7 @@ "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e" ], - "markers": "python_version >= '3.6'", + "markers": "python_version >= '3.7'", "version": "==2.0.7" }, "virtualenv": { @@ -2340,10 +2372,10 @@ }, "wcwidth": { "hashes": [ - "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02", - "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c" + "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", + "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" ], - "version": "==0.2.12" + "version": "==0.2.13" }, "werkzeug": { "hashes": [ @@ -2358,11 +2390,11 @@ "docs": { "alabaster": { "hashes": [ - "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3", - "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2" + "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", + "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92" ], - "markers": "python_version >= '3.6'", - "version": "==0.7.13" + "markers": "python_version >= '3.9'", + "version": "==0.7.16" }, "babel": { "hashes": [ @@ -2701,7 +2733,7 @@ "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e" ], - "markers": "python_version >= '3.6'", + "markers": "python_version >= '3.7'", "version": "==2.0.7" } } diff --git a/care/abdm/utils/fhir.py b/care/abdm/utils/fhir.py index c504b64e27..d862f079fe 100644 --- a/care/abdm/utils/fhir.py +++ b/care/abdm/utils/fhir.py @@ -34,6 +34,7 @@ from care.facility.models.file_upload import FileUpload from care.facility.models.icd11_diagnosis import REVERSE_CONDITION_VERIFICATION_STATUSES from care.facility.models.patient_investigation import InvestigationValue +from 
care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id class Fhir: @@ -137,8 +138,6 @@ def _organization(self): return self._organization_profile def _condition(self, diagnosis_id, verification_status): - from care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id - diagnosis = get_icd11_diagnosis_object_by_id(diagnosis_id) [code, label] = diagnosis.label.split(" ", 1) condition_profile = Condition( diff --git a/care/facility/api/serializers/consultation_diagnosis.py b/care/facility/api/serializers/consultation_diagnosis.py index 90e6810b32..62f5d80a6a 100644 --- a/care/facility/api/serializers/consultation_diagnosis.py +++ b/care/facility/api/serializers/consultation_diagnosis.py @@ -7,6 +7,7 @@ ConsultationDiagnosis, ) from care.facility.models.icd11_diagnosis import ICD11Diagnosis +from care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id from care.users.api.serializers.user import UserBaseMinimumSerializer @@ -30,8 +31,6 @@ class ConsultationDiagnosisSerializer(serializers.ModelSerializer): created_by = UserBaseMinimumSerializer(read_only=True) def get_diagnosis_object(self, obj): - from care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id - return get_icd11_diagnosis_object_by_id(obj.diagnosis_id, as_dict=True) class Meta: diff --git a/care/facility/api/viewsets/icd.py b/care/facility/api/viewsets/icd.py index 8bcfc0d25b..9d1f722886 100644 --- a/care/facility/api/viewsets/icd.py +++ b/care/facility/api/viewsets/icd.py @@ -1,31 +1,29 @@ -from re import IGNORECASE - +from redis_om import FindQuery from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.viewsets import ViewSet - -def serailize_data(icd11_object): - result = [] - for object in icd11_object: - if type(object) == tuple: - object = object[0] - result.append( - {"id": object.id, "label": object.label, "chapter": object.chapter} - ) - return result +from 
care.facility.static_data.icd11 import ICD11 +from care.utils.static_data.helpers import query_builder class ICDViewSet(ViewSet): permission_classes = (IsAuthenticated,) + def serialize_data(self, objects: list[ICD11]): + return [diagnosis.get_representation() for diagnosis in objects] + def list(self, request): - from care.facility.static_data.icd11 import ICDDiseases + try: + limit = min(int(request.query_params.get("limit")), 20) + except (ValueError, TypeError): + limit = 20 + + query = [] + if q := request.query_params.get("query"): + query.append(ICD11.label % query_builder(q)) - queryset = ICDDiseases.where(has_code=True) - if request.GET.get("query", False): - query = request.GET.get("query") - queryset = queryset.where( - label=queryset.re_match(r".*" + query + r".*", IGNORECASE) - ) # can accept regex from FE if needed. - return Response(serailize_data(queryset[0:100])) + result = FindQuery(expressions=query, model=ICD11, limit=limit).execute( + exhaust_results=False + ) + return Response(self.serialize_data(result)) diff --git a/care/facility/api/viewsets/prescription.py b/care/facility/api/viewsets/prescription.py index 5af3a7473f..e31359ac85 100644 --- a/care/facility/api/viewsets/prescription.py +++ b/care/facility/api/viewsets/prescription.py @@ -2,6 +2,7 @@ from django.utils import timezone from django_filters import rest_framework as filters from drf_spectacular.utils import extend_schema +from redis_om import FindQuery from rest_framework import mixins, status from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated @@ -18,8 +19,10 @@ PrescriptionType, generate_choices, ) +from care.facility.static_data.medibase import MedibaseMedicine from care.utils.filters.choicefilter import CareChoiceFilter from care.utils.queryset.consultation import get_consultation_queryset +from care.utils.static_data.helpers import query_builder, token_escaper def inverse_choices(choices): @@ -150,57 +153,27 @@ def 
administer(self, request, *args, **kwargs): class MedibaseViewSet(ViewSet): permission_classes = (IsAuthenticated,) - def serailize_data(self, objects): - return [ - { - "id": x[0], - "name": x[1], - "type": x[2], - "generic": x[3], - "company": x[4], - "contents": x[5], - "cims_class": x[6], - "atc_classification": x[7], - } - for x in objects - ] - - def sort(self, query, results): - exact_matches = [] - word_matches = [] - partial_matches = [] - - for x in results: - name = x[1].lower() - generic = x[3].lower() - company = x[4].lower() - words = f"{name} {generic} {company}".split() - - if name == query: - exact_matches.append(x) - elif query in words: - word_matches.append(x) - else: - partial_matches.append(x) - - return exact_matches + word_matches + partial_matches + def serialize_data(self, objects: list[MedibaseMedicine]): + return [medicine.get_representation() for medicine in objects] def list(self, request): - from care.facility.static_data.medibase import MedibaseMedicineTable - - queryset = MedibaseMedicineTable + try: + limit = min(int(request.query_params.get("limit")), 30) + except (ValueError, TypeError): + limit = 30 + query = [] if type := request.query_params.get("type"): - queryset = [x for x in queryset if x[2] == type] + query.append(MedibaseMedicine.type == type) - if query := request.query_params.get("query"): - query = query.strip().lower() - queryset = [x for x in queryset if query in f"{x[1]} {x[3]} {x[4]}".lower()] - queryset = self.sort(query, queryset) + if q := request.query_params.get("query"): + query.append( + (MedibaseMedicine.name == token_escaper.escape(q)) + | (MedibaseMedicine.vec % query_builder(q)) + ) - try: - limit = min(int(request.query_params.get("limit", 30)), 100) - except ValueError: - limit = 30 + result = FindQuery( + expressions=query, model=MedibaseMedicine, limit=limit + ).execute(exhaust_results=False) - return Response(self.serailize_data(queryset[:limit])) + return Response(self.serialize_data(result)) diff 
--git a/care/facility/management/commands/load_redis_index.py b/care/facility/management/commands/load_redis_index.py new file mode 100644 index 0000000000..ed09b4d2ff --- /dev/null +++ b/care/facility/management/commands/load_redis_index.py @@ -0,0 +1,28 @@ +from django.core.cache import cache +from django.core.management import BaseCommand + +from care.facility.static_data.icd11 import load_icd11_diagnosis +from care.facility.static_data.medibase import load_medibase_medicines +from care.hcx.static_data.pmjy_packages import load_pmjy_packages + + +class Command(BaseCommand): + """ + Command to load static data to redis + Usage: python manage.py load_redis_index + """ + + help = "Loads static data to redis" + + def handle(self, *args, **options): + if cache.get("redis_index_loading"): + print("Redis Index already loading, skipping") + return + + cache.set("redis_index_loading", True, timeout=60 * 5) + + load_icd11_diagnosis() + load_medibase_medicines() + load_pmjy_packages() + + cache.delete("redis_index_loading") diff --git a/care/facility/models/patient.py b/care/facility/models/patient.py index 0eb568d869..4085a5da5b 100644 --- a/care/facility/models/patient.py +++ b/care/facility/models/patient.py @@ -35,6 +35,7 @@ REVERSE_ROUTE_TO_FACILITY_CHOICES, ) from care.facility.models.patient_consultation import PatientConsultation +from care.facility.static_data.icd11 import get_icd11_diagnoses_objects_by_ids from care.users.models import GENDER_CHOICES, REVERSE_GENDER_CHOICES, User from care.utils.models.base import BaseManager, BaseModel from care.utils.models.validators import mobile_or_landline_number_validator @@ -538,8 +539,6 @@ def format_as_time(time): return time.strftime("%H:%M") def format_diagnoses(diagnosis_ids): - from care.facility.static_data.icd11 import get_icd11_diagnoses_objects_by_ids - diagnoses = get_icd11_diagnoses_objects_by_ids(diagnosis_ids) return ", ".join([diagnosis["label"] for diagnosis in diagnoses]) diff --git 
a/care/facility/static_data/icd11.py b/care/facility/static_data/icd11.py index 8afc87228a..c0582e6f18 100644 --- a/care/facility/static_data/icd11.py +++ b/care/facility/static_data/icd11.py @@ -1,48 +1,74 @@ -import contextlib import re +from typing import TypedDict -from django.db import connection -from littletable import Table +from django.core.paginator import Paginator +from redis_om import Field, Migrator +from redis_om.model.model import NotFoundError as RedisModelNotFoundError from care.facility.models.icd11_diagnosis import ICD11Diagnosis +from care.utils.static_data.models.base import BaseRedisModel DISEASE_CODE_PATTERN = r"^(?:[A-Z]+\d|\d+[A-Z])[A-Z\d.]*\s" -def fetch_from_db(): - # This is a hack to prevent the migration from failing when the table does not exist - all_tables = connection.introspection.table_names() - if "facility_icd11diagnosis" in all_tables: - return [ - { - "id": str(diagnosis["id"]), - "label": diagnosis["label"], - "has_code": bool(re.match(DISEASE_CODE_PATTERN, diagnosis["label"])), - "chapter": diagnosis["meta_chapter_short"], - } - for diagnosis in ICD11Diagnosis.objects.values( - "id", "label", "meta_chapter_short" - ) - ] - return [] - - -ICDDiseases = Table("ICD11") -ICDDiseases.insert_many(fetch_from_db()) -ICDDiseases.create_search_index("label") -ICDDiseases.create_index("id", unique=True) - - -def get_icd11_diagnosis_object_by_id(diagnosis_id, as_dict=False): - obj = None - with contextlib.suppress(BaseException): - obj = ICDDiseases.by.id[str(diagnosis_id)] - return obj and (obj.__dict__ if as_dict else obj) - - -def get_icd11_diagnoses_objects_by_ids(diagnoses_ids): - diagnosis_objects = [] - for diagnosis in diagnoses_ids: - with contextlib.suppress(BaseException): - diagnosis_objects.append(ICDDiseases.by.id[str(diagnosis)].__dict__) - return diagnosis_objects +class ICD11Object(TypedDict): + id: int + label: str + chapter: str + + +class ICD11(BaseRedisModel): + id: int = Field(primary_key=True) + label: str = 
Field(index=True, full_text_search=True) + chapter: str + + def get_representation(self) -> ICD11Object: + return { + "id": self.id, + "label": self.label, + "chapter": self.chapter, + } + + +def load_icd11_diagnosis(): + print("Loading ICD11 Diagnosis into the redis cache...", end="", flush=True) + + icd_objs = ICD11Diagnosis.objects.order_by("id").values_list( + "id", "label", "meta_chapter_short" + ) + paginator = Paginator(icd_objs, 5000) + for page_number in paginator.page_range: + for diagnosis in paginator.page(page_number).object_list: + if re.match(DISEASE_CODE_PATTERN, diagnosis[1]): + ICD11( + id=diagnosis[0], + label=diagnosis[1], + chapter=diagnosis[2] or "", + ).save() + Migrator().run() + print("Done") + + +def get_icd11_diagnosis_object_by_id( + diagnosis_id: int, as_dict=False +) -> ICD11 | ICD11Object | None: + try: + diagnosis = ICD11.get(diagnosis_id) + return diagnosis.get_representation() if as_dict else diagnosis + except RedisModelNotFoundError: + return None + + +def get_icd11_diagnoses_objects_by_ids(diagnoses_ids: list[int]) -> list[ICD11Object]: + if not diagnoses_ids: + return [] + + query = None + for diagnosis_id in diagnoses_ids: + if query is None: + query = ICD11.id == diagnosis_id + else: + query |= ICD11.id == diagnosis_id + + diagnosis_objects: list[ICD11] = list(ICD11.find(query)) + return [diagnosis.get_representation() for diagnosis in diagnosis_objects] diff --git a/care/facility/static_data/medibase.py b/care/facility/static_data/medibase.py index fb5718c3df..44f8935d18 100644 --- a/care/facility/static_data/medibase.py +++ b/care/facility/static_data/medibase.py @@ -1,12 +1,55 @@ +from typing import TypedDict + +from django.core.paginator import Paginator from django.db.models import CharField, TextField, Value from django.db.models.functions import Coalesce +from redis_om import Field, Migrator + +from care.facility.models.prescription import MedibaseMedicine as MedibaseMedicineModel +from 
care.utils.static_data.models.base import BaseRedisModel + + +class MedibaseMedicineObject(TypedDict): + id: str + name: str + type: str + generic: str + company: str + contents: str + cims_class: str + atc_classification: str + -from care.facility.models.prescription import MedibaseMedicine +class MedibaseMedicine(BaseRedisModel): + id: str = Field(primary_key=True) + name: str = Field(index=True) + type: str = Field(index=True) + generic: str + company: str + contents: str + cims_class: str + atc_classification: str + vec: str = Field(index=True, full_text_search=True) -def load_medibase_in_memory(): - return ( - MedibaseMedicine.objects.all() + def get_representation(self) -> MedibaseMedicineObject: + return { + "id": self.id, + "name": self.name, + "type": self.type, + "generic": self.generic, + "company": self.company, + "contents": self.contents, + "cims_class": self.cims_class, + "atc_classification": self.atc_classification, + } + + +def load_medibase_medicines(): + print("Loading Medibase Medicines into the redis cache...", end="", flush=True) + + medibase_objects = ( + MedibaseMedicineModel.objects.order_by("external_id") .annotate( generic_pretty=Coalesce("generic", Value(""), output_field=CharField()), company_pretty=Coalesce("company", Value(""), output_field=CharField()), @@ -29,6 +72,19 @@ def load_medibase_in_memory(): "atc_classification_pretty", ) ) - - -MedibaseMedicineTable = load_medibase_in_memory() + paginator = Paginator(medibase_objects, 5000) + for page_number in paginator.page_range: + for medicine in paginator.page(page_number).object_list: + MedibaseMedicine( + id=str(medicine[0]), + name=medicine[1], + type=medicine[2], + generic=medicine[3], + company=medicine[4], + contents=medicine[5], + cims_class=medicine[6], + atc_classification=medicine[7], + vec=f"{medicine[1]} {medicine[3]} {medicine[4]}", + ).save() + Migrator().run() + print("Done") diff --git a/care/facility/tasks/__init__.py b/care/facility/tasks/__init__.py index 
6231b4f9a3..05644417d7 100644 --- a/care/facility/tasks/__init__.py +++ b/care/facility/tasks/__init__.py @@ -4,6 +4,7 @@ from care.facility.tasks.asset_monitor import check_asset_status from care.facility.tasks.cleanup import delete_old_notifications from care.facility.tasks.plausible_stats import capture_goals +from care.facility.tasks.redis_index import load_redis_index from care.facility.tasks.summarisation import ( summarise_district_patient, summarise_facility_capacity, @@ -55,3 +56,8 @@ def setup_periodic_tasks(sender, **kwargs): capture_goals.s(), name="capture_goals", ) + sender.add_periodic_task( + crontab(hour="*", minute="0"), + load_redis_index.s(), + name="load_redis_index", + ) diff --git a/care/facility/tasks/redis_index.py b/care/facility/tasks/redis_index.py new file mode 100644 index 0000000000..68bb5c6f59 --- /dev/null +++ b/care/facility/tasks/redis_index.py @@ -0,0 +1,32 @@ +from logging import Logger + +from celery import shared_task +from celery.utils.log import get_task_logger +from django.core.cache import cache + +from care.facility.static_data.icd11 import load_icd11_diagnosis +from care.facility.static_data.medibase import load_medibase_medicines +from care.hcx.static_data.pmjy_packages import load_pmjy_packages +from care.utils.static_data.models.base import index_exists + +logger: Logger = get_task_logger(__name__) + + +@shared_task +def load_redis_index(): + if cache.get("redis_index_loading"): + logger.info("Redis Index already loading, skipping") + return + + cache.set("redis_index_loading", True, timeout=60 * 2) + logger.info("Loading Redis Index") + if index_exists(): + logger.info("Index already exists, skipping") + return + + load_icd11_diagnosis() + load_medibase_medicines() + load_pmjy_packages() + + cache.delete("redis_index_loading") + logger.info("Redis Index Loaded") diff --git a/care/facility/tests/test_icd11_search.py b/care/facility/tests/test_icd11_search.py index 933ed1b74e..582eac2fb5 100644 --- 
a/care/facility/tests/test_icd11_search.py +++ b/care/facility/tests/test_icd11_search.py @@ -29,10 +29,10 @@ def test_search_with_disease_code(self): res = self.search_icd11("aCuTe radiodermatitis following radiotherapy") self.assertContains(res, "EL60 Acute radiodermatitis following radiotherapy") - res = self.search_icd11("cutaneous insect bite reactions") + res = self.search_icd11("cutaneous reactions") self.assertContains(res, "EK50.0 Cutaneous insect bite reactions") - res = self.search_icd11("Haemorrhage of anus and rectum") + res = self.search_icd11("Haemorrhage rectum") self.assertContains(res, "ME24.A1 Haemorrhage of anus and rectum") res = self.search_icd11("ME24.A1") diff --git a/care/facility/utils/reports/discharge_summary.py b/care/facility/utils/reports/discharge_summary.py index 0a8037ab0d..48af9e6c66 100644 --- a/care/facility/utils/reports/discharge_summary.py +++ b/care/facility/utils/reports/discharge_summary.py @@ -25,6 +25,7 @@ ACTIVE_CONDITION_VERIFICATION_STATUSES, ConditionVerificationStatus, ) +from care.facility.static_data.icd11 import get_icd11_diagnoses_objects_by_ids from care.hcx.models.policy import Policy logger = logging.getLogger(__name__) @@ -49,8 +50,6 @@ def clear_lock(consultation_ext_id: str): def get_diagnoses_data(consultation: PatientConsultation): - from care.facility.static_data.icd11 import get_icd11_diagnoses_objects_by_ids - entries = ( consultation.diagnoses.filter( verification_status__in=ACTIVE_CONDITION_VERIFICATION_STATUSES diff --git a/care/hcx/api/viewsets/gateway.py b/care/hcx/api/viewsets/gateway.py index 5363fd427e..a236df222d 100644 --- a/care/hcx/api/viewsets/gateway.py +++ b/care/hcx/api/viewsets/gateway.py @@ -1,10 +1,10 @@ import json from datetime import datetime -from re import IGNORECASE, search from uuid import uuid4 as uuid from django.db.models import Q from drf_spectacular.utils import extend_schema +from redis_om import FindQuery from rest_framework import status from rest_framework.decorators 
import action from rest_framework.permissions import IsAuthenticated @@ -14,6 +14,7 @@ from care.facility.models.file_upload import FileUpload from care.facility.models.icd11_diagnosis import ConditionVerificationStatus from care.facility.models.patient_consultation import PatientConsultation +from care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id from care.facility.utils.reports.discharge_summary import ( generate_discharge_report_signed_url, ) @@ -35,10 +36,12 @@ from care.hcx.models.claim import Claim from care.hcx.models.communication import Communication from care.hcx.models.policy import Policy +from care.hcx.static_data.pmjy_packages import PMJYPackage from care.hcx.utils.fhir import Fhir from care.hcx.utils.hcx import Hcx from care.hcx.utils.hcx.operations import HcxOperations from care.utils.queryset.communications import get_communications +from care.utils.static_data.helpers import query_builder class HcxGatewayViewSet(GenericViewSet): @@ -105,8 +108,6 @@ def check_eligibility(self, request): @extend_schema(tags=["hcx"], request=MakeClaimSerializer()) @action(detail=False, methods=["post"]) def make_claim(self, request): - from care.facility.static_data.icd11 import get_icd11_diagnosis_object_by_id - data = request.data serializer = MakeClaimSerializer(data=data) @@ -319,34 +320,23 @@ def payors(self, request): return Response(response, status=status.HTTP_200_OK) + def serialize_data(self, objects: list[PMJYPackage]): + return [package.get_representation() for package in objects] + @extend_schema(tags=["hcx"]) @action(detail=False, methods=["get"]) def pmjy_packages(self, request): - from care.hcx.static_data.pmjy_packages import PMJYPackages - - def serailize_data(pmjy_packages): - result = [] - for pmjy_package in pmjy_packages: - if type(pmjy_package) == tuple: - pmjy_package = pmjy_package[0] - result.append( - { - "code": pmjy_package.code, - "name": pmjy_package.name, - "price": pmjy_package.price, - "package_name": 
pmjy_package.package_name, - } - ) - return result - - queryset = PMJYPackages - limit = request.GET.get("limit", 20) - if request.GET.get("query", False): - query = request.GET.get("query") - queryset = queryset.where( - lambda row: search(r".*" + query + r".*", row.name, IGNORECASE) - is not None - or search(r".*" + query + r".*", row.package_name, IGNORECASE) - is not None - ) - return Response(serailize_data(queryset[:limit])) + try: + limit = min(int(request.query_params.get("limit")), 20) + except (ValueError, TypeError): + limit = 20 + + query = [] + if q := request.query_params.get("query"): + query.append(PMJYPackage.vec % query_builder(q)) + + results = FindQuery(expressions=query, model=PMJYPackage, limit=limit).execute( + exhaust_results=False + ) + + return Response(self.serialize_data(results)) diff --git a/care/hcx/static_data/pmjy_packages.py b/care/hcx/static_data/pmjy_packages.py index 1520b44808..be20f9fc60 100644 --- a/care/hcx/static_data/pmjy_packages.py +++ b/care/hcx/static_data/pmjy_packages.py @@ -1,33 +1,46 @@ import json - -from littletable import Table - - -def fetch_data(): - with open("data/pmjy_packages.json", "r") as json_file: - return json.load(json_file) - - -PMJYPackages = Table("PMJYPackages") -pmjy_packages = fetch_data() - -IGNORE_FIELDS = [ - "hbp_code", - "specialty", - "package_code", - "stratification", - "implant", -] - -for pmjy_package in pmjy_packages: - for field in IGNORE_FIELDS: - pmjy_package.pop(field, "") - - pmjy_package["code"] = pmjy_package.pop("procedure_code") - pmjy_package["name"] = pmjy_package.pop("procedure_label") - pmjy_package["price"] = pmjy_package.pop("procedure_price") - PMJYPackages.insert(pmjy_package) - -PMJYPackages.create_search_index("name") -PMJYPackages.create_search_index("package_name") -PMJYPackages.create_index("code", unique=True) +from typing import TypedDict + +from redis_om import Field, Migrator + +from care.utils.static_data.models.base import BaseRedisModel + + +class 
PMJYPackageObject(TypedDict): + code: str + name: str + price: str + package_name: str + + +class PMJYPackage(BaseRedisModel): + code: str = Field(primary_key=True) + name: str + price: str + package_name: str + vec: str = Field(index=True, full_text_search=True) + + def get_representation(self) -> PMJYPackageObject: + return { + "code": self.code, + "name": self.name, + "price": self.price, + "package_name": self.package_name, + } + + +def load_pmjy_packages(): + print("Loading PMJY Packages into the redis cache...", end="", flush=True) + with open("data/pmjy_packages.json", "r") as f: + pmjy_packages = json.load(f) + for package in pmjy_packages: + PMJYPackage( + code=package["procedure_code"], + name=package["procedure_label"], + price=package["procedure_price"], + package_name=package["package_name"], + vec=f"{package['procedure_label']} {package['package_name']}", + ).save() + + Migrator().run() + print("Done") diff --git a/care/utils/static_data/__init__.py b/care/utils/static_data/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/care/utils/static_data/helpers.py b/care/utils/static_data/helpers.py new file mode 100644 index 0000000000..6c0f1c2567 --- /dev/null +++ b/care/utils/static_data/helpers.py @@ -0,0 +1,13 @@ +import re + +from redis_om.model.token_escaper import TokenEscaper + +token_escaper = TokenEscaper(re.compile(r"[,<>{}\[\]\\\"\':;!@#$%^&*()\-+=~\/ ]")) + + +def query_builder(query: str) -> str: + """ + Builds a query for redis full text search from a given query string. 
+ """ + words = query.strip().rstrip(".").rsplit(maxsplit=3) + return f"{'* '.join([token_escaper.escape(word) for word in words])}*" diff --git a/care/utils/static_data/models/__init__.py b/care/utils/static_data/models/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/care/utils/static_data/models/base.py b/care/utils/static_data/models/base.py new file mode 100644 index 0000000000..fe67958e48 --- /dev/null +++ b/care/utils/static_data/models/base.py @@ -0,0 +1,23 @@ +from abc import ABC + +from django.conf import settings +from redis_om import HashModel, get_redis_connection +from redis_om.model.migrations.migrator import schema_hash_key + + +class BaseRedisModel(HashModel, ABC): + class Meta: + database = get_redis_connection(url=settings.REDIS_URL) + global_key_prefix = "care_static_data" + + +def index_exists(model: HashModel = None): + """ + Checks the existence of a redisearch index. + If no model is passed, it checks for the existence of any index. + """ + + conn = get_redis_connection(url=settings.REDIS_URL) + if model: + return conn.exists(schema_hash_key(model.Meta.index_name)) + return len(conn.execute_command("FT._LIST")) diff --git a/config/exception_handler.py b/config/exception_handler.py new file mode 100644 index 0000000000..164eb9e65f --- /dev/null +++ b/config/exception_handler.py @@ -0,0 +1,20 @@ +from celery import current_app +from django.core.exceptions import ValidationError as DjangoValidationError +from redis.exceptions import ResponseError as RedisResponseError +from rest_framework.exceptions import APIException +from rest_framework.exceptions import ValidationError as DRFValidationError +from rest_framework.fields import get_error_detail +from rest_framework.views import exception_handler as drf_exception_handler + + +def exception_handler(exc, context): + if isinstance(exc, DjangoValidationError): + exc = DRFValidationError(detail={"detail": get_error_detail(exc)[0]}) + + elif isinstance(exc, 
RedisResponseError): + current_app.send_task("care.facility.tasks.redis_index.load_redis_index") + exc = APIException( + detail={"detail": "Something went wrong, please try after a few seconds."} + ) + + return drf_exception_handler(exc, context) diff --git a/config/settings/base.py b/config/settings/base.py index d34d8726bc..12cffac7ad 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -58,13 +58,16 @@ DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=0) DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +REDIS_URL = env("REDIS_URL", default="redis://localhost:6379") + # CACHES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#caches CACHES = { "default": { "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": env("REDIS_URL", default="redis://localhost:6379"), + "LOCATION": REDIS_URL, "OPTIONS": { "CLIENT_CLASS": "django_redis.client.DefaultClient", # Mimicing memcache behavior. 
@@ -329,6 +332,7 @@ "PAGE_SIZE": 14, "SEARCH_PARAM": "search_text", "DEFAULT_SCHEMA_CLASS": "care.utils.schema.AutoSchema", + "EXCEPTION_HANDLER": "config.exception_handler.exception_handler", } # drf-spectacular (schema generation) diff --git a/config/urls.py b/config/urls.py index 46f42e4eca..9b76c36bbf 100644 --- a/config/urls.py +++ b/config/urls.py @@ -30,10 +30,11 @@ from config.health_views import MiddlewareAuthenticationVerifyView from .auth_views import AnnotatedTokenVerifyView, TokenObtainPairView, TokenRefreshView -from .views import home_view +from .views import home_view, ping urlpatterns = [ path("", home_view, name="home"), + path("ping/", ping, name="ping"), # Django Admin, use {% url 'admin:index' %} path(settings.ADMIN_URL, admin.site.urls), # Rest API diff --git a/config/views.py b/config/views.py index 4d7d7dab44..228ed1fe62 100644 --- a/config/views.py +++ b/config/views.py @@ -1,5 +1,10 @@ +from django.http import JsonResponse from django.shortcuts import render def home_view(request): return render(request, "pages/home.html") + + +def ping(request): + return JsonResponse({"status": "OK"}) diff --git a/config/wsgi.py b/config/wsgi.py index 9827d177fd..835d5c2ece 100644 --- a/config/wsgi.py +++ b/config/wsgi.py @@ -36,5 +36,3 @@ # Apply WSGI middleware here. 
# from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) - -from care.facility.static_data.medibase import MedibaseMedicineTable # noqa diff --git a/docker-compose.local.yaml b/docker-compose.local.yaml index 7a1bb41142..27d2a2ff63 100644 --- a/docker-compose.local.yaml +++ b/docker-compose.local.yaml @@ -25,7 +25,6 @@ services: entrypoint: [ "bash", "scripts/celery-dev.sh" ] depends_on: - db - - backend - redis volumes: - .:/app diff --git a/docker-compose.pre-built.yaml b/docker-compose.pre-built.yaml index f9cbf9b503..8afcf4bdf0 100644 --- a/docker-compose.pre-built.yaml +++ b/docker-compose.pre-built.yaml @@ -1,7 +1,6 @@ version: '3.4' services: - backend: image: "ghcr.io/coronasafe/care:latest" env_file: @@ -20,7 +19,6 @@ services: entrypoint: [ "bash", "celery_worker-ecs.sh" ] depends_on: - db - - backend - redis celery-beat: @@ -30,5 +28,4 @@ services: entrypoint: [ "bash", "celery_beat-ecs.sh" ] depends_on: - db - - backend - redis diff --git a/docker-compose.yaml b/docker-compose.yaml index 7beb1c9b5f..2972849920 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -14,7 +14,7 @@ services: - postgres-data:/var/lib/postgresql/data redis: - image: redis:alpine + image: redis/redis-stack-server:6.2.6-v10 restart: always volumes: - redis-data:/data diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile index 5b41816bfe..f71f5f6464 100644 --- a/docker/dev.Dockerfile +++ b/docker/dev.Dockerfile @@ -8,7 +8,7 @@ ENV PATH /venv/bin:$PATH RUN apt-get update && apt-get install --no-install-recommends -y \ build-essential libjpeg-dev zlib1g-dev \ - libpq-dev gettext wget gnupg chromium \ + libpq-dev gettext wget curl gnupg chromium \ && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ && rm -rf /var/lib/apt/lists/* @@ -21,4 +21,11 @@ RUN pipenv install --system --categories "packages dev-packages" COPY . 
/app +HEALTHCHECK \ + --interval=10s \ + --timeout=5s \ + --start-period=10s \ + --retries=12 \ + CMD ["/app/scripts/healthcheck.sh"] + WORKDIR /app diff --git a/docker/prod.Dockerfile b/docker/prod.Dockerfile index 3f8e2bf6d3..a6e7d8709b 100644 --- a/docker/prod.Dockerfile +++ b/docker/prod.Dockerfile @@ -41,7 +41,7 @@ ENV PATH /venv/bin:$PATH WORKDIR ${APP_HOME} RUN apt-get update && apt-get install --no-install-recommends -y \ - libpq-dev gettext wget gnupg chromium \ + libpq-dev gettext wget curl gnupg chromium \ && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \ && rm -rf /var/lib/apt/lists/* @@ -50,6 +50,13 @@ COPY --from=builder /venv /venv COPY --chmod=0755 ./scripts/*.sh ./ +HEALTHCHECK \ + --interval=30s \ + --timeout=5s \ + --start-period=10s \ + --retries=6 \ + CMD ["/app/healthcheck.sh"] + COPY . ${APP_HOME} EXPOSE 9000 diff --git a/scripts/celery-dev.sh b/scripts/celery-dev.sh index 97289f7185..4e1c0ad22c 100755 --- a/scripts/celery-dev.sh +++ b/scripts/celery-dev.sh @@ -1,5 +1,37 @@ #!/bin/bash +printf "celery" >> /tmp/container-role + +if [ -z "${DATABASE_URL}" ]; then + export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" +fi + +postgres_ready() { +python << END +import sys + +import psycopg + +try: + psycopg.connect(conninfo="${DATABASE_URL}") +except psycopg.OperationalError as e: + print(e) + sys.exit(-1) +sys.exit(0) + +END +} + +until postgres_ready; do + >&2 echo 'Waiting for PostgreSQL to become available...' 
+ sleep 1 +done +>&2 echo 'PostgreSQL is available' + +python manage.py migrate --noinput +python manage.py load_redis_index + + watchmedo \ auto-restart --directory=./ --pattern=*.py --recursive -- \ celery --workdir="/app" -A config.celery_app worker -B --loglevel=INFO diff --git a/scripts/celery_beat-ecs.sh b/scripts/celery_beat-ecs.sh index 16d6dd8825..936814b294 100755 --- a/scripts/celery_beat-ecs.sh +++ b/scripts/celery_beat-ecs.sh @@ -1,4 +1,5 @@ #!/bin/bash +printf "celery-beat" >> /tmp/container-role if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" @@ -27,5 +28,7 @@ done >&2 echo 'PostgreSQL is available' python manage.py migrate --noinput +python manage.py load_redis_index + celery --app=config.celery_app beat --loglevel=info diff --git a/scripts/celery_beat.sh b/scripts/celery_beat.sh index 5bd97ebcbf..c3eb660fa5 100755 --- a/scripts/celery_beat.sh +++ b/scripts/celery_beat.sh @@ -1,4 +1,5 @@ #!/bin/bash +printf "celery-beat" >> /tmp/container-role if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" @@ -27,6 +28,8 @@ done >&2 echo 'PostgreSQL is available' python manage.py migrate --noinput +python manage.py load_redis_index + export NEW_RELIC_CONFIG_FILE=/etc/newrelic.ini newrelic-admin run-program celery --app=config.celery_app beat --loglevel=info diff --git a/scripts/celery_worker-ecs.sh b/scripts/celery_worker-ecs.sh index 6d0e19cab3..701378b461 100755 --- a/scripts/celery_worker-ecs.sh +++ b/scripts/celery_worker-ecs.sh @@ -1,6 +1,9 @@ #!/bin/bash +printf "celery-worker" >> /tmp/container-role if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" fi + + celery --app=config.celery_app worker --max-tasks-per-child=6 --loglevel=info diff --git 
a/scripts/celery_worker.sh b/scripts/celery_worker.sh index 1dbd467339..bc291f737e 100755 --- a/scripts/celery_worker.sh +++ b/scripts/celery_worker.sh @@ -1,7 +1,10 @@ #!/bin/bash +printf "celery-worker" >> /tmp/container-role if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" fi + + export NEW_RELIC_CONFIG_FILE=/etc/newrelic.ini newrelic-admin run-program celery --app=config.celery_app worker --max-tasks-per-child=6 --loglevel=info diff --git a/scripts/healthcheck.sh b/scripts/healthcheck.sh new file mode 100755 index 0000000000..be1d3a6b02 --- /dev/null +++ b/scripts/healthcheck.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +CONTAINER_ROLE=$(cat /tmp/container-role) +if [[ "$CONTAINER_ROLE" == api* ]]; then + curl -fsS http://localhost:9000/ping/ || exit 1 +elif [[ "$CONTAINER_ROLE" == celery* ]]; then + celery -A config.celery_app inspect ping -d celery@$HOSTNAME || exit 1 +fi diff --git a/scripts/start-dev.sh b/scripts/start-dev.sh index 555a4ecfa7..f79ff45a20 100755 --- a/scripts/start-dev.sh +++ b/scripts/start-dev.sh @@ -1,16 +1,16 @@ #!/usr/bin/env bash set -euo pipefail -cd /app +printf "api" >> /tmp/container-role -echo "running migrations..." -python manage.py migrate +cd /app echo "running collectstatic..." python manage.py collectstatic --noinput echo "starting server..." if [[ "${DJANGO_DEBUG,,}" == "true" ]]; then + echo "waiting for debugger..." 
python -m debugpy --wait-for-client --listen 0.0.0.0:9876 manage.py runserver_plus 0.0.0.0:9000 else python manage.py runserver 0.0.0.0:9000 diff --git a/scripts/start-ecs.sh b/scripts/start-ecs.sh index 8dac8788db..ac9fb2c745 100755 --- a/scripts/start-ecs.sh +++ b/scripts/start-ecs.sh @@ -4,6 +4,8 @@ set -o errexit set -o pipefail set -o nounset +printf "api" >> /tmp/container-role + if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" fi diff --git a/scripts/start.sh b/scripts/start.sh index d072a6d4b7..d660d8ec9c 100755 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -4,6 +4,8 @@ set -o errexit set -o pipefail set -o nounset +printf "api" >> /tmp/container-role + if [ -z "${DATABASE_URL}" ]; then export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}" fi