From 7d0dd826f72973a856c0bdcd9fd5c495eb3b554c Mon Sep 17 00:00:00 2001
From: Aakash Singh
Date: Wed, 17 Jan 2024 16:39:21 +0530
Subject: [PATCH] Fix bucket configurations (#1826)

* fix bucket configurations

* add type annotations
---
 care/facility/api/serializers/facility.py    | 11 ++--
 care/facility/api/serializers/file_upload.py |  2 +-
 care/facility/models/facility.py             |  2 +-
 care/facility/models/file_upload.py          | 37 ++++++-----
 care/utils/csp/config.py                     | 66 ++++++++++++--------
 config/settings/base.py                      | 52 +++++++--------
 config/settings/local.py                     |  3 -
 docker-compose.yaml                          |  8 ++-
 docker/.local.env                            |  5 ++
 docker/.prebuilt.env                         |  6 ++
 docker/awslocal/bucket-setup.sh              |  3 +-
 11 files changed, 114 insertions(+), 81 deletions(-)
 mode change 100644 => 100755 docker/awslocal/bucket-setup.sh

diff --git a/care/facility/api/serializers/facility.py b/care/facility/api/serializers/facility.py
index a7ac3820ed..73779bdfd9 100644
--- a/care/facility/api/serializers/facility.py
+++ b/care/facility/api/serializers/facility.py
@@ -1,5 +1,4 @@
 import boto3
-from django.conf import settings
 from django.contrib.auth import get_user_model
 from rest_framework import serializers
 
@@ -13,7 +12,7 @@
     StateSerializer,
     WardSerializer,
 )
-from care.utils.csp import config as cs_provider
+from care.utils.csp.config import BucketType, get_client_config
 from config.serializers import ChoiceField
 from config.validators import MiddlewareDomainAddressValidator
 
@@ -166,13 +165,11 @@ def save(self, **kwargs):
         facility = self.instance
         image = self.validated_data["cover_image"]
         image_extension = image.name.rsplit(".", 1)[-1]
-        s3 = boto3.client(
-            "s3",
-            **cs_provider.get_client_config(cs_provider.BucketType.FACILITY.value),
-        )
+        config, bucket_name = get_client_config(BucketType.FACILITY, True)
+        s3 = boto3.client("s3", **config)
         image_location = f"cover_images/{facility.external_id}_cover.{image_extension}"
         s3.put_object(
-            Bucket=settings.FACILITY_S3_BUCKET,
+            Bucket=bucket_name,
             Key=image_location,
             Body=image.file,
         )
diff --git a/care/facility/api/serializers/file_upload.py b/care/facility/api/serializers/file_upload.py
index c97fd76ab2..962688da7a 100644
--- a/care/facility/api/serializers/file_upload.py
+++ b/care/facility/api/serializers/file_upload.py
@@ -140,7 +140,7 @@ def create(self, validated_data):
         validated_data["uploaded_by"] = user
         validated_data["internal_name"] = validated_data["original_name"]
         del validated_data["original_name"]
-        file_upload = super().create(validated_data)
+        file_upload: FileUpload = super().create(validated_data)
         file_upload.signed_url = file_upload.signed_url(mime_type=mime_type)
         return file_upload
 
diff --git a/care/facility/models/facility.py b/care/facility/models/facility.py
index 2a041f291c..fb3eaff08c 100644
--- a/care/facility/models/facility.py
+++ b/care/facility/models/facility.py
@@ -175,7 +175,7 @@ class Meta:
 
     def read_cover_image_url(self):
         if self.cover_image_url:
-            return f"{settings.FACILITY_S3_STATIC_PREFIX}/{self.cover_image_url}"
+            return f"{settings.FACILITY_S3_BUCKET_EXTERNAL_ENDPOINT}/{settings.FACILITY_S3_BUCKET}/{self.cover_image_url}"
         return None
 
     def __str__(self):
diff --git a/care/facility/models/file_upload.py b/care/facility/models/file_upload.py
index 78153a9151..584cda5e05 100644
--- a/care/facility/models/file_upload.py
+++ b/care/facility/models/file_upload.py
@@ -3,12 +3,11 @@
 from uuid import uuid4
 
 import boto3
-from django.conf import settings
 from django.db import models
 
 from care.facility.models import FacilityBaseModel
 from care.users.models import User
-from care.utils.csp import config as cs_provider
+from care.utils.csp.config import BucketType, get_client_config
 
 
 class FileUpload(FacilityBaseModel):
@@ -83,44 +82,50 @@ def save(self, *args, **kwargs):
             self.internal_name = internal_name
         return super().save(*args, **kwargs)
 
-    def signed_url(self, duration=60 * 60, mime_type=None):
-        s3Client = boto3.client("s3", **cs_provider.get_client_config())
+    def signed_url(
+        self, duration=60 * 60, mime_type=None, bucket_type=BucketType.PATIENT
+    ):
+        config, bucket_name = get_client_config(bucket_type, True)
+        s3 = boto3.client("s3", **config)
         params = {
-            "Bucket": settings.FILE_UPLOAD_BUCKET,
+            "Bucket": bucket_name,
             "Key": f"{self.FileType(self.file_type).name}/{self.internal_name}",
         }
         if mime_type:
             params["ContentType"] = mime_type
-        return s3Client.generate_presigned_url(
+        return s3.generate_presigned_url(
             "put_object",
             Params=params,
             ExpiresIn=duration,  # seconds
         )
 
-    def read_signed_url(self, duration=60 * 60):
-        s3Client = boto3.client("s3", **cs_provider.get_client_config())
-        return s3Client.generate_presigned_url(
+    def read_signed_url(self, duration=60 * 60, bucket_type=BucketType.PATIENT):
+        config, bucket_name = get_client_config(bucket_type, True)
+        s3 = boto3.client("s3", **config)
+        return s3.generate_presigned_url(
             "get_object",
             Params={
-                "Bucket": settings.FILE_UPLOAD_BUCKET,
+                "Bucket": bucket_name,
                 "Key": f"{self.FileType(self.file_type).name}/{self.internal_name}",
             },
             ExpiresIn=duration,  # seconds
         )
 
-    def put_object(self, file, bucket=settings.FILE_UPLOAD_BUCKET, **kwargs):
-        s3 = boto3.client("s3", **cs_provider.get_client_config())
+    def put_object(self, file, bucket_type=BucketType.PATIENT, **kwargs):
+        config, bucket_name = get_client_config(bucket_type)
+        s3 = boto3.client("s3", **config)
         return s3.put_object(
             Body=file,
-            Bucket=bucket,
+            Bucket=bucket_name,
             Key=f"{self.FileType(self.file_type).name}/{self.internal_name}",
             **kwargs,
         )
 
-    def get_object(self, bucket=settings.FILE_UPLOAD_BUCKET, **kwargs):
-        s3 = boto3.client("s3", **cs_provider.get_client_config())
+    def get_object(self, bucket_type=BucketType.PATIENT, **kwargs):
+        config, bucket_name = get_client_config(bucket_type)
+        s3 = boto3.client("s3", **config)
         return s3.get_object(
-            Bucket=bucket,
+            Bucket=bucket_name,
             Key=f"{self.FileType(self.file_type).name}/{self.internal_name}",
             **kwargs,
         )
diff --git a/care/utils/csp/config.py b/care/utils/csp/config.py
index 0f6673c3ad..dbd145b536 100644
--- a/care/utils/csp/config.py
+++ b/care/utils/csp/config.py
@@ -1,12 +1,24 @@
 import enum
+from typing import TypeAlias, TypedDict
 
 from django.conf import settings
 
 
+class ClientConfig(TypedDict):
+    region_name: str
+    aws_access_key_id: str
+    aws_secret_access_key: str
+    endpoint_url: str
+
+
+BucketName: TypeAlias = str
+
+
 class CSProvider(enum.Enum):
     AWS = "AWS"
     GCP = "GCP"
-    AZURE = "AZURE"
+    DOCKER = "DOCKER"  # localstack in docker
+    LOCAL = "LOCAL"  # localstack on host
 
 
 class BucketType(enum.Enum):
@@ -14,27 +26,31 @@ class BucketType(enum.Enum):
     FACILITY = "FACILITY"
 
 
-DEFAULT = CSProvider.AWS.value
-
-
-def get_client_config(bucket_type=BucketType.PATIENT.value):
-    config = {
-        BucketType.PATIENT.value: {
-            "region_name": settings.CLOUD_REGION,
-            "aws_access_key_id": settings.FILE_UPLOAD_KEY,
-            "aws_secret_access_key": settings.FILE_UPLOAD_SECRET,
-            "endpoint_url": settings.FILE_UPLOAD_BUCKET_ENDPOINT,
-        },
-        BucketType.FACILITY.value: {
-            "region_name": settings.CLOUD_REGION,
-            "aws_access_key_id": settings.FACILITY_S3_KEY,
-            "aws_secret_access_key": settings.FACILITY_S3_SECRET,
-            "endpoint_url": settings.FACILITY_S3_BUCKET_ENDPOINT,
-        },
-    }
-
-    if settings.CLOUD_PROVIDER == CSProvider.GCP.value:
-        for key in config.keys():
-            config[key]["endpoint_url"] = "https://storage.googleapis.com"
-
-    return config[bucket_type]
+def get_facility_bucket_config(external) -> tuple[ClientConfig, BucketName]:
+    return {
+        "region_name": settings.FACILITY_S3_REGION,
+        "aws_access_key_id": settings.FACILITY_S3_KEY,
+        "aws_secret_access_key": settings.FACILITY_S3_SECRET,
+        "endpoint_url": settings.FACILITY_S3_BUCKET_EXTERNAL_ENDPOINT
+        if external
+        else settings.FACILITY_S3_BUCKET_ENDPOINT,
+    }, settings.FACILITY_S3_BUCKET
+
+
+def get_patient_bucket_config(external) -> tuple[ClientConfig, BucketName]:
+    return {
+        "region_name": settings.FILE_UPLOAD_REGION,
+        "aws_access_key_id": settings.FILE_UPLOAD_KEY,
+        "aws_secret_access_key": settings.FILE_UPLOAD_SECRET,
+        "endpoint_url": settings.FILE_UPLOAD_BUCKET_EXTERNAL_ENDPOINT
+        if external
+        else settings.FILE_UPLOAD_BUCKET_ENDPOINT,
+    }, settings.FILE_UPLOAD_BUCKET
+
+
+def get_client_config(bucket_type: BucketType, external=False):
+    if bucket_type == BucketType.FACILITY:
+        return get_facility_bucket_config(external=external)
+    elif bucket_type == BucketType.PATIENT:
+        return get_patient_bucket_config(external=external)
+    raise ValueError("Invalid Bucket Type")
diff --git a/config/settings/base.py b/config/settings/base.py
index c1fa82a09c..d34d8726bc 100644
--- a/config/settings/base.py
+++ b/config/settings/base.py
@@ -484,29 +484,30 @@
 # Cloud and Buckets
 # ------------------------------------------------------------------------------
 
-CLOUD_PROVIDER = env("CLOUD_PROVIDER", default="aws").upper()
-CLOUD_REGION = env("CLOUD_REGION", default="ap-south-1")
-if CLOUD_PROVIDER not in csp_config.CSProvider.__members__:
-    print(f"Warning Invalid CSP Found! {CLOUD_PROVIDER}")
+BUCKET_PROVIDER = env("BUCKET_PROVIDER", default="aws").upper()
+BUCKET_REGION = env("BUCKET_REGION", default="ap-south-1")
+BUCKET_KEY = env("BUCKET_KEY", default="")
+BUCKET_SECRET = env("BUCKET_SECRET", default="")
+BUCKET_ENDPOINT = env("BUCKET_ENDPOINT", default="")
+BUCKET_EXTERNAL_ENDPOINT = env("BUCKET_EXTERNAL_ENDPOINT", default=BUCKET_ENDPOINT)
 
-if USE_S3 := env.bool("USE_S3", default=False):
-    # aws settings
-    AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID")
-    AWS_SECRET_ACCESS_KEY = env("AWS_SECRET_ACCESS_KEY")
-    AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
-    AWS_DEFAULT_ACL = "public-read"
-    AWS_S3_CUSTOM_DOMAIN = f"{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com"
-    AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
+if BUCKET_PROVIDER not in csp_config.CSProvider.__members__:
+    print(f"Warning Invalid CSP Found! {BUCKET_PROVIDER}")
{BUCKET_PROVIDER}") FILE_UPLOAD_BUCKET = env("FILE_UPLOAD_BUCKET", default="") -# FILE_UPLOAD_REGION = env("FILE_UPLOAD_REGION", default="care-patient-staging") -FILE_UPLOAD_KEY = env("FILE_UPLOAD_KEY", default="") -FILE_UPLOAD_SECRET = env("FILE_UPLOAD_SECRET", default="") +FILE_UPLOAD_REGION = env("FILE_UPLOAD_REGION", default=BUCKET_REGION) +FILE_UPLOAD_KEY = env("FILE_UPLOAD_KEY", default=BUCKET_KEY) +FILE_UPLOAD_SECRET = env("FILE_UPLOAD_SECRET", default=BUCKET_SECRET) FILE_UPLOAD_BUCKET_ENDPOINT = env( - "FILE_UPLOAD_BUCKET_ENDPOINT", - default=f"https://{FILE_UPLOAD_BUCKET}.s3.amazonaws.com", + "FILE_UPLOAD_BUCKET_ENDPOINT", default=BUCKET_ENDPOINT +) +FILE_UPLOAD_BUCKET_EXTERNAL_ENDPOINT = env( + "FILE_UPLOAD_BUCKET_EXTERNAL_ENDPOINT", + default=BUCKET_EXTERNAL_ENDPOINT + if BUCKET_ENDPOINT + else FILE_UPLOAD_BUCKET_ENDPOINT, ) ALLOWED_MIME_TYPES = env.list( @@ -549,16 +550,17 @@ ) FACILITY_S3_BUCKET = env("FACILITY_S3_BUCKET", default="") -FACILITY_S3_REGION = env("FACILITY_S3_REGION_CODE", default="ap-south-1") -FACILITY_S3_KEY = env("FACILITY_S3_KEY", default="") -FACILITY_S3_SECRET = env("FACILITY_S3_SECRET", default="") +FACILITY_S3_REGION = env("FACILITY_S3_REGION_CODE", default=BUCKET_REGION) +FACILITY_S3_KEY = env("FACILITY_S3_KEY", default=BUCKET_KEY) +FACILITY_S3_SECRET = env("FACILITY_S3_SECRET", default=BUCKET_SECRET) FACILITY_S3_BUCKET_ENDPOINT = env( - "FACILITY_S3_BUCKET_ENDPOINT", - default=f"https://{FACILITY_S3_BUCKET}.s3.{FACILITY_S3_REGION}.amazonaws.com", + "FACILITY_S3_BUCKET_ENDPOINT", default=BUCKET_ENDPOINT ) -FACILITY_S3_STATIC_PREFIX = env( - "FACILITY_S3_STATIC_PREFIX", - default=f"https://{FACILITY_S3_BUCKET}.s3.{FACILITY_S3_REGION}.amazonaws.com", +FACILITY_S3_BUCKET_EXTERNAL_ENDPOINT = env( + "FACILITY_S3_BUCKET_EXTERNAL_ENDPOINT", + default=BUCKET_EXTERNAL_ENDPOINT + if BUCKET_ENDPOINT + else FACILITY_S3_BUCKET_ENDPOINT, ) diff --git a/config/settings/local.py b/config/settings/local.py index 68692f9d07..9ead7719f1 100644 --- a/config/settings/local.py +++ b/config/settings/local.py @@ -48,6 +48,3 @@ RUNSERVER_PLUS_PRINT_SQL_TRUNCATE = 100000 DISABLE_RATELIMIT = True - -FILE_UPLOAD_BUCKET_ENDPOINT = "http://localstack:4566" -FACILITY_S3_BUCKET_ENDPOINT = "http://localstack:4566" diff --git a/docker-compose.yaml b/docker-compose.yaml index 5296ac4fc8..7beb1c9b5f 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -25,9 +25,13 @@ services: - AWS_DEFAULT_REGION=ap-south-1 - EDGE_PORT=4566 - SERVICES=s3 + - EXTRA_CORS_ALLOWED_ORIGINS=* + - EXTRA_CORS_ALLOWED_HEADERS=* volumes: - - "${TEMPDIR:-/tmp/localstack}:/tmp/localstack" - - "./docker/awslocal:/docker-entrypoint-initaws.d" + - "${TEMPDIR:-./care/media/localstack}:/var/lib/localstack" + - "./docker/awslocal:/etc/localstack/init/ready.d/" + ports: + - "4566:4566" fidelius: image: khavinshankar/fidelius:v1.0 diff --git a/docker/.local.env b/docker/.local.env index e755fb55b5..ccfaef5fba 100644 --- a/docker/.local.env +++ b/docker/.local.env @@ -9,5 +9,10 @@ CELERY_BROKER_URL=redis://redis:6379/0 DJANGO_DEBUG=False +BUCKET_REGION=ap-south-1 +BUCKET_KEY=key +BUCKET_SECRET=secret +BUCKET_ENDPOINT=http://localstack:4566 +BUCKET_EXTERNAL_ENDPOINT=http://localhost:4566 FILE_UPLOAD_BUCKET=patient-bucket FACILITY_S3_BUCKET=facility-bucket diff --git a/docker/.prebuilt.env b/docker/.prebuilt.env index f069cabc86..02267439a7 100644 --- a/docker/.prebuilt.env +++ b/docker/.prebuilt.env @@ -8,6 +8,12 @@ REDIS_URL=redis://redis:6379/0 CELERY_BROKER_URL=redis://redis:6379/0 
 DJANGO_SETTINGS_MODULE=config.settings.deployment
 DJANGO_DEBUG=False
+
+BUCKET_REGION=ap-south-1
+BUCKET_KEY=key
+BUCKET_SECRET=secret
+BUCKET_ENDPOINT=http://localstack:4566
+BUCKET_EXTERNAL_ENDPOINT=http://localhost:4566
 FILE_UPLOAD_BUCKET=patient-bucket
 FACILITY_S3_BUCKET=facility-bucket
diff --git a/docker/awslocal/bucket-setup.sh b/docker/awslocal/bucket-setup.sh
old mode 100644
new mode 100755
index 83874fe2e1..05025d1ed8
--- a/docker/awslocal/bucket-setup.sh
+++ b/docker/awslocal/bucket-setup.sh
@@ -1,4 +1,5 @@
-#!/usr/bin/env bash
+#!/usr/bin/sh
+
 set -x
 awslocal s3 mb s3://patient-bucket
 awslocal s3 mb s3://facility-bucket
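
Usage sketch (not part of the patch): a minimal example of how application code is expected to consume the reworked get_client_config helper introduced in care/utils/csp/config.py above, assuming the BUCKET_*/FILE_UPLOAD_* settings added in config/settings/base.py are configured. The function name example_presigned_read is illustrative only.

# Illustrative only -- not part of the commit above.
# Assumes the FILE_UPLOAD_* / BUCKET_* settings from config/settings/base.py are set.
import boto3

from care.utils.csp.config import BucketType, get_client_config


def example_presigned_read(key: str, duration: int = 60 * 60) -> str:
    # external=True selects the browser-facing endpoint, mirroring
    # FileUpload.read_signed_url() in the patch; the default (external=False)
    # selects the in-cluster endpoint used for direct put/get calls.
    config, bucket_name = get_client_config(BucketType.PATIENT, external=True)
    s3 = boto3.client("s3", **config)
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": bucket_name, "Key": key},
        ExpiresIn=duration,  # seconds
    )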