diff --git a/.env b/.env new file mode 100644 index 0000000..496cc8d --- /dev/null +++ b/.env @@ -0,0 +1,20 @@ +# Log level +LOG_LEVEL=debug + +# Environment +ENVIRONMENT=development + +# The engine URLs +ENGINE_URLS=["http://localhost:8080"] + +# The Service URL +SERVICE_URL="http://localhost:9292" + +# The maximum of tasks the service can process +MAX_TASKS=50 + +# The number of times the service tries to announce itself to the engine +ENGINE_ANNOUNCE_RETRIES=5 + +# The number of seconds between each retry +ENGINE_ANNOUNCE_RETRY_DELAY=3 diff --git a/.github/actions/build-and-push-docker-image-to-github/action.yml b/.github/actions/build-and-push-docker-image-to-github/action.yml new file mode 100644 index 0000000..8afff00 --- /dev/null +++ b/.github/actions/build-and-push-docker-image-to-github/action.yml @@ -0,0 +1,55 @@ +# Documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action +name: build_and_push_docker_image_to_github +description: Build and push Docker image to GitHub + +inputs: + docker-registry-url: + description: Docker registry URL + required: true + default: ghcr.io + docker-registry-username: + description: Docker registry username + required: true + docker-registry-password: + description: Docker registry password + required: true + docker-image-name: + description: Docker image name + required: true + docker-image-context: + description: Docker image context + required: true + default: . 
+ +outputs: + docker-image-tags: + description: "Docker image tags" + value: ${{ steps.meta.outputs.tags }} + +runs: + using: composite + steps: + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ${{ inputs.docker-registry-url }} + username: ${{ inputs.docker-registry-username }} + password: ${{ inputs.docker-registry-password }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v2 + with: + images: | + ${{ inputs.docker-registry-url }}/${{ inputs.docker-image-name }} + tags: | + type=raw,value=latest + type=sha + + - name: Build and push Docker image + uses: docker/build-push-action@v2 + with: + context: ${{ inputs.docker-image-context }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/actions/execute-command-on-kubernetes-cluster/action.yml b/.github/actions/execute-command-on-kubernetes-cluster/action.yml new file mode 100644 index 0000000..a742295 --- /dev/null +++ b/.github/actions/execute-command-on-kubernetes-cluster/action.yml @@ -0,0 +1,62 @@ +# Documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action +name: execute_command_on_kubernetes_cluster +description: Execute a command on a Kubernetes cluster + +inputs: + kubectl-binary-url: + description: kubectl binary URL + required: true + default: https://dl.k8s.io/release/v1.25.3/bin/linux/amd64/kubectl + kubectl-binary-sha-url: + description: kubectl binary SHA URL + required: true + default: https://dl.k8s.io/release/v1.25.3/bin/linux/amd64/kubectl.sha256 + kube-config: + description: kubectl config file + required: true + kube-namespace: + description: kubectl namespace + required: true + kubectl-context: + description: kubectl context + required: true + default: . 
+ kubectl-args: + description: kubectl arguments + required: true + +runs: + using: composite + steps: + - name: Get filenames from URLs + shell: bash + run: | + echo "KUBECTL_BINARY_NAME=$(basename "${{ inputs.kubectl-binary-url }}")" >> $GITHUB_ENV + echo "KUBECTL_BINARY_SHA_NAME=$(basename "${{ inputs.kubectl-binary-sha-url }}")" >> $GITHUB_ENV + + - name: Download kubectl binary + shell: bash + run: curl -LO -s "${{ inputs.kubectl-binary-url }}" + + - name: Download kubectl binary SHA + shell: bash + run: curl -LO -s "${{ inputs.kubectl-binary-sha-url }}" + + - name: Verify kubectl binary with SHA checksum + shell: bash + run: echo "$(cat $KUBECTL_BINARY_SHA_NAME) kubectl" | sha256sum --check + + - name: Install kubectl binary + shell: bash + run: sudo install -o root -g root -m 0755 $KUBECTL_BINARY_NAME /usr/local/bin/kubectl + + - name: Create kubectl config file + shell: bash + run: | + mkdir --parent ~/.kube + echo "${{ inputs.kube-config }}" > ~/.kube/config + + - name: Execute kubectl command + shell: bash + working-directory: ${{ inputs.kubectl-context }} + run: kubectl --namespace "${{ inputs.kube-namespace }}" ${{ inputs.kubectl-args }} diff --git a/.github/actions/lint-python-app/action.yml b/.github/actions/lint-python-app/action.yml new file mode 100644 index 0000000..a12beb0 --- /dev/null +++ b/.github/actions/lint-python-app/action.yml @@ -0,0 +1,28 @@ +# Documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action +name: lint_python_app +description: Lint a Python app + +inputs: + python-app-path: + description: Python app path + required: true + default: . 
+ +runs: + using: composite + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: 'pip' + + - name: Install app dependencies + shell: bash + working-directory: ${{ inputs.python-app-path }} + run: pip3 install -r requirements.txt -r requirements-all.txt + + - name: Lint Python code + shell: bash + working-directory: ${{ inputs.python-app-path }} + run: flake8 . diff --git a/.github/actions/test-python-app/action.yml b/.github/actions/test-python-app/action.yml new file mode 100644 index 0000000..134e6f4 --- /dev/null +++ b/.github/actions/test-python-app/action.yml @@ -0,0 +1,41 @@ +# Documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action +name: test_python_app +description: Test a Python app + +inputs: + python-app-path: + description: Python app path + required: true + default: . + token: + description: GitHub token + required: true + default: "" + +runs: + using: composite + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: 'pip' + + - name: Install app dependencies + shell: bash + working-directory: ${{ inputs.python-app-path }} + run: pip3 install -r requirements.txt -r requirements-all.txt + + - name: Run Python tests + shell: bash + working-directory: ${{ inputs.python-app-path }} + run: pytest + + - name: Coverage report + uses: orgoro/coverage@v3.1 + if: github.event_name == 'pull_request' + with: + coverageFile: ${{ inputs.python-app-path }}/coverage.xml + token: ${{ inputs.token }} + + diff --git a/.github/workflows/image-resize.yml b/.github/workflows/image-resize.yml new file mode 100644 index 0000000..fd778e5 --- /dev/null +++ b/.github/workflows/image-resize.yml @@ -0,0 +1,166 @@ +# Documentation: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsuses +name: image-resize_workflow +run-name: image-resize workflow + +# Allow one concurrent deployment 
+concurrency: + group: "image-resize" + cancel-in-progress: true + +on: + push: + branches: + - main + - prod + paths: + - .github/actions/build-and-push-docker-image-to-github/action.yml + - .github/actions/execute-command-on-kubernetes-cluster/action.yml + - .github/actions/lint-python-app/action.yml + - .github/actions/test-python-app/action.yml + - .github/workflows/image-resize.yml + - common-code/**/* + - services/image-resize/**/* + + pull_request: + paths: + - .github/actions/build-and-push-docker-image-to-github/action.yml + - .github/actions/execute-command-on-kubernetes-cluster/action.yml + - .github/actions/lint-python-app/action.yml + - .github/actions/test-python-app/action.yml + - .github/workflows/image-resize.yml + - common-code/**/* + - services/image-resize/**/* + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + review: + runs-on: ubuntu-latest + steps: + - name: Clone repository + uses: actions/checkout@v3 + + - name: Lint Python app + uses: ./.github/actions/lint-python-app + with: + python-app-path: ./services/image-resize + + test: + needs: review + runs-on: ubuntu-latest + steps: + - name: Clone repository + uses: actions/checkout@v3 + + - name: Test Python app + uses: ./.github/actions/test-python-app + with: + python-app-path: ./services/image-resize + token: ${{ secrets.GITHUB_TOKEN }} + + release: + needs: test + runs-on: ubuntu-latest + # Only run on main + if: success() && github.ref == 'refs/heads/main' + steps: + - name: Clone repository + uses: actions/checkout@v3 + + - name: Build and push Docker image to GitHub + id: build-and-push-docker-image-to-github + uses: ./.github/actions/build-and-push-docker-image-to-github + with: + docker-registry-username: ${{ github.actor }} + docker-registry-password: ${{ secrets.GITHUB_TOKEN }} + docker-image-name: swiss-ai-center/image-resize + docker-image-context: ./services/image-resize + outputs: + docker-image-tags: ${{ 
steps.build-and-push-docker-image-to-github.outputs.docker-image-tags }} + + deploy-dev: + needs: release + runs-on: ubuntu-latest + # Only run on main + if: success() && github.ref == 'refs/heads/main' + steps: + - name: Clone repository + uses: actions/checkout@v3 + + - name: Prepare configuration files + shell: bash + working-directory: services/image-resize/kubernetes + env: + ENVIRONMENT: production + LOG_LEVEL: info + ENGINE_URLS: "'[\"https://core-engine-swiss-ai-center.kube.isc.heia-fr.ch\"]'" + SERVICE_URL: https://image-resize-swiss-ai-center.kube.isc.heia-fr.ch + run: | + # Set image-resize version + docker_image_tags=(${{ needs.release.outputs.docker-image-tags }}) + docker_image_sha_tag="${docker_image_tags[1]}" + yq ".spec.template.spec.containers[0].image = \"$docker_image_sha_tag\"" image-resize.stateful.yml > new-image-resize.stateful.yml && mv new-image-resize.stateful.yml image-resize.stateful.yml + + # Set image-resize configuration (ConfigMap) + yq '.data = (.data | to_entries | map({"key": .key, "value": "${" + .key + "}"}) | from_entries)' image-resize.config-map.yml | envsubst > new-image-resize.config-map.yml && mv new-image-resize.config-map.yml image-resize.config-map.yml + + # Set image-resize configuration (Ingress) + yq ".spec.rules[0].host = \"${SERVICE_URL#*://}\"" image-resize.ingress.yml > new-image-resize.ingress.yml && mv new-image-resize.ingress.yml image-resize.ingress.yml + yq ".spec.tls[0].hosts[0] = \"${SERVICE_URL#*://}\"" image-resize.ingress.yml > new-image-resize.ingress.yml && mv new-image-resize.ingress.yml image-resize.ingress.yml + + - name: Deploy image-resize on the Kubernetes cluster + uses: ./.github/actions/execute-command-on-kubernetes-cluster + with: + kube-config: ${{ secrets.KUBE_CONFIG_DEV }} + kube-namespace: swiss-ai-center-dev + kubectl-context: ./services/image-resize/kubernetes + kubectl-args: | + apply \ + -f image-resize.config-map.yml \ + -f image-resize.stateful.yml \ + -f image-resize.service.yml 
\ + -f image-resize.ingress.yml + + deploy-prod: + needs: release + runs-on: ubuntu-latest + # Only run on prod + if: success() && github.ref == 'refs/heads/prod' + steps: + - name: Clone repository + uses: actions/checkout@v3 + + - name: Prepare configuration files + shell: bash + working-directory: services/image-resize/kubernetes + env: + ENVIRONMENT: production + LOG_LEVEL: info + ENGINE_URLS: "'[\"https://core-engine-swiss-ai-center.kube.isc.heia-fr.ch\"]'" + SERVICE_URL: https://image-resize-swiss-ai-center.kube.isc.heia-fr.ch + run: | + # Set image-resize version + docker_image_tags=(${{ needs.release.outputs.docker-image-tags }}) + docker_image_sha_tag="${docker_image_tags[1]}" + yq ".spec.template.spec.containers[0].image = \"$docker_image_sha_tag\"" image-resize.stateful.yml > new-image-resize.stateful.yml && mv new-image-resize.stateful.yml image-resize.stateful.yml + + # Set image-resize configuration (ConfigMap) + yq '.data = (.data | to_entries | map({"key": .key, "value": "${" + .key + "}"}) | from_entries)' image-resize.config-map.yml | envsubst > new-image-resize.config-map.yml && mv new-image-resize.config-map.yml image-resize.config-map.yml + + # Set image-resize configuration (Ingress) + yq ".spec.rules[0].host = \"${SERVICE_URL#*://}\"" image-resize.ingress.yml > new-image-resize.ingress.yml && mv new-image-resize.ingress.yml image-resize.ingress.yml + yq ".spec.tls[0].hosts[0] = \"${SERVICE_URL#*://}\"" image-resize.ingress.yml > new-image-resize.ingress.yml && mv new-image-resize.ingress.yml image-resize.ingress.yml + + - name: Deploy image-resize on the Kubernetes cluster + uses: ./.github/actions/execute-command-on-kubernetes-cluster + with: + kube-config: ${{ secrets.KUBE_CONFIG_PROD }} + kube-namespace: swiss-ai-center-prod + kubectl-context: ./services/image-resize/kubernetes + kubectl-args: | + apply \ + -f image-resize.config-map.yml \ + -f image-resize.stateful.yml \ + -f image-resize.service.yml \ + -f image-resize.ingress.yml diff 
--git a/.gitignore b/.gitignore new file mode 100644 index 0000000..eaa992f --- /dev/null +++ b/.gitignore @@ -0,0 +1,33 @@ +## Material for MkDocs + +# Cache +.cache + +# Output directory +/public + +## Python + +# Environments +.venv +venv + +# Byte-compiled / optimized / DLL files +__pycache__/ + +# Pytest cache +.pytest_cache + +# Pytest Coverage +.coverage +coverage.xml + +## IntelliJ's IDEs +.idea + +## Visual Studio Code +.vscode + +## macOS +.DS_Store + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..7591b38 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,35 @@ +# Base image +FROM python:3.10 + +# Install all required packages to run the model +RUN apt update && apt install --yes ffmpeg libsm6 libxext6 + +# Work directory +WORKDIR /app + +# Copy requirements file +COPY ./requirements.txt . +COPY ./requirements-all.txt . + +# Install dependencies +RUN pip install --requirement requirements.txt --requirement requirements-all.txt + +# Copy sources +COPY src src + +# Environment variables +ENV ENVIRONMENT=${ENVIRONMENT} +ENV LOG_LEVEL=${LOG_LEVEL} +ENV ENGINE_URLS=${ENGINE_URLS} +ENV MAX_TASKS=${MAX_TASKS} +ENV ENGINE_ANNOUNCE_RETRIES=${ENGINE_ANNOUNCE_RETRIES} +ENV ENGINE_ANNOUNCE_RETRY_DELAY=${ENGINE_ANNOUNCE_RETRY_DELAY} + +# Exposed ports +EXPOSE 80 + +# Switch to src directory +WORKDIR "/app/src" + +# Command to run on start +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "80"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..ada8ada --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# image-resize + +_Check the [related documentation](https://swiss-ai-center.github.io/swiss-ai-center/reference/image-resize) for more information._ diff --git a/kubernetes/image-resize.config-map.yml b/kubernetes/image-resize.config-map.yml new file mode 100644 index 0000000..e99afe4 --- /dev/null +++ b/kubernetes/image-resize.config-map.yml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: 
image-resize-config + labels: + app: image-resize +data: + ENVIRONMENT: development + LOG_LEVEL: debug + ENGINE_URLS: '["http://core-engine-service:8080"]' + SERVICE_URL: http://image-resize-service:9292 diff --git a/kubernetes/image-resize.ingress.yml b/kubernetes/image-resize.ingress.yml new file mode 100644 index 0000000..197b7d8 --- /dev/null +++ b/kubernetes/image-resize.ingress.yml @@ -0,0 +1,22 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: image-resize-ingress + annotations: + nginx.ingress.kubernetes.io/proxy-body-size: "16m" + nginx.org/client-max-body-size: "16m" +spec: + rules: + - host: image-resize-swiss-ai-center.kube.isc.heia-fr.ch + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: image-resize-service + port: + number: 80 + tls: + - hosts: + - image-resize-swiss-ai-center.kube.isc.heia-fr.ch diff --git a/kubernetes/image-resize.service.yml b/kubernetes/image-resize.service.yml new file mode 100644 index 0000000..ab79374 --- /dev/null +++ b/kubernetes/image-resize.service.yml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: image-resize-service +spec: + type: LoadBalancer + ports: + - name: http + port: 9292 + targetPort: 80 + protocol: TCP + selector: + app: image-resize diff --git a/kubernetes/image-resize.stateful.yml b/kubernetes/image-resize.stateful.yml new file mode 100644 index 0000000..758647b --- /dev/null +++ b/kubernetes/image-resize.stateful.yml @@ -0,0 +1,36 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + # This name uniquely identifies the stateful set + name: image-resize-stateful + labels: + app: image-resize +spec: + serviceName: image-resize + replicas: 1 + selector: + matchLabels: + app: image-resize + template: + metadata: + labels: + app: image-resize + spec: + containers: + - name: image-resize + image: ghcr.io/swiss-ai-center/image-resize:latest + # If you build the image locally, change the next line to `imagePullPolicy: Never` - there is no need to 
pull the image + imagePullPolicy: Always + ports: + - name: http + containerPort: 80 + env: + - name: MAX_TASKS + value: "50" + - name: ENGINE_ANNOUNCE_RETRIES + value: "5" + - name: ENGINE_ANNOUNCE_RETRY_DELAY + value: "3" + envFrom: + - configMapRef: + name: image-resize-config diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..f381c78 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,6 @@ +[project] +name = "image-resize" + +[tool.pytest.ini_options] +pythonpath = [".", "src"] +addopts = "--cov-config=.coveragerc --cov-report xml --cov-report term-missing --cov=./src" diff --git a/requirements-all.txt b/requirements-all.txt new file mode 100644 index 0000000..99129f1 --- /dev/null +++ b/requirements-all.txt @@ -0,0 +1,55 @@ +aiobotocore==2.4.1 +aiofile==3.8.7 +aiofiles==22.1.0 +aiohttp==3.8.4 +aioitertools==0.11.0 +aiosignal==1.3.1 +anyio==3.7.0 +async-timeout==4.0.2 +attrs==23.1.0 +botocore==1.27.59 +caio==0.9.12 +certifi==2023.5.7 +charset-normalizer==3.1.0 +click==8.1.3 +coverage==7.2.7 +dnspython==2.3.0 +email-validator==1.3.0 +exceptiongroup==1.1.1 +fastapi==0.87.0 +flake8==5.0.4 +frozenlist==1.3.3 +h11==0.14.0 +httpcore==0.16.3 +httpx==0.23.1 +idna==3.4 +iniconfig==2.0.0 +jmespath==1.0.1 +MarkupSafe==2.1.3 +mccabe==0.7.0 +multidict==6.0.4 +numpy==1.21.4 +opencv-python==4.7.0.68 +packaging==23.1 +pluggy==1.2.0 +pycodestyle==2.9.1 +pydantic==1.10.9 +pyflakes==2.5.0 +pytest==7.2.0 +pytest-asyncio==0.20.3 +pytest-cov==4.0.0 +pytest-httpserver==1.0.6 +python-dateutil==2.8.2 +python-dotenv==0.21.0 +PyYAML==6.0 +rfc3986==1.5.0 +six==1.16.0 +sniffio==1.3.0 +starlette==0.21.0 +tomli==2.0.1 +typing_extensions==4.6.3 +urllib3==1.26.16 +uvicorn==0.19.0 +Werkzeug==2.3.6 +wrapt==1.15.0 +yarl==1.9.2 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..084017f --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +aiofile==3.8.7 +common-code[test] @ 
git+https://github.com/swiss-ai-center/core-engine.git@main#subdirectory=common-code +flake8==5.0.4 +numpy==1.21.4 +opencv-python==4.7.0.68 +pytest==7.2.0 +pytest-asyncio==0.20.3 +pytest-cov==4.0.0 +pytest-httpserver==1.0.6 diff --git a/src/main.py b/src/main.py new file mode 100644 index 0000000..6bb6bc1 --- /dev/null +++ b/src/main.py @@ -0,0 +1,198 @@ +import asyncio +import time +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import RedirectResponse +from common_code.config import get_settings +from pydantic import Field +from common_code.http_client import HttpClient +from common_code.logger.logger import get_logger +from common_code.service.controller import router as service_router +from common_code.service.service import ServiceService +from common_code.storage.service import StorageService +from common_code.tasks.controller import router as tasks_router +from common_code.tasks.service import TasksService +from common_code.tasks.models import TaskData +from common_code.service.models import Service +from common_code.service.enums import ServiceStatus +from common_code.common.enums import FieldDescriptionType, ExecutionUnitTagName, ExecutionUnitTagAcronym +from common_code.common.models import FieldDescription, ExecutionUnitTag + +# Imports required by the service's model +import json +import cv2 +import numpy as np +from common_code.tasks.service import get_extension + +settings = get_settings() + + +class MyService(Service): + """ + Image resize model + """ + + # Any additional fields must be excluded for Pydantic to work + model: object = Field(exclude=True) + + def __init__(self): + super().__init__( + name="Image Resize", + slug="image-resize", + url=settings.service_url, + summary=api_summary, + description=api_description, + status=ServiceStatus.AVAILABLE, + data_in_fields=[ + FieldDescription(name="image", type=[FieldDescriptionType.IMAGE_PNG, FieldDescriptionType.IMAGE_JPEG]), + 
FieldDescription(name="settings", type=[FieldDescriptionType.APPLICATION_JSON]) + ], + data_out_fields=[ + FieldDescription(name="result", type=[FieldDescriptionType.IMAGE_PNG, FieldDescriptionType.IMAGE_JPEG]), + ], + tags=[ + ExecutionUnitTag( + name=ExecutionUnitTagName.IMAGE_PROCESSING, + acronym=ExecutionUnitTagAcronym.IMAGE_PROCESSING + ), + ], + has_ai=False + ) + + def process(self, data): + raw = data["image"].data + input_type = data["image"].type + img = cv2.imdecode(np.frombuffer(raw, np.uint8), 1) + raw_settings = data["settings"].data + image_settings = json.loads(raw_settings) + + if "with_ratio" in image_settings and image_settings["with_ratio"] is True: + scale_percent = image_settings["scale_percent"] + width = int(img.shape[1] * scale_percent / 100) + height = int(img.shape[0] * scale_percent / 100) + else: + width = image_settings["width"] + + if "height" in image_settings: + height = image_settings["height"] + else: + ratio = width / img.shape[1] + height = int(img.shape[0] * ratio) + + dim = (width, height) + + # resize image + resized = cv2.resize(img, dim, interpolation=cv2.INTER_AREA) + + # Save .jpg image + guessed_extension = get_extension(input_type) + is_success, out_buff = cv2.imencode(guessed_extension, resized) + + return { + "result": TaskData( + data=out_buff.tobytes(), + type=input_type + ) + } + + +api_description = """ +This service resizes an image. It can be used to resize an image to a specific width and height, +or to resize an image by a percentage. +the settings parameter is a JSON object with the following fields: +- width: the width of the resized image (eg. 500) +- height: the height of the resized image (eg. 500) +- scale_percent: the percentage of the original image size (eg. 50) +- with_ratio: if true, the width and height fields are ignored and the image is resized by the scale_percent field +""" +api_summary = """ +Resizes an image. 
+""" + +# Define the FastAPI application with information +app = FastAPI( + title="Image Resize API.", + description=api_description, + version="1.0.0", + contact={ + "name": "Swiss AI Center", + "url": "https://swiss-ai-center.ch/", + "email": "info@swiss-ai-center.ch", + }, + swagger_ui_parameters={ + "tagsSorter": "alpha", + "operationsSorter": "method", + }, + license_info={ + "name": "GNU Affero General Public License v3.0 (GNU AGPLv3)", + "url": "https://choosealicense.com/licenses/agpl-3.0/", + }, +) + +# Include routers from other files +app.include_router(service_router, tags=['Service']) +app.include_router(tasks_router, tags=['Tasks']) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +# Redirect to docs +@app.get("/", include_in_schema=False) +async def root(): + return RedirectResponse("/docs", status_code=301) + + +service_service: ServiceService | None = None + + +@app.on_event("startup") +async def startup_event(): + # Manual instances because startup events doesn't support Dependency Injection + # https://github.com/tiangolo/fastapi/issues/2057 + # https://github.com/tiangolo/fastapi/issues/425 + + # Global variable + global service_service + + logger = get_logger(settings) + http_client = HttpClient() + storage_service = StorageService(logger) + my_service = MyService() + tasks_service = TasksService(logger, settings, http_client, storage_service) + service_service = ServiceService(logger, settings, http_client, tasks_service) + + tasks_service.set_service(my_service) + + # Start the tasks service + tasks_service.start() + + async def announce(): + retries = settings.engine_announce_retries + for engine_url in settings.engine_urls: + announced = False + while not announced and retries > 0: + announced = await service_service.announce_service(my_service, engine_url) + retries -= 1 + if not announced: + time.sleep(settings.engine_announce_retry_delay) + if 
retries == 0: + logger.warning(f"Aborting service announcement after " + f"{settings.engine_announce_retries} retries") + + # Announce the service to its engine + asyncio.ensure_future(announce()) + + +@app.on_event("shutdown") +async def shutdown_event(): + # Global variable + global service_service + my_service = MyService() + for engine_url in settings.engine_urls: + await service_service.graceful_shutdown(my_service, engine_url) diff --git a/tests/test.jpg b/tests/test.jpg new file mode 100644 index 0000000..5c27dbc Binary files /dev/null and b/tests/test.jpg differ diff --git a/tests/test_default_route.py b/tests/test_default_route.py new file mode 100644 index 0000000..083211a --- /dev/null +++ b/tests/test_default_route.py @@ -0,0 +1,15 @@ +import pytest +from fastapi.testclient import TestClient +from main import app + + +@pytest.fixture(name="client") +def client_fixture(): + client = TestClient(app) + yield client + + +def test_default_route(client: TestClient): + response = client.get("/") + # NOTE: "/" replies with a 301 to /docs, but TestClient follows redirects by default, so we see the final 200 
+ assert response.status_code == 200 diff --git a/tests/test_logger.py b/tests/test_logger.py new file mode 100644 index 0000000..5eb306f --- /dev/null +++ b/tests/test_logger.py @@ -0,0 +1,27 @@ +import pytest +from common_code.config import get_settings + + +def test_logger(caplog: pytest.LogCaptureFixture): + from common_code.logger.logger import get_logger + logger = get_logger(get_settings()) + caplog.set_level("INFO") + logger.set_level("INFO") + logger.info(message="test_info") + assert "test_info" in caplog.text + caplog.set_level("WARNING") + logger.set_level("WARNING") + logger.warning(message="test_warning") + assert "test_warning" in caplog.text + caplog.set_level("ERROR") + logger.set_level("ERROR") + logger.error(message="test_error") + assert "test_error" in caplog.text + caplog.set_level("CRITICAL") + logger.set_level("CRITICAL") + logger.critical(message="test_critical") + assert "test_critical" in caplog.text + caplog.set_level("DEBUG") + logger.set_level("DEBUG") + logger.debug(message="test_debug") + assert "test_debug" in caplog.text diff --git a/tests/test_service.py b/tests/test_service.py new file mode 100644 index 0000000..85e1be1 --- /dev/null +++ b/tests/test_service.py @@ -0,0 +1,143 @@ +import pytest +from fastapi.testclient import TestClient +from pytest_httpserver import HTTPServer +from common_code.config import get_settings +from common_code.logger.logger import get_logger +from common_code.storage.service import StorageService +from main import app +import time + + +@pytest.fixture(name="storage") +def storage_fixture(): + settings = get_settings() + logger = get_logger(settings) + + storage = StorageService(logger=logger) + + yield storage + + +@pytest.fixture(name="client") +def client_fixture(reachable_engine_instance: HTTPServer): + def get_settings_override(): + settings = get_settings() + settings.engine_urls = [reachable_engine_instance.url_for("")] + settings.engine_announce_retries = 2 + 
settings.engine_announce_retry_delay = 1 + settings.max_tasks = 2 + + return settings + + app.dependency_overrides[get_settings] = get_settings_override + + # client = TestClient(app) + # yield client + + with TestClient(app) as client: + # We wait for the app to announce itself to the engine (ugly) + time.sleep(5) + yield client + + app.dependency_overrides.clear() + + +@pytest.fixture(name="reachable_engine_instance") +def reachable_engine_instance_fixture(httpserver: HTTPServer): + httpserver.expect_request("/services").respond_with_json({}, status=200) + + yield httpserver + + httpserver.clear() + + +@pytest.fixture(name="unreachable_engine_instance") +def unreachable_engine_instance_fixture(httpserver: HTTPServer): + httpserver.expect_request("/services").respond_with_json({}, status=500) + + yield httpserver + + httpserver.clear() + + +@pytest.fixture(name="app_with_reachable_engine_instance") +def app_with_reachable_engine_instance(reachable_engine_instance: HTTPServer): + def get_settings_override(): + settings = get_settings() + settings.engine_urls = [reachable_engine_instance.url_for("")] + settings.engine_announce_retries = 2 + settings.engine_announce_retry_delay = 1 + settings.max_tasks = 2 + + return settings + + # I don't understand why, in this specific case, I need to call the `get_settings_override` function + # for this to work where elsewhere I can pass the function as it is... 
+ app.dependency_overrides[get_settings] = get_settings_override() + + yield app + app.dependency_overrides.clear() + + +@pytest.fixture(name="app_with_unreachable_engine_instance") +def app_with_unreachable_engine_instance(unreachable_engine_instance: HTTPServer): + def get_settings_override(): + settings = get_settings() + settings.engine_urls = [unreachable_engine_instance.url_for("")] + settings.engine_announce_retries = 2 + settings.engine_announce_retry_delay = 1 + settings.max_tasks = 2 + + return settings + + # I don't understand why, in this specific case, I need to call the `get_settings_override` function + # for this to work where elsewhere I can pass the function as it is... + app.dependency_overrides[get_settings] = get_settings_override() + + yield app + app.dependency_overrides.clear() + + +def test_announce_to_reachable_engine(caplog: pytest.LogCaptureFixture, app_with_reachable_engine_instance): + with TestClient(app_with_reachable_engine_instance): + # We wait for the app to announce itself to the engine (ugly) + time.sleep(5) + + # We look for `WARNING` messages in the logs to check if the service wasn't + # able to announce itself to the engine. + # + # This is not a good way to test the app as any other warnings will make the test + # passes. + warning_logs_found = False + for record in caplog.records: + if record.levelname == "WARNING": + warning_logs_found = True + break + + assert not warning_logs_found + + +def test_announce_to_unreachable_engine(caplog: pytest.LogCaptureFixture, app_with_unreachable_engine_instance): + with TestClient(app_with_unreachable_engine_instance): + # We wait for the app to announce itself to the engine (ugly) + time.sleep(5) + + # We look for `WARNING` messages in the logs to check if the service wasn't + # able to announce itself to the engine. + # + # This is not a good way to test the app as any other warnings will make the test + # passes. 
+ warning_logs_found = False + for record in caplog.records: + if record.levelname == "WARNING": + warning_logs_found = True + break + + assert warning_logs_found + + +def test_status(client: TestClient): + status_response = client.get("/status") + + # Check the output + assert status_response.status_code == 200 diff --git a/tests/test_tasks.py b/tests/test_tasks.py new file mode 100644 index 0000000..394e200 --- /dev/null +++ b/tests/test_tasks.py @@ -0,0 +1,129 @@ +# TODO: Fix this test +# import pytest +# from fastapi.testclient import TestClient +# from pytest_httpserver import HTTPServer +# from config import get_settings +# from logger.logger import get_logger +# from storage.service import StorageService +# from main import app +# import time +# +# +# @pytest.fixture(name="storage") +# def storage_fixture(): +# settings = get_settings() +# logger = get_logger(settings) +# +# storage = StorageService(logger=logger) +# +# yield storage +# +# +# @pytest.fixture(name="client") +# def client_fixture(reachable_engine_instance: HTTPServer): +# def get_settings_override(): +# settings = get_settings() +# settings.engine_urls = [reachable_engine_instance.url_for("")] +# settings.engine_announce_retries = 2 +# settings.engine_announce_retry_delay = 1 +# settings.max_tasks = 2 +# +# return settings +# +# app.dependency_overrides[get_settings] = get_settings_override +# +# # client = TestClient(app) +# # yield client +# +# with TestClient(app) as client: +# # We wait for the app to announce itself to the engine (ugly) +# time.sleep(5) +# yield client +# +# app.dependency_overrides.clear() +# +# +# @pytest.fixture(name="reachable_engine_instance") +# def reachable_engine_instance_fixture(httpserver: HTTPServer): +# httpserver.expect_request("/services").respond_with_json({}, status=200) +# +# yield httpserver +# +# httpserver.clear() +# +# +# service_task = { +# "s3_access_key_id": "minio", +# "s3_secret_access_key": "minio123", +# "s3_region": "eu-central-2", +# 
"s3_host": "http://localhost:9000", +# "s3_bucket": "engine", +# "task": { +# "data_in": [], +# "service_id": "00000000-0000-0000-0000-000000000000", +# "pipeline_id": "00000000-0000-0000-0000-000000000000", +# "id": "00000000-0000-0000-0000-000000000000" +# } +# } +# + +# @pytest.mark.asyncio +# async def test_task_status(client: TestClient, storage: StorageService): +# service_task_copy = service_task.copy() +# +# with open("tests/test.jpg", "rb") as file: +# file_key = await storage.upload( +# file, +# ".jpg", +# service_task_copy["s3_region"], +# service_task_copy["s3_secret_access_key"], +# service_task_copy["s3_access_key_id"], +# service_task_copy["s3_host"], +# service_task_copy["s3_bucket"], +# ) +# +# service_task_copy["task"]["data_in"] = [file_key] +# +# compute_response = client.post("/compute", json=service_task_copy) +# assert compute_response.status_code == 200 +# +# task_status_response = client.get(f"/status/{service_task_copy['task']['id']}") +# +# found_at_least_once = False +# number_of_tries = 0 +# while task_status_response.status_code == 200 and number_of_tries < 5: +# found_at_least_once = True +# number_of_tries += 1 +# task_status_response = client.get(f"/status/{service_task_copy['task']['id']}") +# +# assert found_at_least_once and number_of_tries != 5 +# +# +# def test_task_status_not_found(client: TestClient): +# task_status_response = client.get("/status/00000000-0000-0000-0000-000000000000") +# +# assert task_status_response.status_code == 404 +# +# +# @pytest.mark.asyncio +# async def test_compute_queue_full(client: TestClient, storage: StorageService): +# service_task_copy = service_task.copy() +# +# with open("tests/test.jpg", "rb") as file: +# file_key = await storage.upload( +# file, +# ".jpg", +# service_task_copy["s3_region"], +# service_task_copy["s3_secret_access_key"], +# service_task_copy["s3_access_key_id"], +# service_task_copy["s3_host"], +# service_task_copy["s3_bucket"], +# ) +# +# 
service_task_copy["task"]["data_in"] = [file_key] +# +# compute_response = client.post("/compute", json=service_task_copy) +# compute_response = client.post("/compute", json=service_task_copy) +# compute_response = client.post("/compute", json=service_task_copy) +# +# assert compute_response.status_code == 503