From ac5bf6f8f81bcd75b3f8d8fd87106e97516ad395 Mon Sep 17 00:00:00 2001 From: Rob Ballantyne Date: Tue, 6 Aug 2024 16:59:12 +0100 Subject: [PATCH] Update to new base image. Remove Runpod serverless. Add WIP API wrapper --- .github/workflows/docker-build.yml | 102 +++++--- README.md | 135 ++-------- .../supervisord/conf.d/serverless.conf | 20 -- .../opt/ai-dock/bin/build/layer0/common.sh | 90 ------- .../opt/ai-dock/bin/supervisor-serverless.sh | 34 --- .../example_payloads/bound_image2image.json | 16 -- .../example_payloads/bound_text2image.json | 20 -- .../raw_controlnet_t2i_adapters.json | 132 ---------- .../example_payloads/raw_image2image.json | 109 -------- .../docs/example_payloads/raw_text2image.json | 97 ------- .../docs/example_payloads/raw_upscale.json | 116 --------- .../opt/serverless/docs/swagger/openapi.yaml | 237 ------------------ .../opt/serverless/handlers/basehandler.py | 234 ----------------- .../opt/serverless/handlers/hello_world.py | 7 - .../opt/serverless/handlers/rawworkflow.py | 141 ----------- .../opt/serverless/handlers/text2image.py | 83 ------ .../providers/runpod/test_input.json | 109 -------- .../opt/serverless/providers/runpod/worker.py | 46 ---- .../opt/serverless/utils/filesystem.py | 24 -- .../COPY_ROOT/opt/serverless/utils/network.py | 56 ----- .../COPY_ROOT/opt/serverless/utils/s3utils.py | 45 ---- .../opt/ai-dock/bin/build/layer0/amd.sh | 16 ++ .../opt/ai-dock/bin/build/layer0/clean.sh | 8 + .../opt/ai-dock/bin/build/layer0/common.sh | 48 ++++ .../opt/ai-dock/bin/build/layer0/cpu.sh | 16 ++ .../opt/ai-dock/bin/build/layer0/init.sh | 2 +- .../opt/ai-dock/bin/build/layer0/nvidia.sh | 27 ++ .../supervisor/supervisord/conf.d/.gitkeep | 0 .../supervisord/conf.d/comfyui.conf | 0 .../conf.d/comfyui_api_wrapper.conf} | 6 +- .../ai-dock/api-wrapper/config}/__init__.py | 0 .../opt/ai-dock/api-wrapper/config/config.py | 13 + .../opt/ai-dock/api-wrapper/environment.yaml | 7 + .../opt/ai-dock/api-wrapper/main.py | 98 ++++++++ 
.../api-wrapper/modifiers}/__init__.py | 0 .../api-wrapper/modifiers/basemodifier.py | 140 +++++++++++ .../api-wrapper/modifiers/text2image.py} | 43 ++-- .../ai-dock/api-wrapper/payloads/imgsave.json | 43 ++++ .../api-wrapper/requestmodels/__init__.py} | 0 .../api-wrapper/requestmodels/models.py | 84 +++++++ .../opt/ai-dock/api-wrapper/requirements.txt | 9 + .../api-wrapper/responses/__init__.py} | 0 .../ai-dock/api-wrapper/responses/result.py | 11 + .../api-wrapper/workers/generation_worker.py | 101 ++++++++ .../api-wrapper/workers/postprocess_worker.py | 124 +++++++++ .../api-wrapper/workers/preprocess_worker.py | 62 +++++ .../api-wrapper}/workflows/image2image.json | 0 .../api-wrapper}/workflows/text2image.json | 0 .../opt/ai-dock/bin/build/layer1}/amd.sh | 0 .../opt/ai-dock/bin/build/layer1}/clean.sh | 1 - .../opt/ai-dock/bin/build/layer1/common.sh | 42 ++++ .../opt/ai-dock/bin/build/layer1}/cpu.sh | 0 .../opt/ai-dock/bin/build/layer1/init.sh | 20 ++ .../opt/ai-dock/bin/build/layer1}/nvidia.sh | 10 +- .../opt/ai-dock/bin/preflight.d/10-default.sh | 9 - .../opt/ai-dock/bin/set-comfyui-flags.sh | 0 .../bin/supervisor-comfyui-api-wrapper.sh} | 18 +- .../opt/ai-dock/bin/supervisor-comfyui.sh | 13 +- .../opt/ai-dock/bin/update-comfyui.sh | 3 +- .../ai-dock/storage_monitor/etc/mappings.sh | 0 .../opt/caddy/share/service_config_18188 | 20 +- .../usr => COPY_ROOT_1/root}/.gitkeep | 0 .../handlers => COPY_ROOT_1/usr}/.gitkeep | 0 .../usr/local/share/ai-dock/comfyui.ipynb | 0 .../opt/ai-dock/bin/build/layer99}/init.sh | 47 +++- .../stable_diffusion/models/ckpt}/.gitkeep | 0 .../models/controlnet}/.gitkeep | 0 .../models/diffusers}/.gitkeep | 0 .../models/embeddings}/.gitkeep | 0 .../stable_diffusion/models/esrgan}/.gitkeep | 0 .../stable_diffusion/models/gligen}/.gitkeep | 0 .../models/hypernetworks}/.gitkeep | 0 .../stable_diffusion/models/lora}/.gitkeep | 0 .../models/style_models}/.gitkeep | 0 .../stable_diffusion/models/unet}/.gitkeep | 0 
.../stable_diffusion/models/vae}/.gitkeep | 0 .../models/vae_approx}/.gitkeep | 0 .../models/vae_approx/.gitkeep | 0 build/Dockerfile | 45 ++-- config/provisioning/default.sh | 79 ++++-- docker-compose.yaml | 13 +- 81 files changed, 1123 insertions(+), 1908 deletions(-) delete mode 100644 build/COPY_ROOT/etc/supervisor/supervisord/conf.d/serverless.conf delete mode 100755 build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh delete mode 100755 build/COPY_ROOT/opt/ai-dock/bin/supervisor-serverless.sh delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_image2image.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_text2image.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_controlnet_t2i_adapters.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_image2image.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_text2image.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_upscale.json delete mode 100644 build/COPY_ROOT/opt/serverless/docs/swagger/openapi.yaml delete mode 100644 build/COPY_ROOT/opt/serverless/handlers/basehandler.py delete mode 100644 build/COPY_ROOT/opt/serverless/handlers/hello_world.py delete mode 100644 build/COPY_ROOT/opt/serverless/handlers/rawworkflow.py delete mode 100644 build/COPY_ROOT/opt/serverless/handlers/text2image.py delete mode 100644 build/COPY_ROOT/opt/serverless/providers/runpod/test_input.json delete mode 100644 build/COPY_ROOT/opt/serverless/providers/runpod/worker.py delete mode 100644 build/COPY_ROOT/opt/serverless/utils/filesystem.py delete mode 100644 build/COPY_ROOT/opt/serverless/utils/network.py delete mode 100644 build/COPY_ROOT/opt/serverless/utils/s3utils.py create mode 100755 build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/amd.sh create mode 100755 build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/clean.sh create mode 100755 
build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh create mode 100755 build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/cpu.sh rename build/{COPY_ROOT => COPY_ROOT_0}/opt/ai-dock/bin/build/layer0/init.sh (91%) create mode 100755 build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh rename build/{COPY_ROOT => COPY_ROOT_1}/etc/supervisor/supervisord/conf.d/.gitkeep (100%) rename build/{COPY_ROOT => COPY_ROOT_1}/etc/supervisor/supervisord/conf.d/comfyui.conf (100%) rename build/{COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui_rp_api.conf => COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui_api_wrapper.conf} (74%) rename build/{COPY_ROOT/opt/serverless/handlers => COPY_ROOT_1/opt/ai-dock/api-wrapper/config}/__init__.py (100%) create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/config.py create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/environment.yaml create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/main.py rename build/{COPY_ROOT/opt/serverless/utils => COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers}/__init__.py (100%) create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/basemodifier.py rename build/{COPY_ROOT/opt/serverless/handlers/image2image.py => COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/text2image.py} (56%) create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/payloads/imgsave.json rename build/{COPY_ROOT/opt/serverless/docs/postman/.gitkeep => COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/__init__.py} (100%) create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/models.py create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requirements.txt rename build/{COPY_ROOT/root/.gitkeep => COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/__init__.py} (100%) create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/result.py create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/generation_worker.py create mode 100644 
build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/postprocess_worker.py create mode 100644 build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/preprocess_worker.py rename build/{COPY_ROOT/opt/serverless => COPY_ROOT_1/opt/ai-dock/api-wrapper}/workflows/image2image.json (100%) rename build/{COPY_ROOT/opt/serverless => COPY_ROOT_1/opt/ai-dock/api-wrapper}/workflows/text2image.json (100%) rename build/{COPY_ROOT/opt/ai-dock/bin/build/layer0 => COPY_ROOT_1/opt/ai-dock/bin/build/layer1}/amd.sh (100%) rename build/{COPY_ROOT/opt/ai-dock/bin/build/layer0 => COPY_ROOT_1/opt/ai-dock/bin/build/layer1}/clean.sh (85%) create mode 100755 build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/common.sh rename build/{COPY_ROOT/opt/ai-dock/bin/build/layer0 => COPY_ROOT_1/opt/ai-dock/bin/build/layer1}/cpu.sh (100%) create mode 100755 build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/init.sh rename build/{COPY_ROOT/opt/ai-dock/bin/build/layer0 => COPY_ROOT_1/opt/ai-dock/bin/build/layer1}/nvidia.sh (53%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/ai-dock/bin/preflight.d/10-default.sh (71%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/ai-dock/bin/set-comfyui-flags.sh (100%) rename build/{COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui-rp-api.sh => COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui-api-wrapper.sh} (50%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/ai-dock/bin/supervisor-comfyui.sh (88%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/ai-dock/bin/update-comfyui.sh (81%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/ai-dock/storage_monitor/etc/mappings.sh (100%) rename build/{COPY_ROOT => COPY_ROOT_1}/opt/caddy/share/service_config_18188 (62%) rename build/{COPY_ROOT/usr => COPY_ROOT_1/root}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/serverless/handlers => COPY_ROOT_1/usr}/.gitkeep (100%) rename build/{COPY_ROOT => COPY_ROOT_1}/usr/local/share/ai-dock/comfyui.ipynb (100%) rename build/{COPY_ROOT_EXTRA/opt/ai-dock/bin/build/layer1 => COPY_ROOT_99/opt/ai-dock/bin/build/layer99}/init.sh (83%) 
rename build/{COPY_ROOT_EXTRA/opt/serverless/workflows => COPY_ROOT_99/opt/storage/stable_diffusion/models/ckpt}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/ckpt => COPY_ROOT_99/opt/storage/stable_diffusion/models/controlnet}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/controlnet => COPY_ROOT_99/opt/storage/stable_diffusion/models/diffusers}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/diffusers => COPY_ROOT_99/opt/storage/stable_diffusion/models/embeddings}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/embeddings => COPY_ROOT_99/opt/storage/stable_diffusion/models/esrgan}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/esrgan => COPY_ROOT_99/opt/storage/stable_diffusion/models/gligen}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/gligen => COPY_ROOT_99/opt/storage/stable_diffusion/models/hypernetworks}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/hypernetworks => COPY_ROOT_99/opt/storage/stable_diffusion/models/lora}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/lora => COPY_ROOT_99/opt/storage/stable_diffusion/models/style_models}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/style_models => COPY_ROOT_99/opt/storage/stable_diffusion/models/unet}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/unet => COPY_ROOT_99/opt/storage/stable_diffusion/models/vae}/.gitkeep (100%) rename build/{COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae => COPY_ROOT_99/opt/storage/stable_diffusion/models/vae_approx}/.gitkeep (100%) delete mode 100644 build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae_approx/.gitkeep diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 
a3f285ac..1aa6aef8 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -18,8 +18,7 @@ jobs: build: # Undeclared SHA tags with latest commit from master branch # Only building periodic sha tagged images - - {latest: "true", sha: "719fb2c", python: "3.10", pytorch: "2.2.2"} - - {latest: "false", sha: "719fb2c", python: "3.10", pytorch: "2.2.2"} + - {latest: "false", sha: "de17a97", python: "3.10", pytorch: "2.3.1"} steps: - name: Free Space @@ -35,19 +34,26 @@ jobs: - name: Env Setter run: | - echo "PACKAGE_NAME=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} + REPO=${GITHUB_REPOSITORY,,} + echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV} + echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV} - name: Checkout uses: actions/checkout@v3 - name: Permissions fixes run: | - reponame="$(basename ${GITHUB_REPOSITORY})" - target="${HOME}/work/${reponame}/${reponame}/build/COPY*" + target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*" chmod -R ug+rwX ${target} + - + name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -55,7 +61,7 @@ jobs: - name: Set tags run: | - img_path="ghcr.io/${{ env.PACKAGE_NAME }}" + img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}" if [[ -z '${{ matrix.build.sha }}' ]]; then COMFYUI_SHA="$(curl -fsSL "https://api.github.com/repos/comfyanonymous/ComfyUI/commits/master" | jq -r '.sha[0:7]')" else @@ -64,14 +70,13 @@ jobs: [ -z "$COMFYUI_SHA" ] && { echo "Error: COMFYUI_SHA is empty. Exiting script." 
>&2; exit 1; } echo "COMFYUI_SHA=${COMFYUI_SHA}" >> ${GITHUB_ENV} - base_tag="cpu-${{ env.UBUNTU_VERSION }}" + base_tag="v2-cpu-${{ env.UBUNTU_VERSION }}" - # Latest tags OR sha - Not both if [[ ${{ matrix.build.latest }} == "true" ]]; then echo "Marking latest" - TAGS="${img_path}:${base_tag}, ${img_path}:latest-cpu, ${img_path}:latest-cpu-jupyter" + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-cpu" else - TAGS="${img_path}:${base_tag}-${COMFYUI_SHA}" + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}" fi echo "TAGS=${TAGS}" >> ${GITHUB_ENV} - @@ -80,7 +85,7 @@ jobs: with: context: build build-args: | - IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-cpu-${{ env.UBUNTU_VERSION }} + IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-cpu-${{ env.UBUNTU_VERSION }} PYTHON_VERSION=${{ matrix.build.python }} PYTORCH_VERSION=${{ matrix.build.pytorch }} COMFYUI_SHA=${{ env.COMFYUI_SHA }} @@ -97,9 +102,7 @@ jobs: build: # Undeclared SHA tags with latest commit from master branch # Only building periodic sha tagged images - - {latest: "true", sha: "719fb2c", python: "3.10", pytorch: "2.2.2", cuda: "11.8.0-runtime"} - - {latest: "false", sha: "719fb2c", python: "3.10", pytorch: "2.2.2", cuda: "11.8.0-runtime"} - - {latest: "false", sha: "719fb2c", python: "3.10", pytorch: "2.2.2", cuda: "12.1.0-runtime"} + - {latest: "false", sha: "de17a97", python: "3.10", pytorch: "2.3.1", cuda: "12.1.0-base"} steps: - name: Free Space @@ -115,19 +118,26 @@ jobs: - name: Env Setter run: | - echo "PACKAGE_NAME=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} + REPO=${GITHUB_REPOSITORY,,} + echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV} + echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV} - name: Checkout uses: actions/checkout@v3 - name: Permissions fixes run: | - reponame="$(basename ${GITHUB_REPOSITORY})" - target="${HOME}/work/${reponame}/${reponame}/build/COPY*" + target="${HOME}/work/${{ env.REPO_NAME }}/${{ 
env.REPO_NAME }}/build/COPY*" chmod -R ug+rwX ${target} + - + name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -135,7 +145,8 @@ jobs: - name: Set tags run: | - img_path="ghcr.io/${{ env.PACKAGE_NAME }}" + img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}" + img_path_dhub="${{ vars.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-cuda" if [[ -z '${{ matrix.build.sha }}' ]]; then COMFYUI_SHA="$(curl -fsSL "https://api.github.com/repos/comfyanonymous/ComfyUI/commits/master" | jq -r '.sha[0:7]')" else @@ -144,15 +155,17 @@ jobs: [ -z "$COMFYUI_SHA" ] && { echo "Error: COMFYUI_SHA is empty. Exiting script." >&2; exit 1; } echo "COMFYUI_SHA=${COMFYUI_SHA}" >> ${GITHUB_ENV} - base_tag="cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}" + base_tag="v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }}" - # Latest tags OR sha - Not both if [[ ${{ matrix.build.latest }} == "true" ]]; then - echo "Marking latest" - TAGS="${img_path}:${base_tag}, ${img_path}:latest, ${img_path}:latest-jupyter, ${img_path}:latest-cuda, ${img_path}:latest-cuda-jupyter" - else - TAGS="${img_path}:${base_tag}-${COMFYUI_SHA}" - fi + echo "Marking latest" + # GHCR.io Tags + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest, ${img_path_ghcr}:latest-cuda" + # Docker.io Tags + TAGS="${TAGS}, ${img_path_dhub}:${COMFYUI_SHA}, ${img_path_dhub}:latest" + else + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}, ${img_path_dhub}:${COMFYUI_SHA}" + fi echo "TAGS=${TAGS}" >> ${GITHUB_ENV} - name: Build and push @@ -160,7 +173,7 @@ jobs: with: context: build build-args: | - IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-cuda-${{ matrix.build.cuda }}-${{ 
env.UBUNTU_VERSION }} + IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-cuda-${{ matrix.build.cuda }}-${{ env.UBUNTU_VERSION }} PYTHON_VERSION=${{ matrix.build.python }} PYTORCH_VERSION=${{ matrix.build.pytorch }} COMFYUI_SHA=${{ env.COMFYUI_SHA }} @@ -176,8 +189,7 @@ jobs: build: # Undeclared SHA tags with latest commit from master branch # Only building periodic sha tagged images - - {latest: "true", sha: "719fb2c", python: "3.10", pytorch: "2.2.2", rocm: "5.7-runtime"} - - {latest: "false", sha: "719fb2c", python: "3.10", pytorch: "2.2.2", rocm: "5.7-runtime"} + - {latest: "false", sha: "de17a97", python: "3.10", pytorch: "2.3.1", rocm: "6.0-runtime"} steps: - name: Free Space @@ -193,19 +205,26 @@ jobs: - name: Env Setter run: | - echo "PACKAGE_NAME=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} + REPO=${GITHUB_REPOSITORY,,} + echo "REPO_NAMESPACE=${REPO%%/*}" >> ${GITHUB_ENV} + echo "REPO_NAME=${REPO#*/}" >> ${GITHUB_ENV} - name: Checkout uses: actions/checkout@v3 - name: Permissions fixes run: | - reponame="$(basename ${GITHUB_REPOSITORY})" - target="${HOME}/work/${reponame}/${reponame}/build/COPY*" + target="${HOME}/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/build/COPY*" chmod -R ug+rwX ${target} + - + name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ vars.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -213,7 +232,8 @@ jobs: - name: Set tags run: | - img_path="ghcr.io/${{ env.PACKAGE_NAME }}" + img_path_ghcr="ghcr.io/${{ env.REPO_NAMESPACE }}/${{ env.REPO_NAME }}" + img_path_dhub="${{ vars.DOCKERHUB_USER }}/${{ env.REPO_NAME }}-rocm" if [[ -z '${{ matrix.build.sha }}' ]]; then COMFYUI_SHA="$(curl -fsSL "https://api.github.com/repos/comfyanonymous/ComfyUI/commits/master" | jq -r '.sha[0:7]')" else @@ -222,14 +242,16 @@ jobs: [ -z 
"$COMFYUI_SHA" ] && { echo "Error: COMFYUI_SHA is empty. Exiting script." >&2; exit 1; } echo "COMFYUI_SHA=${COMFYUI_SHA}" >> ${GITHUB_ENV} - base_tag="rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}" + base_tag="v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }}" - # Latest tags OR sha - Not both if [[ ${{ matrix.build.latest }} == "true" ]]; then echo "Marking latest" - TAGS="${img_path}:${base_tag}, ${img_path}:latest-rocm, ${img_path}:latest-rocm-jupyter" + # GHCR.io Tags + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}, ${img_path_ghcr}:${base_tag}, ${img_path_ghcr}:latest-rocm" + # Docker.io Tags + TAGS="${TAGS}, ${img_path_dhub}:${COMFYUI_SHA}, ${img_path_dhub}:latest" else - TAGS="${img_path}:${base_tag}-${COMFYUI_SHA}" + TAGS="${img_path_ghcr}:${base_tag}-${COMFYUI_SHA}, ${img_path_dhub}:${COMFYUI_SHA}" fi echo "TAGS=${TAGS}" >> ${GITHUB_ENV} - @@ -238,7 +260,7 @@ jobs: with: context: build build-args: | - IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }} + IMAGE_BASE=ghcr.io/ai-dock/python:${{ matrix.build.python }}-v2-rocm-${{ matrix.build.rocm }}-${{ env.UBUNTU_VERSION }} PYTHON_VERSION=${{ matrix.build.python }} PYTORCH_VERSION=${{ matrix.build.pytorch }} COMFYUI_SHA=${{ env.COMFYUI_SHA }} diff --git a/README.md b/README.md index 58a1e5c1..62b6521e 100644 --- a/README.md +++ b/README.md @@ -2,14 +2,14 @@ # AI-Dock + ComfyUI Docker Image -Run [ComfyUI](https://github.com/comfyanonymous/ComfyUI) in a cloud-first AI-Dock container. +Run [ComfyUI](https://github.com/comfyanonymous/ComfyUI) in a highly-configurable, cloud-first AI-Dock container. >[!NOTE] >These images do not bundle models or third-party configurations. You should use a [provisioning script](https://github.com/ai-dock/base-image/wiki/4.0-Running-the-Image#provisioning-script) to automatically configure your container. You can find examples in `config/provisioning`. 
## Documentation -All AI-Dock containers share a common base which is designed to make running on cloud services such as [vast.ai](https://link.ai-dock.org/vast.ai) and [runpod.io](https://link.ai-dock.org/template) as straightforward and user friendly as possible. +All AI-Dock containers share a common base which is designed to make running on cloud services such as [vast.ai](https://link.ai-dock.org/vast.ai) as straightforward and user friendly as possible. Common features and options are documented in the [base wiki](https://github.com/ai-dock/base-image/wiki) but any additional features unique to this image will be detailed below. @@ -29,40 +29,36 @@ Tags follow these patterns: - `:cpu-[ubuntu-version]` -Browse [here](https://github.com/ai-dock/comfyui/pkgs/container/comfyui) for an image suitable for your target environment. +Browse [ghcr.io](https://github.com/ai-dock/comfyui/pkgs/container/comfyui) for an image suitable for your target environment. Alternatively, view a select range of [CUDA](https://hub.docker.com/r/aidockorg/comfyui-cuda) and [ROCm](https://hub.docker.com/r/aidockorg/comfyui-rocm) builds at DockerHub. Supported Platforms: `NVIDIA CUDA`, `AMD ROCm`, `CPU` -## Required Environment Variables For Stable Diffusion 3 - -If you are using the Stable Diffusion 3 provisioning-script(sd3.sh), you must set the following variable(s) - -| Variable | Description | -| ---------- | -------------------------------------------------------------------------------------------------------------------------------------- | -| `HF_TOKEN` | Your Hugging Face API key(must be either a write or read token). 
This is needed to download Stable Diffiusion 3 as it is a gated model | - ## Additional Environment Variables | Variable | Description | | ------------------------ | ----------- | | `AUTO_UPDATE` | Update ComfyUI on startup (default `false`) | +| `CIVITAI_TOKEN` | Authenticate download requests from Civitai - Required for gated models | | `COMFYUI_BRANCH` | ComfyUI branch/commit hash for auto update (default `master`) | -| `COMFYUI_FLAGS` | Startup flags. eg. `--gpu-only --highvram` | +| `COMFYUI_ARGS` | Startup flags. eg. `--gpu-only --highvram` | | `COMFYUI_PORT_HOST` | ComfyUI interface port (default `8188`) | | `COMFYUI_URL` | Override `$DIRECT_ADDRESS:port` with URL for ComfyUI | +| `HF_TOKEN` | Authenticate download requests from HuggingFace - Required for gated models (SD3, FLUX, etc.) | See the base environment variables [here](https://github.com/ai-dock/base-image/wiki/2.0-Environment-Variables) for more configuration options. -### Additional Micromamba Environments +### Additional Python Environments | Environment | Packages | | -------------- | ----------------------------------------- | | `comfyui` | ComfyUI and dependencies | +| `api` | ComfyUI API wrapper and dependencies | -This micromamba environment will be activated on shell login. -See the base micromamba environments [here](https://github.com/ai-dock/base-image/wiki/1.0-Included-Software#installed-micromamba-environments). +The `comfyui` environment will be activated on shell login. + +~~See the base micromamba environments [here](https://github.com/ai-dock/base-image/wiki/1.0-Included-Software#installed-micromamba-environments).~~ ## Additional Services @@ -72,30 +68,16 @@ The following services will be launched alongside the [default services](https:/ The service will launch on port `8188` unless you have specified an override with `COMFYUI_PORT_HOST`. -ComfyUI will be updated to the latest version on container start. 
You can pin the version to a branch or commit hash by setting the `COMFYUI_BRANCH` variable. - -You can set startup flags by using variable `COMFYUI_FLAGS`. +You can set startup flags by using variable `COMFYUI_ARGS`. To manage this service you can use `supervisorctl [start|stop|restart] comfyui`. -### ComfyUI RP API - -This service is available on port `8188` and is used to test the [RunPod serverless](https://link.ai-dock.org/runpod-serverless) API. +### ComfyUI API Wrapper -You can access the api directly at `/rp-api/runsync` or you can use the Swager/openAPI playground at `/rp-api`. +This service is available on port `8188` and is a work-in-progress to replace previous serverless handlers which have been deprecated; Old Docker images and sources remain available should you need them. -There are several [example payloads](https://github.com/ai-dock/comfyui/tree/main/build/COPY_ROOT/opt/serverless/docs/example_payloads) included in this repository. -This API is available on all platforms - But the container can ony run in serverless mode on RunPod infrastructure. -To learn more about the serverless API see the [serverless section](#runpod-serverless) - -<details>
- API Playground -
- -
+You can access the api directly at `/ai-dock/api/` or you can use the Swager/openAPI playground at `/ai-dock/api/docs`. >[!NOTE] >All services are password protected by default. See the [security](https://github.com/ai-dock/base-image/wiki#security) and [environment variables](https://github.com/ai-dock/base-image/wiki/2.0-Environment-Variables) documentation for more information. @@ -110,91 +92,4 @@ To learn more about the serverless API see the [serverless section](#runpod-serv --- -**Runpod.​io** - -- [comfyui:latest-cuda](https://link.ai-dock.org/template-runpod-comfyui) - - ---- - -## RunPod Serverless - -The container can be used as a [RunPod serverless](https://link.ai-dock.org/runpod-serverless) worker. To enable serverless mode you must run the container with environment variables `SERVERLESS=true` and `WORKSPACE=/runpod-volume`. - -The handlers will accept a job, process it and upload your images to s3 compatible storage. - -You may either set your s3 credentials as environment variables or you can pass them to the worker in the payload. - -You should set `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, `AWS_ENDPOINT_URL` and `AWS_BUCKET_NAME`. - -
- Serverless template example -
- -
- -If passed in the payload these variables should be in lowercase. - -Incorrect or unset s3 credentials will not resut in job failure. You can still retrieve your images from the network volume. - -When used in serverless mode, the container will skip provisioning and will not update ComfyUI or the nodes on start so you must either ensure everyting you need is built into the image (see [Building the Image](https://github.com/ai-dock/base-image/wiki/5.0-Building-the-Image)) or first run the container with a network volume in GPU Cloud to get everything set up before launching your workers. - -After launching a serverless worker, any instances of the container launched on the network volume in GPU cloud will also skip auto-updating. All updates must be done manually. - -The API is documented in openapi format. You can test it in a running container on the ComfyUI port at `/rp-api/docs` - See [ComfyUI RP API](#comfyui-rp-api) for more information. - ---- - -The API can use multiple handlers which you may define in the payload. Three handlers have been included for your convenience - -### Handler: RawWorkflow - -This handler should be passed a full ComfyUI workflow in the payload. It will detect any URL's and download the files into the input directory before replacing the URL value with the local path of the resource. This is very useful when working with image to image and controlnets. - -This is the most flexible of all handlers. - -
- RawWorkflow schema -
- -
- Example payload -
- - -### Handler: Text2Image - -This is a basic handler that is bound to a static workflow file (`/opt/serverless/workflows/text2image.json`). - -You can define several overrides to modify the workflow before processing. - -
- Text2Image schema -
- -
- Example payload - -
- -### Handler: Image2Image - -This is a basic handler that is bound to a static workflow file (`/opt/serverless/workflows/image2image.json`). - -You can define several overrides to modify the workflow before processing. - -
- Image2Image schema -
- -
- Example payload -
- -These handlers demonstrate how you can create a simple endpoint which will require very little frontend work to implement. - -You can find example payloads for these handlers [here](https://github.com/ai-dock/comfyui/tree/main/build/COPY_ROOT/opt/serverless/docs/example_payloads) - ---- - _The author ([@robballantyne](https://github.com/robballantyne)) may be compensated if you sign up to services linked in this document. Testing multiple variants of GPU images in many different environments is both costly and time-consuming; This helps to offset costs_ diff --git a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/serverless.conf b/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/serverless.conf deleted file mode 100644 index 0bd9b4be..00000000 --- a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/serverless.conf +++ /dev/null @@ -1,20 +0,0 @@ -[program:serverless] -user=$USER_NAME -environment=PROC_NAME="%(program_name)s",USER=$USER_NAME,HOME=/home/$USER_NAME -command=/opt/ai-dock/bin/supervisor-serverless.sh -process_name=%(program_name)s -numprocs=1 -directory=/opt/serverless -priority=100 -autostart=true -startsecs=2 -startretries=3 -autorestart=unexpected -stopsignal=TERM -stopwaitsecs=10 -stopasgroup=true -killasgroup=true -stdout_logfile=/var/log/supervisor/serverless.log -stdout_logfile_maxbytes=10MB -stdout_logfile_backups=1 -redirect_stderr=true diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh b/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh deleted file mode 100755 index d180a597..00000000 --- a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/common.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/false - -source /opt/ai-dock/etc/environment.sh - -build_common_main() { - build_common_create_env - build_common_install_jupyter_kernels -} - -build_common_create_env() { - apt-get update - $APT_INSTALL \ - libgl1-mesa-glx \ - libtcmalloc-minimal4 - - ln -sf $(ldconfig -p | grep -Po "libtcmalloc_minimal.so.\d" | head -n 1) \ - 
/lib/x86_64-linux-gnu/libtcmalloc.so - - micromamba create -n comfyui - micromamba run -n comfyui mamba-skel - micromamba install -n comfyui -y \ - python="${PYTHON_VERSION}" \ - ipykernel \ - ipywidgets \ - nano - micromamba run -n comfyui install-pytorch -v "$PYTORCH_VERSION" - - # RunPod serverless support - micromamba create -n serverless - micromamba run -n serverless mamba-skel - micromamba install -n serverless \ - python=3.10 \ - python-magic \ - ipykernel \ - ipywidgets \ - nano - micromamba run -n serverless $PIP_INSTALL \ - runpod -} - - -build_common_install_jupyter_kernels() { - kernel_path=/usr/local/share/jupyter/kernels - - # Add the often-present "Python3 (ipykernel) as a comfyui alias" - rm -rf ${kernel_path}/python3 - dir="${kernel_path}/python3" - file="${dir}/kernel.json" - cp -rf ${kernel_path}/../_template ${dir} - sed -i 's/DISPLAY_NAME/'"Python3 (ipykernel)"'/g' ${file} - sed -i 's/PYTHON_MAMBA_NAME/'"comfyui"'/g' ${file} - - dir="${kernel_path}/comfyui" - file="${dir}/kernel.json" - cp -rf ${kernel_path}/../_template ${dir} - sed -i 's/DISPLAY_NAME/'"ComfyUI"'/g' ${file} - sed -i 's/PYTHON_MAMBA_NAME/'"comfyui"'/g' ${file} - - dir="${kernel_path}/serverless" - file="${dir}/kernel.json" - cp -rf ${kernel_path}/../_template ${dir} - sed -i 's/DISPLAY_NAME/'"Serverless"'/g' ${file} - sed -i 's/PYTHON_MAMBA_NAME/'"serverless"'/g' ${file} -} - -build_common_install_comfyui() { - # Set git SHA to latest if not provided - if [[ -z $COMFYUI_SHA ]]; then - export COMFYUI_SHA="$(curl -fsSL "https://api.github.com/repos/comfyanonymous/ComfyUI/commits/master" \ - | jq -r '.sha[0:7]')" - env-store COMFYUI_SHA - fi - - cd /opt - git clone https://github.com/comfyanonymous/ComfyUI - cd /opt/ComfyUI - git checkout "$COMFYUI_SHA" - - micromamba run -n comfyui ${PIP_INSTALL} -r requirements.txt -} - -build_common_run_tests() { - installed_pytorch_version=$(micromamba run -n comfyui python -c "import torch; print(torch.__version__)") - if [[ 
"$installed_pytorch_version" != "$PYTORCH_VERSION"* ]]; then - echo "Expected PyTorch ${PYTORCH_VERSION} but found ${installed_pytorch_version}\n" - exit 1 - fi -} - -build_common_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-serverless.sh b/build/COPY_ROOT/opt/ai-dock/bin/supervisor-serverless.sh deleted file mode 100755 index f4b89f51..00000000 --- a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-serverless.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -trap cleanup EXIT - -function cleanup() { - kill $(jobs -p) > /dev/null 2>&1 -} - -function start() { - source /opt/ai-dock/etc/environment.sh - if [[ ${SERVERLESS,,} != true ]]; then - printf "Refusing to start serverless worker without \$SERVERLESS=true\n" - exec sleep 10 - fi - - # Delay launch until workspace is ready - if [[ -f /run/workspace_sync || -f /run/container_config ]]; then - while [[ -f /run/workspace_sync || -f /run/container_config ]]; do - sleep 1 - done - fi - printf "Serverless worker started: %s\n" "$(date +"%x %T.%3N")" >> /var/log/timing_data - - if [[ -n $RUNPOD_ENDPOINT_ID ]]; then - printf "Starting RunPod serverless worker...\n" - micromamba -n serverless run \ - python -u /opt/serverless/providers/runpod/worker.py - else - printf "No serverless worker available in this environment" - exec sleep 10 - fi -} - -start 2>&1 \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_image2image.json b/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_image2image.json deleted file mode 100644 index b7964daf..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_image2image.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "input": { - "handler": "Image2Image", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": 
"your-webhook-url", - "webhook_extra_params": {}, - "ckpt_name": "v1-5-pruned-emaonly.ckpt", - "include_text": "photograph of a victorian woman, arms outstretched with angel wings. cloudy sky, meadow grass", - "exclude_text": "watermark, text", - "denoise": 0.87, - "input_image": "https://raw.githubusercontent.com/comfyanonymous/ComfyUI/master/input/example.png" - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_text2image.json b/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_text2image.json deleted file mode 100644 index b2c92158..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_text2image.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "input": { - "handler": "Text2Image", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "steps": 20, - "ckpt_name": "v1-5-pruned-emaonly.ckpt", - "sampler_name": "euler", - "scheduler": "normal", - "include_text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,", - "exclude_text": "text, watermark", - "width": 512, - "height": 512, - "batch_size": 1 - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_controlnet_t2i_adapters.json b/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_controlnet_t2i_adapters.json deleted file mode 100644 index a047ed90..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_controlnet_t2i_adapters.json +++ /dev/null @@ -1,132 +0,0 @@ -{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - 
"webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 891858402356003, - "steps": 20, - "cfg": 8, - "sampler_name": "uni_pc_bh2", - "scheduler": "normal", - "denoise": 1, - "model": [ - "34", - 0 - ], - "positive": [ - "23", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "5", - 0 - ] - }, - "class_type": "KSampler" - }, - "5": { - "inputs": { - "width": 832, - "height": 384, - "batch_size": 1 - }, - "class_type": "EmptyLatentImage" - }, - "7": { - "inputs": { - "text": "(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)", - "clip": [ - "34", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "34", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage" - }, - "20": { - "inputs": { - "image": "https://upload.wikimedia.org/wikipedia/commons/3/3e/Stereogram_Tut_Shark_Depthmap.png", - "upload": "image" - }, - "class_type": "LoadImage" - }, - "23": { - "inputs": { - "strength": 1.0000000000000004, - "conditioning": [ - "24", - 0 - ], - "control_net": [ - "33", - 0 - ], - "image": [ - "20", - 0 - ] - }, - "class_type": "ControlNetApply" - }, - "24": { - "inputs": { - "text": "underwater photograph shark\n\n\n\n", - "clip": [ - "34", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "33": { - "inputs": { - "control_net_name": "diff_control_sd15_depth_fp16.safetensors", - "model": [ - "34", - 0 - ] - }, - "class_type": "DiffControlNetLoader" - }, - "34": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - } - } - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_image2image.json 
b/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_image2image.json deleted file mode 100644 index a65bd5fd..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_image2image.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 280823642470253, - "steps": 20, - "cfg": 8, - "sampler_name": "dpmpp_2m", - "scheduler": "normal", - "denoise": 0.8700000000000001, - "model": [ - "14", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "12", - 0 - ] - }, - "class_type": "KSampler" - }, - "6": { - "inputs": { - "text": "photograph of victorian woman with wings, sky clouds, meadow grass", - "clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "7": { - "inputs": { - "text": "watermark, text", - "clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage" - }, - "10": { - "inputs": { - "image": "https://raw.githubusercontent.com/comfyanonymous/ComfyUI/master/input/example.png", - "upload": "image" - }, - "class_type": "LoadImage" - }, - "12": { - "inputs": { - "pixels": [ - "10", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEEncode" - }, - "14": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - } - } - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_text2image.json 
b/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_text2image.json deleted file mode 100644 index 14a36cf0..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_text2image.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 156680208700286, - "steps": 20, - "cfg": 8, - "sampler_name": "euler", - "scheduler": "normal", - "denoise": 1, - "model": [ - "4", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "5", - 0 - ] - }, - "class_type": "KSampler" - }, - "4": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - }, - "5": { - "inputs": { - "width": 512, - "height": 512, - "batch_size": 1 - }, - "class_type": "EmptyLatentImage" - }, - "6": { - "inputs": { - "text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "7": { - "inputs": { - "text": "text, watermark", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "4", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage" - } - } - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_upscale.json b/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_upscale.json deleted file mode 100644 index ae5fcbed..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_upscale.json +++ /dev/null @@ -1,116 +0,0 @@ 
-{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 833543590226030, - "steps": 20, - "cfg": 8, - "sampler_name": "euler", - "scheduler": "normal", - "denoise": 1, - "model": [ - "4", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "5", - 0 - ] - }, - "class_type": "KSampler" - }, - "4": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - }, - "5": { - "inputs": { - "width": 512, - "height": 512, - "batch_size": 1 - }, - "class_type": "EmptyLatentImage" - }, - "6": { - "inputs": { - "text": "masterpiece best quality girl standing in victorian clothing", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "7": { - "inputs": { - "text": "bad hands", - "clip": [ - "4", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "4", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "14", - 0 - ] - }, - "class_type": "SaveImage" - }, - "13": { - "inputs": { - "model_name": "RealESRGAN_x4.pth" - }, - "class_type": "UpscaleModelLoader" - }, - "14": { - "inputs": { - "upscale_model": [ - "13", - 0 - ], - "image": [ - "8", - 0 - ] - }, - "class_type": "ImageUpscaleWithModel" - } - } - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/swagger/openapi.yaml b/build/COPY_ROOT/opt/serverless/docs/swagger/openapi.yaml deleted file mode 100644 index 50ba2664..00000000 --- a/build/COPY_ROOT/opt/serverless/docs/swagger/openapi.yaml +++ /dev/null @@ -1,237 +0,0 @@ -openapi: 3.1.0 -info: - title: ComfyUI API 
- description: Send a payload to a local ComfyUI API and have your image(s) uploaded to s3 compatible storage - version: 0.1.0 - -components: - securitySchemes: - basicAuth: - type: http - scheme: basic - schemas: - BaseRequest: - type: object - required: - - input - properties: - id: - type: string - description: Define the job ID. Not required in RunPod serverless but useful if you have a queue in front of this endpoint - input: - type: object - required: - - handler - properties: - handler: - type: string - description: Handler class name - aws_access_key_id: - type: string - required: false - description: Alternatively set AWS_ACCESS_KEY_ID environment variable - aws_secret_access_key: - type: string - required: false - description: Alternatively set AWS_SECRET_ACCESS_KEY environment variable - aws_endpoint_url: - type: string - required: false - description: Alternatively set AWS_ENDPOINT_URL environment variable - aws_bucket_name: - type: string - required: false - description: Alternatively set AWS_BUCKET_NAME environment variable - webhook_url: - type: string - required: false - description: Webhook URL to invoke after a successful run or an error. Alternatively set WEBHOOK_URL environment variable - webhook_extra_params: - type: object - required: false - description: Extra params for webhook request - RawWorkflow: - description: Downloads URLs to input directory with no additional processing - Your application must modify the workflow as needed. - allOf: - - $ref: '#/components/schemas/BaseRequest' - - type: object - properties: - input: - type: object - required: - - workflow_json - properties: - workflow_json: - type: object - description: A ComfyUI workflow. Ensure all nodes and models are available to your container. - Text2Image: - description: Basic request bound to static workflow file text2image.json. Requires very little frontend work to implement. 
- allOf: - - $ref: '#/components/schemas/BaseRequest' - - type: object - properties: - input: - type: object - properties: - seed: - type: integer - description: Random if not provided - steps: - type: integer - sampler_name: - type: string - scheduler: - type: string - ckpt_name: - type: string - description: Checkpoint file name. Ensure it is present in the container. - width: - type: integer - height: - type: integer - include_text: - type: string - description: Positive prompt - exclude_text: - type: string - description: Negative prompt - Image2Image: - description: Basic request bound to static workflow file image2image.json. Requires very little frontend work to implement. - allOf: - - $ref: '#/components/schemas/BaseRequest' - - type: object - properties: - input: - type: object - properties: - seed: - type: integer - description: Random if not provided - steps: - type: integer - sampler_name: - type: string - scheduler: - type: string - denoise: - type: number - ckpt_name: - type: string - description: Checkpoint file name. Ensure it is present in the container. - include_text: - type: string - description: Positive prompt - exclude_text: - type: string - description: Negative prompt - input_image: - type: string - description: URL of input image - -security: - - basicAuth: [] - -paths: - /rp-api/runsync: - post: - summary: "RunPod compatible synchronous endpoint" - description: "

Only /runsync (blocking) is available in this hosted API. Full functionality is available when running on RunPod's serverless infrastructure.

This is a work-in-progress and will receive feature updates.

aws_* can be omitted if you have set the corresponding AWS_* environment variables.

The examples will use models and nodes installed by the default provisioning script.

You must have an s3 compatible backend defined - Backblaze b2 may be a good choice.

" - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/RawWorkflow' - examples: - RawWorkflow (Text2Image): - description: | - Text to image using a simple workflow. - - Downloads URLs to input directory with no additional processing - Your application must modify the workflow as needed. - - Ensure that you set your aws_* values. - externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_text2image.json' - RawWorkflow (Image2Image): - description: | - Image to image using a simple workflow. - - Downloads URLs to input directory with no additional processing - Your application must modify the workflow as needed. - - Ensure that you set your aws_* values. - externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_image2image.json' - RawWorkflow (Controlnet/T2I Adapters): - description: | - Controlnet example using diff_control_sd15_depth_fp16.safetensors. - - Downloads URLs to input directory with no additional processing - Your application must modify the workflow as needed. - - Ensure that you set your aws_* values. - externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_controlnet_t2i_adapters.json' - RawWorkflow (Text2Image Upscaling): - description: | - Text to image with RealESRGAN_x4 upscaling. - - Downloads URLs to input directory with no additional processing - Your application must modify the workflow as needed. - - Ensure that you set your aws_* values. - externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/raw_upscale.json' - Text2Image (Workflow Bound): - description: | - Text to image bound to workflow file text2image.json - - Ensure that you set your aws_* values. 
- externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_text2image.json' - Image2Image (Workflow Bound): - description: | - Image to image bound to workflow file image2image.json - - Ensure that you set your aws_* values. - externalValue: 'https://raw.githubusercontent.com/ai-dock/comfyui/main/build/COPY_ROOT/opt/serverless/docs/example_payloads/bound_image2image.json' - - responses: - default: - description: Success (RP is returning 200 for failures(!)) - content: - application/json: - schema: - type: object - properties: - output: - type: object - properties: - images: - type: array - items: - type: object - properties: - local_path: - type: string - description: File location on local disk - example: /opt/ComfyUI/output//ComfyUI__1.png - url: - type: string - description: URL to uploaded image - example: https://s3.provider.com///ComfyUI__1.png - timings: - type: object - properties: - job_time_received: - type: string - description: The time the job was received by the worker - example: datetime string - job_time_queued: - type: string - description: The time the job was put into the ComfyUI queue - example: datetime string - job_time_processed: - type: string - description: The time the job was completed by ComfyUI - example: datetime string - job_time_completed: - type: string - description: The time the job was finished and URLs for all artifacts retrieved - example: datetime string - job_time_total: - type: integer - description: Total time in seconds for job to complete - example: 12 diff --git a/build/COPY_ROOT/opt/serverless/handlers/basehandler.py b/build/COPY_ROOT/opt/serverless/handlers/basehandler.py deleted file mode 100644 index 94ef1051..00000000 --- a/build/COPY_ROOT/opt/serverless/handlers/basehandler.py +++ /dev/null @@ -1,234 +0,0 @@ -import json -import requests -import datetime -import time -import os -import base64 -import shutil -from utils.s3utils import s3utils 
-from utils.network import Network -from utils.filesystem import Filesystem - -class BaseHandler: - ENDPOINT_PROMPT="http://127.0.0.1:18188/prompt" - ENDPOINT_QUEUE="http://127.0.0.1:18188/queue" - ENDPOINT_HISTORY="http://127.0.0.1:18188/history" - INPUT_DIR=f"/opt/ComfyUI/input/" - OUTPUT_DIR=f"/opt/ComfyUI/output/" - - request_id = None - comfyui_job_id = None - - - def __init__(self, payload, workflow_json = None): - self.job_time_received = datetime.datetime.now() - self.payload = payload - self.workflow_json = workflow_json - self.s3utils = s3utils(self.get_s3_settings()) - self.request_id = str(self.get_value( - "request_id", - None - ) - ) - self.set_prompt() - - def set_prompt(self): - if self.workflow_json: - with open(self.workflow_json, 'r') as f: - self.prompt = {"prompt": json.load(f)} - else: - self.prompt = {"prompt": self.payload["workflow_json"]} - - def get_value(self, key, default = None): - if key not in self.payload and default == None: - raise IndexError(f"{key} required but not set") - elif key not in self.payload: - return default - elif Network.is_url(self.payload[key]) and not (key.startswith("aws_") or key.startswith("webhook_")): - return self.get_url_content(self.payload[key]) - else: - return self.payload[key] - - def get_input_dir(self): - return f"{self.INPUT_DIR}" - - def get_output_dir(self): - return f"{self.OUTPUT_DIR}" - - def replace_urls(self, data): - if isinstance(data, dict): - for key, value in data.items(): - data[key] = self.replace_urls(value) - elif isinstance(data, list): - for i, item in enumerate(data): - data[i] = self.replace_urls(item) - elif isinstance(data, str) and Network.is_url(data): - data = self.get_url_content(data) - return data - - def get_url_content(self, url): - existing_file = Filesystem.find_input_file( - self.get_input_dir(), - Network.get_url_hash(url) - ) - if existing_file: - return os.path.basename(existing_file) - else: - return os.path.basename(Network.download_file( - url, - 
self.get_input_dir(), - self.request_id - ) - ) - - def is_server_ready(self): - try: - req = requests.head(self.ENDPOINT_PROMPT) - return True if req.status_code == 200 else False - except: - return False - - def queue_job(self, timeout = 30): - try: - self.job_time_queued = datetime.datetime.now() - while ((datetime.datetime.now() - self.job_time_queued).seconds < timeout) and not self.is_server_ready(): - print(f"waiting for local server...") - time.sleep(0.5) - - if not self.is_server_ready(): - self.invoke_webhook(success=False, error=f"Server not ready after timeout ({timeout}s)") - raise requests.RequestException(f"Server not ready after timeout ({timeout}s)") - - print ("Posting job to local server...") - data = json.dumps(self.prompt).encode('utf-8') - response = requests.post(self.ENDPOINT_PROMPT, data=data).json() - if "prompt_id" in response: - return response["prompt_id"] - elif "node_errors" in response: - self.invoke_webhook(success=False, error=response["node_errors"]) - raise requests.RequestException(response["node_errors"]) - elif "error" in response: - self.invoke_webhook(success=False, error=response["error"]) - raise requests.RequestException(response["error"]) - except requests.RequestException: - self.invoke_webhook(success=False, error="Unknown error") - raise - except: - self.invoke_webhook(success=False, error="Unknown error") - raise requests.RequestException("Failed to queue prompt") - - def get_job_status(self): - try: - history = requests.get(self.ENDPOINT_HISTORY).json() - if self.comfyui_job_id in history: - self.job_time_processed = datetime.datetime.now() - return "complete" - queue = requests.get(self.ENDPOINT_QUEUE).json() - for job in queue["queue_running"]: - if self.comfyui_job_id in job: - return "running" - for job in queue["queue_pending"]: - if self.comfyui_job_id in job: - return "pending" - except: - self.invoke_webhook(success=False, error="Failed to queue job") - raise requests.RequestException("Failed to queue job") 
- - def image_to_base64(self, path): - with open(path, "rb") as f: - b64 = (base64.b64encode(f.read())) - return "data:image/png;charset=utf-8;base64, " + b64 - - def get_result(self, job_id): - result = requests.get(self.ENDPOINT_HISTORY).json()[self.comfyui_job_id] - - prompt = result["prompt"] - outputs = result["outputs"] - - self.result = { - "images": [], - "timings": {} - } - - custom_output_dir = f"{self.OUTPUT_DIR}{self.request_id}" - os.makedirs(custom_output_dir, exist_ok = True) - - for key, value in outputs.items(): - for inner_key, inner_value in value.items(): - if isinstance(inner_value, list): - for item in inner_value: - if item.get("type") == "output": - original_path = f"{self.OUTPUT_DIR}{item['subfolder']}/{item['filename']}" - new_path = f"{custom_output_dir}/{item['filename']}" - - # Handle duplicated request where output file is not re-generated - if os.path.islink(original_path): - shutil.copyfile(os.path.realpath(original_path), new_path) - else: - os.rename(original_path, new_path) - os.symlink(new_path, original_path) - key = f"{self.request_id}/{item['filename']}" - self.result["images"].append({ - "local_path": new_path, - #"base64": self.image_to_base64(path), - # make this work first, then threads - "url": self.s3utils.file_upload(new_path, key) - }) - - self.job_time_completed = datetime.datetime.now() - self.result["timings"] = { - "job_time_received": self.job_time_received.ctime(), - "job_time_queued": self.job_time_queued.ctime(), - "job_time_processed": self.job_time_processed.ctime(), - "job_time_completed": self.job_time_completed.ctime(), - "job_time_total": (self.job_time_completed - self.job_time_received).seconds - } - - return self.result - - def get_s3_settings(self): - settings = {} - settings["aws_access_key_id"] = self.get_value("aws_access_key_id", os.environ.get("AWS_ACCESS_KEY_ID")) - settings["aws_secret_access_key"] = self.get_value("aws_secret_access_key", os.environ.get("AWS_SECRET_ACCESS_KEY")) - 
settings["aws_endpoint_url"] = self.get_value("aws_endpoint_url", os.environ.get("AWS_ENDPOINT_URL")) - settings["aws_bucket_name"] = self.get_value("aws_bucket_name", os.environ.get("AWS_BUCKET_NAME")) - settings["connect_timeout"] = 5 - settings["connect_attempts"] = 1 - return settings - - # Webhook cannot be mandatory. Quick fix - def invoke_webhook(self, success = False, result = {}, error = ""): - try: - webhook_url = self.get_value("webhook_url", os.environ.get("WEBHOOK_URL")) - except: - return None - webhook_extra_params = self.get_value("webhook_extra_params", {}) - - if Network.is_url(webhook_url): - data = {} - data["job_id"] = self.comfyui_job_id - data["request_id"] = self.request_id - data["success"] = success - if result: - data["result"] = result - if error: - data["error"] = error - if webhook_extra_params: - data["extra_params"] = webhook_extra_params - Network.invoke_webhook(webhook_url, data) - else: - print("webhook_url is NOT valid!") - - def handle(self): - self.comfyui_job_id = self.queue_job(30) - - status = None - while status != "complete": - status = self.get_job_status() - if status != "complete": - print (f"Waiting for {status} job to complete") - time.sleep(0.5) - - result = self.get_result(self.comfyui_job_id) - self.invoke_webhook(success=True, result=result) - return result \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/handlers/hello_world.py b/build/COPY_ROOT/opt/serverless/handlers/hello_world.py deleted file mode 100644 index 497c7542..00000000 --- a/build/COPY_ROOT/opt/serverless/handlers/hello_world.py +++ /dev/null @@ -1,7 +0,0 @@ -def run(payload): - if payload["name"]: - name = payload["name"] - else: - name = "World" - - return f"Hello {name}!" 
\ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/handlers/rawworkflow.py b/build/COPY_ROOT/opt/serverless/handlers/rawworkflow.py deleted file mode 100644 index a03dd738..00000000 --- a/build/COPY_ROOT/opt/serverless/handlers/rawworkflow.py +++ /dev/null @@ -1,141 +0,0 @@ -from handlers.basehandler import BaseHandler - -""" -Handler classes are generally bound to a specific workflow file. -To modify values we have to be confident in the json structure. - -One exception - RawWorkflow will send payload['workflow_json'] to the ComfyUI API after -downloading any URL's to the input directory and replacing the URL with a local path. - -Any other modifications you need, such as randomising seeds should be done before submitting. -""" - -class RawWorkflow(BaseHandler): - - WORKFLOW_JSON = None - - def __init__(self, payload): - super().__init__(payload, self.WORKFLOW_JSON) - self.apply_modifiers() - - - def apply_modifiers(self): - self.prompt = self.replace_urls(self.prompt) - - -""" -aws_* values can alternatively be set as AWS_* environment variables -Example Request Body: - -{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 280823642470253, - "steps": 20, - "cfg": 8, - "sampler_name": "dpmpp_2m", - "scheduler": "normal", - "denoise": 0.8700000000000001, - "model": [ - "14", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "12", - 0 - ] - }, - "class_type": "KSampler" - }, - "6": { - "inputs": { - "text": "photograph of victorian woman with wings, sky clouds, meadow grass", - "clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "7": { - "inputs": { - "text": "watermark, text", - 
"clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage" - }, - "10": { - "inputs": { - "image": "https://raw.githubusercontent.com/comfyanonymous/ComfyUI/master/input/example.png", - "upload": "image" - }, - "class_type": "LoadImage" - }, - "12": { - "inputs": { - "pixels": [ - "10", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEEncode" - }, - "14": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - } - } - } -} - -""" - diff --git a/build/COPY_ROOT/opt/serverless/handlers/text2image.py b/build/COPY_ROOT/opt/serverless/handlers/text2image.py deleted file mode 100644 index d64f4230..00000000 --- a/build/COPY_ROOT/opt/serverless/handlers/text2image.py +++ /dev/null @@ -1,83 +0,0 @@ -from handlers.basehandler import BaseHandler -import random -import time - - -""" -Handler classes are generally bound to a specific workflow file. -To modify values we have to be confident in the json structure. - -One exception - RawWorkflow will send payload['workflow_json'] to the ComfyUI API after -downloading any URL's to the input directory and replacing the URL with a local path. 
-""" - -class Text2Image(BaseHandler): - - WORKFLOW_JSON = "/opt/serverless/workflows/text2image.json" - - def __init__(self, payload): - super().__init__(payload, self.WORKFLOW_JSON) - self.apply_modifiers() - - - def apply_modifiers(self): - timestr = time.strftime("%Y%m%d-%H%M%S") - self.prompt["prompt"]["3"]["inputs"]["seed"] = self.get_value( - "seed", - random.randint(0,2**32)) - self.prompt["prompt"]["3"]["inputs"]["steps"] = self.get_value( - "steps", - 20) - self.prompt["prompt"]["3"]["inputs"]["sampler_name"] = self.get_value( - "sampler_name", - "euler") - self.prompt["prompt"]["3"]["inputs"]["scheduler"] = self.get_value( - "scheduler", - "normal") - self.prompt["prompt"]["4"]["inputs"]["ckpt_name"] = self.get_value( - "ckpt_name", - "v1-5-pruned-emaonly.ckpt") - self.prompt["prompt"]["5"]["inputs"]["width"] = self.get_value( - "width", - 512) - self.prompt["prompt"]["5"]["inputs"]["height"] = self.get_value( - "height", - 512) - self.prompt["prompt"]["5"]["inputs"]["batch_size"] = self.get_value( - "batch_size", - 1) - self.prompt["prompt"]["6"]["inputs"]["text"] = self.get_value( - "include_text", - "") - self.prompt["prompt"]["7"]["inputs"]["text"] = self.get_value( - "exclude_text", - "") - - - -""" -Example Request Body: - -{ - "input": { - "handler": "Text2Image", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "steps": 20, - "ckpt_name": "v1-5-pruned-emaonly.ckpt", - "sampler_name": "euler", - "scheduler": "normal", - "include_text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,", - "exclude_text": "text, watermark", - "width": 512, - "height": 512, - "batch_size": 1 - } -} - -""" - diff --git a/build/COPY_ROOT/opt/serverless/providers/runpod/test_input.json 
b/build/COPY_ROOT/opt/serverless/providers/runpod/test_input.json deleted file mode 100644 index a65bd5fd..00000000 --- a/build/COPY_ROOT/opt/serverless/providers/runpod/test_input.json +++ /dev/null @@ -1,109 +0,0 @@ -{ - "input": { - "handler": "RawWorkflow", - "aws_access_key_id": "your-s3-access-key", - "aws_secret_access_key": "your-s3-secret-access-key", - "aws_endpoint_url": "https://my-endpoint.backblaze.com", - "aws_bucket_name": "your-bucket", - "webhook_url": "your-webhook-url", - "webhook_extra_params": {}, - "workflow_json": { - "3": { - "inputs": { - "seed": 280823642470253, - "steps": 20, - "cfg": 8, - "sampler_name": "dpmpp_2m", - "scheduler": "normal", - "denoise": 0.8700000000000001, - "model": [ - "14", - 0 - ], - "positive": [ - "6", - 0 - ], - "negative": [ - "7", - 0 - ], - "latent_image": [ - "12", - 0 - ] - }, - "class_type": "KSampler" - }, - "6": { - "inputs": { - "text": "photograph of victorian woman with wings, sky clouds, meadow grass", - "clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "7": { - "inputs": { - "text": "watermark, text", - "clip": [ - "14", - 1 - ] - }, - "class_type": "CLIPTextEncode" - }, - "8": { - "inputs": { - "samples": [ - "3", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEDecode" - }, - "9": { - "inputs": { - "filename_prefix": "ComfyUI", - "images": [ - "8", - 0 - ] - }, - "class_type": "SaveImage" - }, - "10": { - "inputs": { - "image": "https://raw.githubusercontent.com/comfyanonymous/ComfyUI/master/input/example.png", - "upload": "image" - }, - "class_type": "LoadImage" - }, - "12": { - "inputs": { - "pixels": [ - "10", - 0 - ], - "vae": [ - "14", - 2 - ] - }, - "class_type": "VAEEncode" - }, - "14": { - "inputs": { - "ckpt_name": "v1-5-pruned-emaonly.ckpt" - }, - "class_type": "CheckpointLoaderSimple" - } - } - } -} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/providers/runpod/worker.py b/build/COPY_ROOT/opt/serverless/providers/runpod/worker.py 
deleted file mode 100644 index 0047e905..00000000 --- a/build/COPY_ROOT/opt/serverless/providers/runpod/worker.py +++ /dev/null @@ -1,46 +0,0 @@ -import sys -sys.path.append('/opt/serverless') -from pydoc import locate -import runpod -import uuid - -def get_handler(payload): - try: - c_name = payload["handler"] - m_name = c_name.lower() - handler_class = locate(f"handlers.{m_name}.{c_name}") - handler = handler_class(payload) - except: - raise - - return handler - -''' -Handler to be specified in input.handler -''' -def worker(event): - result = {} - try: - payload = event["input"] - if is_test_job(event): - payload["request_id"] = str(uuid.uuid4()) - else: - payload["request_id"] = event["id"] - handler = get_handler(payload) - result = handler.handle() - except Exception as e: - result = {} - result["error"] = str(e) - - return result - -def is_test_job(event): - test_values = [ - "local_test", - "test_job" - ] - return event["id"] in test_values - -runpod.serverless.start({ - "handler": worker -}) \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/utils/filesystem.py b/build/COPY_ROOT/opt/serverless/utils/filesystem.py deleted file mode 100644 index 7beded31..00000000 --- a/build/COPY_ROOT/opt/serverless/utils/filesystem.py +++ /dev/null @@ -1,24 +0,0 @@ -import glob -import magic -import mimetypes - -class Filesystem: - def __init__(self): - pass - - @staticmethod - def find_input_file(directory, str_hash): - # Hashed url should have only one result - try: - matched = glob.glob(f'{directory}/{str_hash}*') - if len(matched) > 0: - return matched[0] - return None - except: - return None - - @staticmethod - def get_file_extension(filepath): - mime_str = magic.from_file(filepath, mime=True) - return mimetypes.guess_extension(mime_str) - \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/utils/network.py b/build/COPY_ROOT/opt/serverless/utils/network.py deleted file mode 100644 index a50cadf7..00000000 --- 
a/build/COPY_ROOT/opt/serverless/utils/network.py +++ /dev/null @@ -1,56 +0,0 @@ -from urllib.parse import urlparse -import requests -import os -import uuid -import hashlib -from .filesystem import Filesystem - -class Network: - def __init__(self): - pass - - @staticmethod - def is_url(value): - try: - return bool(urlparse(value)[0]) - except: - return False - - @staticmethod - def get_url_hash(url): - return hashlib.md5((f'{url}').encode()).hexdigest() - - # todo - threads - @staticmethod - def download_file(url, target_dir, request_id): - try: - file_name_hash = Network.get_url_hash(url) - os.makedirs(target_dir, exist_ok=True) - response = requests.get(url, timeout=5) - if response.status_code > 399: - raise requests.RequestException(f"Unable to download {url}") - - filepath_hash = f"{target_dir}/{file_name_hash}" - # ignore above - with open(filepath_hash, mode="wb") as file: - file.write(response.content) - - file_extension = Filesystem.get_file_extension(filepath_hash) - filepath = f"{filepath_hash}{file_extension}" - os.replace(filepath_hash, filepath) - - except: - raise - - print(f"Downloaded {url} to {filepath}") - return filepath - - @staticmethod - def invoke_webhook(url, data): - try: - response = requests.post(url, json=data) - print(f"Invoke webhook {url} with data {data} - status {response.status_code}") - return response - except requests.exceptions.RequestException as e: - print(f"Error making POST request: {e}") - return None diff --git a/build/COPY_ROOT/opt/serverless/utils/s3utils.py b/build/COPY_ROOT/opt/serverless/utils/s3utils.py deleted file mode 100644 index a86fa43b..00000000 --- a/build/COPY_ROOT/opt/serverless/utils/s3utils.py +++ /dev/null @@ -1,45 +0,0 @@ -from botocore.client import Config -import boto3 - -class s3utils: - def __init__(self, args): - self.aws_access_key_id = args["aws_access_key_id"] - self.aws_secret_access_key = args["aws_secret_access_key"] - self.aws_endpoint_url = args["aws_endpoint_url"] - self.aws_bucket_name 
= args["aws_bucket_name"] - self.connect_timeout = args["connect_timeout"] - self.connect_attempts = args["connect_attempts"] - self.config = Config( - connect_timeout=self.connect_timeout, - retries = dict( - max_attempts = self.connect_attempts - ), - signature_version = 'v4' - ) - self.session = boto3.session.Session( - aws_access_key_id = self.aws_access_key_id, - aws_secret_access_key = self.aws_secret_access_key, - ) - - def get_client(self): - return self.session.client( - service_name="s3", - endpoint_url=self.aws_endpoint_url, - config=self.config - ) - - def file_upload(self, filepath, key): - client = self.get_client() - try: - client.upload_file(filepath, self.aws_bucket_name, key) - - presigned_url = client.generate_presigned_url( - 'get_object', - Params={ - 'Bucket': f'{self.aws_bucket_name}', - 'Key': f'{key}' - }, ExpiresIn=604800) - except: - return "" - - return presigned_url diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/amd.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/amd.sh new file mode 100755 index 00000000..f0dad6df --- /dev/null +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/amd.sh @@ -0,0 +1,16 @@ +#!/bin/false + +build_amd_main() { + build_amd_install_deps + build_common_run_tests +} + +build_amd_install_deps() { + "$COMFYUI_VENV_PIP" install --no-cache-dir \ + torch==${PYTORCH_VERSION} \ + torchvision \ + torchaudio \ + --index-url=https://download.pytorch.org/whl/rocm${ROCM_VERSION} +} + +build_amd_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/clean.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/clean.sh new file mode 100755 index 00000000..3cea2c6b --- /dev/null +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/clean.sh @@ -0,0 +1,8 @@ +#!/bin/false + +# Tidy up and keep image small +apt-get clean -y + +fix-permissions.sh -o container +rm /etc/ld.so.cache +ldconfig \ No newline at end of file diff --git 
a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh new file mode 100755 index 00000000..4e3a7cd5 --- /dev/null +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/common.sh @@ -0,0 +1,48 @@ +#!/bin/false + +source /opt/ai-dock/etc/environment.sh + +build_common_main() { + build_common_create_venv +} + +build_common_create_venv() { + apt-get update + $APT_INSTALL \ + "python${PYTHON_VERSION}" \ + "python${PYTHON_VERSION}-dev" \ + "python${PYTHON_VERSION}-venv" + + # ComfyUI venv + "python${PYTHON_VERSION}" -m venv "$COMFYUI_VENV" + "$COMFYUI_VENV_PIP" install --no-cache-dir \ + ipykernel \ + ipywidgets + "$COMFYUI_VENV_PYTHON" -m ipykernel install \ + --name="comfyui" \ + --display-name="Python${PYTHON_VERSION} (comfyui)" + # Add the default Jupyter kernel as an alias of comfyui + "$COMFYUI_VENV_PYTHON" -m ipykernel install \ + --name="python3" \ + --display-name="Python3 (ipykernel)" + + # API venv + "python${PYTHON_VERSION}" -m venv "$API_VENV" + "$API_VENV_PIP" install --no-cache-dir \ + ipykernel \ + ipywidgets + "$API_VENV_PYTHON" -m ipykernel install \ + --name="api-wrapper" \ + --display-name="Python${PYTHON_VERSION} (api-wrapper)" +} + + +build_common_run_tests() { + installed_pytorch_version=$("$COMFYUI_VENV_PYTHON" -c "import torch; print(torch.__version__)") + if [[ "$installed_pytorch_version" != "$PYTORCH_VERSION"* ]]; then + echo "Expected PyTorch ${PYTORCH_VERSION} but found ${installed_pytorch_version}\n" + exit 1 + fi +} + +build_common_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/cpu.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/cpu.sh new file mode 100755 index 00000000..a2a4f854 --- /dev/null +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/cpu.sh @@ -0,0 +1,16 @@ +#!/bin/false + +build_cpu_main() { + build_cpu_install_deps + build_common_run_tests +} + +build_cpu_install_deps() { + "$COMFYUI_VENV_PIP" install
--no-cache-dir \ + torch==${PYTORCH_VERSION} \ + torchvision \ + torchaudio \ + --extra-index-url=https://download.pytorch.org/whl/cpu +} + +build_cpu_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/init.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/init.sh similarity index 91% rename from build/COPY_ROOT/opt/ai-dock/bin/build/layer0/init.sh rename to build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/init.sh index c8d77e94..974e2560 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/init.sh +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/init.sh @@ -17,4 +17,4 @@ else exit 1 fi -source /opt/ai-dock/bin/build/layer0/clean.sh \ No newline at end of file +source /opt/ai-dock/bin/build/layer0/clean.sh diff --git a/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh new file mode 100755 index 00000000..6c62ab8a --- /dev/null +++ b/build/COPY_ROOT_0/opt/ai-dock/bin/build/layer0/nvidia.sh @@ -0,0 +1,27 @@ +#!/bin/false + +build_nvidia_main() { + build_nvidia_install_deps + build_common_run_tests + build_nvidia_run_tests +} + +build_nvidia_install_deps() { + short_cuda_version="cu$(cut -d '.' 
-f 1,2 <<< "${CUDA_VERSION}" | tr -d '.')" + "$COMFYUI_VENV_PIP" install --no-cache-dir \ + torch==${PYTORCH_VERSION} \ + torchvision \ + torchaudio \ + xformers \ + --index-url=https://download.pytorch.org/whl/$short_cuda_version +} + +build_nvidia_run_tests() { + installed_pytorch_cuda_version=$("$COMFYUI_VENV_PYTHON" -c "import torch; print(torch.version.cuda)") + if [[ "$CUDA_VERSION" != "$installed_pytorch_cuda_version"* ]]; then + echo "Expected PyTorch CUDA ${CUDA_VERSION} but found ${installed_pytorch_cuda_version}\n" + exit 1 + fi +} + +build_nvidia_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/.gitkeep b/build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/.gitkeep similarity index 100% rename from build/COPY_ROOT/etc/supervisor/supervisord/conf.d/.gitkeep rename to build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/.gitkeep diff --git a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui.conf b/build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui.conf similarity index 100% rename from build/COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui.conf rename to build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui.conf diff --git a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui_rp_api.conf b/build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui_api_wrapper.conf similarity index 74% rename from build/COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui_rp_api.conf rename to build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui_api_wrapper.conf index 6d96b913..a8c5b6fc 100644 --- a/build/COPY_ROOT/etc/supervisor/supervisord/conf.d/comfyui_rp_api.conf +++ b/build/COPY_ROOT_1/etc/supervisor/supervisord/conf.d/comfyui_api_wrapper.conf @@ -1,7 +1,7 @@ -[program:comfyui_rp_api] +[program:comfyui_api_wrapper] user=$USER_NAME environment=PROC_NAME="%(program_name)s",USER=$USER_NAME,HOME=/home/$USER_NAME -command=/opt/ai-dock/bin/supervisor-comfyui-rp-api.sh
+command=/opt/ai-dock/bin/supervisor-comfyui-api-wrapper.sh process_name=%(program_name)s numprocs=1 directory=/home/$USER_NAME @@ -14,7 +14,7 @@ stopsignal=TERM stopwaitsecs=10 stopasgroup=true killasgroup=true -stdout_logfile=/var/log/supervisor/comfyui-rp-api.log +stdout_logfile=/var/log/supervisor/comfyui-api-wrapper.log stdout_logfile_maxbytes=10MB stdout_logfile_backups=1 stderr_logfile_maxbytes=0 diff --git a/build/COPY_ROOT/opt/serverless/handlers/__init__.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/__init__.py similarity index 100% rename from build/COPY_ROOT/opt/serverless/handlers/__init__.py rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/__init__.py diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/config.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/config.py new file mode 100644 index 00000000..2cb99c31 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/config/config.py @@ -0,0 +1,13 @@ +import os +COMFYUI_API_BASE="http://127.0.0.1:18188" +COMFYUI_API_PROMPT=f"{COMFYUI_API_BASE}/prompt" +COMFYUI_API_QUEUE=f"{COMFYUI_API_BASE}/queue" +COMFYUI_API_HISTORY=f"{COMFYUI_API_BASE}/history" + +if os.getenv("API_CACHE", 'memory').lower() == "redis": + CACHE_TYPE="redis" +else: + CACHE_TYPE = "memory" + +INPUT_DIR=f"/opt/ComfyUI/input/" +OUTPUT_DIR=f"/opt/ComfyUI/output/" diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/environment.yaml b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/environment.yaml new file mode 100644 index 00000000..26f82ab2 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/environment.yaml @@ -0,0 +1,7 @@ +name: comfyui-api +channels: + - conda-forge +dependencies: + - python=3.10 + - uvicorn=0.23 + - fastapi=0.103 diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/main.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/main.py new file mode 100644 index 00000000..dd875c1b --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/main.py @@ -0,0 +1,98 @@ +import 
asyncio +from config import config +from fastapi import FastAPI, Response, Body +from typing import Annotated, List +from aiocache import Cache, SimpleMemoryCache +from requestmodels.models import Payload +from responses.result import Result +from workers.preprocess_worker import PreprocessWorker +from workers.generation_worker import GenerationWorker +from workers.postprocess_worker import PostprocessWorker +import uuid + +app = FastAPI(root_path="/ai-dock/api") + +@app.on_event("startup") +async def startup_event(): + asyncio.create_task(main()) + +# Simple memory based caching by default +# This worker is not expected to handle disaster recovery +# See ai-dock/comfyui-load-balancer for advanced usage (TODO) +if config.CACHE_TYPE == "redis": + request_store = Cache(Cache.REDIS, namespace="request_store") + response_store = Cache(Cache.REDIS, namespace="response_store") +else: + request_store = SimpleMemoryCache(namespace="request_store") + response_store = SimpleMemoryCache(namespace="response_store") + + # Check payload for URLs and download as required + preprocess_queue = asyncio.Queue() + # Generate outputs with ComfyUI + generation_queue = asyncio.Queue() + # Upload outputs, webhook, cleanup + postprocess_queue = asyncio.Queue() + + +async def main(): + worker_config = { + "preprocess_queue": preprocess_queue, + "generation_queue": generation_queue, + "postprocess_queue": postprocess_queue, + "request_store": request_store, + "response_store": response_store, + } + + preprocess_workers = [PreprocessWorker(i, worker_config) for i in range(1, 4)] + preprocess_tasks = [asyncio.create_task(worker.work()) for worker in preprocess_workers] + + # One initially - May extend this to several + generation_workers = [GenerationWorker(i, worker_config) for i in range(1, 2)] + generation_tasks = [asyncio.create_task(worker.work()) for worker in generation_workers] + + postprocess_workers = [PostprocessWorker(i, worker_config) for i in range(1, 4)] + postprocess_tasks = 
[asyncio.create_task(worker.work()) for worker in postprocess_workers] + + # Wait indefinitely + await asyncio.gather(*preprocess_tasks, *generation_tasks, *postprocess_tasks) + + + +@app.post('/payload', response_model=Result, status_code=202) +async def payload( + payload: Annotated[ + Payload, + Body( + openapi_examples=Payload.get_openapi_examples() + ), + ], +): + + if not payload.input.request_id: + payload.input.request_id = str(uuid.uuid4()) + request_id = payload.input.request_id + + result_pending = Result(id=request_id) + + # Immediately store request for crash recovery (redis) + await request_store.set(request_id, payload) + await response_store.set(request_id, result_pending) + await preprocess_queue.put(request_id) + + return result_pending + + +@app.get('/result/{request_id}', response_model=Result, status_code=200) +async def result(request_id: str, response: Response): + result = await response_store.get(request_id) + if not result: + result = Result(id=request_id, status="failed", message="Request ID not found") + response.status_code = 404 + + return result + +@app.get('/queue-info', response_model=List[str]) +async def queue_info(): + return list(request_queue.queue) + + \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/utils/__init__.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/__init__.py similarity index 100% rename from build/COPY_ROOT/opt/serverless/utils/__init__.py rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/__init__.py diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/basemodifier.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/basemodifier.py new file mode 100644 index 00000000..006b2fbb --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/basemodifier.py @@ -0,0 +1,140 @@ +import asyncio +import os +import json +import hashlib +import aiofiles +import aiohttp +import magic +import mimetypes +from urllib.parse import urlparse +from config 
import config +from pathlib import Path + +class BaseModifier: + WORKFLOW_JSON = "" + + def __init__(self, modifications={}): + self.modifications = modifications + self.input_dir = config.INPUT_DIR + + async def load_workflow(self, workflow={}): + if workflow and not self.WORKFLOW_JSON: + self.workflow = workflow + else: + try: + async with aiofiles.open(self.WORKFLOW_JSON, 'r') as f: + file_content = await f.read() + self.workflow = json.loads(file_content) + except Exception as e: + raise Exception(f"Could not load workflow ({e})") + + async def modify_workflow_value(self, key, default = None): + """ + Modify a workflow value after loading the json. + """ + if key not in self.modifications and default == None: + raise IndexError(f"{key} required but not set") + elif key not in self.modifications: + return default + else: + return self.modifications[key] + + async def replace_workflow_urls(self, data): + """ + Find all URL strings in the prompt and replace the URL string with a filepath + """ + if isinstance(data, dict): + for key, value in data.items(): + data[key] = await self.replace_workflow_urls(value) + elif isinstance(data, list): + for i, item in enumerate(data): + data[i] = await self.replace_workflow_urls(item) + elif isinstance(data, str) and self.is_url(data): + data = await self.get_url_content(data) + return data + + async def get_url_content(self, url): + """ + Download from URL to ComfyUI input directory as hash.ext to avoid downloading the resource + multiple times + """ + filename_without_extension = self.get_url_hash(url) + existing_file = await self.find_input_file( + self.input_dir, + filename_without_extension + ) + if existing_file: + return os.path.basename(existing_file) + else: + file_name = os.path.basename(await self.download_file( + url, + self.input_dir + )) + return file_name + + def is_url(self, value): + try: + return bool(urlparse(value)[0]) + except: + return False + + def get_url_hash(self, url): + return 
hashlib.md5((f'{url}').encode()).hexdigest() + + async def download_file(self, url, target_dir): + try: + file_name_hash = self.get_url_hash(url) + os.makedirs(target_dir, exist_ok=True) + + async with aiohttp.ClientSession() as session: + async with session.get(url) as response: + if response.status > 399: + raise aiohttp.ClientResponseError(status=response.status, message=f"Unable to download {url}") + + filepath_hash = f"{target_dir}/{file_name_hash}" + async with aiofiles.open(filepath_hash, mode="wb") as file: + await file.write(await response.read()) + + file_extension = await self.get_file_extension(filepath_hash) + filepath = f"{filepath_hash}{file_extension}" + os.replace(filepath_hash, filepath) + + except Exception as e: + raise e + + print(f"Downloaded {url} to {filepath}") + return filepath + + async def find_input_file(self, directory, filename_without_extension): + try: + directory_path = Path(directory) + loop = asyncio.get_running_loop() + files = await loop.run_in_executor(None, self.list_files_in_directory, directory_path, filename_without_extension) + if files: + return files[0] + except Exception as e: + print(f"Error finding input file: {e}") + return None + + def list_files_in_directory(self, directory_path, filename_without_extension): + files = [] + for file in directory_path.glob(f"{filename_without_extension}*"): + if file.is_file(): + files.append(file) + return files + + async def get_file_extension(self, filepath): + try: + mime_str = magic.from_file(filepath, mime=True) + extension = mimetypes.guess_extension(mime_str) or ".jpg" + return extension + except Exception as e: + return ".jpg" # Fallback to a default extension + + + async def apply_modifications(self): + await self.replace_workflow_urls(self.workflow) + + async def get_modified_workflow(self): + await self.apply_modifications() + return self.workflow \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/handlers/image2image.py 
b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/text2image.py similarity index 56% rename from build/COPY_ROOT/opt/serverless/handlers/image2image.py rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/text2image.py index 152b1645..36507a7f 100644 --- a/build/COPY_ROOT/opt/serverless/handlers/image2image.py +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/modifiers/text2image.py @@ -1,6 +1,7 @@ -from handlers.basehandler import BaseHandler +from modifiers.basemodifier import BaseModifier import random import time +import json """ @@ -11,52 +12,52 @@ downloading any URL's to the input directory and replacing the URL with a local path. """ -class Image2Image(BaseHandler): +class Text2Image(BaseModifier): - WORKFLOW_JSON = "/opt/serverless/workflows/image2image.json" + WORKFLOW_JSON = "workflows/image2image.json" - def __init__(self, payload): - super().__init__(payload, self.WORKFLOW_JSON) - self.apply_modifiers() - + def __init__(self, modifications={}): + super().__init__() + self.modifications = modifications - def apply_modifiers(self): + async def apply_modifications(self): timestr = time.strftime("%Y%m%d-%H%M%S") - self.prompt["prompt"]["3"]["inputs"]["seed"] = self.get_value( + self.workflow["3"]["inputs"]["seed"] = await self.modify_workflow_value( "seed", random.randint(0,2**32)) - self.prompt["prompt"]["3"]["inputs"]["steps"] = self.get_value( + self.workflow["3"]["inputs"]["steps"] = await self.modify_workflow_value( "steps", 20) - self.prompt["prompt"]["3"]["inputs"]["sampler_name"] = self.get_value( + self.workflow["3"]["inputs"]["sampler_name"] = await self.modify_workflow_value( "sampler_name", "dpmpp_2m") - self.prompt["prompt"]["3"]["inputs"]["scheduler"] = self.get_value( + self.workflow["3"]["inputs"]["scheduler"] = await self.modify_workflow_value( "scheduler", "normal") - self.prompt["prompt"]["3"]["inputs"]["denoise"] = self.get_value( + self.workflow["3"]["inputs"]["denoise"] = await self.modify_workflow_value( "denoise",
0.8700000000000001) - self.prompt["prompt"]["6"]["inputs"]["text"] = self.get_value( + self.workflow["6"]["inputs"]["text"] = await self.modify_workflow_value( "include_text", "") - self.prompt["prompt"]["7"]["inputs"]["text"] = self.get_value( + self.workflow["7"]["inputs"]["text"] = await self.modify_workflow_value( "exclude_text", "") - self.prompt["prompt"]["10"]["inputs"]["image"] = self.get_value( - "input_image", - "") - self.prompt["prompt"]["14"]["inputs"]["ckpt_name"] = self.get_value( + self.workflow["10"]["inputs"]["image"] = await self.modify_workflow_value( + "input_image" + ) + self.workflow["14"]["inputs"]["ckpt_name"] = await self.modify_workflow_value( "ckpt_name", "v1-5-pruned-emaonly.ckpt") - + await super().apply_modifications() + """ Example Request Body: { "input": { - "handler": "Image2Image", + "modifier": "Image2Image", "aws_access_key_id": "your-s3-access-key", "aws_secret_access_key": "your-s3-secret-access-key", "aws_endpoint_url": "https://my-endpoint.backblaze.com", diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/payloads/imgsave.json b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/payloads/imgsave.json new file mode 100644 index 00000000..8135e181 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/payloads/imgsave.json @@ -0,0 +1,43 @@ +{ + "input": { + "handler": "RawWorkflow", + "s3": { + "access_key_id": "your-s3-access-key", + "secret_access_key": "your-s3-secret-access-key", + "endpoint_url": "https://my-endpoint.backblaze.com", + "bucket_name": "your-bucket" + }, + "webhook": { + "webhook_url": "your-webhook-url", + "webhook_extra_params": {} + }, + "modifiers": { + + }, + "workflow_json": { + "9": { + "inputs": { + "filename_prefix": "ComfyUI", + "images": [ + "10", + 0 + ] + }, + "class_type": "SaveImage", + "_meta": { + "title": "Save Image" + } + }, + "10": { + "inputs": { + "image": "https://raw.githubusercontent.com/comfyanonymous/ComfyUI/master/input/example.png", + "upload": "image" + }, + "class_type": 
"LoadImage", + "_meta": { + "title": "Load Image" + } + } + } + } +} \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/docs/postman/.gitkeep b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/__init__.py similarity index 100% rename from build/COPY_ROOT/opt/serverless/docs/postman/.gitkeep rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/__init__.py diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/models.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/models.py new file mode 100644 index 00000000..bd95a7a8 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requestmodels/models.py @@ -0,0 +1,84 @@ +from typing import List, Union, Dict, Annotated +from pydantic import BaseModel, Field +import os +import json + +class S3Config(BaseModel): + access_key_id: str = Field(default="") + secret_access_key: str = Field(default="") + endpoint_url: str = Field(default="") + bucket_name: str = Field(default="") + connect_timeout: int = Field(default=5) + connect_attempts: int = Field(default=1) + + @staticmethod + def get_defaults(): + return { + "access_key_id": "", + "secret_access_key": "", + "endpoint_url": "", + "bucket_name": "", + "connect_timeout": "5", + "connect_attempts": "1" + } + + def get_config(self): + return { + "access_key_id": getattr(self, "access_key_id", os.environ.get("S3_ACCESS_KEY_ID", "")), + "secret_access_key": getattr(self, "secret_access_key", os.environ.get("S3_SECRET_ACCESS_KEY", "")), + "endpoint_url": getattr(self, "endpoint_url", os.environ.get("S3_ENDPOINT_URL", "")), + "bucket_name": getattr(self, "bucket_name", os.environ.get("S3_BUCKET_NAME", "")), + "connect_timeout": "5", + "connect_attempts": "1" + } + +class WebHook(BaseModel): + url: str = Field(default="") + extra_params: Dict = Field(default={}) + + @staticmethod + def get_defaults(): + return { + "url": "", + "extra_params": {} + } + + def has_valid_url(self): + return network.is_url(self.url) 
+ +class Input(BaseModel): + request_id: str = Field(default="") + modifier: str = Field(default="") + modifications: Dict = Field(default={}) + workflow_json: Dict = Field(default={}) + s3: S3Config = Field(default=S3Config.get_defaults()) + webhook: WebHook = Field(default=WebHook.get_defaults()) + +class Payload(BaseModel): + input: Input + + @staticmethod + def get_openapi_examples(): + directory = './payloads' + result = {} + + for filename in os.listdir(directory): + if filename.endswith('.json'): + filepath = os.path.join(directory, filename) + with open(filepath, 'r', encoding='utf-8') as file: + file_content = json.load(file) + + # Remove the file extension and convert to natural language + key = Payload.snake_to_natural(os.path.splitext(filename)[0]) + + # Add the content to the result dictionary + result[key] = {"value": file_content} + + return result + + @staticmethod + def snake_to_natural(snake_str): + # Convert snake_case to Natural Language + return ' '.join(word.capitalize() for word in snake_str.split('_')) + + \ No newline at end of file diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requirements.txt b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requirements.txt new file mode 100644 index 00000000..3dc0201c --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/requirements.txt @@ -0,0 +1,9 @@ +aiocache +pydantic>=2 +aiobotocore +aiofiles +aiohttp +fastapi==0.103 +pathlib +python-magic +uvicorn==0.23 diff --git a/build/COPY_ROOT/root/.gitkeep b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/__init__.py similarity index 100% rename from build/COPY_ROOT/root/.gitkeep rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/__init__.py diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/result.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/result.py new file mode 100644 index 00000000..3a752ac4 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/responses/result.py @@ -0,0 +1,11 @@ +from 
pydantic import BaseModel, Field +from typing import Dict + +class Result(BaseModel): + id: str + message: str = Field(default='Request accepted') + status: str = Field(default='pending') + comfyui_response: Dict = Field(default={}) + output: list = Field(default=[]) + timings: Dict = Field(default={}) + diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/generation_worker.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/generation_worker.py new file mode 100644 index 00000000..3aa1335c --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/generation_worker.py @@ -0,0 +1,101 @@ +import asyncio +import aiohttp +import json +from config import config + +class GenerationWorker: + """ + Send payload to ComfyUI and await completion + """ + def __init__(self, worker_id, kwargs): + self.worker_id = worker_id + self.preprocess_queue = kwargs["preprocess_queue"] + self.generation_queue = kwargs["generation_queue"] + self.postprocess_queue = kwargs["postprocess_queue"] + self.request_store = kwargs["request_store"] + self.response_store = kwargs["response_store"] + + async def work(self): + print ("GenerationWorker: waiting for job") + while True: + # Get a task from the job queue + request_id = await self.generation_queue.get() + if request_id is None: + # None is a signal that there are no more tasks + break + + # Process the job + print(f"GenerationWorker {self.worker_id} processing job: {request_id}") + try: + request = await self.request_store.get(request_id) + result = await self.response_store.get(request_id) + comfyui_job_id = await self.post_workflow(request) + + # TODO: Add check to ensure job still running (websocket) + while True: + complete = await self.is_workflow_complete(comfyui_job_id) + if not complete: + print("waiting for job") + await asyncio.sleep(1) + else: + print("job done") + break + + comfyui_response = await self.get_result(comfyui_job_id) + + except Exception as e: + result.status = "failed" + result.message = 
f"Generation failed: {e}" + await self.response_store.set(request_id, result) + # Send job straight to postprocess for fail result + await self.postprocess_queue.put(request_id) + + result.message = "Generation complete. Queued for upload." + result.comfyui_response = comfyui_response + + await self.response_store.set(request_id, result) + # Send for ComfyUI generation + await self.postprocess_queue.put(request_id) + + # Mark the job as complete + self.generation_queue.task_done() + + print(f"PreprocessWorker {self.worker_id} finished.") + return + + async def post_workflow(self, request): + data = json.dumps( + { + "prompt": request.input.workflow_json, + "client_id": request.input.request_id + }).encode('utf-8') + + async with aiohttp.ClientSession() as session: + try: + print("Posting job to local server...") + async with session.post(config.COMFYUI_API_PROMPT, data=data) as response: + response_data = await response.json() + if "prompt_id" in response_data: + return response_data["prompt_id"] + elif "node_errors" in response_data: + raise aiohttp.ClientError(response_data["node_errors"]) + elif "error" in response_data: + raise aiohttp.ClientError(response_data["error"]) + except Exception as e: + raise aiohttp.ClientError(f"Failed to queue prompt: {e}") + + async def is_workflow_complete(self, comfyui_job_id): + try: + async with aiohttp.ClientSession() as session: + async with session.get(config.COMFYUI_API_HISTORY) as response: + history = await response.json() + if comfyui_job_id in history: + return True + except Exception as e: + raise e + + async def get_result(self, comfyui_job_id): + async with aiohttp.ClientSession() as session: + async with session.get(config.COMFYUI_API_HISTORY) as response: + history = await response.json() + return history[comfyui_job_id] \ No newline at end of file diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/postprocess_worker.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/postprocess_worker.py new file 
mode 100644 index 00000000..a2e38b9c --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/postprocess_worker.py @@ -0,0 +1,124 @@ +import asyncio +import aiobotocore.session +import aiofiles +import aiofiles.os +from config import config +from pathlib import Path + +class PostprocessWorker: + """ + Upload generated assets and fire webhook response + """ + def __init__(self, worker_id, kwargs): + self.worker_id = worker_id + self.preprocess_queue = kwargs["preprocess_queue"] + self.generation_queue = kwargs["generation_queue"] + self.postprocess_queue = kwargs["postprocess_queue"] + self.request_store = kwargs["request_store"] + self.response_store = kwargs["response_store"] + + async def work(self): + print ("PostprocessWorker: waiting for job") + while True: + # Get a task from the job queue + request_id = await self.postprocess_queue.get() + if request_id is None: + # None is a signal that there are no more tasks + break + + # Process the job + print(f"PostprocessWorker {self.worker_id} processing job: {request_id}") + try: + request = await self.request_store.get(request_id) + result = await self.response_store.get(request_id) + + await self.move_assets(request_id, result) + await self.upload_assets(request_id, request.input.s3.get_config(), result) + + result.status = "success" + result.message = "Process complete." 
+ + except Exception as e: + print(e) + result.status = "failed" + result.message = f"Postprocessing failed: {e}" + await self.response_store.set(request_id, result) + + await self.response_store.set(request_id, result) + + # Mark the job as complete + self.postprocess_queue.task_done() + + print(f"PostprocessWorker {self.worker_id} finished.") + + async def move_assets(self, request_id, result): + custom_output_dir = f"{config.OUTPUT_DIR}{request_id}" + await aiofiles.os.makedirs(custom_output_dir, exist_ok=True) + + for key, value in result.comfyui_response['outputs'].items(): + for inner_key, inner_value in value.items(): + if isinstance(inner_value, list): + for item in inner_value: + if item.get("type") == "output": + original_path = f"{config.OUTPUT_DIR}{item['subfolder']}/{item['filename']}" + new_path = f"{custom_output_dir}/{item['filename']}" + + # Handle duplicated request where output file is not re-generated + if await aiofiles.os.path.islink(original_path): + real_path = await aiofiles.os.readlink(original_path) + async with aiofiles.open(real_path, 'rb') as src_file, aiofiles.open(new_path, 'wb') as dst_file: + file_stat = await aiofiles.os.stat(real_path) + await aiofiles.os.sendfile(dst_file.fileno(), src_file.fileno(), 0, file_stat.st_size) + else: + await aiofiles.os.rename(original_path, new_path) + await aiofiles.os.symlink(new_path, original_path) + key = f"{request_id}/{item['filename']}" + result.output.append({ + "local_path": new_path + }) + + async def upload_assets(self, request_id, s3_config, result): + session = aiobotocore.session.get_session() + async with session.create_client( + 's3', + aws_access_key_id=s3_config["access_key_id"], + aws_secret_access_key=s3_config["secret_access_key"], + endpoint_url=s3_config["endpoint_url"], + config=aiobotocore.config.AioConfig( + connect_timeout=int(s3_config["connect_timeout"]), + retries={"max_attempts": int(s3_config["connect_attempts"])} + ) + ) as s3_client: + tasks = [] + for obj in 
result.output: + local_path = obj["local_path"] + task = asyncio.create_task(self.upload_file_and_get_url(request_id, s3_client, s3_config["bucket_name"], local_path)) + tasks.append(task) + + # Run all tasks concurrently + presigned_urls = await asyncio.gather(*tasks) + + # Append the presigned URLs to the respective objects + for obj, url in zip(result.output, presigned_urls): + obj["url"] = url + + async def upload_file_and_get_url(self, requst_id, s3_client, bucket_name, local_path): + # Get the file name from the local path + file_name = f"{requst_id}/{Path(local_path).name}" + print (f"uploading {file_name}") + + try: + # Upload the file + with open(local_path, 'rb') as file: + await s3_client.put_object(Bucket=bucket_name, Key=file_name, Body=file) + + # Generate presigned URL + presigned_url = await s3_client.generate_presigned_url( + 'get_object', + Params={'Bucket': bucket_name, 'Key': file_name}, + ExpiresIn=604800 # URL expiration time in seconds + ) + return presigned_url + except Exception as e: + print(f"Error uploading {local_path}: {e}") + return None \ No newline at end of file diff --git a/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/preprocess_worker.py b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/preprocess_worker.py new file mode 100644 index 00000000..93c05c8f --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workers/preprocess_worker.py @@ -0,0 +1,62 @@ +import importlib +from modifiers.basemodifier import BaseModifier + +class PreprocessWorker: + """ + Check for URL's in the payload and download the assets as required + """ + def __init__(self, worker_id, kwargs): + self.worker_id = worker_id + self.preprocess_queue = kwargs["preprocess_queue"] + self.generation_queue = kwargs["generation_queue"] + self.postprocess_queue = kwargs["postprocess_queue"] + self.request_store = kwargs["request_store"] + self.response_store = kwargs["response_store"] + + async def work(self): + print ("PreprocessWorker: waiting for job") + 
while True: + # Get a task from the job queue + request_id = await self.preprocess_queue.get() + if request_id is None: + # None is a signal that there are no more tasks + break + + # Process the job + print(f"PreprocessWorker {self.worker_id} processing job: {request_id}") + try: + request = await self.request_store.get(request_id) + result = await self.response_store.get(request_id) + modifier = await self.get_workflow_modifier(request.input.modifier, request.input.modifications) + await modifier.load_workflow(request.input.workflow_json) + request.input.workflow_json = await modifier.get_modified_workflow() + await self.request_store.set(request_id, request) + except Exception as e: + result.status = "failed" + result.message = f"Workflow modifier failed: {e}" + await self.response_store.set(request_id, result) + # Send job straight to postprocess for fail result + await self.postprocess_queue.put(request_id) + + result.message = "Preprocessing complete. Queued for generation." + await self.response_store.set(request_id, result) + # Send for ComfyUI generation + await self.generation_queue.put(request_id) + # Mark the job as complete + self.preprocess_queue.task_done() + + print(f"PreprocessWorker {self.worker_id} finished.") + return + + async def get_workflow_modifier(self, modifier_name: str, modifiers: dict) -> BaseModifier: + try: + if modifier_name: + module = importlib.import_module(f'modifiers.{modifier_name.lower()}') + modifier_class = getattr(module, modifier_name) + else: + modifier_class = BaseModifier + return modifier_class(modifiers) + except: + raise + + \ No newline at end of file diff --git a/build/COPY_ROOT/opt/serverless/workflows/image2image.json b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workflows/image2image.json similarity index 100% rename from build/COPY_ROOT/opt/serverless/workflows/image2image.json rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workflows/image2image.json diff --git 
a/build/COPY_ROOT/opt/serverless/workflows/text2image.json b/build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workflows/text2image.json similarity index 100% rename from build/COPY_ROOT/opt/serverless/workflows/text2image.json rename to build/COPY_ROOT_1/opt/ai-dock/api-wrapper/workflows/text2image.json diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/amd.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/amd.sh similarity index 100% rename from build/COPY_ROOT/opt/ai-dock/bin/build/layer0/amd.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/amd.sh diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/clean.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/clean.sh similarity index 85% rename from build/COPY_ROOT/opt/ai-dock/bin/build/layer0/clean.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/clean.sh index 4a350564..3a33952a 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/clean.sh +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/clean.sh @@ -2,7 +2,6 @@ # Tidy up and keep image small apt-get clean -y -micromamba clean -ay fix-permissions.sh -o container diff --git a/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/common.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/common.sh new file mode 100755 index 00000000..b4c21aa7 --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/common.sh @@ -0,0 +1,42 @@ +#!/bin/false + +source /opt/ai-dock/etc/environment.sh + +build_common_main() { + build_common_install_api +} + +build_common_install_api() { + # ComfyUI API wrapper + $APT_INSTALL libmagic1 + $API_VENV_PIP install --no-cache-dir \ + -r /opt/ai-dock/api-wrapper/requirements.txt + +} + +build_common_install_comfyui() { + # Set git SHA to latest if not provided + if [[ -z $COMFYUI_SHA ]]; then + export COMFYUI_SHA="$(curl -fsSL "https://api.github.com/repos/comfyanonymous/ComfyUI/commits/master" \ + | jq -r '.sha[0:7]')" + env-store COMFYUI_SHA + fi + + cd /opt + git clone 
https://github.com/comfyanonymous/ComfyUI + cd /opt/ComfyUI + git checkout "$COMFYUI_SHA" + + $COMFYUI_VENV_PIP install --no-cache-dir \ + -r requirements.txt +} + +build_common_run_tests() { + installed_pytorch_version=$("$COMFYUI_VENV_PYTHON" -c "import torch; print(torch.__version__)") + if [[ "$installed_pytorch_version" != "$PYTORCH_VERSION"* ]]; then + echo "Expected PyTorch ${PYTORCH_VERSION} but found ${installed_pytorch_version}\n" + exit 1 + fi +} + +build_common_main "$@" \ No newline at end of file diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/cpu.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/cpu.sh similarity index 100% rename from build/COPY_ROOT/opt/ai-dock/bin/build/layer0/cpu.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/cpu.sh diff --git a/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/init.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/init.sh new file mode 100755 index 00000000..506ba9ec --- /dev/null +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/init.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# Must exit and fail to build if any command fails +set -eo pipefail +umask 002 + +source /opt/ai-dock/bin/build/layer1/common.sh + +if [[ "$XPU_TARGET" == "NVIDIA_GPU" ]]; then + source /opt/ai-dock/bin/build/layer1/nvidia.sh +elif [[ "$XPU_TARGET" == "AMD_GPU" ]]; then + source /opt/ai-dock/bin/build/layer1/amd.sh +elif [[ "$XPU_TARGET" == "CPU" ]]; then + source /opt/ai-dock/bin/build/layer1/cpu.sh +else + printf "No valid XPU_TARGET specified\n" >&2 + exit 1 +fi + +source /opt/ai-dock/bin/build/layer1/clean.sh \ No newline at end of file diff --git a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/nvidia.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/nvidia.sh similarity index 53% rename from build/COPY_ROOT/opt/ai-dock/bin/build/layer0/nvidia.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/nvidia.sh index 4529239b..81f52779 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/build/layer0/nvidia.sh +++ 
b/build/COPY_ROOT_1/opt/ai-dock/bin/build/layer1/nvidia.sh @@ -7,19 +7,11 @@ build_nvidia_main() { } build_nvidia_install_comfyui() { - micromamba run -n comfyui ${PIP_INSTALL} \ - nvidia-ml-py3 - - micromamba install -n comfyui -c xformers -y \ - xformers \ - pytorch=${PYTORCH_VERSION} \ - pytorch-cuda="$(cut -d '.' -f 1,2 <<< "${CUDA_VERSION}")" - build_common_install_comfyui } build_nvidia_run_tests() { - installed_pytorch_cuda_version=$(micromamba run -n comfyui python -c "import torch; print(torch.version.cuda)") + installed_pytorch_cuda_version=$("$COMFYUI_VENV_PYTHON" -c "import torch; print(torch.version.cuda)") if [[ "$CUDA_VERSION" != "$installed_pytorch_cuda"* ]]; then echo "Expected PyTorch CUDA ${CUDA_VERSION} but found ${installed_pytorch_cuda}\n" exit 1 diff --git a/build/COPY_ROOT/opt/ai-dock/bin/preflight.d/10-default.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/preflight.d/10-default.sh similarity index 71% rename from build/COPY_ROOT/opt/ai-dock/bin/preflight.d/10-default.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/preflight.d/10-default.sh index 8b99c138..05c34c0d 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/preflight.d/10-default.sh +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/preflight.d/10-default.sh @@ -3,7 +3,6 @@ # This file will be sourced in init.sh function preflight_main() { - preflight_copy_notebook preflight_update_comfyui printf "%s" "${COMFYUI_FLAGS}" > /etc/comfyui_flags.conf } @@ -13,14 +12,6 @@ function preflight_serverless() { printf "%s" "${COMFYUI_FLAGS}" > /etc/comfyui_flags.conf } -function preflight_copy_notebook() { - if micromamba env list | grep 'jupyter' > /dev/null 2>&1; then - if [[ ! 
-f "${WORKSPACE}comfyui.ipynb" ]]; then - cp /usr/local/share/ai-dock/comfyui.ipynb ${WORKSPACE} - fi - fi -} - function preflight_update_comfyui() { if [[ ${AUTO_UPDATE,,} == "true" ]]; then /opt/ai-dock/bin/update-comfyui.sh diff --git a/build/COPY_ROOT/opt/ai-dock/bin/set-comfyui-flags.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/set-comfyui-flags.sh similarity index 100% rename from build/COPY_ROOT/opt/ai-dock/bin/set-comfyui-flags.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/set-comfyui-flags.sh diff --git a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui-rp-api.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui-api-wrapper.sh similarity index 50% rename from build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui-rp-api.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui-api-wrapper.sh index 7a595dcf..134a4d5e 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui-rp-api.sh +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui-api-wrapper.sh @@ -3,7 +3,7 @@ trap cleanup EXIT LISTEN_PORT=38188 -SERVICE_NAME="RunPod Serverless API" +SERVICE_NAME="ComfyUI API Wrapper" function cleanup() { kill $(jobs -p) > /dev/null 2>&1 @@ -13,21 +13,19 @@ function cleanup() { function start() { source /opt/ai-dock/etc/environment.sh - if [[ ${SERVERLESS,,} = "true" ]]; then - printf "Refusing to start hosted API service in serverless mode\n" - exec sleep 10 - fi + source /opt/ai-dock/bin/venv-set.sh api printf "Starting %s...\n" ${SERVICE_NAME} fuser -k -SIGKILL ${LISTEN_PORT}/tcp > /dev/null 2>&1 & wait -n - cd /opt/serverless/providers/runpod && \ - micromamba run -n serverless python worker.py \ - --rp_serve_api \ - --rp_api_port $LISTEN_PORT \ - --rp_api_host 127.0.0.1 + cd /opt/ai-dock/api-wrapper && \ + source "$API_VENV/bin/activate" + uvicorn main:app \ + --host 127.0.0.1 \ + --port $LISTEN_PORT \ + --reload } start 2>&1 \ No newline at end of file diff --git a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui.sh 
b/build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui.sh similarity index 88% rename from build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui.sh index 9268a73b..9136deed 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/supervisor-comfyui.sh +++ b/build/COPY_ROOT_1/opt/ai-dock/bin/supervisor-comfyui.sh @@ -16,6 +16,9 @@ function cleanup() { function start() { source /opt/ai-dock/etc/environment.sh + source /opt/ai-dock/bin/venv-set.sh serviceportal + source /opt/ai-dock/bin/venv-set.sh comfyui + if [[ ! -v COMFYUI_PORT || -z $COMFYUI_PORT ]]; then COMFYUI_PORT=${COMFYUI_PORT_HOST:-8188} fi @@ -42,13 +45,13 @@ function start() { BASE_FLAGS="--listen 127.0.0.1 --disable-auto-launch" - # Delay launch until micromamba is ready + # Delay launch until venv is ready if [[ -f /run/workspace_sync || -f /run/container_config ]]; then if [[ ${SERVERLESS,,} != "true" ]]; then printf "Waiting for workspace sync...\n" fuser -k -SIGKILL ${LISTEN_PORT}/tcp > /dev/null 2>&1 & wait -n - /usr/bin/python3 /opt/ai-dock/fastapi/logviewer/main.py \ + "$SERVICEPORTAL_VENV_PYTHON" /opt/ai-dock/fastapi/logviewer/main.py \ -p $LISTEN_PORT \ -r 5 \ -s "${SERVICE_NAME}" \ @@ -78,9 +81,9 @@ function start() { FLAGS_COMBINED="${PLATFORM_FLAGS} ${BASE_FLAGS} $(cat /etc/comfyui_flags.conf)" printf "Starting %s...\n" "${SERVICE_NAME}" - cd /opt/ComfyUI && \ - micromamba run -n comfyui \ - -e LD_PRELOAD=libtcmalloc.so \ + cd /opt/ComfyUI + source "$COMFYUI_VENV/bin/activate" + LD_PRELOAD=libtcmalloc.so \ python main.py \ ${FLAGS_COMBINED} --port ${LISTEN_PORT} } diff --git a/build/COPY_ROOT/opt/ai-dock/bin/update-comfyui.sh b/build/COPY_ROOT_1/opt/ai-dock/bin/update-comfyui.sh similarity index 81% rename from build/COPY_ROOT/opt/ai-dock/bin/update-comfyui.sh rename to build/COPY_ROOT_1/opt/ai-dock/bin/update-comfyui.sh index 19d6a6d0..42914595 100755 --- a/build/COPY_ROOT/opt/ai-dock/bin/update-comfyui.sh +++ 
b/build/COPY_ROOT_1/opt/ai-dock/bin/update-comfyui.sh @@ -20,4 +20,5 @@ cd /opt/ComfyUI git checkout ${branch} git pull -micromamba run -n comfyui ${PIP_INSTALL} -r requirements.txt +"$COMFYUI_VENV_PIP" install --no-cache-dir \ + -r requirements.txt diff --git a/build/COPY_ROOT/opt/ai-dock/storage_monitor/etc/mappings.sh b/build/COPY_ROOT_1/opt/ai-dock/storage_monitor/etc/mappings.sh similarity index 100% rename from build/COPY_ROOT/opt/ai-dock/storage_monitor/etc/mappings.sh rename to build/COPY_ROOT_1/opt/ai-dock/storage_monitor/etc/mappings.sh diff --git a/build/COPY_ROOT/opt/caddy/share/service_config_18188 b/build/COPY_ROOT_1/opt/caddy/share/service_config_18188 similarity index 62% rename from build/COPY_ROOT/opt/caddy/share/service_config_18188 rename to build/COPY_ROOT_1/opt/caddy/share/service_config_18188 index 79295ae2..667a1e52 100644 --- a/build/COPY_ROOT/opt/caddy/share/service_config_18188 +++ b/build/COPY_ROOT_1/opt/caddy/share/service_config_18188 @@ -1,27 +1,15 @@ :!PROXY_PORT { import universal-config - - @openapi { - path /openapi.json - } - @rp-api { - path /rp-api* + @ai-dock-api { + path /ai-dock/api/* } header @authenticating_bearer Set-Cookie "ai_dock_token={$WEB_TOKEN}; Path=/ ;Max-Age=604800; HttpOnly; SameSite=lax" header @authenticating_basic Set-Cookie "ai_dock_token={$WEB_PASSWORD_B64}; Path=/ ;Max-Age=604800; HttpOnly; SameSite=lax" - route @openapi { - handle_path /openapi.json { - root * /opt/serverless/docs/swagger/openapi.yaml - file_server @authorized - } - redir {$SERVICEPORTAL_LOGIN} - } - - route @rp-api { - uri strip_prefix /rp-api + route @ai-dock-api { + uri strip_prefix /ai-dock/api reverse_proxy @authorized localhost:38188 redir {$SERVICEPORTAL_LOGIN} } diff --git a/build/COPY_ROOT/usr/.gitkeep b/build/COPY_ROOT_1/root/.gitkeep similarity index 100% rename from build/COPY_ROOT/usr/.gitkeep rename to build/COPY_ROOT_1/root/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/serverless/handlers/.gitkeep 
b/build/COPY_ROOT_1/usr/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/serverless/handlers/.gitkeep rename to build/COPY_ROOT_1/usr/.gitkeep diff --git a/build/COPY_ROOT/usr/local/share/ai-dock/comfyui.ipynb b/build/COPY_ROOT_1/usr/local/share/ai-dock/comfyui.ipynb similarity index 100% rename from build/COPY_ROOT/usr/local/share/ai-dock/comfyui.ipynb rename to build/COPY_ROOT_1/usr/local/share/ai-dock/comfyui.ipynb diff --git a/build/COPY_ROOT_EXTRA/opt/ai-dock/bin/build/layer1/init.sh b/build/COPY_ROOT_99/opt/ai-dock/bin/build/layer99/init.sh similarity index 83% rename from build/COPY_ROOT_EXTRA/opt/ai-dock/bin/build/layer1/init.sh rename to build/COPY_ROOT_99/opt/ai-dock/bin/build/layer99/init.sh index 17ecd407..df623891 100755 --- a/build/COPY_ROOT_EXTRA/opt/ai-dock/bin/build/layer1/init.sh +++ b/build/COPY_ROOT_99/opt/ai-dock/bin/build/layer99/init.sh @@ -1,14 +1,23 @@ #!/bin/bash +# Must exit and fail to build if any command fails +set -eo pipefail +umask 002 + # Use this layer to add nodes and models +APT_PACKAGES=( + #"package-1" + #"package-2" +) # Packages are installed after nodes so we can fix them... 
-PYTHON_PACKAGES=( +PIP_PACKAGES=( "opencv-python==4.7.0.72" ) NODES=( - #"https://github.com/ltdrdata/ComfyUI-Manager" + "https://github.com/ltdrdata/ComfyUI-Manager" + "https://github.com/cubiq/ComfyUI_essentials" ) CHECKPOINT_MODELS=( @@ -18,6 +27,10 @@ CHECKPOINT_MODELS=( #"https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors" ) +UNET_MODELS=( + +) + LORA_MODELS=( #"https://civitai.com/api/download/models/16576" ) @@ -57,11 +70,15 @@ CONTROLNET_MODELS=( ### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ### function build_extra_start() { + build_extra_get_apt_packages build_extra_get_nodes - build_extra_install_python_packages + build_extra_get_pip_packages build_extra_get_models \ "/opt/storage/stable_diffusion/models/ckpt" \ "${CHECKPOINT_MODELS[@]}" + build_extra_get_models \ + "/opt/storage/stable_diffusion/models/unet" \ + "${UNET_MODELS[@]}" build_extra_get_models \ "/opt/storage/stable_diffusion/models/lora" \ "${LORA_MODELS[@]}" @@ -75,13 +92,15 @@ function build_extra_start() { "/opt/storage/stable_diffusion/models/esrgan" \ "${ESRGAN_MODELS[@]}" - cd /opt/ComfyUI && \ - micromamba run -n comfyui -e LD_PRELOAD=libtcmalloc.so python main.py \ + cd /opt/ComfyUI + source "$COMFYUI_VENV/bin/activate" + LD_PRELOAD=libtcmalloc.so python main.py \ --cpu \ --listen 127.0.0.1 \ --port 11404 \ --disable-auto-launch \ --quick-test-for-ci + deactivate } function build_extra_get_nodes() { @@ -94,22 +113,30 @@ function build_extra_get_nodes() { printf "Updating node: %s...\n" "${repo}" ( cd "$path" && git pull ) if [[ -e $requirements ]]; then - micromamba -n comfyui run ${PIP_INSTALL} -r "$requirements" + "$COMFYUI_VENV_PIP" install --no-cache-dir \ + -r "$requirements" fi fi else printf "Downloading node: %s...\n" "${repo}" git clone "${repo}" "${path}" --recursive if [[ -e $requirements ]]; then - micromamba -n comfyui run ${PIP_INSTALL} -r "${requirements}" + "$COMFYUI_VENV_PIP" install 
--no-cache-dir \ + -r "${requirements}" fi fi done } -function build_extra_install_python_packages() { - if [ ${#PYTHON_PACKAGES[@]} -gt 0 ]; then - micromamba -n comfyui run ${PIP_INSTALL} ${PYTHON_PACKAGES[*]} +function build_extra_get_apt_packages() { + if [ ${#APT_PACKAGES[@]} -gt 0 ]; then + $APT_INSTALL ${APT_PACKAGES[*]} + fi +} +function build_extra_get_pip_packages() { + if [ ${#PIP_PACKAGES[@]} -gt 0 ]; then + "$COMFYUI_VENV_PIP" install --no-cache-dir \ + ${PIP_PACKAGES[*]} fi } diff --git a/build/COPY_ROOT_EXTRA/opt/serverless/workflows/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/ckpt/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/serverless/workflows/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/ckpt/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/ckpt/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/controlnet/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/ckpt/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/controlnet/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/controlnet/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/diffusers/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/controlnet/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/diffusers/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/diffusers/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/embeddings/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/diffusers/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/embeddings/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/embeddings/.gitkeep 
b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/esrgan/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/embeddings/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/esrgan/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/esrgan/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/gligen/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/esrgan/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/gligen/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/gligen/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/hypernetworks/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/gligen/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/hypernetworks/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/hypernetworks/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/lora/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/hypernetworks/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/lora/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/lora/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/style_models/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/lora/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/style_models/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/style_models/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/unet/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/style_models/.gitkeep rename to 
build/COPY_ROOT_99/opt/storage/stable_diffusion/models/unet/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/unet/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/vae/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/unet/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/vae/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae/.gitkeep b/build/COPY_ROOT_99/opt/storage/stable_diffusion/models/vae_approx/.gitkeep similarity index 100% rename from build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae/.gitkeep rename to build/COPY_ROOT_99/opt/storage/stable_diffusion/models/vae_approx/.gitkeep diff --git a/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae_approx/.gitkeep b/build/COPY_ROOT_EXTRA/opt/storage/stable_diffusion/models/vae_approx/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/build/Dockerfile b/build/Dockerfile index 889c9c09..ff215ad7 100644 --- a/build/Dockerfile +++ b/build/Dockerfile @@ -1,40 +1,43 @@ # For build automation - Allows building from any ai-dock base image # Use a *cuda*base* image as default because pytorch brings the libs -ARG IMAGE_BASE="ghcr.io/ai-dock/python:3.10-cuda-11.8.0-runtime-22.04" +ARG IMAGE_BASE="ghcr.io/ai-dock/python:3.10-cuda-12.1.1-base-22.04" FROM ${IMAGE_BASE} LABEL org.opencontainers.image.source https://github.com/ai-dock/comfyui LABEL org.opencontainers.image.description "ComfyUI Stable Diffusion backend and GUI" LABEL maintainer="Rob Ballantyne " -ARG PYTHON_VERSION="3.10" -ENV PYTHON_VERSION=${PYTHON_VERSION} - -ARG PYTORCH_VERSION="2.2.1" -ENV PYTORCH_VERSION="${PYTORCH_VERSION}" +ENV COMFYUI_VENV=$VENV_DIR/comfyui +ENV COMFYUI_VENV_PYTHON=$COMFYUI_VENV/bin/python +ENV COMFYUI_VENV_PIP=$COMFYUI_VENV/bin/pip -ARG COMFYUI_SHA -ENV COMFYUI_SHA=${COMFYUI_SHA} +ENV API_VENV=$VENV_DIR/api +ENV 
API_VENV_PYTHON=$API_VENV/bin/python +ENV API_VENV_PIP=$API_VENV/bin/pip ENV IMAGE_SLUG="comfyui" -ENV OPT_SYNC=ComfyUI:serverless - -# Copy early so we can use scripts in the build - Changes to these files will invalidate the cache and cause a rebuild. -COPY --chown=0:1111 ./COPY_ROOT/ / - -# Use build scripts to ensure we can build all targets from one Dockerfile in a single layer. -# Don't put anything heavy in here - We can use multi-stage building above if necessary. +ENV OPT_SYNC=ComfyUI +# Prepare environment +ARG PYTHON_VERSION="3.10" +ENV PYTHON_VERSION=${PYTHON_VERSION} +ARG PYTORCH_VERSION="2.3.1" +ENV PYTORCH_VERSION="${PYTORCH_VERSION}" +COPY --chown=0:1111 ./COPY_ROOT_0/ / ARG IMAGE_BASE RUN set -eo pipefail && /opt/ai-dock/bin/build/layer0/init.sh | tee /var/log/build.log -# Must be set after layer0 -ENV MAMBA_DEFAULT_ENV=comfyui -ENV MAMBA_DEFAULT_RUN="micromamba run -n $MAMBA_DEFAULT_ENV" +RUN echo "bust cache" +# Install software +ARG COMFYUI_SHA +ENV COMFYUI_SHA=${COMFYUI_SHA} +COPY --chown=0:1111 ./COPY_ROOT_1/ / +RUN set -eo pipefail && /opt/ai-dock/bin/build/layer1/init.sh | tee -a /var/log/build.log # Copy overrides and models into later layers for fast rebuilds -COPY --chown=0:1111 ./COPY_ROOT_EXTRA/ / -RUN set -eo pipefail && /opt/ai-dock/bin/build/layer1/init.sh | tee -a /var/log/build.log +COPY --chown=0:1111 ./COPY_ROOT_99/ / +RUN set -eo pipefail && /opt/ai-dock/bin/build/layer99/init.sh | tee -a /var/log/build.log +ENV PYTHON_DEFAULT_VENV=comfyui -# Keep init.sh as-is and place additional logic in /opt/ai-dock/bin/preflight.sh +# Keep init.sh as-is and place additional logic in /opt/ai-dock/bin/preflight.d CMD ["init.sh"] diff --git a/config/provisioning/default.sh b/config/provisioning/default.sh index 3904dbbd..35f09848 100755 --- a/config/provisioning/default.sh +++ b/config/provisioning/default.sh @@ -6,8 +6,14 @@ # Packages are installed after nodes so we can fix them... 
-PYTHON_PACKAGES=( - #"opencv-python==4.7.0.72" +APT_PACKAGES=( + #"package-1" + #"package-2" +) + +PIP_PACKAGES=( + #"package-1" + #"package-2" ) NODES=( @@ -21,6 +27,10 @@ CHECKPOINT_MODELS=( #"https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors" ) +UNET_MODELS=( + +) + LORA_MODELS=( #"https://civitai.com/api/download/models/16576" ) @@ -60,15 +70,22 @@ CONTROLNET_MODELS=( ### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ### function provisioning_start() { - DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000)) - DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000)) - DISK_GB_ALLOCATED=$(($DISK_GB_AVAILABLE + $DISK_GB_USED)) + if [[ ! -d /opt/environments/python ]]; then + export MAMBA_BASE=true + fi + source /opt/ai-dock/etc/environment.sh + source /opt/ai-dock/bin/venv-set.sh comfyui + provisioning_print_header + provisioning_get_apt_packages provisioning_get_nodes - provisioning_install_python_packages + provisioning_get_pip_packages provisioning_get_models \ "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \ "${CHECKPOINT_MODELS[@]}" + provisioning_get_models \ + "${WORKSPACE}/storage/stable_diffusion/models/unet" \ + "${UNET_MODELS[@]}" provisioning_get_models \ "${WORKSPACE}/storage/stable_diffusion/models/lora" \ "${LORA_MODELS[@]}" @@ -84,6 +101,26 @@ function provisioning_start() { provisioning_print_end } +function pip_install() { + if [[ -z $MAMBA_BASE ]]; then + "$COMFYUI_VENV_PIP" install --no-cache-dir "$@" + else + micromamba run -n comfyui pip install --no-cache-dir "$@" + fi +} + +function provisioning_get_apt_packages() { + if [[ -n $APT_PACKAGES ]]; then + sudo $APT_INSTALL ${APT_PACKAGES[@]} + fi +} + +function provisioning_get_pip_packages() { + if [[ -n $PIP_PACKAGES ]]; then + pip_install ${PIP_PACKAGES[@]} + fi +} + function provisioning_get_nodes() { for repo in "${NODES[@]}"; do dir="${repo##*/}" @@ -94,37 +131,27 @@ 
function provisioning_get_nodes() { printf "Updating node: %s...\n" "${repo}" ( cd "$path" && git pull ) if [[ -e $requirements ]]; then - micromamba -n comfyui run ${PIP_INSTALL} -r "$requirements" + pip_install -r "$requirements" fi fi else printf "Downloading node: %s...\n" "${repo}" git clone "${repo}" "${path}" --recursive if [[ -e $requirements ]]; then - micromamba -n comfyui run ${PIP_INSTALL} -r "${requirements}" + pip_install -r "${requirements}" fi fi done } -function provisioning_install_python_packages() { - if [ ${#PYTHON_PACKAGES[@]} -gt 0 ]; then - micromamba -n comfyui run ${PIP_INSTALL} ${PYTHON_PACKAGES[*]} - fi -} function provisioning_get_models() { if [[ -z $2 ]]; then return 1; fi + dir="$1" mkdir -p "$dir" shift - if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then - arr=("$@") - else - printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n" - arr=("$1") - fi - + arr=("$@") printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir" for url in "${arr[@]}"; do printf "Downloading: %s\n" "${url}" @@ -146,7 +173,17 @@ function provisioning_print_end() { # Download from $1 URL to $2 file path function provisioning_download() { - wget -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1" + if [[ -n $HF_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?huggingface\.co(/|$|\?) ]]; then + auth_token="$HF_TOKEN" + elif + [[ -n $CIVITAI_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?civitai\.com(/|$|\?) 
]]; then + auth_token="$CIVITAI_TOKEN" + fi + if [[ -n $auth_token ]];then + wget --header="Authorization: Bearer $auth_token" -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1" + else + wget -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1" + fi } provisioning_start diff --git a/docker-compose.yaml b/docker-compose.yaml index 6ee86137..4f8b5312 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -7,14 +7,14 @@ services: context: ./build args: PYTHON_VERSION: ${PYTHON_VERSION:-3.10} - PYTORCH_VERSION: ${PYTORCH_VERSION:-2.2.2} + PYTORCH_VERSION: ${PYTORCH_VERSION:-2.3.0} COMFYUI_SHA: ${COMFYUI_SHA:-} # Base on Python image which is base + python + jupyter - IMAGE_BASE: ${IMAGE_BASE:-ghcr.io/ai-dock/python:${PYTHON_VERSION:-3.10}-cuda-11.8.0-runtime-22.04} + IMAGE_BASE: ${IMAGE_BASE:-ghcr.io/ai-dock/python:${PYTHON_VERSION:-3.10}-v2-cuda-12.1.1-base-22.04} tags: - - "ghcr.io/ai-dock/comfyui:${IMAGE_TAG:-pytorch-${PYTORCH_VERSION:-2.2.2}-py3.10-cuda-11.8.0-runtime-22.04}" + - "ghcr.io/ai-dock/comfyui:${IMAGE_TAG:-pytorch-${PYTORCH_VERSION:-2.3.0}-py3.10-cuda-12.1.1-base-22.04}" - image: ghcr.io/ai-dock/comfyui:${IMAGE_TAG:-pytorch-${PYTORCH_VERSION:-2.2.2}-py3.10-cuda-11.8.0-runtime-22.04} + image: ghcr.io/ai-dock/comfyui:${IMAGE_TAG:-pytorch-${PYTORCH_VERSION:-2.3.0}-py3.10-cuda-12.1.1-base-22.04} ## For Nvidia GPU's - You probably want to uncomment this #deploy: @@ -41,6 +41,8 @@ services: # Avoids changing local file owner - ./config/authorized_keys:/root/.ssh/authorized_keys_mount - ./config/provisioning/default.sh:/opt/ai-dock/bin/provisioning.sh + # In-container development + - ./build/COPY_ROOT_1/opt/ai-dock/api-wrapper:/opt/ai-dock/api-wrapper ports: # SSH available on host machine port 2222 to avoid conflict. 
Change to suit - ${SSH_PORT_HOST:-2222}:22 @@ -63,7 +65,10 @@ services: - WORKSPACE_SYNC=${WORKSPACE_SYNC:-false} - CF_TUNNEL_TOKEN=${CF_TUNNEL_TOKEN:-} - CF_QUICK_TUNNELS=${CF_QUICK_TUNNELS:-true} + - CIVITAI_TOKEN=${CIVITAI_TOKEN:-} + - HF_TOKEN=${HF_TOKEN:-} - WEB_ENABLE_AUTH=${WEB_ENABLE_AUTH:-true} + - WEB_ENABLE_HTTPS=${WEB_ENABLE_HTTPS:-true} - WEB_USER=${WEB_USER:-user} - WEB_PASSWORD=${WEB_PASSWORD:-password} - SSH_PORT_HOST=${SSH_PORT_HOST:-2222}