diff --git a/.github/workflows/maintests.yml b/.github/workflows/maintests.yml index cfad9593..550d7365 100644 --- a/.github/workflows/maintests.yml +++ b/.github/workflows/maintests.yml @@ -1,4 +1,4 @@ -name: Tests +name: Main Tests on: push: @@ -39,7 +39,7 @@ jobs: run: | python -m pip install --upgrade pip pip install --upgrade -r requirements.dev.txt - - name: Check build_helper.py hordelib imports have no breaking dependency changes + - name: Check build_helper.py horde-engine imports have no breaking dependency changes run: tox -e test-build-helper - name: Build unit test environment, confirm CUDA is available on host run: tox -e tests -- -k test_cuda diff --git a/.github/workflows/prtests.yml b/.github/workflows/prtests.yml index 57f1f0f8..6abbeb7a 100644 --- a/.github/workflows/prtests.yml +++ b/.github/workflows/prtests.yml @@ -1,4 +1,4 @@ -name: Unstable Tests +name: Pull Request Tests on: pull_request_target: @@ -44,7 +44,7 @@ jobs: run: | python -m pip install --upgrade pip pip install --upgrade -r requirements.dev.txt - - name: Check build_helper.py hordelib imports have no breaking dependency changes + - name: Check build_helper.py horde-engine imports have no breaking dependency changes run: tox -e test-build-helper - name: Build unit test environment, confirm CUDA is available on host run: tox -e tests -- -k test_cuda diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bbd2f3e..31771fae 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,4 @@ -name: hordelib build and publish +name: horde-engine build and publish on: push: @@ -11,8 +11,13 @@ permissions: jobs: build-n-publish: - name: hordelib build and publish + name: horde-engine build and publish runs-on: ubuntu-latest + + environment: # see https://github.com/pypa/gh-action-pypi-publish/tree/release/v1/ + name: pypi + url: https://pypi.org/p/horde-engine/ + steps: # Version bumps the project and creates a tag by creating @@ -46,8 +51,8 @@ jobs: python-version: "3.10" # Install build deps - # NOTE If any hordelib imports used by build_helper.py are changed, or the specific modules - # imported from hordelib depend on a package not included here, running build_helper.py later on will fail. + # NOTE If any horde-engine imports used by build_helper.py are changed, or the specific modules + # imported from horde-engine depend on a package not included here, running build_helper.py later on will fail. # See `build_helper.py` for more information. - name: "🛠 Install pypa/build" if: ${{ steps.release.outputs.version != '' }} @@ -70,8 +75,6 @@ jobs: with: add: 'CHANGELOG.md' message: 'ci: update changelog' - committer_name: GitHub Actions - committer_email: actions@github.com # Patches our requirements.txt and pyproject.toml # Build a changelog @@ -104,9 +107,9 @@ jobs: curl -X PURGE https://camo.githubusercontent.com/769edfb1778d4cbc3f93bc5ad0be9597bbd2d9c162cc1e9fb44172a5b660af01/68747470733a2f2f706570792e746563682f62616467652f686f7264656c6962 - # - name: "Inform with Discord Webhook" - # if: ${{ steps.release.outputs.version != '' }} - # uses: tsickert/discord-webhook@v5.3.0 - # with: - # webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} - # content: "New version of hordelib has been published to pypi: ${{ steps.release.outputs.version }}. You can update your worker by running `./update-runtime.sh --hordelib` (Linux) or `update-runtime.cmd --hordelib` (Windows). 
Changelog: https://t.ly/z2vQ" + - name: "Inform with Discord Webhook" + if: ${{ steps.release.outputs.version != '' }} + uses: tsickert/discord-webhook@v5.3.0 + with: + webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} + content: "New version of horde-engine has been published to pypi: ${{ steps.release.outputs.version }}. Changelog: https://t.ly/z2vQ" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 05627545..84ab31b5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,21 +1,21 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 24.3.0 + rev: 24.4.2 hooks: - id: black exclude: ^hordelib/nodes/.*\..*$ - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.3.3 + rev: v0.4.3 hooks: - id: ruff - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.9.0' + rev: 'v1.10.0' hooks: - id: mypy exclude: ^examples/.*$ # FIXME diff --git a/README.md b/README.md index 67871989..7c0fc720 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# hordelib +# horde-engine [![PyPI Version][pypi-image]][pypi-url] [![Downloads][downloads-image]][downloads-url] @@ -10,11 +10,13 @@ [![All Models][all-model-images]][all-model-url] [![Release Changelog][changelog-image]][changelog-url] -`hordelib` is a wrapper around [ComfyUI](https://github.com/comfyanonymous/ComfyUI) primarily to enable the [AI Horde](https://aihorde.net/) to run inference pipelines designed visually in the ComfyUI GUI. +> Note: This project was formerly known as `hordelib`. The project namespace will be changed in the near future to reflect this change. -The developers of `hordelib` can be found in the AI Horde Discord server: [https://discord.gg/3DxrhksKzn](https://discord.gg/3DxrhksKzn) +`horde-engine` is a wrapper around [ComfyUI](https://github.com/comfyanonymous/ComfyUI) primarily to enable the [AI Horde](https://aihorde.net/) to run inference pipelines designed visually in the ComfyUI GUI. -`hordelib` has been the default inference backend library of the [AI Horde](https://aihorde.net/) since `hordelib` v1.0.0. +The developers of `horde-engine` can be found in the AI Horde Discord server: [https://discord.gg/3DxrhksKzn](https://discord.gg/3DxrhksKzn) + +Note that `horde-engine` (previously known as `hordelib`) has been the default inference backend library of the [AI Horde](https://aihorde.net/) since `hordelib` v1.0.0. ## Purpose @@ -45,19 +47,25 @@ If you only have 16GB of RAM you will also need swap space. 
So if you typically
 Horde payloads can be processed simply with (for example):

 ```python
-import os
+# import os
+# Wherever your models are
+# os.environ["AIWORKER_CACHE_HOME"] = "f:/ai/models" # Defaults to `models/` in the current working directory
+
 import hordelib
-hordelib.initialise()
+
+hordelib.initialise() # This must be called before any other hordelib functions
 from hordelib.horde import HordeLib
 from hordelib.shared_model_manager import SharedModelManager
-# Wherever your models are
-os.environ["AIWORKER_CACHE_HOME"] = "f:/ai/models"
-
 generate = HordeLib()
-SharedModelManager.loadModelManagers(compvis=True)
-SharedModelManager.manager.load("Deliberate")
+
+if SharedModelManager.manager.compvis is None:
+    raise Exception("Failed to load compvis model manager")
+
+SharedModelManager.manager.compvis.download_model("Deliberate")
+SharedModelManager.manager.compvis.validate_model("Deliberate")
+

 data = {
     "sampler_name": "k_dpmpp_2m",
@@ -79,7 +87,12 @@ data = {
     "model": "Deliberate",
 }
 pil_image = generate.basic_inference_single_image(data).image
+
+if pil_image is None:
+    raise Exception("Failed to generate image")
+
 pil_image.save("test.png")
+
 ```

 Note that `hordelib.initialise()` will erase all command line arguments from argv. So make sure you parse them before you call that.
@@ -88,7 +101,7 @@ See `tests/run_*.py` for more standalone examples.

 ### Logging

-If you don't want `hordelib` to setup and control the logging configuration initialise with:
+If you don't want `hordelib` to set up and control the logging configuration (we use [loguru](https://loguru.readthedocs.io/en/stable/)), initialise with:

 ```python
 import hordelib
@@ -111,14 +124,6 @@ Custom nodes for ComfyUI providing Controlnet preprocessing capability. Licened

 Custom nodes for ComfyUI providing face restoration.

-## DMCA Abuse
-
-On 26th May 2023 an [individual](https://github.com/hlky) issued a [DMCA takedown notice](https://github.com/github/dmca/blob/master/2023/05/2023-05-26-nataili.md) to Github against `hordelib` which claimed their name had been removed from the copyright header in the AGPL license in the 7 files listed in the takedown notice. This claim was true, and this attribution had been removed by a `hordelib` contributor prior to being committed into the `hordelib` repository.
-
-Unfortunately, it appears the individual making the DMCA claim was acting in bad faith, and even when their name was restored to the copyright attribution in the files, they persisted to press the DMCA takedown claim, which, due to the nature of the Github process, resulted in hordelib being subject to a DMCA takedown on Github.
-
-This version of `hordelib` has the 7 files mentioned in the DMCA takedown removed and replaced with alternatives and the functionality required to run the AI Horde Worker was restored on 7th June 2023.
-
 ## Development

 Requirements:
@@ -177,9 +182,9 @@ In this example we install the dependencies in the OS default environment. When

 `pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu118 --upgrade`

-Ensure ComfyUI is installed and patched, one way is running the tests:
+Ensure ComfyUI is installed; one way is to run the tests:

-`tox`
+`tox -- -k test_comfy_install`

 From then on to run ComfyUI:

@@ -245,32 +250,10 @@ The `images/` directory should have our test images.
 ### Updating the embedded version of ComfyUI

-We use a ComfyUI version pinned to a specific commit, see `hordelib/consts.py:COMFYUI_VERSION`
-
-To test if the latest version works and upgrade to it, from the project root simply:
-
-1. `cd ComfyUI` _Change CWD to the embedded comfy_
-1. `git checkout master` _Switch to master branch_
-1. `git pull` _Get the latest comfyui code_
-1. `git rev-parse HEAD` _Update the hash in `hordelib.consts:COMFYUI_VERSION`_
-1. `cd ..` _Get back to the hordelib project root_
-1. `tox` _See if everything still works_
-
-Now ComfyUI is pinned to a new version.
-
-### ComfyUI Patching
-
-We patch the ComfyUI source code to:
-
-1. Modify the model manager to allow us to dynamically move models between VRAM, RAM and disk cache.
-2. Allow make ComfyUI output some handy JSON we need for development purposes.
-
-To create a patch file:
-- Make the required changes to a clean install of ComfyUI and then run `git diff > yourfile.patch` then move the patch file to wherever you want to save it.
+- Change `COMFYUI_VERSION` in `hordelib/consts.py` to the desired ComfyUI commit hash.
+- Run the test suite via `tox`.
-Note that the patch file _really_ needs to be in UTF-8 format and some common terminals, e.g. Powershell, won't do this by default. In Powershell to create a patch file use: `git diff | Set-Content -Encoding utf8 -Path yourfile.patch`
-Patches can be applied with the `hordelib.install_comfyui.Installer` classes `apply_patch()` method.
diff --git a/hordelib/consts.py b/hordelib/consts.py
index d834d377..ed26b847 100644
--- a/hordelib/consts.py
+++ b/hordelib/consts.py
@@ -6,7 +6,7 @@ from hordelib.config_path import get_hordelib_path

-COMFYUI_VERSION = "a7dd82e668bfaf7fac365a4e73a1ba1acf224fbb"
+COMFYUI_VERSION = "16eabdf70dbdb64dc4822908f0fe455c56d11ec3"
 """The exact version of ComfyUI version to load."""

 REMOTE_PROXY = ""
diff --git a/hordelib/model_manager/base.py b/hordelib/model_manager/base.py
index 3e264c9a..afb7856f 100644
--- a/hordelib/model_manager/base.py
+++ b/hordelib/model_manager/base.py
@@ -420,15 +420,24 @@ def is_file_available(self, file_path: str | Path) -> bool:
         Returns True if the file exists, False otherwise
         """
         parsed_full_path = Path(f"{self.model_folder_path}/{file_path}")
+        is_custom_model = False
+        if isinstance(file_path, str):
+            check_path = Path(file_path)
+            if check_path.is_absolute():
+                parsed_full_path = Path(file_path)
+                is_custom_model = True
+        if isinstance(file_path, Path) and file_path.is_absolute():
+            parsed_full_path = Path(file_path)
+            is_custom_model = True

         if parsed_full_path.suffix == ".part":
             logger.debug(f"File {file_path} is a partial download, skipping")
             return False

         sha_file_path = Path(f"{self.model_folder_path}/{parsed_full_path.stem}.sha256")
-        if parsed_full_path.exists() and not sha_file_path.exists():
+        if parsed_full_path.exists() and not sha_file_path.exists() and not is_custom_model:
             self.get_file_sha256_hash(parsed_full_path)

-        return parsed_full_path.exists() and sha_file_path.exists()
+        return parsed_full_path.exists() and (sha_file_path.exists() or is_custom_model)

     def download_file(
         self,
@@ -739,6 +748,7 @@ def is_model_available(self, model_name: str) -> bool:
         model_files = self.get_model_filenames(model_name)
         for file_entry in model_files:
             if not self.is_file_available(file_entry["file_path"]):
+                logger.debug([file_entry["file_path"], self.is_file_available(file_entry["file_path"])])
                 return False
         return True
diff --git a/hordelib/model_manager/compvis.py b/hordelib/model_manager/compvis.py
index fca1bcf0..281cf35c 100644
--- a/hordelib/model_manager/compvis.py
+++ b/hordelib/model_manager/compvis.py
@@ -1,3 +1,9 @@
+import json
+import os
+from pathlib import Path
+
+from loguru import logger
+
 from hordelib.consts import MODEL_CATEGORY_NAMES
 from hordelib.model_manager.base import BaseModelManager

@@ -14,3 +20,31 @@ def __init__(
             download_reference=download_reference,
             **kwargs,
         )
+
+    def load_model_database(self) -> None:
+        super().load_model_database()
+
+        num_custom_models = 0
+
+        try:
+            extra_models_path_str = os.getenv("HORDELIB_CUSTOM_MODELS")
+            if extra_models_path_str:
+                extra_models_path = Path(extra_models_path_str)
+                if extra_models_path.exists():
+                    extra_models = json.loads((extra_models_path).read_text())
+                    for mname in extra_models:
+                        # Avoid clobbering
+                        if mname in self.model_reference:
+                            continue
+                        # Merge all custom models into our new model reference
+                        self.model_reference[mname] = extra_models[mname]
+                        if self.is_model_available(mname):
+                            self.available_models.append(mname)
+
+                    num_custom_models += len(extra_models)
+
+        except json.decoder.JSONDecodeError as e:
+            logger.error(f"Custom model database {extra_models_path_str} is not valid JSON: {e}")
+            raise
+
+        logger.info(f"Loaded {num_custom_models} custom models from {extra_models_path_str}")
diff --git a/hordelib/nodes/node_model_loader.py b/hordelib/nodes/node_model_loader.py
index 111f2cee..87b89950 100644
--- a/hordelib/nodes/node_model_loader.py
+++ b/hordelib/nodes/node_model_loader.py
@@ -1,6 +1,8 @@
 # node_model_loader.py
 # Simple proof of concept custom node to load models.

+from pathlib import Path
+
 import comfy.model_management
 import comfy.sd
 import folder_paths  # type: ignore
@@ -46,6 +48,9 @@ def load_checkpoint(
         logger.debug(f"Will load Loras: {will_load_loras}, seamless tiling: {seamless_tiling_enabled}")
         if ckpt_name:
             logger.debug(f"Checkpoint name: {ckpt_name}")
+            # Check if the checkpoint name is a path
+            if Path(ckpt_name).is_absolute():
+                logger.debug("Checkpoint name is an absolute path.")

         if preloading:
             logger.debug("Preloading model.")
@@ -87,13 +92,20 @@ def load_checkpoint(
             else:
                 # If there's no file_type passed, we follow the previous approach and pick the first file
                 # (There should be only one)
-                ckpt_name = file_entry["file_path"].name
+                if file_entry["file_path"].is_absolute():
+                    ckpt_name = str(file_entry["file_path"])
+                else:
+                    ckpt_name = file_entry["file_path"].name
                 break

         # Clear references so comfy can free memory as needed
         SharedModelManager.manager._models_in_ram = {}

-        ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name)
+        if ckpt_name is not None and Path(ckpt_name).is_absolute():
+            ckpt_path = ckpt_name
+        else:
+            ckpt_path = folder_paths.get_full_path("checkpoints", ckpt_name)
+
         with torch.no_grad():
             result = comfy.sd.load_checkpoint_guess_config(
                 ckpt_path,
diff --git a/images_expected/custom_model_text_to_image.png b/images_expected/custom_model_text_to_image.png
new file mode 100644
index 00000000..836d1942
Binary files /dev/null and b/images_expected/custom_model_text_to_image.png differ
diff --git a/pyproject.toml b/pyproject.toml
index 4a4fd316..32dfba9b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,12 +3,12 @@ requires = ["setuptools", "setuptools-scm"]
 build-backend = "setuptools.build_meta"

 [project]
-name = "hordelib"
-description = "A thin wrapper around ComfyUI to allow use by AI Horde."
+name = "horde-engine"
+description = "A wrapper around ComfyUI to allow use by AI Horde."
authors = [ - {name = "Jug", email = "jugdev@proton.me"}, - {name = "db0", email = "mail@dbzer0.com"}, {name = "tazlin", email = "tazlin.on.github@gmail.com"}, + {name = "db0", email = "mail@dbzer0.com"}, + {name = "Jug", email = "jugdev@proton.me"}, ] readme = "README.md" requires-python = ">=3.10" @@ -17,7 +17,7 @@ classifiers = [ "Programming Language :: Python :: 3", "License :: OSI Approved :: GNU Affero General Public License v3", "Operating System :: OS Independent", - "Development Status :: 2 - Pre-Alpha", + "Development Status :: 5 - Production/Stable", ] # Don't specify dynamic deps for tox, only for build @@ -25,9 +25,9 @@ dynamic=["version"] #dynamic=["version", "dependencies"] [project.urls] -"Homepage" = "https://github.com/jug-dev/hordelib" -"Bug Tracker" = "https://github.com/jug-dev/hordelib/issues" -"Changelog" = "https://github.com/jug-dev/hordelib/blob/releases/CHANGELOG.md" +"Homepage" = "https://github.com/Haidra-Org/hordelib" +"Bug Tracker" = "https://github.com/Haidra-Org/hordelib/issues" +"Changelog" = "https://github.com/Haidra-Org/hordelib/blob/releases/CHANGELOG.md" [tool.setuptools] license-files = ["LICENSE", "CHANGELOG*"] @@ -45,7 +45,7 @@ hordelib = "hordelib" [options.index-client] -extra-index-urls = ["https://download.pytorch.org/whl/cu118"] +extra-index-urls = ["https://download.pytorch.org/whl/cu121"] [tool.pytest.ini_options] minversion = "7.0" diff --git a/requirements.dev.txt b/requirements.dev.txt index b0b285d2..646ae2b2 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,9 +1,9 @@ -pytest==8.1.1 -mypy==1.9.0 -black==24.3.0 -ruff==0.3.3 -tox~=4.14.1 -pre-commit~=3.6.2 +pytest==8.2.0 +mypy==1.10.0 +black==24.4.2 +ruff==0.4.3 +tox~=4.15.0 +pre-commit~=3.7.0 build>=0.10.0 coverage>=7.2.7 diff --git a/tests/conftest.py b/tests/conftest.py index f8962842..a82491f9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import json import os from collections.abc import Generator from pathlib import Path @@ -20,7 +21,11 @@ def line_break(): @pytest.fixture(scope="session") -def init_horde(): +def init_horde( + custom_model_info_for_testing: tuple[str, str, str, str], + default_custom_model_json_path: str, + default_custom_model_json: dict[str, dict], +): """This fixture initialises HordeLib and sets the VRAM to leave free to 90%. You must call this fixture if your test uses a module which imports `hordelib.comfy_horde`. You will usually see a characteristic RuntimeError exception if you forget to call this fixture, but you may also see an @@ -33,6 +38,54 @@ def init_horde(): examples_path.exists() and examples_path.is_dir() ), "The `images_expected` directory must exist. You can find in in the github repo." + HORDELIB_CUSTOM_MODELS = os.getenv("HORDELIB_CUSTOM_MODELS", None) + print(f"HORDELIB_CUSTOM_MODELS: {HORDELIB_CUSTOM_MODELS}") + + if HORDELIB_CUSTOM_MODELS is not None: + assert os.path.exists( + HORDELIB_CUSTOM_MODELS, + ), f"Custom models directory {HORDELIB_CUSTOM_MODELS} does not exist." 
+ else: + if not os.path.exists(default_custom_model_json_path): + os.makedirs(os.path.dirname(default_custom_model_json_path), exist_ok=True) + with open(default_custom_model_json_path, "w") as f: + + json.dump(default_custom_model_json, f, indent=4) + + os.environ["HORDELIB_CUSTOM_MODELS"] = default_custom_model_json_path + + HORDELIB_CUSTOM_MODELS = os.getenv("HORDELIB_CUSTOM_MODELS", None) + + assert HORDELIB_CUSTOM_MODELS is not None + + # Load the custom models json and confirm the model is on disk + custom_models = None + with open(HORDELIB_CUSTOM_MODELS) as f: + custom_models = json.load(f) + + assert custom_models is not None + + custom_model_name, _, custom_model_filename, custom_model_url = custom_model_info_for_testing + + assert custom_model_name in custom_models + assert "config" in custom_models[custom_model_name] + assert "files" in custom_models[custom_model_name]["config"] + assert "path" in custom_models[custom_model_name]["config"]["files"][0] + assert custom_model_filename in custom_models[custom_model_name]["config"]["files"][0]["path"] + + custom_model_in_json_path = custom_models[custom_model_name]["config"]["files"][0]["path"] + + print(f"Custom model path: {custom_model_in_json_path}") + # If the custom model is not on disk, download it + if not os.path.exists(custom_model_in_json_path): + import requests + + response = requests.get(custom_model_url) + response.raise_for_status() + + with open(custom_model_in_json_path, "wb") as f: + f.write(response.content) + import hordelib hordelib.initialise(setup_logging=True, logging_verbosity=5, disable_smart_memory=True) @@ -53,7 +106,10 @@ def isolated_comfy_horde_instance(init_horde) -> Comfy_Horde: @pytest.fixture(scope="session") -def shared_model_manager(hordelib_instance: HordeLib) -> Generator[type[SharedModelManager], None, None]: +def shared_model_manager( + custom_model_info_for_testing: tuple[str, str, str, str], + hordelib_instance: HordeLib, +) -> Generator[type[SharedModelManager], None, None]: SharedModelManager() SharedModelManager.load_model_managers(ALL_MODEL_MANAGER_TYPES) @@ -75,6 +131,9 @@ def shared_model_manager(hordelib_instance: HordeLib) -> Generator[type[SharedMo assert SharedModelManager.manager.download_model("Stable Cascade 1.0") assert SharedModelManager.manager.validate_model("Stable Cascade 1.0") + custom_model_name, _, _, _ = custom_model_info_for_testing + assert custom_model_name in SharedModelManager.manager.compvis.available_models + assert SharedModelManager.manager.controlnet is not None assert SharedModelManager.manager.controlnet.download_all_models() assert SharedModelManager.preload_annotators() @@ -111,6 +170,48 @@ def stable_cascade_base_model_name(shared_model_manager: type[SharedModelManager return "Stable Cascade 1.0" +@pytest.fixture(scope="session") +def custom_model_info_for_testing() -> tuple[str, str, str, str]: + """Returns a tuple of the custom model name, its baseline, the on-disk file name and the download url.""" + # https://civitai.com/models/338712/pvc-style-modelmovable-figure-model-xl?modelVersionId=413807 + return ( + "Movable figure model XL", + "stable_diffusion_xl", + "PVCStyleModelMovable_beta25Realistic.safetensors", + "https://huggingface.co/mirroring/horde_models/resolve/main/PVCStyleModelMovable_beta25Realistic.safetensors?download=true", + ) + + +@pytest.fixture(scope="session") +def default_custom_model_directory_name() -> str: + return "custom" + + +@pytest.fixture(scope="session") +def 
default_custom_model_json_path(default_custom_model_directory_name) -> str: + AIWORKER_CACHE_HOME = os.getenv("AIWORKER_CACHE_HOME", "models") + return os.path.join(AIWORKER_CACHE_HOME, default_custom_model_directory_name, "custom_models.json") + + +@pytest.fixture(scope="session") +def default_custom_model_json( + custom_model_info_for_testing: tuple[str, str, str, str], + default_custom_model_directory_name, +) -> dict[str, dict]: + model_name, baseline, filename, _ = custom_model_info_for_testing + AIWORKER_CACHE_HOME = os.getenv("AIWORKER_CACHE_HOME", "models") + return { + model_name: { + "name": model_name, + "baseline": baseline, + "type": "ckpt", + "config": { + "files": [{"path": os.path.join(AIWORKER_CACHE_HOME, default_custom_model_directory_name, filename)}], + }, + }, + } + + @pytest.fixture(scope="session") def db0_test_image() -> PIL.Image.Image: return PIL.Image.open("images/test_db0.jpg") diff --git a/tests/model_managers/test_shared_model_manager.py b/tests/model_managers/test_shared_model_manager.py index a4583a0b..4c554e6d 100644 --- a/tests/model_managers/test_shared_model_manager.py +++ b/tests/model_managers/test_shared_model_manager.py @@ -69,7 +69,11 @@ def test_check_sha( continue if not (".pt" in path or ".ckpt" in path or ".safetensors" in path): continue - model_manager.get_file_sha256_hash(f"{model_manager.model_folder_path}/{path}") + # Check if `path` is already a full path + if os.path.isabs(path): + model_manager.get_file_sha256_hash(path) + else: + model_manager.get_file_sha256_hash(f"{model_manager.model_folder_path}/{path}") def test_check_validate_all_available_models( self, diff --git a/tests/test_horde_inference_custom_model.py b/tests/test_horde_inference_custom_model.py new file mode 100644 index 00000000..d07994bb --- /dev/null +++ b/tests/test_horde_inference_custom_model.py @@ -0,0 +1,54 @@ +# test_horde.py + +from PIL import Image + +from hordelib.horde import HordeLib + +from .testing_shared_functions import check_single_inference_image_similarity + + +class TestHordeInference: + def test_custom_model_text_to_image( + self, + hordelib_instance: HordeLib, + custom_model_info_for_testing: tuple[str, str, str, str], + ): + model_name, _, _, _ = custom_model_info_for_testing + data = { + "sampler_name": "k_euler_a", + "cfg_scale": 7.5, + "denoising_strength": 1.0, + "seed": 1312, + "height": 1024, + "width": 1024, + "karras": False, + "tiling": False, + "hires_fix": False, + "clip_skip": 2, + "control_type": None, + "image_is_control": False, + "return_control_map": False, + "prompt": ( + "surreal,amazing quality,masterpiece,best quality,awesome,inspiring,cinematic composition" + ",soft shadows,Film grain,shallow depth of field,highly detailed,high budget,cinemascope,epic," + "OverallDetail,color graded cinematic,atmospheric lighting,imperfections,natural,shallow dof," + "1girl,solo,looking at viewer,kurumi_ebisuzawa,twin tails,hair ribbon,leather jacket,leather pants," + "black jacket,tight pants,black chocker,zipper,fingerless gloves,biker clothes,spikes,unzipped," + "shoulder spikes,multiple belts,shiny clothes,(graffiti:1.2),brick wall,dutch angle,crossed arms," + "arms under breasts,anarchist mask,v-shaped eyebrows" + ), + "ddim_steps": 30, + "n_iter": 1, + "model": model_name, + } + pil_image = hordelib_instance.basic_inference_single_image(data).image + assert pil_image is not None + assert isinstance(pil_image, Image.Image) + + img_filename = "custom_model_text_to_image.png" + pil_image.save(f"images/{img_filename}", quality=100) + + 
assert check_single_inference_image_similarity( + f"images_expected/{img_filename}", + pil_image, + ) diff --git a/tests/test_utils.py b/tests/test_utils.py index 62cb06be..745fe508 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,3 +1,5 @@ +import pytest + from hordelib.settings import UserSettings from hordelib.utils.distance import ( CosineSimilarityResultCode, @@ -20,6 +22,7 @@ def test_worker_settings_percent_check(): assert UserSettings._is_percentage("%50") is False +@pytest.mark.skip(reason="This refers to code that is not currently used in production.") class TestWorkerSettingsWithInit: def test_worker_settings_properties_comparable(self, init_horde): assert UserSettings.get_ram_to_leave_free_mb() > 0 @@ -49,6 +52,7 @@ def test_result_codes_in_order(self): last_value = result_code +@pytest.mark.skip(reason="This refers to code that is not currently used in production.") class TestGPUInfo: def test_gpuinfo_init(self): gpu = GPUInfo() diff --git a/tox.ini b/tox.ini index 15a0309d..67540290 100644 --- a/tox.ini +++ b/tox.ini @@ -76,6 +76,7 @@ passenv = AIWORKER_CACHE_HOME TESTS_ONGOING HORDELIB_SKIP_SIMILARITY_FAIL + HORDELIB_CUSTOM_MODELS CIVIT_API_TOKEN HORDE_MODEL_REFERENCE_GITHUB_BRANCH
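
> Reviewer note: the new custom-model support (`HORDELIB_CUSTOM_MODELS` in `compvis.py`, plus the absolute-path handling in `node_model_loader.py` and `base.py`) is only exercised through the test fixtures in this diff. Below is a minimal usage sketch, assuming the JSON layout of the `default_custom_model_json` fixture; the model name, baseline and file path are placeholders, and the loading flow simply mirrors the updated README example rather than any additional documented API.

```python
# Sketch only: the model name, baseline and path are placeholders, not a documented interface.
import json
import os

custom_models = {
    "My Custom Model XL": {  # arbitrary display name, later used as the "model" payload field
        "name": "My Custom Model XL",
        "baseline": "stable_diffusion_xl",
        "type": "ckpt",
        "config": {
            # May be an absolute path; node_model_loader.py now passes absolute paths straight
            # to ComfyUI, and base.py skips the .sha256 requirement for such custom entries.
            "files": [{"path": "/data/models/custom/my_custom_model.safetensors"}],
        },
    },
}

with open("custom_models.json", "w") as f:
    json.dump(custom_models, f, indent=4)

# Set before hordelib loads its model databases (the test fixtures do this before initialise()).
os.environ["HORDELIB_CUSTOM_MODELS"] = os.path.abspath("custom_models.json")

import hordelib

hordelib.initialise()

from hordelib.horde import HordeLib
from hordelib.shared_model_manager import SharedModelManager

generate = HordeLib()

if SharedModelManager.manager.compvis is None:
    raise Exception("Failed to load compvis model manager")

# The custom entry is merged into the model reference and, if the file exists on disk,
# reported as available; it can then be requested like any other model.
print("My Custom Model XL" in SharedModelManager.manager.compvis.available_models)
```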