From 504f426f0ab11c093dd63527640f6d2b280c8246 Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 11:53:39 +1100
Subject: [PATCH 1/6] Update xformers to ~0.0.22

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4a64b530bfc..32eaec63da5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -109,7 +109,7 @@ dependencies = [
     "pytest-datadir",
 ]
 "xformers" = [
-    "xformers==0.0.21; sys_platform!='darwin'",
+    "xformers~=0.0.22; sys_platform!='darwin'",
     "triton; sys_platform=='linux'",
 ]
 "onnx" = ["onnxruntime"]

From a98426d2c624f64b4afea6dda5e8742743dca2da Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 11:57:02 +1100
Subject: [PATCH 2/6] Update installer.py to cu121

---
 installer/lib/installer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/installer/lib/installer.py b/installer/lib/installer.py
index bf48e3b06d2..f59a358b6ca 100644
--- a/installer/lib/installer.py
+++ b/installer/lib/installer.py
@@ -460,10 +460,10 @@ def get_torch_source() -> (Union[str, None], str):
             url = "https://download.pytorch.org/whl/cpu"
 
     if device == "cuda":
-        url = "https://download.pytorch.org/whl/cu118"
+        url = "https://download.pytorch.org/whl/cu121"
         optional_modules = "[xformers,onnx-cuda]"
     if device == "cuda_and_dml":
-        url = "https://download.pytorch.org/whl/cu118"
+        url = "https://download.pytorch.org/whl/cu121"
         optional_modules = "[xformers,onnx-directml]"
 
     # in all other cases, Torch wheels should be coming from PyPi as of Torch 1.13

From 3a50798a52e60e74f835996e375b6db933d39101 Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 12:00:39 +1100
Subject: [PATCH 3/6] Update xformers to 0.0.22post7

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 32eaec63da5..8e9c97e5b62 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -109,7 +109,7 @@ dependencies = [
     "pytest-datadir",
 ]
 "xformers" = [
-    "xformers~=0.0.22; sys_platform!='darwin'",
+    "xformers==0.0.22post7; sys_platform!='darwin'",
     "triton; sys_platform=='linux'",
 ]
 "onnx" = ["onnxruntime"]

From d573a23090e16c22234c3a0eb501126a8aedc610 Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 15:48:53 +1100
Subject: [PATCH 4/6] Moved FreeU Config Import

---
 invokeai/app/invocations/model.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index 32815191c28..1fab98da39e 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -3,7 +3,8 @@
 
 from pydantic import BaseModel, ConfigDict, Field
 
-from invokeai.app.invocations.shared import FreeUConfig
+# TODO: Permanent fix for this
+# from invokeai.app.invocations.shared import FreeUConfig
 
 from ...backend.model_management import BaseModelType, ModelType, SubModelType
 from .baseinvocation import (
@@ -20,6 +21,18 @@
 )
 
 
+class FreeUConfig(BaseModel):
+    """
+    Configuration for the FreeU hyperparameters.
+    - https://huggingface.co/docs/diffusers/main/en/using-diffusers/freeu
+    - https://github.com/ChenyangSi/FreeU
+    """
+
+    s1: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_s1)
+    s2: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_s2)
+    b1: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_b1)
+    b2: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_b2)
+
 class ModelInfo(BaseModel):
     model_name: str = Field(description="Info to load submodel")
     base_model: BaseModelType = Field(description="Base model")

From 5c3a27aac6a9503986aee64f27b15fa478cb3fc9 Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 16:03:06 +1100
Subject: [PATCH 5/6] fixed sorts

---
 invokeai/app/invocations/model.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index 1fab98da39e..83ece5be862 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -3,9 +3,6 @@
 
 from pydantic import BaseModel, ConfigDict, Field
 
-# TODO: Permanent fix for this
-# from invokeai.app.invocations.shared import FreeUConfig
-
 from ...backend.model_management import BaseModelType, ModelType, SubModelType
 from .baseinvocation import (
     BaseInvocation,
@@ -20,6 +17,10 @@
     invocation_output,
 )
 
+# TODO: Permanent fix for this
+# from invokeai.app.invocations.shared import FreeUConfig
+
+
 
 class FreeUConfig(BaseModel):
     """

From 4cfd55936ca281f2c9857237ff973750fc55ecab Mon Sep 17 00:00:00 2001
From: Millun Atluri
Date: Tue, 7 Nov 2023 16:06:18 +1100
Subject: [PATCH 6/6] run black formatting

---
 invokeai/app/invocations/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index 83ece5be862..4c57996e67a 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -21,7 +21,6 @@
 # from invokeai.app.invocations.shared import FreeUConfig
 
 
-
 class FreeUConfig(BaseModel):
     """
     Configuration for the FreeU hyperparameters.
@@ -34,6 +33,7 @@
     b1: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_b1)
     b2: float = Field(ge=-1, le=3, description=FieldDescriptions.freeu_b2)
 
+
 class ModelInfo(BaseModel):
     model_name: str = Field(description="Info to load submodel")
     base_model: BaseModelType = Field(description="Base model")
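Note (not part of the patch series above): the FreeUConfig model that PATCH 4/6 adds to model.py only declares and validates the four FreeU hyperparameters. As a rough illustration of how values like these are consumed downstream, here is a minimal sketch using the diffusers enable_freeu API that the class docstring links to. It assumes diffusers >= 0.21 and a CUDA device; the model id and hyperparameter values (the SD 1.x settings suggested by the FreeU authors) are illustrative, not taken from InvokeAI's own pipeline code.

import torch
from diffusers import StableDiffusionPipeline

# Load a Stable Diffusion 1.5 pipeline (illustrative model id).
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# s1/s2 scale the skip-connection features and b1/b2 the backbone features;
# all four values fall inside the ge=-1 / le=3 bounds enforced by FreeUConfig.
pipe.enable_freeu(s1=0.9, s2=0.2, b1=1.5, b2=1.6)
image = pipe("a photo of an astronaut riding a horse").images[0]

pipe.disable_freeu()  # switch FreeU back off for subsequent generations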