launch.py (forked from lllyasviel/Fooocus)
107 lines (76 loc) · 3.88 KB
import os
import sys
print('[System ARGV] ' + str(sys.argv))
root = os.path.dirname(os.path.abspath(__file__))
backend_path = os.path.join(root, 'backend', 'headless')
sys.path += [root, backend_path]
os.chdir(root)
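
# PYTORCH_ENABLE_MPS_FALLBACK lets PyTorch fall back to the CPU for ops that are not
# implemented on Apple's MPS backend; GRADIO_SERVER_PORT pins the web UI to port 7865.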
os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"
os.environ["GRADIO_SERVER_PORT"] = "7865"
import platform
import fooocus_version
from build_launcher import build_launcher
from modules.launch_util import is_installed, run, python, run_pip, requirements_met
from modules.model_loader import load_file_from_url
from modules.config import path_checkpoints, path_loras, path_vae_approx, path_fooocus_expansion, \
    checkpoint_downloads, path_embeddings, embeddings_downloads, lora_downloads
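
# Maintenance switches: REINSTALL_ALL forces a clean reinstall of the Python
# dependencies; TRY_INSTALL_XFORMERS opts into installing the optional xformers package.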
REINSTALL_ALL = False
TRY_INSTALL_XFORMERS = False
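
# Install torch/torchvision (and optionally xformers) when missing, then make sure the
# pinned requirements file is satisfied.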
def prepare_environment():
    torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://download.pytorch.org/whl/cu121")
    torch_command = os.environ.get('TORCH_COMMAND',
                                   f"pip install torch==2.1.0 torchvision==0.16.0 --extra-index-url {torch_index_url}")
    requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt")

    print(f"Python {sys.version}")
    print(f"Fooocus version: {fooocus_version.version}")

    if REINSTALL_ALL or not is_installed("torch") or not is_installed("torchvision"):
        run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True)

    if TRY_INSTALL_XFORMERS:
        if REINSTALL_ALL or not is_installed("xformers"):
            xformers_package = os.environ.get('XFORMERS_PACKAGE', 'xformers==0.0.20')
            if platform.system() == "Windows":
                if platform.python_version().startswith("3.10"):
                    run_pip(f"install -U -I --no-deps {xformers_package}", "xformers", live=True)
                else:
                    print("Installation of xformers is not supported in this version of Python.")
                    print(
                        "You can also check this and build manually: https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Xformers#building-xformers-on-windows-by-duckness")
                    if not is_installed("xformers"):
                        exit(0)
            elif platform.system() == "Linux":
                run_pip(f"install -U -I --no-deps {xformers_package}", "xformers")

    if REINSTALL_ALL or not requirements_met(requirements_file):
        run_pip(f"install -r \"{requirements_file}\"", "requirements")

    return
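
# (local filename, download URL) pairs for the VAE-approximation models and the
# XL-to-v1 latent interposer fetched by download_models() below.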
vae_approx_filenames = [
    ('xlvaeapp.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/xlvaeapp.pth'),
    ('vaeapp_sd15.pth', 'https://huggingface.co/lllyasviel/misc/resolve/main/vaeapp_sd15.pt'),
    ('xl-to-v1_interposer-v3.1.safetensors',
     'https://huggingface.co/lllyasviel/misc/resolve/main/xl-to-v1_interposer-v3.1.safetensors')
]
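
# Download every model listed in the user config (checkpoints, embeddings, LoRAs),
# the auxiliary files above, and the Fooocus prompt-expansion model.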
def download_models():
    for file_name, url in checkpoint_downloads.items():
        load_file_from_url(url=url, model_dir=path_checkpoints, file_name=file_name)
    for file_name, url in embeddings_downloads.items():
        load_file_from_url(url=url, model_dir=path_embeddings, file_name=file_name)
    for file_name, url in lora_downloads.items():
        load_file_from_url(url=url, model_dir=path_loras, file_name=file_name)
    for file_name, url in vae_approx_filenames:
        load_file_from_url(url=url, model_dir=path_vae_approx, file_name=file_name)

    load_file_from_url(
        url='https://huggingface.co/lllyasviel/misc/resolve/main/fooocus_expansion.bin',
        model_dir=path_fooocus_expansion,
        file_name='pytorch_model.bin'
    )

    return
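
# Return the command-line arguments parsed by args_manager.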
def ini_fcbh_args():
    from args_manager import args
    return args
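
# Launch sequence: prepare the Python environment, run build_launcher(), read the CLI
# arguments, optionally pin CUDA_VISIBLE_DEVICES to the requested GPU, download models,
# then import webui to start the Gradio interface.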
prepare_environment()
build_launcher()
args = ini_fcbh_args()
if args.cuda_device is not None:
    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda_device)
    print("Set device to:", args.cuda_device)
download_models()
from webui import *