moved most of the functionality from webui.bat into cross-platform launch.py

moved stable diffusion dependencies into requirements.txt
added checkout of specific commit hashes for all external repos
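
launch.py reads its settings from environment variables (GIT, TORCH_COMMAND, REQS_FILE, COMMANDLINE_ARGS, the package URLs and the repo commit hashes), so the overrides that used to live in webui.bat work the same way on any platform. A minimal sketch of invoking the launcher with a couple of overrides; the values shown are hypothetical:

import os
import subprocess
import sys

# hypothetical overrides; any variable launch.py reads via os.environ.get() can be set this way
os.environ["COMMANDLINE_ARGS"] = "--no-half"   # forwarded to webui.py through sys.argv
os.environ["REQS_FILE"] = "requirements.txt"   # install from the unpinned requirements file
subprocess.run([sys.executable, "launch.py"], check=True)
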
AUTOMATIC 2022-09-13 16:48:18 +03:00
parent c84e333622
commit 33e6b6e9a6
4 changed files with 135 additions and 106 deletions

launch.py (new file, 127 additions)

@@ -0,0 +1,127 @@
# this script installs necessary requirements and launches the main program in webui.py
import subprocess
import os
import sys
import importlib.util
import shlex
dir_repos = "repositories"
dir_tmp = "tmp"
python = sys.executable
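# the settings below are read from the environment, falling back to these defaults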
git = os.environ.get('GIT', "git")
torch_command = os.environ.get('TORCH_COMMAND', "pip install torch==1.12.1+cu113 --extra-index-url https://download.pytorch.org/whl/cu113")
requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt")
commandline_args = os.environ.get('COMMANDLINE_ARGS', "")
k_diffusion_package = os.environ.get('K_DIFFUSION_PACKAGE', "git+https://github.com/crowsonkb/k-diffusion.git@1a0703dfb7d24d8806267c3e7ccc4caf67fd1331")
gfpgan_package = os.environ.get('GFPGAN_PACKAGE', "git+https://github.com/TencentARC/GFPGAN.git@8d2447a2d918f8eba5a4a01463fd48e45126a379")
stable_diffusion_commit_hash = os.environ.get('STABLE_DIFFUSION_COMMIT_HASH', "69ae4b35e0a0f6ee1af8bb9a5d0016ccb27e36dc")
taming_transformers_commit_hash = os.environ.get('TAMING_TRANSFORMERS_COMMIT_HASH', "24268930bf1dce879235a7fddd0b2355b84d7ea6")
codeformer_commit_hash = os.environ.get('CODEFORMER_COMMIT_HASH', "c5b4593074ba6214284d6acd5f1719b6c5d739af")
blip_commit_hash = os.environ.get('BLIP_COMMIT_HASH', "48211a1594f1321b00f14c9f7a5b4813144b2fb9")
def repo_dir(name):
    return os.path.join(dir_repos, name)

def run(command, desc=None, errdesc=None):
    if desc is not None:
        print(desc)

    result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

    if result.returncode != 0:
        message = f"""{errdesc or 'Error running command'}.
Command: {command}
Error code: {result.returncode}
stdout: {result.stdout.decode(encoding="utf8", errors="ignore") if len(result.stdout)>0 else '<empty>'}
stderr: {result.stderr.decode(encoding="utf8", errors="ignore") if len(result.stderr)>0 else '<empty>'}
"""
        raise RuntimeError(message)

    return result.stdout.decode(encoding="utf8", errors="ignore")
def run_python(code, desc=None, errdesc=None):
    return run(f'{python} -c "{code}"', desc, errdesc)

def run_pip(args, desc=None):
    return run(f'{python} -m pip {args} --prefer-binary', desc=f"Installing {desc}", errdesc=f"Couldn't install {desc}")

def check_run(command):
    result = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    return result.returncode == 0

def check_run_python(code):
    return check_run(f'{python} -c "{code}"')

def is_installed(package):
    try:
        spec = importlib.util.find_spec(package)
    except ModuleNotFoundError:
        return False

    return spec is not None
def git_clone(url, dir, name, commithash=None):
    # TODO clone into temporary dir and move if successful
    if os.path.exists(dir):
        return

    run(f'{git} clone "{url}" "{dir}"', f"Cloning {name} into {dir}...", f"Couldn't clone {name}")

    if commithash is not None:
        run(f"{git} -C {dir} checkout {commithash}", None, f"Couldn't checkout {name}'s hash: {commithash}")

try:
    commit = run(f"{git} rev-parse HEAD").strip()
except Exception:
    commit = "<none>"
print(f"Python {sys.version}")
print(f"Commit hash: {commit}")
if not is_installed("torch"):
run(f"{python} -m {torch_command}", "Installing torch", "Couldn't install torch")
run_python("import torch; assert torch.cuda.is_available(), 'Torch is not able to use GPU'")
if not is_installed("k_diffusion.sampling"):
run_pip(f"install {k_diffusion_package}", "k-diffusion")
if not is_installed("gfpgan"):
run_pip(f"install {gfpgan_package}", "gfpgan")
os.makedirs(dir_repos, exist_ok=True)
git_clone("https://github.com/CompVis/stable-diffusion.git", repo_dir('stable-diffusion'), "Stable Diffusion", stable_diffusion_commit_hash)
git_clone("https://github.com/CompVis/taming-transformers.git", repo_dir('taming-transformers'), "Taming Transformers", taming_transformers_commit_hash)
git_clone("https://github.com/sczhou/CodeFormer.git", repo_dir('CodeFormer'), "CodeFormer", codeformer_commit_hash)
git_clone("https://github.com/salesforce/BLIP.git", repo_dir('BLIP'), "BLIP", blip_commit_hash)
if not is_installed("lpips"):
run_pip(f"install -r {os.path.join(repo_dir('CodeFormer'), 'requirements.txt')}", "requirements for CodeFormer")
run_pip(f"install -r {requirements_file}", "requirements for Web UI")
sys.argv += shlex.split(commandline_args)
def start_webui():
    print(f"Launching Web UI with arguments: {' '.join(sys.argv[1:])}")
    import webui
    webui.webui()
start_webui()

requirements.txt

@@ -1,3 +1,5 @@
transformers
diffusers
basicsr
gfpgan
gradio

requirements_versions.txt

@@ -1,3 +1,5 @@
transformers==4.19.2
diffusers==0.2.4
basicsr==1.4.2
gfpgan
gradio==3.3

webui.bat (110 changed lines)

@@ -1,26 +1,16 @@
@echo off
if not defined PYTHON (set PYTHON=python)
if not defined GIT (set GIT=git)
if not defined COMMANDLINE_ARGS (set COMMANDLINE_ARGS=%*)
if not defined VENV_DIR (set VENV_DIR=venv)
if not defined TORCH_COMMAND (set TORCH_COMMAND=pip install torch==1.12.1+cu113 --extra-index-url https://download.pytorch.org/whl/cu113)
if not defined REQS_FILE (set REQS_FILE=requirements_versions.txt)
mkdir tmp 2>NUL
%PYTHON% -c "" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :check_git
if %ERRORLEVEL% == 0 goto :start_venv
echo Couldn't launch python
goto :show_stdout_stderr
:check_git
%GIT% --help >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :setup_venv
echo Couldn't launch git
goto :show_stdout_stderr
:setup_venv
:start_venv
if [%VENV_DIR%] == [-] goto :skip_venv
dir %VENV_DIR%\Scripts\Python.exe >tmp/stdout.txt 2>tmp/stderr.txt
@@ -35,105 +25,13 @@ goto :show_stdout_stderr
:activate_venv
set PYTHON="%~dp0%VENV_DIR%\Scripts\Python.exe"
%PYTHON% --version
echo venv %PYTHON%
goto :print_commit
goto :launch
:skip_venv
%PYTHON% --version
:print_commit
%GIT% rev-parse HEAD
:install_torch
%PYTHON% -c "import torch" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :check_gpu
echo Installing torch...
%PYTHON% -m %TORCH_COMMAND% >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :check_gpu
echo Failed to install torch
goto :show_stdout_stderr
:check_gpu
%PYTHON% -c "import torch; assert torch.cuda.is_available(), 'CUDA is not available'" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_sd_reqs
echo Torch is not able to use GPU
goto :show_stdout_stderr
:install_sd_reqs
%PYTHON% -c "import transformers; import wheel" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_k_diff
echo Installing SD requirements...
%PYTHON% -m pip install wheel transformers==4.19.2 diffusers invisible-watermark --prefer-binary >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_k_diff
goto :show_stdout_stderr
:install_k_diff
%PYTHON% -c "import k_diffusion.sampling" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_GFPGAN
echo Installing K-Diffusion...
%PYTHON% -m pip install git+https://github.com/crowsonkb/k-diffusion.git@1a0703dfb7d24d8806267c3e7ccc4caf67fd1331 --prefer-binary --only-binary=psutil >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_GFPGAN
goto :show_stdout_stderr
:install_GFPGAN
%PYTHON% -c "import gfpgan" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_reqs
echo Installing GFPGAN
%PYTHON% -m pip install git+https://github.com/TencentARC/GFPGAN.git@8d2447a2d918f8eba5a4a01463fd48e45126a379 --prefer-binary >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_reqs
goto :show_stdout_stderr
:install_reqs
echo Installing requirements...
%PYTHON% -m pip install -r %REQS_FILE% --prefer-binary >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :make_dirs
goto :show_stdout_stderr
:make_dirs
mkdir repositories 2>NUL
if exist repositories\stable-diffusion goto :clone_transformers
echo Cloning Stable Diffusion repository...
%GIT% clone https://github.com/CompVis/stable-diffusion.git repositories\stable-diffusion >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :clone_transformers
goto :show_stdout_stderr
:clone_transformers
if exist repositories\taming-transformers goto :clone_codeformer
echo Cloning Taming Transformers repository...
%GIT% clone https://github.com/CompVis/taming-transformers.git repositories\taming-transformers >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :clone_codeformer
goto :show_stdout_stderr
:clone_codeformer
if exist repositories\CodeFormer goto :install_codeformer_reqs
echo Cloning CodeFormer repository...
%GIT% clone https://github.com/sczhou/CodeFormer.git repositories\CodeFormer >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :install_codeformer_reqs
goto :show_stdout_stderr
:install_codeformer_reqs
%PYTHON% -c "import lpips" >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :clone_blip
echo Installing requirements for CodeFormer...
%PYTHON% -m pip install -r repositories\CodeFormer\requirements.txt --prefer-binary >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% == 0 goto :clone_blip
goto :show_stdout_stderr
:clone_blip
if exist repositories\BLIP goto :launch
echo Cloning BLIP repository...
%GIT% clone https://github.com/salesforce/BLIP.git repositories\BLIP >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% NEQ 0 goto :show_stdout_stderr
%GIT% -C repositories/BLIP checkout 48211a1594f1321b00f14c9f7a5b4813144b2fb9 >tmp/stdout.txt 2>tmp/stderr.txt
if %ERRORLEVEL% NEQ 0 goto :show_stdout_stderr
:launch
echo Launching webui.py...
%PYTHON% webui.py %COMMANDLINE_ARGS%
%PYTHON% launch.py
pause
exit /b