Compare commits

..

3 Commits

Author SHA1 Message Date
40ea9f1cce
improved/fixed start/install/update system 2024-08-29 12:26:37 +02:00
6687d59e84
revert dockerfile change 2024-08-29 12:24:22 +02:00
06d9e39f9d
qol impro. 2024-08-29 12:10:20 +02:00
11 changed files with 60 additions and 59 deletions

View File

@ -39,10 +39,18 @@ class Stack:
self.pid = config.get(f"{self.name}-pid") self.pid = config.get(f"{self.name}-pid")
def install(self): def install(self):
self.check_for_broken_install()
self.create_venv()
self._install()
self.create_file('.installed', 'true') self.create_file('.installed', 'true')
logger.info(f"Installed {self.name}") logger.info(f"Installed {self.name}")
def _install(self):
pass
def is_installed(self): def is_installed(self):
return self.file_exists('.installed') return self.file_exists('.installed')
@ -57,26 +65,25 @@ class Stack:
self.create_dir('') self.create_dir('')
def update(self, folder: str = 'webui'): def update(self, folder: str = 'webui'):
if self.dir_exists(folder): if self.is_installed():
logger.info(f"Updating {self.name}") logger.info(f"Updating {self.name}")
self.git_pull(folder) self.git_pull(folder)
else: else:
logger.warning(f"Could not update {self.name} as {folder} does not exist") logger.warning(f"Could not update {self.name} as {self.name} is not installed")
choices.any_key.ask()
def uninstall(self): def uninstall(self):
logger.info(f"Uninstalling {self.name}")
self.bash(f"rm -rf {self.path}") self.bash(f"rm -rf {self.path}")
def start(self): def start(self):
if self.is_installed(): if self.is_installed():
self.update() self._start()
else: else:
self.check_for_broken_install() logger.error(f"{self.name} is not installed")
self.create_venv() choices.any_key.ask()
self.install()
self._launch() def _start(self):
def _launch(self):
pass pass
def stop(self): def stop(self):
@ -147,7 +154,7 @@ class Stack:
if choice is True: if choice is True:
self.stop() self.stop()
self._launch() self._start()
return return
else: else:
# TODO: attach to subprocess / redirect logs? # TODO: attach to subprocess / redirect logs?

View File

@ -63,7 +63,9 @@ def run_command(command: str, exit_on_error: bool = True):
if process.returncode != 0: if process.returncode != 0:
logger.fatal(f"Failed to run command: {command}") logger.fatal(f"Failed to run command: {command}")
raise Exception(f"Failed to run command: {command}")
if exit_on_error:
raise Exception(f"Failed to run command: {command}")
return out, err, process.returncode return out, err, process.returncode
@ -71,13 +73,9 @@ def run_command(command: str, exit_on_error: bool = True):
def load_service_from_string(service: str) -> Stack: def load_service_from_string(service: str) -> Stack:
logger.debug(f"Loading service from string: {service}") logger.debug(f"Loading service from string: {service}")
try: service_name = service.replace("_", " ").title().replace(" ", "")
service_name = service.replace("_", " ").title().replace(" ", "")
module = importlib.import_module(f"services.{service}") module = importlib.import_module(f"services.{service}")
met = getattr(module, service_name) met = getattr(module, service_name)
return met() return met()
except ModuleNotFoundError as e:
logger.error(f"Failed to load service: {e}")
return None

View File

@ -9,18 +9,22 @@ ENV DEBIAN_FRONTEND=noninteractive \
# For bitsandbytes # For bitsandbytes
ENV BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102" ENV BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102"
# For LLAMA # For LLAMA
#ARG ROCM_DOCKER_ARCH=\ # Syntax error but works
# gfx803 \ ARG ROCM_DOCKER_ARCH=\
# gfx900 \ gfx803 \
# gfx906 \ gfx900 \
# gfx908 \ gfx906 \
# gfx90a \ gfx908 \
# gfx1010 \ gfx90a \
# gfx1030 \ gfx1010 \
# gfx1100 \ gfx1030 \
# gfx1101 \ gfx1100 \
# gfx1102 gfx1101 \
ARG ROCM_DOCKER_ARCH="gfx803 gfx 900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101 gfx1102" gfx1102
# no syntax error but doesn't work for some reason (error on llama compile)??
# CMake Error: The source directory "/tmp/llama-cpp-python/gfx1102" does not exist.
# ARG ROCM_DOCKER_ARCH="gfx803 gfx 900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101 gfx1102"
ENV GPU_TARGETS=${ROCM_DOCKER_ARCH} ENV GPU_TARGETS=${ROCM_DOCKER_ARCH}
ENV CC=/opt/rocm/llvm/bin/clang ENV CC=/opt/rocm/llvm/bin/clang

View File

@ -10,7 +10,7 @@ class BackgroundRemovalDis(Stack):
'https://huggingface.co/spaces/ECCV2022/dis-background-removal' 'https://huggingface.co/spaces/ECCV2022/dis-background-removal'
) )
def install(self): def _install(self):
self.git_clone(url=self.url, dest="webui") self.git_clone(url=self.url, dest="webui")
self.install_requirements("webui/requirements.txt") self.install_requirements("webui/requirements.txt")
self.pip_install("gradio") # gradio is not in requirements.txt for some reason self.pip_install("gradio") # gradio is not in requirements.txt for some reason
@ -26,8 +26,6 @@ class BackgroundRemovalDis(Stack):
# self.remove_dir("webui/.git") # saves a lot of space due to big repo # self.remove_dir("webui/.git") # saves a lot of space due to big repo
super().install() def _start(self):
def _launch(self):
self.python(f"app.py", current_dir="webui", self.python(f"app.py", current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0", f"GRADIO_SERVER_PORT={self.port}"], daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0", f"GRADIO_SERVER_PORT={self.port}"], daemon=True)

View File

@ -10,7 +10,7 @@ class ComfyUi(Stack):
'https://github.com/comfyanonymous/ComfyUI.git' 'https://github.com/comfyanonymous/ComfyUI.git'
) )
def install(self): def _install(self):
# Install the webui # Install the webui
self.git_clone(url=self.url, dest="webui") self.git_clone(url=self.url, dest="webui")
self.install_requirements("webui/requirements.txt") self.install_requirements("webui/requirements.txt")
@ -26,9 +26,7 @@ class ComfyUi(Stack):
self.git_clone(url="https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4.git", self.git_clone(url="https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4.git",
dest="webui/custom_nodes/ComfyUI_bitsandbytes_NF4") dest="webui/custom_nodes/ComfyUI_bitsandbytes_NF4")
super().install() def _start(self):
def _launch(self):
args = ["--port", str(self.port)] args = ["--port", str(self.port)]
self.python(f"main.py", args=args, current_dir="webui", self.python(f"main.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class StableDiffusionForge(Stack):
'https://github.com/lllyasviel/stable-diffusion-webui-forge' 'https://github.com/lllyasviel/stable-diffusion-webui-forge'
) )
def install(self): def _install(self):
# Install the webui # Install the webui
self.git_clone(url=self.url, dest="webui") self.git_clone(url=self.url, dest="webui")
@ -19,9 +19,7 @@ class StableDiffusionForge(Stack):
# Add NF4 support for Flux # Add NF4 support for Flux
self.install_from_prebuilt("bitsandbytes") self.install_from_prebuilt("bitsandbytes")
super().install() def _start(self):
def _launch(self):
args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)] args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
self.python(f"launch.py", args=args, current_dir="webui", self.python(f"launch.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class StableDiffusionWebui(Stack):
'https://github.com/AUTOMATIC1111/stable-diffusion-webui' 'https://github.com/AUTOMATIC1111/stable-diffusion-webui'
) )
def install(self): def _install(self):
# Install the webui # Install the webui
self.git_clone(url=self.url, branch="dev", dest="webui") self.git_clone(url=self.url, branch="dev", dest="webui")
@ -19,9 +19,7 @@ class StableDiffusionWebui(Stack):
# Add NF4 support for Flux # Add NF4 support for Flux
self.install_from_prebuilt("bitsandbytes") self.install_from_prebuilt("bitsandbytes")
super().install() def _start(self):
def _launch(self):
args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)] args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
self.python(f"launch.py", args=args, current_dir="webui", self.python(f"launch.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class TextGenerationWebui(Stack):
'https://github.com/oobabooga/text-generation-webui/' 'https://github.com/oobabooga/text-generation-webui/'
) )
def install(self): def _install(self):
# Install LlamaCpp from prebuilt # Install LlamaCpp from prebuilt
self.pip_install("llama-cpp-python", env=["CMAKE_ARGS=\"-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS\""]) # cpu self.pip_install("llama-cpp-python", env=["CMAKE_ARGS=\"-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS\""]) # cpu
@ -48,9 +48,7 @@ class TextGenerationWebui(Stack):
self.pip_install("auto-gptq", args=["--no-build-isolation", "--extra-index-url", self.pip_install("auto-gptq", args=["--no-build-isolation", "--extra-index-url",
"https://huggingface.github.io/autogptq-index/whl/rocm573/"]) "https://huggingface.github.io/autogptq-index/whl/rocm573/"])
super().install() def _start(self):
def _launch(self):
args = ["--listen", "--listen-port", str(self.port)] args = ["--listen", "--listen-port", str(self.port)]
self.python(f"server.py", args=args, current_dir="webui", self.python(f"server.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class XttsWebui(Stack):
'https://github.com/daswer123/xtts-webui' 'https://github.com/daswer123/xtts-webui'
) )
def install(self): def _install(self):
# Install the webui # Install the webui
self.git_clone(url=self.url, dest="webui") self.git_clone(url=self.url, dest="webui")
@ -30,9 +30,7 @@ class XttsWebui(Stack):
# Deepspeed and ninja (not working yet) # Deepspeed and ninja (not working yet)
# self.pip_install(["ninja", "deepspeed"]) # self.pip_install(["ninja", "deepspeed"])
super().install() def _start(self):
def _launch(self):
args = ["--host", "0.0.0.0", "--port", str(self.port)] args = ["--host", "0.0.0.0", "--port", str(self.port)]
self.python(f"server.py", current_dir="webui", self.python(f"server.py", current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], args=args, daemon=True) env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], args=args, daemon=True)

View File

@ -26,7 +26,9 @@ def update_choices():
Choice("exit") Choice("exit")
]) ])
_services_choices = [Choice(service.name, value=service.id) for service in loaded_services.values()] _services_choices = [Choice(f"{service.name} [{'ON' if service.status() else 'OFF'}]", value=service.id) for service
in loaded_services.values()]
_services_choices.append(Choice("go back", value="back")) _services_choices.append(Choice("go back", value="back"))
start_service = questionary.select( start_service = questionary.select(

View File

@ -4,6 +4,7 @@ import questionary
from core.vars import logger, loaded_services from core.vars import logger, loaded_services
from ui import choices from ui import choices
from ui.choices import update_choices
def clear_terminal(): def clear_terminal():
@ -23,11 +24,11 @@ def handle_services(action, service):
elif action == "stop": elif action == "stop":
logger.info(f"Stopping service: {service.name}") logger.info(f"Stopping service: {service.name}")
service.stop() service.stop()
elif action == "update": elif action == "install":
confirmation = choices.are_you_sure.ask() confirmation = choices.are_you_sure.ask()
if confirmation: if confirmation:
logger.info(f"Installing/updating service: {service.name}") logger.info(f"Installing/updating service: {service.name}")
service.update() service.install()
elif action == "uninstall": elif action == "uninstall":
confirmation = choices.are_you_sure.ask() confirmation = choices.are_you_sure.ask()
if confirmation: if confirmation:
@ -42,6 +43,7 @@ def handle_services(action, service):
def run_interactive_cmd_ui(): def run_interactive_cmd_ui():
while True: while True:
clear_terminal() clear_terminal()
update_choices()
choice = choices.start.ask() choice = choices.start.ask()
if choice == "Start service": if choice == "Start service":
@ -54,7 +56,7 @@ def run_interactive_cmd_ui():
elif choice == "Install/update service": elif choice == "Install/update service":
service = choices.install_service.ask() service = choices.install_service.ask()
handle_services("update", service) handle_services("install", service)
elif choice == "Uninstall service": elif choice == "Uninstall service":
service = choices.uninstall_service.ask() service = choices.uninstall_service.ask()