Compare commits

..

No commits in common. "40ea9f1cced03255d6f2daee30dc2507a36dc339" and "157e6efe16ad23d86efe214ac3f223895dbff776" have entirely different histories.

11 changed files with 59 additions and 60 deletions

View File

@ -39,18 +39,10 @@ class Stack:
self.pid = config.get(f"{self.name}-pid")
def install(self):
self.check_for_broken_install()
self.create_venv()
self._install()
self.create_file('.installed', 'true')
logger.info(f"Installed {self.name}")
def _install(self):
pass
def is_installed(self):
return self.file_exists('.installed')
@ -65,25 +57,26 @@ class Stack:
self.create_dir('')
def update(self, folder: str = 'webui'):
if self.is_installed():
if self.dir_exists(folder):
logger.info(f"Updating {self.name}")
self.git_pull(folder)
else:
logger.warning(f"Could not update {self.name} as {self.name} is not installed")
choices.any_key.ask()
logger.warning(f"Could not update {self.name} as {folder} does not exist")
def uninstall(self):
logger.info(f"Uninstalling {self.name}")
self.bash(f"rm -rf {self.path}")
def start(self):
if self.is_installed():
self._start()
self.update()
else:
logger.error(f"{self.name} is not installed")
choices.any_key.ask()
self.check_for_broken_install()
self.create_venv()
self.install()
def _start(self):
self._launch()
def _launch(self):
pass
def stop(self):
@ -154,7 +147,7 @@ class Stack:
if choice is True:
self.stop()
self._start()
self._launch()
return
else:
# TODO: attach to subprocess / redirect logs?

View File

@ -63,9 +63,7 @@ def run_command(command: str, exit_on_error: bool = True):
if process.returncode != 0:
logger.fatal(f"Failed to run command: {command}")
if exit_on_error:
raise Exception(f"Failed to run command: {command}")
raise Exception(f"Failed to run command: {command}")
return out, err, process.returncode
@ -73,9 +71,13 @@ def run_command(command: str, exit_on_error: bool = True):
def load_service_from_string(service: str) -> Stack:
logger.debug(f"Loading service from string: {service}")
service_name = service.replace("_", " ").title().replace(" ", "")
try:
service_name = service.replace("_", " ").title().replace(" ", "")
module = importlib.import_module(f"services.{service}")
met = getattr(module, service_name)
return met()
module = importlib.import_module(f"services.{service}")
met = getattr(module, service_name)
return met()
except ModuleNotFoundError as e:
logger.error(f"Failed to load service: {e}")
return None

View File

@ -9,22 +9,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
# For bitsandbytes
ENV BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102"
# For LLAMA
# Syntax error but works
ARG ROCM_DOCKER_ARCH=\
gfx803 \
gfx900 \
gfx906 \
gfx908 \
gfx90a \
gfx1010 \
gfx1030 \
gfx1100 \
gfx1101 \
gfx1102
# no syntax error but doesn't work for some reason (error on llama compile)??
# CMake Error: The source directory "/tmp/llama-cpp-python/gfx1102" does not exist.
# ARG ROCM_DOCKER_ARCH="gfx803 gfx 900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101 gfx1102"
#ARG ROCM_DOCKER_ARCH=\
# gfx803 \
# gfx900 \
# gfx906 \
# gfx908 \
# gfx90a \
# gfx1010 \
# gfx1030 \
# gfx1100 \
# gfx1101 \
# gfx1102
ARG ROCM_DOCKER_ARCH="gfx803 gfx 900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101 gfx1102"
ENV GPU_TARGETS=${ROCM_DOCKER_ARCH}
ENV CC=/opt/rocm/llvm/bin/clang

View File

@ -10,7 +10,7 @@ class BackgroundRemovalDis(Stack):
'https://huggingface.co/spaces/ECCV2022/dis-background-removal'
)
def _install(self):
def install(self):
self.git_clone(url=self.url, dest="webui")
self.install_requirements("webui/requirements.txt")
self.pip_install("gradio") # gradio is not in requirements.txt for some reason
@ -26,6 +26,8 @@ class BackgroundRemovalDis(Stack):
# self.remove_dir("webui/.git") # saves a lot of space due to big repo
def _start(self):
super().install()
def _launch(self):
self.python(f"app.py", current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0", f"GRADIO_SERVER_PORT={self.port}"], daemon=True)

View File

@ -10,7 +10,7 @@ class ComfyUi(Stack):
'https://github.com/comfyanonymous/ComfyUI.git'
)
def _install(self):
def install(self):
# Install the webui
self.git_clone(url=self.url, dest="webui")
self.install_requirements("webui/requirements.txt")
@ -26,7 +26,9 @@ class ComfyUi(Stack):
self.git_clone(url="https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4.git",
dest="webui/custom_nodes/ComfyUI_bitsandbytes_NF4")
def _start(self):
super().install()
def _launch(self):
args = ["--port", str(self.port)]
self.python(f"main.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class StableDiffusionForge(Stack):
'https://github.com/lllyasviel/stable-diffusion-webui-forge'
)
def _install(self):
def install(self):
# Install the webui
self.git_clone(url=self.url, dest="webui")
@ -19,7 +19,9 @@ class StableDiffusionForge(Stack):
# Add NF4 support for Flux
self.install_from_prebuilt("bitsandbytes")
def _start(self):
super().install()
def _launch(self):
args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
self.python(f"launch.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class StableDiffusionWebui(Stack):
'https://github.com/AUTOMATIC1111/stable-diffusion-webui'
)
def _install(self):
def install(self):
# Install the webui
self.git_clone(url=self.url, branch="dev", dest="webui")
@ -19,7 +19,9 @@ class StableDiffusionWebui(Stack):
# Add NF4 support for Flux
self.install_from_prebuilt("bitsandbytes")
def _start(self):
super().install()
def _launch(self):
args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
self.python(f"launch.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class TextGenerationWebui(Stack):
'https://github.com/oobabooga/text-generation-webui/'
)
def _install(self):
def install(self):
# Install LlamaCpp from prebuilt
self.pip_install("llama-cpp-python", env=["CMAKE_ARGS=\"-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS\""]) # cpu
@ -48,7 +48,9 @@ class TextGenerationWebui(Stack):
self.pip_install("auto-gptq", args=["--no-build-isolation", "--extra-index-url",
"https://huggingface.github.io/autogptq-index/whl/rocm573/"])
def _start(self):
super().install()
def _launch(self):
args = ["--listen", "--listen-port", str(self.port)]
self.python(f"server.py", args=args, current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], daemon=True)

View File

@ -10,7 +10,7 @@ class XttsWebui(Stack):
'https://github.com/daswer123/xtts-webui'
)
def _install(self):
def install(self):
# Install the webui
self.git_clone(url=self.url, dest="webui")
@ -30,7 +30,9 @@ class XttsWebui(Stack):
# Deepspeed and ninja (not working yet)
# self.pip_install(["ninja", "deepspeed"])
def _start(self):
super().install()
def _launch(self):
args = ["--host", "0.0.0.0", "--port", str(self.port)]
self.python(f"server.py", current_dir="webui",
env=["TORCH_BLAS_PREFER_HIPBLASLT=0"], args=args, daemon=True)

View File

@ -26,9 +26,7 @@ def update_choices():
Choice("exit")
])
_services_choices = [Choice(f"{service.name} [{'ON' if service.status() else 'OFF'}]", value=service.id) for service
in loaded_services.values()]
_services_choices = [Choice(service.name, value=service.id) for service in loaded_services.values()]
_services_choices.append(Choice("go back", value="back"))
start_service = questionary.select(

View File

@ -4,7 +4,6 @@ import questionary
from core.vars import logger, loaded_services
from ui import choices
from ui.choices import update_choices
def clear_terminal():
@ -24,11 +23,11 @@ def handle_services(action, service):
elif action == "stop":
logger.info(f"Stopping service: {service.name}")
service.stop()
elif action == "install":
elif action == "update":
confirmation = choices.are_you_sure.ask()
if confirmation:
logger.info(f"Installing/updating service: {service.name}")
service.install()
service.update()
elif action == "uninstall":
confirmation = choices.are_you_sure.ask()
if confirmation:
@ -43,7 +42,6 @@ def handle_services(action, service):
def run_interactive_cmd_ui():
while True:
clear_terminal()
update_choices()
choice = choices.start.ask()
if choice == "Start service":
@ -56,7 +54,7 @@ def run_interactive_cmd_ui():
elif choice == "Install/update service":
service = choices.install_service.ask()
handle_services("install", service)
handle_services("update", service)
elif choice == "Uninstall service":
service = choices.uninstall_service.ask()