Compare commits
5 commits: ec9517c0c9 ... 5f813f4b1e

Author | SHA1 | Date
---|---|---
 | 5f813f4b1e |
 | 2672190fd2 |
 | 8561196f0d |
 | 1ca5f4f70a |
 | 7daec372c7 |
README.md
@@ -2,6 +2,10 @@
 This is a simple project to make hosting multiple AI tools easy on Linux with AMD GPUs using ROCm locally (without Docker).
 
+> [!WARNING]
+> Currently rewriting this project to be more modular and easier to use. This is a work in progress.
+> Instructions below are outdated!
+
 To use, clone the repo and run the install script for the service you want to use.
 
 ```bash
background-removal-dis-rocm/.gitignore (vendored, 2 deletions)
@@ -1,2 +0,0 @@
-venv/
-webui/
background-removal-dis-rocm/install.sh (deleted)
@@ -1,42 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Function to install StableDiffusion
-install_background_remover() {
-    # As no data is stored in the webui folder, we can re-run the installation process for updates
-    rm -R webui -f
-
-    echo "Cloning webui..."
-    # original repo (hf): https://huggingface.co/spaces/ECCV2022/dis-background-removal/tree/main
-    git clone ssh://git@git.broillet.ch:222/Clone/dis-background-removal.git webui
-
-    echo "Installing requirements..."
-    $python_exec -m pip install -r webui/requirements.txt
-
-    echo "Cloning DIS repo"
-    git clone ssh://git@git.broillet.ch:222/Clone/DIS.git tmp-dis
-    mv tmp-dis/IS-Net/* webui/
-    sudo rm -R tmp-dis
-
-    echo "Finalizing..."
-    mkdir webui/saved_models -p
-    mv webui/isnet.pth webui/saved_models
-    sudo rm -R webui/.git
-}
-
-# Main function
-main() {
-    prepare_env
-
-    # Set it up
-    install_background_remover
-
-    clean
-
-    echo "BackgroundRemover installation complete."
-}
-
-# Run main function
-main
background-removal-dis-rocm/run.sh (deleted)
@@ -1,19 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start BG-remover
-    cd webui/
-    TORCH_BLAS_PREFER_HIPBLASLT=0 $python_exec app.py
-}
-
-main
bitsandbytes-rocm-build/.gitignore (vendored, 1 deletion)
@@ -1 +0,0 @@
-build_output/
bitsandbytes-rocm-build/README.md (deleted)
@@ -1,15 +0,0 @@
-# bitsandbytes-rocm-build-pip
-
-This is a simple script to help you build the latest bitsandbytes for ROCm.
-The official build process requires a lot of ROCm-related packages and can easily mess up your computer if you install the wrong packages.
-
-This creates a Docker image that builds the package and extracts the built wheel file, which you can then easily install using pip.
-
-```bash
-./build.sh
-./extract_build.sh
-```
-
-The wheel file will be in a folder named ``build_output/``
-
-*You might also find one of my already-built wheels in this folder or on the GitHub releases page (may not be up to date)*
bitsandbytes-rocm-build/bitsandbytes-0.43.3.dev0-cp310-cp310-linux_x86_64.whl (deleted): Binary file not shown.
bitsandbytes-rocm-build/build.sh (deleted)
@@ -1 +0,0 @@
-docker build . -t 'bitsandbytes-rocm-build:6.1.2' -f Dockerfile
bitsandbytes-rocm-build/extract_build.sh (deleted)
@@ -1,31 +0,0 @@
-#!/bin/bash
-
-# Set variables
-IMAGE_NAME="bitsandbytes-rocm-build:6.1.2"
-CONTAINER_NAME="bitsandbytes-rocm-build"
-FILE_IN_CONTAINER="/tmp/bitsandbytes/dist/"
-FILE_ON_HOST="./build_output/"
-
-# Run the Docker container
-docker run -d --name $CONTAINER_NAME $IMAGE_NAME
-
-# Check if the container is running
-if [ "$(docker ps -q -f name=$CONTAINER_NAME)" ]; then
-    echo "Container $CONTAINER_NAME is running."
-
-    # Copy the file from the container to the host
-    docker cp $CONTAINER_NAME:$FILE_IN_CONTAINER $FILE_ON_HOST
-
-    if [ $? -eq 0 ]; then
-        echo "File copied successfully to $FILE_ON_HOST"
-    else
-        echo "Failed to copy file."
-    fi
-else
-    echo "Failed to start container $CONTAINER_NAME."
-fi
-
-docker stop $CONTAINER_NAME
-docker rm $CONTAINER_NAME
-
-echo "Now you can install bitsandbytes locally using \"pip install\" with the file in the build_output/ folder"
comfyui-rocm/.gitignore (vendored, 4 deletions)
@@ -1,4 +0,0 @@
-venv/
-webui/
-models/
-*.log
comfyui-rocm/install.sh (deleted)
@@ -1,52 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-NAME="ComfyUI"
-
-# Function to install/update
-install() {
-    if [ -d "webui" ]; then
-        echo $NAME "is already installed. Updating..."
-        yes_or_no "Do you want to update $NAME?" && {
-            cd webui
-            git pull
-            echo "$NAME WebUI successfully updated."
-        }
-    else
-        echo "Cloning $NAME repository..."
-        git clone https://github.com/comfyanonymous/ComfyUI.git webui
-
-        echo "Running $NAME setup..."
-        $python_exec -m pip install -r webui/requirements.txt
-
-        cd webui/custom_nodes
-
-        # Install manager
-        git clone https://github.com/ltdrdata/ComfyUI-Manager.git
-
-        # Add GGUF support
-        git clone https://github.com/city96/ComfyUI-GGUF
-        $python_exec -m pip install --upgrade gguf numpy==1.26.4
-
-        # Add NF4 support
-        git clone https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4.git
-        $python_exec -m pip install --upgrade ../../../bitsandbytes-rocm-build/bitsandbytes-0.43.3.dev0-cp310-cp310-linux_x86_64.whl # install bitsandbytes for rocm until it is available on pypi
-
-        ln -s webui/models models
-    fi
-}
-
-# Main function
-main() {
-    prepare_env
-    install
-    clean
-    echo "$NAME installation/update complete. Use ./run.sh to start"
-}
-
-# Run main function
-main
comfyui-rocm/run.sh (deleted)
@@ -1,18 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start
-    TORCH_BLAS_PREFER_HIPBLASLT=0 $python_exec webui/main.py
-}
-
-main
llama-cpp-python-rocm-build/.gitignore (vendored, 1 deletion)
@@ -1 +0,0 @@
-build_output/
llama-cpp-python-rocm-build/Dockerfile (deleted)
@@ -1,39 +0,0 @@
-FROM rocm/dev-ubuntu-22.04:6.1.2
-
-ENV TORCH_VERSION="rocm6.1" \
-    DEBIAN_FRONTEND=noninteractive \
-    PYTHONUNBUFFERED=1 \
-    PYTHONIOENCODING=UTF-8 \
-    # for llama
-    CMAKE_ARGS="-DGGML_HIPBLAS=on" \
-    FORCE_CMAKE=1
-
-WORKDIR /tmp
-
-RUN apt-get update -y
-RUN apt-get install -y wget git cron cmake make software-properties-common
-
-# Install python3.10
-RUN add-apt-repository ppa:deadsnakes/ppa -y && apt-get update -y
-RUN apt-get install -y python3.10 python3.10-dev python3.10-venv
-
-ENV VIRTUAL_ENV=/opt/venv
-RUN python3.10 -m venv $VIRTUAL_ENV
-ENV PATH="$VIRTUAL_ENV/bin:$PATH"
-
-RUN pip3 install --upgrade pip wheel setuptools build
-
-# Install pytorch for rocm
-RUN pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/${TORCH_VERSION}
-
-# ROCM llama-cpp-python
-RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
-## Clone repo and install python requirements
-RUN git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git
-WORKDIR /tmp/llama-cpp-python
-
-## Build
-RUN python3.10 -m build --wheel
-
-# Cleanup
-RUN apt-get clean && pip3 cache purge
llama-cpp-python-rocm-build/README.md (deleted)
@@ -1,16 +0,0 @@
-# llama-cpp-python-rocm-build-pip
-
-This is a simple script to help you build the latest llama-cpp-python for ROCm.
-The official build process requires a lot of ROCm-related packages and can easily mess up your computer if you install the wrong packages.
-
-This creates a Docker image that builds the package and extracts the built wheel file, which you can then easily install using pip.
-
-```bash
-./build.sh
-./extract_build.sh
-```
-
-The wheel file will be in a folder named ``build_output/``
-
-*You might also find one of my already-built wheels in this folder or on the GitHub releases page (may not be up to date)*
-
llama-cpp-python-rocm-build/build.sh (deleted)
@@ -1 +0,0 @@
-docker build . -t 'llama-cpp-python-rocm-build:6.1.2' -f Dockerfile
llama-cpp-python-rocm-build/extract_build.sh (deleted)
@@ -1,31 +0,0 @@
-#!/bin/bash
-
-# Set variables
-IMAGE_NAME="llama-cpp-python-rocm-build:6.1.2"
-CONTAINER_NAME="llama-cpp-python-rocm-build"
-FILE_IN_CONTAINER="/tmp/llama-cpp-python/dist/"
-FILE_ON_HOST="./build_output/"
-
-# Run the Docker container
-docker run -d --name $CONTAINER_NAME $IMAGE_NAME
-
-# Check if the container is running
-if [ "$(docker ps -q -f name=$CONTAINER_NAME)" ]; then
-    echo "Container $CONTAINER_NAME is running."
-
-    # Copy the file from the container to the host
-    docker cp $CONTAINER_NAME:$FILE_IN_CONTAINER $FILE_ON_HOST
-
-    if [ $? -eq 0 ]; then
-        echo "File copied successfully to $FILE_ON_HOST"
-    else
-        echo "Failed to copy file."
-    fi
-else
-    echo "Failed to start container $CONTAINER_NAME."
-fi
-
-docker stop $CONTAINER_NAME
-docker rm $CONTAINER_NAME
-
-echo "Now you can install llama-cpp-python locally using \"pip install\" with the file in the build_output/ folder"
main.py (new file, 23 additions)
@@ -0,0 +1,23 @@
+import logging
+import os
+import sys
+
+PYTHON_EXEC = 'python3.10'
+PATH = os.path.dirname(os.path.abspath(__file__))
+ROCM_VERSION = "6.1.2"
+
+# Set up logging
+LEVEL = logging.DEBUG
+logger = logging.getLogger('ai-suite-rocm')
+if not logger.hasHandlers():
+    handler_with_formatter = logging.StreamHandler(stream=sys.stdout)
+    handler_with_formatter.setFormatter(logging.Formatter('[%(levelname)s] : %(message)s'))
+    logger.addHandler(handler_with_formatter)
+logger.setLevel(LEVEL)
+
+if __name__ == '__main__':
+    logger.info("Starting AI Suite for ROCM")
+
+    from services import TextGeneration
+
+    test = TextGeneration().start()
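The service import sits inside the `__main__` block because services/services.py itself imports PATH, PYTHON_EXEC, and logger back from main, so a top-level import would be circular. A minimal sketch of how this entrypoint could grow into a dispatcher; the registry dict and command-line handling below are assumptions for illustration, not part of this commit:

```python
import sys

if __name__ == '__main__':
    # Deferred import: services.services imports from main, so importing
    # services at module top level would create a circular import.
    from services import TextGeneration

    registry = {'txtgen': TextGeneration}  # hypothetical registry, one entry per service
    name = sys.argv[1] if len(sys.argv) > 1 else 'txtgen'
    registry[name]().start()
```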
prebuilts/Dockerfile
@@ -1,10 +1,17 @@
 FROM rocm/dev-ubuntu-22.04:6.1.2
 
-ENV ROCM_ARCH="gfx1030" \
-    TORCH_VERSION="rocm6.1" \
-    DEBIAN_FRONTEND=noninteractive \
+ENV DEBIAN_FRONTEND=noninteractive \
     PYTHONUNBUFFERED=1 \
-    PYTHONIOENCODING=UTF-8
+    PYTHONIOENCODING=UTF-8 \
+    # for bitsandbytes
+    ROCM_ARCH="gfx1030" \
+    TORCH_VERSION="rocm6.1" \
+    # for llama
+    CMAKE_ARGS="-DGGML_HIPBLAS=on" \
+    FORCE_CMAKE=1
 
 WORKDIR /tmp
 
@@ -19,11 +26,12 @@ ENV VIRTUAL_ENV=/opt/venv
 RUN python3.10 -m venv $VIRTUAL_ENV
 ENV PATH="$VIRTUAL_ENV/bin:$PATH"
 
-RUN pip3 install --upgrade pip wheel setuptools
+RUN pip3 install --upgrade pip wheel setuptools build
 
 # Install pytorch for rocm
 RUN pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/${TORCH_VERSION}
 
 
 # ROCM bitsandbytes
 RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
 ## Clone repo and install python requirements
@@ -35,5 +43,14 @@ RUN cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${ROCM_ARCH}
 RUN make
 RUN python3.10 setup.py bdist_wheel --universal
 
+
+# ROCM llama-cpp-python
+RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
+## Clone repo and install python requirements
+RUN git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git
+WORKDIR /tmp/llama-cpp-python
+RUN python3.10 -m build --wheel
+
+
 # Cleanup
 RUN apt-get clean && pip3 cache purge
prebuilts/build.sh (new executable file, 1 addition)
@@ -0,0 +1 @@
+docker build . -t 'prebuilts-rocm:6.1.2' -f Dockerfile
prebuilts/extract_build.sh (new executable file, 27 additions)
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Set variables
+IMAGE_NAME="prebuilts-rocm:6.1.2"
+CONTAINER_NAME="prebuilts-rocm"
+# Bash array of paths to copy out of the container
+FILES_TO_COPY=("/tmp/bitsandbytes/dist/" "/tmp/llama-cpp-python/dist/")
+WHERE_TO_PASTE="./build_output/"
+
+# Run the Docker container
+docker run -d --name $CONTAINER_NAME $IMAGE_NAME
+
+# Check if the container is running
+if [ "$(docker ps -q -f name=$CONTAINER_NAME)" ]; then
+    echo "Container $CONTAINER_NAME is running."
+
+    # Copy the files from the container to the host
+    for file in "${FILES_TO_COPY[@]}"; do
+        docker cp $CONTAINER_NAME:$file $WHERE_TO_PASTE
+    done
+
+    echo "Files copied to $WHERE_TO_PASTE."
+else
+    echo "Failed to start container $CONTAINER_NAME."
+fi
+
+docker stop $CONTAINER_NAME
+docker rm $CONTAINER_NAME
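Since this changeset moves the suite from bash to Python, the same run/copy/clean-up flow could eventually live in Python too. A rough sketch under that assumption (the helper below is hypothetical and not part of this commit); it mirrors the image and container names from the script above:

```python
import subprocess

IMAGE = "prebuilts-rocm:6.1.2"
NAME = "prebuilts-rocm"
PATHS = ["/tmp/bitsandbytes/dist/", "/tmp/llama-cpp-python/dist/"]
DEST = "./build_output/"


def extract_prebuilts():
    # Start a detached container from the freshly built image
    subprocess.run(["docker", "run", "-d", "--name", NAME, IMAGE], check=True)
    try:
        # Copy each dist/ folder (the built wheels) out of the container
        for path in PATHS:
            subprocess.run(["docker", "cp", f"{NAME}:{path}", DEST], check=True)
    finally:
        # Always clean up the container, mirroring the docker stop/rm tail
        subprocess.run(["docker", "stop", NAME], check=False)
        subprocess.run(["docker", "rm", NAME], check=False)


if __name__ == "__main__":
    extract_prebuilts()
```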
services/__init__.py (new file, 2 additions)
@@ -0,0 +1,2 @@
+from services.services import Stack
+from services.txtgen import TextGeneration
services/background-removal-dis.py (new file, 34 additions)
@@ -0,0 +1,34 @@
+from services import Stack
+
+
+class BGRemovalDIS(Stack):
+    def __init__(self):
+        super().__init__(
+            'BGRemovalDIS',
+            'bg-remove-dis-rocm',
+            5005,
+            'https://huggingface.co/spaces/ECCV2022/dis-background-removal'
+        )
+
+    def install(self):
+        self.git_clone(url=self.url, dest="webui")
+        self.install_requirements("webui/requirements.txt")
+        self.pip_install("gradio")  # gradio is not in requirements.txt for some reason
+
+        self.remove_line_in_file("os.", "webui/app.py")  # remove manual clone of DIS from app.py (done below)
+
+        self.git_clone("https://github.com/xuebinqin/DIS.git", dest="tmp-dis")
+        self.move_all_files_in_dir("tmp-dis/IS-Net", "webui")
+        self.remove_dir("tmp-dis")
+
+        self.create_dir("webui/saved_models")
+        self.move_file_or_dir("webui/isnet.pth", "webui/saved_models/isnet.pth")
+
+        # self.remove_dir("webui/.git")  # saves a lot of space due to big repo
+
+        super().install()
+
+    def start(self):
+        args = ["--port", str(self.port)]
+        self.python(f"app.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
services/comfyui.py (new file, 34 additions)
@@ -0,0 +1,34 @@
+from services import Stack
+
+
+class ComfyUI(Stack):
+    def __init__(self):
+        super().__init__(
+            'ComfyUI',
+            'comfyui-rocm',
+            5004,
+            'https://github.com/comfyanonymous/ComfyUI.git'
+        )
+
+    def install(self):
+        # Install the webui
+        self.git_clone(url=self.url, dest="webui")
+        self.install_requirements("webui/requirements.txt")
+
+        # Install the manager
+        self.git_clone(url="https://github.com/ltdrdata/ComfyUI-Manager.git", dest="webui/custom_nodes/manager")
+
+        # Add GGUF support
+        self.pip_install(["gguf", "numpy==1.26.4"])
+
+        # Add NF4 support for Flux
+        self.install_from_prebuilt("bitsandbytes")
+        self.git_clone(url="https://github.com/comfyanonymous/ComfyUI_bitsandbytes_NF4.git",
+                       dest="webui/custom_nodes/ComfyUI_bitsandbytes_NF4")
+
+        super().install()
+
+    def start(self):
+        args = ["--port", str(self.port)]
+        self.python(f"main.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
services/services.py (new file, 168 additions)
@@ -0,0 +1,168 @@
+import logging
+import os
+import shutil
+import subprocess
+
+import utils
+from main import PATH, PYTHON_EXEC, logger, LEVEL
+
+
+class Stack:
+    def __init__(self, name: str, path: str, port: int, url: str):
+        self.name = name
+        self.path = os.path.join(PATH, path)
+        self.url = url
+        self.port = port
+
+        self.process = None
+
+        if self.is_installed():
+            self.update()
+        else:
+            self.check_for_broken_install()
+            self.create_venv()
+            self.install()
+
+    def install(self):
+        self.create_file('.installed', 'true')
+        logger.info(f"Installed {self.name}")
+
+    def is_installed(self):
+        return self.file_exists('.installed')
+
+    def check_for_broken_install(self):
+        # guard with isdir: the service folder may not exist yet on first run
+        if not self.is_installed() and os.path.isdir(self.path) and len(os.listdir(self.path)) > 0:
+            logger.warning("Found files from a previous/borked/crashed installation, cleaning up...")
+            self.bash(f"rm -rf {self.path}")
+        self.create_dir('')
+
+    def update(self, folder: str = 'webui'):
+        if self.dir_exists(folder):
+            logger.info(f"Updating {self.name}")
+            self.git_pull(folder)
+        else:
+            logger.warning(f"Could not update {self.name} as {folder} does not exist")
+
+    def uninstall(self):
+        self.bash(f"rm -rf {self.path}")
+
+    def start(self):
+        pass
+
+    def stop(self):
+        pass
+
+    def restart(self):
+        self.stop()
+        self.start()
+
+    def status(self):
+        pass
+
+    # Python/Bash utils
+    def create_venv(self):
+        venv_path = os.path.join(self.path, 'venv')
+        if not self.has_venv():
+            logger.debug(f"Creating venv for {self.name}")
+            self.bash(f"{PYTHON_EXEC} -m venv {venv_path} --system-site-packages")
+            self.pip("install --upgrade pip")
+        else:
+            logger.debug(f"Venv already exists for {self.name}")
+
+    def has_venv(self) -> bool:
+        return self.dir_exists('venv')
+
+    def pip_install(self, package: str | list, no_deps: bool = False):
+        if isinstance(package, list):
+            for p in package:
+                self.pip(f"install -U {p} {'--no-deps' if no_deps else ''}")
+        else:
+            self.pip(f"install -U {package} {'--no-deps' if no_deps else ''}")
+
+    def install_requirements(self, filename: str = 'requirements.txt'):
+        self.pip(f"install -r {filename}")
+
+    def pip(self, cmd: str, env=[], current_dir: str = None):
+        self.bash(f"{' '.join(env)} {self.path}/venv/bin/pip {cmd}", current_dir)
+
+    def python(self, cmd: str, env=[], current_dir: str = None):
+        self.bash(f"{' '.join(env)} {self.path}/venv/bin/python {cmd}", current_dir)
+
+    def bash(self, cmd: str, current_dir: str = None):
+        cmd = f"cd {self.path if current_dir is None else os.path.join(self.path, current_dir)} && {cmd}"
+
+        logger.debug(f"Running command: {cmd}")
+
+        if LEVEL == logging.DEBUG:
+            process = subprocess.Popen(cmd, shell=True)
+            process.wait()
+            if process.returncode != 0:
+                raise Exception(f"Failed to run command: {cmd}")
+        else:
+            process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            out, err = process.communicate()
+
+            if process.returncode != 0:
+                logger.fatal(f"Failed to run command: {cmd}")
+                logger.fatal(f"Error: {err.decode('utf-8')}")
+                logger.fatal(f"Output: {out.decode('utf-8')}")
+                raise Exception(f"Failed to run command: {cmd}")
+
+    # Git utils
+    def git_clone(self, url: str, branch: str = None, dest: str = None):
+        # inner f-string quotes must differ from the outer ones on Python < 3.12
+        self.bash(f"git clone {f'-b {branch}' if branch is not None else ''} {url} {'' if dest is None else dest}")
+
+    def git_pull(self, repo_folder: str, force: bool = False):
+        self.bash(f"git reset --hard HEAD {'&& git clean -f -d' if force else ''} && git pull", repo_folder)
+
+    # Prebuilt utils
+    def install_from_prebuilt(self, name):
+        for prebuilt in utils.get_prebuilts():
+            if prebuilt['name'].split("-")[0] == name:
+                self.pip(f"install {prebuilt['browser_download_url']}")
+                return
+
+    # File utils
+    def create_file(self, name, content):
+        with open(os.path.join(self.path, name), 'w') as f:
+            f.write(content)
+
+    def create_dir(self, name):
+        logger.debug(f"Creating directory {name}")
+        os.makedirs(os.path.join(self.path, name), exist_ok=True)
+
+    def remove_file(self, name):
+        logger.debug(f"Removing file {name}")
+        os.remove(os.path.join(self.path, name))
+
+    def remove_dir(self, name):
+        logger.debug(f"Removing directory {name}")
+        # recursive delete: the target may be non-empty (e.g. a cloned repo)
+        shutil.rmtree(os.path.join(self.path, name))
+
+    def move_file_or_dir(self, src, dest):
+        logger.debug(f"Moving file/dir {src} to {dest}")
+        os.rename(os.path.join(self.path, src), os.path.join(self.path, dest))
+
+    def move_all_files_in_dir(self, src, dest):
+        logger.debug(f"Moving all files in directory {src} to {dest}")
+        for file in os.listdir(os.path.join(self.path, src)):
+            os.rename(os.path.join(self.path, src, file), os.path.join(self.path, dest, file))
+
+    def file_exists(self, name):
+        return os.path.exists(os.path.join(self.path, name))
+
+    def dir_exists(self, name):
+        return os.path.isdir(os.path.join(self.path, name))
+
+    def remove_line_in_file(self, contains: str | list, file: str):
+        logger.debug(f"Removing lines containing {contains} in {file}")
+
+        if isinstance(contains, list):
+            for c in contains:
+                self.bash(f"sed -i '/{c}/d' {file}")
+        else:
+            self.bash(f"sed -i '/{contains}/d' {file}")
+
+    def replace_line_in_file(self, match: str, replace: str, file: str):
+        logger.debug(f"Replacing lines containing {match} with {replace} in {file}")
+        self.bash(f"sed -i 's/{match}/{replace}/g' {file}")
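A new service plugs into the Stack base class above by declaring its metadata in __init__ and overriding install() and start(). A minimal sketch of a hypothetical service; the display name, folder, port, and repository URL are invented for illustration and are not part of this commit:

```python
from services import Stack


class MyService(Stack):
    def __init__(self):
        super().__init__(
            'My Service',       # display name used in logs
            'my-service-rocm',  # folder created under the suite's PATH
            5010,               # port handed to start()
            'https://github.com/example/my-service'  # hypothetical repo
        )

    def install(self):
        self.git_clone(url=self.url, dest="webui")
        self.install_requirements("webui/requirements.txt")
        super().install()  # writes the .installed marker

    def start(self):
        # disable hipBLASLt the same way the bundled services do
        self.python(f"app.py --port {self.port}", current_dir="webui",
                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
```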
services/stablediffusion-forge.py (new file, 27 additions)
@@ -0,0 +1,27 @@
+from services import Stack
+
+
+class StableDiffusionForge(Stack):
+    def __init__(self):
+        super().__init__(
+            'StableDiffusion Forge WebUI',
+            'stablediffusion-forge-rocm',
+            5003,
+            'https://github.com/lllyasviel/stable-diffusion-webui-forge'
+        )
+
+    def install(self):
+        # Install the webui
+        self.git_clone(url=self.url, dest="webui")
+
+        self.python("launch.py --skip-torch-cuda-test --exit", current_dir="webui")
+
+        # Add NF4 support for Flux
+        self.install_from_prebuilt("bitsandbytes")
+
+        super().install()
+
+    def start(self):
+        args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
+        self.python(f"launch.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
services/stablediffusion-webui.py (new file, 27 additions)
@@ -0,0 +1,27 @@
+from services import Stack
+
+
+class StableDiffusionWebUI(Stack):
+    def __init__(self):
+        super().__init__(
+            'StableDiffusion WebUI',
+            'stablediffusion-webui-rocm',
+            5002,
+            'https://github.com/AUTOMATIC1111/stable-diffusion-webui'
+        )
+
+    def install(self):
+        # Install the webui
+        self.git_clone(url=self.url, branch="dev", dest="webui")
+
+        self.python("launch.py --skip-torch-cuda-test --exit", current_dir="webui")
+
+        # Add NF4 support for Flux
+        self.install_from_prebuilt("bitsandbytes")
+
+        super().install()
+
+    def start(self):
+        args = ["--listen", "--enable-insecure-extension-access", "--port", str(self.port)]
+        self.python(f"launch.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
services/txtgen.py (new file, 56 additions)
@@ -0,0 +1,56 @@
+from services import Stack
+
+
+class TextGeneration(Stack):
+    def __init__(self):
+        super().__init__(
+            'Text Generation',
+            'text-generation-rocm',
+            5000,
+            'https://github.com/oobabooga/text-generation-webui/'
+        )
+
+    def install(self):
+        # Install LlamaCpp from prebuilt
+        self.pip("install llama-cpp-python", ["CMAKE_ARGS=\"-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS\""])  # cpu
+
+        # Install LlamaCpp for ROCM from source
+        # self.pip("install llama-cpp-python", ["CMAKE_ARGS=\"-DGGML_HIPBLAS=on\" FORCE_CMAKE=1"])  # manual gpu (only works if whole rocm suite installed)
+        # self.install_from_prebuilt("llama_cpp_python")  # gpu (only works if whole rocm suite installed)
+
+        # Install Triton for ROCM from prebuilt
+        # self.install_from_prebuilt("triton")
+
+        # Install Triton for ROCM from source
+        # self.git_clone(url="https://github.com/ROCmSoftwarePlatform/triton.git")
+        # self.pip_install(['ninja', 'cmake'])
+        # self.pip("install -e .", path="triton")
+
+        # Install the webui
+        self.git_clone(url=self.url, dest="webui")
+        self.remove_line_in_file(["accelerate", "lm_eval", "optimum", "autoawq", "llama_cpp_python"],
+                                 "../text-generation-rocm/webui/requirements_amd.txt")
+        self.install_requirements("../text-generation-rocm/webui/requirements_amd.txt")
+        self.pip_install(["accelerate", "optimum"])
+        self.pip_install(
+            "https://github.com/casper-hansen/AutoAWQ_kernels/releases/download/v0.0.7/autoawq_kernels-0.0.7+rocm571-cp310-cp310-linux_x86_64.whl",
+            no_deps=True)
+        self.pip_install(
+            "https://github.com/casper-hansen/AutoAWQ/releases/download/v0.2.6/autoawq-0.2.6-cp310-cp310-linux_x86_64.whl",
+            no_deps=True)
+        # Fix llama trying to use cuda version
+        self.remove_line_in_file("llama_cpp_cuda", "../text-generation-rocm/webui/modules/llama_cpp_python_hijack.py")
+
+        # Install useful packages
+        self.pip_install(
+            "https://github.com/turboderp/exllamav2/releases/download/v0.1.9/exllamav2-0.1.9+rocm6.1.torch2.4.0-cp310-cp310-linux_x86_64.whl")
+        self.install_from_prebuilt("bitsandbytes")
+        self.pip(
+            "install auto-gptq --no-build-isolation --extra-index-url https://huggingface.github.io/autogptq-index/whl/rocm573/")
+
+        super().install()
+
+    def start(self):
+        args = ["--listen", "--listen-port", str(self.port)]
+        self.python(f"server.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
services/xtts.py (new file, 38 additions)
@@ -0,0 +1,38 @@
+from services import Stack
+
+
+class XTTS(Stack):
+    def __init__(self):
+        super().__init__(
+            'XTTS WebUI',
+            'xtts-rocm',
+            5001,
+            'https://github.com/daswer123/xtts-webui'
+        )
+
+    def install(self):
+        # Install the webui
+        self.git_clone(url=self.url, dest="webui")
+
+        self.remove_line_in_file("torch", "webui/requirements.txt")
+        self.install_requirements("webui/requirements.txt")
+
+        # sed -i 's/device = "cuda" if torch.cuda.is_available() else "cpu"/device = "cpu"/' webui/scripts/utils/formatter.py
+        # sed -i 's/asr_model = WhisperModel(whisper_model, device=device, compute_type="float16")/asr_model = WhisperModel(whisper_model, device=device, compute_type="int8")/' webui/scripts/utils/formatter.py
+
+        # Disable gpu for faster-whisper as ROCM isn't supported yet
+        self.replace_line_in_file("device = \"cuda\" if torch.cuda.is_available() else \"cpu\"", "device = \"cpu\"",
+                                  "webui/scripts/utils/formatter.py")
+        self.replace_line_in_file("asr_model = WhisperModel(whisper_model, device=device, compute_type=\"float16\")",
+                                  "asr_model = WhisperModel(whisper_model, device=device, compute_type=\"int8\")",
+                                  "webui/scripts/utils/formatter.py")
+
+        # Deepspeed and ninja (not working yet)
+        # self.pip_install(["ninja", "deepspeed"])
+
+        super().install()
+
+    def start(self):
+        args = ["--host", "0.0.0.0", "--port", str(self.port)]
+        self.python(f"server.py {' '.join(args)}", current_dir="webui",
+                    env=["TORCH_BLAS_PREFER_HIPBLASLT=0"])
stablediffusion-forge-rocm/.gitignore (vendored, 4 deletions)
@@ -1,4 +0,0 @@
-venv/
-webui/
-models/
-outputs/
stablediffusion-forge-rocm/install.sh (deleted)
@@ -1,43 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Function to install/update StableDiffusion
-install_stablediffusionforge() {
-    if [ -d "webui" ]; then
-        echo "StableDiffusionForge repository already exists."
-        yes_or_no "Do you want to update StableDiffusionForge WebUI ?" && {
-            cd webui
-            git pull
-            echo "StableDiffusionForge WebUI successfully updated."
-        }
-    else
-        echo "Cloning StableDiffusionForge repository..."
-        git clone https://github.com/lllyasviel/stable-diffusion-webui-forge webui
-
-        echo "Running StableDiffusionForge setup..."
-        $python_exec webui/launch.py --skip-torch-cuda-test --exit
-
-        echo "Adding Flux NF4 support for ROCM"
-        $python_exec -m pip install --upgrade ../bitsandbytes-rocm-build/bitsandbytes-0.43.3.dev0-cp310-cp310-linux_x86_64.whl # install bitsandbytes for rocm until it is available on pypi
-
-        ln -s webui/models models
-        ln -s webui/outputs outputs
-    fi
-}
-
-# Main function
-main() {
-    prepare_env
-
-    # Install StableDiffusionForge
-    install_stablediffusionforge
-
-    clean
-
-    echo "StableDiffusion installation/update complete. Use ./run.sh to start"
-}
-
-# Run main function
-main
stablediffusion-forge-rocm/run.sh (deleted)
@@ -1,18 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start SD
-    TORCH_BLAS_PREFER_HIPBLASLT=0 $python_exec webui/launch.py --listen --enable-insecure-extension-access # --opt-split-attention
-}
-
-main
stablediffusion-rocm/.gitignore (vendored, 4 deletions)
@@ -1,4 +0,0 @@
-venv/
-webui/
-models/
-outputs/
stablediffusion-rocm/install.sh (deleted)
@@ -1,40 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Function to install/update StableDiffusion
-install_stablediffusion() {
-    if [ -d "webui" ]; then
-        echo "StableDiffusion repository already exists."
-        yes_or_no "Do you want to update StableDiffusion WebUI (dev branch) ?" && {
-            cd webui
-            git pull
-            echo "StableDiffusion WebUI successfully updated."
-        }
-    else
-        echo "Cloning StableDiffusion repository..."
-        git clone -b dev https://github.com/AUTOMATIC1111/stable-diffusion-webui webui
-
-        echo "Running StableDiffusion setup..."
-        $python_exec webui/launch.py --skip-torch-cuda-test --exit
-
-        ln -s webui/models models
-        ln -s webui/outputs outputs
-    fi
-}
-
-# Main function
-main() {
-    prepare_env
-
-    # Install StableDiffusion
-    install_stablediffusion
-
-    clean
-
-    echo "StableDiffusion installation/update complete. Use ./run.sh to start"
-}
-
-# Run main function
-main
stablediffusion-rocm/run.sh (deleted)
@@ -1,18 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start SD
-    TORCH_BLAS_PREFER_HIPBLASLT=0 $python_exec webui/launch.py --listen --enable-insecure-extension-access --opt-split-attention
-}
-
-main
text-generation-rocm/.gitignore (vendored, 4 deletions)
@@ -1,4 +0,0 @@
-venv/
-webui/
-models/
-outputs/
text-generation-rocm/install.sh (deleted)
@@ -1,90 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-NAME="TextGeneration"
-
-# Function to install/update
-install() {
-    if [ -d "webui" ]; then
-        echo $NAME "is already installed. Updating..."
-        yes_or_no "Do you want to update $NAME?" && {
-            cd webui
-            git pull
-            echo "$NAME WebUI successfully updated."
-        }
-    else
-        # Add BnB
-        $python_exec -m pip install --upgrade https://github.com/M4TH1EU/ai-suite-rocm-local/releases/download/prebuilt-wheels-for-rocm/bitsandbytes-0.43.3-cp310-cp310-linux_x86_64.whl # install bitsandbytes for rocm until it is available on pypi
-
-        # Add AutoGPTQ
-        $python_exec -m pip install auto-gptq --no-build-isolation --extra-index-url https://huggingface.github.io/autogptq-index/whl/rocm573/
-
-        # Add ExLlamav2
-        $python_exec -m pip install https://github.com/turboderp/exllamav2/releases/download/v0.1.9/exllamav2-0.1.9+rocm6.1.torch2.4.0-cp310-cp310-linux_x86_64.whl
-
-        # Add LlamaCPP
-        CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" pip install llama-cpp-python # cpu
-        # CMAKE_ARGS="-DGGML_HIPBLAS=on" FORCE_CMAKE=1 $python_exec -m pip install llama-cpp-python # gpu
-
-        # llama cpp built with hipblas doesn't work unless the whole rocm stack is installed locally
-        # so for now, use llama with openblas (cpu)
-
-        # main_venv_path=$(dirname $(python -c "import torch; print(torch.__file__)"))"/lib/"
-        # llama_lib_path="$(pwd)/venv/lib64/python3.10/site-packages/llama_cpp/lib"
-        #
-        # for file in "$main_venv_path"/*.so; do
-        #     ln -s "$file" "$llama_lib_path/$(basename "$file")"
-        # done
-
-        # ln -s "$llama_lib_path/libhipblas.so" "$llama_lib_path/libhipblas.so.1"
-        # ln -s "$llama_lib_path/libhipblas.so" "$llama_lib_path/libhipblas.so.2"
-        # ln -s "$llama_lib_path/librocblas.so" "$llama_lib_path/librocblas.so.3"
-        # ln -s "$llama_lib_path/librocblas.so" "$llama_lib_path/librocblas.so.4"
-        # ln -s "$llama_lib_path/libamdhip64.so" "$llama_lib_path/libamdhip64.so.5"
-        # ln -s "$llama_lib_path/libamdhip64.so" "$llama_lib_path/libamdhip64.so.6"
-
-        # Add Triton
-        # $python_exec -m pip install https://github.com/M4TH1EU/ai-suite-rocm-local/releases/download/prebuilt-wheels-for-rocm/llama_cpp_python-0.2.89-cp310-cp310-linux_x86_64.whl
-        # git clone https://github.com/ROCmSoftwarePlatform/triton.git .tritonrocm
-        # cd .tritonrocm/python
-        # $python_exec -m pip install ninja cmake; # build time dependencies
-        # $python_exec -m pip uninstall triton -y && $python_exec -m pip install -e .
-        # cd .. && sudo rm -R .tritonrocm
-
-        echo "Cloning $NAME repository..."
-        git clone https://github.com/oobabooga/text-generation-webui.git webui
-
-        echo "Running $NAME setup..."
-
-        # For some reason these want to reinstall torch for nvidia instead of using the download for rocm, so manually install them
-        sed -i '/accelerate/d' webui/requirements_amd.txt
-        sed -i '/lm_eval/d' webui/requirements_amd.txt
-        sed -i '/optimum/d' webui/requirements_amd.txt
-        sed -i '/autoawq/d' webui/requirements_amd.txt
-        sed -i '/llama_cpp_python/d' webui/requirements_amd.txt
-
-        $python_exec -m pip install -r webui/requirements_amd.txt
-
-        # only works after requirements_amd.txt is installed ??!
-        $python_exec -m pip install accelerate optimum
-        $python_exec -m pip install https://github.com/casper-hansen/AutoAWQ_kernels/releases/download/v0.0.7/autoawq_kernels-0.0.7+rocm571-cp310-cp310-linux_x86_64.whl --no-deps
-        $python_exec -m pip install https://github.com/casper-hansen/AutoAWQ/releases/download/v0.2.6/autoawq-0.2.6-cp310-cp310-linux_x86_64.whl --no-deps
-        $python_exec -m pip install lm_eval
-        ln -s webui/models models
-    fi
-}
-
-# Main function
-main() {
-    prepare_env
-    install
-    clean
-    echo "$NAME installation/update complete. Use ./run.sh to start"
-}
-
-# Run main function
-main
text-generation-rocm/run.sh (deleted)
@@ -1,18 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec="$(pwd)/venv/bin/python3.10"
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start
-    cd webui
-    TORCH_BLAS_PREFER_HIPBLASLT=0 $python_exec server.py --listen
-}
-
-main
utils.py (new file, 28 additions)
@@ -0,0 +1,28 @@
+import json
+import urllib.error
+import urllib.request
+
+from main import ROCM_VERSION, logger
+
+
+def get_prebuilts(repo_owner: str = "M4TH1EU", repo_name: str = "ai-suite-rocm-local",
+                  release_tag: str = f"prebuilt-whl-{ROCM_VERSION}") -> list:
+    api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/tags/{release_tag}"
+
+    try:
+        with urllib.request.urlopen(api_url) as response:
+            if response.status != 200:
+                logger.error(f"Failed to fetch data: HTTP Status {response.status}")
+                return []
+
+            release_data = json.load(response)
+
+            assets = release_data.get('assets', [])
+            if not assets:
+                logger.error("No assets found in release data")
+                return []
+
+            return assets
+    except urllib.error.URLError as e:
+        logger.error(f"Error fetching release data: {e}")
+        return []
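For reference, a small sketch of how the asset dicts returned by get_prebuilts() are consumed: install_from_prebuilt() in services/services.py matches on the first dash-separated token of each asset's name (e.g. "bitsandbytes" out of "bitsandbytes-0.43.3-...whl") and pip-installs its browser_download_url. Nothing beyond those two GitHub release-asset fields is assumed:

```python
import utils

# Each asset is a GitHub release-asset dict; 'name' and
# 'browser_download_url' are the only fields the suite relies on.
for asset in utils.get_prebuilts():
    package = asset['name'].split("-")[0]
    print(package, asset['browser_download_url'])
```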
xtts-rocm/.gitignore (vendored, 2 deletions)
@@ -1,2 +0,0 @@
-venv/
-webui/
xtts-rocm/install.sh (deleted)
@@ -1,48 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec=venv/bin/python3.10
-
-# Function to install/update StableDiffusion
-install_xtts() {
-    if [ -d "webui" ]; then
-        echo "XTTS repository already exists. Skipping clone."
-        yes_or_no "Do you want to update XTTS WebUI ?" && {
-            cd webui
-            rm requirements_without_torch.txt
-            git pull
-            echo "XTTS WebUI successfully updated."
-            cd ..
-        }
-    else
-        echo "Cloning XTTS repository..."
-        git clone https://github.com/daswer123/xtts-webui webui
-    fi
-
-    iconv -f UTF-16 -t UTF-8 webui/requirements.txt | grep -v 'torch' > webui/requirements_without_torch.txt
-    $python_exec -m pip install -r webui/requirements_without_torch.txt
-
-    # Disable gpu for faster-whisper as ROCM isn't supported yet
-    sed -i 's/device = "cuda" if torch.cuda.is_available() else "cpu"/device = "cpu"/' webui/scripts/utils/formatter.py
-    sed -i 's/asr_model = WhisperModel(whisper_model, device=device, compute_type="float16")/asr_model = WhisperModel(whisper_model, device=device, compute_type="int8")/' webui/scripts/utils/formatter.py
-
-    # Deepspeed and ninja (not working)
-    $python_exec -m pip install ninja deepspeed
-    # apt-get install -y ninja-build
-
-    ln -s webui/models models
-}
-
-# Main function
-main() {
-    prepare_env
-
-    # Install XTTS
-    install_xtts
-
-    clean
-
-    echo "XTTS installation/update complete."
-}
-
-# Run main function
-main
xtts-rocm/run.sh (deleted)
@@ -1,18 +0,0 @@
-#!/bin/bash
-source ../utils.sh
-python_exec=venv/bin/python3.10
-
-# Main function
-main() {
-    # Create virtual environment
-    use_venv
-
-    # Prints ROCM info with available GPUs
-    rocm-smi
-
-    # Start XTTS
-    cd webui/
-    TORCH_BLAS_PREFER_HIPBLASLT=0 ../$python_exec app.py --host 0.0.0.0 -v v2.0.3
-}
-
-main