From 27cac7ab571d0dd8f9dbd752a2125678ea007407 Mon Sep 17 00:00:00 2001
From: Mathieu Broillet
Date: Sun, 12 Nov 2023 18:47:30 +0100
Subject: [PATCH] add support for kohyass and bugfixes

---
 Dockerfile         | 39 ++++++++++++++++++++++++++++++++-------
 docker-compose.yml | 35 +++++++++++++++++++++++++++++------
 2 files changed, 61 insertions(+), 13 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index ff7f1d0..170ad6c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -13,18 +13,34 @@ RUN apt-get update &&\
     git \
    python3.10 \
     python3-dev \
+    python3.10-tk \
     python-is-python3 \
     python3.10-venv \
     rsync
 
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
 RUN python3.10 -m pip install --upgrade pip wheel setuptools
 
+# Install requirements for ROCM build-stuff
+RUN apt-get install -y hipblas hipblaslt hipsparse hipcub hip-runtime-amd rocthrust rocthrust-dev rocrand
+
 # Install PyTorch
 RUN python3.10 -m pip install torch torchvision --index-url https://download.pytorch.org/whl/nightly/rocm5.7
 
 # Create ai folder for saving projects
 RUN mkdir /ai/
+# Install bitsandbytes, required for many AI tools, with ROCM fork/PR
+WORKDIR /ai/git/bitsandbytes
+RUN git clone https://github.com/TimDettmers/bitsandbytes.git /ai/git/bitsandbytes
+# Clone the ROCM PR branch cause it isn't merged yet
+RUN git fetch origin refs/pull/756/head:rocmport && git checkout rocmport
+# Set the env variables to the container ROCM setup
+ENV ROCM_HOME=/opt/rocm-5.7.0
+ENV ROCM_TARGET=gfx1030
+# Build and install globally
+RUN make hip && pip install .
+
+
 # Install StableDiffusion in /ai/
@@ -33,28 +49,37 @@ RUN cp -R /ai/git/stablediffusion-webui/* /ai/stablediffusion-webui/
 RUN python3.10 -m venv /ai/venv/stablediffusion/ --system-site-packages
 RUN /ai/venv/stablediffusion/bin/python launch.py --skip-torch-cuda-test --exit
 RUN /ai/venv/stablediffusion/bin/python -m pip install opencv-python-headless tomesd protobuf --upgrade
-# RUN /ai/venv/stablediffusion/bin/python -m pip install -r requirements.txt # should not be needed
+
 
 # Install Kobold AI in /ai/
 WORKDIR /ai/koboldai
 RUN git clone https://github.com/YellowRoseCx/koboldcpp-rocm.git -b main --depth 1 /ai/git/koboldai
 RUN cp -R /ai/git/koboldai/* /ai/koboldai/
 
-# Create VENV for KoboldAI with inherit pytorch
-# RUN python3.10 -m venv /ai/venv/koboldai/ --system-site-packages # not needed actually
-# Install python requirements
-# RUN /ai/venv/koboldai/bin/python -m pip install -r requirements.txt # should not be needed
 # Build KoboldAI for ROCM
 RUN make LLAMA_HIPBLAS=1 -j4
+
 # Install LlamaCPP in /ai/
 WORKDIR /ai/llamacpp
 RUN git clone https://github.com/ggerganov/llama.cpp.git -b master /ai/git/llamacpp
 RUN cp -R /ai/git/llamacpp/* /ai/llamacpp/
 
-# Install requirements for LlamaCPP build
-RUN apt-get install -y hipblas hipblaslt hipsparse hipcub hip-runtime-amd rocthrust rocthrust-dev rocrand
 # Build LlamaCPP for ROCM
 RUN make LLAMA_HIPBLAS=1 -j4
+RUN pip install --ignore-installed flask requests
+# Install Kohya SS in /ai/
+WORKDIR /ai/kohya_ss
+# RUN apt-get install -y libgl1 libglib2.0-0 libgoogle-perftools-dev python3-html5lib python3-apt python3.10-distutils
+RUN git clone https://github.com/bmaltais/kohya_ss.git -b master /ai/git/kohya_ss
+# For some reason, the .release file is required and isn't copied automatically
+RUN cp -R /ai/git/kohya_ss/* /ai/kohya_ss/ && cp /ai/git/kohya_ss/.release /ai/kohya_ss/
+# Create VENV for Kohya SS with inherit pytorch
+RUN python3.10 -m venv /ai/venv/kohya_ss/ --system-site-packages
+# Install python requirements
+RUN /ai/venv/kohya_ss/bin/python -m pip install --upgrade pip wheel
+RUN /ai/venv/kohya_ss/bin/python -m pip install -r requirements.txt && /ai/venv/kohya_ss/bin/python -m pip uninstall -y tensorflow
+RUN /ai/venv/kohya_ss/bin/python -m pip install accelerate tensorboard tensorflow-rocm lion_pytorch
+RUN /ai/venv/kohya_ss/bin/python -m pip install typing_extensions --upgrade
 
 # Set safe directory for extensions and stuff
 RUN git config --global --add safe.directory "*"
diff --git a/docker-compose.yml b/docker-compose.yml
index fe11e9c..7342e71 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -45,10 +45,6 @@ services:
     container_name: koboldai-rocm
     environment:
       TZ: "Europe/Zurich"
-      ROC_ENABLE_PRE_VEGA: 1
-      COMMANDLINE_ARGS: ""
-      # HSA_OVERRIDE_GFX_VERSION: 10.3.0
-      # PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
     entrypoint: ["/bin/sh", "-c"]
     working_dir: /ai/koboldai/
     command: ["./koboldcpp.py --config config.kcpps"]
@@ -67,19 +63,24 @@ services:
     volumes:
       - ./koboldai/config.kcpps:/ai/koboldai/config.kcpps
      - ./koboldai/models:/ai/koboldai/localmodels
+
   llamacpp-rocm:
     image: ai-suite-rocm:5.7
     container_name: llamacpp-rocm
     environment:
       TZ: "Europe/Zurich"
       ROC_ENABLE_PRE_VEGA: 1
-      COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r \"User:\" -f prompts/chat-with-ellie.txt"
+      # COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r \"User:\" -f prompts/chat-with-ellie.txt"
+      COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 2048 --n-gpu-layers 40 --port 5002 --host 0.0.0.0"
       # HSA_OVERRIDE_GFX_VERSION: 10.3.0
       # PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
     entrypoint: ["/bin/sh", "-c"]
     working_dir: /ai/llamacpp/
-    command: ["./main $$COMMANDLINE_ARGS"]
+    command: ["./server $$COMMANDLINE_ARGS & python /ai/llamacpp/examples/server/api_like_OAI.py --port 5003 --host 0.0.0.0 --llama-api http://127.0.0.1:5002"]
     tty: true
+    ports:
+      - "5002:5002"
+      - "5003:5003"
     devices:
       - "/dev/kfd:/dev/kfd"
       - "/dev/dri:/dev/dri"
@@ -93,3 +94,25 @@ services:
     volumes:
       - ./llamacpp/models:/ai/llamacpp/models
       - ./llamacpp/extra:/ai/llamacpp/extra
+
+  koyhass-rocm:
+    image: ai-suite-rocm:5.7
+    container_name: koyhass-rocm
+    environment:
+      TZ: "Europe/Zurich"
+      CLI_ARGS: ""
+    entrypoint: ["/bin/sh", "-c"]
+    working_dir: /ai/kohya_ss/
+    command: ["/ai/venv/kohya_ss/bin/python \"./kohya_gui.py\" ${CLI_ARGS} --listen 0.0.0.0 --server_port 5004"]
+    ports:
+      - "5004:5004"
+    devices:
+      - "/dev/kfd:/dev/kfd"
+      - "/dev/dri:/dev/dri"
+    group_add:
+      - video
+    ipc: host
+    cap_add:
+      - SYS_PTRACE
+    security_opt:
+      - seccomp:unconfined
\ No newline at end of file