add support for kohya_ss and bugfixes
commit 27cac7ab57 (parent caa7af7f5f)

Dockerfile: 39 changed lines
@@ -13,18 +13,34 @@ RUN apt-get update &&\
     git \
     python3.10 \
     python3-dev \
+    python3.10-tk \
     python-is-python3 \
     python3.10-venv \
     rsync
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
 RUN python3.10 -m pip install --upgrade pip wheel setuptools
 
+# Install requirements for ROCM build-stuff
+RUN apt-get install -y hipblas hipblaslt hipsparse hipcub hip-runtime-amd rocthrust rocthrust-dev rocrand
+
 # Install PyTorch
 RUN python3.10 -m pip install torch torchvision --index-url https://download.pytorch.org/whl/nightly/rocm5.7
 
 # Create ai folder for saving projects
 RUN mkdir /ai/
 
+# Install bitsandbytes, required for many AI tools, with ROCM fork/PR
+WORKDIR /ai/git/bitsandbytes
+RUN git clone https://github.com/TimDettmers/bitsandbytes.git /ai/git/bitsandbytes
+# Clone the ROCM PR branch cause it isn't merged yet
+RUN git fetch origin refs/pull/756/head:rocmport && git checkout rocmport
+# Set the env variables to the container ROCM setup
+ENV ROCM_HOME=/opt/rocm-5.7.0
+ENV ROCM_TARGET=gfx1030
+# Build and install globally
+RUN make hip && pip install .
+
+
 # Install StableDiffusion in /ai/
 WORKDIR /ai/stablediffusion-webui
 RUN git clone -b dev https://github.com/AUTOMATIC1111/stable-diffusion-webui /ai/git/stablediffusion-webui
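The block above builds the unmerged ROCm port of bitsandbytes (PR 756) against the ROCm 5.7 toolchain baked into the image, targeting gfx1030. A quick way to check that the build actually loads is a smoke test against the finished image; a minimal sketch, assuming the image is tagged ai-suite-rocm:5.7 as in the compose file further down:

# Import bitsandbytes inside the freshly built image; a clean import
# with no CUDA-setup errors means the HIP build was installed globally.
docker run --rm --device /dev/kfd --device /dev/dri --group-add video \
  ai-suite-rocm:5.7 \
  python3.10 -c "import bitsandbytes; print(bitsandbytes.__version__)"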
@@ -33,28 +49,37 @@ RUN cp -R /ai/git/stablediffusion-webui/* /ai/stablediffusion-webui/
 RUN python3.10 -m venv /ai/venv/stablediffusion/ --system-site-packages
 RUN /ai/venv/stablediffusion/bin/python launch.py --skip-torch-cuda-test --exit
 RUN /ai/venv/stablediffusion/bin/python -m pip install opencv-python-headless tomesd protobuf --upgrade
-# RUN /ai/venv/stablediffusion/bin/python -m pip install -r requirements.txt # should not be needed
 
 
 # Install Kobold AI in /ai/
 WORKDIR /ai/koboldai
 RUN git clone https://github.com/YellowRoseCx/koboldcpp-rocm.git -b main --depth 1 /ai/git/koboldai
 RUN cp -R /ai/git/koboldai/* /ai/koboldai/
-# Create VENV for KoboldAI with inherit pytorch
-# RUN python3.10 -m venv /ai/venv/koboldai/ --system-site-packages # not needed actually
-# Install python requirements
-# RUN /ai/venv/koboldai/bin/python -m pip install -r requirements.txt # should not be needed
 # Build KoboldAI for ROCM
 RUN make LLAMA_HIPBLAS=1 -j4
 
 
 # Install LlamaCPP in /ai/
 WORKDIR /ai/llamacpp
 RUN git clone https://github.com/ggerganov/llama.cpp.git -b master /ai/git/llamacpp
 RUN cp -R /ai/git/llamacpp/* /ai/llamacpp/
-# Install requirements for LlamaCPP build
-RUN apt-get install -y hipblas hipblaslt hipsparse hipcub hip-runtime-amd rocthrust rocthrust-dev rocrand
 # Build LlamaCPP for ROCM
 RUN make LLAMA_HIPBLAS=1 -j4
+RUN pip install --ignore-installed flask requests
+
+# Install kohya_ss in /ai/
+WORKDIR /ai/kohya_ss
+# RUN apt-get install -y libgl1 libglib2.0-0 libgoogle-perftools-dev python3-html5lib python3-apt python3.10-distutils
+RUN git clone https://github.com/bmaltais/kohya_ss.git -b master /ai/git/kohya_ss
+# For some reason, the .release file is required and isn't copied automatically
+RUN cp -R /ai/git/kohya_ss/* /ai/kohya_ss/ && cp /ai/git/kohya_ss/.release /ai/kohya_ss/
+# Create VENV for kohya_ss with inherited PyTorch
+RUN python3.10 -m venv /ai/venv/kohya_ss/ --system-site-packages
+# Install python requirements
+RUN /ai/venv/kohya_ss/bin/python -m pip install --upgrade pip wheel
+RUN /ai/venv/kohya_ss/bin/python -m pip install -r requirements.txt && /ai/venv/kohya_ss/bin/python -m pip uninstall -y tensorflow
+RUN /ai/venv/kohya_ss/bin/python -m pip install accelerate tensorboard tensorflow-rocm lion_pytorch
+RUN /ai/venv/kohya_ss/bin/python -m pip install typing_extensions --upgrade
+
 # Set safe directory for extensions and stuff
 RUN git config --global --add safe.directory "*"
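The kohya_ss venv is created with --system-site-packages so it reuses the globally installed ROCm nightly torch rather than pulling a CUDA wheel via requirements.txt; uninstalling tensorflow and installing tensorflow-rocm follows the same logic. A quick check that the venv really resolves to the ROCm build, run from a shell inside the container (a sketch; torch.version.hip is only set on ROCm builds of PyTorch):

/ai/venv/kohya_ss/bin/python - <<'EOF'
import torch
# HIP version string on ROCm builds, None on CUDA/CPU wheels
print("torch:", torch.__version__, "| HIP:", torch.version.hip)
print("GPU visible:", torch.cuda.is_available())
EOF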
docker-compose.yml
@@ -45,10 +45,6 @@ services:
     container_name: koboldai-rocm
     environment:
       TZ: "Europe/Zurich"
-      ROC_ENABLE_PRE_VEGA: 1
-      COMMANDLINE_ARGS: ""
-      # HSA_OVERRIDE_GFX_VERSION: 10.3.0
-      # PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
     entrypoint: ["/bin/sh", "-c"]
     working_dir: /ai/koboldai/
     command: ["./koboldcpp.py --config config.kcpps"]
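Dropping the unused variables is safe here because koboldcpp reads everything from config.kcpps. The entrypoint/command pair hands the command string to a shell, so the service boils down to roughly the following invocation (a sketch, with the device flags this compose file uses elsewhere):

# Equivalent of the koboldai-rocm service: sh -c runs the command
# string, which is what lets $VAR expansion and & backgrounding work.
docker run --rm -w /ai/koboldai \
  --device /dev/kfd --device /dev/dri --group-add video \
  --entrypoint /bin/sh ai-suite-rocm:5.7 \
  -c "./koboldcpp.py --config config.kcpps"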
@@ -67,19 +63,24 @@ services:
     volumes:
       - ./koboldai/config.kcpps:/ai/koboldai/config.kcpps
       - ./koboldai/models:/ai/koboldai/localmodels
+
   llamacpp-rocm:
     image: ai-suite-rocm:5.7
     container_name: llamacpp-rocm
     environment:
       TZ: "Europe/Zurich"
       ROC_ENABLE_PRE_VEGA: 1
-      COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r \"User:\" -f prompts/chat-with-ellie.txt"
+      # COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r \"User:\" -f prompts/chat-with-ellie.txt"
+      COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 2048 --n-gpu-layers 40 --port 5002 --host 0.0.0.0"
       # HSA_OVERRIDE_GFX_VERSION: 10.3.0
       # PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
     entrypoint: ["/bin/sh", "-c"]
     working_dir: /ai/llamacpp/
-    command: ["./main $$COMMANDLINE_ARGS"]
+    command: ["./server $$COMMANDLINE_ARGS & python /ai/llamacpp/examples/server/api_like_OAI.py --port 5003 --host 0.0.0.0 --llama-api http://127.0.0.1:5002"]
     tty: true
+    ports:
+      - "5002:5002"
+      - "5003:5003"
     devices:
       - "/dev/kfd:/dev/kfd"
       - "/dev/dri:/dev/dri"
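The reworked command starts llama.cpp's built-in HTTP server on port 5002 and, in the same shell, the bundled api_like_OAI.py shim on port 5003, which translates OpenAI-style requests and forwards them to the raw server. Once the service is up, both can be probed from the host; a sketch, with the endpoint paths assumed from the llama.cpp server and shim of that era:

# Raw llama.cpp server
curl -s http://localhost:5002/completion \
  -H "Content-Type: application/json" \
  -d '{"prompt": "Hello,", "n_predict": 32}'

# OpenAI-style shim in front of it
curl -s http://localhost:5003/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"messages": [{"role": "user", "content": "Hello"}]}'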
@@ -93,3 +94,25 @@ services:
     volumes:
       - ./llamacpp/models:/ai/llamacpp/models
       - ./llamacpp/extra:/ai/llamacpp/extra
+
+  koyhass-rocm:
+    image: ai-suite-rocm:5.7
+    container_name: koyhass-rocm
+    environment:
+      TZ: "Europe/Zurich"
+      CLI_ARGS: ""
+    entrypoint: ["/bin/sh", "-c"]
+    working_dir: /ai/kohya_ss/
+    command: ["/ai/venv/kohya_ss/bin/python \"./kohya_gui.py\" $$CLI_ARGS --listen 0.0.0.0 --server_port 5004"]
+    ports:
+      - "5004:5004"
+    devices:
+      - "/dev/kfd:/dev/kfd"
+      - "/dev/dri:/dev/dri"
+    group_add:
+      - video
+    ipc: host
+    cap_add:
+      - SYS_PTRACE
+    security_opt:
+      - seccomp:unconfined
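With the service definition in place, the kohya_ss GUI is started like the other containers; for example:

# Rebuild the image with the new kohya_ss layers, then start the
# service and watch its logs; the GUI listens on http://localhost:5004
docker compose build
docker compose up -d koyhass-rocm
docker compose logs -f koyhass-rocm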