# ai-suite-rocm/docker-compose.yml
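# ROCm 5.7 AI suite: Stable Diffusion WebUI, KoboldCpp, llama.cpp and kohya_ss,
# each running from the shared ai-suite-rocm:5.7 image with the host GPU passed through.
# Start an individual service with, e.g.: docker compose up -d stablediff-rocm
# Note: compose escapes "$$" to a literal "$", so variables such as
# $$COMMANDLINE_ARGS are expanded by the container shell, not by compose itself.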
version: '3'
services:
  stablediff-rocm:
    image: ai-suite-rocm:5.7
    container_name: stablediffusion-rocm
    environment:
      TZ: "Europe/Zurich"
      ROC_ENABLE_PRE_VEGA: 1
      COMMANDLINE_ARGS: "--listen --enable-insecure-extension-access --opt-split-attention"
      #HSA_OVERRIDE_GFX_VERSION: 10.3.0
      #PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
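      # Optional tuning, based on common ROCm setups: HSA_OVERRIDE_GFX_VERSION makes
      # ROCm treat the GPU as a supported gfx target (10.3.0 = RDNA2), a common
      # workaround for officially unsupported cards; PYTORCH_HIP_ALLOC_CONF tunes
      # PyTorch's HIP allocator to reduce fragmentation-related out-of-memory errors.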
    entrypoint: ["/bin/sh", "-c"]
    working_dir: /ai/stablediffusion-webui/
    command: >
      "rocm-smi; echo launch.py $$COMMANDLINE_ARGS;
      if [ ! -f ./models/Stable-diffusion/*.ckpt ]; then
        echo 'Please copy a Stable Diffusion model to the ./stablediffusion/models directory';
        echo 'You may need sudo to perform this action';
        exit 1;
      fi;
      chmod -R 777 /ai/stablediffusion-webui/outputs;
      /ai/venv/stablediffusion/bin/python launch.py"
    ports:
      - "5000:7860"
    devices:
      - "/dev/kfd:/dev/kfd"
      - "/dev/dri:/dev/dri"
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp:unconfined
    volumes:
      # - ./stablediffusion/webui:/ai/stablediffusion-webui
      - ./stablediffusion/models:/ai/stablediffusion-webui/models/
      - ./stablediffusion/embeddings:/ai/stablediffusion-webui/embeddings/
      - ./stablediffusion/extensions:/ai/stablediffusion-webui/extensions/
      - ./stablediffusion/outputs:/ai/stablediffusion-webui/outputs/
  kobold-rocm:
    image: ai-suite-rocm:5.7
    container_name: koboldai-rocm
    environment:
      TZ: "Europe/Zurich"
    entrypoint: ["/bin/sh", "-c"]
    working_dir: /ai/koboldai/
    command: ["./koboldcpp.py --config config.kcpps"]
    ports:
      - "5001:5001"
    devices:
      - "/dev/kfd:/dev/kfd"
      - "/dev/dri:/dev/dri"
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp:unconfined
    volumes:
      - ./koboldai/config.kcpps:/ai/koboldai/config.kcpps
      - ./koboldai/models:/ai/koboldai/localmodels
  llamacpp-rocm:
    image: ai-suite-rocm:5.7
    container_name: llamacpp-rocm
    environment:
      TZ: "Europe/Zurich"
      ROC_ENABLE_PRE_VEGA: 1
      # COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 512 -b 1024 -n 256 --keep 48 --repeat_penalty 1.0 --color -i -r \"User:\" -f prompts/chat-with-ellie.txt"
      COMMANDLINE_ARGS: "-m /ai/llamacpp/models/llama2-13b-tiefighter.Q6_K.gguf -c 2048 --n-gpu-layers 40 --port 5002 --host 0.0.0.0"
      # HSA_OVERRIDE_GFX_VERSION: 10.3.0
      # PYTORCH_HIP_ALLOC_CONF: garbage_collection_threshold:0.8,max_split_size_mb:128
    entrypoint: ["/bin/sh", "-c"]
    working_dir: /ai/llamacpp/
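    # Starts the native llama.cpp server on 5002 in the background, then runs the
    # bundled examples/server/api_like_OAI.py shim in the foreground, which exposes
    # an OpenAI-style API on 5003 and forwards requests to the server on 5002.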
command: ["./server $$COMMANDLINE_ARGS & python /ai/llamacpp/examples/server/api_like_OAI.py --port 5003 --host 0.0.0.0 --llama-api http://127.0.0.1:5002"]
    tty: true
    ports:
      - "5002:5002"
      - "5003:5003"
    devices:
      - "/dev/kfd:/dev/kfd"
      - "/dev/dri:/dev/dri"
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp:unconfined
    volumes:
      - ./llamacpp/models:/ai/llamacpp/models
      - ./llamacpp/extra:/ai/llamacpp/extra
  kohyass-rocm:
    image: ai-suite-rocm:5.7
    container_name: kohyass-rocm
    environment:
      TZ: "Europe/Zurich"
      CLI_ARGS: ""
    entrypoint: ["/bin/sh", "-c"]
    working_dir: /ai/kohya_ss/
    # $$CLI_ARGS (not ${CLI_ARGS}) so the container shell expands it, matching the
    # $$COMMANDLINE_ARGS pattern used by the other services.
    command: ["/ai/venv/kohya_ss/bin/python \"./kohya_gui.py\" $$CLI_ARGS --listen 0.0.0.0 --server_port 5004"]
    ports:
      - "5004:5004"
    devices:
      - "/dev/kfd:/dev/kfd"
      - "/dev/dri:/dev/dri"
    group_add:
      - video
    ipc: host
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp:unconfined
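
# All four services repeat the same GPU passthrough and security settings. A possible
# refactor (a sketch, not applied here) is a top-level extension field with a YAML
# anchor that each service merges in; compose supports x-* extension fields and
# YAML merge keys:
#
# x-rocm-gpu: &rocm-gpu
#   devices:
#     - "/dev/kfd:/dev/kfd"
#     - "/dev/dri:/dev/dri"
#   group_add:
#     - video
#   ipc: host
#   cap_add:
#     - SYS_PTRACE
#   security_opt:
#     - seccomp:unconfined
#
# services:
#   stablediff-rocm:
#     <<: *rocm-gpu
#     # ...service-specific keys as above...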