try llamacpp dockerfile structure
This commit is contained in:
parent b280b7fa09
commit 4080e682a9
@@ -9,7 +9,18 @@ ENV DEBIAN_FRONTEND=noninteractive \
# For bitsandbytes
ENV BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102"
# For LLAMA
ENV GPU_TARGETS="gfx803 gfx900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101"
ARG ROCM_DOCKER_ARCH=\
    gfx803 \
    gfx900 \
    gfx906 \
    gfx908 \
    gfx90a \
    gfx1010 \
    gfx1030 \
    gfx1100 \
    gfx1101 \
    gfx1102
ENV GPU_TARGETS=${ROCM_DOCKER_ARCH}

ENV CC=/opt/rocm/llvm/bin/clang
ENV CXX=/opt/rocm/llvm/bin/clang++
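For context: this mirrors the layout of llama.cpp's own ROCm Dockerfile, where GPU_TARGETS, CC, and CXX are later picked up by the build step. A minimal sketch of such a follow-up stage, assuming the Makefile-based HIP build (LLAMA_HIPBLAS); the build command itself is not part of this diff, so the lines below are illustrative only.

# Hypothetical follow-up stage, not shown in this diff: the llama.cpp Makefile
# reads GPU_TARGETS, CC, and CXX from the environment configured above.
ENV LLAMA_HIPBLAS=1
RUN make -j$(nproc)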