fix gpu targets and paths (workflow)

parent 0d96ae2df3
commit 0a35601018
@@ -36,12 +36,10 @@ jobs:
       - name: Build Docker image
         env:
           ROCM_VERSION: ${{ inputs.rocm_version }}
-          GPU_ARCH: ${{ inputs.gpu_arch }}
           TORCH_VERSION: ${{ inputs.torch_version }}
         run: |
           docker build \
             --build-arg ROCM_VERSION=${ROCM_VERSION} \
-            --build-arg GPU_ARCH=${GPU_ARCH} \
             --build-arg TORCH_VERSION=${TORCH_VERSION} \
             -t prebuilt-wheels ./prebuilts
 
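A rough local equivalent of this build step after the change, for reproducing it outside CI; the version values below are illustrative stand-ins for the workflow inputs, and the GPU architecture list is no longer passed as a build argument:

```bash
# Illustrative stand-ins for the workflow's rocm_version / torch_version inputs.
ROCM_VERSION=6.1.2
TORCH_VERSION=2.3.0

# GPU_ARCH is no longer a build arg; the target lists are baked into the image
# by the Dockerfile change further down in this commit.
docker build \
  --build-arg ROCM_VERSION=${ROCM_VERSION} \
  --build-arg TORCH_VERSION=${TORCH_VERSION} \
  -t prebuilt-wheels ./prebuilts
```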
@@ -49,12 +47,10 @@ jobs:
       - name: Run Docker container and generate wheels
         env:
           ROCM_VERSION: ${{ inputs.rocm_version }}
-          GPU_ARCH: ${{ inputs.gpu_arch }}
           TORCH_VERSION: ${{ inputs.torch_version }}
         run: |
           docker create --name prebuilt-container \
             -e ROCM_ARCH=${ROCM_ARCH} \
-            -e GPU_ARCH=${GPU_ARCH} \
             -e TORCH_VERSION=${TORCH_VERSION} \
             prebuilt-wheels
           docker start -a prebuilt-container
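A minimal sketch of the container run step under the same assumptions; the GPU target list is no longer injected with `-e` here because the image now carries it as ENV defaults:

```bash
# ROCM_ARCH and TORCH_VERSION are expected to be set in the calling environment.
docker create --name prebuilt-container \
  -e ROCM_ARCH=${ROCM_ARCH} \
  -e TORCH_VERSION=${TORCH_VERSION} \
  prebuilt-wheels

# -a attaches to the container, so the wheel build log streams into the job output.
docker start -a prebuilt-container
```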
@@ -63,14 +59,15 @@ jobs:
       - name: Copy bitsandbytes wheel to host
         run: |
           mkdir -p /tmp/bitsandbytes/dist
-          docker cp prebuilt-container:/tmp/bitsandbytes/dist /tmp/bitsandbytes/dist
+          docker cp prebuilt-container:/tmp/bitsandbytes/dist/ /tmp/bitsandbytes/dist/
+          ls -l /tmp/bitsandbytes/dist/
 
       # Step 6: Copy llama-cpp-python wheel artifact to host
       - name: Copy llama-cpp-python wheel to host
         run: |
           mkdir -p /tmp/llama-cpp-python/dist
-          docker cp prebuilt-container:/tmp/llama-cpp-python/dist /tmp/llama-cpp-python/dist
+          docker cp prebuilt-container:/tmp/llama-cpp-python/dist/ /tmp/llama-cpp-python/dist/
+          ls -l /tmp/llama-cpp-python/dist/
 
       # Step 7: Upload bitsandbytes wheel artifact
       - name: Upload bitsandbytes wheel
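The added `ls -l` lines act as a quick sanity check that something actually reached the host. A small follow-on sketch for checking the wheel file itself; the exact landing path is an assumption, since `docker cp` places a source directory inside an existing destination directory, so the wheel may sit one level deeper:

```bash
# Fail fast if no wheel is present anywhere under the copied dist tree.
find /tmp/bitsandbytes/dist -name '*.whl' -print -quit | grep -q . \
  || { echo "no bitsandbytes wheel found" >&2; exit 1; }
```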
@@ -1,8 +1,15 @@
-FROM rocm/dev-ubuntu-22.04:6.1.2
+FROM rocm/dev-ubuntu-22.04:${ROCM_VERSION}
 
 ENV DEBIAN_FRONTEND=noninteractive \
     PYTHONUNBUFFERED=1 \
-    PYTHONIOENCODING=UTF-8
+    PYTHONIOENCODING=UTF-8 \
 
+# For bitsandbytes
+ENV BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102"
+
+# For LLAMA
+ENV GPU_TARGETS="gfx803 gfx900 gfx906 gfx908 gfx90a gfx1010 gfx1030 gfx1100 gfx1101 gfx1102"
+
+
 WORKDIR /tmp
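With the base image tag parameterised, Docker only substitutes `${ROCM_VERSION}` in `FROM` if an `ARG ROCM_VERSION` is declared before the `FROM` line, which is not visible in this hunk. A sketch of building the image and confirming that both target lists, semicolon-separated for bitsandbytes and space-separated for llama.cpp, are baked into the image (values are illustrative):

```bash
# Build with the same build args the workflow passes.
docker build \
  --build-arg ROCM_VERSION=6.1.2 \
  --build-arg TORCH_VERSION=2.3.0 \
  -t prebuilt-wheels ./prebuilts

# Confirm the new ENV lines landed in the image config.
docker image inspect prebuilt-wheels --format '{{json .Config.Env}}' \
  | tr ',' '\n' | grep -E 'BNB_GPU_TARGETS|GPU_TARGETS'
```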
@@ -6,7 +6,7 @@ git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-fo
 cd /tmp/bitsandbytes
 pip3 install -r requirements-dev.txt
 ## Build
-cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${ROCM_ARCH}
+cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${BNB_GPU_TARGETS}
 make
 python3.10 setup.py bdist_wheel --universal
 
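A standalone sketch of the bitsandbytes build with the new variable spelled out; inside the image, `BNB_GPU_TARGETS` already comes from the Dockerfile ENV, and a working ROCm toolchain is assumed:

```bash
# Semicolon-separated CMake list of ROCm architectures, matching the Dockerfile ENV.
export BNB_GPU_TARGETS="gfx803;gfx900;gfx906;gfx908;gfx90a;gfx1010;gfx1030;gfx1100;gfx1101;gfx1102"

cd /tmp/bitsandbytes
# Quoting keeps the whole semicolon-separated list as a single -D value.
cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH="${BNB_GPU_TARGETS}"
make
python3.10 setup.py bdist_wheel --universal
```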
@@ -15,4 +15,4 @@ python3.10 setup.py bdist_wheel --universal
 ## Clone repo and install python requirements
 git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python
 cd /tmp/llama-cpp-python
-CMAKE_ARGS="-DGGML_HIPBLAS=on" python3.10 -m build --wheel
+CMAKE_ARGS="-D GGML_HIPBLAS=on -D AMDGPU_TARGETS=${GPU_TARGETS}" FORCE_CMAKE=1 python3.10 -m build --wheel
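Likewise, a minimal sketch of the llama-cpp-python wheel build, restricted to a single architecture so the `CMAKE_ARGS` string stays unambiguous; the full space-separated `GPU_TARGETS` list comes from the Dockerfile ENV, `gfx90a` below is only an example target, and the PyPA `build` package is assumed to be installed:

```bash
cd /tmp/llama-cpp-python
# CMAKE_ARGS is passed through to the llama.cpp CMake configure step;
# -D GGML_HIPBLAS=on enables the ROCm/HIP backend.
CMAKE_ARGS="-D GGML_HIPBLAS=on -D AMDGPU_TARGETS=gfx90a" FORCE_CMAKE=1 python3.10 -m build --wheel
ls -l dist/*.whl
```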