split dockerfile with entrypoint and test github workflow
This commit is contained in:
parent
e78b59de20
commit
07b1b81e9d
86
.github/workflows/build-and_publish-prebuilts-whl.yml
vendored
Normal file
86
.github/workflows/build-and_publish-prebuilts-whl.yml
vendored
Normal file
@ -0,0 +1,86 @@
---
# Workflow: build ROCm-specific wheels (bitsandbytes, llama-cpp-python)
# inside a Docker image and publish them as workflow artifacts.
# Trigger is manual (workflow_dispatch) with the ROCm/GPU/torch versions
# as inputs.
name: Build and Publish Artifacts

on:
  workflow_dispatch:
    inputs:
      rocm_version:
        description: 'ROCm version'
        required: true
        default: '6.1.2'
      gpu_arch:
        description: 'GPU architecture (e.g., gfx1030)'
        required: true
        default: 'gfx1030'
      torch_version:
        description: 'Torch version (e.g., rocm6.1)'
        required: true
        default: 'rocm6.1'

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # Step 1: Checkout the repository
      - name: Checkout repository
        # v3 is deprecated (node16 runtime); v4 is a drop-in replacement.
        uses: actions/checkout@v4

      # Step 2: Set up Docker Buildx
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # Step 3: Build the Docker image from ./prebuilts
      - name: Build Docker image
        env:
          ROCM_VERSION: ${{ inputs.rocm_version }}
          GPU_ARCH: ${{ inputs.gpu_arch }}
          TORCH_VERSION: ${{ inputs.torch_version }}
        run: |
          docker build \
            --build-arg ROCM_VERSION=${ROCM_VERSION} \
            --build-arg GPU_ARCH=${GPU_ARCH} \
            --build-arg TORCH_VERSION=${TORCH_VERSION} \
            -t prebuilt-wheels ./prebuilts

      # Step 4: Create a container and run the entrypoint build script
      - name: Run Docker container and generate wheels
        env:
          ROCM_VERSION: ${{ inputs.rocm_version }}
          GPU_ARCH: ${{ inputs.gpu_arch }}
          TORCH_VERSION: ${{ inputs.torch_version }}
        run: |
          # Bug fix: the entrypoint consumes ROCM_ARCH, but the original
          # passed -e ROCM_ARCH=${ROCM_ARCH}, and ROCM_ARCH is never
          # defined in this workflow — it expanded to an empty string.
          # Map the gpu_arch input (GPU_ARCH) onto ROCM_ARCH instead.
          docker create --name prebuilt-container \
            -e ROCM_ARCH=${GPU_ARCH} \
            -e GPU_ARCH=${GPU_ARCH} \
            -e TORCH_VERSION=${TORCH_VERSION} \
            prebuilt-wheels
          docker start -a prebuilt-container

      # Step 5: Copy bitsandbytes wheel artifact to host
      - name: Copy bitsandbytes wheel to host
        run: |
          # docker cp requires the destination's parent dir to exist.
          mkdir -p /tmp/bitsandbytes
          docker cp prebuilt-container:/tmp/bitsandbytes/dist /tmp/bitsandbytes/dist

      # Step 6: Copy llama-cpp-python wheel artifact to host
      - name: Copy llama-cpp-python wheel to host
        run: |
          mkdir -p /tmp/llama-cpp-python
          docker cp prebuilt-container:/tmp/llama-cpp-python/dist /tmp/llama-cpp-python/dist

      # Step 7: Upload bitsandbytes wheel artifact
      - name: Upload bitsandbytes wheel
        # upload-artifact@v3 is deprecated/being shut down; v4 keeps the
        # same name/path inputs used here.
        uses: actions/upload-artifact@v4
        with:
          name: bitsandbytes-wheels
          path: /tmp/bitsandbytes/dist/*.whl

      # Step 8: Upload llama-cpp-python wheel artifact
      - name: Upload llama-cpp-python wheel
        uses: actions/upload-artifact@v4
        with:
          name: llama-cpp-python-wheels
          path: /tmp/llama-cpp-python/dist/*.whl

      # Step 9: Cleanup Docker container — run even when an earlier step
      # failed, so a stale container never lingers on the runner.
      - name: Cleanup
        if: always()
        run: |
          docker rm -f prebuilt-container || true
@ -2,16 +2,7 @@ FROM rocm/dev-ubuntu-22.04:6.1.2
|
|||||||
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive \
|
ENV DEBIAN_FRONTEND=noninteractive \
|
||||||
PYTHONUNBUFFERED=1 \
|
PYTHONUNBUFFERED=1 \
|
||||||
PYTHONIOENCODING=UTF-8 \
|
PYTHONIOENCODING=UTF-8
|
||||||
|
|
||||||
# for bitsandbytes
|
|
||||||
ROCM_ARCH="gfx1030" \
|
|
||||||
TORCH_VERSION="rocm6.1" \
|
|
||||||
|
|
||||||
# for llama
|
|
||||||
CMAKE_ARGS="-DGGML_HIPBLAS=on" \
|
|
||||||
FORCE_CMAKE=1
|
|
||||||
|
|
||||||
|
|
||||||
WORKDIR /tmp
|
WORKDIR /tmp
|
||||||
|
|
||||||
@ -30,27 +21,11 @@ RUN pip3 install --upgrade pip wheel setuptools build
|
|||||||
|
|
||||||
# Install pytorch for rocm
|
# Install pytorch for rocm
|
||||||
RUN pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/${TORCH_VERSION}
|
RUN pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/${TORCH_VERSION}
|
||||||
|
# Install deps
|
||||||
|
|
||||||
# ROCM bitsandbytes
|
|
||||||
RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
|
RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
|
||||||
## Clone repo and install python requirements
|
|
||||||
RUN git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-foundation/bitsandbytes.git
|
|
||||||
WORKDIR /tmp/bitsandbytes
|
|
||||||
RUN pip3 install -r requirements-dev.txt
|
|
||||||
## Build
|
|
||||||
RUN cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${ROCM_ARCH}
|
|
||||||
RUN make
|
|
||||||
RUN python3.10 setup.py bdist_wheel --universal
|
|
||||||
|
|
||||||
|
|
||||||
# ROCM llama-cpp-python
|
|
||||||
RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
|
|
||||||
## Clone repo and install python requirements
|
|
||||||
RUN git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git
|
|
||||||
WORKDIR /tmp/llama-cpp-python
|
|
||||||
RUN python3.10 -m build --wheel
|
|
||||||
|
|
||||||
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
ENTRYPOINT ["/entrypoint.sh"]
|
||||||
|
|
||||||
# Cleanup
|
# Cleanup
|
||||||
RUN apt-get clean && pip3 cache purge
|
RUN apt-get clean && pip3 cache purge
|
||||||
|
18
prebuilts/entrypoint.sh
Normal file
18
prebuilts/entrypoint.sh
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
#!/bin/sh -l
# Container entrypoint: build ROCm wheels for bitsandbytes and
# llama-cpp-python. Expects ROCM_ARCH (e.g. "gfx1030") in the
# environment; wheels land in /tmp/<project>/dist for `docker cp`.

# Abort on the first failing command so the CI job fails loudly instead
# of uploading missing or partial artifacts.
set -e

# ROCM bitsandbytes
## Clone repo and install python requirements
# Clone to an explicit path rather than relying on the current working
# directory (the Dockerfile's WORKDIR).
git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-foundation/bitsandbytes.git /tmp/bitsandbytes
cd /tmp/bitsandbytes
pip3 install -r requirements-dev.txt
## Build
cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH="${ROCM_ARCH}"
make
python3.10 setup.py bdist_wheel --universal

# ROCM llama-cpp-python
## Clone repo and install python requirements
# Bug fix: the original ran this clone from /tmp/bitsandbytes (the
# previous cd), which checked out to /tmp/bitsandbytes/llama-cpp-python,
# while the next line expected /tmp/llama-cpp-python and failed. Clone
# to the explicit path the rest of the pipeline (docker cp) expects.
git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python
cd /tmp/llama-cpp-python
CMAKE_ARGS="-DGGML_HIPBLAS=on" python3.10 -m build --wheel
Loading…
Reference in New Issue
Block a user