From 07b1b81e9d4112fc1baaa8d7757402c7c88c1efc Mon Sep 17 00:00:00 2001
From: Mathieu Broillet
Date: Wed, 28 Aug 2024 11:18:19 +0200
Subject: [PATCH] split dockerfile with entrypoint and test github workflow

---
 .../build-and_publish-prebuilts-whl.yml       | 86 +++++++++++++++++++
 prebuilts/Dockerfile                          | 33 +------
 prebuilts/entrypoint.sh                       | 18 ++++
 3 files changed, 108 insertions(+), 29 deletions(-)
 create mode 100644 .github/workflows/build-and_publish-prebuilts-whl.yml
 create mode 100644 prebuilts/entrypoint.sh

diff --git a/.github/workflows/build-and_publish-prebuilts-whl.yml b/.github/workflows/build-and_publish-prebuilts-whl.yml
new file mode 100644
index 0000000..f15e6d1
--- /dev/null
+++ b/.github/workflows/build-and_publish-prebuilts-whl.yml
@@ -0,0 +1,86 @@
+name: Build and Publish Artifacts
+
+on:
+  workflow_dispatch:
+    inputs:
+      rocm_version:
+        description: 'ROCm version'
+        required: true
+        default: '6.1.2'
+      gpu_arch:
+        description: 'GPU architecture (e.g., gfx1030)'
+        required: true
+        default: 'gfx1030'
+      torch_version:
+        description: 'Torch version (e.g., rocm6.1)'
+        required: true
+        default: 'rocm6.1'
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      # Step 1: Checkout the repository
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      # Step 2: Set up Docker Buildx
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      # Step 3: Build the Docker image
+      - name: Build Docker image
+        env:
+          ROCM_VERSION: ${{ inputs.rocm_version }}
+          GPU_ARCH: ${{ inputs.gpu_arch }}
+          TORCH_VERSION: ${{ inputs.torch_version }}
+        run: |
+          docker build \
+            --build-arg ROCM_VERSION=${ROCM_VERSION} \
+            --build-arg GPU_ARCH=${GPU_ARCH} \
+            --build-arg TORCH_VERSION=${TORCH_VERSION} \
+            -t prebuilt-wheels ./prebuilts
+
+      # Step 4: Create a container and run the script
+      - name: Run Docker container and generate wheels
+        env:
+          ROCM_VERSION: ${{ inputs.rocm_version }}
+          GPU_ARCH: ${{ inputs.gpu_arch }}
+          TORCH_VERSION: ${{ inputs.torch_version }}
+        run: |
+          docker create --name prebuilt-container \
+            -e ROCM_ARCH=${GPU_ARCH} \
+            -e GPU_ARCH=${GPU_ARCH} \
+            -e TORCH_VERSION=${TORCH_VERSION} \
+            prebuilt-wheels
+          docker start -a prebuilt-container
+
+      # Step 5: Copy bitsandbytes wheel artifact to host
+      - name: Copy bitsandbytes wheel to host
+        run: |
+          docker cp prebuilt-container:/tmp/bitsandbytes/dist /tmp/bitsandbytes/dist
+
+      # Step 6: Copy llama-cpp-python wheel artifact to host
+      - name: Copy llama-cpp-python wheel to host
+        run: |
+          docker cp prebuilt-container:/tmp/llama-cpp-python/dist /tmp/llama-cpp-python/dist
+
+      # Step 7: Upload bitsandbytes wheel artifact
+      - name: Upload bitsandbytes wheel
+        uses: actions/upload-artifact@v3
+        with:
+          name: bitsandbytes-wheels
+          path: /tmp/bitsandbytes/dist/*.whl
+
+      # Step 8: Upload llama-cpp-python wheel artifact
+      - name: Upload llama-cpp-python wheel
+        uses: actions/upload-artifact@v3
+        with:
+          name: llama-cpp-python-wheels
+          path: /tmp/llama-cpp-python/dist/*.whl
+
+      # Step 9: Cleanup Docker container
+      - name: Cleanup
+        run: |
+          docker rm prebuilt-container
\ No newline at end of file
diff --git a/prebuilts/Dockerfile b/prebuilts/Dockerfile
index ab69ad7..6478fac 100644
--- a/prebuilts/Dockerfile
+++ b/prebuilts/Dockerfile
@@ -2,16 +2,7 @@ FROM rocm/dev-ubuntu-22.04:6.1.2
 
 ENV DEBIAN_FRONTEND=noninteractive \
     PYTHONUNBUFFERED=1 \
-    PYTHONIOENCODING=UTF-8 \
-
-    # for bitsandbytes
-    ROCM_ARCH="gfx1030" \
-    TORCH_VERSION="rocm6.1" \
-
-    # for llama
-    CMAKE_ARGS="-DGGML_HIPBLAS=on" \
-    FORCE_CMAKE=1
-
+    PYTHONIOENCODING=UTF-8
 
 
 WORKDIR /tmp
@@ -30,27 +21,11 @@ RUN pip3 install --upgrade pip wheel setuptools build
 
 # Install pytorch for rocm
 RUN pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/${TORCH_VERSION}
-
-
-# ROCM bitsandbytes
+# Install deps
 RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
 
-## Clone repo and install python requirements
-RUN git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-foundation/bitsandbytes.git
-WORKDIR /tmp/bitsandbytes
-RUN pip3 install -r requirements-dev.txt
-## Build
-RUN cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${ROCM_ARCH}
-RUN make
-RUN python3.10 setup.py bdist_wheel --universal
-
-
-# ROCM llama-cpp-python
-RUN apt-get install -y hipblas hipblaslt hiprand hipsparse hipcub rocthrust-dev
-## Clone repo and install python requirements
-RUN git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git
-WORKDIR /tmp/llama-cpp-python
-RUN python3.10 -m build --wheel
+COPY entrypoint.sh /entrypoint.sh
+ENTRYPOINT ["/entrypoint.sh"]
 
 # Cleanup
 RUN apt-get clean && pip3 cache purge
diff --git a/prebuilts/entrypoint.sh b/prebuilts/entrypoint.sh
new file mode 100644
index 0000000..b76578a
--- /dev/null
+++ b/prebuilts/entrypoint.sh
@@ -0,0 +1,18 @@
+#!/bin/sh -l
+
+# ROCM bitsandbytes
+## Clone repo and install python requirements
+git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-foundation/bitsandbytes.git
+cd /tmp/bitsandbytes
+pip3 install -r requirements-dev.txt
+## Build
+cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${ROCM_ARCH}
+make
+python3.10 setup.py bdist_wheel --universal
+
+
+# ROCM llama-cpp-python
+## Clone repo and install python requirements
+git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git
+cd /tmp/llama-cpp-python
+CMAKE_ARGS="-DGGML_HIPBLAS=on" python3.10 -m build --wheel
\ No newline at end of file