#!/bin/sh -l

# ROCM bitsandbytes

## Clone repo and install python requirements

git clone --depth 1 -b multi-backend-refactor https://github.com/bitsandbytes-foundation/bitsandbytes.git /tmp/bitsandbytes

cd /tmp/bitsandbytes

pip3 install -r requirements-dev.txt
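
# Sanity check (assumption: ROCm is already installed in this image); confirm the HIP compiler
# is on PATH before configuring the build.
hipcc --version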

## Build
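
# BNB_ROCM_ARCH selects which GPU architectures to compile for; BNB_GPU_TARGETS is assumed to
# be set in the environment, e.g. "gfx90a;gfx1100" (illustrative values, adjust to your hardware).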
cmake -DCOMPUTE_BACKEND=hip -S . -DBNB_ROCM_ARCH=${BNB_GPU_TARGETS}

make

python3.10 setup.py bdist_wheel --universal
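
# Sketch (assumption): install the wheel the previous step wrote to dist/ and check that it
# imports; the wildcard filename is illustrative, not taken from the actual build output.
pip3 install dist/bitsandbytes-*.whl
python3.10 -c "import bitsandbytes"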

# ROCM llama-cpp-python

## Clone repo and install python requirements

git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python

cd /tmp/llama-cpp-python
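
# Assumption: the PyPA "build" frontend is not preinstalled in this image; the
# `python3.10 -m build` invocation below requires it.
pip3 install build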

CMAKE_ARGS="-D GGML_HIPBLAS=on -D AMDGPU_TARGETS=${GPU_TARGETS}" FORCE_CMAKE=1 python3.10 -m build --wheel

# ROCM xformers

## Clone repo and install python requirements

pip3 install ninja

# a source build of xformers needs its third_party submodules, so clone recursively
git clone --depth 1 --recurse-submodules https://github.com/facebookresearch/xformers.git /tmp/xformers

cd /tmp/xformers
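
# Assumption: torch's extension builder honours PYTORCH_ROCM_ARCH, so reusing the same GPU
# target list as the llama-cpp-python build keeps the wheel limited to the intended architectures.
export PYTORCH_ROCM_ARCH="${GPU_TARGETS}"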

python3.10 setup.py bdist_wheel --universal
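
# Sketch (assumption): install the wheel from dist/ and print xformers' build/feature report
# via its upstream diagnostic entry point; the wildcard filename is illustrative.
pip3 install dist/xformers-*.whl
python3.10 -m xformers.info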