Compare commits

..

2 Commits

Author SHA1 Message Date
e79bd5f9a7
add xformers to prebuilts 2024-10-05 12:11:19 +02:00
6450001541
keep symlinks after updating 2024-10-05 10:55:04 +02:00
4 changed files with 48 additions and 3 deletions

View File

@@ -59,6 +59,12 @@ jobs:
mkdir -p /tmp/llama-cpp-python/ mkdir -p /tmp/llama-cpp-python/
docker cp prebuilt-container:/tmp/llama-cpp-python/dist/ /tmp/llama-cpp-python/ docker cp prebuilt-container:/tmp/llama-cpp-python/dist/ /tmp/llama-cpp-python/
# Step 6: Copy xformers wheel artifact to host
- name: Copy xformers wheel to host
run: |
mkdir -p /tmp/xformers/
docker cp prebuilt-container:/tmp/xformers/dist/ /tmp/xformers/
# Step 7: Upload bitsandbytes wheel artifact # Step 7: Upload bitsandbytes wheel artifact
- name: Upload bitsandbytes wheel - name: Upload bitsandbytes wheel
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
@@ -73,7 +79,14 @@ jobs:
name: llama-cpp-python-wheels name: llama-cpp-python-wheels
path: /tmp/llama-cpp-python/dist/*.whl path: /tmp/llama-cpp-python/dist/*.whl
# Step 9: Cleanup Docker container # Step 9: Upload xformers wheel artifact
- name: Upload xformers wheel
uses: actions/upload-artifact@v4
with:
name: xformers-wheels
path: /tmp/xformers/dist/*.whl
# Step 10: Cleanup Docker container
- name: Cleanup - name: Cleanup
run: | run: |
docker rm prebuilt-container docker rm prebuilt-container

View File

@@ -39,7 +39,6 @@ class Stack:
self.pid = config.get(f"{self.name}-pid") self.pid = config.get(f"{self.name}-pid")
def install(self): def install(self):
if self.is_installed(): if self.is_installed():
self.update() self.update()
@@ -74,8 +73,10 @@ class Stack:
self.stop() self.stop()
logger.info(f"Updating {self.name}") logger.info(f"Updating {self.name}")
symlinks = utils.find_symlink_in_folder(self.path)
self.git_pull(folder) self.git_pull(folder)
self._update() self._update()
utils.create_symlinks(symlinks)
if status: if status:
self.start() self.start()

View File

@@ -1,7 +1,9 @@
import importlib import importlib
import json import json
import os import os
import shutil
import subprocess import subprocess
from pathlib import Path
from urllib import request, error from urllib import request, error
from core.stack import Stack from core.stack import Stack
@@ -79,3 +81,24 @@ def load_service_from_string(service: str) -> Stack:
met = getattr(module, service_name) met = getattr(module, service_name)
return met() return met()
def find_symlink_in_folder(folder: str) -> dict[Path, Path]:
    """Find every symlink living under a ``webui`` directory inside *folder*.

    Args:
        folder: Root directory to scan recursively.

    Returns:
        Mapping of each symlink's path to its fully resolved target, so the
        links can be recreated later with ``create_symlinks``.
    """
    symlinks: dict[Path, Path] = {}
    # "webui/**/*" matches every file, directory and symlink below any
    # "webui" folder.  The previous pattern "webui/**" yielded only real
    # (non-symlink) directories on Python < 3.13, so is_symlink() never
    # matched and the function always returned an empty dict.
    for entry in Path(folder).rglob("webui/**/*"):
        if entry.is_symlink():
            symlinks[entry] = entry.resolve()
    return symlinks
def create_symlinks(symlinks: dict[Path, Path]):
    """Recreate symlinks recorded by ``find_symlink_in_folder``.

    Args:
        symlinks: Mapping of symlink path -> resolved target path.

    Whatever currently occupies a link's path — a stale symlink, a real
    directory restored by the update, or a plain file — is removed first,
    then the link is (re)created pointing at its recorded target.
    """
    # NOTE: the previous code iterated as ``for target, link in ...`` with
    # the meanings inverted (the dict key is the link location, the value
    # its target), and it did not remove a plain file at the link path,
    # which made os.symlink raise FileExistsError.
    for link_path, target in symlinks.items():
        logger.debug(f"(re)Creating symlink: {link_path} -> {target}")
        if link_path.is_symlink():
            # Stale link left over from before the update: drop it.
            link_path.unlink()
        elif link_path.is_dir():
            # The update restored a real directory where the link belongs.
            shutil.rmtree(link_path)
        elif link_path.exists():
            # A plain file in the way would make os.symlink raise.
            link_path.unlink()
        os.symlink(target, link_path)

View File

@@ -16,3 +16,11 @@ python3.10 setup.py bdist_wheel --universal
git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python
cd /tmp/llama-cpp-python cd /tmp/llama-cpp-python
CMAKE_ARGS="-D GGML_HIPBLAS=on -D AMDGPU_TARGETS=${GPU_TARGETS}" FORCE_CMAKE=1 python3.10 -m build --wheel CMAKE_ARGS="-D GGML_HIPBLAS=on -D AMDGPU_TARGETS=${GPU_TARGETS}" FORCE_CMAKE=1 python3.10 -m build --wheel
# ROCM xformers
## Clone repo and install python requirements
pip3 install ninja
git clone --depth 1 https://github.com/facebookresearch/xformers.git /tmp/xformers
cd /tmp/xformers
python3.10 setup.py bdist_wheel --universal