diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 7ae99f42..31acf0e4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -62,12 +62,7 @@ jobs:
         sudo apt-get update
         sudo apt-get install podman bats bash codespell python3-argcomplete pipx git cmake
         make install-requirements
-        git clone https://github.com/ggerganov/llama.cpp
-        cd llama.cpp
-        git submodule update --init --recursive
-        cmake -B build -DGGML_NATIVE=OFF
-        cmake --build build --config Release
-        sudo cmake --install build
+        sudo ./container-images/scripts/build_llama_and_whisper.sh
 
     - name: Upgrade to podman 5
       run: |
diff --git a/container-images/scripts/build_llama_and_whisper.sh b/container-images/scripts/build_llama_and_whisper.sh
index 48cad908..b03d499d 100755
--- a/container-images/scripts/build_llama_and_whisper.sh
+++ b/container-images/scripts/build_llama_and_whisper.sh
@@ -125,7 +125,7 @@ main() {
   local common_flags
   configure_common_flags
   common_flags+=("-DGGML_CCACHE=OFF" "-DCMAKE_INSTALL_PREFIX=$install_prefix")
-  dnf_install
+  command -v dnf > /dev/null && dnf_install
   clone_and_build_whisper_cpp
   common_flags+=("-DLLAMA_CURL=ON")
   case "$containerfile" in
@@ -139,7 +139,7 @@ main() {
   esac
 
   clone_and_build_llama_cpp
-  dnf -y clean all
+  command -v dnf > /dev/null && dnf -y clean all
   rm -rf /var/cache/*dnf* /opt/rocm-*/lib/*/library/*gfx9*
   ldconfig # needed for libraries
 }