chore(llama.cpp): detect version and bump to b3995 for win (#3447)

* chore(llama.cpp): auto detect llama.cpp version and bump to b3995 for win

* ci(build): fetch tags before getting llama.cpp version

* ci(build): do not display all fetched tags when building
This commit is contained in:
Wei Zhang 2024-11-21 22:54:24 +08:00 committed by GitHub
parent a20846255d
commit f2cf549ad9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 8 additions and 4 deletions

View File

@ -249,14 +249,14 @@ jobs:
package-win-cuda:
runs-on: ubuntu-latest
env:
LLAMA_CPP_VERSION: b3571
needs: [release-binary]
permissions:
contents: write
steps:
- name: Checkout
uses: actions/checkout@v3
with:
submodules: recursive
- name: Download all artifacts
uses: actions/download-artifact@v3

View File

@ -1,8 +1,12 @@
#!/bin/bash
# Resolve the repository root relative to this script's own location, so the
# script works regardless of the caller's current working directory.
PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# Path to the vendored llama.cpp git submodule.
LLAMA_CPP_PATH="${PROJECT_ROOT}/crates/llama-cpp-server/llama.cpp"

# Input variables — each may be overridden via the environment.
TABBY_VERSION=${TABBY_VERSION:-dev}
# Auto-detect the llama.cpp version from the submodule's latest tag when not
# provided. `--quiet` suppresses the fetched-ref listing (git prints it to
# stderr, so a plain >/dev/null would not silence it). The path is quoted to
# survive spaces in the checkout location.
LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION:-$(cd "${LLAMA_CPP_PATH}" && git fetch --quiet --tags origin && git describe --tags --abbrev=0)}
echo "LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION}"
LLAMA_CPP_PLATFORM=${LLAMA_CPP_PLATFORM:-cuda-cu11.7.1-x64}
OUTPUT_NAME=${OUTPUT_NAME:-tabby_x86_64-windows-msvc-cuda117}