Mirror of https://github.com/TabbyML/tabby, synced 2024-11-22 00:08:06 +00:00
chore(llama.cpp): detect version and bump to b3995 for win (#3447)
* chore(llama.cpp): auto-detect the llama.cpp version and bump to b3995 for Windows
* ci(build): fetch tags before getting the llama.cpp version
* ci(build): do not display all fetched tags during the build
This commit is contained in:
Parent: a20846255d
Commit: f2cf549ad9
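The version detection amounts to asking a llama.cpp checkout's git history for its newest reachable tag. A minimal stand-alone sketch of the same idea, assuming a local llama.cpp checkout at an illustrative path (the path is not taken from this commit):

    # Assumed location of a llama.cpp checkout; adjust to your environment.
    cd /path/to/llama.cpp
    # Refresh the tag list without printing every fetched tag.
    git fetch --tags origin >/dev/null
    # Print the most recent tag reachable from the checked-out commit, e.g. b3995.
    git describe --tags --abbrev=0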
Changed files:
.github/workflows/release.yml (vendored): 4 changes
.github/workflows/release.yml:

@@ -249,14 +249,14 @@ jobs:
   package-win-cuda:
     runs-on: ubuntu-latest
-    env:
-      LLAMA_CPP_VERSION: b3571
     needs: [release-binary]
     permissions:
       contents: write
     steps:
       - name: Checkout
         uses: actions/checkout@v3
+        with:
+          submodules: recursive

       - name: Download all artifacts
         uses: actions/download-artifact@v3
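Dropping the hardcoded LLAMA_CPP_VERSION from the job environment shifts version resolution to the packaging script (second hunk below), and checking the repository out with submodules ensures the llama.cpp submodule is present for git describe to inspect. If a later workflow step needed the detected tag, one hypothetical way to surface it (not part of this commit) would be a step body that writes it to GITHUB_OUTPUT:

    # Hypothetical step body, not in this diff: export the detected tag so
    # later steps can read it as steps.<step_id>.outputs.llama_cpp_version.
    LLAMA_CPP_VERSION=$(git -C crates/llama-cpp-server/llama.cpp describe --tags --abbrev=0)
    echo "llama_cpp_version=${LLAMA_CPP_VERSION}" >> "$GITHUB_OUTPUT"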
@@ -1,8 +1,12 @@
 #!/bin/bash

+# get current bash file directory
+PROJECT_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+LLAMA_CPP_PATH="${PROJECT_ROOT}/crates/llama-cpp-server/llama.cpp"
 # Input variables
 TABBY_VERSION=${TABBY_VERSION:-dev}
-LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION:-b3571}
+LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION:-$(cd ${LLAMA_CPP_PATH} && git fetch --tags origin >/dev/null && git describe --tags --abbrev=0)}
+echo "LLAMA_CPP_VERSION=${LLAMA_CPP_VERSION}"
 LLAMA_CPP_PLATFORM=${LLAMA_CPP_PLATFORM:-cuda-cu11.7.1-x64}
 OUTPUT_NAME=${OUTPUT_NAME:-tabby_x86_64-windows-msvc-cuda117}
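Each variable uses bash's ${VAR:-default} expansion, so an exported environment variable still overrides the auto-detected value. A hedged usage sketch, with the script path assumed for illustration rather than taken from this commit:

    # Rely on auto-detection from the llama.cpp submodule:
    ./ci/package-win.sh                          # script path assumed

    # Or pin the version explicitly; the git describe fallback is then skipped:
    LLAMA_CPP_VERSION=b3995 ./ci/package-win.sh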