mirror of https://github.com/occ-ai/obs-localvocal
refactor: Update whisper model path and enable hipBLAS acceleration (#146)
* refactor: Update whisper model path and enable hipBLAS acceleration
* refactor: Update whisper model path and enable CoreML acceleration
This commit is contained in:
parent 87c5a0a1ca
commit 78907ea14d
.github/actions/build-plugin/action.yaml (vendored): 6 changed lines

@@ -8,8 +8,8 @@ inputs:
     description: 'Build configuration'
     required: false
     default: 'RelWithDebInfo'
-  cublas:
-    description: 'Enable cuBLAS'
+  acceleration:
+    description: 'Enable acceleration'
     required: false
     default: 'cpu'
   codesign:
@@ -86,7 +86,7 @@ runs:
         .github/scripts/Build-Windows.ps1 @BuildArgs
       env:
-        CPU_OR_CUDA: ${{ inputs.cublas }}
+        ACCELERATION: ${{ inputs.acceleration }}

     - name: Create Summary 📊
       if: contains(fromJSON('["Linux", "macOS"]'),runner.os)
.github/actions/package-plugin/action.yaml (vendored): 6 changed lines

@@ -8,8 +8,8 @@ inputs:
     description: 'Build configuration'
     required: false
     default: 'RelWithDebInfo'
-  cublas:
-    description: 'Enable cuBLAS'
+  acceleration:
+    description: 'Enable acceleration'
     required: false
     default: 'cpu'
   codesign:
@@ -112,7 +112,7 @@ runs:
         $PackageArgs = @{
           Target = '${{ inputs.target }}'
           Configuration = '${{ inputs.config }}'
-          Cublas = '${{ inputs.cublas }}'
+          Acceleration = '${{ inputs.acceleration }}'
         }

         if ( '${{ inputs.package }}' -eq 'true' ) {
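In the hunk above, `$PackageArgs` is built as a hashtable and, mirroring the `@BuildArgs` call in the build-plugin action, presumably splatted into `Package-Windows.ps1` further down (the hunk ends before that call). A minimal PowerShell sketch of how the renamed `Acceleration` key maps onto the script's parameter; the values shown are illustrative only:

```powershell
# Hypothetical local equivalent of what the package-plugin action assembles.
# Splatting (@PackageArgs) expands each hashtable key into a named parameter,
# so the 'Acceleration' key must match the -Acceleration parameter of Package-Windows.ps1.
$PackageArgs = @{
    Target        = 'x64'
    Configuration = 'RelWithDebInfo'
    Acceleration  = 'hipblas'   # one of 'cpu', 'hipblas', 'cuda'
}

.github/scripts/Package-Windows.ps1 @PackageArgs
```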
.github/scripts/Package-Windows.ps1 (vendored): 14 changed lines

@@ -4,8 +4,8 @@ param(
     [string] $Target = 'x64',
     [ValidateSet('Debug', 'RelWithDebInfo', 'Release', 'MinSizeRel')]
     [string] $Configuration = 'RelWithDebInfo',
-    [ValidateSet('cpu', 'clblast', '12.2.0', '11.8.0')]
-    [string] $Cublas = 'cpu',
+    [ValidateSet('cpu', 'hipblas', 'cuda')]
+    [string] $Acceleration = 'cpu',
     [switch] $BuildInstaller,
     [switch] $SkipDeps
 )
@@ -49,16 +49,8 @@ function Package {
     $BuildSpec = Get-Content -Path ${BuildSpecFile} -Raw | ConvertFrom-Json
     $ProductName = $BuildSpec.name
     $ProductVersion = $BuildSpec.version
-    # Check if $cublas is cpu or cuda
-    if ( $Cublas -eq 'cpu' ) {
-        $CudaName = 'cpu'
-    } elseif ( $Cublas -eq 'cblast' ) {
-        $CudaName = 'cblast'
-    } else {
-        $CudaName = "cuda${Cublas}"
-    }
-
-    $OutputName = "${ProductName}-${ProductVersion}-windows-${Target}-${CudaName}"
+    $OutputName = "${ProductName}-${ProductVersion}-windows-${Target}-${Acceleration}"

     if ( ! $SkipDeps ) {
         Install-BuildDependencies -WingetFile "${ScriptHome}/.Wingetfile"
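Given the `param()` block above, a direct invocation of the updated script would look roughly like the sketch below; the argument values are only examples.

```powershell
# Example only: package a RelWithDebInfo build with hipBLAS acceleration.
# The archive name is derived as "${ProductName}-${ProductVersion}-windows-${Target}-${Acceleration}",
# so this run would produce an output ending in "-windows-x64-hipblas".
.github/scripts/Package-Windows.ps1 -Target x64 -Configuration RelWithDebInfo -Acceleration hipblas

# Values outside the ValidateSet ('cpu', 'hipblas', 'cuda') are rejected before the script body runs.
```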
.github/workflows/build-project.yaml (vendored): 8 changed lines

@@ -246,7 +246,7 @@ jobs:
     needs: check-event
     strategy:
       matrix:
-        cublas: [cpu, clblast, 12.2.0, 11.8.0]
+        acceleration: [cpu, cuda, hipblas]
     defaults:
       run:
         shell: pwsh
@@ -276,7 +276,7 @@
         with:
           target: x64
           config: ${{ needs.check-event.outputs.config }}
-          cublas: ${{ matrix.cublas }}
+          acceleration: ${{ matrix.acceleration }}

       - name: Package Plugin 📀
         uses: ./.github/actions/package-plugin
@@ -284,10 +284,10 @@
           target: x64
           config: ${{ needs.check-event.outputs.config }}
           package: ${{ fromJSON(needs.check-event.outputs.package) }}
-          cublas: ${{ matrix.cublas }}
+          acceleration: ${{ matrix.acceleration }}

       - name: Upload Artifacts 📡
         uses: actions/upload-artifact@v4
         with:
-          name: ${{ steps.setup.outputs.pluginName }}-${{ steps.setup.outputs.pluginVersion }}-windows-x64-${{ matrix.cublas }}-${{ needs.check-event.outputs.commitHash }}
+          name: ${{ steps.setup.outputs.pluginName }}-${{ steps.setup.outputs.pluginVersion }}-windows-x64-${{ matrix.acceleration }}-${{ needs.check-event.outputs.commitHash }}
           path: ${{ github.workspace }}/release/${{ steps.setup.outputs.pluginName }}-${{ steps.setup.outputs.pluginVersion }}-windows-x64*.*
.github/workflows/push.yaml (vendored): 5 changed lines

@@ -76,9 +76,8 @@ jobs:
           variants=(
             'windows-x64-cpu;zip|exe'
-            'windows-x64-clblast;zip|exe'
-            'windows-x64-11.8.0;zip|exe'
-            'windows-x64-12.2.0;zip|exe'
+            'windows-x64-hipblas;zip|exe'
+            'windows-x64-cuda;zip|exe'
             'macos-arm64;tar.xz|pkg'
             'macos-x86_64;tar.xz|pkg'
             'ubuntu-22.04-x86_64;tar.xz|deb|ddeb'
@@ -51,7 +51,7 @@ Current Features:
 - Send captions on a RTMP stream to e.g. YouTube, Twitch
 - Bring your own Whisper model (any GGML)
 - Translate captions in real time to major languages (both Whisper built-in translation as well as NMT models)
-- CUDA, OpenCL, Apple Arm64, AVX & SSE acceleration support
+- CUDA, hipBLAS (AMD ROCm), Apple Arm64, AVX & SSE acceleration support
 - Filter out or replace any part of the produced captions
 - Partial transcriptions for a streaming-captions experience
@@ -178,10 +178,10 @@ The build should exist in the `./release` folder off the root. You can manually

 LocalVocal will now build with CUDA support automatically through a prebuilt binary of Whisper.cpp from https://github.com/occ-ai/occ-ai-dep-whispercpp. The CMake scripts will download all necessary files.

-To build with cuda add `CPU_OR_CUDA` as an environment variable (with `cpu`, `clblast`, `12.2.0` or `11.8.0`) and build regularly
+To build with cuda add `ACCELERATION` as an environment variable (with `cpu`, `hipblas`, or `cuda`) and build regularly

 ```powershell
-> $env:CPU_OR_CUDA="12.2.0"
+> $env:ACCELERATION="cuda"
 > .github/scripts/Build-Windows.ps1 -Configuration Release
 ```
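The same README recipe applies to the other `ACCELERATION` values; a minimal sketch for the hipBLAS and CPU-only variants, assuming the unchanged `Build-Windows.ps1` entry point:

```powershell
# hipBLAS (AMD ROCm) build (illustrative, mirrors the cuda example above)
$env:ACCELERATION="hipblas"
.github/scripts/Build-Windows.ps1 -Configuration Release

# CPU-only build
$env:ACCELERATION="cpu"
.github/scripts/Build-Windows.ps1 -Configuration Release
```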
@@ -19,13 +19,12 @@ if(APPLE)

 elseif(WIN32)

-  # check CPU_OR_CUDA environment variable
-  if(NOT DEFINED ENV{CPU_OR_CUDA})
-    message(
-      FATAL_ERROR "Please set the CPU_OR_CUDA environment variable to either `cpu`, `clblast`, `12.2.0` or `11.8.0`")
+  # check ACCELERATION environment variable
+  if(NOT DEFINED ENV{ACCELERATION})
+    message(FATAL_ERROR "Please set the ACCELERATION environment variable to either `cpu`, `hipblas`, or `cuda`")
   endif()

-  if($ENV{CPU_OR_CUDA} STREQUAL "cpu" OR $ENV{CPU_OR_CUDA} STREQUAL "clblast")
+  if($ENV{ACCELERATION} STREQUAL "cpu" OR $ENV{ACCELERATION} STREQUAL "hipblas")
     FetchContent_Declare(
       ctranslate2_fetch
       URL https://github.com/occ-ai/obs-ai-ctranslate2-dep/releases/download/1.2.0/libctranslate2-windows-4.1.1-Release-cpu.zip
@@ -33,21 +32,12 @@ elseif(WIN32)
   else()
     # add compile definitions for CUDA
     add_compile_definitions(POLYGLOT_WITH_CUDA)
-    add_compile_definitions(POLYGLOT_CUDA_VERSION=$ENV{CPU_OR_CUDA})
+    add_compile_definitions(POLYGLOT_CUDA_VERSION="12.2.0")

-    if($ENV{CPU_OR_CUDA} STREQUAL "12.2.0")
-      FetchContent_Declare(
-        ctranslate2_fetch
-        URL https://github.com/occ-ai/obs-ai-ctranslate2-dep/releases/download/1.2.0/libctranslate2-windows-4.1.1-Release-cuda12.2.0.zip
-        URL_HASH SHA256=131724d510f9f2829970953a1bc9e4e8fb7b4cbc8218e32270dcfe6172a51558)
-    elseif($ENV{CPU_OR_CUDA} STREQUAL "11.8.0")
-      FetchContent_Declare(
-        ctranslate2_fetch
-        URL https://github.com/occ-ai/obs-ai-ctranslate2-dep/releases/download/1.2.0/libctranslate2-windows-4.1.1-Release-cuda11.8.0.zip
-        URL_HASH SHA256=a120bee82f821df35a4646add30ac18b5c23e4e16b56fa7ba338eeae336e0d81)
-    else()
-      message(FATAL_ERROR "Unsupported CUDA version: $ENV{CPU_OR_CUDA}")
-    endif()
+    FetchContent_Declare(
+      ctranslate2_fetch
+      URL https://github.com/occ-ai/obs-ai-ctranslate2-dep/releases/download/1.2.0/libctranslate2-windows-4.1.1-Release-cuda12.2.0.zip
+      URL_HASH SHA256=131724d510f9f2829970953a1bc9e4e8fb7b4cbc8218e32270dcfe6172a51558)
   endif()

   FetchContent_MakeAvailable(ctranslate2_fetch)
@@ -1,30 +1,23 @@
 include(ExternalProject)
 include(FetchContent)

-if(${CMAKE_BUILD_TYPE} STREQUAL Release OR ${CMAKE_BUILD_TYPE} STREQUAL RelWithDebInfo)
-  set(Whispercpp_BUILD_TYPE Release)
-else()
-  set(Whispercpp_BUILD_TYPE Debug)
-endif()
+set(PREBUILT_WHISPERCPP_VERSION "0.0.5")
+set(PREBUILT_WHISPERCPP_URL_BASE
+    "https://github.com/occ-ai/occ-ai-dep-whispercpp/releases/download/${PREBUILT_WHISPERCPP_VERSION}")

 if(APPLE)
   # check the "MACOS_ARCH" env var to figure out if this is x86 or arm64
   if(NOT DEFINED ENV{MACOS_ARCH})
     message(FATAL_ERROR "The MACOS_ARCH environment variable is not set. Please set it to either `x86_64` or `arm64`")
   endif(NOT DEFINED ENV{MACOS_ARCH})

-  set(WHISPER_CPP_URL
-      "https://github.com/occ-ai/occ-ai-dep-whispercpp/releases/download/0.0.3/whispercpp-macos-$ENV{MACOS_ARCH}-0.0.3.tar.gz"
-  )
   if($ENV{MACOS_ARCH} STREQUAL "x86_64")
-    set(WHISPER_CPP_HASH "94DB35C69E958C8A59F0F331734F4F4F45F4BB13D2F54D3C838457E8590874C4")
+    set(WHISPER_CPP_HASH "da61500b9a37f8630b9e4ed49bc3fe7858729d7a28a2e80bf6cfa4cb97523546")
   elseif($ENV{MACOS_ARCH} STREQUAL "arm64")
-    set(WHISPER_CPP_HASH "ACA1DF8F34F4946B56FEED89B7548C9AD56D1DD89615C96BDEB6E4734A946451")
+    set(WHISPER_CPP_HASH "ef1e2628ba09414c0848d58c471440f38b8393cb5d428edf82b9e78aeeecdd15")
   else()
     message(
       FATAL_ERROR
         "The MACOS_ARCH environment variable is not set to a valid value. Please set it to either `x86_64` or `arm64`")
   endif()
+  set(WHISPER_CPP_URL
+      "${PREBUILT_WHISPERCPP_URL_BASE}/whispercpp-macos-$ENV{MACOS_ARCH}-${PREBUILT_WHISPERCPP_VERSION}.tar.gz")

   FetchContent_Declare(
     whispercpp_fetch
@@ -35,45 +28,46 @@ if(APPLE)
   add_library(Whispercpp::Whisper STATIC IMPORTED)
   set_target_properties(
     Whispercpp::Whisper
-    PROPERTIES
-      IMPORTED_LOCATION
-        ${whispercpp_fetch_SOURCE_DIR}/lib/static/${CMAKE_STATIC_LIBRARY_PREFIX}whisper${CMAKE_STATIC_LIBRARY_SUFFIX})
+    PROPERTIES IMPORTED_LOCATION
+               ${whispercpp_fetch_SOURCE_DIR}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}whisper${CMAKE_STATIC_LIBRARY_SUFFIX})
   set_target_properties(Whispercpp::Whisper PROPERTIES INTERFACE_INCLUDE_DIRECTORIES
                                                         ${whispercpp_fetch_SOURCE_DIR}/include)
+  add_library(Whispercpp::GGML STATIC IMPORTED)
+  set_target_properties(
+    Whispercpp::GGML
+    PROPERTIES IMPORTED_LOCATION
+               ${whispercpp_fetch_SOURCE_DIR}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}ggml${CMAKE_STATIC_LIBRARY_SUFFIX})
+
+  add_library(Whispercpp::CoreML STATIC IMPORTED)
+  set_target_properties(
+    Whispercpp::CoreML
+    PROPERTIES
+      IMPORTED_LOCATION
+        ${whispercpp_fetch_SOURCE_DIR}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}whisper.coreml${CMAKE_STATIC_LIBRARY_SUFFIX})

 elseif(WIN32)
-  if(NOT DEFINED ENV{CPU_OR_CUDA})
+  if(NOT DEFINED ENV{ACCELERATION})
     message(
-      FATAL_ERROR
-        "The CPU_OR_CUDA environment variable is not set. Please set it to either `cpu`, `clblast` or `11.8.0` or `12.2.0`"
+      FATAL_ERROR "The ACCELERATION environment variable is not set. Please set it to either `cpu`, `cuda` or `hipblas`"
     )
-  endif(NOT DEFINED ENV{CPU_OR_CUDA})
-
-  set(ARCH_PREFIX $ENV{CPU_OR_CUDA})
-  if(NOT $ENV{CPU_OR_CUDA} STREQUAL "cpu" AND NOT $ENV{CPU_OR_CUDA} STREQUAL "clblast")
-    set(ARCH_PREFIX "cuda$ENV{CPU_OR_CUDA}")
-    add_compile_definitions("LOCALVOCAL_WITH_CUDA")
-  elseif($ENV{CPU_OR_CUDA} STREQUAL "cpu")
-    add_compile_definitions("LOCALVOCAL_WITH_CPU")
-  else()
-    add_compile_definitions("LOCALVOCAL_WITH_CLBLAST")
-  endif()
+  endif(NOT DEFINED ENV{ACCELERATION})
+
+  set(ARCH_PREFIX $ENV{ACCELERATION})
   set(WHISPER_CPP_URL
-      "https://github.com/occ-ai/occ-ai-dep-whispercpp/releases/download/0.0.3/whispercpp-windows-${ARCH_PREFIX}-0.0.3.zip"
-  )
-  if($ENV{CPU_OR_CUDA} STREQUAL "cpu")
-    set(WHISPER_CPP_HASH "A7243E649E0B6D080AA6D2210DB0AC08C597FA11B88C3068B8A60083AD9E62EF")
-  elseif($ENV{CPU_OR_CUDA} STREQUAL "clblast")
-    set(WHISPER_CPP_HASH "7957AC76A0E6517C95951B3BECCB554CD992E30DAF8716681B40F375590F69F1")
-  elseif($ENV{CPU_OR_CUDA} STREQUAL "12.2.0")
-    set(WHISPER_CPP_HASH "0F6BC1F91C573A867D6972554FC29C3D8EAFD7994FA0FEBBEAFCF945DC8A9F41")
-  elseif($ENV{CPU_OR_CUDA} STREQUAL "11.8.0")
-    set(WHISPER_CPP_HASH "51CB6750ADDF96F38106E4E88212FCC06500999E568E5A4EDC6D42CA6D7CA99D")
+      "${PREBUILT_WHISPERCPP_URL_BASE}/whispercpp-windows-${ARCH_PREFIX}-${PREBUILT_WHISPERCPP_VERSION}.zip")
+  if($ENV{ACCELERATION} STREQUAL "cpu")
+    set(WHISPER_CPP_HASH "2b1cfa0dd764132c4cde60e112a8e6328d28d158d91a8845080baa3e9d2dcdcd")
+    add_compile_definitions("LOCALVOCAL_WITH_CPU")
+  elseif($ENV{ACCELERATION} STREQUAL "cuda")
+    set(WHISPER_CPP_HASH "011e813742fddf0911c4a36d2080d7a388cf78738081297088e7d50023e4f9bc")
+    add_compile_definitions("LOCALVOCAL_WITH_CUDA")
+  elseif($ENV{ACCELERATION} STREQUAL "hipblas")
+    set(WHISPER_CPP_HASH "f2980d6cd3df9cac464378d26d2c19d827bcac995c8d0398a39230a9be936013")
+    add_compile_definitions("LOCALVOCAL_WITH_HIPBLAS")
   else()
     message(
       FATAL_ERROR
-        "The CPU_OR_CUDA environment variable is not set to a valid value. Please set it to either `cpu` or `11.8.0` or `12.2.0`"
+        "The ACCELERATION environment variable is not set to a valid value. Please set it to either `cpu` or `cuda` or `hipblas`"
     )
   endif()

@@ -96,7 +90,7 @@ elseif(WIN32)
   set_target_properties(Whispercpp::Whisper PROPERTIES INTERFACE_INCLUDE_DIRECTORIES
                                                         ${whispercpp_fetch_SOURCE_DIR}/include)

-  if($ENV{CPU_OR_CUDA} STREQUAL "cpu")
+  if($ENV{ACCELERATION} STREQUAL "cpu")
     # add openblas to the link line
     add_library(Whispercpp::OpenBLAS STATIC IMPORTED)
     set_target_properties(Whispercpp::OpenBLAS PROPERTIES IMPORTED_LOCATION
@@ -107,6 +101,11 @@ elseif(WIN32)
   file(GLOB WHISPER_DLLS ${whispercpp_fetch_SOURCE_DIR}/bin/*.dll)
   install(FILES ${WHISPER_DLLS} DESTINATION "obs-plugins/64bit")
 else()
+  if(${CMAKE_BUILD_TYPE} STREQUAL Release OR ${CMAKE_BUILD_TYPE} STREQUAL RelWithDebInfo)
+    set(Whispercpp_BUILD_TYPE Release)
+  else()
+    set(Whispercpp_BUILD_TYPE Debug)
+  endif()
   set(Whispercpp_Build_GIT_TAG "v1.6.2")
   set(WHISPER_EXTRA_CXX_FLAGS "-fPIC")
   set(WHISPER_ADDITIONAL_CMAKE_ARGS -DWHISPER_BLAS=OFF -DWHISPER_CUBLAS=OFF -DWHISPER_OPENBLAS=OFF)
@@ -144,9 +143,10 @@ endif()
 add_library(Whispercpp INTERFACE)
 add_dependencies(Whispercpp Whispercpp_Build)
 target_link_libraries(Whispercpp INTERFACE Whispercpp::Whisper)
-if(WIN32 AND "$ENV{CPU_OR_CUDA}" STREQUAL "cpu")
+if(WIN32 AND "$ENV{ACCELERATION}" STREQUAL "cpu")
   target_link_libraries(Whispercpp INTERFACE Whispercpp::OpenBLAS)
 endif()
 if(APPLE)
-  target_link_libraries(Whispercpp INTERFACE "-framework Accelerate")
+  target_link_libraries(Whispercpp INTERFACE "-framework Accelerate -framework CoreML -framework Metal")
+  target_link_libraries(Whispercpp INTERFACE Whispercpp::GGML Whispercpp::CoreML)
 endif(APPLE)
@@ -84,4 +84,5 @@ advanced_group="Advanced Configuration"
 buffered_output_parameters="Buffered Output Configuration"
 file_output_info="Note: Translation output will be saved to a file in the same directory with the target language added to the name, e.g. 'output_es.srt'."
 partial_transcription="Enable Partial Transcription"
-partial_transcription_info="Partial transcription will increase processing load on your machine to transcribe content in real-time, which may impact performance."
+partial_transcription_info="Partial transcription will increase processing load on your machine to transcribe content in real-time, which may impact performance."
+partial_latency="Latency (ms)"
@@ -13,7 +13,7 @@ Start by cloning the repo.

 Proceed to build the plugin regularly, e.g.
 ```powershell
-obs-localvocal> $env:CPU_OR_CUDA="12.2.0"
+obs-localvocal> $env:ACCELERATION="cuda"
 obs-localvocal> .\.github\scripts\Build-Windows.ps1 -Configuration Release
 ```
@@ -172,7 +172,6 @@ create_context(int sample_rate, int channels, const std::string &whisper_model_p
 	gf->whisper_params.max_len = 0;
 	gf->whisper_params.split_on_word = false;
 	gf->whisper_params.max_tokens = 0;
-	gf->whisper_params.speed_up = false;
 	gf->whisper_params.suppress_blank = true;
 	gf->whisper_params.suppress_non_speech_tokens = true;
 	gf->whisper_params.temperature = 0.0;
@@ -411,8 +411,6 @@ void add_whisper_params_group_properties(obs_properties_t *ppts)
 	// int max_tokens; // max tokens per segment (0 = no limit)
 	obs_properties_add_int_slider(whisper_params_group, "max_tokens", MT_("max_tokens"), 0, 100,
 				      1);
-	// bool speed_up; // speed-up the audio by 2x using Phase Vocoder
-	obs_properties_add_bool(whisper_params_group, "speed_up", MT_("speed_up"));
 	// const char * initial_prompt;
 	obs_properties_add_text(whisper_params_group, "initial_prompt", MT_("initial_prompt"),
 				OBS_TEXT_DEFAULT);
@@ -380,7 +380,6 @@ void transcription_filter_update(void *data, obs_data_t *s)
 	gf->whisper_params.max_len = (int)obs_data_get_int(s, "max_len");
 	gf->whisper_params.split_on_word = obs_data_get_bool(s, "split_on_word");
 	gf->whisper_params.max_tokens = (int)obs_data_get_int(s, "max_tokens");
-	gf->whisper_params.speed_up = obs_data_get_bool(s, "speed_up");
 	gf->whisper_params.suppress_blank = obs_data_get_bool(s, "suppress_blank");
 	gf->whisper_params.suppress_non_speech_tokens =
 		obs_data_get_bool(s, "suppress_non_speech_tokens");
@@ -611,7 +610,6 @@ void transcription_filter_defaults(obs_data_t *s)
 	obs_data_set_default_int(s, "max_len", 0);
 	obs_data_set_default_bool(s, "split_on_word", true);
 	obs_data_set_default_int(s, "max_tokens", 0);
-	obs_data_set_default_bool(s, "speed_up", false);
 	obs_data_set_default_bool(s, "suppress_blank", false);
 	obs_data_set_default_bool(s, "suppress_non_speech_tokens", true);
 	obs_data_set_default_double(s, "temperature", 0.1);
@@ -64,9 +64,9 @@ struct whisper_context *init_whisper_context(const std::string &model_path_in,
 #ifdef LOCALVOCAL_WITH_CUDA
 	cparams.use_gpu = true;
 	obs_log(LOG_INFO, "Using CUDA GPU for inference, device %d", cparams.gpu_device);
-#elif defined(LOCALVOCAL_WITH_CLBLAST)
+#elif defined(LOCALVOCAL_WITH_HIPBLAS)
 	cparams.use_gpu = true;
-	obs_log(LOG_INFO, "Using OpenCL for inference");
+	obs_log(LOG_INFO, "Using hipBLAS for inference");
 #else
 	cparams.use_gpu = false;
 	obs_log(LOG_INFO, "Using CPU for inference");