fix: exit the process when an error happens in the background inference loop (#713)

Meng Zhang, 2023-11-06 12:41:49 -08:00, committed by GitHub
parent c5cfba403f
commit 9344c32b31
4 changed files with 22 additions and 2 deletions
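For context on why the fix exits instead of panicking: the inference loop runs as a background task, and a panic inside a spawned async task unwinds only that task, so the process would stay alive with a dead inference loop instead of failing fast. A minimal sketch of that failure mode, assuming the loop is spawned with tokio::spawn (names hypothetical):

use std::time::Duration;

#[tokio::main]
async fn main() {
    // A panic inside tokio::spawn unwinds only this task; the runtime
    // catches it, and the surrounding process keeps running.
    tokio::spawn(async {
        panic!("simulated inference error");
    });

    tokio::time::sleep(Duration::from_secs(1)).await;
    // Still reached: the server would keep accepting requests even
    // though the background loop is gone.
    println!("process still alive after the background panic");
}

Calling std::process::exit(1) instead takes down the whole process, so whatever supervises it can observe the failure and restart.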

Cargo.lock (generated)

@@ -2138,6 +2138,7 @@ dependencies = [
  "futures",
  "tabby-inference",
  "tokio",
+ "tracing",
 ]

 [[package]]

Cargo.toml

@@ -18,3 +18,4 @@ tabby-inference = { path = "../tabby-inference" }
 derive_builder = { workspace = true }
 futures.workspace = true
 async-stream.workspace = true
+tracing.workspace = true

src/lib.rs

@@ -1,3 +1,5 @@
+mod utils;
+
 use std::{collections::HashMap, sync::Arc};

 use async_stream::stream;

@@ -77,7 +79,7 @@ impl AsyncTextInferenceEngine {
         let result = match engine.as_mut().unwrap().step() {
             Ok(result) => result,
             Err(err) => {
-                panic!("Failed to step: {}", err)
+                fatal!("Failed to step: {}", err)
             }
         };

@@ -161,7 +163,7 @@ impl LlamaTextGeneration {
     pub fn create(options: LlamaTextGenerationOptions) -> Self {
         let engine = create_engine(options.use_gpu, &options.model_path);
         if engine.is_null() {
-            panic!("Unable to load model: {}", options.model_path);
+            fatal!("Unable to load model: {}", options.model_path);
         }
         let ret = LlamaTextGeneration {
             engine: Arc::new(AsyncTextInferenceEngine::create(engine)),

src/utils.rs (new file)

@@ -0,0 +1,16 @@
+#[macro_export]
+macro_rules! fatal {
+    ($msg:expr) => {
+        ({
+            tracing::error!($msg);
+            std::process::exit(1);
+        })
+    };
+    ($fmt:expr, $($arg:tt)*) => {
+        ({
+            tracing::error!($fmt, $($arg)*);
+            std::process::exit(1);
+        })
+    };
+}
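The macro body ends in std::process::exit(1), whose return type is the never type, so the block diverges and a fatal! invocation type-checks anywhere panic! would, including as the value of a match arm, which is how the lib.rs hunks use it. A small usage sketch under that assumption (function and arguments hypothetical):

// fatal! diverges like panic!, so it works as a match-arm value while
// logging through tracing and terminating the whole process.
fn parse_port(s: &str) -> u16 {
    match s.parse::<u16>() {
        Ok(port) => port,
        Err(err) => fatal!("Invalid port {:?}: {}", s, err),
    }
}

Unlike panic!, this path sends the message to the configured tracing subscriber and exits with a nonzero status even from inside a background task, where a panic would otherwise be swallowed by the runtime.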