mirror of
https://github.com/TabbyML/tabby
synced 2024-11-22 00:08:06 +00:00
chore(llama-cpp-server): add timeout and retry delay for llama-server health check (#3369)
This commit is contained in:
parent
44d65fb872
commit
27f45f9213
@@ -1,4 +1,4 @@
|
||||
use std::{env::var, net::TcpListener, process::Stdio};
|
||||
use std::{env::var, net::TcpListener, process::Stdio, time::Duration};
|
||||
|
||||
use tokio::{io::AsyncBufReadExt, task::JoinHandle};
|
||||
use tracing::{debug, warn};
|
||||
@@ -126,7 +126,14 @@ impl LlamaCppSupervisor {
|
||||
debug!("Waiting for llama-server <{}> to start...", self.name);
|
||||
let client = reqwest::Client::builder().no_proxy().build().unwrap();
|
||||
loop {
|
||||
let Ok(resp) = client.get(api_endpoint(self.port) + "/health").send().await else {
|
||||
let Ok(resp) = client
|
||||
.get(api_endpoint(self.port) + "/health")
|
||||
.timeout(Duration::from_secs(1))
|
||||
.send()
|
||||
.await
|
||||
else {
|
||||
debug!("llama-server <{}> not ready yet, retrying...", self.name);
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
continue;
|
||||
};
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user