Use session refs for completions
parent fc99a7843d
commit 05e0fa88b3

src/cli.rs (56 lines changed)
@@ -86,21 +86,12 @@ impl ChatCLI {
         self.session.add_user_message(message.to_string());
         self.session.save()?;
 
-        // Clone data needed for the API call before getting mutable client reference
-        let model = self.session.model.clone();
-        let messages = self.session.messages.clone();
-        let enable_web_search = self.session.enable_web_search;
-        let enable_reasoning_summary = self.session.enable_reasoning_summary;
-        let reasoning_effort = self.session.reasoning_effort.clone();
-        let enable_extended_thinking = self.session.enable_extended_thinking;
-        let thinking_budget_tokens = self.session.thinking_budget_tokens;
-
         // Check if we should use streaming before getting client
         let should_use_streaming = {
            let client = self.get_client()?;
            client.supports_streaming()
         };
 
         if should_use_streaming {
            println!(); // Add padding before AI response
            print!("{}> ", console::style("🤖").magenta());
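Note on the removed clones: the most likely reason they existed is that `get_client()` takes `&mut self` (e.g. to lazily construct the client), which cannot be called while references into `self.session` are still live. Now that `ChatClient` derives `Clone` (see the later hunks), the handle can be cloned out of `self` first and the completion call can borrow the session fields directly. A minimal sketch of that pattern, using stand-in types rather than this repo's actual signatures:

    // Sketch only: hypothetical stand-ins to show why cloning the client
    // handle lets the call site borrow session fields instead of cloning them.
    #[derive(Clone)]
    struct ChatClient; // assume Clone is cheap (e.g. an Arc-backed HTTP handle)

    impl ChatClient {
        fn chat(&self, model: &str, messages: &[String]) -> String {
            format!("{model}: {} message(s)", messages.len())
        }
    }

    struct Session {
        model: String,
        messages: Vec<String>,
    }

    struct ChatCLI {
        session: Session,
        client: Option<ChatClient>,
    }

    impl ChatCLI {
        // Assumed shape: lazy init forces &mut self, which is what made the
        // old code copy every session field before touching the client.
        fn get_client(&mut self) -> &ChatClient {
            self.client.get_or_insert(ChatClient)
        }

        fn send(&mut self) -> String {
            // Clone the handle first; the &mut self borrow ends here...
            let client = self.get_client().clone();
            // ...so this call can take references straight into self.session.
            client.chat(&self.session.model, &self.session.messages)
        }
    }

    fn main() {
        let mut cli = ChatCLI {
            session: Session { model: "example-model".into(), messages: vec!["hi".into()] },
            client: None,
        };
        println!("{}", cli.send());
    }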
@@ -117,20 +108,20 @@ impl ChatCLI {
                }) as StreamCallback
            };
 
-            let client = self.get_client()?;
-            match client
+            let client = self.get_client()?.clone();
+            let response = client
                .chat_completion_stream(
-                    &model,
-                    &messages,
-                    enable_web_search,
-                    enable_reasoning_summary,
-                    &reasoning_effort,
-                    enable_extended_thinking,
-                    thinking_budget_tokens,
+                    &self.session.model,
+                    &self.session.messages,
+                    self.session.enable_web_search,
+                    self.session.enable_reasoning_summary,
+                    &self.session.reasoning_effort,
+                    self.session.enable_extended_thinking,
+                    self.session.thinking_budget_tokens,
                    stream_callback,
                )
-                .await
-            {
+                .await;
+            match response {
                Ok(response) => {
                    println!(); // Add newline after streaming
                    self.session.add_assistant_message(response);
@@ -145,20 +136,19 @@ impl ChatCLI {
        } else {
            // Fallback to non-streaming
            let spinner = self.display.show_spinner("Thinking");
-            let client = self.get_client()?;
-
-            match client
+            let client = self.get_client()?.clone();
+            let response = client
                .chat_completion(
-                    &model,
-                    &messages,
-                    enable_web_search,
-                    enable_reasoning_summary,
-                    &reasoning_effort,
-                    enable_extended_thinking,
-                    thinking_budget_tokens,
+                    &self.session.model,
+                    &self.session.messages,
+                    self.session.enable_web_search,
+                    self.session.enable_reasoning_summary,
+                    &self.session.reasoning_effort,
+                    self.session.enable_extended_thinking,
+                    self.session.thinking_budget_tokens,
                )
-                .await
-            {
+                .await;
+            match response {
                Ok(response) => {
                    spinner.finish("Done");
                    self.display.print_assistant_response(&response);
@@ -13,7 +13,7 @@ use super::{provider::Provider, session::Message};
 
 pub type StreamCallback = Box<dyn Fn(&str) -> Pin<Box<dyn Future<Output = ()> + Send>> + Send + Sync>;
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub enum ChatClient {
    OpenAI(OpenAIClient),
    Anthropic(AnthropicClient),
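For context, `StreamCallback` above is a boxed closure that returns a boxed, pinned future, so the client can await an async handler for every streamed chunk. A hedged sketch of building and driving one (everything outside the alias itself, including the Tokio runtime, is an assumption for illustration):

    use std::future::Future;
    use std::pin::Pin;

    // Same alias as in the diff context above.
    pub type StreamCallback =
        Box<dyn Fn(&str) -> Pin<Box<dyn Future<Output = ()> + Send>> + Send + Sync>;

    // Illustrative constructor: print each streamed chunk as it arrives.
    fn print_chunks() -> StreamCallback {
        Box::new(|chunk: &str| {
            let chunk = chunk.to_owned(); // own the data before moving it into the future
            Box::pin(async move {
                print!("{chunk}");
            }) as Pin<Box<dyn Future<Output = ()> + Send>>
        })
    }

    #[tokio::main] // assumes a Tokio runtime; any async executor works
    async fn main() {
        let cb: StreamCallback = print_chunks();
        // A client would invoke the callback once per streamed delta:
        cb("hello ").await;
        cb("world\n").await;
    }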
@@ -80,14 +80,14 @@ impl ChatClient {
    }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct OpenAIClient {
    client: Client,
    api_key: String,
    base_url: String,
 }
 
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct AnthropicClient {
    client: Client,
    api_key: String,
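Deriving `Clone` on these types only works because every field is itself `Clone`; if `client` here is a `reqwest::Client` (not visible in this extract), the derive is also cheap, since that client is an `Arc`-backed handle designed to be cloned and reused rather than rebuilt. A small illustrative sketch with stand-in field types:

    use std::sync::Arc;

    // Stand-in for an Arc-backed HTTP client such as reqwest::Client.
    #[derive(Debug, Clone)]
    struct HttpClient(Arc<()>);

    #[derive(Debug, Clone)]
    struct OpenAIClient {
        client: HttpClient,
        api_key: String,
        base_url: String,
    }

    #[derive(Debug, Clone)]
    struct AnthropicClient {
        client: HttpClient,
        api_key: String,
    }

    // The enum's derive only compiles because both payload structs are Clone.
    #[derive(Debug, Clone)]
    enum ChatClient {
        OpenAI(OpenAIClient),
        Anthropic(AnthropicClient),
    }

    fn main() {
        let a = ChatClient::OpenAI(OpenAIClient {
            client: HttpClient(Arc::new(())),
            api_key: "example-key".into(),       // placeholder values
            base_url: "https://example.invalid".into(),
        });
        // Cheap: a refcount bump on the handle plus two short String copies.
        let b = a.clone();
        println!("{:?}\n{:?}", a, b);
    }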