cleaned up presentation

leach 2025-08-19 23:28:17 -04:00
parent 10b79eaf79
commit 18864201d4
4 changed files with 148 additions and 12 deletions

View File

@@ -67,6 +67,7 @@ impl ChatCLI {
if let Err(e) = self.handle_user_message(line).await {
self.display.print_error(&format!("Error: {}", e));
}
println!(); // Add padding before next prompt
}
}
None => {
@@ -91,6 +92,8 @@ impl ChatCLI {
let enable_web_search = self.session.enable_web_search;
let enable_reasoning_summary = self.session.enable_reasoning_summary;
let reasoning_effort = self.session.reasoning_effort.clone();
let enable_extended_thinking = self.session.enable_extended_thinking;
let thinking_budget_tokens = self.session.thinking_budget_tokens;
// Check if we should use streaming before getting client
let should_use_streaming = {
@@ -99,7 +102,8 @@ impl ChatCLI {
};
if should_use_streaming {
print!("{} ", console::style("🤖").magenta());
println!(); // Add padding before AI response
print!("{}> ", console::style("🤖").magenta());
use std::io::{self, Write};
io::stdout().flush().ok();
@@ -121,6 +125,8 @@ impl ChatCLI {
enable_web_search,
enable_reasoning_summary,
&reasoning_effort,
enable_extended_thinking,
thinking_budget_tokens,
stream_callback,
)
.await
@@ -148,6 +154,8 @@ impl ChatCLI {
enable_web_search,
enable_reasoning_summary,
&reasoning_effort,
enable_extended_thinking,
thinking_budget_tokens,
)
.await
{
@@ -379,15 +387,17 @@ impl ChatCLI {
let web_status = if self.session.enable_web_search { "✓ enabled" } else { "✗ disabled" };
let reasoning_status = if self.session.enable_reasoning_summary { "✓ enabled" } else { "✗ disabled" };
let extended_thinking_status = if self.session.enable_extended_thinking { "✓ enabled" } else { "✗ disabled" };
println!(" Web Search: {}", web_status);
println!(" Reasoning Summaries: {}", reasoning_status);
println!(" Reasoning Effort: {}", self.session.reasoning_effort);
println!(" Extended Thinking: {}", extended_thinking_status);
println!(" Thinking Budget: {} tokens", self.session.thinking_budget_tokens);
// Check model compatibility
let model = self.session.model.clone();
let provider = get_provider_for_model(&model);
let web_enabled = self.session.enable_web_search;
let reasoning_enabled = self.session.enable_reasoning_summary;
// Show compatibility warnings based on provider
@@ -396,10 +406,16 @@ impl ChatCLI {
if reasoning_enabled {
self.display.print_warning("Reasoning summaries are not supported by Anthropic models");
}
if self.session.enable_extended_thinking {
// Extended thinking is supported by Anthropic models
}
// Web search is now supported by Anthropic models
}
crate::core::provider::Provider::OpenAI => {
// OpenAI models generally support these features
if self.session.enable_extended_thinking {
self.display.print_warning("Extended thinking is not supported by OpenAI models");
}
// OpenAI models generally support other features
}
}
@@ -408,6 +424,8 @@ impl ChatCLI {
"Toggle Web Search",
"Toggle Reasoning Summaries",
"Set Reasoning Effort",
"Toggle Extended Thinking",
"Set Thinking Budget",
"Done"
];
@@ -443,6 +461,47 @@ impl ChatCLI {
}
}
}
Some("Toggle Extended Thinking") => {
self.session.enable_extended_thinking = !self.session.enable_extended_thinking;
let state = if self.session.enable_extended_thinking { "enabled" } else { "disabled" };
self.display.print_command_result(&format!("Extended thinking {}", state));
let provider = get_provider_for_model(&self.session.model);
match provider {
crate::core::provider::Provider::OpenAI => {
self.display.print_warning("Extended thinking is not supported by OpenAI models");
}
crate::core::provider::Provider::Anthropic => {
// Supported
}
}
}
Some("Set Thinking Budget") => {
let budget_options = vec!["1024", "2500", "5000", "10000", "16000"];
let current_budget = self.session.thinking_budget_tokens.to_string();
if let Some(budget_str) = self.input.select_from_list(
"Select thinking budget (tokens):",
&budget_options,
Some(&current_budget),
)? {
if let Ok(budget) = budget_str.parse::<u32>() {
self.session.thinking_budget_tokens = budget;
self.display.print_command_result(&format!("Thinking budget set to {} tokens", budget));
let provider = get_provider_for_model(&self.session.model);
match provider {
crate::core::provider::Provider::OpenAI => {
self.display.print_warning("Extended thinking is not supported by OpenAI models");
}
crate::core::provider::Provider::Anthropic => {
if budget < 1024 {
self.display.print_warning("Minimum thinking budget is 1024 tokens for Anthropic models");
}
}
}
}
}
}
Some("Done") | None => {
break;
}

View File

@@ -27,13 +27,15 @@ impl ChatClient {
enable_web_search: bool,
enable_reasoning_summary: bool,
reasoning_effort: &str,
enable_extended_thinking: bool,
thinking_budget_tokens: u32,
) -> Result<String> {
match self {
ChatClient::OpenAI(client) => {
client.chat_completion(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort).await
}
ChatClient::Anthropic(client) => {
client.chat_completion(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort).await
client.chat_completion(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort, enable_extended_thinking, thinking_budget_tokens).await
}
}
}
@@ -45,6 +47,8 @@ impl ChatClient {
enable_web_search: bool,
enable_reasoning_summary: bool,
reasoning_effort: &str,
enable_extended_thinking: bool,
thinking_budget_tokens: u32,
stream_callback: StreamCallback,
) -> Result<String> {
match self {
@@ -52,7 +56,7 @@ impl ChatClient {
client.chat_completion_stream(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort, stream_callback).await
}
ChatClient::Anthropic(client) => {
client.chat_completion_stream(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort, stream_callback).await
client.chat_completion_stream(model, messages, enable_web_search, enable_reasoning_summary, reasoning_effort, enable_extended_thinking, thinking_budget_tokens, stream_callback).await
}
}
}
@@ -900,15 +904,26 @@ impl AnthropicClient {
_enable_web_search: bool,
_enable_reasoning_summary: bool,
_reasoning_effort: &str,
enable_extended_thinking: bool,
thinking_budget_tokens: u32,
) -> Result<String> {
let url = format!("{}/messages", self.base_url);
let (system_prompt, user_messages) = Self::convert_messages(messages);
let config = crate::config::Config::load().unwrap_or_default();
// Calculate max_tokens - must be greater than thinking budget when extended thinking is enabled
let max_tokens = if enable_extended_thinking {
// Ensure max_tokens is at least thinking_budget + 4096 for the actual response
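// e.g. with the default 5000-token budget, max_tokens becomes at least 9096 (5000 + 4096)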
std::cmp::max(config.limits.max_tokens_anthropic, thinking_budget_tokens + 4096)
} else {
config.limits.max_tokens_anthropic
};
let mut payload = json!({
"model": model,
"max_tokens": config.limits.max_tokens_anthropic,
"max_tokens": max_tokens,
"messages": user_messages
});
@@ -924,6 +939,14 @@ impl AnthropicClient {
}]);
}
// Add extended thinking if enabled
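// The budget must stay below max_tokens (guaranteed by the calculation above); budgets under 1024 tokens are flagged in the CLI settings menu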
if enable_extended_thinking {
payload["thinking"] = json!({
"type": "enabled",
"budget_tokens": thinking_budget_tokens
});
}
let response = self
.client
.post(&url)
@@ -961,6 +984,8 @@ impl AnthropicClient {
_enable_web_search: bool,
_enable_reasoning_summary: bool,
_reasoning_effort: &str,
enable_extended_thinking: bool,
thinking_budget_tokens: u32,
stream_callback: StreamCallback,
) -> Result<String> {
let url = format!("{}/messages", self.base_url);
@@ -968,9 +993,18 @@ impl AnthropicClient {
let (system_prompt, user_messages) = Self::convert_messages(messages);
let config = crate::config::Config::load().unwrap_or_default();
// Calculate max_tokens - must be greater than thinking budget when extended thinking is enabled
let max_tokens = if enable_extended_thinking {
// Ensure max_tokens is at least thinking_budget + 4096 for the actual response
std::cmp::max(config.limits.max_tokens_anthropic, thinking_budget_tokens + 4096)
} else {
config.limits.max_tokens_anthropic
};
let mut payload = json!({
"model": model,
"max_tokens": config.limits.max_tokens_anthropic,
"max_tokens": max_tokens,
"messages": user_messages,
"stream": true
});
@@ -987,6 +1021,14 @@ impl AnthropicClient {
}]);
}
// Add extended thinking if enabled
if enable_extended_thinking {
payload["thinking"] = json!({
"type": "enabled",
"budget_tokens": thinking_budget_tokens
});
}
let response = self
.client
.post(&url)

View File

@@ -44,6 +44,10 @@ pub struct SessionData {
pub enable_reasoning_summary: bool,
#[serde(default = "default_reasoning_effort")]
pub reasoning_effort: String,
#[serde(default = "default_enable_extended_thinking")]
pub enable_extended_thinking: bool,
#[serde(default = "default_thinking_budget")]
pub thinking_budget_tokens: u32,
pub updated_at: DateTime<Utc>,
}
@@ -51,6 +55,14 @@ fn default_reasoning_effort() -> String {
"medium".to_string()
}
fn default_enable_extended_thinking() -> bool {
false
}
fn default_thinking_budget() -> u32 {
5000
}
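// Session files saved before these fields existed deserialize with the defaults above (extended thinking off, 5000-token budget)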
#[derive(Debug, Clone)]
pub struct Session {
pub name: String,
@@ -59,6 +71,8 @@ pub struct Session {
pub enable_web_search: bool,
pub enable_reasoning_summary: bool,
pub reasoning_effort: String,
pub enable_extended_thinking: bool,
pub thinking_budget_tokens: u32,
}
impl Session {
@@ -70,6 +84,8 @@ impl Session {
enable_web_search: true,
enable_reasoning_summary: false,
reasoning_effort: "medium".to_string(),
enable_extended_thinking: false,
thinking_budget_tokens: 5000,
};
// Add system prompt as first message
@@ -106,6 +122,8 @@ impl Session {
enable_web_search: self.enable_web_search,
enable_reasoning_summary: self.enable_reasoning_summary,
reasoning_effort: self.reasoning_effort.clone(),
enable_extended_thinking: self.enable_extended_thinking,
thinking_budget_tokens: self.thinking_budget_tokens,
updated_at: Utc::now(),
};
@@ -144,6 +162,8 @@ impl Session {
enable_web_search: data.enable_web_search,
enable_reasoning_summary: data.enable_reasoning_summary,
reasoning_effort: data.reasoning_effort,
enable_extended_thinking: data.enable_extended_thinking,
thinking_budget_tokens: data.thinking_budget_tokens,
};
// Ensure system prompt is present
@@ -386,6 +406,8 @@ impl Session {
enable_web_search: self.enable_web_search,
enable_reasoning_summary: self.enable_reasoning_summary,
reasoning_effort: self.reasoning_effort.clone(),
enable_extended_thinking: self.enable_extended_thinking,
thinking_budget_tokens: self.thinking_budget_tokens,
};
// Save the new session
@@ -415,7 +437,9 @@ impl Session {
content.push_str(&format!("**Model:** {}\n", self.model));
content.push_str(&format!("**Web Search:** {}\n", if self.enable_web_search { "Enabled" } else { "Disabled" }));
content.push_str(&format!("**Reasoning Summary:** {}\n", if self.enable_reasoning_summary { "Enabled" } else { "Disabled" }));
content.push_str(&format!("**Reasoning Effort:** {}\n\n", self.reasoning_effort));
content.push_str(&format!("**Reasoning Effort:** {}\n", self.reasoning_effort));
content.push_str(&format!("**Extended Thinking:** {}\n", if self.enable_extended_thinking { "Enabled" } else { "Disabled" }));
content.push_str(&format!("**Thinking Budget:** {} tokens\n\n", self.thinking_budget_tokens));
content.push_str("---\n\n");
// Messages (skip system prompt)
@@ -445,7 +469,9 @@ impl Session {
"settings": {
"enable_web_search": self.enable_web_search,
"enable_reasoning_summary": self.enable_reasoning_summary,
"reasoning_effort": self.reasoning_effort
"reasoning_effort": self.reasoning_effort,
"enable_extended_thinking": self.enable_extended_thinking,
"thinking_budget_tokens": self.thinking_budget_tokens
},
"messages": self.messages,
"exported_at": chrono::Utc::now().to_rfc3339()
@@ -463,7 +489,9 @@ impl Session {
content.push_str(&format!("Model: {}\n", self.model));
content.push_str(&format!("Web Search: {}\n", if self.enable_web_search { "Enabled" } else { "Disabled" }));
content.push_str(&format!("Reasoning Summary: {}\n", if self.enable_reasoning_summary { "Enabled" } else { "Disabled" }));
content.push_str(&format!("Reasoning Effort: {}\n\n", self.reasoning_effort));
content.push_str(&format!("Reasoning Effort: {}\n", self.reasoning_effort));
content.push_str(&format!("Extended Thinking: {}\n", if self.enable_extended_thinking { "Enabled" } else { "Disabled" }));
content.push_str(&format!("Thinking Budget: {} tokens\n\n", self.thinking_budget_tokens));
content.push_str("===============================================\n\n");
// Messages (skip system prompt)

View File

@@ -46,11 +46,13 @@ impl Display {
#[allow(dead_code)]
pub fn print_user_input(&self, content: &str) {
println!("{} {}", style("👤").cyan(), content);
println!("{}> {}", style("👤").cyan(), content);
println!(); // Add single line of padding after user input
}
pub fn print_assistant_response(&self, content: &str) {
print!("{} ", style("🤖").magenta());
println!(); // Add padding before AI response
print!("{}> ", style("🤖").magenta());
self.print_formatted_content_with_pagination(content);
}
@@ -138,6 +140,11 @@ impl Display {
// Print remaining text
print!("{}", &text[last_end..]);
// Ensure content ends with a newline if it doesn't already
if !text.ends_with('\n') {
println!();
}
}
fn print_code_block(&self, code: &str, language: &str) {