cleanup of commands
parent e1dd961f3f
commit 6d0592bda5
src/cli.rs | 157
@@ -2,7 +2,7 @@ use anyhow::Result;
 use crate::config::Config;
 use crate::core::{
-    create_client, get_provider_for_model, provider::get_all_models, provider::get_supported_models,
+    create_client, get_provider_for_model, provider::get_all_models,
     ChatClient, Session,
 };
 use crate::utils::{Display, InputHandler, SessionAction};
@@ -136,16 +136,10 @@ impl ChatCLI {
             "/model" => {
                 self.model_switcher().await?;
             }
-            "/models" => {
-                self.list_models();
-            }
-            "/list" => {
-                self.list_sessions()?;
-            }
             "/new" => {
                 self.handle_new_session(&parts)?;
             }
-            "/switch" | "/sessions" => {
+            "/switch" => {
                 self.session_manager().await?;
             }
             "/clear" => {
@@ -156,18 +150,6 @@ impl ChatCLI {
             "/tools" => {
                 self.tools_manager().await?;
             }
-            "/effort" => {
-                self.handle_effort_command(&parts)?;
-            }
-            "/stats" => {
-                self.show_session_stats()?;
-            }
-            "/optimize" => {
-                self.optimize_session()?;
-            }
-            "/cleanup" => {
-                self.cleanup_session()?;
-            }
             _ => {
                 self.display.print_error(&format!("Unknown command: {} (see /help)", parts[0]));
             }
@@ -208,47 +190,6 @@ impl ChatCLI {
         Ok(())
     }
 
-    fn list_models(&self) {
-        self.display.print_info("Supported models:");
-        let supported = get_supported_models();
-
-        for (provider, models) in supported {
-            println!(" {}:", provider.as_str().to_uppercase());
-            for model in models {
-                let marker = if model == self.session.model { " <- current" } else { "" };
-                println!(" {}{}", model, marker);
-            }
-        }
-    }
-
-    fn list_sessions(&self) -> Result<()> {
-        use crate::core::session::Session;
-        let sessions = Session::list_sessions_lazy(true)?;
-
-        if sessions.is_empty() {
-            self.display.print_info("No saved sessions");
-            return Ok(());
-        }
-
-        self.display.print_info("Saved sessions:");
-        for session_info in sessions {
-            let marker = if session_info.name == self.session.name { "★" } else { " " };
-            let date_str = session_info.last_modified.format("%Y-%m-%d %H:%M:%S");
-            let model = session_info.model.as_deref().unwrap_or("unknown");
-            let msg_count = session_info.message_count.unwrap_or(0);
-            let file_size = session_info.file_size.unwrap_or(0);
-            let size_str = if file_size > 1024 {
-                format!("{:.1}KB", file_size as f64 / 1024.0)
-            } else {
-                format!("{}B", file_size)
-            };
-
-            println!(" {} {} ({}msgs, {}, {}, {})",
-                marker, session_info.name, msg_count, model, size_str, date_str);
-        }
-
-        Ok(())
-    }
-
     fn handle_new_session(&mut self, parts: &[&str]) -> Result<()> {
         if parts.len() != 2 {
@@ -450,98 +391,4 @@ impl ChatCLI {
         Ok(())
     }
 
-    fn handle_effort_command(&mut self, parts: &[&str]) -> Result<()> {
-        if parts.len() == 1 {
-            self.display.print_info(&format!("Current reasoning effort: {}", self.session.reasoning_effort));
-            self.display.print_info("Available levels: low, medium, high");
-            if !self.session.model.starts_with("gpt-5") {
-                self.display.print_warning("Reasoning effort is only supported by GPT-5 models");
-            }
-        } else if parts.len() == 2 {
-            let effort = parts[1];
-            if !["low", "medium", "high"].contains(&effort) {
-                self.display.print_error("Invalid effort level. Use: low, medium, or high");
-            } else {
-                if !self.session.model.starts_with("gpt-5") {
-                    self.display.print_warning("Reasoning effort is only supported by GPT-5 models");
-                }
-                self.session.reasoning_effort = effort.to_string();
-                self.display.print_command_result(&format!("Reasoning effort set to {}", effort));
-            }
-        } else {
-            self.display.print_error("Usage: /effort [low|medium|high]");
-        }
-
-        Ok(())
-    }
-
-    fn show_session_stats(&self) -> Result<()> {
-        let stats = self.session.get_stats();
-
-        self.display.print_info("Session Statistics:");
-        println!(" Total messages: {}", stats.total_messages);
-        println!(" User messages: {}", stats.user_messages);
-        println!(" Assistant messages: {}", stats.assistant_messages);
-        println!(" Total characters: {}", stats.total_characters);
-        println!(" Average message length: {}", stats.average_message_length);
-
-        let memory_usage = std::mem::size_of_val(&self.session) +
-            self.session.messages.iter()
-                .map(|m| m.content.len() + m.role.len())
-                .sum::<usize>();
-
-        let memory_str = if memory_usage > 1024 * 1024 {
-            format!("{:.1} MB", memory_usage as f64 / (1024.0 * 1024.0))
-        } else if memory_usage > 1024 {
-            format!("{:.1} KB", memory_usage as f64 / 1024.0)
-        } else {
-            format!("{} bytes", memory_usage)
-        };
-
-        println!(" Estimated memory usage: {}", memory_str);
-
-        if self.session.needs_cleanup() {
-            self.display.print_warning("Session is large and may benefit from cleanup (/cleanup)");
-        }
-
-        Ok(())
-    }
-
-    fn optimize_session(&mut self) -> Result<()> {
-        let stats_before = self.session.get_stats();
-
-        self.session.optimize_for_memory();
-        self.session.save()?;
-
-        let stats_after = self.session.get_stats();
-        let chars_saved = stats_before.total_characters.saturating_sub(stats_after.total_characters);
-
-        self.display.print_command_result(&format!(
-            "Session optimized: {} characters cleaned up",
-            chars_saved
-        ));
-
-        Ok(())
-    }
-
-    fn cleanup_session(&mut self) -> Result<()> {
-        let stats_before = self.session.get_stats();
-
-        if self.input.confirm("This will remove older messages to reduce memory usage. Continue?")? {
-            self.session.cleanup_for_memory();
-            self.session.save()?;
-
-            let stats_after = self.session.get_stats();
-            let messages_removed = stats_before.total_messages.saturating_sub(stats_after.total_messages);
-
-            self.display.print_command_result(&format!(
-                "Session cleaned up: {} messages removed, keeping most recent conversations",
-                messages_removed
-            ));
-        } else {
-            self.display.print_info("Cleanup cancelled");
-        }
-
-        Ok(())
-    }
 }
@@ -71,17 +71,10 @@ Available Commands:
     /help - Show this help message
     /exit - Exit the CLI
     /model - Interactive model switcher
-    /models - List all supported models
-    /list - List all saved sessions
     /new <session_name> - Create a new session
     /switch - Interactive session manager (switch/delete)
-    /sessions - Alias for /switch
     /clear - Clear current conversation
     /tools - Interactive tool and feature manager
-    /effort [low|medium|high] - Set reasoning effort level (GPT-5 only)
-    /stats - Show current session statistics
-    /optimize - Optimize session memory usage
-    /cleanup - Remove older messages to free memory
 
 Environment Variables:
     OPENAI_API_KEY - Required for OpenAI models