This commit is contained in:
leach 2025-08-15 16:27:48 -04:00
parent 7d237f692c
commit 54a456581d
3 changed files with 274 additions and 8 deletions

View File

@ -165,6 +165,15 @@ impl ChatCLI {
"/effort" => {
self.handle_effort_command(&parts)?;
}
"/stats" => {
self.show_session_stats()?;
}
"/optimize" => {
self.optimize_session()?;
}
"/cleanup" => {
self.cleanup_session()?;
}
_ => {
self.display.print_error(&format!("Unknown command: {} (see /help)", parts[0]));
}
@ -226,7 +235,8 @@ impl ChatCLI {
}
fn list_sessions(&self) -> Result<()> {
let sessions = Session::list_sessions()?;
use crate::core::session::Session;
let sessions = Session::list_sessions_lazy(true)?;
if sessions.is_empty() {
self.display.print_info("No saved sessions");
@ -234,10 +244,20 @@ impl ChatCLI {
}
self.display.print_info("Saved sessions:");
for (name, updated) in sessions {
let marker = if name == self.session.name { "" } else { " " };
let date_str = updated.format("%Y-%m-%d %H:%M:%S");
println!(" {} {} (updated: {})", marker, name, date_str);
for session_info in sessions {
let marker = if session_info.name == self.session.name { "" } else { " " };
let date_str = session_info.last_modified.format("%Y-%m-%d %H:%M:%S");
let model = session_info.model.as_deref().unwrap_or("unknown");
let msg_count = session_info.message_count.unwrap_or(0);
let file_size = session_info.file_size.unwrap_or(0);
let size_str = if file_size > 1024 {
format!("{:.1}KB", file_size as f64 / 1024.0)
} else {
format!("{}B", file_size)
};
println!(" {} {} ({}msgs, {}, {}, {})",
marker, session_info.name, msg_count, model, size_str, date_str);
}
Ok(())
@ -429,4 +449,74 @@ impl ChatCLI {
Ok(())
}
/// Handler for `/stats`: prints message counts, character totals, and a
/// rough estimate of the session's in-memory footprint, then warns if the
/// session looks large enough to benefit from `/cleanup`.
fn show_session_stats(&self) -> Result<()> {
    let stats = self.session.get_stats();
    self.display.print_info("Session Statistics:");
    println!(" Total messages: {}", stats.total_messages);
    println!(" User messages: {}", stats.user_messages);
    println!(" Assistant messages: {}", stats.assistant_messages);
    println!(" Total characters: {}", stats.total_characters);
    println!(" Average message length: {}", stats.average_message_length);

    // Rough estimate only: the session struct itself plus the text payload
    // (content + role) of every stored message.
    let payload_bytes: usize = self
        .session
        .messages
        .iter()
        .map(|msg| msg.content.len() + msg.role.len())
        .sum();
    let memory_usage = std::mem::size_of_val(&self.session) + payload_bytes;

    const KIB: usize = 1024;
    let memory_str = if memory_usage > KIB * KIB {
        format!("{:.1} MB", memory_usage as f64 / (1024.0 * 1024.0))
    } else if memory_usage > KIB {
        format!("{:.1} KB", memory_usage as f64 / 1024.0)
    } else {
        format!("{} bytes", memory_usage)
    };
    println!(" Estimated memory usage: {}", memory_str);

    if self.session.needs_cleanup() {
        self.display
            .print_warning("Session is large and may benefit from cleanup (/cleanup)");
    }
    Ok(())
}
/// Handler for `/optimize`: compacts the in-memory session, persists it,
/// and reports how many characters the optimization removed.
fn optimize_session(&mut self) -> Result<()> {
    let chars_before = self.session.get_stats().total_characters;
    self.session.optimize_for_memory();
    self.session.save()?;
    let chars_after = self.session.get_stats().total_characters;
    // saturating_sub guards against the (unexpected) case where the
    // character count grew instead of shrinking.
    let summary = format!(
        "Session optimized: {} characters cleaned up",
        chars_before.saturating_sub(chars_after)
    );
    self.display.print_command_result(&summary);
    Ok(())
}
/// Handler for `/cleanup`: after user confirmation, drops older messages,
/// saves the trimmed session, and reports how many messages were removed.
fn cleanup_session(&mut self) -> Result<()> {
    let messages_before = self.session.get_stats().total_messages;
    // Destructive operation — bail out early unless the user confirms.
    if !self.input.confirm("This will remove older messages to reduce memory usage. Continue?")? {
        self.display.print_info("Cleanup cancelled");
        return Ok(());
    }
    self.session.cleanup_for_memory();
    self.session.save()?;
    let messages_after = self.session.get_stats().total_messages;
    self.display.print_command_result(&format!(
        "Session cleaned up: {} messages removed, keeping most recent conversations",
        messages_before.saturating_sub(messages_after)
    ));
    Ok(())
}
}

View File

@ -18,6 +18,24 @@ pub struct Message {
pub content: String,
}
/// Aggregate statistics over a session's message history, as computed by
/// `Session::get_stats`.
#[derive(Debug, Clone)]
pub struct ConversationStats {
/// Total number of stored messages, across all roles.
pub total_messages: usize,
/// Number of messages whose role is "user".
pub user_messages: usize,
/// Number of messages whose role is "assistant".
pub assistant_messages: usize,
/// Sum of message content lengths (bytes, via `str::len`).
pub total_characters: usize,
/// Integer mean: `total_characters / total_messages`, or 0 when empty.
pub average_message_length: usize,
}
/// Lightweight summary of a saved session file, produced by
/// `Session::list_sessions_lazy`.
#[derive(Debug, Clone)]
pub struct SessionInfo {
/// Session name (the file stem of the session file).
pub name: String,
/// Filesystem modification time of the session file, converted to UTC.
pub last_modified: DateTime<Utc>,
/// Model recorded in the session data. `None` unless a detailed listing
/// was requested and the file parsed successfully.
pub model: Option<String>,
/// Number of stored messages; populated only for detailed listings.
pub message_count: Option<usize>,
/// Size of the session file in bytes, from filesystem metadata.
pub file_size: Option<u64>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionData {
pub model: String,
@ -144,6 +162,7 @@ impl Session {
role: "user".to_string(),
content,
});
self.truncate_history_if_needed();
}
pub fn add_assistant_message(&mut self, content: String) {
@ -151,6 +170,56 @@ impl Session {
role: "assistant".to_string(),
content,
});
self.truncate_history_if_needed();
}
/// Truncates conversation history to stay within configured limits.
///
/// The first message (assumed to be the system prompt) is always kept,
/// so the post-truncation size is `max_conversation_history + 1`.
fn truncate_history_if_needed(&mut self) {
    let max_history = Config::load()
        .unwrap_or_default()
        .limits
        .max_conversation_history;
    // Nothing to do while we're within the cap (prompt + max_history).
    if self.messages.len() <= max_history + 1 {
        return;
    }
    let tail_start = self.messages.len() - max_history;
    let mut kept = Vec::with_capacity(max_history + 1);
    kept.push(self.messages[0].clone());
    kept.extend_from_slice(&self.messages[tail_start..]);
    self.messages = kept;
}
/// Truncates individual messages that exceed reasonable length.
///
/// Messages longer than `MAX_MESSAGE_LENGTH` bytes are cut down and a
/// truncation notice is appended in place of the removed tail.
pub fn truncate_long_messages(&mut self) {
    const MAX_MESSAGE_LENGTH: usize = 10000; // 10k bytes per message
    const TRUNCATION_NOTICE: &str = "\n\n[Message truncated for performance...]";
    for message in &mut self.messages {
        if message.content.len() > MAX_MESSAGE_LENGTH {
            // `String::truncate` panics if the cut index falls inside a
            // multi-byte UTF-8 character, so back up to the nearest
            // character boundary before truncating.
            let mut cut = MAX_MESSAGE_LENGTH - TRUNCATION_NOTICE.len();
            while !message.content.is_char_boundary(cut) {
                cut -= 1;
            }
            message.content.truncate(cut);
            message.content.push_str(TRUNCATION_NOTICE);
        }
    }
}
/// Gets conversation statistics for the current message history.
///
/// Counts messages by role and sums content lengths in a single pass.
pub fn get_stats(&self) -> ConversationStats {
    let mut user_messages = 0;
    let mut assistant_messages = 0;
    let mut total_characters = 0;
    for message in &self.messages {
        total_characters += message.content.len();
        match message.role.as_str() {
            "user" => user_messages += 1,
            "assistant" => assistant_messages += 1,
            _ => {}
        }
    }
    let total_messages = self.messages.len();
    // Integer average; defined as 0 for an empty history.
    let average_message_length = if total_messages == 0 {
        0
    } else {
        total_characters / total_messages
    };
    ConversationStats {
        total_messages,
        user_messages,
        assistant_messages,
        total_characters,
        average_message_length,
    }
}
pub fn clear_messages(&mut self) {
@ -163,6 +232,13 @@ impl Session {
}
/// Backwards-compatible session listing as (name, last-modified) pairs.
///
/// Thin wrapper over `list_sessions_lazy(false)` that discards the extra
/// per-session metadata.
pub fn list_sessions() -> Result<Vec<(String, DateTime<Utc>)>> {
    let infos = Self::list_sessions_lazy(false)?;
    let mut pairs = Vec::with_capacity(infos.len());
    for info in infos {
        pairs.push((info.name, info.last_modified));
    }
    Ok(pairs)
}
/// Lists sessions with lazy loading - only loads full data if detailed=true
pub fn list_sessions_lazy(detailed: bool) -> Result<Vec<SessionInfo>> {
let config = Config::load().unwrap_or_default();
let sessions_dir = Self::sessions_dir()?;
if !sessions_dir.exists() {
@ -170,27 +246,124 @@ impl Session {
}
let mut sessions = Vec::new();
let mut count = 0;
for entry in fs::read_dir(&sessions_dir)? {
let entry = entry?;
let path = entry.path();
// Respect max sessions limit for performance
if count >= config.limits.max_sessions_to_list {
break;
}
if let Some(extension) = path.extension() {
if extension == "json" {
if extension == config.session.file_extension.as_str() {
if let Some(name) = path.file_stem().and_then(|s| s.to_str()) {
let metadata = entry.metadata()?;
let modified = metadata.modified()?;
let datetime = DateTime::<Utc>::from(modified);
sessions.push((name.to_string(), datetime));
let file_size = metadata.len();
let (model, message_count) = if detailed {
// Only load session data if detailed info is requested
match Self::get_session_metadata(name) {
Ok((model, count)) => (Some(model), Some(count)),
Err(_) => (None, None)
}
} else {
(None, None)
};
sessions.push(SessionInfo {
name: name.to_string(),
last_modified: datetime,
model,
message_count,
file_size: Some(file_size),
});
count += 1;
}
}
}
}
sessions.sort_by(|a, b| b.1.cmp(&a.1)); // Sort by modification time, newest first
sessions.sort_by(|a, b| b.last_modified.cmp(&a.last_modified)); // Sort by modification time, newest first
Ok(sessions)
}
/// Gets metadata (model name and message count) from a session file.
///
/// NOTE(review): despite the "lazy" listing intent, this deserializes the
/// ENTIRE `SessionData` — all message contents included — and then drops
/// it; only the lifetime is short, not the parse. A field-selective
/// deserialization target would be needed for true partial parsing.
fn get_session_metadata(name: &str) -> Result<(String, usize)> {
let path = Self::session_path(name)?;
if !path.exists() {
return Err(anyhow::anyhow!("Session '{}' does not exist", name));
}
// Sessions are JSON documents on disk; read the whole file as text.
let json_data = fs::read_to_string(&path)
.with_context(|| format!("Failed to read session from {:?}", path))?;
// Full (not field-selective) parse of the session document.
let data: SessionData = serde_json::from_str(&json_data)
.with_context(|| format!("Failed to parse session data from {:?}", path))?;
Ok((data.model, data.messages.len()))
}
/// Optimizes session data in memory by removing redundant information.
///
/// Caps oversized messages, then strips leading/trailing whitespace from
/// every line of every message (and from the message as a whole).
pub fn optimize_for_memory(&mut self) {
    // First pass: bound the size of individual messages.
    self.truncate_long_messages();
    // Second pass: collapse per-line and surrounding whitespace.
    for message in &mut self.messages {
        let trimmed_lines: Vec<&str> = message.content.lines().map(str::trim).collect();
        let joined = trimmed_lines.join("\n");
        message.content = joined.trim().to_string();
    }
}
/// Checks if session needs cleanup based on size.
///
/// True when the conversation has grown to twice the configured history
/// limit, or when the total text exceeds roughly 1MB.
pub fn needs_cleanup(&self) -> bool {
    const MAX_TOTAL_CHARACTERS: usize = 1_000_000; // ~1MB of message text
    let stats = self.get_stats();
    let history_cap = Config::load()
        .unwrap_or_default()
        .limits
        .max_conversation_history;
    stats.total_messages > history_cap * 2 || stats.total_characters > MAX_TOTAL_CHARACTERS
}
/// Performs aggressive cleanup for memory optimization.
///
/// Retains the first message (assumed system prompt) plus the most recent
/// half of the configured history limit, then runs the in-memory optimizer.
pub fn cleanup_for_memory(&mut self) {
    let config = Config::load().unwrap_or_default();
    let target_messages = config.limits.max_conversation_history / 2;
    if self.messages.len() > target_messages + 1 {
        let tail_start = self.messages.len() - target_messages;
        let mut retained = Vec::with_capacity(target_messages + 1);
        retained.push(self.messages[0].clone()); // keep the system prompt
        retained.extend_from_slice(&self.messages[tail_start..]);
        self.messages = retained;
    }
    self.optimize_for_memory();
}
pub fn delete_session(name: &str) -> Result<()> {
let path = Self::session_path(name)?;

View File

@ -80,6 +80,9 @@ Available Commands:
/tool websearch on|off - Enable/disable web search (OpenAI only)
/reasoning on|off - Enable/disable reasoning summaries (OpenAI only)
/effort [low|medium|high] - Set reasoning effort level (GPT-5 only)
/stats - Show current session statistics
/optimize - Optimize session memory usage
/cleanup - Remove older messages to free memory
Environment Variables:
OPENAI_API_KEY - Required for OpenAI models