Initial commit: GPT CLI (Rust)

- Complete Rust implementation of GPT CLI
- Support for OpenAI and Anthropic models
- Session persistence and management
- Web search integration via Responses API
- Interactive commands and model switching
- Comprehensive error handling and logging

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
leach
2025-08-15 15:01:28 -04:00
commit b171a6b2b2
15 changed files with 3913 additions and 0 deletions

429
src/cli.rs Normal file
View File

@@ -0,0 +1,429 @@
use anyhow::Result;
use crate::core::{
create_client, get_provider_for_model, provider::get_all_models, provider::get_supported_models,
provider::is_model_supported, ChatClient, Session,
};
use crate::utils::{Display, InputHandler};
/// Interactive chat REPL: owns the active session, a lazily created API
/// client, and the terminal display/input helpers.
pub struct ChatCLI {
    // Active conversation state; persisted to disk via `Session::save`.
    session: Session,
    // Cached API client; rebuilt when the session's model changes.
    client: Option<ChatClient>,
    // Model the cached `client` was built for (used to detect staleness).
    current_model: Option<String>,
    display: Display,
    input: InputHandler,
}
impl ChatCLI {
    /// Create a CLI wrapper around an existing session. The API client is
    /// built lazily on first use (see `get_client`).
    pub fn new(session: Session) -> Result<Self> {
        Ok(Self {
            session,
            client: None,
            current_model: None,
            display: Display::new(),
            input: InputHandler::new()?,
        })
    }

    /// Return a client for the session's current model, (re)creating it when
    /// no client exists yet or the session's model changed since last call.
    fn get_client(&mut self) -> Result<&ChatClient> {
        if self.client.is_none() || self.current_model.as_ref() != Some(&self.session.model) {
            let client = create_client(&self.session.model)?;
            self.current_model = Some(self.session.model.clone());
            self.client = Some(client);
        }
        // Safe: the branch above guarantees `client` is Some here.
        Ok(self.client.as_ref().unwrap())
    }

    /// Main REPL loop: read lines, dispatch '/'-prefixed commands and plain
    /// chat messages, and persist session + input history on exit.
    pub async fn run(&mut self) -> Result<()> {
        self.display.print_header();
        self.display.print_info("Type your message and press Enter. Commands start with '/'.");
        self.display.print_info("Type /help for help.");
        let provider = get_provider_for_model(&self.session.model);
        self.display.print_model_info(&self.session.model, provider.as_str());
        self.display.print_session_info(&self.session.name);
        println!();
        loop {
            match self.input.read_line("👤> ")? {
                Some(line) => {
                    let line = line.trim();
                    if line.is_empty() {
                        continue;
                    }
                    if line.starts_with('/') {
                        // Commands return false to request exit (e.g. /exit).
                        if !self.handle_command(line).await? {
                            break;
                        }
                    } else if let Err(e) = self.handle_user_message(line).await {
                        // Errors from a chat turn are reported but do not
                        // terminate the REPL.
                        self.display.print_error(&format!("Error: {}", e));
                    }
                }
                None => {
                    // EOF: leave the loop gracefully.
                    self.display.print_info("Goodbye!");
                    break;
                }
            }
        }
        self.session.save()?;
        self.input.save_history()?;
        Ok(())
    }

    /// Send one user message to the model and record the reply.
    /// The session is saved before the request (so the user turn survives a
    /// crash) and again after a successful reply.
    async fn handle_user_message(&mut self, message: &str) -> Result<()> {
        self.session.add_user_message(message.to_string());
        self.session.save()?;
        let spinner = self.display.show_spinner("Thinking");
        // Clone data needed for the API call before getting mutable client reference
        let model = self.session.model.clone();
        let messages = self.session.messages.clone();
        let enable_web_search = self.session.enable_web_search;
        let enable_reasoning_summary = self.session.enable_reasoning_summary;
        let reasoning_effort = self.session.reasoning_effort.clone();
        let client = self.get_client()?;
        match client
            .chat_completion(
                &model,
                &messages,
                enable_web_search,
                enable_reasoning_summary,
                &reasoning_effort,
            )
            .await
        {
            Ok(response) => {
                spinner.finish("Done");
                self.display.print_assistant_response(&response);
                self.session.add_assistant_message(response);
                self.session.save()?;
            }
            Err(e) => {
                spinner.finish_with_error("Failed");
                return Err(e);
            }
        }
        Ok(())
    }

    /// Dispatch a '/'-prefixed command. Returns Ok(false) when the REPL
    /// should terminate (currently only `/exit`).
    async fn handle_command(&mut self, command: &str) -> Result<bool> {
        let parts: Vec<&str> = command.split_whitespace().collect();
        if parts.is_empty() {
            return Ok(true);
        }
        match parts[0].to_lowercase().as_str() {
            "/help" => {
                self.display.print_help();
            }
            "/exit" => {
                self.session.save()?;
                self.display.print_info("Session saved. Goodbye!");
                return Ok(false);
            }
            "/model" => {
                self.handle_model_command(&parts).await?;
            }
            "/models" => {
                self.list_models();
            }
            "/list" => {
                self.list_sessions()?;
            }
            "/new" => {
                self.handle_new_session(&parts)?;
            }
            "/switch" => {
                self.handle_switch_session(&parts).await?;
            }
            "/clear" => {
                self.session.clear_messages();
                self.session.save()?;
                self.display.print_command_result("Conversation cleared");
            }
            "/delete" => {
                self.handle_delete_session(&parts).await?;
            }
            "/tool" => {
                self.handle_tool_command(&parts)?;
            }
            "/reasoning" => {
                self.handle_reasoning_command(&parts)?;
            }
            "/effort" => {
                self.handle_effort_command(&parts)?;
            }
            _ => {
                self.display.print_error(&format!("Unknown command: {} (see /help)", parts[0]));
            }
        }
        Ok(true)
    }

    /// `/model` — with no argument, show an interactive picker; with one
    /// argument, switch directly after validating the model name.
    async fn handle_model_command(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() == 1 {
            let all_models = get_all_models();
            let selection = self.input.select_from_list(
                "Select a model:",
                &all_models,
                Some(&self.session.model),
            )?;
            if let Some(model) = selection {
                self.apply_model(model.to_string());
            }
        } else if parts.len() == 2 {
            let model = parts[1];
            if !is_model_supported(model) {
                self.display.print_error("Unsupported model. Use /models to see the list of supported models.");
            } else {
                self.apply_model(model.to_string());
            }
        } else {
            self.display.print_error("Usage: /model [model_name]");
        }
        Ok(())
    }

    /// Switch the active model, report the change, and drop the cached
    /// client so the next request builds one for the new provider.
    /// (Extracted: both `/model` paths previously duplicated this block.)
    fn apply_model(&mut self, model: String) {
        self.session.model = model;
        let provider = get_provider_for_model(&self.session.model);
        self.display.print_command_result(&format!(
            "Model switched to {} ({})",
            self.session.model,
            provider.as_str()
        ));
        self.client = None; // Force client recreation
    }

    /// `/models` — print every supported model grouped by provider, marking
    /// the session's current one.
    fn list_models(&self) {
        self.display.print_info("Supported models:");
        let supported = get_supported_models();
        for (provider, models) in supported {
            println!("  {}:", provider.as_str().to_uppercase());
            for model in models {
                let marker = if model == self.session.model { " <- current" } else { "" };
                println!("    {}{}", model, marker);
            }
        }
    }

    /// `/list` — print saved sessions (newest first) with their update time,
    /// marking the currently active one.
    fn list_sessions(&self) -> Result<()> {
        let sessions = Session::list_sessions()?;
        if sessions.is_empty() {
            self.display.print_info("No saved sessions");
            return Ok(());
        }
        self.display.print_info("Saved sessions:");
        for (name, updated) in sessions {
            // Fix: the current session's marker was an empty string, which
            // made it indistinguishable from the others in the listing.
            let marker = if name == self.session.name { "*" } else { " " };
            let date_str = updated.format("%Y-%m-%d %H:%M:%S");
            println!("  {} {} (updated: {})", marker, name, date_str);
        }
        Ok(())
    }

    /// `/new <name>` — save the current session and start a fresh one that
    /// keeps the current model.
    fn handle_new_session(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() != 2 {
            self.display.print_error("Usage: /new <session_name>");
            return Ok(());
        }
        self.session.save()?;
        let new_session = Session::new(parts[1].to_string(), self.session.model.clone());
        self.session = new_session;
        self.display.print_command_result(&format!("New session '{}' started", self.session.name));
        Ok(())
    }

    /// `/switch [name]` — save the current session, then load another one,
    /// either chosen interactively or named directly.
    async fn handle_switch_session(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() == 1 {
            let sessions = Session::list_sessions()?;
            // Offer every saved session except the one we are already in.
            let session_names: Vec<String> = sessions
                .into_iter()
                .map(|(name, _)| name)
                .filter(|name| name != &self.session.name)
                .collect();
            if let Some(selection) = self.input.select_from_list(
                "Switch to session:",
                &session_names,
                None,
            )? {
                self.switch_to(&selection)?;
            }
        } else if parts.len() == 2 {
            self.switch_to(parts[1])?;
        } else {
            self.display.print_error("Usage: /switch [session_name]");
        }
        Ok(())
    }

    /// Save the active session, load `name`, and invalidate the cached
    /// client. A load failure is reported but leaves the current session in
    /// place. (Extracted: both `/switch` paths previously duplicated this.)
    fn switch_to(&mut self, name: &str) -> Result<()> {
        self.session.save()?;
        match Session::load(name) {
            Ok(session) => {
                self.session = session;
                self.display.print_command_result(&format!(
                    "Switched to session '{}' (model={})",
                    self.session.name, self.session.model
                ));
                self.client = None; // Force client recreation
            }
            Err(e) => {
                self.display.print_error(&format!("Failed to load session: {}", e));
            }
        }
        Ok(())
    }

    /// `/delete [name]` — delete a saved session (never the active one),
    /// with an interactive picker and a confirmation prompt.
    async fn handle_delete_session(&mut self, parts: &[&str]) -> Result<()> {
        let target = if parts.len() == 1 {
            let sessions = Session::list_sessions()?;
            let session_names: Vec<String> = sessions
                .into_iter()
                .map(|(name, _)| name)
                .filter(|name| name != &self.session.name)
                .collect();
            self.input.select_from_list("Delete session:", &session_names, None)?
        } else if parts.len() == 2 {
            Some(parts[1].to_string())
        } else {
            self.display.print_error("Usage: /delete [session_name]");
            return Ok(());
        };
        if let Some(target) = target {
            if target == self.session.name {
                self.display.print_error(
                    "Cannot delete the session you are currently using. Switch to another session first."
                );
                return Ok(());
            }
            if self.input.confirm(&format!("Delete session '{}'?", target))? {
                match Session::delete_session(&target) {
                    Ok(()) => {
                        self.display.print_command_result(&format!("Session '{}' deleted", target));
                    }
                    Err(e) => {
                        self.display.print_error(&format!("Failed to delete session: {}", e));
                    }
                }
            }
        }
        Ok(())
    }

    /// `/tool websearch on|off` — toggle web search; warns (but still sets
    /// the flag) when the current model's provider does not support it.
    fn handle_tool_command(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() != 3 || parts[1].to_lowercase() != "websearch" || !["on", "off"].contains(&parts[2]) {
            self.display.print_error("Usage: /tool websearch on|off");
            return Ok(());
        }
        let enable = parts[2] == "on";
        if enable {
            let model = self.session.model.clone();
            if let Ok(client) = self.get_client() {
                if !client.supports_feature_for_model("web_search", &model) {
                    let provider = get_provider_for_model(&model);
                    self.display.print_warning(&format!(
                        "Web search is not supported by {} models",
                        provider.as_str()
                    ));
                }
            }
        }
        self.session.enable_web_search = enable;
        let state = if enable { "enabled" } else { "disabled" };
        self.display.print_command_result(&format!("Web search tool {}", state));
        Ok(())
    }

    /// `/reasoning on|off` — toggle reasoning summaries; warns (but still
    /// sets the flag) when unsupported by the current model.
    fn handle_reasoning_command(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() != 2 || !["on", "off"].contains(&parts[1]) {
            self.display.print_error("Usage: /reasoning on|off");
            return Ok(());
        }
        let enable = parts[1] == "on";
        if enable {
            let model = self.session.model.clone();
            if let Ok(client) = self.get_client() {
                if !client.supports_feature_for_model("reasoning_summary", &model) {
                    let provider = get_provider_for_model(&model);
                    self.display.print_warning(&format!(
                        "Reasoning summaries are not supported by {} models",
                        provider.as_str()
                    ));
                }
            }
        }
        self.session.enable_reasoning_summary = enable;
        let state = if enable { "enabled" } else { "disabled" };
        self.display.print_command_result(&format!("Reasoning summaries {}", state));
        Ok(())
    }

    /// `/effort [low|medium|high]` — show or set the reasoning effort level;
    /// warns (but still sets it) on non-GPT-5 models.
    fn handle_effort_command(&mut self, parts: &[&str]) -> Result<()> {
        if parts.len() == 1 {
            self.display.print_info(&format!("Current reasoning effort: {}", self.session.reasoning_effort));
            self.display.print_info("Available levels: low, medium, high");
            if !self.session.model.starts_with("gpt-5") {
                self.display.print_warning("Reasoning effort is only supported by GPT-5 models");
            }
        } else if parts.len() == 2 {
            let effort = parts[1];
            if !["low", "medium", "high"].contains(&effort) {
                self.display.print_error("Invalid effort level. Use: low, medium, or high");
            } else {
                if !self.session.model.starts_with("gpt-5") {
                    self.display.print_warning("Reasoning effort is only supported by GPT-5 models");
                }
                self.session.reasoning_effort = effort.to_string();
                self.display.print_command_result(&format!("Reasoning effort set to {}", effort));
            }
        } else {
            self.display.print_error("Usage: /effort [low|medium|high]");
        }
        Ok(())
    }
}

560
src/core/client.rs Normal file
View File

@@ -0,0 +1,560 @@
use anyhow::{Context, Result};
use reqwest::Client;
use serde::Deserialize;
use serde_json::{json, Value};
use std::env;
use super::{provider::Provider, session::Message};
/// Provider-dispatching chat client: one variant per supported backend.
/// Constructed by `create_client` based on the model name.
#[derive(Debug)]
pub enum ChatClient {
    OpenAI(OpenAIClient),
    Anthropic(AnthropicClient),
}
impl ChatClient {
    /// Forward one chat turn to whichever provider backs this client.
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        enable_web_search: bool,
        enable_reasoning_summary: bool,
        reasoning_effort: &str,
    ) -> Result<String> {
        match self {
            Self::OpenAI(inner) => {
                inner
                    .chat_completion(
                        model,
                        messages,
                        enable_web_search,
                        enable_reasoning_summary,
                        reasoning_effort,
                    )
                    .await
            }
            Self::Anthropic(inner) => {
                inner
                    .chat_completion(
                        model,
                        messages,
                        enable_web_search,
                        enable_reasoning_summary,
                        reasoning_effort,
                    )
                    .await
            }
        }
    }

    /// Whether the backing provider supports `feature` at all.
    pub fn supports_feature(&self, feature: &str) -> bool {
        match self {
            Self::OpenAI(inner) => inner.supports_feature(feature),
            Self::Anthropic(inner) => inner.supports_feature(feature),
        }
    }

    /// Whether the backing provider supports `feature` for a specific model.
    pub fn supports_feature_for_model(&self, feature: &str, model: &str) -> bool {
        match self {
            Self::OpenAI(inner) => inner.supports_feature_for_model(feature, model),
            Self::Anthropic(inner) => inner.supports_feature_for_model(feature, model),
        }
    }
}
/// HTTP client for the OpenAI Chat Completions and Responses APIs.
// NOTE(review): deriving Debug here includes `api_key` in debug output,
// which could leak the secret into logs — consider a manual impl that
// redacts it.
#[derive(Debug)]
pub struct OpenAIClient {
    client: Client,
    api_key: String,
    // Overridable via OPENAI_BASE_URL (see `new`).
    base_url: String,
}

/// HTTP client for the Anthropic Messages API.
// NOTE(review): same Debug/api_key leak concern as OpenAIClient above.
#[derive(Debug)]
pub struct AnthropicClient {
    client: Client,
    api_key: String,
    base_url: String,
}
/// Minimal deserialization of a Chat Completions response; only the fields
/// this CLI reads are declared.
#[derive(Deserialize)]
struct OpenAIResponse {
    choices: Vec<Choice>,
}

#[derive(Deserialize)]
struct Choice {
    message: OpenAIMessage,
    #[allow(dead_code)]
    finish_reason: Option<String>,
}

#[derive(Deserialize)]
struct OpenAIMessage {
    // Absent when the model answers only with tool calls.
    content: Option<String>,
    tool_calls: Option<Vec<ToolCall>>,
}

#[derive(Deserialize)]
struct ToolCall {
    #[allow(dead_code)]
    id: String,
    #[allow(dead_code)]
    #[serde(rename = "type")]
    tool_type: String,
    function: FunctionCall,
}

#[derive(Deserialize)]
struct FunctionCall {
    name: String,
    // JSON-encoded argument object, parsed on demand by the caller.
    arguments: String,
}
// Responses API structures — minimal deserialization of the OpenAI
// Responses API payload (used for GPT-5 web search); only fields this CLI
// reads are declared, everything else is ignored by serde.
#[derive(Deserialize)]
struct ResponsesApiResponse {
    #[allow(dead_code)]
    id: String,
    #[allow(dead_code)]
    object: String,
    #[allow(dead_code)]
    created_at: u64,
    status: String,
    // Ordered list of items: web search calls, messages, reasoning, etc.
    output: Vec<OutputItem>,
}

#[derive(Deserialize)]
struct OutputItem {
    #[allow(dead_code)]
    id: String,
    // Discriminator: "web_search_call", "message", "reasoning", ...
    #[serde(rename = "type")]
    item_type: String,
    #[serde(default)]
    status: Option<String>,
    #[serde(default)]
    role: Option<String>,
    #[serde(default)]
    content: Option<Vec<ResponseContent>>,
    // Present on web_search_call items.
    #[serde(default)]
    action: Option<SearchAction>,
}

#[derive(Deserialize)]
struct SearchAction {
    #[allow(dead_code)]
    #[serde(rename = "type")]
    action_type: String,
    query: String,
}

#[derive(Deserialize)]
struct ResponseContent {
    // e.g. "output_text"
    #[serde(rename = "type")]
    content_type: String,
    #[serde(default)]
    text: Option<String>,
    #[serde(default)]
    annotations: Option<Vec<Annotation>>,
}

#[derive(Deserialize)]
struct Annotation {
    // e.g. "url_citation"
    #[serde(rename = "type")]
    annotation_type: String,
    #[allow(dead_code)]
    start_index: usize,
    #[allow(dead_code)]
    end_index: usize,
    url: String,
    title: String,
}
/// Minimal deserialization of an Anthropic Messages API response.
#[derive(Deserialize)]
struct AnthropicResponse {
    content: Vec<AnthropicContent>,
}

// NOTE(review): this assumes every content block has a `text` field;
// non-text blocks (e.g. tool_use) would fail to deserialize — confirm
// against the Messages API before enabling tools on this path.
#[derive(Deserialize)]
struct AnthropicContent {
    text: String,
}
impl OpenAIClient {
    /// Build a client from the environment: OPENAI_API_KEY is mandatory,
    /// OPENAI_BASE_URL optionally overrides the default endpoint.
    pub fn new() -> Result<Self> {
        let api_key = env::var("OPENAI_API_KEY")
            .context("OPENAI_API_KEY environment variable is required")?;
        Ok(Self {
            client: Client::new(),
            api_key,
            base_url: env::var("OPENAI_BASE_URL")
                .unwrap_or_else(|_| "https://api.openai.com/v1".to_string()),
        })
    }

    /// Map session messages to the Chat Completions wire format.
    fn convert_messages(messages: &[Message]) -> Vec<Value> {
        let mut converted = Vec::with_capacity(messages.len());
        for msg in messages {
            converted.push(json!({
                "role": msg.role,
                "content": msg.content
            }));
        }
        converted
    }
}
impl OpenAIClient {
    /// Run one chat turn against OpenAI.
    ///
    /// GPT-5 models with web search enabled are routed to the Responses API
    /// (native `web_search_preview` tool); everything else goes through
    /// `/chat/completions`. `_enable_reasoning_summary` is accepted for
    /// interface parity but unused on this path.
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        enable_web_search: bool,
        _enable_reasoning_summary: bool,
        reasoning_effort: &str,
    ) -> Result<String> {
        // Use Responses API for web search with GPT-5, fallback to chat completions
        if enable_web_search && model.starts_with("gpt-5") {
            return self.responses_api_completion(model, messages, reasoning_effort).await;
        }
        let url = format!("{}/chat/completions", self.base_url);
        let mut payload = json!({
            "model": model,
            "messages": Self::convert_messages(messages),
            "stream": false
        });
        // Add tools if web search is enabled (for non-GPT-5 models).
        // Note: the tool is advertised but not executed — see the tool-call
        // handling below, which only reports what would have been searched.
        if enable_web_search {
            payload["tools"] = json!([{
                "type": "function",
                "function": {
                    "name": "web_search",
                    "description": "Search the web for current information on any topic",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "The search query to find relevant information"
                            }
                        },
                        "required": ["query"]
                    }
                }
            }]);
            payload["tool_choice"] = json!("auto");
        }
        // Add reasoning effort for GPT-5 models
        if model.starts_with("gpt-5") && ["low", "medium", "high"].contains(&reasoning_effort) {
            payload["reasoning_effort"] = json!(reasoning_effort);
        }
        let response = self
            .client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to OpenAI API")?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("OpenAI API error: {}", error_text));
        }
        let response_json: OpenAIResponse = response
            .json()
            .await
            .context("Failed to parse OpenAI API response")?;
        let choice = response_json
            .choices
            .first()
            .context("No choices in OpenAI API response")?;
        // Handle tool calls if present
        if let Some(tool_calls) = &choice.message.tool_calls {
            let mut response_parts = Vec::new();
            if let Some(content) = &choice.message.content {
                response_parts.push(content.clone());
            }
            for tool_call in tool_calls {
                if tool_call.function.name == "web_search" {
                    // Parse the query from the function arguments
                    if let Ok(args) = serde_json::from_str::<serde_json::Value>(&tool_call.function.arguments) {
                        if let Some(query) = args.get("query").and_then(|q| q.as_str()) {
                            response_parts.push(format!(
                                "\n[Web Search Request: \"{}\"]\nNote: Web search functionality is not implemented in this CLI. The AI wanted to search for: {}",
                                query, query
                            ));
                        }
                    }
                }
            }
            let final_content = if response_parts.is_empty() {
                "The AI attempted to use tools but no content was returned.".to_string()
            } else {
                response_parts.join("\n")
            };
            return Ok(final_content);
        }
        // Handle regular content response
        let content = choice.message.content.as_ref()
            .context("No content in OpenAI API response")?;
        Ok(content.clone())
    }

    /// GPT-5 web-search path via the Responses API: flattens the
    /// conversation into a single input string, requests the
    /// `web_search_preview` tool, and renders searches, the assistant
    /// message, and URL citations into one display string.
    async fn responses_api_completion(
        &self,
        model: &str,
        messages: &[Message],
        reasoning_effort: &str,
    ) -> Result<String> {
        let url = format!("{}/responses", self.base_url);
        // Convert messages to input text (simple approach for now)
        let input_text = messages
            .iter()
            .filter(|msg| msg.role != "system")
            .map(|msg| msg.content.as_str())
            .collect::<Vec<_>>()
            .join("\n");
        let mut payload = json!({
            "model": model,
            "tools": [{"type": "web_search_preview"}],
            "input": input_text
        });
        // Add reasoning effort for GPT-5 models
        if ["low", "medium", "high"].contains(&reasoning_effort) {
            payload["reasoning"] = json!({
                "effort": reasoning_effort
            });
        }
        let response = self
            .client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to OpenAI Responses API")?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("OpenAI Responses API error: {}", error_text));
        }
        // Get response text first for debugging
        let response_text = response.text().await
            .context("Failed to get response text from OpenAI Responses API")?;
        // Try to parse JSON and provide better error context
        let response_json: ResponsesApiResponse = serde_json::from_str(&response_text)
            .with_context(|| format!("Failed to parse OpenAI Responses API response. Response was: {}", response_text))?;
        // Process the output array to extract the assistant message
        let mut final_content = String::new();
        let mut citations = Vec::new();
        let mut search_count = 0;
        for item in response_json.output {
            match item.item_type.as_str() {
                "web_search_call" => {
                    if item.status.as_deref() == Some("completed") {
                        search_count += 1;
                        if let Some(action) = &item.action {
                            final_content.push_str(&format!("🔍 Search {}: \"{}\"\n", search_count, action.query));
                        }
                    }
                }
                "message" => {
                    // Fix: compare via as_deref instead of allocating a
                    // String (`Some("assistant".to_string())`) per item.
                    if item.role.as_deref() == Some("assistant") && item.status.as_deref() == Some("completed") {
                        if let Some(content_items) = item.content {
                            for content_item in content_items {
                                if content_item.content_type == "output_text" {
                                    if let Some(text) = &content_item.text {
                                        if search_count > 0 {
                                            final_content.push_str("\n📝 **Response:**\n");
                                        }
                                        final_content.push_str(text);
                                        // Collect citations
                                        if let Some(annotations) = &content_item.annotations {
                                            for annotation in annotations {
                                                if annotation.annotation_type == "url_citation" {
                                                    citations.push(format!(
                                                        "\n📄 [{}]({}) - {}",
                                                        citations.len() + 1,
                                                        annotation.url,
                                                        annotation.title
                                                    ));
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                _ => {} // Handle other types like "reasoning" if needed
            }
        }
        // Append citations to the end
        if !citations.is_empty() {
            final_content.push_str("\n\n**Sources:**");
            for citation in citations {
                final_content.push_str(&citation);
            }
        }
        if final_content.is_empty() {
            return Err(anyhow::anyhow!("No content found in Responses API response"));
        }
        Ok(final_content)
    }

    /// Provider-level feature support (independent of model).
    pub fn supports_feature(&self, feature: &str) -> bool {
        match feature {
            "web_search" | "reasoning_summary" | "reasoning_effort" => true,
            _ => false,
        }
    }

    /// Model-level feature support; reasoning effort is GPT-5 only.
    pub fn supports_feature_for_model(&self, feature: &str, model: &str) -> bool {
        match feature {
            "web_search" => true,
            "reasoning_summary" => true,
            "reasoning_effort" => model.starts_with("gpt-5"),
            _ => false,
        }
    }
}
impl AnthropicClient {
    /// Build a client from the environment; ANTHROPIC_API_KEY is mandatory
    /// and the endpoint is fixed.
    pub fn new() -> Result<Self> {
        let api_key = env::var("ANTHROPIC_API_KEY")
            .context("ANTHROPIC_API_KEY environment variable is required")?;
        Ok(Self {
            client: Client::new(),
            api_key,
            base_url: String::from("https://api.anthropic.com/v1"),
        })
    }

    /// Split session messages into the Messages-API shape: an optional
    /// system prompt (last system message wins) plus user/assistant turns;
    /// any other role is dropped.
    fn convert_messages(messages: &[Message]) -> (Option<String>, Vec<Value>) {
        let mut system_prompt = None;
        let mut chat_turns = Vec::new();
        for msg in messages {
            if msg.role == "system" {
                system_prompt = Some(msg.content.clone());
            } else if msg.role == "user" || msg.role == "assistant" {
                chat_turns.push(json!({
                    "role": msg.role,
                    "content": msg.content
                }));
            }
        }
        (system_prompt, chat_turns)
    }
}
impl AnthropicClient {
    /// Run one chat turn against the Anthropic Messages API. The web-search,
    /// reasoning-summary, and effort flags are accepted for interface parity
    /// but ignored — this backend implements none of them (see the
    /// `supports_*` methods below).
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        _enable_web_search: bool,
        _enable_reasoning_summary: bool,
        _reasoning_effort: &str,
    ) -> Result<String> {
        let url = format!("{}/messages", self.base_url);
        let (system_prompt, user_messages) = Self::convert_messages(messages);
        let mut payload = json!({
            "model": model,
            "max_tokens": 4096,
            "messages": user_messages
        });
        // The Messages API takes the system prompt as a top-level field,
        // not as a message.
        if let Some(system) = system_prompt {
            payload["system"] = json!(system);
        }
        let response = self
            .client
            .post(&url)
            .header("x-api-key", &self.api_key)
            .header("Content-Type", "application/json")
            .header("anthropic-version", "2023-06-01")
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to Anthropic API")?;
        if !response.status().is_success() {
            let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("Anthropic API error: {}", error_text));
        }
        let response_json: AnthropicResponse = response
            .json()
            .await
            .context("Failed to parse Anthropic API response")?;
        let content = response_json
            .content
            .first()
            .map(|c| &c.text)
            .context("No content in Anthropic API response")?;
        Ok(content.clone())
    }

    /// No optional features are supported on the Anthropic path.
    /// (Previously a match whose every arm returned false — collapsed.)
    pub fn supports_feature(&self, _feature: &str) -> bool {
        false
    }

    /// No optional features are supported for any Anthropic model.
    /// (Previously a match whose every arm returned false — collapsed.)
    pub fn supports_feature_for_model(&self, _feature: &str, _model: &str) -> bool {
        false
    }
}
/// Build the right client for `model` by asking the provider registry.
/// Fails if the provider's required API-key environment variable is unset.
pub fn create_client(model: &str) -> Result<ChatClient> {
    match super::provider::get_provider_for_model(model) {
        Provider::OpenAI => Ok(ChatClient::OpenAI(OpenAIClient::new()?)),
        Provider::Anthropic => Ok(ChatClient::Anthropic(AnthropicClient::new()?)),
    }
}

7
src/core/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
// Core domain: session persistence, provider routing, and API clients.
pub mod session;
pub mod client;
pub mod provider;
// Re-exports for the common `crate::core::{Session, ChatClient, ...}` paths.
pub use session::Session;
pub use client::{ChatClient, create_client};
pub use provider::get_provider_for_model;

72
src/core/provider.rs Normal file
View File

@@ -0,0 +1,72 @@
use std::collections::HashMap;
/// Which backend API a model belongs to.
///
/// `Copy` is derived in addition to the original traits: the enum is a
/// fieldless tag, so this is backward compatible and spares callers
/// needless clones/moves.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provider {
    OpenAI,
    Anthropic,
}
impl Provider {
    /// Lowercase identifier used in display output ("openai"/"anthropic").
    pub fn as_str(&self) -> &'static str {
        match *self {
            Provider::OpenAI => "openai",
            Provider::Anthropic => "anthropic",
        }
    }
}
/// The registry of supported model names, keyed by provider.
/// Built fresh on every call; callers treat it as read-only.
pub fn get_supported_models() -> HashMap<Provider, Vec<&'static str>> {
    HashMap::from([
        (
            Provider::OpenAI,
            vec![
                "gpt-4.1",
                "gpt-4.1-mini",
                "gpt-4o",
                "gpt-5",
                "gpt-5-chat-latest",
                "o1",
                "o3",
                "o4-mini",
                "o3-mini",
            ],
        ),
        (
            Provider::Anthropic,
            vec![
                "claude-3-5-sonnet-20241022",
                "claude-3-5-haiku-20241022",
                "claude-3-opus-20240229",
                "claude-3-sonnet-20240229",
                "claude-3-haiku-20240307",
            ],
        ),
    ])
}
/// Flatten the provider registry into a single model list.
/// Ordering follows HashMap iteration and is therefore unspecified.
pub fn get_all_models() -> Vec<&'static str> {
    let mut all = Vec::new();
    for models in get_supported_models().into_values() {
        all.extend(models);
    }
    all
}
/// Look up which provider owns `model`; unknown names fall back to OpenAI,
/// matching the original behavior.
pub fn get_provider_for_model(model: &str) -> Provider {
    get_supported_models()
        .into_iter()
        .find(|(_, models)| models.contains(&model))
        .map(|(provider, _)| provider)
        .unwrap_or(Provider::OpenAI) // default fallback
}
/// True when `model` appears anywhere in the provider registry.
pub fn is_model_supported(model: &str) -> bool {
    get_all_models().iter().any(|candidate| *candidate == model)
}

202
src/core/session.rs Normal file
View File

@@ -0,0 +1,202 @@
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
/// System prompt injected as the first message of every session; steers
/// models toward terminal-friendly output.
/// Fix: "80column" → "80-column" (the hyphen was lost in the original text).
const SYSTEM_PROMPT: &str = "You are an AI assistant running in a terminal (CLI) environment. \
    Optimise all answers for 80-column readability, prefer plain text, \
    ASCII art or concise bullet lists over heavy markup, and wrap code \
    snippets in fenced blocks when helpful. Do not emit trailing spaces or \
    control characters.";
/// One conversation turn; `role` is "system", "user", or "assistant" as
/// used throughout this file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    pub role: String,
    pub content: String,
}
/// On-disk (JSON) form of a session. The session name is not stored here —
/// it is encoded in the file name (see `Session::session_path`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionData {
    pub model: String,
    pub messages: Vec<Message>,
    pub enable_web_search: bool,
    pub enable_reasoning_summary: bool,
    // Defaulted so session files written before this field existed still load.
    #[serde(default = "default_reasoning_effort")]
    pub reasoning_effort: String,
    pub updated_at: DateTime<Utc>,
}
/// Serde fallback for `SessionData::reasoning_effort` on old session files.
fn default_reasoning_effort() -> String {
    String::from("medium")
}
/// In-memory session: name plus the persisted fields of `SessionData`.
#[derive(Debug, Clone)]
pub struct Session {
    // Doubles as the file stem of the saved JSON file.
    pub name: String,
    pub model: String,
    // First element is kept as the system prompt (see `new`/`load`).
    pub messages: Vec<Message>,
    pub enable_web_search: bool,
    pub enable_reasoning_summary: bool,
    // "low" | "medium" | "high" (validated at the CLI layer).
    pub reasoning_effort: String,
}
impl Session {
    /// Create a fresh session seeded with the standard system prompt.
    /// Defaults: web search on, reasoning summaries off, effort "medium".
    pub fn new(name: String, model: String) -> Self {
        let mut session = Self {
            name,
            model,
            messages: Vec::new(),
            enable_web_search: true,
            enable_reasoning_summary: false,
            reasoning_effort: "medium".to_string(),
        };
        // Add system prompt as first message
        session.messages.push(Message {
            role: "system".to_string(),
            content: SYSTEM_PROMPT.to_string(),
        });
        session
    }

    /// Directory holding all session files (`~/.chat_cli_sessions`),
    /// created on first use.
    pub fn sessions_dir() -> Result<PathBuf> {
        let home = dirs::home_dir().context("Could not find home directory")?;
        let sessions_dir = home.join(".chat_cli_sessions");
        if !sessions_dir.exists() {
            fs::create_dir_all(&sessions_dir)
                .with_context(|| format!("Failed to create sessions directory: {:?}", sessions_dir))?;
        }
        Ok(sessions_dir)
    }

    /// Path of the JSON file backing session `name`.
    // NOTE(review): `name` is used verbatim in the file name; a name
    // containing path separators could escape the sessions directory —
    // consider validating at the call sites.
    pub fn session_path(name: &str) -> Result<PathBuf> {
        Ok(Self::sessions_dir()?.join(format!("{}.json", name)))
    }

    /// Persist the session: serialize to pretty JSON, write to a `.tmp`
    /// sibling, then rename over the final path so readers never observe a
    /// half-written file.
    pub fn save(&self) -> Result<()> {
        let data = SessionData {
            model: self.model.clone(),
            messages: self.messages.clone(),
            enable_web_search: self.enable_web_search,
            enable_reasoning_summary: self.enable_reasoning_summary,
            reasoning_effort: self.reasoning_effort.clone(),
            updated_at: Utc::now(),
        };
        let path = Self::session_path(&self.name)?;
        let tmp_path = path.with_extension("tmp");
        let json_data = serde_json::to_string_pretty(&data)
            .context("Failed to serialize session data")?;
        fs::write(&tmp_path, json_data)
            .with_context(|| format!("Failed to write session to {:?}", tmp_path))?;
        fs::rename(&tmp_path, &path)
            .with_context(|| format!("Failed to rename {:?} to {:?}", tmp_path, path))?;
        Ok(())
    }

    /// Load a session by name, re-inserting the system prompt if the stored
    /// message list is empty or lost it.
    pub fn load(name: &str) -> Result<Self> {
        let path = Self::session_path(name)?;
        if !path.exists() {
            return Err(anyhow::anyhow!("Session '{}' does not exist", name));
        }
        let json_data = fs::read_to_string(&path)
            .with_context(|| format!("Failed to read session from {:?}", path))?;
        let data: SessionData = serde_json::from_str(&json_data)
            .with_context(|| format!("Failed to parse session data from {:?}", path))?;
        let mut session = Self {
            name: name.to_string(),
            model: data.model,
            messages: data.messages,
            enable_web_search: data.enable_web_search,
            enable_reasoning_summary: data.enable_reasoning_summary,
            reasoning_effort: data.reasoning_effort,
        };
        // Ensure system prompt is present
        if session.messages.is_empty() || session.messages[0].role != "system" {
            session.messages.insert(0, Message {
                role: "system".to_string(),
                content: SYSTEM_PROMPT.to_string(),
            });
        }
        Ok(session)
    }

    /// Append a user turn (not saved automatically).
    pub fn add_user_message(&mut self, content: String) {
        self.messages.push(Message {
            role: "user".to_string(),
            content,
        });
    }

    /// Append an assistant turn (not saved automatically).
    pub fn add_assistant_message(&mut self, content: String) {
        self.messages.push(Message {
            role: "assistant".to_string(),
            content,
        });
    }

    /// Drop the conversation but keep settings; the system prompt is
    /// re-added so the invariant on `messages[0]` holds.
    pub fn clear_messages(&mut self) {
        self.messages.clear();
        // Re-add system prompt
        self.messages.push(Message {
            role: "system".to_string(),
            content: SYSTEM_PROMPT.to_string(),
        });
    }

    /// List (name, mtime) for every saved session, newest first. Uses the
    /// file's modification time, not the `updated_at` field inside the JSON.
    pub fn list_sessions() -> Result<Vec<(String, DateTime<Utc>)>> {
        let sessions_dir = Self::sessions_dir()?;
        // NOTE(review): this check is effectively dead — sessions_dir()
        // creates the directory — but it is harmless and kept as-is.
        if !sessions_dir.exists() {
            return Ok(Vec::new());
        }
        let mut sessions = Vec::new();
        for entry in fs::read_dir(&sessions_dir)? {
            let entry = entry?;
            let path = entry.path();
            // Only *.json files count as sessions; the .tmp files written
            // by save() are skipped here.
            if let Some(extension) = path.extension() {
                if extension == "json" {
                    if let Some(name) = path.file_stem().and_then(|s| s.to_str()) {
                        let metadata = entry.metadata()?;
                        let modified = metadata.modified()?;
                        let datetime = DateTime::<Utc>::from(modified);
                        sessions.push((name.to_string(), datetime));
                    }
                }
            }
        }
        sessions.sort_by(|a, b| b.1.cmp(&a.1)); // Sort by modification time, newest first
        Ok(sessions)
    }

    /// Delete the file backing session `name`; errors if it does not exist.
    pub fn delete_session(name: &str) -> Result<()> {
        let path = Self::session_path(name)?;
        if !path.exists() {
            return Err(anyhow::anyhow!("Session '{}' does not exist", name));
        }
        fs::remove_file(&path)
            .with_context(|| format!("Failed to delete session file: {:?}", path))?;
        Ok(())
    }
}

68
src/main.rs Normal file
View File

@@ -0,0 +1,68 @@
mod cli;
mod core;
mod utils;
use anyhow::{Context, Result};
use clap::Parser;
use std::env;
use crate::cli::ChatCLI;
use crate::core::{provider::is_model_supported, Session};
use crate::utils::Display;
/// Command-line arguments (parsed by clap); the `help` attributes below are
/// the user-facing documentation for each flag.
#[derive(Parser)]
#[command(name = "gpt-cli-rust")]
#[command(about = "A lightweight command-line interface for chatting with AI models")]
#[command(version)]
struct Args {
    // Which saved session to open or create.
    #[arg(short, long, default_value = "default", help = "Session name")]
    session: String,
    // Optional model override; validated against the provider registry.
    #[arg(short, long, help = "Model name to use (overrides saved value)")]
    model: Option<String>,
}
/// Entry point: resolve the session (loaded or newly created), validate any
/// model override, then hand control to the interactive CLI loop.
#[tokio::main]
async fn main() -> Result<()> {
    let args = Args::parse();
    let display = Display::new();

    // Load or create session
    let session = if let Ok(mut loaded) = Session::load(&args.session) {
        // Existing session: an explicit --model overrides it, but only when
        // the name is in the supported-model registry.
        if let Some(requested) = args.model {
            if is_model_supported(&requested) {
                loaded.model = requested;
            } else {
                display.print_warning(&format!(
                    "Model '{}' is not supported. Using saved model '{}'",
                    requested, loaded.model
                ));
            }
        }
        loaded
    } else {
        // No saved session: pick --model, then $DEFAULT_MODEL, then "gpt-5",
        // falling back to "gpt-5" again if the choice is unsupported.
        let candidate = args
            .model
            .or_else(|| env::var("DEFAULT_MODEL").ok())
            .unwrap_or_else(|| "gpt-5".to_string());
        if is_model_supported(&candidate) {
            Session::new(args.session, candidate)
        } else {
            display.print_warning(&format!(
                "Model '{}' is not supported. Falling back to 'gpt-5'",
                candidate
            ));
            Session::new(args.session, "gpt-5".to_string())
        }
    };

    // Run the CLI
    let mut cli = ChatCLI::new(session).context("Failed to initialize CLI")?;
    cli.run().await.context("CLI error")?;
    Ok(())
}

132
src/utils/display.rs Normal file
View File

@@ -0,0 +1,132 @@
use console::{style, Term};
use std::io::{self, Write};
/// Thin wrapper over a `console::Term` for all styled terminal output.
pub struct Display {
    term: Term,
}
impl Display {
    /// Creates a display writing to stdout.
    pub fn new() -> Self {
        Self {
            term: Term::stdout(),
        }
    }

    /// Clears the terminal and prints the application banner.
    pub fn print_header(&self) {
        // Clearing can fail on non-TTY outputs; that is fine to ignore.
        self.term.clear_screen().ok();
        println!("{}", style("🤖 GPT CLI (Rust)").bold().magenta());
        // Bugfix: this was `"".repeat(50)`, which renders an empty line.
        // NOTE(review): the rule glyph was presumably lost in an encoding
        // round-trip — confirm the originally intended character.
        println!("{}", style("─".repeat(50)).dim());
    }

    /// Prints a dimmed informational line.
    pub fn print_info(&self, message: &str) {
        // Bugfix: icon literal was empty, leaving a stray leading space.
        println!("{} {}", style("ℹ").blue(), style(message).dim());
    }

    /// Prints a success line in green.
    #[allow(dead_code)]
    pub fn print_success(&self, message: &str) {
        println!("{} {}", style("✔").green(), style(message).green());
    }

    /// Prints a warning line in yellow.
    pub fn print_warning(&self, message: &str) {
        println!("{} {}", style("⚠").yellow(), style(message).yellow());
    }

    /// Prints an error line in red. Goes to stderr so it survives
    /// stdout redirection.
    pub fn print_error(&self, message: &str) {
        eprintln!("{} {}", style("✖").red(), style(message).red());
    }

    /// Echoes a user message with the user marker.
    #[allow(dead_code)]
    pub fn print_user_input(&self, content: &str) {
        println!("{} {}", style("👤").cyan(), content);
    }

    /// Prints an assistant reply with the assistant marker.
    pub fn print_assistant_response(&self, content: &str) {
        println!("{} {}", style("🤖").magenta(), content);
    }

    /// Prints the dimmed result of a slash command.
    pub fn print_command_result(&self, message: &str) {
        println!("{} {}", style("📝").blue(), style(message).dim());
    }

    /// Shows the active model name and its provider.
    pub fn print_model_info(&self, model: &str, provider: &str) {
        println!(
            "{} Model: {} ({})",
            style("🔧").yellow(),
            style(model).bold(),
            style(provider).dim()
        );
    }

    /// Shows the active session name.
    pub fn print_session_info(&self, session_name: &str) {
        println!(
            "{} Session: {}",
            style("💾").blue(),
            style(session_name).bold()
        );
    }

    /// Prints the command/environment reference shown by `/help`.
    pub fn print_help(&self) {
        let help_text = r#"
Available Commands:
/help - Show this help message
/exit - Exit the CLI
/model [model_name] - Switch model or show interactive picker
/models - List all supported models
/list - List all saved sessions
/new <session_name> - Create a new session
/switch [session_name] - Switch session or show interactive picker
/clear - Clear current conversation
/delete [session_name] - Delete a session
/tool websearch on|off - Enable/disable web search (OpenAI only)
/reasoning on|off - Enable/disable reasoning summaries (OpenAI only)
/effort [low|medium|high] - Set reasoning effort level (GPT-5 only)
Environment Variables:
OPENAI_API_KEY - Required for OpenAI models
ANTHROPIC_API_KEY - Required for Anthropic models
OPENAI_BASE_URL - Optional custom base URL for OpenAI
DEFAULT_MODEL - Default model if not specified
Supported Models:
OpenAI: gpt-4.1, gpt-4.1-mini, gpt-4o, gpt-5, gpt-5-chat-latest, o1, o3, o4-mini, o3-mini
Anthropic: claude-3-5-sonnet-20241022, claude-3-5-haiku-20241022,
claude-3-opus-20240229, claude-3-sonnet-20240229,
claude-3-haiku-20240307
"#;
        println!("{}", style(help_text).dim());
    }

    /// Prints `message` followed by "..." and returns a timer handle;
    /// call `finish`/`finish_with_error` on it to report elapsed time.
    pub fn show_spinner(&self, message: &str) -> SpinnerHandle {
        print!("{} {}... ", style("⏳").yellow(), message);
        // Flush so the message is visible while the caller is still working.
        io::stdout().flush().ok();
        SpinnerHandle::new()
    }
}
// `Display::new` is infallible, so `Default` can delegate to it directly.
impl Default for Display {
    fn default() -> Self {
        Self::new()
    }
}
/// Timer handle returned by `Display::show_spinner`; consume it with
/// `finish` or `finish_with_error` to print the elapsed time.
pub struct SpinnerHandle {
    // Moment the spinner started; used to compute the elapsed duration.
    start_time: std::time::Instant,
}
impl SpinnerHandle {
    /// Starts the timer. Created only via `Display::show_spinner`.
    fn new() -> Self {
        Self {
            start_time: std::time::Instant::now(),
        }
    }

    /// Prints `message` in green, annotated with the elapsed seconds.
    pub fn finish(self, message: &str) {
        let secs = self.start_time.elapsed().as_secs_f32();
        println!("{} ({:.2}s)", style(message).green(), secs);
    }

    /// Prints `message` in red, annotated with the elapsed seconds.
    pub fn finish_with_error(self, message: &str) {
        let secs = self.start_time.elapsed().as_secs_f32();
        println!("{} ({:.2}s)", style(message).red(), secs);
    }
}

94
src/utils/input.rs Normal file
View File

@@ -0,0 +1,94 @@
use anyhow::Result;
use dialoguer::{theme::ColorfulTheme, Select};
use rustyline::{error::ReadlineError, DefaultEditor};
/// Line-input helper: readline editing with persistent history, plus
/// interactive list pickers and yes/no confirmations.
pub struct InputHandler {
    // rustyline editor; owns the in-memory command history.
    editor: DefaultEditor,
}
impl InputHandler {
    /// Location of the persistent readline history file:
    /// `~/.chat_cli_history`, falling back to `./.chat_cli_history` when
    /// the home directory cannot be determined. Shared by `new` and
    /// `save_history` (the path was previously duplicated in both).
    fn history_path() -> std::path::PathBuf {
        dirs::home_dir()
            .map(|home| home.join(".chat_cli_history"))
            .unwrap_or_else(|| ".chat_cli_history".into())
    }

    /// Creates a line editor and best-effort loads prior history
    /// (a missing history file on first run is not an error).
    pub fn new() -> Result<Self> {
        let mut editor = DefaultEditor::new()?;
        editor.load_history(&Self::history_path()).ok();
        Ok(Self { editor })
    }

    /// Reads one line and records it in history.
    ///
    /// Returns `Ok(None)` on Ctrl-C or Ctrl-D so the caller decides
    /// whether to continue or exit; other readline failures are errors.
    pub fn read_line(&mut self, prompt: &str) -> Result<Option<String>> {
        match self.editor.readline(prompt) {
            Ok(line) => {
                let _ = self.editor.add_history_entry(&line);
                Ok(Some(line))
            }
            Err(ReadlineError::Interrupted) => {
                println!("^C");
                Ok(None)
            }
            Err(ReadlineError::Eof) => {
                println!("^D");
                Ok(None)
            }
            Err(err) => Err(anyhow::anyhow!("Error reading input: {}", err)),
        }
    }

    /// Persists the in-memory history to disk.
    pub fn save_history(&mut self) -> Result<()> {
        self.editor.save_history(&Self::history_path())?;
        Ok(())
    }

    /// Shows an interactive picker over `items`; `current` (matched by
    /// string value) pre-selects the cursor. Returns `Ok(None)` when the
    /// list is empty or the user cancels.
    pub fn select_from_list<T: ToString + Clone>(
        &self,
        title: &str,
        items: &[T],
        current: Option<&str>,
    ) -> Result<Option<T>> {
        if items.is_empty() {
            println!("(no items available)");
            return Ok(None);
        }
        let theme = ColorfulTheme::default();
        // Pre-select the currently-active item when it is present.
        let default_index = current
            .and_then(|cur| items.iter().position(|item| item.to_string() == cur))
            .unwrap_or(0);
        let selection = Select::with_theme(&theme)
            .with_prompt(title)
            .items(items)
            .default(default_index)
            .interact_opt()?;
        Ok(selection.map(|idx| items[idx].clone()))
    }

    /// Asks a yes/no question and returns the user's answer.
    pub fn confirm(&self, message: &str) -> Result<bool> {
        use dialoguer::Confirm;
        let confirmation = Confirm::with_theme(&ColorfulTheme::default())
            .with_prompt(message)
            .interact()?;
        Ok(confirmation)
    }
}
// NOTE(review): panics if the readline editor cannot be initialized
// (e.g. no usable terminal); prefer `InputHandler::new` wherever a
// `Result` can be propagated instead.
impl Default for InputHandler {
    fn default() -> Self {
        Self::new().expect("Failed to initialize input handler")
    }
}

5
src/utils/mod.rs Normal file
View File

@@ -0,0 +1,5 @@
pub mod display;
pub mod input;
pub use display::*;
pub use input::*;