//! Configuration types and persistence for the chat CLI: TOML config-file
//! loading/saving, environment-variable overrides, and API-key validation.
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::env;
use std::fs;
use std::path::PathBuf;
/// Top-level application configuration, persisted as TOML at the path
/// returned by `Config::config_file_path`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    // API endpoints, version header, timeout, and retry settings.
    pub api: ApiConfig,
    // Default model and feature toggles used when no override is given.
    pub defaults: DefaultsConfig,
    // Size limits (tokens, history length, session listing).
    pub limits: LimitsConfig,
    // Session storage location and file extension.
    pub session: SessionConfig,
}
/// Provider API endpoint and HTTP behavior settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiConfig {
    // Base URL for OpenAI-compatible requests; overridable via `OPENAI_BASE_URL`.
    pub openai_base_url: String,
    // Base URL for Anthropic requests.
    pub anthropic_base_url: String,
    // Value sent as the Anthropic API version header (e.g. "2023-06-01").
    pub anthropic_version: String,
    // Per-request timeout in seconds.
    pub request_timeout_seconds: u64,
    // Maximum number of retries per request.
    pub max_retries: u32,
}
/// Default model selection and feature toggles.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefaultsConfig {
    // Model used when none is specified; overridable via `DEFAULT_MODEL`.
    pub model: String,
    // Reasoning-effort level passed to the model (e.g. "medium").
    pub reasoning_effort: String,
    // Whether web search is enabled by default.
    pub enable_web_search: bool,
    // Whether reasoning summaries are enabled by default.
    pub enable_reasoning_summary: bool,
}
/// Numeric caps applied across the application.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LimitsConfig {
    // Maximum completion tokens requested from Anthropic models.
    pub max_tokens_anthropic: u32,
    // Maximum number of messages retained in conversation history.
    pub max_conversation_history: usize,
    // Maximum number of sessions shown when listing.
    pub max_sessions_to_list: usize,
}
/// Where chat sessions are stored on disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionConfig {
    // Directory name (relative to the user's home) holding session files.
    pub sessions_dir_name: String,
    // File extension for session files, without the leading dot.
    pub file_extension: String,
}
impl Default for Config {
fn default() -> Self {
Self {
api: ApiConfig::default(),
defaults: DefaultsConfig::default(),
limits: LimitsConfig::default(),
session: SessionConfig::default(),
}
}
}
impl Default for ApiConfig {
fn default() -> Self {
Self {
openai_base_url: "https://api.openai.com/v1".to_string(),
anthropic_base_url: "https://api.anthropic.com/v1".to_string(),
anthropic_version: "2023-06-01".to_string(),
request_timeout_seconds: 120,
max_retries: 3,
}
}
}
impl Default for DefaultsConfig {
fn default() -> Self {
Self {
model: "gpt-5".to_string(),
reasoning_effort: "medium".to_string(),
enable_web_search: true,
enable_reasoning_summary: false,
}
}
}
impl Default for LimitsConfig {
fn default() -> Self {
Self {
max_tokens_anthropic: 4096,
max_conversation_history: 100,
max_sessions_to_list: 50,
}
}
}
impl Default for SessionConfig {
fn default() -> Self {
Self {
sessions_dir_name: ".chat_cli_sessions".to_string(),
file_extension: "json".to_string(),
}
}
}
/// Snapshot of the environment variables read by
/// `Config::validate_env_variables`; each field is `None` when the
/// corresponding variable is unset.
#[derive(Debug)]
pub struct EnvVariables {
    pub openai_api_key: Option<String>,
    pub anthropic_api_key: Option<String>,
    pub openai_base_url: Option<String>,
    pub default_model: Option<String>,
}
impl Config {
    /// Load the configuration: parse the on-disk TOML file when it exists,
    /// otherwise start from built-in defaults; then apply environment
    /// overrides (`OPENAI_BASE_URL`, `DEFAULT_MODEL`) in either case.
    ///
    /// # Errors
    /// Fails if an existing config file cannot be read or parsed, or if the
    /// home directory cannot be determined. A missing file is not an error.
    pub fn load() -> Result<Self> {
        let config_path = Self::config_file_path()?;
        // Single source for the base config; env overrides are applied
        // exactly once below instead of being duplicated per branch.
        let mut config: Config = if config_path.exists() {
            let config_content = fs::read_to_string(&config_path)
                .with_context(|| format!("Failed to read config file: {:?}", config_path))?;
            toml::from_str(&config_content)
                .with_context(|| format!("Failed to parse config file: {:?}", config_path))?
        } else {
            Config::default()
        };
        config.apply_env_overrides()?;
        Ok(config)
    }

    /// Serialize this configuration as pretty-printed TOML and write it to
    /// the config file path, creating the parent directory if needed.
    ///
    /// # Errors
    /// Fails on serialization or filesystem errors.
    pub fn save(&self) -> Result<()> {
        let config_path = Self::config_file_path()?;
        // Create config directory if it doesn't exist
        if let Some(parent) = config_path.parent() {
            fs::create_dir_all(parent)
                .with_context(|| format!("Failed to create config directory: {:?}", parent))?;
        }
        let config_content = toml::to_string_pretty(self)
            .context("Failed to serialize config")?;
        fs::write(&config_path, config_content)
            .with_context(|| format!("Failed to write config file: {:?}", config_path))?;
        Ok(())
    }

    /// Absolute path of the config file: `<home>/.config/gpt-cli-rust/config.toml`.
    ///
    /// NOTE(review): the `.config` segment is hard-coded rather than using a
    /// platform-specific config dir, so the layout is Unix-style on every
    /// OS — confirm that is intentional before changing it (existing users'
    /// files live at this path).
    pub fn config_file_path() -> Result<PathBuf> {
        let home = dirs::home_dir().context("Could not find home directory")?;
        Ok(home.join(".config").join("gpt-cli-rust").join("config.toml"))
    }

    /// Overwrite selected fields from the environment when the variables are
    /// set: `OPENAI_BASE_URL` -> `api.openai_base_url` and
    /// `DEFAULT_MODEL` -> `defaults.model`. Unset variables leave the
    /// corresponding fields untouched.
    fn apply_env_overrides(&mut self) -> Result<()> {
        // Override API URLs
        if let Ok(openai_base_url) = env::var("OPENAI_BASE_URL") {
            self.api.openai_base_url = openai_base_url;
        }
        // Override defaults
        if let Ok(default_model) = env::var("DEFAULT_MODEL") {
            self.defaults.model = default_model;
        }
        Ok(())
    }

    /// Snapshot the provider-related environment variables.
    ///
    /// # Errors
    /// Fails when neither `OPENAI_API_KEY` nor `ANTHROPIC_API_KEY` is set;
    /// all other variables are optional.
    pub fn validate_env_variables() -> Result<EnvVariables> {
        let openai_api_key = env::var("OPENAI_API_KEY").ok();
        let anthropic_api_key = env::var("ANTHROPIC_API_KEY").ok();
        let openai_base_url = env::var("OPENAI_BASE_URL").ok();
        let default_model = env::var("DEFAULT_MODEL").ok();
        // At least one API key must be present
        if openai_api_key.is_none() && anthropic_api_key.is_none() {
            return Err(anyhow::anyhow!(
                "At least one API key must be set: OPENAI_API_KEY or ANTHROPIC_API_KEY"
            ));
        }
        Ok(EnvVariables {
            openai_api_key,
            anthropic_api_key,
            openai_base_url,
            default_model,
        })
    }

    /// Verify that the API key required by `model`'s provider is present in
    /// `env`.
    ///
    /// # Errors
    /// Fails when the provider resolved for `model` has no matching key.
    pub fn validate_model_availability(&self, env: &EnvVariables, model: &str) -> Result<()> {
        use crate::core::provider::{get_provider_for_model, Provider};
        let provider = get_provider_for_model(model);
        // Exhaustive match: adding a Provider variant forces a decision here.
        match provider {
            Provider::OpenAI => {
                if env.openai_api_key.is_none() {
                    return Err(anyhow::anyhow!(
                        "OPENAI_API_KEY is required for OpenAI model: {}", model
                    ));
                }
            }
            Provider::Anthropic => {
                if env.anthropic_api_key.is_none() {
                    return Err(anyhow::anyhow!(
                        "ANTHROPIC_API_KEY is required for Anthropic model: {}", model
                    ));
                }
            }
        }
        Ok(())
    }

    /// Write a default config file as a starting point for customization.
    /// A no-op when a config file already exists (never overwrites).
    pub fn create_example_config() -> Result<()> {
        let config_path = Self::config_file_path()?;
        if config_path.exists() {
            return Ok(()); // Don't overwrite existing config
        }
        let example_config = Config::default();
        example_config.save()?;
        println!("Created example config file at: {:?}", config_path);
        println!("You can customize it to change default settings.");
        Ok(())
    }

    /// Print a human-readable summary of the active configuration to stdout.
    pub fn print_config_info(&self) {
        println!("📋 Configuration:");
        println!("  Default model: {}", self.defaults.model);
        println!("  Web search: {}", if self.defaults.enable_web_search { "enabled" } else { "disabled" });
        println!("  Reasoning summaries: {}", if self.defaults.enable_reasoning_summary { "enabled" } else { "disabled" });
        println!("  Request timeout: {}s", self.api.request_timeout_seconds);
        println!("  Max conversation history: {}", self.limits.max_conversation_history);
    }
}