//! Configuration loading, defaults, and environment-variable overrides
//! for the chat CLI (`~/.config/gpt-cli-rust/config.toml`).
use anyhow::{Context, Result};
|
|
use serde::{Deserialize, Serialize};
|
|
use std::env;
|
|
use std::fs;
|
|
use std::path::PathBuf;
|
|
|
|
/// Fallback session name used when a config file omits `default_session`
/// (wired into serde via `#[serde(default = "default_session_name")]`).
fn default_session_name() -> String {
    String::from("default")
}
|
|
|
|
/// Top-level application configuration, loaded from the TOML config file
/// and then overridden by environment variables (see `apply_env_overrides`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Provider endpoints and HTTP transport settings.
    pub api: ApiConfig,
    /// Default model and behavior settings.
    pub defaults: DefaultsConfig,
    /// Numeric caps (tokens, history length, session listing).
    pub limits: LimitsConfig,
    /// Session persistence settings (directory and file extension).
    pub session: SessionConfig,
}
|
|
|
|
/// Provider endpoint and HTTP transport settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiConfig {
    /// Base URL for the OpenAI API; overridable via `OPENAI_BASE_URL`.
    pub openai_base_url: String,
    /// Base URL for the Anthropic API.
    pub anthropic_base_url: String,
    /// Anthropic API version string (default "2023-06-01") — presumably sent
    /// as the `anthropic-version` header; confirm at the request call site.
    pub anthropic_version: String,
    /// Per-request timeout, in seconds.
    pub request_timeout_seconds: u64,
    /// Maximum number of retries for a failed request.
    pub max_retries: u32,
}
|
|
|
|
/// User-tunable defaults applied when the command line does not specify
/// a value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefaultsConfig {
    /// Model identifier used when none is given (default "gpt-5");
    /// overridable via `DEFAULT_MODEL`.
    pub model: String,
    /// Reasoning effort hint (default "medium"); accepted values depend on
    /// the provider and are not validated here.
    pub reasoning_effort: String,
    /// Whether web search is enabled by default.
    pub enable_web_search: bool,
    /// Whether reasoning summaries are requested by default.
    pub enable_reasoning_summary: bool,
    /// Session name used when none is given. Optional in the TOML file:
    /// older configs without it fall back to `default_session_name()`.
    #[serde(default = "default_session_name")]
    pub default_session: String,
}
|
|
|
|
/// Numeric caps applied across the application.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LimitsConfig {
    /// Token cap for Anthropic requests (default 4096).
    pub max_tokens_anthropic: u32,
    /// Cap on retained conversation history entries (default 100).
    pub max_conversation_history: usize,
    /// Cap on the number of sessions shown in a listing (default 50).
    pub max_sessions_to_list: usize,
}
|
|
|
|
/// Where and how chat sessions are persisted on disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionConfig {
    /// Directory name holding session files (default ".chat_cli_sessions").
    pub sessions_dir_name: String,
    /// File extension for session files, without the dot (default "json").
    pub file_extension: String,
}
|
|
|
|
impl Default for Config {
|
|
fn default() -> Self {
|
|
Self {
|
|
api: ApiConfig::default(),
|
|
defaults: DefaultsConfig::default(),
|
|
limits: LimitsConfig::default(),
|
|
session: SessionConfig::default(),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Default for ApiConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
openai_base_url: "https://api.openai.com/v1".to_string(),
|
|
anthropic_base_url: "https://api.anthropic.com/v1".to_string(),
|
|
anthropic_version: "2023-06-01".to_string(),
|
|
request_timeout_seconds: 120,
|
|
max_retries: 3,
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Default for DefaultsConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
model: "gpt-5".to_string(),
|
|
reasoning_effort: "medium".to_string(),
|
|
enable_web_search: true,
|
|
enable_reasoning_summary: false,
|
|
default_session: "default".to_string(),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Default for LimitsConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
max_tokens_anthropic: 4096,
|
|
max_conversation_history: 100,
|
|
max_sessions_to_list: 50,
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Default for SessionConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
sessions_dir_name: ".chat_cli_sessions".to_string(),
|
|
file_extension: "json".to_string(),
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Snapshot of the environment variables the application understands,
/// captured by `Config::validate_env_variables`. Each field is `Some`
/// only when the corresponding variable was set.
#[derive(Debug)]
pub struct EnvVariables {
    /// `OPENAI_API_KEY`, if set.
    pub openai_api_key: Option<String>,
    /// `ANTHROPIC_API_KEY`, if set.
    pub anthropic_api_key: Option<String>,
    /// `OPENAI_BASE_URL`, if set.
    pub openai_base_url: Option<String>,
    /// `DEFAULT_MODEL`, if set.
    pub default_model: Option<String>,
}
|
|
|
|
impl Config {
|
|
pub fn load() -> Result<Self> {
|
|
let config_path = Self::config_file_path()?;
|
|
|
|
if config_path.exists() {
|
|
let config_content = fs::read_to_string(&config_path)
|
|
.with_context(|| format!("Failed to read config file: {:?}", config_path))?;
|
|
|
|
let mut config: Config = toml::from_str(&config_content)
|
|
.with_context(|| format!("Failed to parse config file: {:?}", config_path))?;
|
|
|
|
// Override with environment variables if present
|
|
config.apply_env_overrides()?;
|
|
|
|
Ok(config)
|
|
} else {
|
|
let mut config = Config::default();
|
|
config.apply_env_overrides()?;
|
|
Ok(config)
|
|
}
|
|
}
|
|
|
|
pub fn save(&self) -> Result<()> {
|
|
let config_path = Self::config_file_path()?;
|
|
|
|
// Create config directory if it doesn't exist
|
|
if let Some(parent) = config_path.parent() {
|
|
fs::create_dir_all(parent)
|
|
.with_context(|| format!("Failed to create config directory: {:?}", parent))?;
|
|
}
|
|
|
|
let config_content = toml::to_string_pretty(self)
|
|
.context("Failed to serialize config")?;
|
|
|
|
fs::write(&config_path, config_content)
|
|
.with_context(|| format!("Failed to write config file: {:?}", config_path))?;
|
|
|
|
Ok(())
|
|
}
|
|
|
|
pub fn config_file_path() -> Result<PathBuf> {
|
|
let home = dirs::home_dir().context("Could not find home directory")?;
|
|
Ok(home.join(".config").join("gpt-cli-rust").join("config.toml"))
|
|
}
|
|
|
|
pub fn apply_env_overrides(&mut self) -> Result<()> {
|
|
// Override API URLs
|
|
if let Ok(openai_base_url) = env::var("OPENAI_BASE_URL") {
|
|
self.api.openai_base_url = openai_base_url;
|
|
}
|
|
|
|
// Override defaults
|
|
if let Ok(default_model) = env::var("DEFAULT_MODEL") {
|
|
self.defaults.model = default_model;
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
pub fn validate_env_variables() -> Result<EnvVariables> {
|
|
let openai_api_key = env::var("OPENAI_API_KEY").ok();
|
|
let anthropic_api_key = env::var("ANTHROPIC_API_KEY").ok();
|
|
let openai_base_url = env::var("OPENAI_BASE_URL").ok();
|
|
let default_model = env::var("DEFAULT_MODEL").ok();
|
|
|
|
// At least one API key must be present
|
|
if openai_api_key.is_none() && anthropic_api_key.is_none() {
|
|
return Err(anyhow::anyhow!(
|
|
"At least one API key must be set: OPENAI_API_KEY or ANTHROPIC_API_KEY"
|
|
));
|
|
}
|
|
|
|
Ok(EnvVariables {
|
|
openai_api_key,
|
|
anthropic_api_key,
|
|
openai_base_url,
|
|
default_model,
|
|
})
|
|
}
|
|
|
|
pub fn validate_model_availability(&self, env: &EnvVariables, model: &str) -> Result<()> {
|
|
use crate::core::provider::{get_provider_for_model, Provider};
|
|
|
|
let provider = get_provider_for_model(model);
|
|
|
|
match provider {
|
|
Provider::OpenAI => {
|
|
if env.openai_api_key.is_none() {
|
|
return Err(anyhow::anyhow!(
|
|
"OPENAI_API_KEY is required for OpenAI model: {}", model
|
|
));
|
|
}
|
|
}
|
|
Provider::Anthropic => {
|
|
if env.anthropic_api_key.is_none() {
|
|
return Err(anyhow::anyhow!(
|
|
"ANTHROPIC_API_KEY is required for Anthropic model: {}", model
|
|
));
|
|
}
|
|
}
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
|
|
pub fn create_example_config() -> Result<()> {
|
|
let config_path = Self::config_file_path()?;
|
|
|
|
if config_path.exists() {
|
|
return Ok(()); // Don't overwrite existing config
|
|
}
|
|
|
|
let example_config = Config::default();
|
|
example_config.save()?;
|
|
|
|
println!("Created example config file at: {:?}", config_path);
|
|
println!("You can customize it to change default settings.");
|
|
|
|
Ok(())
|
|
}
|
|
|
|
pub fn print_config_info(&self) {
|
|
println!("📋 Configuration:");
|
|
println!(" Default model: {}", self.defaults.model);
|
|
println!(" Web search: {}", if self.defaults.enable_web_search { "enabled" } else { "disabled" });
|
|
println!(" Reasoning summaries: {}", if self.defaults.enable_reasoning_summary { "enabled" } else { "disabled" });
|
|
println!(" Request timeout: {}s", self.api.request_timeout_seconds);
|
|
println!(" Max conversation history: {}", self.limits.max_conversation_history);
|
|
}
|
|
|
|
pub fn set_default_session(&mut self, session_name: String) -> Result<()> {
|
|
self.defaults.default_session = session_name;
|
|
self.save()
|
|
}
|
|
}
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::env;
    use std::sync::{Mutex, MutexGuard};
    use tempfile::TempDir;

    /// Serializes every test that reads or mutates process-global environment
    /// variables. Rust's test harness runs tests in parallel by default, so
    /// unsynchronized `env::set_var`/`remove_var` calls race between tests
    /// (e.g. one test removing OPENAI_API_KEY while another expects it set)
    /// and cause intermittent failures.
    static ENV_LOCK: Mutex<()> = Mutex::new(());

    /// RAII guard that takes the env lock, snapshots the given variables, and
    /// restores them (re-set or removed) on drop — even if the test panics.
    struct EnvSnapshot {
        saved: Vec<(&'static str, Option<String>)>,
        _lock: MutexGuard<'static, ()>,
    }

    impl EnvSnapshot {
        fn new(keys: &[&'static str]) -> Self {
            // Recover from poisoning so one failed test doesn't cascade into
            // spurious failures in every other env-dependent test.
            let lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner());
            let saved = keys.iter().map(|&k| (k, env::var(k).ok())).collect();
            Self { saved, _lock: lock }
        }
    }

    impl Drop for EnvSnapshot {
        fn drop(&mut self) {
            for (key, value) in &self.saved {
                match value {
                    Some(v) => env::set_var(key, v),
                    None => env::remove_var(key),
                }
            }
        }
    }

    /// Builds a fully-populated config with distinctive non-default values.
    fn create_test_config() -> Config {
        Config {
            api: ApiConfig {
                openai_base_url: "https://test-openai.com".to_string(),
                anthropic_base_url: "https://test-anthropic.com".to_string(),
                anthropic_version: "2023-06-01".to_string(),
                request_timeout_seconds: 60,
                max_retries: 2,
            },
            defaults: DefaultsConfig {
                model: "test-model".to_string(),
                reasoning_effort: "low".to_string(),
                enable_web_search: false,
                enable_reasoning_summary: true,
                default_session: "test-session".to_string(),
            },
            limits: LimitsConfig {
                max_tokens_anthropic: 2048,
                max_conversation_history: 50,
                max_sessions_to_list: 25,
            },
            session: SessionConfig {
                sessions_dir_name: ".test_sessions".to_string(),
                file_extension: "json".to_string(),
            },
        }
    }

    #[test]
    fn test_config_defaults() {
        let config = Config::default();

        assert_eq!(config.api.openai_base_url, "https://api.openai.com/v1");
        assert_eq!(config.api.anthropic_base_url, "https://api.anthropic.com/v1");
        assert_eq!(config.api.anthropic_version, "2023-06-01");
        assert_eq!(config.api.request_timeout_seconds, 120);
        assert_eq!(config.api.max_retries, 3);

        assert_eq!(config.defaults.model, "gpt-5");
        assert_eq!(config.defaults.reasoning_effort, "medium");
        assert!(config.defaults.enable_web_search);
        assert!(!config.defaults.enable_reasoning_summary);
        assert_eq!(config.defaults.default_session, "default");

        assert_eq!(config.limits.max_tokens_anthropic, 4096);
        assert_eq!(config.limits.max_conversation_history, 100);
        assert_eq!(config.limits.max_sessions_to_list, 50);

        assert_eq!(config.session.sessions_dir_name, ".chat_cli_sessions");
        assert_eq!(config.session.file_extension, "json");
    }

    #[test]
    fn test_config_serialization() {
        let config = create_test_config();

        // Round-trip through TOML and spot-check one field per section.
        let toml_str = toml::to_string_pretty(&config).unwrap();
        let deserialized: Config = toml::from_str(&toml_str).unwrap();

        assert_eq!(config.api.openai_base_url, deserialized.api.openai_base_url);
        assert_eq!(config.defaults.model, deserialized.defaults.model);
        assert_eq!(config.limits.max_tokens_anthropic, deserialized.limits.max_tokens_anthropic);
        assert_eq!(config.session.sessions_dir_name, deserialized.session.sessions_dir_name);
    }

    #[test]
    fn test_config_save_and_load() {
        let temp_dir = TempDir::new().unwrap();
        let config_path = temp_dir.path().join("config.toml");

        // Write the serialized config to a temp file, then read it back.
        // (We test via file content directly since config_file_path() is not
        // easily mockable.)
        let original_config = create_test_config();
        let toml_content = toml::to_string_pretty(&original_config).unwrap();
        std::fs::write(&config_path, toml_content).unwrap();

        let file_content = std::fs::read_to_string(&config_path).unwrap();
        let loaded_config: Config = toml::from_str(&file_content).unwrap();

        assert_eq!(original_config.api.openai_base_url, loaded_config.api.openai_base_url);
        assert_eq!(original_config.defaults.model, loaded_config.defaults.model);
        assert_eq!(original_config.limits.max_tokens_anthropic, loaded_config.limits.max_tokens_anthropic);
    }

    #[test]
    fn test_env_variable_validation_with_both_keys() {
        let _env = EnvSnapshot::new(&[
            "OPENAI_API_KEY",
            "ANTHROPIC_API_KEY",
            "OPENAI_BASE_URL",
            "DEFAULT_MODEL",
        ]);
        env::set_var("OPENAI_API_KEY", "test-openai-key");
        env::set_var("ANTHROPIC_API_KEY", "test-anthropic-key");
        env::set_var("OPENAI_BASE_URL", "https://custom-openai.com");
        env::set_var("DEFAULT_MODEL", "custom-model");

        let env_vars = Config::validate_env_variables().unwrap();

        assert_eq!(env_vars.openai_api_key, Some("test-openai-key".to_string()));
        assert_eq!(env_vars.anthropic_api_key, Some("test-anthropic-key".to_string()));
        assert_eq!(env_vars.openai_base_url, Some("https://custom-openai.com".to_string()));
        assert_eq!(env_vars.default_model, Some("custom-model".to_string()));
    }

    #[test]
    fn test_env_variable_validation_with_only_openai() {
        let _env = EnvSnapshot::new(&["OPENAI_API_KEY", "ANTHROPIC_API_KEY"]);
        env::remove_var("ANTHROPIC_API_KEY");
        env::set_var("OPENAI_API_KEY", "test-openai-key-only");

        let env_vars = Config::validate_env_variables().unwrap();

        assert_eq!(env_vars.openai_api_key, Some("test-openai-key-only".to_string()));
        assert_eq!(env_vars.anthropic_api_key, None);
    }

    #[test]
    fn test_env_variable_validation_with_only_anthropic() {
        let _env = EnvSnapshot::new(&["OPENAI_API_KEY", "ANTHROPIC_API_KEY"]);
        env::remove_var("OPENAI_API_KEY");
        env::set_var("ANTHROPIC_API_KEY", "test-anthropic-key-only");

        let env_vars = Config::validate_env_variables().unwrap();

        assert_eq!(env_vars.openai_api_key, None);
        assert_eq!(env_vars.anthropic_api_key, Some("test-anthropic-key-only".to_string()));
    }

    #[test]
    fn test_env_variable_validation_with_no_keys() {
        let _env = EnvSnapshot::new(&["OPENAI_API_KEY", "ANTHROPIC_API_KEY"]);
        env::remove_var("OPENAI_API_KEY");
        env::remove_var("ANTHROPIC_API_KEY");

        let result = Config::validate_env_variables();

        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("At least one API key must be set"));
    }

    #[test]
    fn test_model_availability_validation_openai() {
        // validate_model_availability only reads the EnvVariables snapshot,
        // so no live env mutation is needed here.
        let config = Config::default();
        let env_vars = EnvVariables {
            openai_api_key: Some("test-key".to_string()),
            anthropic_api_key: None,
            openai_base_url: None,
            default_model: None,
        };

        // Should succeed for OpenAI model.
        assert!(config.validate_model_availability(&env_vars, "gpt-4").is_ok());

        // Should fail for Anthropic model without key.
        assert!(config.validate_model_availability(&env_vars, "claude-sonnet-4-20250514").is_err());
    }

    #[test]
    fn test_model_availability_validation_anthropic() {
        let config = Config::default();
        let env_vars = EnvVariables {
            openai_api_key: None,
            anthropic_api_key: Some("test-key".to_string()),
            openai_base_url: None,
            default_model: None,
        };

        // Should succeed for Anthropic model.
        assert!(config.validate_model_availability(&env_vars, "claude-sonnet-4-20250514").is_ok());

        // Should fail for OpenAI model without key.
        assert!(config.validate_model_availability(&env_vars, "gpt-4").is_err());
    }

    #[test]
    fn test_apply_env_overrides() {
        let _env = EnvSnapshot::new(&["OPENAI_BASE_URL", "DEFAULT_MODEL"]);
        env::set_var("OPENAI_BASE_URL", "https://override-openai.com");
        env::set_var("DEFAULT_MODEL", "override-model");

        let mut config = Config::default();
        config.apply_env_overrides().unwrap();

        assert_eq!(config.api.openai_base_url, "https://override-openai.com");
        assert_eq!(config.defaults.model, "override-model");
    }

    #[test]
    fn test_default_session_name_function() {
        assert_eq!(default_session_name(), "default");
    }

    #[test]
    fn test_set_default_session() {
        let mut config = create_test_config();

        // Test the field mutation itself.
        assert_eq!(config.defaults.default_session, "test-session");
        config.defaults.default_session = "new-session".to_string();
        assert_eq!(config.defaults.default_session, "new-session");

        // Note: the full set_default_session method also persists to disk,
        // which we can't test without mocking the filesystem path.
    }

    #[test]
    fn test_config_file_path() {
        let path = Config::config_file_path().unwrap();
        assert!(path.to_string_lossy().contains(".config"));
        assert!(path.to_string_lossy().contains("gpt-cli-rust"));
        assert!(path.to_string_lossy().contains("config.toml"));
    }

    #[test]
    fn test_invalid_toml_parsing() {
        let invalid_toml = "this is not valid toml content [[[[";
        let result: Result<Config, _> = toml::from_str(invalid_toml);
        assert!(result.is_err());
    }

    #[test]
    fn test_config_with_missing_optional_fields() {
        // default_session is intentionally omitted to exercise the
        // #[serde(default = "default_session_name")] fallback.
        let minimal_toml = r#"
[api]
openai_base_url = "https://api.openai.com/v1"
anthropic_base_url = "https://api.anthropic.com/v1"
anthropic_version = "2023-06-01"
request_timeout_seconds = 120
max_retries = 3

[defaults]
model = "gpt-4"
reasoning_effort = "medium"
enable_web_search = true
enable_reasoning_summary = false
# default_session field is optional due to serde default

[limits]
max_tokens_anthropic = 4096
max_conversation_history = 100
max_sessions_to_list = 50

[session]
sessions_dir_name = ".chat_cli_sessions"
file_extension = "json"
"#;

        let config: Config = toml::from_str(minimal_toml).unwrap();
        assert_eq!(config.defaults.default_session, "default"); // Should use the default value
    }
}