anthropic model changes

parent b847ef8812
commit 10b79eaf79

src/cli.rs | 52

@@ -2,7 +2,7 @@ use anyhow::Result;
 use crate::config::Config;
 use crate::core::{
-    create_client, get_provider_for_model, provider::get_all_models,
+    create_client, get_provider_for_model, provider::{get_model_info_list, get_display_name_for_model, get_model_id_from_display_name},
     ChatClient, Session,
 };
 use crate::utils::{Display, InputHandler, SessionAction};

@@ -45,7 +45,8 @@ impl ChatCLI {
         self.display.print_info("Type /help for help.");

         let provider = get_provider_for_model(&self.session.model);
-        self.display.print_model_info(&self.session.model, provider.as_str());
+        let display_name = get_display_name_for_model(&self.session.model);
+        self.display.print_model_info(&display_name, provider.as_str());
         self.display.print_session_info(&self.session.name);

         println!();

@@ -216,27 +217,34 @@ impl ChatCLI {
     }

     async fn model_switcher(&mut self) -> Result<()> {
-        let all_models = get_all_models();
+        let model_info_list = get_model_info_list();
+        let display_names: Vec<String> = model_info_list.iter().map(|info| info.display_name.to_string()).collect();
+        let current_display_name = get_display_name_for_model(&self.session.model);

         let selection = self.input.select_from_list(
             "Select a model:",
-            &all_models,
-            Some(&self.session.model),
+            &display_names,
+            Some(&current_display_name),
         )?;

         match selection {
-            Some(model) => {
-                if model.to_string() == self.session.model {
-                    self.display.print_info("Already using that model");
+            Some(display_name) => {
+                if let Some(model_id) = get_model_id_from_display_name(&display_name) {
+                    if model_id == self.session.model {
+                        self.display.print_info("Already using that model");
+                    } else {
+                        self.session.model = model_id.clone();
+                        let provider = get_provider_for_model(&self.session.model);
+                        self.display.print_command_result(&format!(
+                            "Model switched to {} ({})",
+                            display_name,
+                            provider.as_str()
+                        ));
+                        self.client = None; // Force client recreation
+                        self.session.save()?; // Save the model change
+                    }
                 } else {
-                    self.session.model = model.to_string();
-                    let provider = get_provider_for_model(&self.session.model);
-                    self.display.print_command_result(&format!(
-                        "Model switched to {} ({})",
-                        self.session.model,
-                        provider.as_str()
-                    ));
-                    self.client = None; // Force client recreation
-                    self.session.save()?; // Save the model change
+                    self.display.print_error("Invalid model selection");
                 }
             }
             None => {

@@ -292,9 +300,10 @@ impl ChatCLI {
         match Session::load(&session_name) {
             Ok(session) => {
                 self.session = session;
+                let display_name = get_display_name_for_model(&self.session.model);
                 self.display.print_command_result(&format!(
                     "Switched to session '{}' (model={})",
-                    self.session.name, self.session.model
+                    self.session.name, display_name
                 ));
                 self.client = None; // Force client recreation
                 return Ok(());

@@ -329,9 +338,10 @@ impl ChatCLI {
         match Session::load(&remaining_names[0]) {
             Ok(session) => {
                 self.session = session;
+                let display_name = get_display_name_for_model(&self.session.model);
                 self.display.print_command_result(&format!(
                     "Switched to session '{}' (model={})",
-                    self.session.name, self.session.model
+                    self.session.name, display_name
                 ));
                 self.client = None;
                 return Ok(());

@@ -383,12 +393,10 @@ impl ChatCLI {
         // Show compatibility warnings based on provider
         match provider {
             crate::core::provider::Provider::Anthropic => {
-                if web_enabled {
-                    self.display.print_warning("Web search is not supported by Anthropic models");
-                }
                 if reasoning_enabled {
                     self.display.print_warning("Reasoning summaries are not supported by Anthropic models");
                 }
+                // Web search is now supported by Anthropic models
             }
             crate::core::provider::Provider::OpenAI => {
                 // OpenAI models generally support these features

@@ -287,6 +287,35 @@ struct AnthropicDelta {
     text: Option<String>,
 }

+// Anthropic tool use structures
+#[derive(Deserialize, Debug)]
+struct AnthropicToolUseStart {
+    index: u32,
+    tool_use: AnthropicToolUse,
+}
+
+#[derive(Deserialize, Debug)]
+struct AnthropicToolUse {
+    id: String,
+    #[serde(rename = "type")]
+    tool_type: String,
+    name: String,
+    input: serde_json::Value,
+}
+
+#[derive(Deserialize, Debug)]
+struct AnthropicToolUseDelta {
+    index: u32,
+    delta: AnthropicToolDelta,
+}
+
+#[derive(Deserialize, Debug)]
+struct AnthropicToolDelta {
+    #[serde(rename = "type")]
+    delta_type: String,
+    partial_json: Option<String>,
+}
+
 impl OpenAIClient {
     pub fn new(config: &Config) -> Result<Self> {
         let api_key = env::var("OPENAI_API_KEY")

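Note: a minimal sketch of how the new structs are expected to deserialize. The event JSON below is an assumption inferred from the field names above, not taken from Anthropic's documentation:

    use serde::Deserialize;
    use serde_json::json;

    // Mirrors the structs added in this commit.
    #[derive(Deserialize, Debug)]
    struct AnthropicToolUseStart {
        index: u32,
        tool_use: AnthropicToolUse,
    }

    #[derive(Deserialize, Debug)]
    struct AnthropicToolUse {
        id: String,
        #[serde(rename = "type")]
        tool_type: String,
        name: String,
        input: serde_json::Value,
    }

    fn main() {
        // Hypothetical content_block_start payload; the real wire format may differ.
        let data = json!({
            "index": 0,
            "tool_use": {
                "id": "toolu_123",
                "type": "server_tool_use",
                "name": "web_search",
                "input": {}
            }
        });
        let start: AnthropicToolUseStart = serde_json::from_value(data).unwrap();
        assert_eq!(start.tool_use.name, "web_search");
        println!("{:?}", start);
    }
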
@@ -887,6 +916,14 @@ impl AnthropicClient {
             payload["system"] = json!(system);
         }

+        // Add web search tool if enabled
+        if _enable_web_search {
+            payload["tools"] = json!([{
+                "type": "web_search_20250305",
+                "name": "web_search"
+            }]);
+        }
+
         let response = self
             .client
             .post(&url)

@@ -942,6 +979,14 @@ impl AnthropicClient {
             payload["system"] = json!(system);
         }

+        // Add web search tool if enabled
+        if _enable_web_search {
+            payload["tools"] = json!([{
+                "type": "web_search_20250305",
+                "name": "web_search"
+            }]);
+        }
+
         let response = self
             .client
             .post(&url)

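Note: the same tool block is attached in both request builders. A hypothetical helper (not part of this commit) could centralize it, as sketched here with serde_json:

    use serde_json::{json, Value};

    // Hypothetical helper: attaches the Anthropic web search tool to a request
    // payload, mirroring the block added above in both request paths.
    fn add_web_search_tool(payload: &mut Value, enable_web_search: bool) {
        if enable_web_search {
            payload["tools"] = json!([{
                "type": "web_search_20250305",
                "name": "web_search"
            }]);
        }
    }

    fn main() {
        let mut payload = json!({ "model": "claude-sonnet-4-20250514", "messages": [] });
        add_web_search_tool(&mut payload, true);
        println!("{}", payload);
    }
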
@@ -989,8 +1034,21 @@ impl AnthropicClient {
                         }
                     }
                 }
-                "message_start" | "content_block_start" | "content_block_stop" | "message_delta" | "message_stop" => {
-                    // Handle other event types if needed
+                "content_block_start" => {
+                    // Check if this is a tool use start
+                    if let Ok(tool_start) = serde_json::from_value::<AnthropicToolUseStart>(event.data.clone()) {
+                        if tool_start.tool_use.name == "web_search" {
+                            let search_indicator = "\n🔍 Searching the web...\n";
+                            full_response.push_str(search_indicator);
+                            stream_callback(search_indicator).await;
+                        }
+                    }
+                }
+                "content_block_stop" => {
+                    // Tool use completed, continue with normal processing
+                }
+                "message_start" | "message_delta" | "message_stop" => {
+                    // Handle other message-level events
                 }
                 _ => {
                     // Unknown event type, skip

@@ -1018,16 +1076,16 @@ impl AnthropicClient {
     pub fn supports_feature(&self, feature: &str) -> bool {
         match feature {
-            "streaming" => true,
-            "web_search" | "reasoning_summary" | "reasoning_effort" => false,
+            "streaming" | "web_search" => true,
+            "reasoning_summary" | "reasoning_effort" => false,
             _ => false,
         }
     }

     pub fn supports_feature_for_model(&self, feature: &str, _model: &str) -> bool {
         match feature {
-            "streaming" => true,
-            "web_search" | "reasoning_summary" | "reasoning_effort" => false,
+            "streaming" | "web_search" => true,
+            "reasoning_summary" | "reasoning_effort" => false,
             _ => false,
         }
     }

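Note: a sketch of how callers can gate features on the updated capability table; the AnthropicClient below is a stub standing in for the real client type:

    // Stub reproducing the updated capability table for illustration only.
    struct AnthropicClient;

    impl AnthropicClient {
        pub fn supports_feature(&self, feature: &str) -> bool {
            match feature {
                "streaming" | "web_search" => true,
                "reasoning_summary" | "reasoning_effort" => false,
                _ => false,
            }
        }
    }

    fn main() {
        let client = AnthropicClient;
        // Web search can now be offered for Anthropic models...
        assert!(client.supports_feature("web_search"));
        // ...while reasoning summaries remain unsupported.
        assert!(!client.supports_feature("reasoning_summary"));
    }
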
@@ -6,6 +6,11 @@ pub enum Provider {
     Anthropic,
 }

+pub struct ModelInfo {
+    pub model_id: &'static str,
+    pub display_name: &'static str,
+}
+
 impl Provider {
     pub fn as_str(&self) -> &'static str {
         match self {

@@ -47,6 +52,43 @@ pub fn get_supported_models() -> HashMap<Provider, Vec<&'static str>> {
     models
 }

+pub fn get_model_info_list() -> Vec<ModelInfo> {
+    vec![
+        // OpenAI models
+        ModelInfo { model_id: "gpt-4.1", display_name: "GPT-4.1" },
+        ModelInfo { model_id: "gpt-4.1-mini", display_name: "GPT-4.1 Mini" },
+        ModelInfo { model_id: "gpt-4o", display_name: "GPT-4o" },
+        ModelInfo { model_id: "gpt-5", display_name: "GPT-5" },
+        ModelInfo { model_id: "gpt-5-chat-latest", display_name: "GPT-5 Chat Latest" },
+        ModelInfo { model_id: "o1", display_name: "o1" },
+        ModelInfo { model_id: "o3", display_name: "o3" },
+        ModelInfo { model_id: "o4-mini", display_name: "o4 Mini" },
+        ModelInfo { model_id: "o3-mini", display_name: "o3 Mini" },
+
+        // Anthropic models with friendly names
+        ModelInfo { model_id: "claude-opus-4-1-20250805", display_name: "Claude Opus 4.1" },
+        ModelInfo { model_id: "claude-sonnet-4-20250514", display_name: "Claude Sonnet 4.0" },
+        ModelInfo { model_id: "claude-3-7-sonnet-20250219", display_name: "Claude 3.7 Sonnet" },
+        ModelInfo { model_id: "claude-3-5-haiku-20241022", display_name: "Claude 3.5 Haiku" },
+        ModelInfo { model_id: "claude-3-haiku-20240307", display_name: "Claude 3.0 Haiku" },
+    ]
+}
+
+pub fn get_display_name_for_model(model_id: &str) -> String {
+    get_model_info_list()
+        .iter()
+        .find(|info| info.model_id == model_id)
+        .map(|info| info.display_name.to_string())
+        .unwrap_or_else(|| model_id.to_string())
+}
+
+pub fn get_model_id_from_display_name(display_name: &str) -> Option<String> {
+    get_model_info_list()
+        .iter()
+        .find(|info| info.display_name == display_name)
+        .map(|info| info.model_id.to_string())
+}
+
 pub fn get_all_models() -> Vec<&'static str> {
     get_supported_models()
         .values()

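Note: a test sketch (not part of this commit) that could sit alongside the new helpers in the same module, checking that display names and model ids round-trip:

    #[cfg(test)]
    mod tests {
        use super::*;

        // Round-trip: every display name should resolve back to its model id.
        #[test]
        fn display_names_round_trip_to_model_ids() {
            for info in get_model_info_list() {
                let display = get_display_name_for_model(info.model_id);
                assert_eq!(display, info.display_name);
                assert_eq!(
                    get_model_id_from_display_name(&display),
                    Some(info.model_id.to_string())
                );
            }
        }

        // Unknown ids fall back to the raw id rather than panicking.
        #[test]
        fn unknown_model_id_falls_back_to_itself() {
            assert_eq!(get_display_name_for_model("some-unknown-model"), "some-unknown-model");
            assert_eq!(get_model_id_from_display_name("Some Unknown Name"), None);
        }
    }
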
@@ -209,10 +209,9 @@ Environment Variables:
   DEFAULT_MODEL - Default model if not specified

 Supported Models:
-  OpenAI: gpt-4.1, gpt-4.1-mini, gpt-4o, gpt-5, gpt-5-chat-latest, o1, o3, o4-mini, o3-mini
-  Anthropic: claude-opus-4-1-20250805, claude-sonnet-4-20250514,
-             claude-3-7-sonnet-20250219, claude-3-5-haiku-20241022,
-             claude-3-haiku-20240307
+  OpenAI: GPT-4.1, GPT-4.1 Mini, GPT-4o, GPT-5, GPT-5 Chat Latest, o1, o3, o4 Mini, o3 Mini
+  Anthropic: Claude Opus 4.1, Claude Sonnet 4.0, Claude 3.7 Sonnet,
+             Claude 3.5 Haiku, Claude 3.0 Haiku
 "#;
     println!("{}", style(help_text).dim());
 }