//! Chat client implementations for the OpenAI and Anthropic APIs
//! (rustGPT — src/core/client.rs).

use anyhow::{Context, Result};
use reqwest::Client;
use serde::Deserialize;
use serde_json::{json, Value};
use std::env;
use std::time::Duration;
use crate::config::Config;
use super::{provider::Provider, session::Message};
/// Provider-dispatching chat client: wraps either an OpenAI or an
/// Anthropic HTTP client so callers can hold a single type.
#[derive(Debug)]
pub enum ChatClient {
OpenAI(OpenAIClient),
Anthropic(AnthropicClient),
}
impl ChatClient {
    /// Forward a chat completion request to the wrapped provider client.
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        enable_web_search: bool,
        enable_reasoning_summary: bool,
        reasoning_effort: &str,
    ) -> Result<String> {
        match self {
            Self::OpenAI(inner) => {
                inner
                    .chat_completion(
                        model,
                        messages,
                        enable_web_search,
                        enable_reasoning_summary,
                        reasoning_effort,
                    )
                    .await
            }
            Self::Anthropic(inner) => {
                inner
                    .chat_completion(
                        model,
                        messages,
                        enable_web_search,
                        enable_reasoning_summary,
                        reasoning_effort,
                    )
                    .await
            }
        }
    }

    /// Whether the wrapped provider supports `feature` at all.
    pub fn supports_feature(&self, feature: &str) -> bool {
        match self {
            Self::OpenAI(inner) => inner.supports_feature(feature),
            Self::Anthropic(inner) => inner.supports_feature(feature),
        }
    }

    /// Whether the wrapped provider supports `feature` for a specific model.
    pub fn supports_feature_for_model(&self, feature: &str, model: &str) -> bool {
        match self {
            Self::OpenAI(inner) => inner.supports_feature_for_model(feature, model),
            Self::Anthropic(inner) => inner.supports_feature_for_model(feature, model),
        }
    }
}
/// OpenAI API client (chat completions and the Responses API).
#[derive(Debug)]
pub struct OpenAIClient {
// Reused reqwest client, built with the configured request timeout.
client: Client,
// Bearer token read from the OPENAI_API_KEY environment variable.
api_key: String,
// API root taken from config.api.openai_base_url.
base_url: String,
}
/// Anthropic Messages API client.
#[derive(Debug)]
pub struct AnthropicClient {
// Reused reqwest client, built with the configured request timeout.
client: Client,
// Key read from the ANTHROPIC_API_KEY environment variable (sent as x-api-key).
api_key: String,
// API root taken from config.api.anthropic_base_url.
base_url: String,
}
// ---- /chat/completions response wire types ----

/// Top-level chat-completions response body.
#[derive(Deserialize)]
struct OpenAIResponse {
choices: Vec<Choice>,
}
/// One completion choice; only the first is consumed by this file.
#[derive(Deserialize)]
struct Choice {
message: OpenAIMessage,
#[allow(dead_code)]
finish_reason: Option<String>,
}
/// Assistant message: plain text content and/or requested tool calls.
#[derive(Deserialize)]
struct OpenAIMessage {
content: Option<String>,
tool_calls: Option<Vec<ToolCall>>,
}
/// A tool invocation requested by the model.
#[derive(Deserialize)]
struct ToolCall {
#[allow(dead_code)]
id: String,
#[allow(dead_code)]
#[serde(rename = "type")]
tool_type: String,
function: FunctionCall,
}
/// Function name plus its arguments as a raw JSON string (parsed by the caller).
#[derive(Deserialize)]
struct FunctionCall {
name: String,
arguments: String,
}
// Responses API structures

/// Top-level body returned by the OpenAI Responses API.
#[derive(Deserialize)]
struct ResponsesApiResponse {
#[allow(dead_code)]
id: String,
#[allow(dead_code)]
object: String,
#[allow(dead_code)]
created_at: u64,
// NOTE(review): `status` is deserialized but never read in this file.
status: String,
// Mixed list of output items ("web_search_call", "message", etc.).
output: Vec<OutputItem>,
}
/// One item of the Responses API output array; which optional fields are
/// populated depends on `item_type`.
#[derive(Deserialize)]
struct OutputItem {
#[allow(dead_code)]
id: String,
#[serde(rename = "type")]
item_type: String,
// e.g. "completed"; checked before an item is rendered.
#[serde(default)]
status: Option<String>,
// Present on "message" items (e.g. "assistant").
#[serde(default)]
role: Option<String>,
// Present on "message" items.
#[serde(default)]
content: Option<Vec<ResponseContent>>,
// Present on "web_search_call" items.
#[serde(default)]
action: Option<SearchAction>,
}
/// Details of a web search performed by the hosted tool.
#[derive(Deserialize)]
struct SearchAction {
#[allow(dead_code)]
#[serde(rename = "type")]
action_type: String,
#[serde(default)]
query: Option<String>,
}
/// A content part of a message item; only "output_text" parts are used.
#[derive(Deserialize)]
struct ResponseContent {
#[serde(rename = "type")]
content_type: String,
#[serde(default)]
text: Option<String>,
#[serde(default)]
annotations: Option<Vec<Annotation>>,
}
/// Inline annotation on output text; "url_citation" entries become sources.
#[derive(Deserialize)]
struct Annotation {
#[serde(rename = "type")]
annotation_type: String,
#[allow(dead_code)]
start_index: usize,
#[allow(dead_code)]
end_index: usize,
url: String,
title: String,
}
/// Anthropic Messages API response body (content blocks only).
#[derive(Deserialize)]
struct AnthropicResponse {
content: Vec<AnthropicContent>,
}
/// A single content block; only the `text` field is deserialized here.
#[derive(Deserialize)]
struct AnthropicContent {
text: String,
}
impl OpenAIClient {
    /// Build a client from `config`; requires the OPENAI_API_KEY env var.
    ///
    /// # Errors
    /// Fails when the key is missing or the HTTP client cannot be built.
    pub fn new(config: &Config) -> Result<Self> {
        let api_key = env::var("OPENAI_API_KEY")
            .context("OPENAI_API_KEY environment variable is required")?;
        let client = Client::builder()
            .timeout(Duration::from_secs(config.api.request_timeout_seconds))
            .build()
            .context("Failed to create HTTP client")?;
        let base_url = config.api.openai_base_url.clone();
        Ok(Self { client, api_key, base_url })
    }

    /// Map session messages into chat-completions JSON message objects.
    fn convert_messages(messages: &[Message]) -> Vec<Value> {
        let mut converted = Vec::with_capacity(messages.len());
        for message in messages {
            converted.push(json!({
                "role": message.role,
                "content": message.content
            }));
        }
        converted
    }
}
impl OpenAIClient {
    /// Run a chat completion against the OpenAI API.
    ///
    /// Routing: when web search is requested and the model is a GPT-5
    /// variant, the request goes through the Responses API (which has a
    /// hosted `web_search_preview` tool); otherwise the classic
    /// `/chat/completions` endpoint is used, optionally advertising a
    /// `web_search` function tool that this CLI does not actually execute.
    ///
    /// # Errors
    /// Fails on network errors, non-2xx responses, unparsable bodies, or a
    /// response with no usable content.
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        enable_web_search: bool,
        // Accepted for interface parity with ChatClient; not used by this endpoint.
        _enable_reasoning_summary: bool,
        reasoning_effort: &str,
    ) -> Result<String> {
        // Use Responses API for web search with GPT-5, fallback to chat completions.
        if enable_web_search && model.starts_with("gpt-5") {
            return self
                .responses_api_completion(model, messages, reasoning_effort)
                .await;
        }
        let url = format!("{}/chat/completions", self.base_url);
        let mut payload = json!({
            "model": model,
            "messages": Self::convert_messages(messages),
            "stream": false
        });
        // Advertise a `web_search` function tool for non-GPT-5 models. The CLI
        // has no search backend, so any resulting tool call is surfaced to the
        // user as text rather than executed (see below).
        if enable_web_search {
            payload["tools"] = json!([{
                "type": "function",
                "function": {
                    "name": "web_search",
                    "description": "Search the web for current information on any topic",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "The search query to find relevant information"
                            }
                        },
                        "required": ["query"]
                    }
                }
            }]);
            payload["tool_choice"] = json!("auto");
        }
        // `reasoning_effort` only applies to GPT-5 models and only with the
        // known levels; anything else is silently omitted from the payload.
        if model.starts_with("gpt-5") && ["low", "medium", "high"].contains(&reasoning_effort) {
            payload["reasoning_effort"] = json!(reasoning_effort);
        }
        let response = self
            .client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to OpenAI API")?;
        if !response.status().is_success() {
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("OpenAI API error: {}", error_text));
        }
        let response_json: OpenAIResponse = response
            .json()
            .await
            .context("Failed to parse OpenAI API response")?;
        let choice = response_json
            .choices
            .first()
            .context("No choices in OpenAI API response")?;
        // Tool calls: search is not implemented, so report the model's
        // intended queries alongside any text content it produced.
        if let Some(tool_calls) = &choice.message.tool_calls {
            let mut response_parts = Vec::new();
            if let Some(content) = &choice.message.content {
                response_parts.push(content.clone());
            }
            for tool_call in tool_calls {
                if tool_call.function.name == "web_search" {
                    // Arguments arrive as a JSON-encoded string; best-effort parse.
                    if let Ok(args) =
                        serde_json::from_str::<serde_json::Value>(&tool_call.function.arguments)
                    {
                        if let Some(query) = args.get("query").and_then(|q| q.as_str()) {
                            response_parts.push(format!(
                                "\n[Web Search Request: \"{}\"]\nNote: Web search functionality is not implemented in this CLI. The AI wanted to search for: {}",
                                query, query
                            ));
                        }
                    }
                }
            }
            let final_content = if response_parts.is_empty() {
                "The AI attempted to use tools but no content was returned.".to_string()
            } else {
                response_parts.join("\n")
            };
            return Ok(final_content);
        }
        // Plain text response.
        let content = choice
            .message
            .content
            .as_ref()
            .context("No content in OpenAI API response")?;
        Ok(content.clone())
    }

    /// Web-search-enabled completion via the Responses API (GPT-5 path).
    ///
    /// System messages are dropped and the remaining message contents are
    /// concatenated into a single `input` string. The rendered result lists
    /// completed searches, then the assistant text, then URL citations.
    ///
    /// # Errors
    /// Fails on network errors, non-2xx responses, unparsable bodies, or
    /// when no renderable output item is present.
    async fn responses_api_completion(
        &self,
        model: &str,
        messages: &[Message],
        reasoning_effort: &str,
    ) -> Result<String> {
        let url = format!("{}/responses", self.base_url);
        // Convert messages to input text (simple approach for now).
        let input_text = messages
            .iter()
            .filter(|msg| msg.role != "system")
            .map(|msg| msg.content.as_str())
            .collect::<Vec<_>>()
            .join("\n");
        let mut payload = json!({
            "model": model,
            "tools": [{"type": "web_search_preview"}],
            "input": input_text
        });
        // Reasoning effort uses the nested `reasoning.effort` shape here.
        if ["low", "medium", "high"].contains(&reasoning_effort) {
            payload["reasoning"] = json!({
                "effort": reasoning_effort
            });
        }
        let response = self
            .client
            .post(&url)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to OpenAI Responses API")?;
        if !response.status().is_success() {
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("OpenAI Responses API error: {}", error_text));
        }
        // Keep the raw body so a parse failure can include the payload.
        let response_text = response
            .text()
            .await
            .context("Failed to get response text from OpenAI Responses API")?;
        let response_json: ResponsesApiResponse = serde_json::from_str(&response_text)
            .with_context(|| {
                format!(
                    "Failed to parse OpenAI Responses API response. Response was: {}",
                    response_text
                )
            })?;
        // Walk the output items in order: log completed searches, then emit
        // the assistant message text plus any URL citations.
        let mut final_content = String::new();
        let mut citations = Vec::new();
        let mut search_count = 0;
        for item in response_json.output {
            match item.item_type.as_str() {
                "web_search_call" => {
                    if item.status.as_deref() == Some("completed") {
                        search_count += 1;
                        if let Some(action) = &item.action {
                            if let Some(query) = &action.query {
                                final_content.push_str(&format!(
                                    "🔍 Search {}: \"{}\"\n",
                                    search_count, query
                                ));
                            } else {
                                final_content.push_str(&format!(
                                    "🔍 Search {}: [no query specified]\n",
                                    search_count
                                ));
                            }
                        }
                    }
                }
                "message" => {
                    // Fix: compare via as_deref instead of allocating a
                    // fresh String ("assistant".to_string()) per item.
                    if item.role.as_deref() == Some("assistant")
                        && item.status.as_deref() == Some("completed")
                    {
                        if let Some(content_items) = item.content {
                            for content_item in content_items {
                                if content_item.content_type == "output_text" {
                                    if let Some(text) = &content_item.text {
                                        if search_count > 0 {
                                            final_content.push_str("\n📝 **Response:**\n");
                                        }
                                        final_content.push_str(text);
                                        // Collect url_citation annotations as numbered sources.
                                        if let Some(annotations) = &content_item.annotations {
                                            for annotation in annotations {
                                                if annotation.annotation_type == "url_citation" {
                                                    citations.push(format!(
                                                        "\n📄 [{}]({}) - {}",
                                                        citations.len() + 1,
                                                        annotation.url,
                                                        annotation.title
                                                    ));
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                _ => {} // Other item types (e.g. "reasoning") are ignored.
            }
        }
        // Append citations to the end.
        if !citations.is_empty() {
            final_content.push_str("\n\n**Sources:**");
            for citation in citations {
                final_content.push_str(&citation);
            }
        }
        if final_content.is_empty() {
            return Err(anyhow::anyhow!("No content found in Responses API response"));
        }
        Ok(final_content)
    }

    /// Provider-level capability check (model-independent).
    pub fn supports_feature(&self, feature: &str) -> bool {
        // Fix: `matches!` replaces the degenerate `match … => true, _ => false`.
        matches!(feature, "web_search" | "reasoning_summary" | "reasoning_effort")
    }

    /// Capability check for a specific model; reasoning effort is GPT-5-only.
    pub fn supports_feature_for_model(&self, feature: &str, model: &str) -> bool {
        match feature {
            "web_search" | "reasoning_summary" => true,
            "reasoning_effort" => model.starts_with("gpt-5"),
            _ => false,
        }
    }
}
impl AnthropicClient {
    /// Build a client from `config`; requires the ANTHROPIC_API_KEY env var.
    ///
    /// # Errors
    /// Fails when the key is missing or the HTTP client cannot be built.
    pub fn new(config: &Config) -> Result<Self> {
        let api_key = env::var("ANTHROPIC_API_KEY")
            .context("ANTHROPIC_API_KEY environment variable is required")?;
        let client = Client::builder()
            .timeout(Duration::from_secs(config.api.request_timeout_seconds))
            .build()
            .context("Failed to create HTTP client")?;
        let base_url = config.api.anthropic_base_url.clone();
        Ok(Self { client, api_key, base_url })
    }

    /// Split session messages into the Anthropic wire shape: an optional
    /// system prompt (the last "system" message wins) plus the ordered
    /// user/assistant turns. Messages with any other role are dropped.
    fn convert_messages(messages: &[Message]) -> (Option<String>, Vec<Value>) {
        let mut system_prompt = None;
        let mut turns = Vec::new();
        for message in messages {
            match message.role.as_str() {
                "system" => system_prompt = Some(message.content.clone()),
                "user" | "assistant" => turns.push(json!({
                    "role": message.role,
                    "content": message.content
                })),
                _ => {}
            }
        }
        (system_prompt, turns)
    }
}
impl AnthropicClient {
    /// Run a chat completion against the Anthropic Messages API.
    ///
    /// The web-search / reasoning flags are accepted for interface parity
    /// with the OpenAI client but are not supported by this backend.
    ///
    /// # Errors
    /// Fails on network errors, non-2xx responses, or a response without
    /// any content blocks.
    pub async fn chat_completion(
        &self,
        model: &str,
        messages: &[Message],
        _enable_web_search: bool,
        _enable_reasoning_summary: bool,
        _reasoning_effort: &str,
    ) -> Result<String> {
        let url = format!("{}/messages", self.base_url);
        let (system_prompt, user_messages) = Self::convert_messages(messages);
        // NOTE(review): this re-loads the config file on every request just
        // for max_tokens and the version header; consider capturing those in
        // `new()` instead — confirm nothing relies on hot-reloading config.
        let config = crate::config::Config::load().unwrap_or_default();
        let mut payload = json!({
            "model": model,
            "max_tokens": config.limits.max_tokens_anthropic,
            "messages": user_messages
        });
        // Anthropic takes the system prompt as a top-level field, not a message.
        if let Some(system) = system_prompt {
            payload["system"] = json!(system);
        }
        let response = self
            .client
            .post(&url)
            .header("x-api-key", &self.api_key)
            .header("Content-Type", "application/json")
            .header("anthropic-version", &config.api.anthropic_version)
            .json(&payload)
            .send()
            .await
            .context("Failed to send request to Anthropic API")?;
        if !response.status().is_success() {
            let error_text = response
                .text()
                .await
                .unwrap_or_else(|_| "Unknown error".to_string());
            return Err(anyhow::anyhow!("Anthropic API error: {}", error_text));
        }
        let response_json: AnthropicResponse = response
            .json()
            .await
            .context("Failed to parse Anthropic API response")?;
        // Only the first content block's text is returned.
        let content = response_json
            .content
            .first()
            .map(|c| &c.text)
            .context("No content in Anthropic API response")?;
        Ok(content.clone())
    }

    /// This backend supports none of the optional features.
    ///
    /// Fix: the original match returned `false` on every arm (including the
    /// named ones), so both checks collapse to a constant.
    pub fn supports_feature(&self, _feature: &str) -> bool {
        false
    }

    /// Model-specific capability check — always `false` for this backend.
    pub fn supports_feature_for_model(&self, _feature: &str, _model: &str) -> bool {
        false
    }
}
/// Construct the appropriate chat client for `model` by looking up its
/// provider, wiring in credentials and base URLs from `config`.
///
/// # Errors
/// Propagates client-construction failures (e.g. a missing API key).
pub fn create_client(model: &str, config: &Config) -> Result<ChatClient> {
    match super::provider::get_provider_for_model(model) {
        Provider::OpenAI => Ok(ChatClient::OpenAI(OpenAIClient::new(config)?)),
        Provider::Anthropic => Ok(ChatClient::Anthropic(AnthropicClient::new(config)?)),
    }
}