owlen/crates/llm/anthropic/src/client.rs
vikingowl 10c8e2baae feat(v2): complete multi-LLM providers, TUI redesign, and advanced agent features
Multi-LLM Provider Support:
- Add llm-core crate with LlmProvider trait abstraction
- Implement Anthropic Claude API client with streaming
- Implement OpenAI API client with streaming
- Add token counting with SimpleTokenCounter and ClaudeTokenCounter
- Add retry logic with exponential backoff and jitter (see the sketch after this list)
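
A minimal sketch of the retry shape described above (the helper name and
constants are illustrative, not the actual llm-core API):

    use rand::Rng;
    use std::time::Duration;

    // Delay grows exponentially per attempt, plus random jitter so that
    // concurrent clients do not retry in lockstep.
    async fn backoff_delay(attempt: u32) {
        let base = Duration::from_millis(500) * 2u32.pow(attempt);
        let jitter = Duration::from_millis(rand::thread_rng().gen_range(0..250));
        tokio::time::sleep(base + jitter).await;
    }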

Borderless TUI Redesign:
- Rewrite theme system with terminal capability detection (Full/Unicode256/Basic; sketched after this list)
- Add provider tabs component with keybind switching [1]/[2]/[3]
- Implement vim-modal input (Normal/Insert/Visual/Command modes)
- Redesign chat panel with timestamps and streaming indicators
- Add multi-provider status bar with cost tracking
- Add Nerd Font icons with graceful ASCII fallbacks
- Add syntax highlighting (syntect) and markdown rendering (pulldown-cmark)
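
A hypothetical sketch of the three capability tiers named above (the real
theme system may probe the terminal differently):

    enum TermCaps { Full, Unicode256, Basic }

    // Assumption: tiers keyed off the COLORTERM/TERM environment variables.
    fn detect_caps() -> TermCaps {
        let colorterm = std::env::var("COLORTERM").unwrap_or_default();
        let term = std::env::var("TERM").unwrap_or_default();
        if colorterm.contains("truecolor") || colorterm.contains("24bit") {
            TermCaps::Full
        } else if term.contains("256color") {
            TermCaps::Unicode256
        } else {
            TermCaps::Basic
        }
    }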

Advanced Agent Features:
- Add system prompt builder with configurable components
- Enhance subagent orchestration with parallel execution
- Add git integration module for safe command detection
- Add streaming tool results via channels (see the sketch after this list)
- Expand tool set: AskUserQuestion, TodoWrite, LS, MultiEdit, BashOutput, KillShell
- Add WebSearch with provider abstraction
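
One plausible shape for streaming tool results over channels (tokio mpsc;
the function and message type here are assumptions):

    use tokio::sync::mpsc;

    // The tool task sends incremental output while the agent loop
    // drains the receiver.
    async fn run_tool_streaming() {
        let (tx, mut rx) = mpsc::channel::<String>(32);
        tokio::spawn(async move {
            for part in ["chunk 1", "chunk 2"] {
                let _ = tx.send(part.to_string()).await;
            }
        });
        while let Some(part) = rx.recv().await {
            println!("{part}");
        }
    }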

Plugin System Enhancement:
- Add full agent definition parsing from YAML frontmatter (sketched after this list)
- Add skill system with progressive disclosure
- Wire plugin hooks into HookManager
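
Frontmatter parsing might look roughly like this (field names are
assumptions, not the actual plugin schema):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct AgentDef {
        name: String,
        description: Option<String>,
    }

    // Frontmatter is the YAML block between the leading "---" fences.
    fn parse_frontmatter(doc: &str) -> Option<AgentDef> {
        let rest = doc.strip_prefix("---")?;
        let (yaml, _body) = rest.split_once("---")?;
        serde_yaml::from_str(yaml).ok()
    }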

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-02 17:24:14 +01:00


//! Anthropic Claude API Client
//!
//! Implements the Messages API with streaming support.
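//!
//! Illustrative usage (a sketch; the doctest is `ignore`d because the crate
//! path and exact `llm_core` re-exports are assumptions):
//!
//! ```ignore
//! let client = AnthropicClient::new("sk-ant-...").with_model("claude-sonnet-4-20250514");
//! let messages = vec![ChatMessage::user("Hello")];
//! let opts = ChatOptions::new("claude-sonnet-4-20250514");
//! let response = client.chat(&messages, &opts, None).await?;
//! ```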
use crate::types::*;
use async_trait::async_trait;
use futures::StreamExt;
use llm_core::{
AccountInfo, AuthMethod, ChatMessage, ChatOptions, ChatResponse, ChunkStream, FunctionCall,
LlmError, LlmProvider, ModelInfo, ProviderInfo, ProviderStatus, Role, StreamChunk, Tool,
ToolCall, ToolCallDelta, Usage, UsageStats,
};
use reqwest::Client;
use reqwest_eventsource::{Event, EventSource};
use std::sync::Arc;
use tokio::sync::Mutex;
const API_BASE_URL: &str = "https://api.anthropic.com";
const MESSAGES_ENDPOINT: &str = "/v1/messages";
const API_VERSION: &str = "2023-06-01";
const DEFAULT_MAX_TOKENS: u32 = 8192;
/// Anthropic Claude API client
pub struct AnthropicClient {
http: Client,
auth: AuthMethod,
model: String,
}
impl AnthropicClient {
/// Create a new client with API key authentication
pub fn new(api_key: impl Into<String>) -> Self {
Self {
http: Client::new(),
auth: AuthMethod::api_key(api_key),
model: "claude-sonnet-4-20250514".to_string(),
}
}
/// Create a new client with OAuth token
pub fn with_oauth(access_token: impl Into<String>) -> Self {
Self {
http: Client::new(),
auth: AuthMethod::oauth(access_token),
model: "claude-sonnet-4-20250514".to_string(),
}
}
/// Create a new client with full AuthMethod
pub fn with_auth(auth: AuthMethod) -> Self {
Self {
http: Client::new(),
auth,
model: "claude-sonnet-4-20250514".to_string(),
}
}
/// Set the model to use
pub fn with_model(mut self, model: impl Into<String>) -> Self {
self.model = model.into();
self
}
/// Get current auth method (for token refresh)
pub fn auth(&self) -> &AuthMethod {
&self.auth
}
/// Update the auth method (after refresh)
pub fn set_auth(&mut self, auth: AuthMethod) {
self.auth = auth;
}
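// Typical refresh flow (sketch; `refresh_oauth` is a hypothetical helper,
// not part of this crate):
//
//   let refreshed: AuthMethod = refresh_oauth(client.auth()).await?;
//   client.set_auth(refreshed);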
/// Convert messages to Anthropic format, extracting system message
fn prepare_messages(messages: &[ChatMessage]) -> (Option<String>, Vec<AnthropicMessage>) {
let mut system_content = None;
let mut anthropic_messages = Vec::new();
for msg in messages {
if msg.role == Role::System {
// Anthropic takes system content as a separate top-level field, so
// merge any system messages instead of sending them inline.
if let Some(content) = &msg.content {
if let Some(existing) = &mut system_content {
*existing = format!("{}\n\n{}", existing, content);
} else {
system_content = Some(content.clone());
}
}
} else {
anthropic_messages.push(AnthropicMessage::from(msg));
}
}
(system_content, anthropic_messages)
}
/// Convert tools to Anthropic format
fn prepare_tools(tools: Option<&[Tool]>) -> Option<Vec<AnthropicTool>> {
tools.map(|t| t.iter().map(AnthropicTool::from).collect())
}
}
#[async_trait]
impl LlmProvider for AnthropicClient {
fn name(&self) -> &str {
"anthropic"
}
fn model(&self) -> &str {
&self.model
}
async fn chat_stream(
&self,
messages: &[ChatMessage],
options: &ChatOptions,
tools: Option<&[Tool]>,
) -> Result<ChunkStream, LlmError> {
let url = format!("{}{}", API_BASE_URL, MESSAGES_ENDPOINT);
let model = if options.model.is_empty() {
&self.model
} else {
&options.model
};
let (system, anthropic_messages) = Self::prepare_messages(messages);
let anthropic_tools = Self::prepare_tools(tools);
let request = MessagesRequest {
model,
messages: anthropic_messages,
max_tokens: options.max_tokens.unwrap_or(DEFAULT_MAX_TOKENS),
system: system.as_deref(),
temperature: options.temperature,
top_p: options.top_p,
stop_sequences: options.stop.as_deref(),
tools: anthropic_tools,
stream: true,
};
let bearer = self
.auth
.bearer_token()
.ok_or_else(|| LlmError::Auth("No authentication configured".to_string()))?;
// Build the SSE request
let req = self
.http
.post(&url)
.header("x-api-key", bearer)
.header("anthropic-version", API_VERSION)
.header("content-type", "application/json")
.json(&request);
let es = EventSource::new(req).map_err(|e| LlmError::Http(e.to_string()))?;
// State for accumulating tool calls across deltas
let tool_state: Arc<Mutex<Vec<PartialToolCall>>> = Arc::new(Mutex::new(Vec::new()));
let stream = es.filter_map(move |event| {
let tool_state = Arc::clone(&tool_state);
async move {
match event {
Ok(Event::Open) => None,
Ok(Event::Message(msg)) => {
// Parse the SSE data as JSON
let event: StreamEvent = match serde_json::from_str(&msg.data) {
Ok(e) => e,
Err(e) => {
tracing::warn!("Failed to parse SSE event: {}", e);
return None;
}
};
convert_stream_event(event, &tool_state).await
}
Err(reqwest_eventsource::Error::StreamEnded) => None,
Err(e) => Some(Err(LlmError::Stream(e.to_string()))),
}
}
});
Ok(Box::pin(stream))
}
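// How a caller might drain the stream returned above (kept as a comment so
// the impl is unchanged; `StreamExt::next` is already imported):
//
//   let mut stream = client.chat_stream(&messages, &opts, None).await?;
//   while let Some(chunk) = stream.next().await {
//       let chunk = chunk?;
//       if let Some(text) = &chunk.content { print!("{text}"); }
//       if chunk.done { break; }
//   }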
async fn chat(
&self,
messages: &[ChatMessage],
options: &ChatOptions,
tools: Option<&[Tool]>,
) -> Result<ChatResponse, LlmError> {
let url = format!("{}{}", API_BASE_URL, MESSAGES_ENDPOINT);
let model = if options.model.is_empty() {
&self.model
} else {
&options.model
};
let (system, anthropic_messages) = Self::prepare_messages(messages);
let anthropic_tools = Self::prepare_tools(tools);
let request = MessagesRequest {
model,
messages: anthropic_messages,
max_tokens: options.max_tokens.unwrap_or(DEFAULT_MAX_TOKENS),
system: system.as_deref(),
temperature: options.temperature,
top_p: options.top_p,
stop_sequences: options.stop.as_deref(),
tools: anthropic_tools,
stream: false,
};
let bearer = self
.auth
.bearer_token()
.ok_or_else(|| LlmError::Auth("No authentication configured".to_string()))?;
let response = self
.http
.post(&url)
.header("x-api-key", bearer)
.header("anthropic-version", API_VERSION)
.json(&request)
.send()
.await
.map_err(|e| LlmError::Http(e.to_string()))?;
if !response.status().is_success() {
let status = response.status();
let text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
// Check for rate limiting
if status == reqwest::StatusCode::TOO_MANY_REQUESTS {
return Err(LlmError::RateLimit {
retry_after_secs: None,
});
}
return Err(LlmError::Api {
message: text,
code: Some(status.to_string()),
});
}
let api_response: MessagesResponse = response
.json()
.await
.map_err(|e| LlmError::Json(e.to_string()))?;
// Convert response to common format
let mut content = String::new();
let mut tool_calls = Vec::new();
for block in api_response.content {
match block {
ResponseContentBlock::Text { text } => {
content.push_str(&text);
}
ResponseContentBlock::ToolUse { id, name, input } => {
tool_calls.push(ToolCall {
id,
call_type: "function".to_string(),
function: FunctionCall {
name,
arguments: input,
},
});
}
}
}
let usage = api_response.usage.map(|u| Usage {
prompt_tokens: u.input_tokens,
completion_tokens: u.output_tokens,
total_tokens: u.input_tokens + u.output_tokens,
});
Ok(ChatResponse {
content: if content.is_empty() {
None
} else {
Some(content)
},
tool_calls: if tool_calls.is_empty() {
None
} else {
Some(tool_calls)
},
usage,
})
}
}
/// Helper struct for accumulating streaming tool calls across SSE deltas.
///
/// Deltas are forwarded to the caller as they arrive; `id` and `name` are
/// retained here so complete calls could be reassembled from the
/// accumulated `input_json` if a consumer needs them.
#[derive(Default)]
struct PartialToolCall {
#[allow(dead_code)]
id: String,
#[allow(dead_code)]
name: String,
input_json: String,
}
/// Convert an Anthropic stream event to our common StreamChunk format
async fn convert_stream_event(
event: StreamEvent,
tool_state: &Arc<Mutex<Vec<PartialToolCall>>>,
) -> Option<Result<StreamChunk, LlmError>> {
match event {
StreamEvent::ContentBlockStart {
index,
content_block,
} => {
match content_block {
ContentBlockStartInfo::Text { text } => {
if text.is_empty() {
None
} else {
Some(Ok(StreamChunk {
content: Some(text),
tool_calls: None,
done: false,
usage: None,
}))
}
}
ContentBlockStartInfo::ToolUse { id, name } => {
// Store the tool call start
let mut state = tool_state.lock().await;
while state.len() <= index {
state.push(PartialToolCall::default());
}
state[index] = PartialToolCall {
id: id.clone(),
name: name.clone(),
input_json: String::new(),
};
Some(Ok(StreamChunk {
content: None,
tool_calls: Some(vec![ToolCallDelta {
index,
id: Some(id),
function_name: Some(name),
arguments_delta: None,
}]),
done: false,
usage: None,
}))
}
}
}
StreamEvent::ContentBlockDelta { index, delta } => match delta {
ContentDelta::TextDelta { text } => Some(Ok(StreamChunk {
content: Some(text),
tool_calls: None,
done: false,
usage: None,
})),
ContentDelta::InputJsonDelta { partial_json } => {
// Accumulate the JSON
let mut state = tool_state.lock().await;
if index < state.len() {
state[index].input_json.push_str(&partial_json);
}
Some(Ok(StreamChunk {
content: None,
tool_calls: Some(vec![ToolCallDelta {
index,
id: None,
function_name: None,
arguments_delta: Some(partial_json),
}]),
done: false,
usage: None,
}))
}
},
StreamEvent::MessageDelta { usage, .. } => {
let u = usage.map(|u| Usage {
prompt_tokens: u.input_tokens,
completion_tokens: u.output_tokens,
total_tokens: u.input_tokens + u.output_tokens,
});
Some(Ok(StreamChunk {
content: None,
tool_calls: None,
done: false,
usage: u,
}))
}
StreamEvent::MessageStop => Some(Ok(StreamChunk {
content: None,
tool_calls: None,
done: true,
usage: None,
})),
StreamEvent::Error { error } => Some(Err(LlmError::Api {
message: error.message,
code: Some(error.error_type),
})),
// Ignore other events
StreamEvent::MessageStart { .. }
| StreamEvent::ContentBlockStop { .. }
| StreamEvent::Ping => None,
}
}
// ============================================================================
// ProviderInfo Implementation
// ============================================================================
/// Known Claude models with their specifications
fn get_claude_models() -> Vec<ModelInfo> {
vec![
ModelInfo {
id: "claude-opus-4-20250514".to_string(),
display_name: Some("Claude Opus 4".to_string()),
description: Some("Most capable model for complex tasks".to_string()),
context_window: Some(200_000),
max_output_tokens: Some(32_000),
supports_tools: true,
supports_vision: true,
input_price_per_mtok: Some(15.0),
output_price_per_mtok: Some(75.0),
},
ModelInfo {
id: "claude-sonnet-4-20250514".to_string(),
display_name: Some("Claude Sonnet 4".to_string()),
description: Some("Best balance of performance and speed".to_string()),
context_window: Some(200_000),
max_output_tokens: Some(64_000),
supports_tools: true,
supports_vision: true,
input_price_per_mtok: Some(3.0),
output_price_per_mtok: Some(15.0),
},
ModelInfo {
id: "claude-haiku-3-5-20241022".to_string(),
display_name: Some("Claude 3.5 Haiku".to_string()),
description: Some("Fast and affordable for simple tasks".to_string()),
context_window: Some(200_000),
max_output_tokens: Some(8_192),
supports_tools: true,
supports_vision: true,
input_price_per_mtok: Some(0.80),
output_price_per_mtok: Some(4.0),
},
]
}
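/// Estimate the USD cost of a request against one of the models above.
///
/// A convenience sketch added for illustration (not part of the original
/// provider API): prices are quoted per million tokens, so each direction
/// costs `tokens as f64 / 1_000_000.0 * price_per_mtok`.
#[allow(dead_code)]
fn estimate_cost_usd(model: &ModelInfo, input_tokens: u32, output_tokens: u32) -> Option<f64> {
    let input = f64::from(input_tokens) / 1_000_000.0 * model.input_price_per_mtok?;
    let output = f64::from(output_tokens) / 1_000_000.0 * model.output_price_per_mtok?;
    Some(input + output)
}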
#[async_trait]
impl ProviderInfo for AnthropicClient {
async fn status(&self) -> Result<ProviderStatus, LlmError> {
let authenticated = self.auth.bearer_token().is_some();
// Try to reach the API with a simple request
let reachable = if authenticated {
// Test with a minimal message to verify auth works
let test_messages = vec![ChatMessage::user("Hi")];
let test_opts = ChatOptions::new(&self.model).with_max_tokens(1);
match self.chat(&test_messages, &test_opts, None).await {
Ok(_) => true,
Err(LlmError::Auth(_)) => false, // Auth failed
Err(_) => true, // Other errors mean API is reachable
}
} else {
false
};
let account = if authenticated && reachable {
self.account_info().await.ok().flatten()
} else {
None
};
let message = if !authenticated {
Some("Not authenticated - run 'owlen login anthropic' to authenticate".to_string())
} else if !reachable {
Some("Cannot reach Anthropic API".to_string())
} else {
Some("Connected".to_string())
};
Ok(ProviderStatus {
provider: "anthropic".to_string(),
authenticated,
account,
model: self.model.clone(),
endpoint: API_BASE_URL.to_string(),
reachable,
message,
})
}
async fn account_info(&self) -> Result<Option<AccountInfo>, LlmError> {
// Anthropic doesn't have a public account info endpoint
// Return None - account info would come from OAuth token claims
Ok(None)
}
async fn usage_stats(&self) -> Result<Option<UsageStats>, LlmError> {
// Anthropic doesn't expose usage stats via API
// This would require the admin/billing API with different auth
Ok(None)
}
async fn list_models(&self) -> Result<Vec<ModelInfo>, LlmError> {
// Return known models - Anthropic doesn't have a models list endpoint
Ok(get_claude_models())
}
async fn model_info(&self, model_id: &str) -> Result<Option<ModelInfo>, LlmError> {
let models = get_claude_models();
Ok(models.into_iter().find(|m| m.id == model_id))
}
}
#[cfg(test)]
mod tests {
use super::*;
use llm_core::ToolParameters;
use serde_json::json;
#[test]
fn test_message_conversion() {
let messages = vec![
ChatMessage::system("You are helpful"),
ChatMessage::user("Hello"),
ChatMessage::assistant("Hi there!"),
];
let (system, anthropic_msgs) = AnthropicClient::prepare_messages(&messages);
assert_eq!(system, Some("You are helpful".to_string()));
assert_eq!(anthropic_msgs.len(), 2);
assert_eq!(anthropic_msgs[0].role, "user");
assert_eq!(anthropic_msgs[1].role, "assistant");
}
#[test]
fn test_tool_conversion() {
let tools = vec![Tool::function(
"read_file",
"Read a file's contents",
ToolParameters::object(
json!({
"path": {
"type": "string",
"description": "File path"
}
}),
vec!["path".to_string()],
),
)];
let anthropic_tools = AnthropicClient::prepare_tools(Some(&tools)).unwrap();
assert_eq!(anthropic_tools.len(), 1);
assert_eq!(anthropic_tools[0].name, "read_file");
assert_eq!(anthropic_tools[0].description, "Read a file's contents");
}
}