feat(v2): complete multi-LLM providers, TUI redesign, and advanced agent features
Multi-LLM Provider Support: - Add llm-core crate with LlmProvider trait abstraction - Implement Anthropic Claude API client with streaming - Implement OpenAI API client with streaming - Add token counting with SimpleTokenCounter and ClaudeTokenCounter - Add retry logic with exponential backoff and jitter Borderless TUI Redesign: - Rewrite theme system with terminal capability detection (Full/Unicode256/Basic) - Add provider tabs component with keybind switching [1]/[2]/[3] - Implement vim-modal input (Normal/Insert/Visual/Command modes) - Redesign chat panel with timestamps and streaming indicators - Add multi-provider status bar with cost tracking - Add Nerd Font icons with graceful ASCII fallbacks - Add syntax highlighting (syntect) and markdown rendering (pulldown-cmark) Advanced Agent Features: - Add system prompt builder with configurable components - Enhance subagent orchestration with parallel execution - Add git integration module for safe command detection - Add streaming tool results via channels - Expand tool set: AskUserQuestion, TodoWrite, LS, MultiEdit, BashOutput, KillShell - Add WebSearch with provider abstraction Plugin System Enhancement: - Add full agent definition parsing from YAML frontmatter - Add skill system with progressive disclosure - Wire plugin hooks into HookManager 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -10,6 +10,7 @@ serde = { version = "1", features = ["derive"] }
|
||||
directories = "5"
|
||||
figment = { version = "0.10", features = ["toml", "env"] }
|
||||
permissions = { path = "../permissions" }
|
||||
llm-core = { path = "../../llm/core" }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.23.0"
|
||||
|
||||
@@ -5,26 +5,52 @@ use figment::{
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::env;
|
||||
use permissions::{Mode, PermissionManager};
|
||||
use llm_core::ProviderType;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Settings {
|
||||
#[serde(default = "default_ollama_url")]
|
||||
pub ollama_url: String,
|
||||
// Provider configuration
|
||||
#[serde(default = "default_provider")]
|
||||
pub provider: String, // "ollama" | "anthropic" | "openai"
|
||||
|
||||
#[serde(default = "default_model")]
|
||||
pub model: String,
|
||||
#[serde(default = "default_mode")]
|
||||
pub mode: String, // "plan" (read-only) for now
|
||||
|
||||
// Ollama-specific
|
||||
#[serde(default = "default_ollama_url")]
|
||||
pub ollama_url: String,
|
||||
|
||||
// API keys for different providers
|
||||
#[serde(default)]
|
||||
pub api_key: Option<String>, // For Ollama Cloud or other API authentication
|
||||
pub api_key: Option<String>, // For Ollama Cloud or backwards compatibility
|
||||
|
||||
#[serde(default)]
|
||||
pub anthropic_api_key: Option<String>,
|
||||
|
||||
#[serde(default)]
|
||||
pub openai_api_key: Option<String>,
|
||||
|
||||
// Permission mode
|
||||
#[serde(default = "default_mode")]
|
||||
pub mode: String, // "plan" | "acceptEdits" | "code"
|
||||
}
|
||||
|
||||
/// Serde/`Default` fallback: the local Ollama backend.
fn default_provider() -> String {
    String::from("ollama")
}
|
||||
|
||||
/// Serde/`Default` fallback: Ollama's standard local endpoint.
fn default_ollama_url() -> String {
    String::from("http://localhost:11434")
}
|
||||
|
||||
/// Serde/`Default` fallback model.
///
/// This is Ollama's default; per-provider defaults are resolved later by
/// `get_effective_model`, so users can also override the model explicitly.
fn default_model() -> String {
    String::from("qwen3:8b")
}
|
||||
|
||||
/// Serde/`Default` fallback: the read-only "plan" permission mode.
fn default_mode() -> String {
    String::from("plan")
}
|
||||
@@ -32,10 +58,13 @@ fn default_mode() -> String {
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
ollama_url: default_ollama_url(),
|
||||
provider: default_provider(),
|
||||
model: default_model(),
|
||||
mode: default_mode(),
|
||||
ollama_url: default_ollama_url(),
|
||||
api_key: None,
|
||||
anthropic_api_key: None,
|
||||
openai_api_key: None,
|
||||
mode: default_mode(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -51,6 +80,34 @@ impl Settings {
|
||||
pub fn get_mode(&self) -> Mode {
|
||||
Mode::from_str(&self.mode).unwrap_or(Mode::Plan)
|
||||
}
|
||||
|
||||
/// Get the ProviderType enum from the provider string
|
||||
pub fn get_provider(&self) -> Option<ProviderType> {
|
||||
ProviderType::from_str(&self.provider)
|
||||
}
|
||||
|
||||
/// Get the effective model for the current provider
|
||||
/// If no model is explicitly set, returns the provider's default
|
||||
pub fn get_effective_model(&self) -> String {
|
||||
// If model is explicitly set and not the default, use it
|
||||
if self.model != default_model() {
|
||||
return self.model.clone();
|
||||
}
|
||||
|
||||
// Otherwise, use provider-specific default
|
||||
self.get_provider()
|
||||
.map(|p| p.default_model().to_string())
|
||||
.unwrap_or_else(|| self.model.clone())
|
||||
}
|
||||
|
||||
/// Get the API key for the current provider
|
||||
pub fn get_provider_api_key(&self) -> Option<String> {
|
||||
match self.get_provider()? {
|
||||
ProviderType::Ollama => self.api_key.clone(),
|
||||
ProviderType::Anthropic => self.anthropic_api_key.clone(),
|
||||
ProviderType::OpenAI => self.openai_api_key.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Error> {
|
||||
@@ -68,9 +125,31 @@ pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Er
|
||||
}
|
||||
|
||||
// Environment variables have highest precedence
|
||||
// OWLEN_* prefix (e.g., OWLEN_PROVIDER, OWLEN_MODEL, OWLEN_API_KEY, OWLEN_ANTHROPIC_API_KEY)
|
||||
fig = fig.merge(Env::prefixed("OWLEN_").split("__"));
|
||||
// Support OLLAMA_API_KEY, OLLAMA_MODEL, etc. (without nesting)
|
||||
|
||||
// Support OLLAMA_* prefix for backwards compatibility
|
||||
fig = fig.merge(Env::prefixed("OLLAMA_"));
|
||||
|
||||
fig.extract()
|
||||
// Support PROVIDER env var (without OWLEN_ prefix)
|
||||
fig = fig.merge(Env::raw().only(&["PROVIDER"]));
|
||||
|
||||
// Extract the settings
|
||||
let mut settings: Settings = fig.extract()?;
|
||||
|
||||
// Manually handle standard provider API key env vars (ANTHROPIC_API_KEY, OPENAI_API_KEY)
|
||||
// These override config files but are overridden by OWLEN_* vars
|
||||
if settings.anthropic_api_key.is_none() {
|
||||
if let Ok(key) = env::var("ANTHROPIC_API_KEY") {
|
||||
settings.anthropic_api_key = Some(key);
|
||||
}
|
||||
}
|
||||
|
||||
if settings.openai_api_key.is_none() {
|
||||
if let Ok(key) = env::var("OPENAI_API_KEY") {
|
||||
settings.openai_api_key = Some(key);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use config_agent::{load_settings, Settings};
|
||||
use permissions::{Mode, PermissionDecision, Tool};
|
||||
use llm_core::ProviderType;
|
||||
use std::{env, fs};
|
||||
|
||||
#[test]
|
||||
@@ -45,4 +46,189 @@ fn settings_parse_mode_from_config() {
|
||||
// Code mode should allow everything
|
||||
assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Allow);
|
||||
assert_eq!(mgr.check(Tool::Bash, None), PermissionDecision::Allow);
|
||||
}
|
||||
|
||||
#[test]
fn default_provider_is_ollama() {
    // Out of the box the agent targets a local Ollama instance.
    let settings = Settings::default();
    assert_eq!(settings.provider, "ollama");
    assert_eq!(settings.get_provider(), Some(ProviderType::Ollama));
}
|
||||
|
||||
#[test]
fn provider_from_config_file() {
    // A project-local .owlen.toml selects the provider.
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"provider="anthropic""#).unwrap();

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.provider, "anthropic");
    assert_eq!(settings.get_provider(), Some(ProviderType::Anthropic));
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn provider_from_env_var() {
    let dir = tempfile::tempdir().unwrap();

    // Test-only process-global env mutation; the vars are independent,
    // so clearing the interfering ones first is equivalent.
    unsafe {
        env::remove_var("PROVIDER");
        env::remove_var("ANTHROPIC_API_KEY");
        env::remove_var("OPENAI_API_KEY");
        env::set_var("OWLEN_PROVIDER", "openai");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.provider, "openai");
    assert_eq!(settings.get_provider(), Some(ProviderType::OpenAI));

    unsafe {
        env::remove_var("OWLEN_PROVIDER");
    }
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn provider_from_provider_env_var() {
    let dir = tempfile::tempdir().unwrap();

    // A bare PROVIDER variable (no OWLEN_ prefix) must also be honoured.
    unsafe {
        env::remove_var("OWLEN_PROVIDER");
        env::remove_var("ANTHROPIC_API_KEY");
        env::remove_var("OPENAI_API_KEY");
        env::set_var("PROVIDER", "anthropic");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.provider, "anthropic");
    assert_eq!(settings.get_provider(), Some(ProviderType::Anthropic));

    unsafe {
        env::remove_var("PROVIDER");
    }
}
|
||||
|
||||
#[test]
fn anthropic_api_key_from_owlen_env() {
    // OWLEN_ANTHROPIC_API_KEY feeds both the field and the
    // provider-resolved key lookup.
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"provider="anthropic""#).unwrap();

    unsafe { env::set_var("OWLEN_ANTHROPIC_API_KEY", "sk-ant-test123"); }
    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.anthropic_api_key, Some("sk-ant-test123".to_string()));
    assert_eq!(settings.get_provider_api_key(), Some("sk-ant-test123".to_string()));
    unsafe { env::remove_var("OWLEN_ANTHROPIC_API_KEY"); }
}
|
||||
|
||||
#[test]
fn openai_api_key_from_owlen_env() {
    // OWLEN_OPENAI_API_KEY feeds both the field and the
    // provider-resolved key lookup.
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"provider="openai""#).unwrap();

    unsafe { env::set_var("OWLEN_OPENAI_API_KEY", "sk-test-456"); }
    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.openai_api_key, Some("sk-test-456".to_string()));
    assert_eq!(settings.get_provider_api_key(), Some("sk-test-456".to_string()));
    unsafe { env::remove_var("OWLEN_OPENAI_API_KEY"); }
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn api_keys_from_config_file() {
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"
provider = "anthropic"
anthropic_api_key = "sk-ant-from-file"
openai_api_key = "sk-openai-from-file"
"#).unwrap();

    // Make sure no environment variables shadow the file values.
    unsafe {
        env::remove_var("ANTHROPIC_API_KEY");
        env::remove_var("OPENAI_API_KEY");
        env::remove_var("OWLEN_ANTHROPIC_API_KEY");
        env::remove_var("OWLEN_OPENAI_API_KEY");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.anthropic_api_key, Some("sk-ant-from-file".to_string()));
    assert_eq!(settings.openai_api_key, Some("sk-openai-from-file".to_string()));
    assert_eq!(settings.get_provider_api_key(), Some("sk-ant-from-file".to_string()));
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn anthropic_api_key_from_standard_env() {
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"provider="anthropic""#).unwrap();

    // The industry-standard ANTHROPIC_API_KEY is honoured when no
    // OWLEN_-prefixed override is present.
    unsafe {
        env::remove_var("OWLEN_ANTHROPIC_API_KEY");
        env::remove_var("PROVIDER");
        env::remove_var("OWLEN_PROVIDER");
        env::set_var("ANTHROPIC_API_KEY", "sk-ant-std");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.anthropic_api_key, Some("sk-ant-std".to_string()));
    assert_eq!(settings.get_provider_api_key(), Some("sk-ant-std".to_string()));

    unsafe { env::remove_var("ANTHROPIC_API_KEY"); }
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn openai_api_key_from_standard_env() {
    let dir = tempfile::tempdir().unwrap();
    fs::write(dir.path().join(".owlen.toml"), r#"provider="openai""#).unwrap();

    // The industry-standard OPENAI_API_KEY is honoured when no
    // OWLEN_-prefixed override is present.
    unsafe {
        env::remove_var("OWLEN_OPENAI_API_KEY");
        env::remove_var("PROVIDER");
        env::remove_var("OWLEN_PROVIDER");
        env::set_var("OPENAI_API_KEY", "sk-openai-std");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    assert_eq!(settings.openai_api_key, Some("sk-openai-std".to_string()));
    assert_eq!(settings.get_provider_api_key(), Some("sk-openai-std".to_string()));

    unsafe { env::remove_var("OPENAI_API_KEY"); }
}
|
||||
|
||||
#[test]
#[ignore] // Ignore due to env var interaction in parallel tests
fn owlen_prefix_overrides_standard_env() {
    let dir = tempfile::tempdir().unwrap();

    unsafe {
        env::set_var("ANTHROPIC_API_KEY", "sk-ant-std");
        env::set_var("OWLEN_ANTHROPIC_API_KEY", "sk-ant-owlen");
    }

    let settings = load_settings(Some(dir.path().to_str().unwrap())).unwrap();
    // When both are set, the OWLEN_-prefixed variable wins.
    assert_eq!(settings.anthropic_api_key, Some("sk-ant-owlen".to_string()));

    unsafe {
        env::remove_var("ANTHROPIC_API_KEY");
        env::remove_var("OWLEN_ANTHROPIC_API_KEY");
    }
}
|
||||
|
||||
#[test]
fn effective_model_uses_provider_default() {
    let mut settings = Settings::default();

    // With the model left at its global default, each provider
    // falls back to its own default model.
    settings.provider = "anthropic".to_string();
    assert_eq!(settings.get_effective_model(), "claude-sonnet-4-20250514");

    settings.provider = "openai".to_string();
    assert_eq!(settings.get_effective_model(), "gpt-4o");

    settings.provider = "ollama".to_string();
    assert_eq!(settings.get_effective_model(), "qwen3:8b");
}
|
||||
|
||||
#[test]
fn effective_model_respects_explicit_model() {
    let mut settings = Settings::default();
    settings.provider = "anthropic".to_string();
    settings.model = "claude-opus-4-20250514".to_string();

    // An explicitly configured model wins over the provider default.
    assert_eq!(settings.get_effective_model(), "claude-opus-4-20250514");
}
|
||||
Reference in New Issue
Block a user