feat(v2): complete multi-LLM providers, TUI redesign, and advanced agent features
Multi-LLM Provider Support:
- Add llm-core crate with LlmProvider trait abstraction
- Implement Anthropic Claude API client with streaming
- Implement OpenAI API client with streaming
- Add token counting with SimpleTokenCounter and ClaudeTokenCounter
- Add retry logic with exponential backoff and jitter (sketched below, after this message)

Borderless TUI Redesign:
- Rewrite theme system with terminal capability detection (Full/Unicode256/Basic)
- Add provider tabs component with keybind switching [1]/[2]/[3]
- Implement vim-modal input (Normal/Insert/Visual/Command modes)
- Redesign chat panel with timestamps and streaming indicators
- Add multi-provider status bar with cost tracking
- Add Nerd Font icons with graceful ASCII fallbacks
- Add syntax highlighting (syntect) and markdown rendering (pulldown-cmark)

Advanced Agent Features:
- Add system prompt builder with configurable components
- Enhance subagent orchestration with parallel execution
- Add git integration module for safe command detection
- Add streaming tool results via channels
- Expand tool set: AskUserQuestion, TodoWrite, LS, MultiEdit, BashOutput, KillShell
- Add WebSearch with provider abstraction

Plugin System Enhancement:
- Add full agent definition parsing from YAML frontmatter
- Add skill system with progressive disclosure
- Wire plugin hooks into HookManager

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
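The retry behaviour called out above does not appear in the diff below. For reference, a minimal sketch of exponential backoff with jitter; the helper name `retry_with_backoff` and its parameters are illustrative, not the actual llm-core API:

```rust
// Sketch only: not the llm-core implementation.
use std::thread::sleep;
use std::time::{Duration, SystemTime, UNIX_EPOCH};

/// Retry `op` up to `max_attempts` times, doubling the delay after each
/// failure and adding jitter so concurrent clients do not retry in lockstep.
fn retry_with_backoff<T, E>(
    max_attempts: u32,
    base_delay_ms: u64,
    mut op: impl FnMut() -> Result<T, E>,
) -> Result<T, E> {
    let mut attempt = 0;
    loop {
        match op() {
            Ok(value) => return Ok(value),
            Err(err) => {
                attempt += 1;
                if attempt >= max_attempts {
                    return Err(err);
                }
                // Exponential backoff: base * 2^(attempt - 1), capped at 30s.
                let backoff = base_delay_ms
                    .saturating_mul(2u64.saturating_pow(attempt - 1))
                    .min(30_000);
                // Jitter: up to 50% extra, derived from the clock here only to
                // keep the sketch dependency-free (a real impl would use rand).
                let jitter = SystemTime::now()
                    .duration_since(UNIX_EPOCH)
                    .map(|d| u64::from(d.subsec_millis()) % (backoff / 2 + 1))
                    .unwrap_or(0);
                sleep(Duration::from_millis(backoff + jitter));
            }
        }
    }
}
```

Without the jitter term, every client that hit the same provider hiccup would wake and retry at exactly the same moments.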
@@ -5,26 +5,52 @@ use figment::{
 };
 use serde::{Deserialize, Serialize};
 use std::path::PathBuf;
+use std::env;
 use permissions::{Mode, PermissionManager};
+use llm_core::ProviderType;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Settings {
-    #[serde(default = "default_ollama_url")]
-    pub ollama_url: String,
+    // Provider configuration
+    #[serde(default = "default_provider")]
+    pub provider: String, // "ollama" | "anthropic" | "openai"
+
     #[serde(default = "default_model")]
     pub model: String,
-    #[serde(default = "default_mode")]
-    pub mode: String, // "plan" (read-only) for now
+
+    // Ollama-specific
+    #[serde(default = "default_ollama_url")]
+    pub ollama_url: String,
+
+    // API keys for different providers
     #[serde(default)]
-    pub api_key: Option<String>, // For Ollama Cloud or other API authentication
+    pub api_key: Option<String>, // For Ollama Cloud or backwards compatibility
+
+    #[serde(default)]
+    pub anthropic_api_key: Option<String>,
+
+    #[serde(default)]
+    pub openai_api_key: Option<String>,
+
+    // Permission mode
+    #[serde(default = "default_mode")]
+    pub mode: String, // "plan" | "acceptEdits" | "code"
 }
 
+fn default_provider() -> String {
+    "ollama".into()
+}
+
 fn default_ollama_url() -> String {
     "http://localhost:11434".into()
 }
+
 fn default_model() -> String {
+    // Default model depends on provider, but we use ollama's default here
+    // Users can override this per-provider or use get_effective_model()
    "qwen3:8b".into()
 }
+
 fn default_mode() -> String {
     "plan".into()
 }
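Since every field above carries a serde default, a config file may supply any subset of keys. A simplified, self-contained stand-in for the pattern (not the crate's Settings struct; it assumes serde with the derive feature plus serde_json, purely for demonstration):

```rust
use serde::Deserialize;

// Stand-in struct mirroring the default-function pattern used by Settings above.
#[derive(Debug, Deserialize)]
struct MiniSettings {
    #[serde(default = "default_provider")]
    provider: String,
    #[serde(default)]
    anthropic_api_key: Option<String>,
}

fn default_provider() -> String {
    "ollama".into()
}

fn main() {
    // Only one key is present; the missing `provider` falls back to default_provider().
    let s: MiniSettings =
        serde_json::from_str(r#"{ "anthropic_api_key": "sk-ant-example" }"#).unwrap();
    assert_eq!(s.provider, "ollama");
    assert_eq!(s.anthropic_api_key.as_deref(), Some("sk-ant-example"));
}
```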
@@ -32,10 +58,13 @@ fn default_mode() -> String {
 impl Default for Settings {
     fn default() -> Self {
         Self {
-            ollama_url: default_ollama_url(),
+            provider: default_provider(),
             model: default_model(),
-            mode: default_mode(),
+            ollama_url: default_ollama_url(),
             api_key: None,
+            anthropic_api_key: None,
+            openai_api_key: None,
+            mode: default_mode(),
         }
     }
 }
@@ -51,6 +80,34 @@ impl Settings {
     pub fn get_mode(&self) -> Mode {
         Mode::from_str(&self.mode).unwrap_or(Mode::Plan)
     }
+
+    /// Get the ProviderType enum from the provider string
+    pub fn get_provider(&self) -> Option<ProviderType> {
+        ProviderType::from_str(&self.provider)
+    }
+
+    /// Get the effective model for the current provider
+    /// If no model is explicitly set, returns the provider's default
+    pub fn get_effective_model(&self) -> String {
+        // If model is explicitly set and not the default, use it
+        if self.model != default_model() {
+            return self.model.clone();
+        }
+
+        // Otherwise, use provider-specific default
+        self.get_provider()
+            .map(|p| p.default_model().to_string())
+            .unwrap_or_else(|| self.model.clone())
+    }
+
+    /// Get the API key for the current provider
+    pub fn get_provider_api_key(&self) -> Option<String> {
+        match self.get_provider()? {
+            ProviderType::Ollama => self.api_key.clone(),
+            ProviderType::Anthropic => self.anthropic_api_key.clone(),
+            ProviderType::OpenAI => self.openai_api_key.clone(),
+        }
+    }
 }
 
 pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Error> {
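Taken together, these helpers give callers a single resolution rule: an explicitly configured model wins over the provider's default, and the provider-specific key wins over the legacy `api_key`. A hedged usage sketch; `choose_backend` is a hypothetical caller, not part of the crate:

```rust
// Illustrative only: assumes a Settings value loaded via load_settings(None).
fn choose_backend(settings: &Settings) -> Option<(ProviderType, String, Option<String>)> {
    let provider = settings.get_provider()?; // None if the provider string is unrecognised
    let model = settings.get_effective_model(); // explicit model, else the provider default
    let api_key = settings.get_provider_api_key(); // provider-specific key, if configured
    Some((provider, model, api_key))
}
```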
@@ -68,9 +125,31 @@ pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Error> {
     }
 
     // Environment variables have highest precedence
+    // OWLEN_* prefix (e.g., OWLEN_PROVIDER, OWLEN_MODEL, OWLEN_API_KEY, OWLEN_ANTHROPIC_API_KEY)
     fig = fig.merge(Env::prefixed("OWLEN_").split("__"));
-    // Support OLLAMA_API_KEY, OLLAMA_MODEL, etc. (without nesting)
+
+    // Support OLLAMA_* prefix for backwards compatibility
     fig = fig.merge(Env::prefixed("OLLAMA_"));
 
-    fig.extract()
+    // Support PROVIDER env var (without OWLEN_ prefix)
+    fig = fig.merge(Env::raw().only(&["PROVIDER"]));
+
+    // Extract the settings
+    let mut settings: Settings = fig.extract()?;
+
+    // Manually handle standard provider API key env vars (ANTHROPIC_API_KEY, OPENAI_API_KEY)
+    // These override config files but are overridden by OWLEN_* vars
+    if settings.anthropic_api_key.is_none() {
+        if let Ok(key) = env::var("ANTHROPIC_API_KEY") {
+            settings.anthropic_api_key = Some(key);
+        }
+    }
+
+    if settings.openai_api_key.is_none() {
+        if let Ok(key) = env::var("OPENAI_API_KEY") {
+            settings.openai_api_key = Some(key);
+        }
+    }
+
+    Ok(settings)
 }
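The merge order above means OWLEN_*-prefixed variables take precedence over the standard ANTHROPIC_API_KEY and OPENAI_API_KEY variables, which in turn only fill fields the config files left unset. An illustrative check of that precedence (it assumes the crate's load_settings and figment are in scope and that no config file is present):

```rust
// Illustrative only: expected outcome given the merge order in load_settings above,
// assuming the process was started with
//   OWLEN_PROVIDER=anthropic ANTHROPIC_API_KEY=sk-ant-fallback OWLEN_ANTHROPIC_API_KEY=sk-ant-owlen
fn main() -> Result<(), figment::Error> {
    let settings = load_settings(None)?;
    // OWLEN_PROVIDER reaches the struct through the OWLEN_-prefixed figment source.
    assert_eq!(settings.provider, "anthropic");
    // The OWLEN_* value wins; ANTHROPIC_API_KEY is only consulted while the field is still None.
    assert_eq!(settings.anthropic_api_key.as_deref(), Some("sk-ant-owlen"));
    Ok(())
}
```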