refactor(core): remove provider module, migrate to LlmProvider, add client mode handling, improve serialization error handling, update workspace edition, and clean up conditionals and imports

2025-10-12 12:38:55 +02:00
parent c2f5ccea3b
commit 7851af14a9
63 changed files with 2221 additions and 1236 deletions

View File

@@ -14,7 +14,7 @@ exclude = []
 [workspace.package]
 version = "0.1.9"
-edition = "2021"
+edition = "2024"
 authors = ["Owlibou"]
 license = "AGPL-3.0"
 repository = "https://somegit.dev/Owlibou/owlen"
@@ -43,7 +43,7 @@ serde_json = { version = "1.0" }
 # Utilities
 uuid = { version = "1.0", features = ["v4", "serde"] }
 anyhow = "1.0"
-thiserror = "1.0"
+thiserror = "2.0"
 nix = "0.29"
 which = "6.0"
 tempfile = "3.8"
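The edition bump drives most of the mechanical churn in the hunks below: Rust 2024 stabilizes let chains, so the nested `if let` / `if` pairs throughout the crate collapse into single conditions, and it makes `std::env::set_var` an `unsafe` function, which is why the CLI gains a documented wrapper. A minimal sketch of both patterns (illustrative only, not lifted verbatim from the diff):

```rust
use std::collections::HashMap;

fn demo(providers: &mut HashMap<String, String>) {
    // Rust 2024 let chain: one condition block instead of an `if let`
    // nested inside an `if`, mirroring the conditional cleanups below.
    if providers.contains_key("ollama-cloud")
        && let Some(legacy) = providers.remove("ollama-cloud")
    {
        providers.insert("ollama".to_string(), legacy);
    }

    // In the 2024 edition `set_var` is unsafe because it mutates
    // process-wide state; callers must uphold single-threaded access.
    unsafe {
        std::env::set_var("OWLEN_AUTO_CONSENT", "1");
    }
}
```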

View File

@@ -30,6 +30,7 @@ The OWLEN interface features a clean, multi-panel layout with vim-inspired navig
 - **Streaming Responses**: Real-time token streaming from Ollama.
 - **Advanced Text Editing**: Multi-line input, history, and clipboard support.
 - **Session Management**: Save, load, and manage conversations.
+- **Code Side Panel**: Switch to code mode (`:mode code`) and open files inline with `:open <path>` for LLM-assisted coding.
 - **Theming System**: 10 built-in themes and support for custom themes.
 - **Modular Architecture**: Extensible provider system (Ollama today, additional providers on the roadmap).
 - **Guided Setup**: `owlen config doctor` upgrades legacy configs and verifies your environment in seconds.
@@ -118,6 +119,16 @@ You can also add custom themes alongside the config directory (e.g., `~/.config/
 See the [themes/README.md](themes/README.md) for more details on theming.
+## Testing
+Owlen uses standard Rust tooling for verification. Run the full test suite with:
+```bash
+cargo test
+```
+Unit tests cover the command palette state machine, agent response parsing, and key MCP abstractions. Formatting and lint checks can be run with `cargo fmt --all` and `cargo clippy`, respectively.
 ## Roadmap
 Upcoming milestones focus on feature parity with modern code assistants while keeping Owlen local-first:

View File

@@ -1,13 +1,15 @@
+use std::ffi::OsStr;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
-use anyhow::{anyhow, bail, Context, Result};
+use anyhow::{Context, Result, anyhow, bail};
 use clap::Subcommand;
+use owlen_core::LlmProvider;
+use owlen_core::ProviderConfig;
 use owlen_core::config as core_config;
 use owlen_core::config::Config;
 use owlen_core::credentials::{ApiCredentials, CredentialManager, OLLAMA_CLOUD_CREDENTIAL_ID};
 use owlen_core::encryption;
-use owlen_core::provider::{LLMProvider, ProviderConfig};
 use owlen_core::providers::OllamaProvider;
 use owlen_core::storage::StorageManager;
@@ -219,12 +221,11 @@ fn ensure_provider_entry(config: &mut Config, provider: &str, endpoint: &str) {
if provider == "ollama" if provider == "ollama"
&& config.providers.contains_key("ollama-cloud") && config.providers.contains_key("ollama-cloud")
&& !config.providers.contains_key("ollama") && !config.providers.contains_key("ollama")
&& let Some(mut legacy) = config.providers.remove("ollama-cloud")
{ {
if let Some(mut legacy) = config.providers.remove("ollama-cloud") {
legacy.provider_type = "ollama".to_string(); legacy.provider_type = "ollama".to_string();
config.providers.insert("ollama".to_string(), legacy); config.providers.insert("ollama".to_string(), legacy);
} }
}
core_config::ensure_provider_config(config, provider); core_config::ensure_provider_config(config, provider);
@@ -247,12 +248,24 @@ fn canonical_provider_name(provider: &str) -> String {
     }
 }
+pub(crate) fn set_env_var<K, V>(key: K, value: V)
+where
+    K: AsRef<OsStr>,
+    V: AsRef<OsStr>,
+{
+    // Safety: the CLI updates process-wide environment variables during startup while no
+    // other threads are mutating the environment.
+    unsafe {
+        std::env::set_var(key, value);
+    }
+}
 fn set_env_if_missing(var: &str, value: &str) {
     if std::env::var(var)
         .map(|v| v.trim().is_empty())
         .unwrap_or(true)
     {
-        std::env::set_var(var, value);
+        set_env_var(var, value);
     }
 }
@@ -302,18 +315,18 @@ fn unlock_vault(path: &Path) -> Result<encryption::VaultHandle> {
     use std::env;
     if path.exists() {
-        if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD") {
-            if !password.trim().is_empty() {
+        if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD")
+            && !password.trim().is_empty()
+        {
             return encryption::unlock_with_password(path.to_path_buf(), &password)
                 .context("Failed to unlock vault with OWLEN_MASTER_PASSWORD");
         }
-        }
         for attempt in 0..3 {
             let password = encryption::prompt_password("Enter master password: ")?;
             match encryption::unlock_with_password(path.to_path_buf(), &password) {
                 Ok(handle) => {
-                    env::set_var("OWLEN_MASTER_PASSWORD", password);
+                    set_env_var("OWLEN_MASTER_PASSWORD", password);
                     return Ok(handle);
                 }
                 Err(err) => {
@@ -334,7 +347,7 @@ fn unlock_vault(path: &Path) -> Result<encryption::VaultHandle> {
         .unwrap_or(true)
     {
         let password = encryption::prompt_password("Cache master password for this session: ")?;
-        env::set_var("OWLEN_MASTER_PASSWORD", password);
+        set_env_var("OWLEN_MASTER_PASSWORD", password);
     }
     Ok(handle)
 }
@@ -343,25 +356,26 @@ async fn hydrate_api_key(
     config: &mut Config,
     manager: Option<&Arc<CredentialManager>>,
 ) -> Result<Option<String>> {
-    if let Some(manager) = manager {
-        if let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await? {
+    if let Some(manager) = manager
+        && let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await?
+    {
         let key = credentials.api_key.trim().to_string();
         if !key.is_empty() {
             set_env_if_missing("OLLAMA_API_KEY", &key);
             set_env_if_missing("OLLAMA_CLOUD_API_KEY", &key);
         }
-        if let Some(cfg) = provider_entry_mut(config) {
-            if cfg.base_url.is_none() && !credentials.endpoint.trim().is_empty() {
+        if let Some(cfg) = provider_entry_mut(config)
+            && cfg.base_url.is_none()
+            && !credentials.endpoint.trim().is_empty()
+        {
             cfg.base_url = Some(credentials.endpoint);
         }
-        }
         return Ok(Some(key));
     }
-    }
-    if let Some(cfg) = provider_entry(config) {
-        if let Some(key) = cfg
+    if let Some(cfg) = provider_entry(config)
+        && let Some(key) = cfg
         .api_key
         .as_ref()
         .map(|value| value.trim())
@@ -371,7 +385,6 @@ async fn hydrate_api_key(
set_env_if_missing("OLLAMA_CLOUD_API_KEY", key); set_env_if_missing("OLLAMA_CLOUD_API_KEY", key);
return Ok(Some(key.to_string())); return Ok(Some(key.to_string()));
} }
}
Ok(None) Ok(None)
} }

View File

@@ -2,24 +2,23 @@
 mod cloud;
-use anyhow::{anyhow, Result};
+use anyhow::{Result, anyhow};
 use async_trait::async_trait;
 use clap::{Parser, Subcommand};
-use cloud::{load_runtime_credentials, CloudCommand};
+use cloud::{CloudCommand, load_runtime_credentials, set_env_var};
 use owlen_core::config as core_config;
 use owlen_core::{
+    ChatStream, Error, Provider,
     config::{Config, McpMode},
     mcp::remote_client::RemoteMcpClient,
     mode::Mode,
-    provider::ChatStream,
     providers::OllamaProvider,
     session::SessionController,
     storage::StorageManager,
     types::{ChatRequest, ChatResponse, Message, ModelInfo},
-    Error, Provider,
 };
 use owlen_tui::tui_controller::{TuiController, TuiRequest};
-use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
+use owlen_tui::{AppState, ChatApp, Event, EventHandler, SessionEvent, config, ui};
 use std::any::Any;
 use std::borrow::Cow;
 use std::io;
@@ -30,10 +29,10 @@ use tokio_util::sync::CancellationToken;
 use crossterm::{
     event::{DisableBracketedPaste, DisableMouseCapture, EnableBracketedPaste, EnableMouseCapture},
     execute,
-    terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
+    terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
 };
 use futures::stream;
-use ratatui::{prelude::CrosstermBackend, Terminal};
+use ratatui::{Terminal, prelude::CrosstermBackend};
 /// Owlen - Terminal UI for LLM chat
 #[derive(Parser, Debug)]
@@ -132,7 +131,9 @@ async fn run_command(command: OwlenCommand) -> Result<()> {
         OwlenCommand::Config(config_cmd) => run_config_command(config_cmd),
         OwlenCommand::Cloud(cloud_cmd) => cloud::run_cloud_command(cloud_cmd).await,
         OwlenCommand::Upgrade => {
-            println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
+            println!(
+                "To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force"
+            );
             println!(
                 "If you installed from the AUR, use your package manager (e.g., yay -S owlen-git)."
             );
@@ -333,11 +334,11 @@ impl Provider for OfflineProvider {
         }])
     }
-    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
+    async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse, Error> {
         Ok(self.friendly_response(&request.model))
     }
-    async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream, Error> {
+    async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream, Error> {
         let response = self.friendly_response(&request.model);
         Ok(Box::pin(stream::iter(vec![Ok(response)])))
     }
@@ -363,7 +364,7 @@ async fn main() -> Result<()> {
     let initial_mode = if code { Mode::Code } else { Mode::Chat };
     // Set auto-consent for TUI mode to prevent blocking stdin reads
-    std::env::set_var("OWLEN_AUTO_CONSENT", "1");
+    set_env_var("OWLEN_AUTO_CONSENT", "1");
     let color_support = detect_terminal_color_support();
     // Load configuration (or fall back to defaults) for the session controller.

View File

@@ -3,8 +3,8 @@
 //! This module provides the core agent orchestration logic that allows an LLM
 //! to reason about tasks, execute tools, and observe results in an iterative loop.
+use crate::Provider;
 use crate::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse};
-use crate::provider::Provider;
 use crate::types::{ChatParameters, ChatRequest, Message};
 use crate::{Error, Result};
 use serde::{Deserialize, Serialize};
@@ -189,7 +189,7 @@ impl AgentExecutor {
     fn build_system_prompt(&self, tools: &[McpToolDescriptor]) -> String {
         let mut prompt = String::from(
             "You are an AI assistant that uses the ReAct (Reasoning and Acting) pattern to solve tasks.\n\n\
-             You have access to the following tools:\n\n"
+             You have access to the following tools:\n\n",
         );
         for tool in tools {
@@ -230,7 +230,7 @@ impl AgentExecutor {
             tools: None,
         };
-        let response = self.llm_client.chat(request).await?;
+        let response = self.llm_client.send_prompt(request).await?;
         Ok(response.message.content)
     }
@@ -364,13 +364,13 @@ impl AgentExecutor {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use crate::llm::test_utils::MockProvider;
     use crate::mcp::test_utils::MockMcpClient;
-    use crate::provider::test_utils::MockProvider;
     #[test]
     fn test_parse_tool_call() {
         let executor = AgentExecutor {
-            llm_client: Arc::new(MockProvider),
+            llm_client: Arc::new(MockProvider::default()),
             tool_client: Arc::new(MockMcpClient),
             config: AgentConfig::default(),
         };
@@ -399,7 +399,7 @@ ACTION_INPUT: {"query": "Rust programming language"}
     #[test]
     fn test_parse_final_answer() {
         let executor = AgentExecutor {
-            llm_client: Arc::new(MockProvider),
+            llm_client: Arc::new(MockProvider::default()),
             tool_client: Arc::new(MockMcpClient),
             config: AgentConfig::default(),
         };
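For orientation, the parser these tests exercise consumes sentinel-prefixed lines in the model's reply. Only `ACTION_INPUT:` appears verbatim in the hunk context above; the other sentinel names in this sketch are assumptions inferred from the test names and the ReAct prompt, and may differ from the actual parser:

```rust
fn main() {
    // Hypothetical model replies fed to the executor's parser. ACTION_INPUT is
    // confirmed by the hunk context; ACTION and FINAL_ANSWER are assumed names.
    let tool_call_reply = r#"ACTION: web_search
ACTION_INPUT: {"query": "Rust programming language"}"#;
    let final_answer_reply = "FINAL_ANSWER: Rust is a memory-safe systems language.";
    println!("{tool_call_reply}\n---\n{final_answer_reply}");
}
```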

View File

@@ -1,6 +1,6 @@
-use crate::mode::ModeConfig;
-use crate::provider::ProviderConfig;
+use crate::ProviderConfig;
 use crate::Result;
+use crate::mode::ModeConfig;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::fs;
@@ -120,14 +120,14 @@ impl Config {
         .and_then(|value| value.as_str())
         .unwrap_or("0.0.0")
         .to_string();
-    if let Some(agent_table) = parsed.get("agent").and_then(|value| value.as_table()) {
-        if agent_table.contains_key("max_tool_calls") {
+    if let Some(agent_table) = parsed.get("agent").and_then(|value| value.as_table())
+        && agent_table.contains_key("max_tool_calls")
+    {
         log::warn!(
             "Configuration option agent.max_tool_calls is deprecated and ignored. \
              The agent now uses agent.max_iterations."
         );
     }
-    }
     let mut config: Config = parsed
         .try_into()
         .map_err(|e: toml::de::Error| crate::Error::Config(e.to_string()))?;
@@ -180,11 +180,11 @@ impl Config {
         &'a self,
         models: &'a [crate::types::ModelInfo],
     ) -> Option<&'a str> {
-        if let Some(model) = self.general.default_model.as_deref() {
-            if models.iter().any(|m| m.id == model || m.name == model) {
+        if let Some(model) = self.general.default_model.as_deref()
+            && models.iter().any(|m| m.id == model || m.name == model)
+        {
             return Some(model);
         }
-        }
         if let Some(first) = models.first() {
             return Some(&first.id);
@@ -963,9 +963,10 @@ mod tests {
     #[cfg(target_os = "macos")]
     {
         // macOS should use ~/Library/Application Support
-        assert!(path
-            .to_string_lossy()
-            .contains("Library/Application Support"));
+        assert!(
+            path.to_string_lossy()
+                .contains("Library/Application Support")
+        );
     }
     println!("Config conversation path: {}", path.display());

View File

@@ -58,8 +58,8 @@ impl ConsentManager {
     /// Load consent records from vault storage
     pub fn from_vault(vault: &Arc<std::sync::Mutex<VaultHandle>>) -> Self {
         let guard = vault.lock().expect("Vault mutex poisoned");
-        if let Some(consent_data) = guard.settings().get("consent_records") {
-            if let Ok(permanent_records) =
+        if let Some(consent_data) = guard.settings().get("consent_records")
+            && let Ok(permanent_records) =
                 serde_json::from_value::<HashMap<String, ConsentRecord>>(consent_data.clone())
         {
             return Self {
@@ -69,7 +69,6 @@ impl ConsentManager {
                 pending_requests: HashMap::new(),
             };
         }
-        }
         Self::default()
     }
@@ -91,18 +90,18 @@ impl ConsentManager {
         endpoints: Vec<String>,
     ) -> Result<ConsentScope> {
         // Check if already granted permanently
-        if let Some(existing) = self.permanent_records.get(tool_name) {
-            if existing.scope == ConsentScope::Permanent {
+        if let Some(existing) = self.permanent_records.get(tool_name)
+            && existing.scope == ConsentScope::Permanent
+        {
             return Ok(ConsentScope::Permanent);
         }
-        }
         // Check if granted for session
-        if let Some(existing) = self.session_records.get(tool_name) {
-            if existing.scope == ConsentScope::Session {
+        if let Some(existing) = self.session_records.get(tool_name)
+            && existing.scope == ConsentScope::Session
+        {
             return Ok(ConsentScope::Session);
         }
-        }
         // Check if request is already pending (prevent duplicate prompts)
         if self.pending_requests.contains_key(tool_name) {

View File

@@ -1,6 +1,6 @@
+use crate::Result;
 use crate::storage::StorageManager;
 use crate::types::{Conversation, Message};
-use crate::Result;
 use serde_json::{Number, Value};
 use std::collections::{HashMap, VecDeque};
 use std::time::{Duration, Instant};
@@ -213,6 +213,34 @@ impl ConversationManager {
         Ok(())
     }
+    pub fn cancel_stream(&mut self, message_id: Uuid, notice: impl Into<String>) -> Result<()> {
+        let index = self
+            .message_index
+            .get(&message_id)
+            .copied()
+            .ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?;
+        if let Some(message) = self.active_mut().messages.get_mut(index) {
+            message.content = notice.into();
+            message.timestamp = std::time::SystemTime::now();
+            message
+                .metadata
+                .insert(STREAMING_FLAG.to_string(), Value::Bool(false));
+            message.metadata.remove(PLACEHOLDER_FLAG);
+            let millis = std::time::SystemTime::now()
+                .duration_since(std::time::UNIX_EPOCH)
+                .unwrap_or_default()
+                .as_millis() as u64;
+            message.metadata.insert(
+                LAST_CHUNK_TS.to_string(),
+                Value::Number(Number::from(millis)),
+            );
+        }
+        self.streaming.remove(&message_id);
+        Ok(())
+    }
     /// Set tool calls on a streaming message
     pub fn set_tool_calls_on_message(
         &mut self,

View File

@@ -2,7 +2,7 @@ use std::sync::Arc;
 use serde::{Deserialize, Serialize};
-use crate::{storage::StorageManager, Error, Result};
+use crate::{Error, Result, storage::StorageManager};
 #[derive(Serialize, Deserialize, Debug)]
 pub struct ApiCredentials {

View File

@@ -3,10 +3,10 @@ use std::fs;
 use std::path::PathBuf;
 use aes_gcm::{
-    aead::{Aead, KeyInit},
     Aes256Gcm, Nonce,
+    aead::{Aead, KeyInit},
 };
-use anyhow::{bail, Context, Result};
+use anyhow::{Context, Result, bail};
 use ring::digest;
 use ring::rand::{SecureRandom, SystemRandom};
 use serde::{Deserialize, Serialize};

View File

@@ -191,6 +191,12 @@ impl InputBuffer {
             self.history.pop_back();
         }
     }
+    /// Clear saved input history entries.
+    pub fn clear_history(&mut self) {
+        self.history.clear();
+        self.history_index = None;
+    }
 }
 fn prev_char_boundary(buffer: &str, cursor: usize) -> usize {

View File

@@ -11,14 +11,15 @@ pub mod credentials;
 pub mod encryption;
 pub mod formatting;
 pub mod input;
+pub mod llm;
 pub mod mcp;
 pub mod mode;
 pub mod model;
-pub mod provider;
 pub mod providers;
 pub mod router;
 pub mod sandbox;
 pub mod session;
+pub mod state;
 pub mod storage;
 pub mod theme;
 pub mod tools;
@@ -36,18 +37,20 @@ pub use encryption::*;
 pub use formatting::*;
 pub use input::*;
 // Export MCP types but exclude test_utils to avoid ambiguity
+pub use llm::{
+    ChatStream, LlmProvider, Provider, ProviderConfig, ProviderRegistry, send_via_stream,
+};
 pub use mcp::{
-    client, factory, failover, permission, protocol, remote_client, LocalMcpClient, McpServer,
-    McpToolCall, McpToolDescriptor, McpToolResponse,
+    LocalMcpClient, McpServer, McpToolCall, McpToolDescriptor, McpToolResponse, client, factory,
+    failover, permission, protocol, remote_client,
 };
 pub use mode::*;
 pub use model::*;
-// Export provider types but exclude test_utils to avoid ambiguity
-pub use provider::{ChatStream, LLMProvider, Provider, ProviderConfig, ProviderRegistry};
 pub use providers::*;
 pub use router::*;
 pub use sandbox::*;
 pub use session::*;
+pub use state::*;
 pub use theme::*;
 pub use tools::*;
 pub use validation::*;

View File

@@ -0,0 +1,297 @@
//! LLM provider abstractions and registry.
//!
//! This module defines the provider trait hierarchy along with helpers that
//! make it easy to register concrete LLM backends and access them through
//! dynamic dispatch when wiring the application together.
use crate::{Error, Result, types::*};
use anyhow::anyhow;
use futures::{Stream, StreamExt};
use serde_json::Value;
use std::any::Any;
use std::collections::HashMap;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
/// A boxed stream of chat responses produced by a provider.
pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
/// Trait implemented by every LLM backend Owlen can speak to.
///
/// Providers expose both one-shot and streaming prompt APIs. Concrete
/// implementations typically live in `crate::providers`.
pub trait LlmProvider: Send + Sync + 'static + Any + Sized {
/// Stream type returned by [`Self::stream_prompt`].
type Stream: Stream<Item = Result<ChatResponse>> + Send + 'static;
type ListModelsFuture<'a>: Future<Output = Result<Vec<ModelInfo>>> + Send
where
Self: 'a;
type SendPromptFuture<'a>: Future<Output = Result<ChatResponse>> + Send
where
Self: 'a;
type StreamPromptFuture<'a>: Future<Output = Result<Self::Stream>> + Send
where
Self: 'a;
type HealthCheckFuture<'a>: Future<Output = Result<()>> + Send
where
Self: 'a;
/// Human-readable provider identifier.
fn name(&self) -> &str;
/// Return metadata on all models exposed by this provider.
fn list_models(&self) -> Self::ListModelsFuture<'_>;
/// Issue a prompt and wait for the provider to return the full response.
fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_>;
/// Issue a prompt and receive responses incrementally as a stream.
fn stream_prompt(&self, request: ChatRequest) -> Self::StreamPromptFuture<'_>;
/// Perform a lightweight health check.
fn health_check(&self) -> Self::HealthCheckFuture<'_>;
/// Provider-specific configuration schema (optional).
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
/// Access the provider as an `Any` for downcasting.
fn as_any(&self) -> &(dyn Any + Send + Sync) {
self
}
}
/// Helper that requests a streamed generation and yields the first chunk as a
/// regular response. This is handy for providers that only implement the
/// streaming API.
pub async fn send_via_stream<'a, P>(provider: &'a P, request: ChatRequest) -> Result<ChatResponse>
where
P: LlmProvider + 'a,
{
let stream = provider.stream_prompt(request).await?;
let mut boxed: ChatStream = Box::pin(stream);
match boxed.next().await {
Some(Ok(response)) => Ok(response),
Some(Err(err)) => Err(err),
None => Err(Error::Provider(anyhow!(
"Empty chat stream from provider {}",
provider.name()
))),
}
}
/// Object-safe wrapper around [`LlmProvider`] for dynamic dispatch scenarios.
#[async_trait::async_trait]
pub trait Provider: Send + Sync {
fn name(&self) -> &str;
async fn list_models(&self) -> Result<Vec<ModelInfo>>;
async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse>;
async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream>;
async fn health_check(&self) -> Result<()>;
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
fn as_any(&self) -> &(dyn Any + Send + Sync);
}
#[async_trait::async_trait]
impl<T> Provider for T
where
T: LlmProvider,
{
fn name(&self) -> &str {
LlmProvider::name(self)
}
async fn list_models(&self) -> Result<Vec<ModelInfo>> {
LlmProvider::list_models(self).await
}
async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse> {
LlmProvider::send_prompt(self, request).await
}
async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream> {
let stream = LlmProvider::stream_prompt(self, request).await?;
Ok(Box::pin(stream))
}
async fn health_check(&self) -> Result<()> {
LlmProvider::health_check(self).await
}
fn config_schema(&self) -> serde_json::Value {
LlmProvider::config_schema(self)
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
LlmProvider::as_any(self)
}
}
/// Runtime configuration for a provider instance.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProviderConfig {
/// Provider type identifier.
pub provider_type: String,
/// Base URL for API calls.
pub base_url: Option<String>,
/// API key or token material.
pub api_key: Option<String>,
/// Additional provider-specific configuration.
#[serde(flatten)]
pub extra: HashMap<String, Value>,
}
/// Static registry of providers available to the application.
pub struct ProviderRegistry {
providers: HashMap<String, Arc<dyn Provider>>,
}
impl ProviderRegistry {
pub fn new() -> Self {
Self {
providers: HashMap::new(),
}
}
pub fn register<P: LlmProvider + 'static>(&mut self, provider: P) {
self.register_arc(Arc::new(provider));
}
pub fn register_arc(&mut self, provider: Arc<dyn Provider>) {
let name = provider.name().to_string();
self.providers.insert(name, provider);
}
pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> {
self.providers.get(name).cloned()
}
pub fn list_providers(&self) -> Vec<String> {
self.providers.keys().cloned().collect()
}
pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> {
let mut all_models = Vec::new();
for provider in self.providers.values() {
match provider.list_models().await {
Ok(mut models) => all_models.append(&mut models),
Err(_) => {
// Ignore failing providers and continue.
}
}
}
Ok(all_models)
}
}
impl Default for ProviderRegistry {
fn default() -> Self {
Self::new()
}
}
/// Test utilities for constructing mock providers.
#[cfg(test)]
pub mod test_utils {
use super::*;
use futures::stream;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Simple provider stub that always returns the same response.
pub struct MockProvider {
name: String,
response: ChatResponse,
call_count: AtomicUsize,
}
impl MockProvider {
pub fn new(name: impl Into<String>, response: ChatResponse) -> Self {
Self {
name: name.into(),
response,
call_count: AtomicUsize::new(0),
}
}
pub fn call_count(&self) -> usize {
self.call_count.load(Ordering::Relaxed)
}
}
impl Default for MockProvider {
fn default() -> Self {
Self::new(
"mock-provider",
ChatResponse {
message: Message::assistant("mock response".to_string()),
usage: None,
is_streaming: false,
is_final: true,
},
)
}
}
impl LlmProvider for MockProvider {
type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>;
type ListModelsFuture<'a>
= futures::future::Ready<Result<Vec<ModelInfo>>>
where
Self: 'a;
type SendPromptFuture<'a>
= futures::future::Ready<Result<ChatResponse>>
where
Self: 'a;
type StreamPromptFuture<'a>
= futures::future::Ready<Result<Self::Stream>>
where
Self: 'a;
type HealthCheckFuture<'a>
= futures::future::Ready<Result<()>>
where
Self: 'a;
fn name(&self) -> &str {
&self.name
}
fn list_models(&self) -> Self::ListModelsFuture<'_> {
futures::future::ready(Ok(vec![]))
}
fn send_prompt(&self, _request: ChatRequest) -> Self::SendPromptFuture<'_> {
self.call_count.fetch_add(1, Ordering::Relaxed);
futures::future::ready(Ok(self.response.clone()))
}
fn stream_prompt(&self, _request: ChatRequest) -> Self::StreamPromptFuture<'_> {
self.call_count.fetch_add(1, Ordering::Relaxed);
let response = self.response.clone();
futures::future::ready(Ok(stream::iter(vec![Ok(response)])))
}
fn health_check(&self) -> Self::HealthCheckFuture<'_> {
futures::future::ready(Ok(()))
}
}
}
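Downstream usage mirrors the tests that shipped with the old provider module, modulo the renames (`chat` → `send_prompt`, `chat_stream` → `stream_prompt`). A hedged sketch of the intended call pattern; `MockProvider` is `#[cfg(test)]`-only, so outside tests a real backend such as `OllamaProvider` would be registered instead (its constructor is not shown in this diff):

```rust
use owlen_core::{ChatParameters, ChatRequest, Message, Provider, ProviderRegistry, Role};

async fn demo() -> owlen_core::Result<()> {
    let mut registry = ProviderRegistry::new();
    // Assumption: compiled in a test build, where llm::test_utils is available.
    registry.register(owlen_core::llm::test_utils::MockProvider::default());

    // Lookup returns an Arc<dyn Provider>; calls go through dynamic dispatch.
    let provider = registry.get("mock-provider").expect("registered above");
    let request = ChatRequest {
        model: "mock-model".to_string(),
        messages: vec![Message::new(Role::User, "hi".to_string())],
        parameters: ChatParameters::default(),
        tools: None,
    };
    let response = provider.send_prompt(request).await?;
    println!("{}", response.message.content);
    Ok(())
}
```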

View File

@@ -1,7 +1,7 @@
+use crate::Result;
 use crate::mode::Mode;
 use crate::tools::registry::ToolRegistry;
 use crate::validation::SchemaValidator;
-use crate::Result;
 use async_trait::async_trait;
 pub use client::McpClient;
 use serde::{Deserialize, Serialize};
@@ -142,6 +142,11 @@ impl McpClient for LocalMcpClient {
     async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse> {
         self.server.call_tool(call).await
     }
+    async fn set_mode(&self, mode: Mode) -> Result<()> {
+        self.server.set_mode(mode).await;
+        Ok(())
+    }
 }
 #[cfg(test)]

View File

@@ -1,5 +1,5 @@
 use super::{McpToolCall, McpToolDescriptor, McpToolResponse};
-use crate::Result;
+use crate::{Result, mode::Mode};
 use async_trait::async_trait;
 /// Trait for a client that can interact with an MCP server
@@ -10,6 +10,11 @@ pub trait McpClient: Send + Sync {
     /// Call a tool on the server
     async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse>;
+    /// Update the server with the active operating mode.
+    async fn set_mode(&self, _mode: Mode) -> Result<()> {
+        Ok(())
+    }
 }
 // Re-export the concrete implementation that supports stdio and HTTP transports.
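Because the default body is a no-op, existing `McpClient` implementations keep compiling unchanged; only clients with a real server to notify override it, as `LocalMcpClient` does in the previous file and `PermissionLayer` does below. A small sketch of a caller propagating a mode switch through the object-safe trait:

```rust
use owlen_core::Result;
use owlen_core::mcp::McpClient;
use owlen_core::mode::Mode;

// Works for any client the factory produced: local clients forward the mode
// to the in-process server, the remote client accepts it as a no-op.
async fn propagate_mode(client: &dyn McpClient, mode: Mode) -> Result<()> {
    client.set_mode(mode).await
}
```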

View File

@@ -3,7 +3,7 @@
 /// Provides a unified interface for creating MCP clients based on configuration.
 /// Supports switching between local (in-process) and remote (STDIO) execution modes.
 use super::client::McpClient;
-use super::{remote_client::RemoteMcpClient, LocalMcpClient};
+use super::{LocalMcpClient, remote_client::RemoteMcpClient};
 use crate::config::{Config, McpMode};
 use crate::tools::registry::ToolRegistry;
 use crate::validation::SchemaValidator;
@@ -109,8 +109,8 @@ impl McpClientFactory {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::config::McpServerConfig;
     use crate::Error;
+    use crate::config::McpServerConfig;
     fn build_factory(config: Config) -> McpClientFactory {
         let ui = Arc::new(crate::ui::NoOpUiController);

View File

@@ -4,8 +4,8 @@
 /// It wraps MCP clients to filter/whitelist tool calls, log invocations, and prompt for consent.
 use super::client::McpClient;
 use super::{McpToolCall, McpToolDescriptor, McpToolResponse};
-use crate::config::Config;
 use crate::{Error, Result};
+use crate::{config::Config, mode::Mode};
 use async_trait::async_trait;
 use std::collections::HashSet;
 use std::sync::Arc;
@@ -145,6 +145,10 @@ impl McpClient for PermissionLayer {
         result
     }
+    async fn set_mode(&self, mode: Mode) -> Result<()> {
+        self.inner.set_mode(mode).await
+    }
 }
 #[cfg(test)]

View File

@@ -1,24 +1,25 @@
 use super::protocol::methods;
 use super::protocol::{
-    RequestId, RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, PROTOCOL_VERSION,
+    PROTOCOL_VERSION, RequestId, RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse,
 };
 use super::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse};
 use crate::consent::{ConsentManager, ConsentScope};
 use crate::tools::{Tool, WebScrapeTool, WebSearchTool};
 use crate::types::ModelInfo;
 use crate::types::{ChatResponse, Message, Role};
-use crate::{provider::chat_via_stream, Error, LLMProvider, Result};
-use futures::{future::BoxFuture, stream, StreamExt};
+use crate::{Error, LlmProvider, Result, mode::Mode, send_via_stream};
+use anyhow::anyhow;
+use futures::{StreamExt, future::BoxFuture, stream};
 use reqwest::Client as HttpClient;
 use serde_json::json;
 use std::path::Path;
-use std::sync::atomic::{AtomicU64, Ordering};
 use std::sync::Arc;
+use std::sync::atomic::{AtomicU64, Ordering};
 use std::time::Duration;
 use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
 use tokio::process::{Child, Command};
 use tokio::sync::Mutex;
-use tokio_tungstenite::{connect_async, MaybeTlsStream, WebSocketStream};
+use tokio_tungstenite::{MaybeTlsStream, WebSocketStream, connect_async};
 use tungstenite::protocol::Message as WsMessage;
 /// Client that talks to the external `owlen-mcp-server` over STDIO, HTTP, or WebSocket.
@@ -203,11 +204,11 @@ impl RemoteMcpClient {
             .await
             .map_err(|e| Error::Network(e.to_string()))?;
         // Try to parse as success then error.
-        if let Ok(r) = serde_json::from_str::<RpcResponse>(&text) {
-            if r.id == id {
+        if let Ok(r) = serde_json::from_str::<RpcResponse>(&text)
+            && r.id == id
+        {
             return Ok(r.result);
         }
-        }
         let err_resp: RpcErrorResponse =
             serde_json::from_str(&text).map_err(Error::Serialization)?;
         return Err(Error::Network(format!(
@@ -249,11 +250,11 @@ impl RemoteMcpClient {
         };
         // Try to parse as success then error.
-        if let Ok(r) = serde_json::from_str::<RpcResponse>(&response_text) {
-            if r.id == id {
+        if let Ok(r) = serde_json::from_str::<RpcResponse>(&response_text)
+            && r.id == id
+        {
             return Ok(r.result);
         }
-        }
         let err_resp: RpcErrorResponse =
             serde_json::from_str(&response_text).map_err(Error::Serialization)?;
         return Err(Error::Network(format!(
@@ -416,7 +417,9 @@ impl McpClient for RemoteMcpClient {
         // Autogrant consent for the web_search tool (permanent for this process).
         let consent_manager = std::sync::Arc::new(std::sync::Mutex::new(ConsentManager::new()));
         {
-            let mut cm = consent_manager.lock().unwrap();
+            let mut cm = consent_manager
+                .lock()
+                .map_err(|_| Error::Provider(anyhow!("Consent manager mutex poisoned")))?;
             cm.grant_consent_with_scope(
                 "web_search",
                 Vec::new(),
@@ -459,17 +462,22 @@ impl McpClient for RemoteMcpClient {
         let response: McpToolResponse = serde_json::from_value(result)?;
         Ok(response)
     }
+    async fn set_mode(&self, _mode: Mode) -> Result<()> {
+        // Remote servers manage their own mode settings; treat as best-effort no-op.
+        Ok(())
+    }
 }
 // ---------------------------------------------------------------------------
 // Provider implementation forwards chat requests to the generate_text tool.
 // ---------------------------------------------------------------------------
-impl LLMProvider for RemoteMcpClient {
+impl LlmProvider for RemoteMcpClient {
     type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>;
     type ListModelsFuture<'a> = BoxFuture<'a, Result<Vec<ModelInfo>>>;
-    type ChatFuture<'a> = BoxFuture<'a, Result<ChatResponse>>;
-    type ChatStreamFuture<'a> = BoxFuture<'a, Result<Self::Stream>>;
+    type SendPromptFuture<'a> = BoxFuture<'a, Result<ChatResponse>>;
+    type StreamPromptFuture<'a> = BoxFuture<'a, Result<Self::Stream>>;
     type HealthCheckFuture<'a> = BoxFuture<'a, Result<()>>;
     fn name(&self) -> &str {
@@ -484,11 +492,11 @@ impl LLMProvider for RemoteMcpClient {
         })
     }
-    fn chat(&self, request: crate::types::ChatRequest) -> Self::ChatFuture<'_> {
-        Box::pin(chat_via_stream(self, request))
+    fn send_prompt(&self, request: crate::types::ChatRequest) -> Self::SendPromptFuture<'_> {
+        Box::pin(send_via_stream(self, request))
     }
-    fn chat_stream(&self, request: crate::types::ChatRequest) -> Self::ChatStreamFuture<'_> {
+    fn stream_prompt(&self, request: crate::types::ChatRequest) -> Self::StreamPromptFuture<'_> {
         Box::pin(async move {
             let args = serde_json::json!({
                 "messages": request.messages,

View File

@@ -2,8 +2,8 @@ pub mod details;
 pub use details::{DetailedModelInfo, ModelInfoRetrievalError};
-use crate::types::ModelInfo;
 use crate::Result;
+use crate::types::ModelInfo;
 use std::collections::HashMap;
 use std::future::Future;
 use std::sync::Arc;
@@ -42,11 +42,9 @@ impl ModelManager {
         F: FnOnce() -> Fut,
         Fut: Future<Output = Result<Vec<ModelInfo>>>,
     {
-        if !force_refresh {
-            if let Some(models) = self.cached_if_fresh().await {
+        if !force_refresh && let Some(models) = self.cached_if_fresh().await {
             return Ok(models);
         }
-        }
         let models = fetcher().await?;
         let mut cache = self.cache.write().await;
@@ -134,12 +132,12 @@ impl ModelDetailsCache {
         let mut inner = self.inner.write().await;
         // Remove prior mappings for this model name (possibly different digest).
-        if let Some(previous_key) = inner.name_to_key.get(&info.name).cloned() {
-            if previous_key != key {
+        if let Some(previous_key) = inner.name_to_key.get(&info.name).cloned()
+            && previous_key != key
+        {
             inner.by_key.remove(&previous_key);
             inner.fetched_at.remove(&previous_key);
         }
-        }
         inner.fetched_at.insert(key.clone(), Instant::now());
         inner.name_to_key.insert(info.name.clone(), key.clone());

View File

@@ -1,380 +0,0 @@
//! Provider traits and registries.
use crate::{types::*, Error, Result};
use anyhow::anyhow;
use futures::{Stream, StreamExt};
use std::any::Any;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
/// A stream of chat responses
pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
/// Trait for LLM providers (Ollama, OpenAI, Anthropic, etc.) with zero-cost static dispatch.
pub trait LLMProvider: Send + Sync + 'static + Any + Sized {
type Stream: Stream<Item = Result<ChatResponse>> + Send + 'static;
type ListModelsFuture<'a>: Future<Output = Result<Vec<ModelInfo>>> + Send
where
Self: 'a;
type ChatFuture<'a>: Future<Output = Result<ChatResponse>> + Send
where
Self: 'a;
type ChatStreamFuture<'a>: Future<Output = Result<Self::Stream>> + Send
where
Self: 'a;
type HealthCheckFuture<'a>: Future<Output = Result<()>> + Send
where
Self: 'a;
fn name(&self) -> &str;
fn list_models(&self) -> Self::ListModelsFuture<'_>;
fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_>;
fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_>;
fn health_check(&self) -> Self::HealthCheckFuture<'_>;
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
self
}
}
/// Helper that implements [`LLMProvider::chat`] in terms of [`LLMProvider::chat_stream`].
pub async fn chat_via_stream<'a, P>(provider: &'a P, request: ChatRequest) -> Result<ChatResponse>
where
P: LLMProvider + 'a,
{
let stream = provider.chat_stream(request).await?;
let mut boxed: ChatStream = Box::pin(stream);
match boxed.next().await {
Some(Ok(response)) => Ok(response),
Some(Err(err)) => Err(err),
None => Err(Error::Provider(anyhow!(
"Empty chat stream from provider {}",
provider.name()
))),
}
}
/// Object-safe wrapper trait for runtime-configurable provider usage.
#[async_trait::async_trait]
pub trait Provider: Send + Sync {
/// Get the name of this provider.
fn name(&self) -> &str;
/// List available models from this provider.
async fn list_models(&self) -> Result<Vec<ModelInfo>>;
/// Send a chat completion request.
async fn chat(&self, request: ChatRequest) -> Result<ChatResponse>;
/// Send a streaming chat completion request.
async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream>;
/// Check if the provider is available/healthy.
async fn health_check(&self) -> Result<()>;
/// Get provider-specific configuration schema.
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
fn as_any(&self) -> &(dyn Any + Send + Sync);
}
#[async_trait::async_trait]
impl<T> Provider for T
where
T: LLMProvider,
{
fn name(&self) -> &str {
LLMProvider::name(self)
}
async fn list_models(&self) -> Result<Vec<ModelInfo>> {
LLMProvider::list_models(self).await
}
async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> {
LLMProvider::chat(self, request).await
}
async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> {
let stream = LLMProvider::chat_stream(self, request).await?;
Ok(Box::pin(stream))
}
async fn health_check(&self) -> Result<()> {
LLMProvider::health_check(self).await
}
fn config_schema(&self) -> serde_json::Value {
LLMProvider::config_schema(self)
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
LLMProvider::as_any(self)
}
}
/// Configuration for a provider
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProviderConfig {
/// Provider type identifier
pub provider_type: String,
/// Base URL for API calls
pub base_url: Option<String>,
/// API key or token
pub api_key: Option<String>,
/// Additional provider-specific configuration
#[serde(flatten)]
pub extra: std::collections::HashMap<String, serde_json::Value>,
}
/// A registry of providers
pub struct ProviderRegistry {
providers: std::collections::HashMap<String, Arc<dyn Provider>>,
}
impl ProviderRegistry {
/// Create a new provider registry
pub fn new() -> Self {
Self {
providers: std::collections::HashMap::new(),
}
}
/// Register a provider using static dispatch.
pub fn register<P: LLMProvider + 'static>(&mut self, provider: P) {
self.register_arc(Arc::new(provider));
}
/// Register an already wrapped provider
pub fn register_arc(&mut self, provider: Arc<dyn Provider>) {
let name = provider.name().to_string();
self.providers.insert(name, provider);
}
/// Get a provider by name
pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> {
self.providers.get(name).cloned()
}
/// List all registered provider names
pub fn list_providers(&self) -> Vec<String> {
self.providers.keys().cloned().collect()
}
/// Get all models from all providers
pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> {
let mut all_models = Vec::new();
for provider in self.providers.values() {
match provider.list_models().await {
Ok(mut models) => all_models.append(&mut models),
Err(_) => {
// Continue with other providers
}
}
}
Ok(all_models)
}
}
impl Default for ProviderRegistry {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
pub mod test_utils {
use super::*;
use crate::types::{ChatRequest, ChatResponse, Message, ModelInfo, Role};
use futures::stream;
use std::future::{ready, Ready};
/// Mock provider for testing
#[derive(Default)]
pub struct MockProvider;
impl LLMProvider for MockProvider {
type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>;
type ListModelsFuture<'a> = Ready<Result<Vec<ModelInfo>>>;
type ChatFuture<'a> = Ready<Result<ChatResponse>>;
type ChatStreamFuture<'a> = Ready<Result<Self::Stream>>;
type HealthCheckFuture<'a> = Ready<Result<()>>;
fn name(&self) -> &str {
"mock"
}
fn list_models(&self) -> Self::ListModelsFuture<'_> {
ready(Ok(vec![ModelInfo {
id: "mock-model".to_string(),
provider: "mock".to_string(),
name: "mock-model".to_string(),
description: None,
context_window: None,
capabilities: vec![],
supports_tools: false,
}]))
}
fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> {
ready(Ok(self.build_response(&request)))
}
fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> {
let response = self.build_response(&request);
ready(Ok(stream::iter(vec![Ok(response)])))
}
fn health_check(&self) -> Self::HealthCheckFuture<'_> {
ready(Ok(()))
}
}
impl MockProvider {
fn build_response(&self, request: &ChatRequest) -> ChatResponse {
let content = format!(
"Mock response to: {}",
request
.messages
.last()
.map(|m| m.content.clone())
.unwrap_or_default()
);
ChatResponse {
message: Message::new(Role::Assistant, content),
usage: None,
is_streaming: false,
is_final: true,
}
}
}
}
#[cfg(test)]
mod tests {
use super::test_utils::MockProvider;
use super::*;
use crate::types::{ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role};
use futures::stream;
use std::future::{ready, Ready};
use std::sync::Arc;
struct StreamingProvider;
impl LLMProvider for StreamingProvider {
type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>;
type ListModelsFuture<'a> = Ready<Result<Vec<ModelInfo>>>;
type ChatFuture<'a> = Ready<Result<ChatResponse>>;
type ChatStreamFuture<'a> = Ready<Result<Self::Stream>>;
type HealthCheckFuture<'a> = Ready<Result<()>>;
fn name(&self) -> &str {
"streaming"
}
fn list_models(&self) -> Self::ListModelsFuture<'_> {
ready(Ok(vec![ModelInfo {
id: "stream-model".to_string(),
provider: "streaming".to_string(),
name: "stream-model".to_string(),
description: None,
context_window: None,
capabilities: vec!["chat".to_string()],
supports_tools: false,
}]))
}
fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> {
ready(Ok(self.response(&request)))
}
fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> {
let response = self.response(&request);
ready(Ok(stream::iter(vec![Ok(response)])))
}
fn health_check(&self) -> Self::HealthCheckFuture<'_> {
ready(Ok(()))
}
}
impl StreamingProvider {
fn response(&self, request: &ChatRequest) -> ChatResponse {
let reply = format!(
"echo:{}",
request
.messages
.last()
.map(|m| m.content.clone())
.unwrap_or_default()
);
ChatResponse {
message: Message::new(Role::Assistant, reply),
usage: None,
is_streaming: true,
is_final: true,
}
}
}
#[tokio::test]
async fn default_chat_reads_from_stream() {
let provider = StreamingProvider;
let request = ChatRequest {
model: "stream-model".to_string(),
messages: vec![Message::new(Role::User, "ping".to_string())],
parameters: ChatParameters::default(),
tools: None,
};
let response = LLMProvider::chat(&provider, request)
.await
.expect("chat succeeded");
assert_eq!(response.message.content, "echo:ping");
assert!(response.is_final);
}
#[tokio::test]
async fn registry_registers_static_provider() {
let mut registry = ProviderRegistry::new();
registry.register(StreamingProvider);
let provider = registry.get("streaming").expect("provider registered");
let models = provider.list_models().await.expect("models listed");
assert_eq!(models[0].id, "stream-model");
}
#[tokio::test]
async fn registry_accepts_dynamic_provider() {
let mut registry = ProviderRegistry::new();
let provider: Arc<dyn Provider> = Arc::new(MockProvider::default());
registry.register_arc(provider.clone());
let fetched = registry.get("mock").expect("mock provider present");
let request = ChatRequest {
model: "mock-model".to_string(),
messages: vec![Message::new(Role::User, "hi".to_string())],
parameters: ChatParameters::default(),
tools: None,
};
let response = Provider::chat(fetched.as_ref(), request)
.await
.expect("chat succeeded");
assert_eq!(response.message.content, "Mock response to: hi");
}
}
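For downstream code the deletion is a pure move-and-rename; nothing survives under the old path. The correspondence, with each left-hand item from the removed file above and each right-hand item from the new `llm.rs`:

```rust
// crate::provider (removed)                  ->  crate::llm (new)
// LLMProvider                                ->  LlmProvider
// LLMProvider::chat / ChatFuture             ->  LlmProvider::send_prompt / SendPromptFuture
// LLMProvider::chat_stream / ChatStreamFuture -> LlmProvider::stream_prompt / StreamPromptFuture
// chat_via_stream(provider, request)         ->  send_via_stream(provider, request)
// test_utils::MockProvider (unit struct)     ->  test_utils::MockProvider::default() (named, call-counting)
```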

View File

@@ -7,32 +7,32 @@ use std::{
 };
 use anyhow::anyhow;
-use futures::{future::join_all, future::BoxFuture, Stream, StreamExt};
+use futures::{Stream, StreamExt, future::BoxFuture, future::join_all};
 use log::{debug, warn};
 use ollama_rs::{
+    Ollama,
     error::OllamaError,
     generation::chat::{
-        request::ChatMessageRequest as OllamaChatRequest, ChatMessage as OllamaMessage,
-        ChatMessageResponse as OllamaChatResponse, MessageRole as OllamaRole,
+        ChatMessage as OllamaMessage, ChatMessageResponse as OllamaChatResponse,
+        MessageRole as OllamaRole, request::ChatMessageRequest as OllamaChatRequest,
     },
     generation::tools::{ToolCall as OllamaToolCall, ToolCallFunction as OllamaToolCallFunction},
-    headers::{HeaderMap, HeaderValue, AUTHORIZATION},
+    headers::{AUTHORIZATION, HeaderMap, HeaderValue},
     models::{LocalModel, ModelInfo as OllamaModelInfo, ModelOptions},
-    Ollama,
 };
 use reqwest::{Client, StatusCode, Url};
-use serde_json::{json, Map as JsonMap, Value};
+use serde_json::{Map as JsonMap, Value, json};
 use uuid::Uuid;
 use crate::{
+    Error, Result,
     config::GeneralSettings,
+    llm::{LlmProvider, ProviderConfig},
     mcp::McpToolDescriptor,
     model::{DetailedModelInfo, ModelDetailsCache, ModelManager},
-    provider::{LLMProvider, ProviderConfig},
     types::{
         ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role, TokenUsage, ToolCall,
     },
-    Error, Result,
 };
 const DEFAULT_TIMEOUT_SECS: u64 = 120;
@@ -292,14 +292,14 @@ impl OllamaProvider {
         );
     }
-    if let Some(descriptors) = &tools {
-        if !descriptors.is_empty() {
+    if let Some(descriptors) = &tools
+        && !descriptors.is_empty()
+    {
         debug!(
             "Ignoring {} MCP tool descriptors for Ollama request (tool calling unsupported)",
             descriptors.len()
         );
     }
-    }
     let converted_messages = messages.into_iter().map(convert_message).collect();
     let mut request = OllamaChatRequest::new(model.clone(), converted_messages);
@@ -378,11 +378,11 @@ impl OllamaProvider {
         let family = pick_first_string(map, &["family", "model_family"]);
         let mut families = pick_string_list(map, &["families", "model_families"]);
-        if families.is_empty() {
-            if let Some(single) = family.clone() {
+        if families.is_empty()
+            && let Some(single) = family.clone()
+        {
             families.push(single);
         }
-        }
         let system = pick_first_string(map, &["system"]);
@@ -529,32 +529,28 @@ impl OllamaProvider {
             StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => Error::Auth(format!(
                 "Ollama rejected the request ({status}): {detail}. Check your API key and account permissions."
             )),
-            StatusCode::BAD_REQUEST => Error::InvalidInput(format!(
-                "{action} rejected by Ollama ({status}): {detail}"
-            )),
+            StatusCode::BAD_REQUEST => {
+                Error::InvalidInput(format!("{action} rejected by Ollama ({status}): {detail}"))
+            }
             StatusCode::SERVICE_UNAVAILABLE | StatusCode::GATEWAY_TIMEOUT => Error::Timeout(
-                format!(
-                    "Ollama {action} timed out ({status}). The model may still be loading."
-                ),
+                format!("Ollama {action} timed out ({status}). The model may still be loading."),
             ),
-            _ => Error::Network(format!(
-                "Ollama {action} failed ({status}): {detail}"
-            )),
+            _ => Error::Network(format!("Ollama {action} failed ({status}): {detail}")),
         }
     }
 }
impl LLMProvider for OllamaProvider { impl LlmProvider for OllamaProvider {
type Stream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>; type Stream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
type ListModelsFuture<'a> type ListModelsFuture<'a>
= BoxFuture<'a, Result<Vec<ModelInfo>>> = BoxFuture<'a, Result<Vec<ModelInfo>>>
where where
Self: 'a; Self: 'a;
type ChatFuture<'a> type SendPromptFuture<'a>
= BoxFuture<'a, Result<ChatResponse>> = BoxFuture<'a, Result<ChatResponse>>
where where
Self: 'a; Self: 'a;
type ChatStreamFuture<'a> type StreamPromptFuture<'a>
= BoxFuture<'a, Result<Self::Stream>> = BoxFuture<'a, Result<Self::Stream>>
where where
Self: 'a; Self: 'a;
@@ -575,7 +571,7 @@ impl LLMProvider for OllamaProvider {
}) })
} }
fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> { fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_> {
Box::pin(async move { Box::pin(async move {
let ChatRequest { let ChatRequest {
model, model,
@@ -597,7 +593,7 @@ impl LLMProvider for OllamaProvider {
}) })
} }
fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> { fn stream_prompt(&self, request: ChatRequest) -> Self::StreamPromptFuture<'_> {
Box::pin(async move { Box::pin(async move {
let ChatRequest { let ChatRequest {
model, model,
@@ -926,11 +922,7 @@ fn value_to_u64(value: &Value) -> Option<u64> {
} else if let Some(v) = num.as_i64() { } else if let Some(v) = num.as_i64() {
v.try_into().ok() v.try_into().ok()
} else if let Some(v) = num.as_f64() { } else if let Some(v) = num.as_f64() {
if v >= 0.0 { if v >= 0.0 { Some(v as u64) } else { None }
Some(v as u64)
} else {
None
}
} else { } else {
None None
} }
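
This file carries the heart of the rename: `LLMProvider` becomes `LlmProvider`, `chat` becomes `send_prompt`, and `chat_stream` becomes `stream_prompt`, with the boxed-future associated types renamed to match. A minimal sketch of the renamed surface, with simplified types and only the send path (the real trait also declares `Stream`, `ListModelsFuture`, and `StreamPromptFuture`, and its futures yield `Result`s):

```rust
use std::future::Future;
use std::pin::Pin;

pub struct ChatRequest { pub model: String, pub prompt: String }
pub struct ChatResponse { pub content: String }

pub trait LlmProvider {
    // Generic associated type: implementors pick their own future type, so
    // boxing is optional. OllamaProvider boxes, as the hunk above shows.
    type SendPromptFuture<'a>: Future<Output = ChatResponse> + 'a
    where
        Self: 'a;

    /// Formerly `chat` on the old `LLMProvider` trait.
    fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_>;
}

pub struct EchoProvider;

impl LlmProvider for EchoProvider {
    type SendPromptFuture<'a>
        = Pin<Box<dyn Future<Output = ChatResponse> + 'a>>
    where
        Self: 'a;

    fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_> {
        Box::pin(async move {
            ChatResponse {
                content: format!("echo from {}: {}", request.model, request.prompt),
            }
        })
    }
}

fn main() {
    let provider = EchoProvider;
    let response = futures::executor::block_on(provider.send_prompt(ChatRequest {
        model: "demo".into(),
        prompt: "hi".into(),
    }));
    assert_eq!(response.content, "echo from demo: hi");
}
```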

View File

@@ -1,6 +1,7 @@
//! Router for managing multiple providers and routing requests //! Router for managing multiple providers and routing requests
use crate::{provider::*, types::*, Result}; use crate::{Result, llm::*, types::*};
use anyhow::anyhow;
use std::sync::Arc; use std::sync::Arc;
/// A router that can distribute requests across multiple providers /// A router that can distribute requests across multiple providers
@@ -32,7 +33,7 @@ impl Router {
} }
/// Register a provider with the router /// Register a provider with the router
pub fn register_provider<P: LLMProvider + 'static>(&mut self, provider: P) { pub fn register_provider<P: LlmProvider + 'static>(&mut self, provider: P) {
self.registry.register(provider); self.registry.register(provider);
} }
@@ -52,13 +53,13 @@ impl Router {
/// Route a request to the appropriate provider /// Route a request to the appropriate provider
pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> { pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> {
let provider = self.find_provider_for_model(&request.model)?; let provider = self.find_provider_for_model(&request.model)?;
provider.chat(request).await provider.send_prompt(request).await
} }
/// Route a streaming request to the appropriate provider /// Route a streaming request to the appropriate provider
pub async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> { pub async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> {
let provider = self.find_provider_for_model(&request.model)?; let provider = self.find_provider_for_model(&request.model)?;
provider.chat_stream(request).await provider.stream_prompt(request).await
} }
/// List all available models from all providers /// List all available models from all providers
@@ -70,19 +71,19 @@ impl Router {
fn find_provider_for_model(&self, model: &str) -> Result<Arc<dyn Provider>> { fn find_provider_for_model(&self, model: &str) -> Result<Arc<dyn Provider>> {
// Check routing rules first // Check routing rules first
for rule in &self.routing_rules { for rule in &self.routing_rules {
if self.matches_pattern(&rule.model_pattern, model) { if self.matches_pattern(&rule.model_pattern, model)
if let Some(provider) = self.registry.get(&rule.provider) { && let Some(provider) = self.registry.get(&rule.provider)
{
return Ok(provider); return Ok(provider);
} }
} }
}
// Fall back to default provider // Fall back to default provider
if let Some(default) = &self.default_provider { if let Some(default) = &self.default_provider
if let Some(provider) = self.registry.get(default) { && let Some(provider) = self.registry.get(default)
{
return Ok(provider); return Ok(provider);
} }
}
// If no default, try to find any provider that has this model // If no default, try to find any provider that has this model
// This is a fallback for cases where routing isn't configured // This is a fallback for cases where routing isn't configured
@@ -92,7 +93,7 @@ impl Router {
} }
} }
Err(crate::Error::Provider(anyhow::anyhow!( Err(crate::Error::Provider(anyhow!(
"No provider found for model: {}", "No provider found for model: {}",
model model
))) )))
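
The lookup order above is: explicit routing rule, then the default provider, then any registered provider. The same fallback chain as a self-contained sketch; the glob semantics of `matches_pattern` are assumed (the diff does not show its body), plain `String`s stand in for `Arc<dyn Provider>`, and the let-chains need the 2024 edition the workspace just moved to:

```rust
use std::collections::HashMap;

struct Rule {
    model_pattern: String,
    provider: String,
}

// Assumed glob semantics: a trailing '*' matches any suffix.
fn matches_pattern(pattern: &str, model: &str) -> bool {
    match pattern.strip_suffix('*') {
        Some(prefix) => model.starts_with(prefix),
        None => pattern == model,
    }
}

fn find_provider<'a>(
    rules: &[Rule],
    default: Option<&str>,
    registry: &'a HashMap<String, String>, // name -> provider stand-in
    model: &str,
) -> Option<&'a String> {
    // 1. Explicit routing rules win.
    for rule in rules {
        if matches_pattern(&rule.model_pattern, model)
            && let Some(provider) = registry.get(&rule.provider)
        {
            return Some(provider);
        }
    }
    // 2. Then the configured default provider.
    if let Some(name) = default
        && let Some(provider) = registry.get(name)
    {
        return Some(provider);
    }
    // 3. Finally, any provider at all.
    registry.values().next()
}

fn main() {
    let registry = HashMap::from([(
        "ollama".to_string(),
        "http://localhost:11434".to_string(),
    )]);
    let rules = [Rule {
        model_pattern: "llama*".to_string(),
        provider: "ollama".to_string(),
    }];
    assert_eq!(
        find_provider(&rules, None, &registry, "llama3.2"),
        Some(&"http://localhost:11434".to_string())
    );
}
```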

View File

@@ -2,7 +2,7 @@ use std::path::PathBuf;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use anyhow::{bail, Context, Result}; use anyhow::{Context, Result, bail};
use tempfile::TempDir; use tempfile::TempDir;
/// Configuration options for sandboxed process execution. /// Configuration options for sandboxed process execution.
@@ -185,17 +185,15 @@ impl SandboxedProcess {
if let Ok(output) = output { if let Ok(output) = output {
let version_str = String::from_utf8_lossy(&output.stdout); let version_str = String::from_utf8_lossy(&output.stdout);
// Parse version like "bubblewrap 0.11.0" or "0.11.0" // Parse version like "bubblewrap 0.11.0" or "0.11.0"
if let Some(version_part) = version_str.split_whitespace().last() { if let Some(version_part) = version_str.split_whitespace().last()
if let Some((major, rest)) = version_part.split_once('.') { && let Some((major, rest)) = version_part.split_once('.')
if let Some((minor, _patch)) = rest.split_once('.') { && let Some((minor, _patch)) = rest.split_once('.')
if let (Ok(maj), Ok(min)) = (major.parse::<u32>(), minor.parse::<u32>()) { && let (Ok(maj), Ok(min)) = (major.parse::<u32>(), minor.parse::<u32>())
{
// --rlimit-as was added in 0.12.0 // --rlimit-as was added in 0.12.0
return maj > 0 || (maj == 0 && min >= 12); return maj > 0 || (maj == 0 && min >= 12);
} }
} }
}
}
}
// If we can't determine the version, assume it doesn't support it (safer default) // If we can't determine the version, assume it doesn't support it (safer default)
false false
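
Pulled out as a standalone function, the version gate reads as below; this mirrors the new code directly and runs on edition 2024, where let-chains are available:

```rust
/// `--rlimit-as` was added in bubblewrap 0.12.0; parse "bubblewrap 0.11.0"
/// or bare "0.11.0" and gate on (major, minor). Unparseable output returns
/// false, the safer default.
fn supports_rlimit_as(version_output: &str) -> bool {
    if let Some(version_part) = version_output.split_whitespace().last()
        && let Some((major, rest)) = version_part.split_once('.')
        && let Some((minor, _patch)) = rest.split_once('.')
        && let (Ok(maj), Ok(min)) = (major.parse::<u32>(), minor.parse::<u32>())
    {
        return maj > 0 || (maj == 0 && min >= 12);
    }
    false
}

fn main() {
    assert!(supports_rlimit_as("bubblewrap 0.12.1"));
    assert!(!supports_rlimit_as("bubblewrap 0.11.0"));
    assert!(!supports_rlimit_as("not a version"));
}
```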

View File

@@ -5,25 +5,27 @@ use crate::credentials::CredentialManager;
use crate::encryption::{self, VaultHandle}; use crate::encryption::{self, VaultHandle};
use crate::formatting::MessageFormatter; use crate::formatting::MessageFormatter;
use crate::input::InputBuffer; use crate::input::InputBuffer;
use crate::mcp::McpToolCall;
use crate::mcp::client::McpClient; use crate::mcp::client::McpClient;
use crate::mcp::factory::McpClientFactory; use crate::mcp::factory::McpClientFactory;
use crate::mcp::permission::PermissionLayer; use crate::mcp::permission::PermissionLayer;
use crate::mcp::McpToolCall; use crate::mode::Mode;
use crate::model::{DetailedModelInfo, ModelManager}; use crate::model::{DetailedModelInfo, ModelManager};
use crate::provider::{ChatStream, Provider};
use crate::providers::OllamaProvider; use crate::providers::OllamaProvider;
use crate::storage::{SessionMeta, StorageManager}; use crate::storage::{SessionMeta, StorageManager};
use crate::types::{ use crate::types::{
ChatParameters, ChatRequest, ChatResponse, Conversation, Message, ModelInfo, ToolCall, ChatParameters, ChatRequest, ChatResponse, Conversation, Message, ModelInfo, ToolCall,
}; };
use crate::ui::UiController; use crate::ui::UiController;
use crate::validation::{get_builtin_schemas, SchemaValidator}; use crate::validation::{SchemaValidator, get_builtin_schemas};
use crate::{ChatStream, Provider};
use crate::{ use crate::{
CodeExecTool, ResourcesDeleteTool, ResourcesGetTool, ResourcesListTool, ResourcesWriteTool, CodeExecTool, ResourcesDeleteTool, ResourcesGetTool, ResourcesListTool, ResourcesWriteTool,
ToolRegistry, WebScrapeTool, WebSearchDetailedTool, WebSearchTool, ToolRegistry, WebScrapeTool, WebSearchDetailedTool, WebSearchTool,
}; };
use crate::{Error, Result}; use crate::{Error, Result};
use log::warn; use log::warn;
use serde_json::Value;
use std::env; use std::env;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
@@ -38,6 +40,51 @@ pub enum SessionOutcome {
}, },
} }
fn extract_resource_content(value: &Value) -> Option<String> {
match value {
Value::Null => Some(String::new()),
Value::Bool(flag) => Some(flag.to_string()),
Value::Number(num) => Some(num.to_string()),
Value::String(text) => Some(text.clone()),
Value::Array(items) => {
let mut segments = Vec::new();
for item in items {
if let Some(segment) = extract_resource_content(item)
&& !segment.is_empty()
{
segments.push(segment);
}
}
if segments.is_empty() {
None
} else {
Some(segments.join("\n"))
}
}
Value::Object(map) => {
const PREFERRED_FIELDS: [&str; 6] =
["content", "contents", "text", "value", "body", "data"];
for key in PREFERRED_FIELDS.iter() {
if let Some(inner) = map.get(*key)
&& let Some(text) = extract_resource_content(inner)
&& !text.is_empty()
{
return Some(text);
}
}
if let Some(inner) = map.get("chunks")
&& let Some(text) = extract_resource_content(inner)
&& !text.is_empty()
{
return Some(text);
}
None
}
}
}
pub struct SessionController { pub struct SessionController {
provider: Arc<dyn Provider>, provider: Arc<dyn Provider>,
conversation: ConversationManager, conversation: ConversationManager,
@@ -55,6 +102,7 @@ pub struct SessionController {
credential_manager: Option<Arc<CredentialManager>>, credential_manager: Option<Arc<CredentialManager>>,
ui: Arc<dyn UiController>, ui: Arc<dyn UiController>,
enable_code_tools: bool, enable_code_tools: bool,
current_mode: Mode,
} }
async fn build_tools( async fn build_tools(
@@ -228,6 +276,12 @@ impl SessionController {
drop(config_guard); // Release the lock before calling build_tools drop(config_guard); // Release the lock before calling build_tools
let initial_mode = if enable_code_tools {
Mode::Code
} else {
Mode::Chat
};
let (tool_registry, schema_validator) = build_tools( let (tool_registry, schema_validator) = build_tools(
config_arc.clone(), config_arc.clone(),
ui.clone(), ui.clone(),
@@ -247,8 +301,9 @@ impl SessionController {
schema_validator.clone(), schema_validator.clone(),
); );
let base_client = factory.create()?; let base_client = factory.create()?;
let permission_client = PermissionLayer::new(base_client, Arc::new(guard.clone())); let client = Arc::new(PermissionLayer::new(base_client, Arc::new(guard.clone())));
Arc::new(permission_client) client.set_mode(initial_mode).await?;
client
}; };
Ok(Self { Ok(Self {
@@ -268,6 +323,7 @@ impl SessionController {
credential_manager, credential_manager,
ui, ui,
enable_code_tools, enable_code_tools,
current_mode: initial_mode,
}) })
} }
@@ -325,12 +381,12 @@ impl SessionController {
.expect("Consent manager mutex poisoned"); .expect("Consent manager mutex poisoned");
consent.grant_consent(tool_name, data_types, endpoints); consent.grant_consent(tool_name, data_types, endpoints);
if let Some(vault) = &self.vault { if let Some(vault) = &self.vault
if let Err(e) = consent.persist_to_vault(vault) { && let Err(e) = consent.persist_to_vault(vault)
{
eprintln!("Warning: Failed to persist consent to vault: {}", e); eprintln!("Warning: Failed to persist consent to vault: {}", e);
} }
} }
}
pub fn grant_consent_with_scope( pub fn grant_consent_with_scope(
&self, &self,
@@ -347,14 +403,13 @@ impl SessionController {
consent.grant_consent_with_scope(tool_name, data_types, endpoints, scope); consent.grant_consent_with_scope(tool_name, data_types, endpoints, scope);
// Only persist to vault for permanent consent // Only persist to vault for permanent consent
if is_permanent { if is_permanent
if let Some(vault) = &self.vault { && let Some(vault) = &self.vault
if let Err(e) = consent.persist_to_vault(vault) { && let Err(e) = consent.persist_to_vault(vault)
{
eprintln!("Warning: Failed to persist consent to vault: {}", e); eprintln!("Warning: Failed to persist consent to vault: {}", e);
} }
} }
}
}
pub fn check_tools_consent_needed( pub fn check_tools_consent_needed(
&self, &self,
@@ -489,8 +544,13 @@ impl SessionController {
}; };
match self.mcp_client.call_tool(call).await { match self.mcp_client.call_tool(call).await {
Ok(response) => { Ok(response) => {
let content: String = serde_json::from_value(response.output)?; if let Some(text) = extract_resource_content(&response.output) {
Ok(content) return Ok(text);
}
let formatted = serde_json::to_string_pretty(&response.output)
.unwrap_or_else(|_| response.output.to_string());
Ok(formatted)
} }
Err(err) => { Err(err) => {
log::warn!("MCP file read failed ({}); falling back to local read", err); log::warn!("MCP file read failed ({}); falling back to local read", err);
@@ -500,6 +560,48 @@ impl SessionController {
} }
} }
pub async fn read_file_with_tools(&self, path: &str) -> Result<String> {
if !self.enable_code_tools {
return Err(Error::InvalidInput(
"Code tools are disabled in chat mode. Run `:mode code` to switch.".to_string(),
));
}
let call = McpToolCall {
name: "resources/get".to_string(),
arguments: serde_json::json!({ "path": path }),
};
let response = self.mcp_client.call_tool(call).await?;
if let Some(text) = extract_resource_content(&response.output) {
Ok(text)
} else {
let formatted = serde_json::to_string_pretty(&response.output)
.unwrap_or_else(|_| response.output.to_string());
Ok(formatted)
}
}
pub fn code_tools_enabled(&self) -> bool {
self.enable_code_tools
}
pub async fn set_code_tools_enabled(&mut self, enabled: bool) -> Result<()> {
if self.enable_code_tools == enabled {
return Ok(());
}
self.enable_code_tools = enabled;
self.rebuild_tools().await
}
pub async fn set_operating_mode(&mut self, mode: Mode) -> Result<()> {
self.current_mode = mode;
let enable_code_tools = matches!(mode, Mode::Code);
self.set_code_tools_enabled(enable_code_tools).await?;
self.mcp_client.set_mode(mode).await
}
pub async fn list_dir(&self, path: &str) -> Result<Vec<String>> { pub async fn list_dir(&self, path: &str) -> Result<Vec<String>> {
let call = McpToolCall { let call = McpToolCall {
name: "resources/list".to_string(), name: "resources/list".to_string(),
@@ -587,7 +689,9 @@ impl SessionController {
); );
let base_client = factory.create()?; let base_client = factory.create()?;
let permission_client = PermissionLayer::new(base_client, Arc::new(config.clone())); let permission_client = PermissionLayer::new(base_client, Arc::new(config.clone()));
self.mcp_client = Arc::new(permission_client); let client = Arc::new(permission_client);
client.set_mode(self.current_mode).await?;
self.mcp_client = client;
Ok(()) Ok(())
} }
@@ -741,7 +845,7 @@ impl SessionController {
if !streaming { if !streaming {
const MAX_TOOL_ITERATIONS: usize = 5; const MAX_TOOL_ITERATIONS: usize = 5;
for _iteration in 0..MAX_TOOL_ITERATIONS { for _iteration in 0..MAX_TOOL_ITERATIONS {
match self.provider.chat(request.clone()).await { match self.provider.send_prompt(request.clone()).await {
Ok(response) => { Ok(response) => {
if response.message.has_tool_calls() { if response.message.has_tool_calls() {
self.conversation.push_message(response.message.clone()); self.conversation.push_message(response.message.clone());
@@ -786,7 +890,7 @@ impl SessionController {
))); )));
} }
match self.provider.chat_stream(request).await { match self.provider.stream_prompt(request).await {
Ok(stream) => { Ok(stream) => {
let response_id = self.conversation.start_streaming_response(); let response_id = self.conversation.start_streaming_response();
Ok(SessionOutcome::Streaming { Ok(SessionOutcome::Streaming {
@@ -828,6 +932,11 @@ impl SessionController {
.filter(|calls| !calls.is_empty()) .filter(|calls| !calls.is_empty())
} }
pub fn cancel_stream(&mut self, message_id: Uuid, notice: &str) -> Result<()> {
self.conversation
.cancel_stream(message_id, notice.to_string())
}
pub async fn execute_streaming_tools( pub async fn execute_streaming_tools(
&mut self, &mut self,
_message_id: Uuid, _message_id: Uuid,
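
The other substantive change in this file is `extract_resource_content`, which replaces the old `let content: String = serde_json::from_value(response.output)?` (that failed on any non-string payload). A self-contained check of its behaviour, with the helper reproduced from the hunk above (the let-chains again require edition 2024):

```rust
use serde_json::{json, Value};

// Reproduced from the session controller hunk above so the example
// is self-contained.
fn extract_resource_content(value: &Value) -> Option<String> {
    match value {
        Value::Null => Some(String::new()),
        Value::Bool(flag) => Some(flag.to_string()),
        Value::Number(num) => Some(num.to_string()),
        Value::String(text) => Some(text.clone()),
        Value::Array(items) => {
            let mut segments = Vec::new();
            for item in items {
                if let Some(segment) = extract_resource_content(item)
                    && !segment.is_empty()
                {
                    segments.push(segment);
                }
            }
            if segments.is_empty() { None } else { Some(segments.join("\n")) }
        }
        Value::Object(map) => {
            const PREFERRED_FIELDS: [&str; 6] =
                ["content", "contents", "text", "value", "body", "data"];
            for key in PREFERRED_FIELDS.iter() {
                if let Some(inner) = map.get(*key)
                    && let Some(text) = extract_resource_content(inner)
                    && !text.is_empty()
                {
                    return Some(text);
                }
            }
            if let Some(inner) = map.get("chunks")
                && let Some(text) = extract_resource_content(inner)
                && !text.is_empty()
            {
                return Some(text);
            }
            None
        }
    }
}

fn main() {
    // Walks objects by preferred key and joins array segments with newlines.
    let nested = json!({ "contents": [ { "text": "fn main() {}" }, { "text": "// done" } ] });
    assert_eq!(
        extract_resource_content(&nested).as_deref(),
        Some("fn main() {}\n// done")
    );
    // Unrecognised shapes yield None; the caller then falls back to
    // serde_json::to_string_pretty, so users always see *something*.
    assert!(extract_resource_content(&json!({ "unknown": 1 })).is_none());
}
```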

View File

@@ -0,0 +1,194 @@
//! Shared application state types used across TUI frontends.
use std::fmt;
/// High-level application state reported by the UI loop.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AppState {
Running,
Quit,
}
/// Vim-style input modes supported by the TUI.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InputMode {
Normal,
Editing,
ProviderSelection,
ModelSelection,
Help,
Visual,
Command,
SessionBrowser,
ThemeBrowser,
}
impl fmt::Display for InputMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let label = match self {
InputMode::Normal => "Normal",
InputMode::Editing => "Editing",
InputMode::ModelSelection => "Model",
InputMode::ProviderSelection => "Provider",
InputMode::Help => "Help",
InputMode::Visual => "Visual",
InputMode::Command => "Command",
InputMode::SessionBrowser => "Sessions",
InputMode::ThemeBrowser => "Themes",
};
f.write_str(label)
}
}
/// Represents which panel is currently focused in the TUI layout.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FocusedPanel {
Chat,
Thinking,
Input,
Code,
}
/// Auto-scroll state manager for scrollable panels.
#[derive(Debug, Clone)]
pub struct AutoScroll {
pub scroll: usize,
pub content_len: usize,
pub stick_to_bottom: bool,
}
impl Default for AutoScroll {
fn default() -> Self {
Self {
scroll: 0,
content_len: 0,
stick_to_bottom: true,
}
}
}
impl AutoScroll {
/// Update scroll position based on viewport height.
pub fn on_viewport(&mut self, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h);
if self.stick_to_bottom {
self.scroll = max;
} else {
self.scroll = self.scroll.min(max);
}
}
/// Handle user scroll input.
pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h) as isize;
let s = (self.scroll as isize + delta).clamp(0, max) as usize;
self.scroll = s;
self.stick_to_bottom = s as isize == max;
}
pub fn scroll_half_page_down(&mut self, viewport_h: usize) {
let delta = (viewport_h / 2) as isize;
self.on_user_scroll(delta, viewport_h);
}
pub fn scroll_half_page_up(&mut self, viewport_h: usize) {
let delta = -((viewport_h / 2) as isize);
self.on_user_scroll(delta, viewport_h);
}
pub fn scroll_full_page_down(&mut self, viewport_h: usize) {
let delta = viewport_h as isize;
self.on_user_scroll(delta, viewport_h);
}
pub fn scroll_full_page_up(&mut self, viewport_h: usize) {
let delta = -(viewport_h as isize);
self.on_user_scroll(delta, viewport_h);
}
pub fn jump_to_top(&mut self) {
self.scroll = 0;
self.stick_to_bottom = false;
}
pub fn jump_to_bottom(&mut self, viewport_h: usize) {
self.stick_to_bottom = true;
self.on_viewport(viewport_h);
}
}
/// Visual selection state for text selection.
#[derive(Debug, Clone, Default)]
pub struct VisualSelection {
pub start: Option<(usize, usize)>,
pub end: Option<(usize, usize)>,
}
impl VisualSelection {
pub fn new() -> Self {
Self::default()
}
pub fn start_at(&mut self, pos: (usize, usize)) {
self.start = Some(pos);
self.end = Some(pos);
}
pub fn extend_to(&mut self, pos: (usize, usize)) {
self.end = Some(pos);
}
pub fn clear(&mut self) {
self.start = None;
self.end = None;
}
pub fn is_active(&self) -> bool {
self.start.is_some() && self.end.is_some()
}
pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> {
if let (Some(s), Some(e)) = (self.start, self.end) {
if s.0 < e.0 || (s.0 == e.0 && s.1 <= e.1) {
Some((s, e))
} else {
Some((e, s))
}
} else {
None
}
}
}
/// Cursor position helper for navigating scrollable content.
#[derive(Debug, Clone, Copy, Default)]
pub struct CursorPosition {
pub row: usize,
pub col: usize,
}
impl CursorPosition {
pub fn new(row: usize, col: usize) -> Self {
Self { row, col }
}
pub fn move_up(&mut self, amount: usize) {
self.row = self.row.saturating_sub(amount);
}
pub fn move_down(&mut self, amount: usize, max: usize) {
self.row = (self.row + amount).min(max);
}
pub fn move_left(&mut self, amount: usize) {
self.col = self.col.saturating_sub(amount);
}
pub fn move_right(&mut self, amount: usize, max: usize) {
self.col = (self.col + amount).min(max);
}
pub fn as_tuple(&self) -> (usize, usize) {
(self.row, self.col)
}
}
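
Part of the point of extracting these types into `state` is that they are testable without a terminal. A test sketch, not part of the commit, that pins down the stick-to-bottom contract:

```rust
#[cfg(test)]
mod tests {
    use super::AutoScroll;

    #[test]
    fn sticks_to_bottom_until_user_scrolls_away() {
        let mut s = AutoScroll::default();
        s.content_len = 100;

        // Pinned: scroll tracks max = content_len - viewport height.
        s.on_viewport(20);
        assert_eq!(s.scroll, 80);

        // Scrolling up detaches from the bottom...
        s.on_user_scroll(-30, 20);
        assert_eq!(s.scroll, 50);
        assert!(!s.stick_to_bottom);

        // ...and a G-style jump re-attaches.
        s.jump_to_bottom(20);
        assert_eq!(s.scroll, 80);
        assert!(s.stick_to_bottom);
    }
}
```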

View File

@@ -50,15 +50,15 @@ impl StorageManager {
/// Create a storage manager using the provided database path /// Create a storage manager using the provided database path
pub async fn with_database_path(database_path: PathBuf) -> Result<Self> { pub async fn with_database_path(database_path: PathBuf) -> Result<Self> {
if let Some(parent) = database_path.parent() { if let Some(parent) = database_path.parent()
if !parent.exists() { && !parent.exists()
{
std::fs::create_dir_all(parent).map_err(|e| { std::fs::create_dir_all(parent).map_err(|e| {
Error::Storage(format!( Error::Storage(format!(
"Failed to create database directory {parent:?}: {e}" "Failed to create database directory {parent:?}: {e}"
)) ))
})?; })?;
} }
}
let options = SqliteConnectOptions::from_str(&format!( let options = SqliteConnectOptions::from_str(&format!(
"sqlite://{}", "sqlite://{}",
@@ -431,14 +431,14 @@ impl StorageManager {
} }
} }
if migrated > 0 { if migrated > 0
if let Err(err) = archive_legacy_directory(&legacy_dir) { && let Err(err) = archive_legacy_directory(&legacy_dir)
{
println!( println!(
"Warning: migrated sessions but failed to archive legacy directory: {}", "Warning: migrated sessions but failed to archive legacy directory: {}",
err err
); );
} }
}
println!("Migrated {} legacy sessions.", migrated); println!("Migrated {} legacy sessions.", migrated);
Ok(()) Ok(())

View File

@@ -586,17 +586,17 @@ where
} }
fn parse_color(s: &str) -> Result<Color, String> { fn parse_color(s: &str) -> Result<Color, String> {
if let Some(hex) = s.strip_prefix('#') { if let Some(hex) = s.strip_prefix('#')
if hex.len() == 6 { && hex.len() == 6
let r = u8::from_str_radix(&hex[0..2], 16) {
.map_err(|_| format!("Invalid hex color: {}", s))?; let r =
let g = u8::from_str_radix(&hex[2..4], 16) u8::from_str_radix(&hex[0..2], 16).map_err(|_| format!("Invalid hex color: {}", s))?;
.map_err(|_| format!("Invalid hex color: {}", s))?; let g =
let b = u8::from_str_radix(&hex[4..6], 16) u8::from_str_radix(&hex[2..4], 16).map_err(|_| format!("Invalid hex color: {}", s))?;
.map_err(|_| format!("Invalid hex color: {}", s))?; let b =
u8::from_str_radix(&hex[4..6], 16).map_err(|_| format!("Invalid hex color: {}", s))?;
return Ok(Color::Rgb(r, g, b)); return Ok(Color::Rgb(r, g, b));
} }
}
// Try named colors // Try named colors
match s.to_lowercase().as_str() { match s.to_lowercase().as_str() {
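
A test sketch for the reshaped parser, not part of the commit; it assumes `Color` is ratatui's and that the named-colour arm below includes the usual ANSI names:

```rust
#[cfg(test)]
mod tests {
    use super::parse_color;
    use ratatui::style::Color;

    #[test]
    fn parses_six_digit_hex_and_named_colors() {
        assert_eq!(parse_color("#1e90ff"), Ok(Color::Rgb(0x1e, 0x90, 0xff)));
        // Short forms like "#fff" are not expanded; they fall through to the
        // named-colour table (and, presumably, fail there).
        assert!(parse_color("#fff").is_err());
        // Named colours are matched case-insensitively.
        assert_eq!(parse_color("Black"), Ok(Color::Black));
    }
}
```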

View File

@@ -13,7 +13,7 @@ pub mod web_search;
pub mod web_search_detailed; pub mod web_search_detailed;
use async_trait::async_trait; use async_trait::async_trait;
use serde_json::{json, Value}; use serde_json::{Value, json};
use std::collections::HashMap; use std::collections::HashMap;
use std::time::Duration; use std::time::Duration;

View File

@@ -2,9 +2,9 @@ use std::sync::Arc;
use std::time::Instant; use std::time::Instant;
use crate::Result; use crate::Result;
use anyhow::{anyhow, Context}; use anyhow::{Context, anyhow};
use async_trait::async_trait; use async_trait::async_trait;
use serde_json::{json, Value}; use serde_json::{Value, json};
use super::{Tool, ToolResult}; use super::{Tool, ToolResult};
use crate::sandbox::{SandboxConfig, SandboxedProcess}; use crate::sandbox::{SandboxConfig, SandboxedProcess};

View File

@@ -2,7 +2,7 @@ use super::{Tool, ToolResult};
use crate::Result; use crate::Result;
use anyhow::Context; use anyhow::Context;
use async_trait::async_trait; use async_trait::async_trait;
use serde_json::{json, Value}; use serde_json::{Value, json};
/// Tool that fetches the raw HTML content for a list of URLs. /// Tool that fetches the raw HTML content for a list of URLs.
/// ///

View File

@@ -4,7 +4,7 @@ use std::time::Instant;
use crate::Result; use crate::Result;
use anyhow::Context; use anyhow::Context;
use async_trait::async_trait; use async_trait::async_trait;
use serde_json::{json, Value}; use serde_json::{Value, json};
use super::{Tool, ToolResult}; use super::{Tool, ToolResult};
use crate::consent::ConsentManager; use crate::consent::ConsentManager;

View File

@@ -4,7 +4,7 @@ use std::time::Instant;
use crate::Result; use crate::Result;
use anyhow::Context; use anyhow::Context;
use async_trait::async_trait; use async_trait::async_trait;
use serde_json::{json, Value}; use serde_json::{Value, json};
use super::{Tool, ToolResult}; use super::{Tool, ToolResult};
use crate::consent::ConsentManager; use crate::consent::ConsentManager;
@@ -86,7 +86,9 @@ impl Tool for WebSearchDetailedTool {
.expect("Consent manager mutex poisoned"); .expect("Consent manager mutex poisoned");
if !consent.has_consent(self.name()) { if !consent.has_consent(self.name()) {
return Ok(ToolResult::error("Consent not granted for detailed web search. This should have been handled by the TUI.")); return Ok(ToolResult::error(
"Consent not granted for detailed web search. This should have been handled by the TUI.",
));
} }
} }

View File

@@ -3,171 +3,20 @@
//! This module contains reusable UI components that can be shared between //! This module contains reusable UI components that can be shared between
//! different TUI applications (chat, code, etc.) //! different TUI applications (chat, code, etc.)
use std::fmt;
/// Application state /// Application state
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub use crate::state::AppState;
pub enum AppState {
Running,
Quit,
}
/// Input modes for TUI applications /// Input modes for TUI applications
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub use crate::state::InputMode;
pub enum InputMode {
Normal,
Editing,
ProviderSelection,
ModelSelection,
Help,
Visual,
Command,
SessionBrowser,
ThemeBrowser,
}
impl fmt::Display for InputMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let label = match self {
InputMode::Normal => "Normal",
InputMode::Editing => "Editing",
InputMode::ModelSelection => "Model",
InputMode::ProviderSelection => "Provider",
InputMode::Help => "Help",
InputMode::Visual => "Visual",
InputMode::Command => "Command",
InputMode::SessionBrowser => "Sessions",
InputMode::ThemeBrowser => "Themes",
};
f.write_str(label)
}
}
/// Represents which panel is currently focused /// Represents which panel is currently focused
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub use crate::state::FocusedPanel;
pub enum FocusedPanel {
Chat,
Thinking,
Input,
}
/// Auto-scroll state manager for scrollable panels /// Auto-scroll state manager for scrollable panels
#[derive(Debug, Clone)] pub use crate::state::AutoScroll;
pub struct AutoScroll {
pub scroll: usize,
pub content_len: usize,
pub stick_to_bottom: bool,
}
impl Default for AutoScroll {
fn default() -> Self {
Self {
scroll: 0,
content_len: 0,
stick_to_bottom: true,
}
}
}
impl AutoScroll {
/// Update scroll position based on viewport height
pub fn on_viewport(&mut self, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h);
if self.stick_to_bottom {
self.scroll = max;
} else {
self.scroll = self.scroll.min(max);
}
}
/// Handle user scroll input
pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h) as isize;
let s = (self.scroll as isize + delta).clamp(0, max) as usize;
self.scroll = s;
self.stick_to_bottom = s as isize == max;
}
/// Scroll down half page
pub fn scroll_half_page_down(&mut self, viewport_h: usize) {
let delta = (viewport_h / 2) as isize;
self.on_user_scroll(delta, viewport_h);
}
/// Scroll up half page
pub fn scroll_half_page_up(&mut self, viewport_h: usize) {
let delta = -((viewport_h / 2) as isize);
self.on_user_scroll(delta, viewport_h);
}
/// Scroll down full page
pub fn scroll_full_page_down(&mut self, viewport_h: usize) {
let delta = viewport_h as isize;
self.on_user_scroll(delta, viewport_h);
}
/// Scroll up full page
pub fn scroll_full_page_up(&mut self, viewport_h: usize) {
let delta = -(viewport_h as isize);
self.on_user_scroll(delta, viewport_h);
}
/// Jump to top
pub fn jump_to_top(&mut self) {
self.scroll = 0;
self.stick_to_bottom = false;
}
/// Jump to bottom
pub fn jump_to_bottom(&mut self, viewport_h: usize) {
self.stick_to_bottom = true;
self.on_viewport(viewport_h);
}
}
/// Visual selection state for text selection /// Visual selection state for text selection
#[derive(Debug, Clone, Default)] pub use crate::state::VisualSelection;
pub struct VisualSelection {
pub start: Option<(usize, usize)>, // (row, col)
pub end: Option<(usize, usize)>, // (row, col)
}
impl VisualSelection {
pub fn new() -> Self {
Self::default()
}
pub fn start_at(&mut self, pos: (usize, usize)) {
self.start = Some(pos);
self.end = Some(pos);
}
pub fn extend_to(&mut self, pos: (usize, usize)) {
self.end = Some(pos);
}
pub fn clear(&mut self) {
self.start = None;
self.end = None;
}
pub fn is_active(&self) -> bool {
self.start.is_some() && self.end.is_some()
}
pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> {
if let (Some(s), Some(e)) = (self.start, self.end) {
// Normalize selection so start is always before end
if s.0 < e.0 || (s.0 == e.0 && s.1 <= e.1) {
Some((s, e))
} else {
Some((e, s))
}
} else {
None
}
}
}
/// Extract text from a selection range in a list of lines /// Extract text from a selection range in a list of lines
pub fn extract_text_from_selection( pub fn extract_text_from_selection(
@@ -235,37 +84,7 @@ pub fn extract_text_from_selection(
} }
/// Cursor position for navigating scrollable content /// Cursor position for navigating scrollable content
#[derive(Debug, Clone, Copy, Default)] pub use crate::state::CursorPosition;
pub struct CursorPosition {
pub row: usize,
pub col: usize,
}
impl CursorPosition {
pub fn new(row: usize, col: usize) -> Self {
Self { row, col }
}
pub fn move_up(&mut self, amount: usize) {
self.row = self.row.saturating_sub(amount);
}
pub fn move_down(&mut self, amount: usize, max: usize) {
self.row = (self.row + amount).min(max);
}
pub fn move_left(&mut self, amount: usize) {
self.col = self.col.saturating_sub(amount);
}
pub fn move_right(&mut self, amount: usize, max: usize) {
self.col = (self.col + amount).min(max);
}
pub fn as_tuple(&self) -> (usize, usize) {
(self.row, self.col)
}
}
/// Word boundary detection for navigation /// Word boundary detection for navigation
pub fn find_next_word_boundary(line: &str, col: usize) -> Option<usize> { pub fn find_next_word_boundary(line: &str, col: usize) -> Option<usize> {
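
The refactor pattern in this file is move-and-re-export: the definitions migrate to `crate::state`, but `pub use` keeps every old `ui::` path alive, so imports such as `owlen_core::ui::{FocusedPanel, InputMode}` elsewhere in this commit compile unchanged. In miniature:

```rust
// Miniature of the move-and-re-export refactor. The enum's definition
// moves to `state`, but old `ui::` paths keep compiling.
mod state {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum AppState {
        Running,
        Quit,
    }
}

mod ui {
    // Same type, old path.
    pub use crate::state::AppState;
}

fn main() {
    let current: ui::AppState = state::AppState::Running;
    assert_eq!(current, ui::AppState::Running);
    assert_ne!(current, state::AppState::Quit);
}
```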

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use jsonschema::{JSONSchema, ValidationError}; use jsonschema::{JSONSchema, ValidationError};
use serde_json::{json, Value}; use serde_json::{Value, json};
pub struct SchemaValidator { pub struct SchemaValidator {
schemas: HashMap<String, JSONSchema>, schemas: HashMap<String, JSONSchema>,

View File

@@ -1,5 +1,5 @@
use owlen_core::mcp::remote_client::RemoteMcpClient;
use owlen_core::McpToolCall; use owlen_core::McpToolCall;
use owlen_core::mcp::remote_client::RemoteMcpClient;
use std::fs::File; use std::fs::File;
use std::io::Write; use std::io::Write;
use tempfile::tempdir; use tempfile::tempdir;

View File

@@ -1,5 +1,5 @@
use owlen_core::mcp::remote_client::RemoteMcpClient;
use owlen_core::McpToolCall; use owlen_core::McpToolCall;
use owlen_core::mcp::remote_client::RemoteMcpClient;
use tempfile::tempdir; use tempfile::tempdir;
#[tokio::test] #[tokio::test]

View File

@@ -5,8 +5,8 @@
use owlen_core::mcp::failover::{FailoverConfig, FailoverMcpClient, ServerEntry, ServerHealth}; use owlen_core::mcp::failover::{FailoverConfig, FailoverMcpClient, ServerEntry, ServerHealth};
use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor}; use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor};
use owlen_core::{Error, Result}; use owlen_core::{Error, Result};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Duration; use std::time::Duration;
/// Mock MCP client for testing failover behavior /// Mock MCP client for testing failover behavior

View File

@@ -1,9 +1,9 @@
//! Integration test for the MCP prompt rendering server. //! Integration test for the MCP prompt rendering server.
use owlen_core::Result;
use owlen_core::config::McpServerConfig; use owlen_core::config::McpServerConfig;
use owlen_core::mcp::client::RemoteMcpClient; use owlen_core::mcp::client::RemoteMcpClient;
use owlen_core::mcp::{McpToolCall, McpToolResponse}; use owlen_core::mcp::{McpToolCall, McpToolResponse};
use owlen_core::Result;
use serde_json::json; use serde_json::json;
use std::path::PathBuf; use std::path::PathBuf;

View File

@@ -1,6 +1,6 @@
#![allow(non_snake_case)] #![allow(non_snake_case)]
use owlen_core::wrap_cursor::{build_cursor_map, ScreenPos}; use owlen_core::wrap_cursor::{ScreenPos, build_cursor_map};
fn assert_cursor_pos(map: &[ScreenPos], byte_idx: usize, expected: ScreenPos) { fn assert_cursor_pos(map: &[ScreenPos], byte_idx: usize, expected: ScreenPos) {
assert_eq!(map[byte_idx], expected, "Mismatch at byte {}", byte_idx); assert_eq!(map[byte_idx], expected, "Mismatch at byte {}", byte_idx);

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "owlen-mcp-client" name = "owlen-mcp-client"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition.workspace = true
description = "Dedicated MCP client library for Owlen, exposing remote MCP server communication" description = "Dedicated MCP client library for Owlen, exposing remote MCP server communication"
license = "AGPL-3.0" license = "AGPL-3.0"

View File

@@ -8,11 +8,8 @@
pub use owlen_core::mcp::remote_client::RemoteMcpClient; pub use owlen_core::mcp::remote_client::RemoteMcpClient;
pub use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse}; pub use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse};
// Reexport the Provider implementation so the client can also be used as an
// LLM provider when the remote MCP server hosts a languagemodel tool (e.g.
// `generate_text`).
// Reexport the core Provider trait so that the MCP client can also be used as an LLM provider. // Reexport the core Provider trait so that the MCP client can also be used as an LLM provider.
pub use owlen_core::provider::Provider as McpProvider; pub use owlen_core::Provider as McpProvider;
// Note: The `RemoteMcpClient` type provides its own `new` constructor in the core // Note: The `RemoteMcpClient` type provides its own `new` constructor in the core
// crate. Users can call `RemoteMcpClient::new()` directly. No additional wrapper // crate. Users can call `RemoteMcpClient::new()` directly. No additional wrapper

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "owlen-mcp-code-server" name = "owlen-mcp-code-server"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition.workspace = true
description = "MCP server exposing safe code execution tools for Owlen" description = "MCP server exposing safe code execution tools for Owlen"
license = "AGPL-3.0" license = "AGPL-3.0"

View File

@@ -10,11 +10,11 @@ pub mod sandbox;
pub mod tools; pub mod tools;
use owlen_core::mcp::protocol::{ use owlen_core::mcp::protocol::{
methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError,
RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, methods,
}; };
use owlen_core::tools::{Tool, ToolResult}; use owlen_core::tools::{Tool, ToolResult};
use serde_json::{json, Value}; use serde_json::{Value, json};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt}; use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt};
@@ -149,10 +149,10 @@ async fn handle_request(
supports_streaming: Some(false), supports_streaming: Some(false),
}, },
}; };
Ok(RpcResponse::new( let payload = serde_json::to_value(result).map_err(|e| {
req.id, RpcError::internal_error(format!("Failed to serialize initialize result: {}", e))
serde_json::to_value(result).unwrap(), })?;
)) Ok(RpcResponse::new(req.id, payload))
} }
methods::TOOLS_LIST => { methods::TOOLS_LIST => {
let tools = registry.list_tools(); let tools = registry.list_tools();
@@ -176,10 +176,10 @@ async fn handle_request(
metadata: result.metadata, metadata: result.metadata,
duration_ms: result.duration.as_millis() as u128, duration_ms: result.duration.as_millis() as u128,
}; };
Ok(RpcResponse::new( let payload = serde_json::to_value(resp).map_err(|e| {
req.id, RpcError::internal_error(format!("Failed to serialize tool response: {}", e))
serde_json::to_value(resp).unwrap(), })?;
)) Ok(RpcResponse::new(req.id, payload))
} }
_ => Err(RpcError::method_not_found(&req.method)), _ => Err(RpcError::method_not_found(&req.method)),
} }
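
This hunk shows the commit's serialization-hardening pattern: each `serde_json::to_value(...).unwrap()` on a response path becomes a `map_err` into a JSON-RPC internal error, so a failed serialization answers the request instead of panicking the server. The same idea as a small helper, sketched under assumptions (the `RpcError` fields are stand-ins for `owlen_core::mcp::protocol::RpcError`, -32603 is the standard JSON-RPC internal-error code, and the diff inlines the `map_err` at each site rather than introducing a helper):

```rust
use serde::Serialize;
use serde_json::Value;

/// Stand-in for the protocol error type; real fields may differ.
pub struct RpcError {
    pub code: i64,
    pub message: String,
}

impl RpcError {
    pub fn internal_error(message: impl Into<String>) -> Self {
        // -32603: JSON-RPC 2.0 "internal error".
        Self { code: -32603, message: message.into() }
    }
}

/// One place for the pattern the diff repeats inline at every call site.
pub fn to_rpc_payload<T: Serialize>(value: &T, what: &str) -> Result<Value, RpcError> {
    serde_json::to_value(value)
        .map_err(|e| RpcError::internal_error(format!("Failed to serialize {what}: {e}")))
}
```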

View File

@@ -1,12 +1,12 @@
//! Docker-based sandboxing for secure code execution //! Docker-based sandboxing for secure code execution
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use bollard::Docker;
use bollard::container::{ use bollard::container::{
Config, CreateContainerOptions, RemoveContainerOptions, StartContainerOptions, Config, CreateContainerOptions, RemoveContainerOptions, StartContainerOptions,
WaitContainerOptions, WaitContainerOptions,
}; };
use bollard::models::{HostConfig, Mount, MountTypeEnum}; use bollard::models::{HostConfig, Mount, MountTypeEnum};
use bollard::Docker;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;

View File

@@ -2,9 +2,9 @@
use crate::sandbox::Sandbox; use crate::sandbox::Sandbox;
use async_trait::async_trait; use async_trait::async_trait;
use owlen_core::tools::{Tool, ToolResult};
use owlen_core::Result; use owlen_core::Result;
use serde_json::{json, Value}; use owlen_core::tools::{Tool, ToolResult};
use serde_json::{Value, json};
use std::path::PathBuf; use std::path::PathBuf;
/// Tool for compiling projects (Rust, Node.js, Python) /// Tool for compiling projects (Rust, Node.js, Python)

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "owlen-mcp-llm-server" name = "owlen-mcp-llm-server"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition.workspace = true
[dependencies] [dependencies]
owlen-core = { path = "../owlen-core" } owlen-core = { path = "../owlen-core" }

View File

@@ -7,18 +7,19 @@
clippy::empty_line_after_outer_attr clippy::empty_line_after_outer_attr
)] )]
use owlen_core::config::{ensure_provider_config, Config as OwlenConfig}; use owlen_core::Provider;
use owlen_core::ProviderConfig;
use owlen_core::config::{Config as OwlenConfig, ensure_provider_config};
use owlen_core::mcp::protocol::{ use owlen_core::mcp::protocol::{
methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError,
RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo,
methods,
}; };
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse}; use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
use owlen_core::provider::ProviderConfig;
use owlen_core::providers::OllamaProvider; use owlen_core::providers::OllamaProvider;
use owlen_core::types::{ChatParameters, ChatRequest, Message}; use owlen_core::types::{ChatParameters, ChatRequest, Message};
use owlen_core::Provider;
use serde::Deserialize; use serde::Deserialize;
use serde_json::{json, Value}; use serde_json::{Value, json};
use std::collections::HashMap; use std::collections::HashMap;
use std::env; use std::env;
use std::sync::Arc; use std::sync::Arc;
@@ -178,7 +179,7 @@ async fn handle_generate_text(args: GenerateTextArgs) -> Result<String, RpcError
// Use streaming API and collect output // Use streaming API and collect output
let mut stream = provider let mut stream = provider
.chat_stream(request) .stream_prompt(request)
.await .await
.map_err(|e| RpcError::internal_error(format!("Chat request failed: {}", e)))?; .map_err(|e| RpcError::internal_error(format!("Chat request failed: {}", e)))?;
let mut content = String::new(); let mut content = String::new();
@@ -228,7 +229,9 @@ async fn handle_request(req: &RpcRequest) -> Result<Value, RpcError> {
supports_streaming: Some(true), supports_streaming: Some(true),
}, },
}; };
Ok(serde_json::to_value(result).unwrap()) serde_json::to_value(result).map_err(|e| {
RpcError::internal_error(format!("Failed to serialize init result: {}", e))
})
} }
methods::TOOLS_LIST => { methods::TOOLS_LIST => {
let tools = vec![ let tools = vec![
@@ -245,7 +248,9 @@ async fn handle_request(req: &RpcRequest) -> Result<Value, RpcError> {
.list_models() .list_models()
.await .await
.map_err(|e| RpcError::internal_error(format!("Failed to list models: {}", e)))?; .map_err(|e| RpcError::internal_error(format!("Failed to list models: {}", e)))?;
Ok(serde_json::to_value(models).unwrap()) serde_json::to_value(models).map_err(|e| {
RpcError::internal_error(format!("Failed to serialize model list: {}", e))
})
} }
methods::TOOLS_CALL => { methods::TOOLS_CALL => {
// For streaming we will send incremental notifications directly from here. // For streaming we will send incremental notifications directly from here.
@@ -331,10 +336,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(), metadata: HashMap::new(),
duration_ms: 0, duration_ms: 0,
}; };
let final_resp = RpcResponse::new( let payload = match serde_json::to_value(&response) {
Ok(value) => value,
Err(e) => {
let err_resp = RpcErrorResponse::new(
id.clone(), id.clone(),
serde_json::to_value(response).unwrap(), RpcError::internal_error(format!(
"Failed to serialize resource response: {}",
e
)),
); );
let s = serde_json::to_string(&err_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;
stdout.flush().await?;
continue;
}
};
let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?; let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?; stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?; stdout.write_all(b"\n").await?;
@@ -375,10 +394,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(), metadata: HashMap::new(),
duration_ms: 0, duration_ms: 0,
}; };
let final_resp = RpcResponse::new( let payload = match serde_json::to_value(&response) {
Ok(value) => value,
Err(e) => {
let err_resp = RpcErrorResponse::new(
id.clone(), id.clone(),
serde_json::to_value(response).unwrap(), RpcError::internal_error(format!(
"Failed to serialize directory listing: {}",
e
)),
); );
let s = serde_json::to_string(&err_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;
stdout.flush().await?;
continue;
}
};
let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?; let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?; stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?; stdout.write_all(b"\n").await?;
@@ -454,7 +487,7 @@ async fn main() -> anyhow::Result<()> {
parameters, parameters,
tools: None, tools: None,
}; };
let mut stream = match provider.chat_stream(request).await { let mut stream = match provider.stream_prompt(request).await {
Ok(s) => s, Ok(s) => s,
Err(e) => { Err(e) => {
let err_resp = RpcErrorResponse::new( let err_resp = RpcErrorResponse::new(
@@ -510,8 +543,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(), metadata: HashMap::new(),
duration_ms: 0, duration_ms: 0,
}; };
let final_resp = let payload = match serde_json::to_value(&response) {
RpcResponse::new(id.clone(), serde_json::to_value(response).unwrap()); Ok(value) => value,
Err(e) => {
let err_resp = RpcErrorResponse::new(
id.clone(),
RpcError::internal_error(format!(
"Failed to serialize final streaming response: {}",
e
)),
);
let s = serde_json::to_string(&err_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;
stdout.flush().await?;
continue;
}
};
let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?; let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?; stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?; stdout.write_all(b"\n").await?;

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "owlen-mcp-prompt-server" name = "owlen-mcp-prompt-server"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition.workspace = true
description = "MCP server that renders prompt templates (YAML) for Owlen" description = "MCP server that renders prompt templates (YAML) for Owlen"
license = "AGPL-3.0" license = "AGPL-3.0"

View File

@@ -6,7 +6,7 @@
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use handlebars::Handlebars; use handlebars::Handlebars;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{json, Value}; use serde_json::{Value, json};
use std::collections::HashMap; use std::collections::HashMap;
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@@ -14,8 +14,8 @@ use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use owlen_core::mcp::protocol::{ use owlen_core::mcp::protocol::{
methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError,
RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, methods,
}; };
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse}; use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt}; use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt};
@@ -148,7 +148,7 @@ FINAL_ANSWER: Summary of what was done"#
template.name, e template.name, e
); );
} else { } else {
let mut templates = futures::executor::block_on(self.templates.write()); let mut templates = self.templates.blocking_write();
templates.insert(template.name.clone(), template); templates.insert(template.name.clone(), template);
} }
} }
@@ -284,10 +284,10 @@ async fn handle_request(
supports_streaming: Some(false), supports_streaming: Some(false),
}, },
}; };
Ok(RpcResponse::new( let payload = serde_json::to_value(result).map_err(|e| {
req.id, RpcError::internal_error(format!("Failed to serialize initialize result: {}", e))
serde_json::to_value(result).unwrap(), })?;
)) Ok(RpcResponse::new(req.id, payload))
} }
methods::TOOLS_LIST => { methods::TOOLS_LIST => {
let tools = vec![ let tools = vec![
@@ -349,9 +349,17 @@ async fn handle_request(
let srv = server.lock().await; let srv = server.lock().await;
match srv.get_template(name).await { match srv.get_template(name).await {
Some(template) => { Some(template) => match serde_json::to_value(template) {
json!({"success": true, "template": serde_json::to_value(template).unwrap()}) Ok(serialized) => {
json!({"success": true, "template": serialized})
} }
Err(e) => {
return Err(RpcError::internal_error(format!(
"Failed to serialize template '{}': {}",
name, e
)));
}
},
None => json!({"success": false, "error": "Template not found"}), None => json!({"success": false, "error": "Template not found"}),
} }
} }
@@ -397,10 +405,10 @@ async fn handle_request(
duration_ms: 0, duration_ms: 0,
}; };
Ok(RpcResponse::new( let payload = serde_json::to_value(resp).map_err(|e| {
req.id, RpcError::internal_error(format!("Failed to serialize tool response: {}", e))
serde_json::to_value(resp).unwrap(), })?;
)) Ok(RpcResponse::new(req.id, payload))
} }
_ => Err(RpcError::method_not_found(&req.method)), _ => Err(RpcError::method_not_found(&req.method)),
} }

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "owlen-mcp-server" name = "owlen-mcp-server"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition.workspace = true
[dependencies] [dependencies]
tokio = { workspace = true } tokio = { workspace = true }

View File

@@ -1,6 +1,6 @@
use owlen_core::mcp::protocol::{ use owlen_core::mcp::protocol::{
is_compatible, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError,
RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, is_compatible,
}; };
use path_clean::PathClean; use path_clean::PathClean;
use serde::Deserialize; use serde::Deserialize;

File diff suppressed because it is too large

View File

@@ -0,0 +1,191 @@
//! Command catalog and lookup utilities for the command palette.
/// Metadata describing a single command keyword.
#[derive(Debug, Clone, Copy)]
pub struct CommandSpec {
pub keyword: &'static str,
pub description: &'static str,
}
const COMMANDS: &[CommandSpec] = &[
CommandSpec {
keyword: "quit",
description: "Exit the application",
},
CommandSpec {
keyword: "q",
description: "Alias for quit",
},
CommandSpec {
keyword: "clear",
description: "Clear the conversation",
},
CommandSpec {
keyword: "c",
description: "Alias for clear",
},
CommandSpec {
keyword: "w",
description: "Alias for write",
},
CommandSpec {
keyword: "save",
description: "Alias for write",
},
CommandSpec {
keyword: "load",
description: "Load a saved conversation",
},
CommandSpec {
keyword: "o",
description: "Alias for load",
},
CommandSpec {
keyword: "open",
description: "Open a file in the code view",
},
CommandSpec {
keyword: "close",
description: "Close the active code view",
},
CommandSpec {
keyword: "mode",
description: "Switch operating mode (chat/code)",
},
CommandSpec {
keyword: "code",
description: "Switch to code mode",
},
CommandSpec {
keyword: "chat",
description: "Switch to chat mode",
},
CommandSpec {
keyword: "tools",
description: "List available tools in current mode",
},
CommandSpec {
keyword: "sessions",
description: "List saved sessions",
},
CommandSpec {
keyword: "help",
description: "Show help documentation",
},
CommandSpec {
keyword: "h",
description: "Alias for help",
},
CommandSpec {
keyword: "model",
description: "Select a model",
},
CommandSpec {
keyword: "model info",
description: "Show detailed information for a model",
},
CommandSpec {
keyword: "model refresh",
description: "Refresh cached model information",
},
CommandSpec {
keyword: "model details",
description: "Show details for the active model",
},
CommandSpec {
keyword: "m",
description: "Alias for model",
},
CommandSpec {
keyword: "models info",
description: "Prefetch detailed information for all models",
},
CommandSpec {
keyword: "new",
description: "Start a new conversation",
},
CommandSpec {
keyword: "n",
description: "Alias for new",
},
CommandSpec {
keyword: "theme",
description: "Switch theme",
},
CommandSpec {
keyword: "themes",
description: "List available themes",
},
CommandSpec {
keyword: "tutorial",
description: "Show keybinding tutorial",
},
CommandSpec {
keyword: "reload",
description: "Reload configuration and themes",
},
CommandSpec {
keyword: "e",
description: "Edit a file",
},
CommandSpec {
keyword: "edit",
description: "Alias for edit",
},
CommandSpec {
keyword: "ls",
description: "List directory contents",
},
CommandSpec {
keyword: "privacy-enable",
description: "Enable a privacy-sensitive tool",
},
CommandSpec {
keyword: "privacy-disable",
description: "Disable a privacy-sensitive tool",
},
CommandSpec {
keyword: "privacy-clear",
description: "Clear stored secure data",
},
CommandSpec {
keyword: "agent",
description: "Enable agent mode for autonomous task execution",
},
CommandSpec {
keyword: "stop-agent",
description: "Stop the running agent",
},
];
/// Return the static catalog of commands.
pub fn all() -> &'static [CommandSpec] {
COMMANDS
}
/// Return the default suggestion list (all command keywords).
pub fn default_suggestions() -> Vec<String> {
COMMANDS
.iter()
.map(|spec| spec.keyword.to_string())
.collect()
}
/// Generate keyword suggestions for the given input.
pub fn suggestions(input: &str) -> Vec<String> {
let trimmed = input.trim();
if trimmed.is_empty() {
return default_suggestions();
}
COMMANDS
.iter()
.filter_map(|spec| {
if spec.keyword.starts_with(trimmed) {
Some(spec.keyword.to_string())
} else {
None
}
})
.collect()
}
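
A test sketch for the prefix matcher, not part of the commit:

```rust
#[cfg(test)]
mod tests {
    use super::{all, suggestions};

    #[test]
    fn empty_input_lists_everything() {
        // Whitespace-only input surfaces the whole catalog.
        assert_eq!(suggestions("   ").len(), all().len());
    }

    #[test]
    fn prefixes_narrow_the_catalog() {
        let hits = suggestions("mod");
        assert!(!hits.is_empty());
        assert!(hits.iter().all(|k| k.starts_with("mod")));
        assert!(hits.contains(&"model info".to_string()));
    }
}
```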

View File

@@ -1,6 +1,6 @@
pub use owlen_core::config::{ pub use owlen_core::config::{
default_config_path, ensure_ollama_config, ensure_provider_config, session_timeout, Config, Config, DEFAULT_CONFIG_PATH, GeneralSettings, InputSettings, StorageSettings, UiSettings,
GeneralSettings, InputSettings, StorageSettings, UiSettings, DEFAULT_CONFIG_PATH, default_config_path, ensure_ollama_config, ensure_provider_config, session_timeout,
}; };
/// Attempt to load configuration from default location /// Attempt to load configuration from default location

View File

@@ -14,9 +14,11 @@
pub mod chat_app; pub mod chat_app;
pub mod code_app; pub mod code_app;
pub mod commands;
pub mod config; pub mod config;
pub mod events; pub mod events;
pub mod model_info_panel; pub mod model_info_panel;
pub mod state;
pub mod tui_controller; pub mod tui_controller;
pub mod ui; pub mod ui;

View File

@@ -1,10 +1,10 @@
use owlen_core::model::DetailedModelInfo; use owlen_core::model::DetailedModelInfo;
use owlen_core::theme::Theme; use owlen_core::theme::Theme;
use ratatui::{ use ratatui::{
Frame,
layout::Rect, layout::Rect,
style::{Color, Modifier, Style}, style::{Color, Modifier, Style},
widgets::{Block, Borders, Paragraph, Wrap}, widgets::{Block, Borders, Paragraph, Wrap},
Frame,
}; };
/// Dedicated panel for presenting detailed model information. /// Dedicated panel for presenting detailed model information.

View File

@@ -0,0 +1,92 @@
use crate::commands;
/// Encapsulates the command-line style palette used in command mode.
///
/// The palette keeps track of the raw buffer, matching suggestions, and the
/// currently highlighted suggestion index. It contains no terminal-specific
/// logic, which makes it straightforward to unit test.
#[derive(Debug, Clone, Default)]
pub struct CommandPalette {
buffer: String,
suggestions: Vec<String>,
selected: usize,
}
impl CommandPalette {
pub fn new() -> Self {
Self::default()
}
pub fn buffer(&self) -> &str {
&self.buffer
}
pub fn suggestions(&self) -> &[String] {
&self.suggestions
}
pub fn selected_index(&self) -> usize {
self.selected
}
pub fn clear(&mut self) {
self.buffer.clear();
self.suggestions.clear();
self.selected = 0;
}
pub fn set_buffer(&mut self, value: impl Into<String>) {
self.buffer = value.into();
self.refresh_suggestions();
}
pub fn push_char(&mut self, ch: char) {
self.buffer.push(ch);
self.refresh_suggestions();
}
pub fn pop_char(&mut self) {
self.buffer.pop();
self.refresh_suggestions();
}
pub fn select_previous(&mut self) {
if !self.suggestions.is_empty() {
self.selected = self.selected.saturating_sub(1);
}
}
pub fn select_next(&mut self) {
if !self.suggestions.is_empty() {
let max_index = self.suggestions.len().saturating_sub(1);
self.selected = (self.selected + 1).min(max_index);
}
}
pub fn apply_selected(&mut self) -> Option<String> {
let selected = self
.suggestions
.get(self.selected)
.cloned()
.or_else(|| self.suggestions.first().cloned());
if let Some(value) = selected.clone() {
self.buffer = value;
self.refresh_suggestions();
}
selected
}
pub fn refresh_suggestions(&mut self) {
let trimmed = self.buffer.trim();
self.suggestions = commands::suggestions(trimmed);
if self.selected >= self.suggestions.len() {
self.selected = 0;
}
}
pub fn ensure_suggestions(&mut self) {
if self.suggestions.is_empty() {
self.refresh_suggestions();
}
}
}
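
A quick usage sketch (not part of the commit) of the intended interaction flow; in the application this sequence would be driven by the TUI key handler, which is an assumption here:

```rust
use owlen_tui::state::CommandPalette;

fn main() {
    let mut palette = CommandPalette::new();
    palette.ensure_suggestions(); // populate the full catalog on first open
    palette.push_char('m');
    palette.push_char('o'); // buffer == "mo"; suggestions narrowed to that prefix
    palette.select_next(); // highlight moves down, clamped at the last entry
    if let Some(keyword) = palette.apply_selected() {
        // The buffer now mirrors the chosen suggestion.
        assert_eq!(palette.buffer(), keyword);
    }
}
```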

View File

@@ -0,0 +1,10 @@
//! State helpers shared across TUI components.
//!
//! The `state` module contains lightweight wrappers that encapsulate UI state
//! shared between widgets. Keeping these helpers out of the main `chat_app`
//! implementation makes the command palette and other stateful widgets easier
//! to test in isolation.
mod command_palette;
pub use command_palette::CommandPalette;

View File

@@ -1,14 +1,14 @@
+use ratatui::Frame;
 use ratatui::layout::{Alignment, Constraint, Direction, Layout, Rect};
 use ratatui::style::{Color, Modifier, Style};
 use ratatui::text::{Line, Span};
 use ratatui::widgets::{Block, Borders, Clear, List, ListItem, ListState, Paragraph, Wrap};
-use ratatui::Frame;
 use serde_json;
-use textwrap::{wrap, Options};
+use textwrap::{Options, wrap};
 use tui_textarea::TextArea;
 use unicode_width::UnicodeWidthStr;
 
-use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT};
+use crate::chat_app::{ChatApp, HELP_TAB_COUNT, ModelSelectorItemKind};
 use owlen_core::model::DetailedModelInfo;
 use owlen_core::types::{ModelInfo, Role};
 use owlen_core::ui::{FocusedPanel, InputMode};
@@ -22,10 +22,21 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) {
     // Set terminal background color
     let theme = app.theme().clone();
     let background_block = Block::default().style(Style::default().bg(theme.background));
-    frame.render_widget(background_block, frame.area());
+    let full_area = frame.area();
+    frame.render_widget(background_block, full_area);
+
+    let (chat_area, code_area) = if app.should_show_code_view() {
+        let segments = Layout::default()
+            .direction(Direction::Horizontal)
+            .constraints([Constraint::Percentage(65), Constraint::Percentage(35)])
+            .split(full_area);
+        (segments[0], Some(segments[1]))
+    } else {
+        (full_area, None)
+    };
 
     // Calculate dynamic input height based on textarea content
-    let available_width = frame.area().width;
+    let available_width = chat_area.width;
     let input_height = if matches!(app.mode(), InputMode::Editing) {
         let visual_lines = calculate_wrapped_line_count(
             app.textarea().lines().iter().map(|s| s.as_str()),
@@ -81,7 +92,7 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) {
     let layout = Layout::default()
         .direction(Direction::Vertical)
        .constraints(constraints)
-        .split(frame.area());
+        .split(chat_area);
 
     let mut idx = 0;
     render_header(frame, layout[idx], app);
@@ -124,19 +135,22 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) {
     }
 
     if app.is_model_info_visible() {
-        let panel_width = frame
-            .area()
+        let panel_width = full_area
             .width
             .saturating_div(3)
             .max(30)
-            .min(frame.area().width.saturating_sub(20).max(30));
-        let x = frame.area().x + frame.area().width.saturating_sub(panel_width);
-        let area = Rect::new(x, frame.area().y, panel_width, frame.area().height);
+            .min(full_area.width.saturating_sub(20).max(30));
+        let x = full_area.x + full_area.width.saturating_sub(panel_width);
+        let area = Rect::new(x, full_area.y, panel_width, full_area.height);
         frame.render_widget(Clear, area);
         let viewport_height = area.height.saturating_sub(2) as usize;
         app.set_model_info_viewport_height(viewport_height);
         app.model_info_panel_mut().render(frame, area, &theme);
     }
+
+    if let Some(area) = code_area {
+        render_code_view(frame, area, app);
+    }
 }
 
 fn render_editable_textarea(
@@ -219,11 +233,11 @@ fn render_editable_textarea(
     let metrics = compute_cursor_metrics(lines_slice, cursor, mask_char, inner, wrap_lines);
 
-    if let Some(ref metrics) = metrics {
-        if metrics.scroll_top > 0 {
-            paragraph = paragraph.scroll((metrics.scroll_top, 0));
-        }
-    }
+    if let Some(ref metrics) = metrics
+        && metrics.scroll_top > 0
+    {
+        paragraph = paragraph.scroll((metrics.scroll_top, 0));
+    }
 
     if let Some(block) = block {
         paragraph = paragraph.block(block);
@@ -374,14 +388,12 @@ fn compute_cursor_metrics(
             break;
         }
 
-        if !cursor_found {
-            if let Some(last_segment) = segments.last() {
-                cursor_visual_row = total_visual_rows + segments.len().saturating_sub(1);
-                cursor_col_width = UnicodeWidthStr::width(last_segment.as_str());
-                cursor_found = true;
-            }
+        if !cursor_found && let Some(last_segment) = segments.last() {
+            cursor_visual_row = total_visual_rows + segments.len().saturating_sub(1);
+            cursor_col_width = UnicodeWidthStr::width(last_segment.as_str());
+            cursor_found = true;
         }
 
         total_visual_rows += segments.len();
     }
@@ -469,9 +481,15 @@ fn render_header(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
             .fg(theme.focused_panel_border)
             .add_modifier(Modifier::BOLD),
     );
+    let provider_span = Span::styled(
+        app.current_provider().to_string(),
+        Style::default().fg(theme.text),
+    );
     let model_span = Span::styled(
-        format!("Model: {}", app.selected_model()),
-        Style::default().fg(theme.user_message_role),
+        app.selected_model().to_string(),
+        Style::default()
+            .fg(theme.user_message_role)
+            .add_modifier(Modifier::BOLD),
     );
 
     let header_block = Block::default()
@@ -482,7 +500,17 @@ fn render_header(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
     let inner_area = header_block.inner(area);
 
-    let header_text = vec![Line::from(""), Line::from(format!(" {model_span} "))];
+    let header_text = vec![
+        Line::default(),
+        Line::from(vec![
+            Span::raw(" "),
+            Span::styled("Provider: ", Style::default().fg(theme.placeholder)),
+            provider_span,
+            Span::raw(" "),
+            Span::styled("Model: ", Style::default().fg(theme.placeholder)),
+            model_span,
+        ]),
+    ];
 
     let paragraph = Paragraph::new(header_text)
         .style(Style::default().bg(theme.background).fg(theme.text))
@@ -776,12 +804,12 @@ fn render_messages(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) {
     }
 
     // Apply visual selection highlighting if in visual mode and Chat panel is focused
-    if matches!(app.mode(), InputMode::Visual) && matches!(app.focused_panel(), FocusedPanel::Chat)
+    if matches!(app.mode(), InputMode::Visual)
+        && matches!(app.focused_panel(), FocusedPanel::Chat)
+        && let Some(selection) = app.visual_selection()
     {
-        if let Some(selection) = app.visual_selection() {
-            lines = apply_visual_selection(lines, Some(selection), &theme);
-        }
+        lines = apply_visual_selection(lines, Some(selection), &theme);
     }
 
     // Update AutoScroll state with accurate content length
     let auto_scroll = app.auto_scroll_mut();
@@ -864,11 +892,10 @@ fn render_thinking(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) {
     // Apply visual selection highlighting if in visual mode and Thinking panel is focused
     if matches!(app.mode(), InputMode::Visual)
         && matches!(app.focused_panel(), FocusedPanel::Thinking)
+        && let Some(selection) = app.visual_selection()
     {
-        if let Some(selection) = app.visual_selection() {
-            lines = apply_visual_selection(lines, Some(selection), &theme);
-        }
+        lines = apply_visual_selection(lines, Some(selection), &theme);
     }
 
     // Update AutoScroll state with accurate content length
     let thinking_scroll = app.thinking_scroll_mut();
@@ -1264,11 +1291,7 @@ where
         total += wrapped.len().max(1);
     }
 
-    if !seen {
-        1
-    } else {
-        total.max(1)
-    }
+    if !seen { 1 } else { total.max(1) }
 }
 
 fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
@@ -1328,6 +1351,30 @@ fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
             .add_modifier(Modifier::BOLD),
     ));
+    spans.push(Span::styled(" ", Style::default().fg(theme.text)));
+    spans.push(Span::styled(
+        "Provider: ",
+        Style::default()
+            .fg(theme.placeholder)
+            .add_modifier(Modifier::ITALIC),
+    ));
+    spans.push(Span::styled(
+        app.current_provider().to_string(),
+        Style::default().fg(theme.text),
+    ));
+    spans.push(Span::styled(" ", Style::default().fg(theme.text)));
+    spans.push(Span::styled(
+        "Model: ",
+        Style::default()
+            .fg(theme.placeholder)
+            .add_modifier(Modifier::ITALIC),
+    ));
+    spans.push(Span::styled(
+        app.selected_model().to_string(),
+        Style::default()
+            .fg(theme.user_message_role)
+            .add_modifier(Modifier::BOLD),
+    ));
     spans.push(Span::styled(" ", Style::default().fg(theme.text)));
     spans.push(Span::styled(help_text, Style::default().fg(theme.info)));
@@ -1344,6 +1391,76 @@ fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
     frame.render_widget(paragraph, area);
 }
 
+fn render_code_view(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) {
+    let path = match app.code_view_path() {
+        Some(p) => p.to_string(),
+        None => {
+            frame.render_widget(Clear, area);
+            return;
+        }
+    };
+    let theme = app.theme().clone();
+    frame.render_widget(Clear, area);
+
+    let viewport_height = area.height.saturating_sub(2) as usize;
+    app.set_code_view_viewport_height(viewport_height);
+
+    let mut lines: Vec<Line> = Vec::new();
+    if app.code_view_lines().is_empty() {
+        lines.push(Line::from(Span::styled(
+            "(empty file)",
+            Style::default()
+                .fg(theme.placeholder)
+                .add_modifier(Modifier::ITALIC),
+        )));
+    } else {
+        for (idx, content) in app.code_view_lines().iter().enumerate() {
+            let number = format!("{:>4} ", idx + 1);
+            let spans = vec![
+                Span::styled(
+                    number,
+                    Style::default()
+                        .fg(theme.placeholder)
+                        .add_modifier(Modifier::DIM),
+                ),
+                Span::styled(content.clone(), Style::default().fg(theme.text)),
+            ];
+            lines.push(Line::from(spans));
+        }
+    }
+
+    let scroll_state = app.code_view_scroll_mut();
+    scroll_state.content_len = lines.len();
+    scroll_state.on_viewport(viewport_height);
+    let scroll_position = scroll_state.scroll.min(u16::MAX as usize) as u16;
+
+    let border_color = if matches!(app.focused_panel(), FocusedPanel::Code) {
+        theme.focused_panel_border
+    } else {
+        theme.unfocused_panel_border
+    };
+
+    let block = Block::default()
+        .borders(Borders::ALL)
+        .border_style(Style::default().fg(border_color))
+        .style(Style::default().bg(theme.background).fg(theme.text))
+        .title(Span::styled(
+            path,
+            Style::default()
+                .fg(theme.focused_panel_border)
+                .add_modifier(Modifier::BOLD),
+        ));
+
+    let paragraph = Paragraph::new(lines)
+        .style(Style::default().bg(theme.background).fg(theme.text))
+        .block(block)
+        .scroll((scroll_position, 0))
+        .wrap(Wrap { trim: false });
+
+    frame.render_widget(paragraph, area);
+}
+
 fn render_provider_selector(frame: &mut Frame<'_>, app: &ChatApp) {
     let theme = app.theme();
     let area = centered_rect(60, 60, frame.area());
@@ -1510,11 +1627,10 @@ fn build_model_selector_label(
             .parameter_size
            .as_ref()
             .or(detail.parameters.as_ref())
+        && !parameters.trim().is_empty()
     {
-        if !parameters.trim().is_empty() {
-            parts.push(parameters.trim().to_string());
-        }
+        parts.push(parameters.trim().to_string());
     }
 
     if let Some(size) = detail.size {
         parts.push(format_short_size(size));
@@ -2032,8 +2148,17 @@ fn render_help(frame: &mut Frame<'_>, app: &ChatApp) {
         )]),
         Line::from(" :save [name] → save current session (with optional name)"),
         Line::from(" :w [name] → alias for :save"),
-        Line::from(" :load, :o, :open → browse and load saved sessions"),
+        Line::from(" :load, :o → browse and load saved sessions"),
         Line::from(" :sessions, :ls → browse saved sessions"),
+        Line::from(""),
+        Line::from(vec![Span::styled(
+            "CODE VIEW",
+            Style::default()
+                .add_modifier(Modifier::BOLD)
+                .fg(theme.user_message_role),
+        )]),
+        Line::from(" :open <path> → open file in code side panel"),
+        Line::from(" :close → close the code side panel"),
         // New mode and tool commands added in phase 05
         Line::from(" :code → switch to code mode (CLI: owlen --code)"),
         Line::from(" :mode <chat|code> → change current mode explicitly"),
@@ -2066,7 +2191,7 @@ fn render_help(frame: &mut Frame<'_>, app: &ChatApp) {
             .add_modifier(Modifier::BOLD)
             .fg(theme.user_message_role),
         )]),
-        Line::from(" :load, :o, :open → browse and select session"),
+        Line::from(" :load, :o → browse and select session"),
         Line::from(" :sessions, :ls → browse saved sessions"),
         Line::from(""),
         Line::from(vec![Span::styled(
@@ -2291,14 +2416,14 @@ fn render_session_browser(frame: &mut Frame<'_>, app: &ChatApp) {
         let mut lines = vec![Line::from(Span::styled(name, style))];
 
         // Add description if available and not empty
-        if let Some(description) = &session.description {
-            if !description.is_empty() {
-                lines.push(Line::from(Span::styled(
-                    format!(" \"{}\"", description),
-                    desc_style,
-                )));
-            }
+        if let Some(description) = &session.description
+            && !description.is_empty()
+        {
+            lines.push(Line::from(Span::styled(
+                format!(" \"{}\"", description),
+                desc_style,
+            )));
         }
 
         // Add metadata line
         lines.push(Line::from(Span::styled(format!(" {}", info), info_style)));
@@ -2548,7 +2673,7 @@ fn role_color(role: &Role, theme: &owlen_core::theme::Theme) -> Style {
 }
 
 /// Format tool output JSON into a nice human-readable format
-fn format_tool_output(content: &str) -> String {
+pub(crate) fn format_tool_output(content: &str) -> String {
     // Try to parse as JSON
     if let Ok(json) = serde_json::from_str::<serde_json::Value>(content) {
         let mut output = String::new();
@@ -2592,8 +2717,9 @@ fn format_tool_output(content: &str) -> String {
                 }
 
                 // Snippet (truncated if too long)
-                if let Some(snippet) = result.get("snippet").and_then(|v| v.as_str()) {
-                    if !snippet.is_empty() {
+                if let Some(snippet) = result.get("snippet").and_then(|v| v.as_str())
+                    && !snippet.is_empty()
+                {
                     // Strip HTML tags
                     let clean_snippet = snippet
                         .replace("<b>", "")
@@ -2609,7 +2735,6 @@ fn format_tool_output(content: &str) -> String {
                     };
                     output.push_str(&format!(" {}\n", truncated));
                 }
-                }
 
                 // URL (shortened if too long)
                 if let Some(url) = result.get("url").and_then(|v| v.as_str()) {
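
Many of the hunks in this file are mechanical consequences of the workspace's edition bump to 2024: a nested `if let` plus an inner condition collapses into a single let-chain guard. A standalone illustration of the pattern (not code from the repository):

```rust
// Edition-2024 let-chain: the pattern match and the extra condition share
// one guard, removing a level of nesting.
fn first_nonempty(values: &[Option<String>]) -> Option<&str> {
    for value in values {
        if let Some(text) = value
            && !text.trim().is_empty()
        {
            return Some(text.as_str());
        }
    }
    None
}

fn main() {
    let values = [None, Some("   ".to_string()), Some("hello".to_string())];
    assert_eq!(first_nonempty(&values), Some("hello"));
}
```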

View File

@@ -0,0 +1,56 @@
use owlen_tui::commands;
use owlen_tui::state::CommandPalette;
#[test]
fn palette_tracks_buffer_and_suggestions() {
    let mut palette = CommandPalette::new();
    assert_eq!(palette.buffer(), "");
    assert!(palette.suggestions().is_empty());

    palette.set_buffer("mo");
    assert_eq!(palette.buffer(), "mo");
    assert!(palette.suggestions().iter().all(|s| s.starts_with("mo")));

    palette.push_char('d');
    assert_eq!(palette.buffer(), "mod");
    assert!(palette.suggestions().iter().all(|s| s.starts_with("mod")));

    palette.pop_char();
    assert_eq!(palette.buffer(), "mo");
}

#[test]
fn palette_selection_wraps_safely() {
    let mut palette = CommandPalette::new();
    palette.set_buffer("m");
    let suggestions = palette.suggestions().len();
    assert!(suggestions > 0);

    palette.select_previous();
    assert_eq!(palette.selected_index(), 0);

    for _ in 0..suggestions * 2 {
        palette.select_next();
    }
    assert!(palette.selected_index() < palette.suggestions().len());
}

#[test]
fn palette_apply_selected_updates_buffer() {
    let mut palette = CommandPalette::new();
    palette.set_buffer("mo");
    palette.select_next();

    let selected = palette.apply_selected().expect("suggestion");
    assert_eq!(palette.buffer(), selected);
    assert!(selected.starts_with("m"));
}

#[test]
fn command_catalog_contains_expected_aliases() {
    let keywords: Vec<_> = commands::all().iter().map(|spec| spec.keyword).collect();
    assert!(keywords.contains(&"model"));
    assert!(keywords.contains(&"open"));
    assert!(keywords.contains(&"close"));
    assert!(keywords.contains(&"sessions"));
    assert!(keywords.contains(&"new"));
}

View File

@@ -31,7 +31,7 @@ A simplified diagram of how components interact:
 
 ## Crate Breakdown
 
-- `owlen-core`: Defines the `LLMProvider` abstraction, routing, configuration, session state, encryption, and the MCP client layer. This crate is UI-agnostic and must not depend on concrete providers, terminals, or blocking I/O.
+- `owlen-core`: Defines the `LlmProvider` abstraction, routing, configuration, session state, encryption, and the MCP client layer. This crate is UI-agnostic and must not depend on concrete providers, terminals, or blocking I/O.
 - `owlen-tui`: Hosts all terminal UI behaviour (event loop, rendering, input modes) while delegating business logic and provider access back to `owlen-core`.
 - `owlen-cli`: Small entry point that parses command-line options, resolves configuration, selects providers, and launches either the TUI or headless agent flows by calling into `owlen-core`.
 - `owlen-mcp-llm-server`: Runs concrete providers (e.g., Ollama) behind an MCP boundary, exposing them as `generate_text` tools. This crate owns provider-specific wiring and process sandboxing.
@@ -131,3 +131,5 @@ The TUI is rendered on each iteration of the main application loop in `owlen-tui
 3. **UI Composition**: Inside the closure, the UI is built by composing `ratatui` widgets. The root UI is defined in `owlen_tui::ui::render`, which builds the main layout and calls other functions to render specific components (like the chat panel, input box, etc.).
 4. **State-Driven Rendering**: Each rendering function takes the current application state as an argument. It uses this state to decide what and how to render. For example, the border color of a panel might change if it is focused.
 5. **Buffer and Diff**: `ratatui` does not draw directly to the terminal. Instead, it renders the widgets to an in-memory buffer. It then compares this buffer to the previous buffer and only sends the necessary changes to the terminal. This is highly efficient and prevents flickering.
+
+The command palette and other modal helpers expose lightweight state structs in `owlen_tui::state`. These components keep business logic (suggestion filtering, selection state, etc.) independent from rendering, which in turn makes them straightforward to unit test.
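
A hypothetical reduction of that pattern (illustrative only, not code from the repository): a plain state struct is mutated by logic and merely read by rendering, so the logic can be tested without a terminal.

```rust
// State lives in a plain struct with no terminal dependencies.
struct Counter {
    value: i32,
}

impl Counter {
    fn increment(&mut self) {
        self.value += 1;
    }
}

// Rendering only reads the state; it never mutates it.
fn render(state: &Counter) -> String {
    format!("count: {}", state.value)
}

fn main() {
    let mut counter = Counter { value: 0 };
    counter.increment();
    assert_eq!(render(&counter), "count: 1");
}
```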

View File

@@ -36,7 +36,7 @@ In your new crate's `lib.rs`, you will define a struct for your provider and imp
 
 ```rust
 use async_trait::async_trait;
 use owlen_core::model::Model;
-use owlen_core::provider::Provider;
+use owlen_core::Provider;
 use owlen_core::session::Session;
 
 pub struct MyProvider;

View File

@@ -8,9 +8,9 @@
 //! - Ensure Ollama is running with a model available
 
 use owlen_core::{
+    Provider,
     mcp::remote_client::RemoteMcpClient,
     types::{ChatParameters, ChatRequest, Message, Role},
-    Provider,
 };
 use std::sync::Arc;
@@ -57,7 +57,7 @@ async fn main() -> Result<(), anyhow::Error> {
 
     // Send request and get response
     println!("\nAssistant: ");
-    let response = client.chat(request).await?;
+    let response = client.send_prompt(request).await?;
     println!("{}", response.message.content);
 
     if let Some(usage) = response.usage {