diff --git a/Cargo.toml b/Cargo.toml index 4f51d25..b75161b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,7 +14,7 @@ exclude = [] [workspace.package] version = "0.1.9" -edition = "2021" +edition = "2024" authors = ["Owlibou"] license = "AGPL-3.0" repository = "https://somegit.dev/Owlibou/owlen" @@ -43,7 +43,7 @@ serde_json = { version = "1.0" } # Utilities uuid = { version = "1.0", features = ["v4", "serde"] } anyhow = "1.0" -thiserror = "1.0" +thiserror = "2.0" nix = "0.29" which = "6.0" tempfile = "3.8" diff --git a/README.md b/README.md index 1ff5f41..dcf4212 100644 --- a/README.md +++ b/README.md @@ -30,6 +30,7 @@ The OWLEN interface features a clean, multi-panel layout with vim-inspired navig - **Streaming Responses**: Real-time token streaming from Ollama. - **Advanced Text Editing**: Multi-line input, history, and clipboard support. - **Session Management**: Save, load, and manage conversations. +- **Code Side Panel**: Switch to code mode (`:mode code`) and open files inline with `:open <path>` for LLM-assisted coding. - **Theming System**: 10 built-in themes and support for custom themes. - **Modular Architecture**: Extensible provider system (Ollama today, additional providers on the roadmap). - **Guided Setup**: `owlen config doctor` upgrades legacy configs and verifies your environment in seconds. @@ -118,6 +119,16 @@ You can also add custom themes alongside the config directory (e.g., `~/.config/ See the [themes/README.md](themes/README.md) for more details on theming. +## Testing + +Owlen uses standard Rust tooling for verification. Run the full test suite with: + +```bash +cargo test +``` + +Unit tests cover the command palette state machine, agent response parsing, and key MCP abstractions. Formatting and lint checks can be run with `cargo fmt --all` and `cargo clippy` respectively.
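+ +To iterate on a single crate while developing, you can scope the run with Cargo's standard package filter (shown here for the workspace's `owlen-core` crate; any workspace member works): + +```bash +cargo test -p owlen-core +```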
+ ## Roadmap Upcoming milestones focus on feature parity with modern code assistants while keeping Owlen local-first: diff --git a/crates/owlen-cli/src/cloud.rs b/crates/owlen-cli/src/cloud.rs index 49a7b98..8e31a63 100644 --- a/crates/owlen-cli/src/cloud.rs +++ b/crates/owlen-cli/src/cloud.rs @@ -1,13 +1,15 @@ +use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::sync::Arc; -use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{Context, Result, anyhow, bail}; use clap::Subcommand; +use owlen_core::LlmProvider; +use owlen_core::ProviderConfig; use owlen_core::config as core_config; use owlen_core::config::Config; use owlen_core::credentials::{ApiCredentials, CredentialManager, OLLAMA_CLOUD_CREDENTIAL_ID}; use owlen_core::encryption; -use owlen_core::provider::{LLMProvider, ProviderConfig}; use owlen_core::providers::OllamaProvider; use owlen_core::storage::StorageManager; @@ -219,11 +221,10 @@ fn ensure_provider_entry(config: &mut Config, provider: &str, endpoint: &str) { if provider == "ollama" && config.providers.contains_key("ollama-cloud") && !config.providers.contains_key("ollama") + && let Some(mut legacy) = config.providers.remove("ollama-cloud") { - if let Some(mut legacy) = config.providers.remove("ollama-cloud") { - legacy.provider_type = "ollama".to_string(); - config.providers.insert("ollama".to_string(), legacy); - } + legacy.provider_type = "ollama".to_string(); + config.providers.insert("ollama".to_string(), legacy); } core_config::ensure_provider_config(config, provider); @@ -247,12 +248,24 @@ fn canonical_provider_name(provider: &str) -> String { } } +pub(crate) fn set_env_var<K, V>(key: K, value: V) +where + K: AsRef<OsStr>, + V: AsRef<OsStr>, +{ + // Safety: the CLI updates process-wide environment variables during startup while no + // other threads are mutating the environment. + unsafe { + std::env::set_var(key, value); + } +} + fn set_env_if_missing(var: &str, value: &str) { if std::env::var(var) .map(|v| v.trim().is_empty()) .unwrap_or(true) { - std::env::set_var(var, value); + set_env_var(var, value); } } @@ -302,18 +315,18 @@ fn unlock_vault(path: &Path) -> Result<VaultHandle> { use std::env; if path.exists() { - if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD") { - if !password.trim().is_empty() { - return encryption::unlock_with_password(path.to_path_buf(), &password) - .context("Failed to unlock vault with OWLEN_MASTER_PASSWORD"); - } + if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD") + && !password.trim().is_empty() + { + return encryption::unlock_with_password(path.to_path_buf(), &password) + .context("Failed to unlock vault with OWLEN_MASTER_PASSWORD"); } for attempt in 0..3 { let password = encryption::prompt_password("Enter master password: ")?; match encryption::unlock_with_password(path.to_path_buf(), &password) { Ok(handle) => { - env::set_var("OWLEN_MASTER_PASSWORD", password); + set_env_var("OWLEN_MASTER_PASSWORD", password); return Ok(handle); } Err(err) => { @@ -334,7 +347,7 @@ fn unlock_vault(path: &Path) -> Result<VaultHandle> { .unwrap_or(true) { let password = encryption::prompt_password("Cache master password for this session: ")?; - env::set_var("OWLEN_MASTER_PASSWORD", password); + set_env_var("OWLEN_MASTER_PASSWORD", password); } Ok(handle) } @@ -343,34 +356,34 @@ async fn hydrate_api_key( config: &mut Config, manager: Option<&Arc<CredentialManager>>, ) -> Result<Option<String>> { - if let Some(manager) = manager { - if let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await?
{ - let key = credentials.api_key.trim().to_string(); - if !key.is_empty() { - set_env_if_missing("OLLAMA_API_KEY", &key); - set_env_if_missing("OLLAMA_CLOUD_API_KEY", &key); - } - - if let Some(cfg) = provider_entry_mut(config) { - if cfg.base_url.is_none() && !credentials.endpoint.trim().is_empty() { - cfg.base_url = Some(credentials.endpoint); - } - } - return Ok(Some(key)); + if let Some(manager) = manager + && let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await? + { + let key = credentials.api_key.trim().to_string(); + if !key.is_empty() { + set_env_if_missing("OLLAMA_API_KEY", &key); + set_env_if_missing("OLLAMA_CLOUD_API_KEY", &key); } + + if let Some(cfg) = provider_entry_mut(config) + && cfg.base_url.is_none() + && !credentials.endpoint.trim().is_empty() + { + cfg.base_url = Some(credentials.endpoint); + } + return Ok(Some(key)); } - if let Some(cfg) = provider_entry(config) { - if let Some(key) = cfg + if let Some(cfg) = provider_entry(config) + && let Some(key) = cfg .api_key .as_ref() .map(|value| value.trim()) .filter(|value| !value.is_empty()) - { - set_env_if_missing("OLLAMA_API_KEY", key); - set_env_if_missing("OLLAMA_CLOUD_API_KEY", key); - return Ok(Some(key.to_string())); - } + { + set_env_if_missing("OLLAMA_API_KEY", key); + set_env_if_missing("OLLAMA_CLOUD_API_KEY", key); + return Ok(Some(key.to_string())); } Ok(None) } diff --git a/crates/owlen-cli/src/main.rs b/crates/owlen-cli/src/main.rs index 23a6f41..6ff9490 100644 --- a/crates/owlen-cli/src/main.rs +++ b/crates/owlen-cli/src/main.rs @@ -2,24 +2,23 @@ mod cloud; -use anyhow::{anyhow, Result}; +use anyhow::{Result, anyhow}; use async_trait::async_trait; use clap::{Parser, Subcommand}; -use cloud::{load_runtime_credentials, CloudCommand}; +use cloud::{CloudCommand, load_runtime_credentials, set_env_var}; use owlen_core::config as core_config; use owlen_core::{ + ChatStream, Error, Provider, config::{Config, McpMode}, mcp::remote_client::RemoteMcpClient, mode::Mode, - provider::ChatStream, providers::OllamaProvider, session::SessionController, storage::StorageManager, types::{ChatRequest, ChatResponse, Message, ModelInfo}, - Error, Provider, }; use owlen_tui::tui_controller::{TuiController, TuiRequest}; -use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent}; +use owlen_tui::{AppState, ChatApp, Event, EventHandler, SessionEvent, config, ui}; use std::any::Any; use std::borrow::Cow; use std::io; @@ -30,10 +29,10 @@ use tokio_util::sync::CancellationToken; use crossterm::{ event::{DisableBracketedPaste, DisableMouseCapture, EnableBracketedPaste, EnableMouseCapture}, execute, - terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, + terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode}, }; use futures::stream; -use ratatui::{prelude::CrosstermBackend, Terminal}; +use ratatui::{Terminal, prelude::CrosstermBackend}; /// Owlen - Terminal UI for LLM chat #[derive(Parser, Debug)] @@ -132,7 +131,9 @@ async fn run_command(command: OwlenCommand) -> Result<()> { OwlenCommand::Config(config_cmd) => run_config_command(config_cmd), OwlenCommand::Cloud(cloud_cmd) => cloud::run_cloud_command(cloud_cmd).await, OwlenCommand::Upgrade => { - println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force"); + println!( + "To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force" + ); println!( "If you installed from the AUR, use your package 
manager (e.g., yay -S owlen-git)." ); @@ -333,11 +334,11 @@ impl Provider for OfflineProvider { }]) } - async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> { + async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse> { Ok(self.friendly_response(&request.model)) } - async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> { + async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream> { let response = self.friendly_response(&request.model); Ok(Box::pin(stream::iter(vec![Ok(response)]))) } @@ -363,7 +364,7 @@ async fn main() -> Result<()> { let initial_mode = if code { Mode::Code } else { Mode::Chat }; // Set auto-consent for TUI mode to prevent blocking stdin reads - std::env::set_var("OWLEN_AUTO_CONSENT", "1"); + set_env_var("OWLEN_AUTO_CONSENT", "1"); let color_support = detect_terminal_color_support(); // Load configuration (or fall back to defaults) for the session controller. diff --git a/crates/owlen-core/src/agent.rs b/crates/owlen-core/src/agent.rs index 7c8b5f1..0e455d7 100644 --- a/crates/owlen-core/src/agent.rs +++ b/crates/owlen-core/src/agent.rs @@ -3,8 +3,8 @@ //! This module provides the core agent orchestration logic that allows an LLM //! to reason about tasks, execute tools, and observe results in an iterative loop. +use crate::Provider; use crate::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse}; -use crate::provider::Provider; use crate::types::{ChatParameters, ChatRequest, Message}; use crate::{Error, Result}; use serde::{Deserialize, Serialize}; @@ -189,7 +189,7 @@ impl AgentExecutor { fn build_system_prompt(&self, tools: &[McpToolDescriptor]) -> String { let mut prompt = String::from( "You are an AI assistant that uses the ReAct (Reasoning and Acting) pattern to solve tasks.\n\n\ - You have access to the following tools:\n\n" + You have access to the following tools:\n\n", ); for tool in tools { @@ -230,7 +230,7 @@ impl AgentExecutor { tools: None, }; - let response = self.llm_client.chat(request).await?; + let response = self.llm_client.send_prompt(request).await?; Ok(response.message.content) } @@ -364,13 +364,13 @@ impl AgentExecutor { #[cfg(test)] mod tests { use super::*; + use crate::llm::test_utils::MockProvider; use crate::mcp::test_utils::MockMcpClient; - use crate::provider::test_utils::MockProvider; #[test] fn test_parse_tool_call() { let executor = AgentExecutor { - llm_client: Arc::new(MockProvider), + llm_client: Arc::new(MockProvider::default()), tool_client: Arc::new(MockMcpClient), config: AgentConfig::default(), }; @@ -399,7 +399,7 @@ ACTION_INPUT: {"query": "Rust programming language"} #[test] fn test_parse_final_answer() { let executor = AgentExecutor { - llm_client: Arc::new(MockProvider), + llm_client: Arc::new(MockProvider::default()), tool_client: Arc::new(MockMcpClient), config: AgentConfig::default(), }; diff --git a/crates/owlen-core/src/config.rs b/crates/owlen-core/src/config.rs index 65dd99f..3c9ed7c 100644 --- a/crates/owlen-core/src/config.rs +++ b/crates/owlen-core/src/config.rs @@ -1,6 +1,6 @@ -use crate::mode::ModeConfig; -use crate::provider::ProviderConfig; +use crate::ProviderConfig; use crate::Result; +use crate::mode::ModeConfig; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::fs; @@ -120,13 +120,13 @@ impl Config { .and_then(|value| value.as_str()) .unwrap_or("0.0.0") .to_string(); - if let Some(agent_table) = parsed.get("agent").and_then(|value| value.as_table()) { - if agent_table.contains_key("max_tool_calls") { - log::warn!( - "Configuration option agent.max_tool_calls
is deprecated and ignored. \ - The agent now uses agent.max_iterations." - ); - } + if let Some(agent_table) = parsed.get("agent").and_then(|value| value.as_table()) + && agent_table.contains_key("max_tool_calls") + { + log::warn!( + "Configuration option agent.max_tool_calls is deprecated and ignored. \ + The agent now uses agent.max_iterations." + ); } let mut config: Config = parsed .try_into() @@ -180,10 +180,10 @@ impl Config { &'a self, models: &'a [crate::types::ModelInfo], ) -> Option<&'a str> { - if let Some(model) = self.general.default_model.as_deref() { - if models.iter().any(|m| m.id == model || m.name == model) { - return Some(model); - } + if let Some(model) = self.general.default_model.as_deref() + && models.iter().any(|m| m.id == model || m.name == model) + { + return Some(model); } if let Some(first) = models.first() { @@ -963,9 +963,10 @@ mod tests { #[cfg(target_os = "macos")] { // macOS should use ~/Library/Application Support - assert!(path - .to_string_lossy() - .contains("Library/Application Support")); + assert!( + path.to_string_lossy() + .contains("Library/Application Support") + ); } println!("Config conversation path: {}", path.display()); diff --git a/crates/owlen-core/src/consent.rs b/crates/owlen-core/src/consent.rs index 32607f1..e45fcf7 100644 --- a/crates/owlen-core/src/consent.rs +++ b/crates/owlen-core/src/consent.rs @@ -58,17 +58,16 @@ impl ConsentManager { /// Load consent records from vault storage pub fn from_vault(vault: &Arc<Mutex<VaultHandle>>) -> Self { let guard = vault.lock().expect("Vault mutex poisoned"); - if let Some(consent_data) = guard.settings().get("consent_records") { - if let Ok(permanent_records) = + if let Some(consent_data) = guard.settings().get("consent_records") + && let Ok(permanent_records) = serde_json::from_value::<HashMap<String, ConsentRecord>>(consent_data.clone()) - { - return Self { - permanent_records, - session_records: HashMap::new(), - once_records: HashMap::new(), - pending_requests: HashMap::new(), - }; - } + { + return Self { + permanent_records, + session_records: HashMap::new(), + once_records: HashMap::new(), + pending_requests: HashMap::new(), + }; } Self::default() } @@ -91,17 +90,17 @@ impl ConsentManager { endpoints: Vec<String>, ) -> Result<ConsentScope> { // Check if already granted permanently - if let Some(existing) = self.permanent_records.get(tool_name) { - if existing.scope == ConsentScope::Permanent { - return Ok(ConsentScope::Permanent); - } + if let Some(existing) = self.permanent_records.get(tool_name) + && existing.scope == ConsentScope::Permanent + { + return Ok(ConsentScope::Permanent); } // Check if granted for session - if let Some(existing) = self.session_records.get(tool_name) { - if existing.scope == ConsentScope::Session { - return Ok(ConsentScope::Session); - } + if let Some(existing) = self.session_records.get(tool_name) + && existing.scope == ConsentScope::Session + { + return Ok(ConsentScope::Session); } // Check if request is already pending (prevent duplicate prompts) diff --git a/crates/owlen-core/src/conversation.rs b/crates/owlen-core/src/conversation.rs index 4adb650..4c1bf40 100644 --- a/crates/owlen-core/src/conversation.rs +++ b/crates/owlen-core/src/conversation.rs @@ -1,6 +1,6 @@ +use crate::Result; use crate::storage::StorageManager; use crate::types::{Conversation, Message}; -use crate::Result; use serde_json::{Number, Value}; use std::collections::{HashMap, VecDeque}; use std::time::{Duration, Instant}; @@ -213,6 +213,34 @@ impl ConversationManager { Ok(()) } + pub fn cancel_stream(&mut self, message_id: Uuid, notice: impl Into<String>) ->
Result<()> { + let index = self + .message_index + .get(&message_id) + .copied() + .ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?; + + if let Some(message) = self.active_mut().messages.get_mut(index) { + message.content = notice.into(); + message.timestamp = std::time::SystemTime::now(); + message + .metadata + .insert(STREAMING_FLAG.to_string(), Value::Bool(false)); + message.metadata.remove(PLACEHOLDER_FLAG); + let millis = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap_or_default() + .as_millis() as u64; + message.metadata.insert( + LAST_CHUNK_TS.to_string(), + Value::Number(Number::from(millis)), + ); + } + + self.streaming.remove(&message_id); + Ok(()) + } + /// Set tool calls on a streaming message pub fn set_tool_calls_on_message( &mut self, diff --git a/crates/owlen-core/src/credentials.rs b/crates/owlen-core/src/credentials.rs index 2ae327e..785b43e 100644 --- a/crates/owlen-core/src/credentials.rs +++ b/crates/owlen-core/src/credentials.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use serde::{Deserialize, Serialize}; -use crate::{storage::StorageManager, Error, Result}; +use crate::{Error, Result, storage::StorageManager}; #[derive(Serialize, Deserialize, Debug)] pub struct ApiCredentials { diff --git a/crates/owlen-core/src/encryption.rs b/crates/owlen-core/src/encryption.rs index f539a2b..3e61ccc 100644 --- a/crates/owlen-core/src/encryption.rs +++ b/crates/owlen-core/src/encryption.rs @@ -3,10 +3,10 @@ use std::fs; use std::path::PathBuf; use aes_gcm::{ - aead::{Aead, KeyInit}, Aes256Gcm, Nonce, + aead::{Aead, KeyInit}, }; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use ring::digest; use ring::rand::{SecureRandom, SystemRandom}; use serde::{Deserialize, Serialize}; diff --git a/crates/owlen-core/src/input.rs b/crates/owlen-core/src/input.rs index c797aa3..ebe2e3f 100644 --- a/crates/owlen-core/src/input.rs +++ b/crates/owlen-core/src/input.rs @@ -191,6 +191,12 @@ impl InputBuffer { self.history.pop_back(); } } + + /// Clear saved input history entries. 
+ pub fn clear_history(&mut self) { + self.history.clear(); + self.history_index = None; + } } fn prev_char_boundary(buffer: &str, cursor: usize) -> usize { diff --git a/crates/owlen-core/src/lib.rs b/crates/owlen-core/src/lib.rs index f49b1ea..8b1692b 100644 --- a/crates/owlen-core/src/lib.rs +++ b/crates/owlen-core/src/lib.rs @@ -11,14 +11,15 @@ pub mod credentials; pub mod encryption; pub mod formatting; pub mod input; +pub mod llm; pub mod mcp; pub mod mode; pub mod model; -pub mod provider; pub mod providers; pub mod router; pub mod sandbox; pub mod session; +pub mod state; pub mod storage; pub mod theme; pub mod tools; @@ -36,18 +37,20 @@ pub use encryption::*; pub use formatting::*; pub use input::*; // Export MCP types but exclude test_utils to avoid ambiguity +pub use llm::{ + ChatStream, LlmProvider, Provider, ProviderConfig, ProviderRegistry, send_via_stream, +}; pub use mcp::{ - client, factory, failover, permission, protocol, remote_client, LocalMcpClient, McpServer, - McpToolCall, McpToolDescriptor, McpToolResponse, + LocalMcpClient, McpServer, McpToolCall, McpToolDescriptor, McpToolResponse, client, factory, + failover, permission, protocol, remote_client, }; pub use mode::*; pub use model::*; -// Export provider types but exclude test_utils to avoid ambiguity -pub use provider::{ChatStream, LLMProvider, Provider, ProviderConfig, ProviderRegistry}; pub use providers::*; pub use router::*; pub use sandbox::*; pub use session::*; +pub use state::*; pub use theme::*; pub use tools::*; pub use validation::*; diff --git a/crates/owlen-core/src/llm/mod.rs b/crates/owlen-core/src/llm/mod.rs new file mode 100644 index 0000000..a03d9b9 --- /dev/null +++ b/crates/owlen-core/src/llm/mod.rs @@ -0,0 +1,297 @@ +//! LLM provider abstractions and registry. +//! +//! This module defines the provider trait hierarchy along with helpers that +//! make it easy to register concrete LLM backends and access them through +//! dynamic dispatch when wiring the application together. + +use crate::{Error, Result, types::*}; +use anyhow::anyhow; +use futures::{Stream, StreamExt}; +use serde_json::Value; +use std::any::Any; +use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; + +/// A boxed stream of chat responses produced by a provider. +pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>; + +/// Trait implemented by every LLM backend Owlen can speak to. +/// +/// Providers expose both one-shot and streaming prompt APIs. Concrete +/// implementations typically live in `crate::providers`. +pub trait LlmProvider: Send + Sync + 'static + Any + Sized { + /// Stream type returned by [`Self::stream_prompt`]. + type Stream: Stream<Item = Result<ChatResponse>> + Send + 'static; + + type ListModelsFuture<'a>: Future<Output = Result<Vec<ModelInfo>>> + Send + where + Self: 'a; + + type SendPromptFuture<'a>: Future<Output = Result<ChatResponse>> + Send + where + Self: 'a; + + type StreamPromptFuture<'a>: Future<Output = Result<Self::Stream>> + Send + where + Self: 'a; + + type HealthCheckFuture<'a>: Future<Output = Result<()>> + Send + where + Self: 'a; + + /// Human-readable provider identifier. + fn name(&self) -> &str; + + /// Return metadata on all models exposed by this provider. + fn list_models(&self) -> Self::ListModelsFuture<'_>; + + /// Issue a prompt and wait for the provider to return the full response. + fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_>; + + /// Issue a prompt and receive responses incrementally as a stream. + fn stream_prompt(&self, request: ChatRequest) -> Self::StreamPromptFuture<'_>; + + /// Perform a lightweight health check.
+ fn health_check(&self) -> Self::HealthCheckFuture<'_>; + + /// Provider-specific configuration schema (optional). + fn config_schema(&self) -> serde_json::Value { + serde_json::json!({}) + } + + /// Access the provider as an `Any` for downcasting. + fn as_any(&self) -> &(dyn Any + Send + Sync) { + self + } +} + +/// Helper that requests a streamed generation and yields the first chunk as a +/// regular response. This is handy for providers that only implement the +/// streaming API. +pub async fn send_via_stream<'a, P>(provider: &'a P, request: ChatRequest) -> Result<ChatResponse> +where + P: LlmProvider + 'a, +{ + let stream = provider.stream_prompt(request).await?; + let mut boxed: ChatStream = Box::pin(stream); + match boxed.next().await { + Some(Ok(response)) => Ok(response), + Some(Err(err)) => Err(err), + None => Err(Error::Provider(anyhow!( + "Empty chat stream from provider {}", + provider.name() + ))), + } +} + +/// Object-safe wrapper around [`LlmProvider`] for dynamic dispatch scenarios. +#[async_trait::async_trait] +pub trait Provider: Send + Sync { + fn name(&self) -> &str; + + async fn list_models(&self) -> Result<Vec<ModelInfo>>; + + async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse>; + + async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream>; + + async fn health_check(&self) -> Result<()>; + + fn config_schema(&self) -> serde_json::Value { + serde_json::json!({}) + } + + fn as_any(&self) -> &(dyn Any + Send + Sync); } + +#[async_trait::async_trait] +impl<T> Provider for T +where + T: LlmProvider, +{ + fn name(&self) -> &str { + LlmProvider::name(self) + } + + async fn list_models(&self) -> Result<Vec<ModelInfo>> { + LlmProvider::list_models(self).await + } + + async fn send_prompt(&self, request: ChatRequest) -> Result<ChatResponse> { + LlmProvider::send_prompt(self, request).await + } + + async fn stream_prompt(&self, request: ChatRequest) -> Result<ChatStream> { + let stream = LlmProvider::stream_prompt(self, request).await?; + Ok(Box::pin(stream)) + } + + async fn health_check(&self) -> Result<()> { + LlmProvider::health_check(self).await + } + + fn config_schema(&self) -> serde_json::Value { + LlmProvider::config_schema(self) + } + + fn as_any(&self) -> &(dyn Any + Send + Sync) { + LlmProvider::as_any(self) + } +} + +/// Runtime configuration for a provider instance. +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ProviderConfig { + /// Provider type identifier. + pub provider_type: String, + /// Base URL for API calls. + pub base_url: Option<String>, + /// API key or token material. + pub api_key: Option<String>, + /// Additional provider-specific configuration. + #[serde(flatten)] + pub extra: HashMap<String, Value>, +} + +/// Static registry of providers available to the application. +pub struct ProviderRegistry { + providers: HashMap<String, Arc<dyn Provider>>, } + +impl ProviderRegistry { + pub fn new() -> Self { + Self { + providers: HashMap::new(), + } + } + + pub fn register<P: LlmProvider>(&mut self, provider: P) { + self.register_arc(Arc::new(provider)); + } + + pub fn register_arc(&mut self, provider: Arc<dyn Provider>) { + let name = provider.name().to_string(); + self.providers.insert(name, provider); + } + + pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> { + self.providers.get(name).cloned() + } + + pub fn list_providers(&self) -> Vec<String> { + self.providers.keys().cloned().collect() + } + + pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> { + let mut all_models = Vec::new(); + + for provider in self.providers.values() { + match provider.list_models().await { + Ok(mut models) => all_models.append(&mut models), + Err(_) => { + // Ignore failing providers and continue.
+ } + } + } + + Ok(all_models) + } +} + +impl Default for ProviderRegistry { + fn default() -> Self { + Self::new() + } +} + +/// Test utilities for constructing mock providers. +#[cfg(test)] +pub mod test_utils { + use super::*; + use futures::stream; + use std::sync::atomic::{AtomicUsize, Ordering}; + + /// Simple provider stub that always returns the same response. + pub struct MockProvider { + name: String, + response: ChatResponse, + call_count: AtomicUsize, + } + + impl MockProvider { + pub fn new(name: impl Into, response: ChatResponse) -> Self { + Self { + name: name.into(), + response, + call_count: AtomicUsize::new(0), + } + } + + pub fn call_count(&self) -> usize { + self.call_count.load(Ordering::Relaxed) + } + } + + impl Default for MockProvider { + fn default() -> Self { + Self::new( + "mock-provider", + ChatResponse { + message: Message::assistant("mock response".to_string()), + usage: None, + is_streaming: false, + is_final: true, + }, + ) + } + } + + impl LlmProvider for MockProvider { + type Stream = stream::Iter>>; + + type ListModelsFuture<'a> + = futures::future::Ready>> + where + Self: 'a; + + type SendPromptFuture<'a> + = futures::future::Ready> + where + Self: 'a; + + type StreamPromptFuture<'a> + = futures::future::Ready> + where + Self: 'a; + + type HealthCheckFuture<'a> + = futures::future::Ready> + where + Self: 'a; + + fn name(&self) -> &str { + &self.name + } + + fn list_models(&self) -> Self::ListModelsFuture<'_> { + futures::future::ready(Ok(vec![])) + } + + fn send_prompt(&self, _request: ChatRequest) -> Self::SendPromptFuture<'_> { + self.call_count.fetch_add(1, Ordering::Relaxed); + futures::future::ready(Ok(self.response.clone())) + } + + fn stream_prompt(&self, _request: ChatRequest) -> Self::StreamPromptFuture<'_> { + self.call_count.fetch_add(1, Ordering::Relaxed); + let response = self.response.clone(); + futures::future::ready(Ok(stream::iter(vec![Ok(response)]))) + } + + fn health_check(&self) -> Self::HealthCheckFuture<'_> { + futures::future::ready(Ok(())) + } + } +} diff --git a/crates/owlen-core/src/mcp.rs b/crates/owlen-core/src/mcp.rs index 24a99ae..7a4ecf7 100644 --- a/crates/owlen-core/src/mcp.rs +++ b/crates/owlen-core/src/mcp.rs @@ -1,7 +1,7 @@ +use crate::Result; use crate::mode::Mode; use crate::tools::registry::ToolRegistry; use crate::validation::SchemaValidator; -use crate::Result; use async_trait::async_trait; pub use client::McpClient; use serde::{Deserialize, Serialize}; @@ -142,6 +142,11 @@ impl McpClient for LocalMcpClient { async fn call_tool(&self, call: McpToolCall) -> Result { self.server.call_tool(call).await } + + async fn set_mode(&self, mode: Mode) -> Result<()> { + self.server.set_mode(mode).await; + Ok(()) + } } #[cfg(test)] diff --git a/crates/owlen-core/src/mcp/client.rs b/crates/owlen-core/src/mcp/client.rs index 740251a..85a91b0 100644 --- a/crates/owlen-core/src/mcp/client.rs +++ b/crates/owlen-core/src/mcp/client.rs @@ -1,5 +1,5 @@ use super::{McpToolCall, McpToolDescriptor, McpToolResponse}; -use crate::Result; +use crate::{Result, mode::Mode}; use async_trait::async_trait; /// Trait for a client that can interact with an MCP server @@ -10,6 +10,11 @@ pub trait McpClient: Send + Sync { /// Call a tool on the server async fn call_tool(&self, call: McpToolCall) -> Result; + + /// Update the server with the active operating mode. + async fn set_mode(&self, _mode: Mode) -> Result<()> { + Ok(()) + } } // Re-export the concrete implementation that supports stdio and HTTP transports. 
diff --git a/crates/owlen-core/src/mcp/factory.rs b/crates/owlen-core/src/mcp/factory.rs index f1de5a8..a17a99e 100644 --- a/crates/owlen-core/src/mcp/factory.rs +++ b/crates/owlen-core/src/mcp/factory.rs @@ -3,7 +3,7 @@ /// Provides a unified interface for creating MCP clients based on configuration. /// Supports switching between local (in-process) and remote (STDIO) execution modes. use super::client::McpClient; -use super::{remote_client::RemoteMcpClient, LocalMcpClient}; +use super::{LocalMcpClient, remote_client::RemoteMcpClient}; use crate::config::{Config, McpMode}; use crate::tools::registry::ToolRegistry; use crate::validation::SchemaValidator; @@ -109,8 +109,8 @@ impl McpClientFactory { #[cfg(test)] mod tests { use super::*; - use crate::config::McpServerConfig; use crate::Error; + use crate::config::McpServerConfig; fn build_factory(config: Config) -> McpClientFactory { let ui = Arc::new(crate::ui::NoOpUiController); diff --git a/crates/owlen-core/src/mcp/permission.rs b/crates/owlen-core/src/mcp/permission.rs index c27aced..8642ac4 100644 --- a/crates/owlen-core/src/mcp/permission.rs +++ b/crates/owlen-core/src/mcp/permission.rs @@ -4,8 +4,8 @@ /// It wraps MCP clients to filter/whitelist tool calls, log invocations, and prompt for consent. use super::client::McpClient; use super::{McpToolCall, McpToolDescriptor, McpToolResponse}; -use crate::config::Config; use crate::{Error, Result}; +use crate::{config::Config, mode::Mode}; use async_trait::async_trait; use std::collections::HashSet; use std::sync::Arc; @@ -145,6 +145,10 @@ impl McpClient for PermissionLayer { result } + + async fn set_mode(&self, mode: Mode) -> Result<()> { + self.inner.set_mode(mode).await + } } #[cfg(test)] diff --git a/crates/owlen-core/src/mcp/remote_client.rs b/crates/owlen-core/src/mcp/remote_client.rs index 33b8188..f55b6cb 100644 --- a/crates/owlen-core/src/mcp/remote_client.rs +++ b/crates/owlen-core/src/mcp/remote_client.rs @@ -1,24 +1,25 @@ use super::protocol::methods; use super::protocol::{ - RequestId, RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, PROTOCOL_VERSION, + PROTOCOL_VERSION, RequestId, RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, }; use super::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse}; use crate::consent::{ConsentManager, ConsentScope}; use crate::tools::{Tool, WebScrapeTool, WebSearchTool}; use crate::types::ModelInfo; use crate::types::{ChatResponse, Message, Role}; -use crate::{provider::chat_via_stream, Error, LLMProvider, Result}; -use futures::{future::BoxFuture, stream, StreamExt}; +use crate::{Error, LlmProvider, Result, mode::Mode, send_via_stream}; +use anyhow::anyhow; +use futures::{StreamExt, future::BoxFuture, stream}; use reqwest::Client as HttpClient; use serde_json::json; use std::path::Path; -use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::Arc; +use std::sync::atomic::{AtomicU64, Ordering}; use std::time::Duration; use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; use tokio::process::{Child, Command}; use tokio::sync::Mutex; -use tokio_tungstenite::{connect_async, MaybeTlsStream, WebSocketStream}; +use tokio_tungstenite::{MaybeTlsStream, WebSocketStream, connect_async}; use tungstenite::protocol::Message as WsMessage; /// Client that talks to the external `owlen-mcp-server` over STDIO, HTTP, or WebSocket. @@ -203,10 +204,10 @@ impl RemoteMcpClient { .await .map_err(|e| Error::Network(e.to_string()))?; // Try to parse as success then error. 
- if let Ok(r) = serde_json::from_str::<RpcResponse>(&text) { - if r.id == id { - return Ok(r.result); - } + if let Ok(r) = serde_json::from_str::<RpcResponse>(&text) + && r.id == id + { + return Ok(r.result); } let err_resp: RpcErrorResponse = serde_json::from_str(&text).map_err(Error::Serialization)?; @@ -249,10 +250,10 @@ impl RemoteMcpClient { }; // Try to parse as success then error. - if let Ok(r) = serde_json::from_str::<RpcResponse>(&response_text) { - if r.id == id { - return Ok(r.result); - } + if let Ok(r) = serde_json::from_str::<RpcResponse>(&response_text) + && r.id == id + { + return Ok(r.result); } let err_resp: RpcErrorResponse = serde_json::from_str(&response_text).map_err(Error::Serialization)?; @@ -416,7 +417,9 @@ impl McpClient for RemoteMcpClient { // Auto‑grant consent for the web_search tool (permanent for this process). let consent_manager = std::sync::Arc::new(std::sync::Mutex::new(ConsentManager::new())); { - let mut cm = consent_manager.lock().unwrap(); + let mut cm = consent_manager + .lock() + .map_err(|_| Error::Provider(anyhow!("Consent manager mutex poisoned")))?; cm.grant_consent_with_scope( "web_search", Vec::new(), @@ -459,17 +462,22 @@ impl McpClient for RemoteMcpClient { let response: McpToolResponse = serde_json::from_value(result)?; Ok(response) } + + async fn set_mode(&self, _mode: Mode) -> Result<()> { + // Remote servers manage their own mode settings; treat as best-effort no-op. + Ok(()) + } } // --------------------------------------------------------------------------- // Provider implementation – forwards chat requests to the generate_text tool. // --------------------------------------------------------------------------- -impl LLMProvider for RemoteMcpClient { +impl LlmProvider for RemoteMcpClient { type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>; type ListModelsFuture<'a> = BoxFuture<'a, Result<Vec<ModelInfo>>>; - type ChatFuture<'a> = BoxFuture<'a, Result<ChatResponse>>; - type ChatStreamFuture<'a> = BoxFuture<'a, Result<Self::Stream>>; + type SendPromptFuture<'a> = BoxFuture<'a, Result<ChatResponse>>; + type StreamPromptFuture<'a> = BoxFuture<'a, Result<Self::Stream>>; type HealthCheckFuture<'a> = BoxFuture<'a, Result<()>>; fn name(&self) -> &str { @@ -484,11 +492,11 @@ impl LlmProvider for RemoteMcpClient { }) } - fn chat(&self, request: crate::types::ChatRequest) -> Self::ChatFuture<'_> { - Box::pin(chat_via_stream(self, request)) + fn send_prompt(&self, request: crate::types::ChatRequest) -> Self::SendPromptFuture<'_> { + Box::pin(send_via_stream(self, request)) } - fn chat_stream(&self, request: crate::types::ChatRequest) -> Self::ChatStreamFuture<'_> { + fn stream_prompt(&self, request: crate::types::ChatRequest) -> Self::StreamPromptFuture<'_> { Box::pin(async move { let args = serde_json::json!({ "messages": request.messages, diff --git a/crates/owlen-core/src/model.rs b/crates/owlen-core/src/model.rs index 6b2530c..9f5aebe 100644 --- a/crates/owlen-core/src/model.rs +++ b/crates/owlen-core/src/model.rs @@ -2,8 +2,8 @@ pub mod details; pub use details::{DetailedModelInfo, ModelInfoRetrievalError}; -use crate::types::ModelInfo; use crate::Result; +use crate::Result; +use crate::types::ModelInfo; use std::collections::HashMap; use std::future::Future; use std::sync::Arc; @@ -42,10 +42,8 @@ impl ModelManager { F: FnOnce() -> Fut, Fut: Future<Output = Result<Vec<ModelInfo>>>, { - if !force_refresh { - if let Some(models) = self.cached_if_fresh().await { - return Ok(models); - } + if !force_refresh && let Some(models) = self.cached_if_fresh().await { + return Ok(models); } let models = fetcher().await?; @@ -134,11 +132,11 @@ impl ModelDetailsCache { let mut inner = self.inner.write().await; // Remove prior mappings for this
model name (possibly different digest). - if let Some(previous_key) = inner.name_to_key.get(&info.name).cloned() { - if previous_key != key { - inner.by_key.remove(&previous_key); - inner.fetched_at.remove(&previous_key); - } + if let Some(previous_key) = inner.name_to_key.get(&info.name).cloned() + && previous_key != key + { + inner.by_key.remove(&previous_key); + inner.fetched_at.remove(&previous_key); } inner.fetched_at.insert(key.clone(), Instant::now()); diff --git a/crates/owlen-core/src/provider.rs b/crates/owlen-core/src/provider.rs deleted file mode 100644 index 7ce8162..0000000 --- a/crates/owlen-core/src/provider.rs +++ /dev/null @@ -1,380 +0,0 @@ -//! Provider traits and registries. - -use crate::{types::*, Error, Result}; -use anyhow::anyhow; -use futures::{Stream, StreamExt}; -use std::any::Any; -use std::future::Future; -use std::pin::Pin; -use std::sync::Arc; - -/// A stream of chat responses -pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>; - -/// Trait for LLM providers (Ollama, OpenAI, Anthropic, etc.) with zero-cost static dispatch. -pub trait LLMProvider: Send + Sync + 'static + Any + Sized { - type Stream: Stream<Item = Result<ChatResponse>> + Send + 'static; - - type ListModelsFuture<'a>: Future<Output = Result<Vec<ModelInfo>>> + Send - where - Self: 'a; - - type ChatFuture<'a>: Future<Output = Result<ChatResponse>> + Send - where - Self: 'a; - - type ChatStreamFuture<'a>: Future<Output = Result<Self::Stream>> + Send - where - Self: 'a; - - type HealthCheckFuture<'a>: Future<Output = Result<()>> + Send - where - Self: 'a; - - fn name(&self) -> &str; - - fn list_models(&self) -> Self::ListModelsFuture<'_>; - fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_>; - fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_>; - fn health_check(&self) -> Self::HealthCheckFuture<'_>; - - fn config_schema(&self) -> serde_json::Value { - serde_json::json!({}) - } - - fn as_any(&self) -> &(dyn Any + Send + Sync) { - self - } -} - -/// Helper that implements [`LLMProvider::chat`] in terms of [`LLMProvider::chat_stream`]. -pub async fn chat_via_stream<'a, P>(provider: &'a P, request: ChatRequest) -> Result<ChatResponse> -where - P: LLMProvider + 'a, -{ - let stream = provider.chat_stream(request).await?; - let mut boxed: ChatStream = Box::pin(stream); - match boxed.next().await { - Some(Ok(response)) => Ok(response), - Some(Err(err)) => Err(err), - None => Err(Error::Provider(anyhow!( - "Empty chat stream from provider {}", - provider.name() - ))), - } -} - -/// Object-safe wrapper trait for runtime-configurable provider usage. -#[async_trait::async_trait] -pub trait Provider: Send + Sync { - /// Get the name of this provider. - fn name(&self) -> &str; - - /// List available models from this provider. - async fn list_models(&self) -> Result<Vec<ModelInfo>>; - - /// Send a chat completion request. - async fn chat(&self, request: ChatRequest) -> Result<ChatResponse>; - - /// Send a streaming chat completion request. - async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream>; - - /// Check if the provider is available/healthy. - async fn health_check(&self) -> Result<()>; - - /// Get provider-specific configuration schema.
- fn config_schema(&self) -> serde_json::Value { - serde_json::json!({}) - } - - fn as_any(&self) -> &(dyn Any + Send + Sync); -} - -#[async_trait::async_trait] -impl<T> Provider for T -where - T: LLMProvider, -{ - fn name(&self) -> &str { - LLMProvider::name(self) - } - - async fn list_models(&self) -> Result<Vec<ModelInfo>> { - LLMProvider::list_models(self).await - } - - async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> { - LLMProvider::chat(self, request).await - } - - async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> { - let stream = LLMProvider::chat_stream(self, request).await?; - Ok(Box::pin(stream)) - } - - async fn health_check(&self) -> Result<()> { - LLMProvider::health_check(self).await - } - - fn config_schema(&self) -> serde_json::Value { - LLMProvider::config_schema(self) - } - - fn as_any(&self) -> &(dyn Any + Send + Sync) { - LLMProvider::as_any(self) - } -} - -/// Configuration for a provider -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct ProviderConfig { - /// Provider type identifier - pub provider_type: String, - /// Base URL for API calls - pub base_url: Option<String>, - /// API key or token - pub api_key: Option<String>, - /// Additional provider-specific configuration - #[serde(flatten)] - pub extra: std::collections::HashMap<String, serde_json::Value>, -} - -/// A registry of providers -pub struct ProviderRegistry { - providers: std::collections::HashMap<String, Arc<dyn Provider>>, -} - -impl ProviderRegistry { - /// Create a new provider registry - pub fn new() -> Self { - Self { - providers: std::collections::HashMap::new(), - } - } - - /// Register a provider using static dispatch. - pub fn register<P: LLMProvider>(&mut self, provider: P) { - self.register_arc(Arc::new(provider)); - } - - /// Register an already wrapped provider - pub fn register_arc(&mut self, provider: Arc<dyn Provider>) { - let name = provider.name().to_string(); - self.providers.insert(name, provider); - } - - /// Get a provider by name - pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> { - self.providers.get(name).cloned() - } - - /// List all registered provider names - pub fn list_providers(&self) -> Vec<String> { - self.providers.keys().cloned().collect() - } - - /// Get all models from all providers - pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> { - let mut all_models = Vec::new(); - - for provider in self.providers.values() { - match provider.list_models().await { - Ok(mut models) => all_models.append(&mut models), - Err(_) => { - // Continue with other providers - } - } - } - - Ok(all_models) - } -} - -impl Default for ProviderRegistry { - fn default() -> Self { - Self::new() - } -} - -#[cfg(test)] -pub mod test_utils { - use super::*; - use crate::types::{ChatRequest, ChatResponse, Message, ModelInfo, Role}; - use futures::stream; - use std::future::{ready, Ready}; - - /// Mock provider for testing - #[derive(Default)] - pub struct MockProvider; - - impl LLMProvider for MockProvider { - type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>; - type ListModelsFuture<'a> = Ready<Result<Vec<ModelInfo>>>; - type ChatFuture<'a> = Ready<Result<ChatResponse>>; - type ChatStreamFuture<'a> = Ready<Result<Self::Stream>>; - type HealthCheckFuture<'a> = Ready<Result<()>>; - - fn name(&self) -> &str { - "mock" - } - - fn list_models(&self) -> Self::ListModelsFuture<'_> { - ready(Ok(vec![ModelInfo { - id: "mock-model".to_string(), - provider: "mock".to_string(), - name: "mock-model".to_string(), - description: None, - context_window: None, - capabilities: vec![], - supports_tools: false, - }])) - } - - fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> { - ready(Ok(self.build_response(&request))) - } - - fn chat_stream(&self, request: ChatRequest) ->
Self::ChatStreamFuture<'_> { - let response = self.build_response(&request); - ready(Ok(stream::iter(vec![Ok(response)]))) - } - - fn health_check(&self) -> Self::HealthCheckFuture<'_> { - ready(Ok(())) - } - } - - impl MockProvider { - fn build_response(&self, request: &ChatRequest) -> ChatResponse { - let content = format!( - "Mock response to: {}", - request - .messages - .last() - .map(|m| m.content.clone()) - .unwrap_or_default() - ); - - ChatResponse { - message: Message::new(Role::Assistant, content), - usage: None, - is_streaming: false, - is_final: true, - } - } - } -} - -#[cfg(test)] -mod tests { - use super::test_utils::MockProvider; - use super::*; - use crate::types::{ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role}; - use futures::stream; - use std::future::{ready, Ready}; - use std::sync::Arc; - - struct StreamingProvider; - - impl LLMProvider for StreamingProvider { - type Stream = stream::Iter<std::vec::IntoIter<Result<ChatResponse>>>; - type ListModelsFuture<'a> = Ready<Result<Vec<ModelInfo>>>; - type ChatFuture<'a> = Ready<Result<ChatResponse>>; - type ChatStreamFuture<'a> = Ready<Result<Self::Stream>>; - type HealthCheckFuture<'a> = Ready<Result<()>>; - - fn name(&self) -> &str { - "streaming" - } - - fn list_models(&self) -> Self::ListModelsFuture<'_> { - ready(Ok(vec![ModelInfo { - id: "stream-model".to_string(), - provider: "streaming".to_string(), - name: "stream-model".to_string(), - description: None, - context_window: None, - capabilities: vec!["chat".to_string()], - supports_tools: false, - }])) - } - - fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> { - ready(Ok(self.response(&request))) - } - - fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> { - let response = self.response(&request); - ready(Ok(stream::iter(vec![Ok(response)]))) - } - - fn health_check(&self) -> Self::HealthCheckFuture<'_> { - ready(Ok(())) - } - } - - impl StreamingProvider { - fn response(&self, request: &ChatRequest) -> ChatResponse { - let reply = format!( - "echo:{}", - request - .messages - .last() - .map(|m| m.content.clone()) - .unwrap_or_default() - ); - ChatResponse { - message: Message::new(Role::Assistant, reply), - usage: None, - is_streaming: true, - is_final: true, - } - } - } - - #[tokio::test] - async fn default_chat_reads_from_stream() { - let provider = StreamingProvider; - let request = ChatRequest { - model: "stream-model".to_string(), - messages: vec![Message::new(Role::User, "ping".to_string())], - parameters: ChatParameters::default(), - tools: None, - }; - - let response = LLMProvider::chat(&provider, request) - .await - .expect("chat succeeded"); - assert_eq!(response.message.content, "echo:ping"); - assert!(response.is_final); - } - - #[tokio::test] - async fn registry_registers_static_provider() { - let mut registry = ProviderRegistry::new(); - registry.register(StreamingProvider); - - let provider = registry.get("streaming").expect("provider registered"); - let models = provider.list_models().await.expect("models listed"); - assert_eq!(models[0].id, "stream-model"); - } - - #[tokio::test] - async fn registry_accepts_dynamic_provider() { - let mut registry = ProviderRegistry::new(); - let provider: Arc<dyn Provider> = Arc::new(MockProvider::default()); - registry.register_arc(provider.clone()); - - let fetched = registry.get("mock").expect("mock provider present"); - let request = ChatRequest { - model: "mock-model".to_string(), - messages: vec![Message::new(Role::User, "hi".to_string())], - parameters: ChatParameters::default(), - tools: None, - }; - let response = Provider::chat(fetched.as_ref(), request) - .await - .expect("chat
succeeded"); - assert_eq!(response.message.content, "Mock response to: hi"); - } -} diff --git a/crates/owlen-core/src/providers/ollama.rs b/crates/owlen-core/src/providers/ollama.rs index 94058af..96fe8e5 100644 --- a/crates/owlen-core/src/providers/ollama.rs +++ b/crates/owlen-core/src/providers/ollama.rs @@ -7,32 +7,32 @@ use std::{ }; use anyhow::anyhow; -use futures::{future::join_all, future::BoxFuture, Stream, StreamExt}; +use futures::{Stream, StreamExt, future::BoxFuture, future::join_all}; use log::{debug, warn}; use ollama_rs::{ + Ollama, error::OllamaError, generation::chat::{ - request::ChatMessageRequest as OllamaChatRequest, ChatMessage as OllamaMessage, - ChatMessageResponse as OllamaChatResponse, MessageRole as OllamaRole, + ChatMessage as OllamaMessage, ChatMessageResponse as OllamaChatResponse, + MessageRole as OllamaRole, request::ChatMessageRequest as OllamaChatRequest, }, generation::tools::{ToolCall as OllamaToolCall, ToolCallFunction as OllamaToolCallFunction}, - headers::{HeaderMap, HeaderValue, AUTHORIZATION}, + headers::{AUTHORIZATION, HeaderMap, HeaderValue}, models::{LocalModel, ModelInfo as OllamaModelInfo, ModelOptions}, - Ollama, }; use reqwest::{Client, StatusCode, Url}; -use serde_json::{json, Map as JsonMap, Value}; +use serde_json::{Map as JsonMap, Value, json}; use uuid::Uuid; use crate::{ + Error, Result, config::GeneralSettings, + llm::{LlmProvider, ProviderConfig}, mcp::McpToolDescriptor, model::{DetailedModelInfo, ModelDetailsCache, ModelManager}, - provider::{LLMProvider, ProviderConfig}, types::{ ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role, TokenUsage, ToolCall, }, - Error, Result, }; const DEFAULT_TIMEOUT_SECS: u64 = 120; @@ -292,13 +292,13 @@ impl OllamaProvider { ); } - if let Some(descriptors) = &tools { - if !descriptors.is_empty() { - debug!( - "Ignoring {} MCP tool descriptors for Ollama request (tool calling unsupported)", - descriptors.len() - ); - } + if let Some(descriptors) = &tools + && !descriptors.is_empty() + { + debug!( + "Ignoring {} MCP tool descriptors for Ollama request (tool calling unsupported)", + descriptors.len() + ); } let converted_messages = messages.into_iter().map(convert_message).collect(); @@ -378,10 +378,10 @@ impl OllamaProvider { let family = pick_first_string(map, &["family", "model_family"]); let mut families = pick_string_list(map, &["families", "model_families"]); - if families.is_empty() { - if let Some(single) = family.clone() { - families.push(single); - } + if families.is_empty() + && let Some(single) = family.clone() + { + families.push(single); } let system = pick_first_string(map, &["system"]); @@ -529,32 +529,28 @@ impl OllamaProvider { StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => Error::Auth(format!( "Ollama rejected the request ({status}): {detail}. Check your API key and account permissions." )), - StatusCode::BAD_REQUEST => Error::InvalidInput(format!( - "{action} rejected by Ollama ({status}): {detail}" - )), + StatusCode::BAD_REQUEST => { + Error::InvalidInput(format!("{action} rejected by Ollama ({status}): {detail}")) + } StatusCode::SERVICE_UNAVAILABLE | StatusCode::GATEWAY_TIMEOUT => Error::Timeout( - format!( - "Ollama {action} timed out ({status}). The model may still be loading." - ), + format!("Ollama {action} timed out ({status}). 
The model may still be loading."), ), - _ => Error::Network(format!( - "Ollama {action} failed ({status}): {detail}" - )), + _ => Error::Network(format!("Ollama {action} failed ({status}): {detail}")), } } } -impl LLMProvider for OllamaProvider { +impl LlmProvider for OllamaProvider { type Stream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>; type ListModelsFuture<'a> = BoxFuture<'a, Result<Vec<ModelInfo>>> where Self: 'a; - type ChatFuture<'a> + type SendPromptFuture<'a> = BoxFuture<'a, Result<ChatResponse>> where Self: 'a; - type ChatStreamFuture<'a> + type StreamPromptFuture<'a> = BoxFuture<'a, Result<Self::Stream>> where Self: 'a; @@ -575,7 +571,7 @@ impl LlmProvider for OllamaProvider { }) } - fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> { + fn send_prompt(&self, request: ChatRequest) -> Self::SendPromptFuture<'_> { Box::pin(async move { let ChatRequest { model, @@ -597,7 +593,7 @@ impl LlmProvider for OllamaProvider { }) } - fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> { + fn stream_prompt(&self, request: ChatRequest) -> Self::StreamPromptFuture<'_> { Box::pin(async move { let ChatRequest { model, @@ -926,11 +922,7 @@ fn value_to_u64(value: &Value) -> Option<u64> { } else if let Some(v) = num.as_i64() { v.try_into().ok() } else if let Some(v) = num.as_f64() { - if v >= 0.0 { - Some(v as u64) - } else { - None - } + if v >= 0.0 { Some(v as u64) } else { None } } else { None } diff --git a/crates/owlen-core/src/router.rs b/crates/owlen-core/src/router.rs index 23f6eec..2060abf 100644 --- a/crates/owlen-core/src/router.rs +++ b/crates/owlen-core/src/router.rs @@ -1,6 +1,7 @@ //! Router for managing multiple providers and routing requests -use crate::{provider::*, types::*, Result}; +use crate::{Result, llm::*, types::*}; +use anyhow::anyhow; use std::sync::Arc; /// A router that can distribute requests across multiple providers @@ -32,7 +33,7 @@ impl Router { } /// Register a provider with the router - pub fn register_provider<P: LLMProvider>(&mut self, provider: P) { + pub fn register_provider<P: LlmProvider>(&mut self, provider: P) { self.registry.register(provider); } @@ -52,13 +53,13 @@ impl Router { /// Route a request to the appropriate provider pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> { let provider = self.find_provider_for_model(&request.model)?; - provider.chat(request).await + provider.send_prompt(request).await } /// Route a streaming request to the appropriate provider pub async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> { let provider = self.find_provider_for_model(&request.model)?; - provider.chat_stream(request).await + provider.stream_prompt(request).await } /// List all available models from all providers @@ -70,18 +71,18 @@ impl Router { fn find_provider_for_model(&self, model: &str) -> Result<Arc<dyn Provider>> { // Check routing rules first for rule in &self.routing_rules { - if self.matches_pattern(&rule.model_pattern, model) { - if let Some(provider) = self.registry.get(&rule.provider) { - return Ok(provider); - } + if self.matches_pattern(&rule.model_pattern, model) + && let Some(provider) = self.registry.get(&rule.provider) + { + return Ok(provider); } } // Fall back to default provider - if let Some(default) = &self.default_provider { - if let Some(provider) = self.registry.get(default) { - return Ok(provider); - } + if let Some(default) = &self.default_provider + && let Some(provider) = self.registry.get(default) + { + return Ok(provider); } // If no default, try to find any provider that has this model @@ -92,7 +93,7 @@ impl Router { } } - Err(crate::Error::Provider(anyhow::anyhow!( +
Err(crate::Error::Provider(anyhow!( "No provider found for model: {}", model ))) diff --git a/crates/owlen-core/src/sandbox.rs b/crates/owlen-core/src/sandbox.rs index f6282e6..4643d4a 100644 --- a/crates/owlen-core/src/sandbox.rs +++ b/crates/owlen-core/src/sandbox.rs @@ -2,7 +2,7 @@ use std::path::PathBuf; use std::process::{Command, Stdio}; use std::time::{Duration, Instant}; -use anyhow::{bail, Context, Result}; +use anyhow::{Context, Result, bail}; use tempfile::TempDir; /// Configuration options for sandboxed process execution. @@ -185,15 +185,13 @@ impl SandboxedProcess { if let Ok(output) = output { let version_str = String::from_utf8_lossy(&output.stdout); // Parse version like "bubblewrap 0.11.0" or "0.11.0" - if let Some(version_part) = version_str.split_whitespace().last() { - if let Some((major, rest)) = version_part.split_once('.') { - if let Some((minor, _patch)) = rest.split_once('.') { - if let (Ok(maj), Ok(min)) = (major.parse::<u32>(), minor.parse::<u32>()) { - // --rlimit-as was added in 0.12.0 - return maj > 0 || (maj == 0 && min >= 12); - } - } - } + if let Some(version_part) = version_str.split_whitespace().last() + && let Some((major, rest)) = version_part.split_once('.') + && let Some((minor, _patch)) = rest.split_once('.') + && let (Ok(maj), Ok(min)) = (major.parse::<u32>(), minor.parse::<u32>()) + { + // --rlimit-as was added in 0.12.0 + return maj > 0 || (maj == 0 && min >= 12); } } diff --git a/crates/owlen-core/src/session.rs b/crates/owlen-core/src/session.rs index 6336672..66c881b 100644 --- a/crates/owlen-core/src/session.rs +++ b/crates/owlen-core/src/session.rs @@ -5,25 +5,27 @@ use crate::credentials::CredentialManager; use crate::encryption::{self, VaultHandle}; use crate::formatting::MessageFormatter; use crate::input::InputBuffer; +use crate::mcp::McpToolCall; use crate::mcp::client::McpClient; use crate::mcp::factory::McpClientFactory; use crate::mcp::permission::PermissionLayer; -use crate::mcp::McpToolCall; +use crate::mode::Mode; use crate::model::{DetailedModelInfo, ModelManager}; -use crate::provider::{ChatStream, Provider}; use crate::providers::OllamaProvider; use crate::storage::{SessionMeta, StorageManager}; use crate::types::{ ChatParameters, ChatRequest, ChatResponse, Conversation, Message, ModelInfo, ToolCall, }; use crate::ui::UiController; -use crate::validation::{get_builtin_schemas, SchemaValidator}; +use crate::validation::{SchemaValidator, get_builtin_schemas}; +use crate::{ChatStream, Provider}; use crate::{ CodeExecTool, ResourcesDeleteTool, ResourcesGetTool, ResourcesListTool, ResourcesWriteTool, ToolRegistry, WebScrapeTool, WebSearchDetailedTool, WebSearchTool, }; use crate::{Error, Result}; use log::warn; +use serde_json::Value; use std::env; use std::path::PathBuf; use std::sync::{Arc, Mutex}; @@ -38,6 +40,51 @@ pub enum SessionOutcome { }, } +fn extract_resource_content(value: &Value) -> Option<String> { + match value { + Value::Null => Some(String::new()), + Value::Bool(flag) => Some(flag.to_string()), + Value::Number(num) => Some(num.to_string()), + Value::String(text) => Some(text.clone()), + Value::Array(items) => { + let mut segments = Vec::new(); + for item in items { + if let Some(segment) = extract_resource_content(item) + && !segment.is_empty() + { + segments.push(segment); + } + } + if segments.is_empty() { + None + } else { + Some(segments.join("\n")) + } + } + Value::Object(map) => { + const PREFERRED_FIELDS: [&str; 6] = + ["content", "contents", "text", "value", "body", "data"]; + for key in PREFERRED_FIELDS.iter() { + if let Some(inner) =
map.get(*key) + && let Some(text) = extract_resource_content(inner) + && !text.is_empty() + { + return Some(text); + } + } + + if let Some(inner) = map.get("chunks") + && let Some(text) = extract_resource_content(inner) + && !text.is_empty() + { + return Some(text); + } + + None + } + } +} + pub struct SessionController { provider: Arc<dyn Provider>, conversation: ConversationManager, @@ -55,6 +102,7 @@ pub struct SessionController { credential_manager: Option<Arc<CredentialManager>>, ui: Arc<dyn UiController>, enable_code_tools: bool, + current_mode: Mode, } async fn build_tools( @@ -228,6 +276,12 @@ impl SessionController { drop(config_guard); // Release the lock before calling build_tools + let initial_mode = if enable_code_tools { + Mode::Code + } else { + Mode::Chat + }; + let (tool_registry, schema_validator) = build_tools( config_arc.clone(), ui.clone(), @@ -247,8 +301,9 @@ impl SessionController { schema_validator.clone(), ); let base_client = factory.create()?; - let permission_client = PermissionLayer::new(base_client, Arc::new(guard.clone())); - Arc::new(permission_client) + let client = Arc::new(PermissionLayer::new(base_client, Arc::new(guard.clone()))); + client.set_mode(initial_mode).await?; + client }; Ok(Self { @@ -268,6 +323,7 @@ impl SessionController { credential_manager, ui, enable_code_tools, + current_mode: initial_mode, }) } @@ -325,10 +381,10 @@ impl SessionController { .expect("Consent manager mutex poisoned"); consent.grant_consent(tool_name, data_types, endpoints); - if let Some(vault) = &self.vault { - if let Err(e) = consent.persist_to_vault(vault) { - eprintln!("Warning: Failed to persist consent to vault: {}", e); - } + if let Some(vault) = &self.vault + && let Err(e) = consent.persist_to_vault(vault) + { + eprintln!("Warning: Failed to persist consent to vault: {}", e); } } @@ -347,12 +403,11 @@ impl SessionController { consent.grant_consent_with_scope(tool_name, data_types, endpoints, scope); // Only persist to vault for permanent consent - if is_permanent { - if let Some(vault) = &self.vault { - if let Err(e) = consent.persist_to_vault(vault) { - eprintln!("Warning: Failed to persist consent to vault: {}", e); - } - } + if is_permanent + && let Some(vault) = &self.vault + && let Err(e) = consent.persist_to_vault(vault) + { + eprintln!("Warning: Failed to persist consent to vault: {}", e); } } @@ -489,8 +544,13 @@ impl SessionController { }; match self.mcp_client.call_tool(call).await { Ok(response) => { - let content: String = serde_json::from_value(response.output)?; - Ok(content) + if let Some(text) = extract_resource_content(&response.output) { + return Ok(text); + } + + let formatted = serde_json::to_string_pretty(&response.output) + .unwrap_or_else(|_| response.output.to_string()); + Ok(formatted) } Err(err) => { log::warn!("MCP file read failed ({}); falling back to local read", err); @@ -500,6 +560,48 @@ impl SessionController { } } + pub async fn read_file_with_tools(&self, path: &str) -> Result<String> { + if !self.enable_code_tools { + return Err(Error::InvalidInput( + "Code tools are disabled in chat mode.
Run `:mode code` to switch.".to_string(), )); + } + + let call = McpToolCall { + name: "resources/get".to_string(), + arguments: serde_json::json!({ "path": path }), + }; + + let response = self.mcp_client.call_tool(call).await?; + if let Some(text) = extract_resource_content(&response.output) { + Ok(text) + } else { + let formatted = serde_json::to_string_pretty(&response.output) + .unwrap_or_else(|_| response.output.to_string()); + Ok(formatted) + } + } + + pub fn code_tools_enabled(&self) -> bool { + self.enable_code_tools + } + + pub async fn set_code_tools_enabled(&mut self, enabled: bool) -> Result<()> { + if self.enable_code_tools == enabled { + return Ok(()); + } + + self.enable_code_tools = enabled; + self.rebuild_tools().await + } + + pub async fn set_operating_mode(&mut self, mode: Mode) -> Result<()> { + self.current_mode = mode; + let enable_code_tools = matches!(mode, Mode::Code); + self.set_code_tools_enabled(enable_code_tools).await?; + self.mcp_client.set_mode(mode).await + } + pub async fn list_dir(&self, path: &str) -> Result<Vec<String>> { let call = McpToolCall { name: "resources/list".to_string(), @@ -587,7 +689,9 @@ impl SessionController { ); let base_client = factory.create()?; let permission_client = PermissionLayer::new(base_client, Arc::new(config.clone())); - self.mcp_client = Arc::new(permission_client); + let client = Arc::new(permission_client); + client.set_mode(self.current_mode).await?; + self.mcp_client = client; Ok(()) } @@ -741,7 +845,7 @@ impl SessionController { if !streaming { const MAX_TOOL_ITERATIONS: usize = 5; for _iteration in 0..MAX_TOOL_ITERATIONS { - match self.provider.chat(request.clone()).await { + match self.provider.send_prompt(request.clone()).await { Ok(response) => { if response.message.has_tool_calls() { self.conversation.push_message(response.message.clone()); @@ -786,7 +890,7 @@ impl SessionController { ))); } - match self.provider.chat_stream(request).await { + match self.provider.stream_prompt(request).await { Ok(stream) => { let response_id = self.conversation.start_streaming_response(); Ok(SessionOutcome::Streaming { @@ -828,6 +932,11 @@ impl SessionController { .filter(|calls| !calls.is_empty()) } + pub fn cancel_stream(&mut self, message_id: Uuid, notice: &str) -> Result<()> { + self.conversation + .cancel_stream(message_id, notice.to_string()) + } + pub async fn execute_streaming_tools( &mut self, _message_id: Uuid, diff --git a/crates/owlen-core/src/state/mod.rs b/crates/owlen-core/src/state/mod.rs new file mode 100644 index 0000000..c215d53 --- /dev/null +++ b/crates/owlen-core/src/state/mod.rs @@ -0,0 +1,194 @@ +//! Shared application state types used across TUI frontends. + +use std::fmt; + +/// High-level application state reported by the UI loop. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AppState { + Running, + Quit, +} + +/// Vim-style input modes supported by the TUI. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum InputMode { + Normal, + Editing, + ProviderSelection, + ModelSelection, + Help, + Visual, + Command, + SessionBrowser, + ThemeBrowser, +} + +impl fmt::Display for InputMode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let label = match self { + InputMode::Normal => "Normal", + InputMode::Editing => "Editing", + InputMode::ModelSelection => "Model", + InputMode::ProviderSelection => "Provider", + InputMode::Help => "Help", + InputMode::Visual => "Visual", + InputMode::Command => "Command", + InputMode::SessionBrowser => "Sessions", + InputMode::ThemeBrowser => "Themes", + }; + f.write_str(label) + } +} + +/// Represents which panel is currently focused in the TUI layout. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FocusedPanel { + Chat, + Thinking, + Input, + Code, +} + +/// Auto-scroll state manager for scrollable panels. +#[derive(Debug, Clone)] +pub struct AutoScroll { + pub scroll: usize, + pub content_len: usize, + pub stick_to_bottom: bool, +} + +impl Default for AutoScroll { + fn default() -> Self { + Self { + scroll: 0, + content_len: 0, + stick_to_bottom: true, + } + } +} + +impl AutoScroll { + /// Update scroll position based on viewport height. + pub fn on_viewport(&mut self, viewport_h: usize) { + let max = self.content_len.saturating_sub(viewport_h); + if self.stick_to_bottom { + self.scroll = max; + } else { + self.scroll = self.scroll.min(max); + } + } + + /// Handle user scroll input. + pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) { + let max = self.content_len.saturating_sub(viewport_h) as isize; + let s = (self.scroll as isize + delta).clamp(0, max) as usize; + self.scroll = s; + self.stick_to_bottom = s as isize == max; + } + + pub fn scroll_half_page_down(&mut self, viewport_h: usize) { + let delta = (viewport_h / 2) as isize; + self.on_user_scroll(delta, viewport_h); + } + + pub fn scroll_half_page_up(&mut self, viewport_h: usize) { + let delta = -((viewport_h / 2) as isize); + self.on_user_scroll(delta, viewport_h); + } + + pub fn scroll_full_page_down(&mut self, viewport_h: usize) { + let delta = viewport_h as isize; + self.on_user_scroll(delta, viewport_h); + } + + pub fn scroll_full_page_up(&mut self, viewport_h: usize) { + let delta = -(viewport_h as isize); + self.on_user_scroll(delta, viewport_h); + } + + pub fn jump_to_top(&mut self) { + self.scroll = 0; + self.stick_to_bottom = false; + } + + pub fn jump_to_bottom(&mut self, viewport_h: usize) { + self.stick_to_bottom = true; + self.on_viewport(viewport_h); + } +} + +/// Visual selection state for text selection. +#[derive(Debug, Clone, Default)] +pub struct VisualSelection { + pub start: Option<(usize, usize)>, + pub end: Option<(usize, usize)>, +} + +impl VisualSelection { + pub fn new() -> Self { + Self::default() + } + + pub fn start_at(&mut self, pos: (usize, usize)) { + self.start = Some(pos); + self.end = Some(pos); + } + + pub fn extend_to(&mut self, pos: (usize, usize)) { + self.end = Some(pos); + } + + pub fn clear(&mut self) { + self.start = None; + self.end = None; + } + + pub fn is_active(&self) -> bool { + self.start.is_some() && self.end.is_some() + } + + pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> { + if let (Some(s), Some(e)) = (self.start, self.end) { + if s.0 < e.0 || (s.0 == e.0 && s.1 <= e.1) { + Some((s, e)) + } else { + Some((e, s)) + } + } else { + None + } + } +} + +/// Cursor position helper for navigating scrollable content. 
+#[derive(Debug, Clone, Copy, Default)] +pub struct CursorPosition { + pub row: usize, + pub col: usize, +} + +impl CursorPosition { + pub fn new(row: usize, col: usize) -> Self { + Self { row, col } + } + + pub fn move_up(&mut self, amount: usize) { + self.row = self.row.saturating_sub(amount); + } + + pub fn move_down(&mut self, amount: usize, max: usize) { + self.row = (self.row + amount).min(max); + } + + pub fn move_left(&mut self, amount: usize) { + self.col = self.col.saturating_sub(amount); + } + + pub fn move_right(&mut self, amount: usize, max: usize) { + self.col = (self.col + amount).min(max); + } + + pub fn as_tuple(&self) -> (usize, usize) { + (self.row, self.col) + } +} diff --git a/crates/owlen-core/src/storage.rs b/crates/owlen-core/src/storage.rs index a12706a..9b12efc 100644 --- a/crates/owlen-core/src/storage.rs +++ b/crates/owlen-core/src/storage.rs @@ -50,14 +50,14 @@ impl StorageManager { /// Create a storage manager using the provided database path pub async fn with_database_path(database_path: PathBuf) -> Result<Self> { - if let Some(parent) = database_path.parent() { - if !parent.exists() { - std::fs::create_dir_all(parent).map_err(|e| { - Error::Storage(format!( - "Failed to create database directory {parent:?}: {e}" - )) - })?; - } + if let Some(parent) = database_path.parent() + && !parent.exists() + { + std::fs::create_dir_all(parent).map_err(|e| { + Error::Storage(format!( + "Failed to create database directory {parent:?}: {e}" + )) + })?; } let options = SqliteConnectOptions::from_str(&format!( @@ -431,13 +431,13 @@ impl StorageManager { } } - if migrated > 0 { - if let Err(err) = archive_legacy_directory(&legacy_dir) { - println!( - "Warning: migrated sessions but failed to archive legacy directory: {}", - err - ); - } + if migrated > 0 + && let Err(err) = archive_legacy_directory(&legacy_dir) + { + println!( + "Warning: migrated sessions but failed to archive legacy directory: {}", + err + ); } println!("Migrated {} legacy sessions.", migrated); diff --git a/crates/owlen-core/src/theme.rs b/crates/owlen-core/src/theme.rs index eac1b15..d8d9b90 100644 --- a/crates/owlen-core/src/theme.rs +++ b/crates/owlen-core/src/theme.rs @@ -586,16 +586,16 @@ where } fn parse_color(s: &str) -> Result<Color, String> { - if let Some(hex) = s.strip_prefix('#') { - if hex.len() == 6 { - let r = u8::from_str_radix(&hex[0..2], 16) - .map_err(|_| format!("Invalid hex color: {}", s))?; - let g = u8::from_str_radix(&hex[2..4], 16) - .map_err(|_| format!("Invalid hex color: {}", s))?; - let b = u8::from_str_radix(&hex[4..6], 16) - .map_err(|_| format!("Invalid hex color: {}", s))?; - return Ok(Color::Rgb(r, g, b)); - } + if let Some(hex) = s.strip_prefix('#') + && hex.len() == 6 + { + let r = + u8::from_str_radix(&hex[0..2], 16).map_err(|_| format!("Invalid hex color: {}", s))?; + let g = + u8::from_str_radix(&hex[2..4], 16).map_err(|_| format!("Invalid hex color: {}", s))?; + let b = + u8::from_str_radix(&hex[4..6], 16).map_err(|_| format!("Invalid hex color: {}", s))?; + return Ok(Color::Rgb(r, g, b)); } // Try named colors diff --git a/crates/owlen-core/src/tools.rs b/crates/owlen-core/src/tools.rs index b607c6f..47d2566 100644 --- a/crates/owlen-core/src/tools.rs +++ b/crates/owlen-core/src/tools.rs @@ -13,7 +13,7 @@ pub mod web_search; pub mod web_search_detailed; use async_trait::async_trait; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use std::collections::HashMap; use std::time::Duration; diff --git a/crates/owlen-core/src/tools/code_exec.rs 
b/crates/owlen-core/src/tools/code_exec.rs index 3db9f24..33ebeca 100644 --- a/crates/owlen-core/src/tools/code_exec.rs +++ b/crates/owlen-core/src/tools/code_exec.rs @@ -2,9 +2,9 @@ use std::sync::Arc; use std::time::Instant; use crate::Result; -use anyhow::{anyhow, Context}; +use anyhow::{Context, anyhow}; use async_trait::async_trait; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use super::{Tool, ToolResult}; use crate::sandbox::{SandboxConfig, SandboxedProcess}; diff --git a/crates/owlen-core/src/tools/web_scrape.rs b/crates/owlen-core/src/tools/web_scrape.rs index b96f82f..0e75f7d 100644 --- a/crates/owlen-core/src/tools/web_scrape.rs +++ b/crates/owlen-core/src/tools/web_scrape.rs @@ -2,7 +2,7 @@ use super::{Tool, ToolResult}; use crate::Result; use anyhow::Context; use async_trait::async_trait; -use serde_json::{json, Value}; +use serde_json::{Value, json}; /// Tool that fetches the raw HTML content for a list of URLs. /// diff --git a/crates/owlen-core/src/tools/web_search.rs b/crates/owlen-core/src/tools/web_search.rs index 8309570..5798221 100644 --- a/crates/owlen-core/src/tools/web_search.rs +++ b/crates/owlen-core/src/tools/web_search.rs @@ -4,7 +4,7 @@ use std::time::Instant; use crate::Result; use anyhow::Context; use async_trait::async_trait; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use super::{Tool, ToolResult}; use crate::consent::ConsentManager; diff --git a/crates/owlen-core/src/tools/web_search_detailed.rs b/crates/owlen-core/src/tools/web_search_detailed.rs index e8a9a1f..ba407c4 100644 --- a/crates/owlen-core/src/tools/web_search_detailed.rs +++ b/crates/owlen-core/src/tools/web_search_detailed.rs @@ -4,7 +4,7 @@ use std::time::Instant; use crate::Result; use anyhow::Context; use async_trait::async_trait; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use super::{Tool, ToolResult}; use crate::consent::ConsentManager; @@ -86,7 +86,9 @@ impl Tool for WebSearchDetailedTool { .expect("Consent manager mutex poisoned"); if !consent.has_consent(self.name()) { - return Ok(ToolResult::error("Consent not granted for detailed web search. This should have been handled by the TUI.")); + return Ok(ToolResult::error( + "Consent not granted for detailed web search. This should have been handled by the TUI.", + )); } } diff --git a/crates/owlen-core/src/ui.rs b/crates/owlen-core/src/ui.rs index b7f40ca..130e3f7 100644 --- a/crates/owlen-core/src/ui.rs +++ b/crates/owlen-core/src/ui.rs @@ -3,171 +3,20 @@ //! This module contains reusable UI components that can be shared between //! different TUI applications (chat, code, etc.) 
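The `ui` module below is being thinned out to a set of re-exports: the concrete types now live in the new `crate::state` module introduced above, so existing `owlen_core::ui::*` imports keep compiling unchanged. As a quick orientation to the contract being moved, here is a minimal sketch of the `AutoScroll` behaviour (the `demo` wrapper is illustrative; the type, fields, and methods are the ones defined in `state/mod.rs`):

```rust
use owlen_core::ui::AutoScroll; // re-export of crate::state::AutoScroll

fn demo() {
    let mut scroll = AutoScroll::default();
    scroll.content_len = 100; // e.g. 100 wrapped chat lines
    scroll.on_viewport(20);   // 20-line viewport, pinned to bottom: 100 - 20
    assert_eq!(scroll.scroll, 80);

    scroll.on_user_scroll(-5, 20); // scrolling up releases the pin
    assert!(!scroll.stick_to_bottom);

    scroll.jump_to_bottom(20); // vim-style G re-pins to the newest line
    assert_eq!(scroll.scroll, 80);
}
```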
-use std::fmt; - /// Application state -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AppState { - Running, - Quit, -} +pub use crate::state::AppState; /// Input modes for TUI applications -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum InputMode { - Normal, - Editing, - ProviderSelection, - ModelSelection, - Help, - Visual, - Command, - SessionBrowser, - ThemeBrowser, -} - -impl fmt::Display for InputMode { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let label = match self { - InputMode::Normal => "Normal", - InputMode::Editing => "Editing", - InputMode::ModelSelection => "Model", - InputMode::ProviderSelection => "Provider", - InputMode::Help => "Help", - InputMode::Visual => "Visual", - InputMode::Command => "Command", - InputMode::SessionBrowser => "Sessions", - InputMode::ThemeBrowser => "Themes", - }; - f.write_str(label) - } -} +pub use crate::state::InputMode; /// Represents which panel is currently focused -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum FocusedPanel { - Chat, - Thinking, - Input, -} +pub use crate::state::FocusedPanel; /// Auto-scroll state manager for scrollable panels -#[derive(Debug, Clone)] -pub struct AutoScroll { - pub scroll: usize, - pub content_len: usize, - pub stick_to_bottom: bool, -} - -impl Default for AutoScroll { - fn default() -> Self { - Self { - scroll: 0, - content_len: 0, - stick_to_bottom: true, - } - } -} - -impl AutoScroll { - /// Update scroll position based on viewport height - pub fn on_viewport(&mut self, viewport_h: usize) { - let max = self.content_len.saturating_sub(viewport_h); - if self.stick_to_bottom { - self.scroll = max; - } else { - self.scroll = self.scroll.min(max); - } - } - - /// Handle user scroll input - pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) { - let max = self.content_len.saturating_sub(viewport_h) as isize; - let s = (self.scroll as isize + delta).clamp(0, max) as usize; - self.scroll = s; - self.stick_to_bottom = s as isize == max; - } - - /// Scroll down half page - pub fn scroll_half_page_down(&mut self, viewport_h: usize) { - let delta = (viewport_h / 2) as isize; - self.on_user_scroll(delta, viewport_h); - } - - /// Scroll up half page - pub fn scroll_half_page_up(&mut self, viewport_h: usize) { - let delta = -((viewport_h / 2) as isize); - self.on_user_scroll(delta, viewport_h); - } - - /// Scroll down full page - pub fn scroll_full_page_down(&mut self, viewport_h: usize) { - let delta = viewport_h as isize; - self.on_user_scroll(delta, viewport_h); - } - - /// Scroll up full page - pub fn scroll_full_page_up(&mut self, viewport_h: usize) { - let delta = -(viewport_h as isize); - self.on_user_scroll(delta, viewport_h); - } - - /// Jump to top - pub fn jump_to_top(&mut self) { - self.scroll = 0; - self.stick_to_bottom = false; - } - - /// Jump to bottom - pub fn jump_to_bottom(&mut self, viewport_h: usize) { - self.stick_to_bottom = true; - self.on_viewport(viewport_h); - } -} +pub use crate::state::AutoScroll; /// Visual selection state for text selection -#[derive(Debug, Clone, Default)] -pub struct VisualSelection { - pub start: Option<(usize, usize)>, // (row, col) - pub end: Option<(usize, usize)>, // (row, col) -} - -impl VisualSelection { - pub fn new() -> Self { - Self::default() - } - - pub fn start_at(&mut self, pos: (usize, usize)) { - self.start = Some(pos); - self.end = Some(pos); - } - - pub fn extend_to(&mut self, pos: (usize, usize)) { - self.end = Some(pos); - } - - pub fn clear(&mut self) { - self.start = None; - self.end = 
None; - } - - pub fn is_active(&self) -> bool { - self.start.is_some() && self.end.is_some() - } - - pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> { - if let (Some(s), Some(e)) = (self.start, self.end) { - // Normalize selection so start is always before end - if s.0 < e.0 || (s.0 == e.0 && s.1 <= e.1) { - Some((s, e)) - } else { - Some((e, s)) - } - } else { - None - } - } -} +pub use crate::state::VisualSelection; /// Extract text from a selection range in a list of lines pub fn extract_text_from_selection( @@ -235,37 +84,7 @@ pub fn extract_text_from_selection( } /// Cursor position for navigating scrollable content -#[derive(Debug, Clone, Copy, Default)] -pub struct CursorPosition { - pub row: usize, - pub col: usize, -} - -impl CursorPosition { - pub fn new(row: usize, col: usize) -> Self { - Self { row, col } - } - - pub fn move_up(&mut self, amount: usize) { - self.row = self.row.saturating_sub(amount); - } - - pub fn move_down(&mut self, amount: usize, max: usize) { - self.row = (self.row + amount).min(max); - } - - pub fn move_left(&mut self, amount: usize) { - self.col = self.col.saturating_sub(amount); - } - - pub fn move_right(&mut self, amount: usize, max: usize) { - self.col = (self.col + amount).min(max); - } - - pub fn as_tuple(&self) -> (usize, usize) { - (self.row, self.col) - } -} +pub use crate::state::CursorPosition; /// Word boundary detection for navigation pub fn find_next_word_boundary(line: &str, col: usize) -> Option<usize> { diff --git a/crates/owlen-core/src/validation.rs b/crates/owlen-core/src/validation.rs index 986eae7..3f445fa 100644 --- a/crates/owlen-core/src/validation.rs +++ b/crates/owlen-core/src/validation.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use anyhow::{Context, Result}; use jsonschema::{JSONSchema, ValidationError}; -use serde_json::{json, Value}; +use serde_json::{Value, json}; pub struct SchemaValidator { schemas: HashMap<String, JSONSchema>, diff --git a/crates/owlen-core/tests/file_server.rs b/crates/owlen-core/tests/file_server.rs index 9215706..d4090e6 100644 --- a/crates/owlen-core/tests/file_server.rs +++ b/crates/owlen-core/tests/file_server.rs @@ -1,5 +1,5 @@ -use owlen_core::mcp::remote_client::RemoteMcpClient; use owlen_core::McpToolCall; +use owlen_core::mcp::remote_client::RemoteMcpClient; use std::fs::File; use std::io::Write; use tempfile::tempdir; diff --git a/crates/owlen-core/tests/file_write.rs b/crates/owlen-core/tests/file_write.rs index fe48d4b..f1af683 100644 --- a/crates/owlen-core/tests/file_write.rs +++ b/crates/owlen-core/tests/file_write.rs @@ -1,5 +1,5 @@ -use owlen_core::mcp::remote_client::RemoteMcpClient; use owlen_core::McpToolCall; +use owlen_core::mcp::remote_client::RemoteMcpClient; use tempfile::tempdir; #[tokio::test] diff --git a/crates/owlen-core/tests/phase9_remoting.rs b/crates/owlen-core/tests/phase9_remoting.rs index a0335e2..0a4068b 100644 --- a/crates/owlen-core/tests/phase9_remoting.rs +++ b/crates/owlen-core/tests/phase9_remoting.rs @@ -5,8 +5,8 @@ use owlen_core::mcp::failover::{FailoverConfig, FailoverMcpClient, ServerEntry, ServerHealth}; use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor}; use owlen_core::{Error, Result}; -use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; +use std::sync::atomic::{AtomicUsize, Ordering}; use std::time::Duration; /// Mock MCP client for testing failover behavior diff --git a/crates/owlen-core/tests/prompt_server.rs b/crates/owlen-core/tests/prompt_server.rs index 10e411d..56797dc 100644 --- 
a/crates/owlen-core/tests/prompt_server.rs +++ b/crates/owlen-core/tests/prompt_server.rs @@ -1,9 +1,9 @@ //! Integration test for the MCP prompt rendering server. +use owlen_core::Result; use owlen_core::config::McpServerConfig; use owlen_core::mcp::client::RemoteMcpClient; use owlen_core::mcp::{McpToolCall, McpToolResponse}; -use owlen_core::Result; use serde_json::json; use std::path::PathBuf; diff --git a/crates/owlen-core/tests/wrap_cursor_tests.rs b/crates/owlen-core/tests/wrap_cursor_tests.rs index 08c20bf..6fd7f13 100644 --- a/crates/owlen-core/tests/wrap_cursor_tests.rs +++ b/crates/owlen-core/tests/wrap_cursor_tests.rs @@ -1,6 +1,6 @@ #![allow(non_snake_case)] -use owlen_core::wrap_cursor::{build_cursor_map, ScreenPos}; +use owlen_core::wrap_cursor::{ScreenPos, build_cursor_map}; fn assert_cursor_pos(map: &[ScreenPos], byte_idx: usize, expected: ScreenPos) { assert_eq!(map[byte_idx], expected, "Mismatch at byte {}", byte_idx); diff --git a/crates/owlen-mcp-client/Cargo.toml b/crates/owlen-mcp-client/Cargo.toml index fd5427f..28eabfe 100644 --- a/crates/owlen-mcp-client/Cargo.toml +++ b/crates/owlen-mcp-client/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "owlen-mcp-client" version = "0.1.0" -edition = "2021" +edition.workspace = true description = "Dedicated MCP client library for Owlen, exposing remote MCP server communication" license = "AGPL-3.0" diff --git a/crates/owlen-mcp-client/src/lib.rs b/crates/owlen-mcp-client/src/lib.rs index f706b91..07708ad 100644 --- a/crates/owlen-mcp-client/src/lib.rs +++ b/crates/owlen-mcp-client/src/lib.rs @@ -8,11 +8,8 @@ pub use owlen_core::mcp::remote_client::RemoteMcpClient; pub use owlen_core::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse}; -// Re‑export the Provider implementation so the client can also be used as an -// LLM provider when the remote MCP server hosts a language‑model tool (e.g. -// `generate_text`). // Re‑export the core Provider trait so that the MCP client can also be used as an LLM provider. -pub use owlen_core::provider::Provider as McpProvider; +pub use owlen_core::Provider as McpProvider; // Note: The `RemoteMcpClient` type provides its own `new` constructor in the core // crate. Users can call `RemoteMcpClient::new()` directly. 
No additional wrapper diff --git a/crates/owlen-mcp-code-server/Cargo.toml b/crates/owlen-mcp-code-server/Cargo.toml index 6c27cdf..a96a9ef 100644 --- a/crates/owlen-mcp-code-server/Cargo.toml +++ b/crates/owlen-mcp-code-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "owlen-mcp-code-server" version = "0.1.0" -edition = "2021" +edition.workspace = true description = "MCP server exposing safe code execution tools for Owlen" license = "AGPL-3.0" diff --git a/crates/owlen-mcp-code-server/src/lib.rs b/crates/owlen-mcp-code-server/src/lib.rs index c8130b2..2008c87 100644 --- a/crates/owlen-mcp-code-server/src/lib.rs +++ b/crates/owlen-mcp-code-server/src/lib.rs @@ -10,11 +10,11 @@ pub mod sandbox; pub mod tools; use owlen_core::mcp::protocol::{ - methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, - RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, + ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError, + RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, methods, }; use owlen_core::tools::{Tool, ToolResult}; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use std::collections::HashMap; use std::sync::Arc; use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt}; @@ -149,10 +149,10 @@ async fn handle_request( supports_streaming: Some(false), }, }; - Ok(RpcResponse::new( - req.id, - serde_json::to_value(result).unwrap(), - )) + let payload = serde_json::to_value(result).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize initialize result: {}", e)) + })?; + Ok(RpcResponse::new(req.id, payload)) } methods::TOOLS_LIST => { let tools = registry.list_tools(); @@ -176,10 +176,10 @@ async fn handle_request( metadata: result.metadata, duration_ms: result.duration.as_millis() as u128, }; - Ok(RpcResponse::new( - req.id, - serde_json::to_value(resp).unwrap(), - )) + let payload = serde_json::to_value(resp).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize tool response: {}", e)) + })?; + Ok(RpcResponse::new(req.id, payload)) } _ => Err(RpcError::method_not_found(&req.method)), } diff --git a/crates/owlen-mcp-code-server/src/sandbox.rs b/crates/owlen-mcp-code-server/src/sandbox.rs index 3e0798d..cc045ba 100644 --- a/crates/owlen-mcp-code-server/src/sandbox.rs +++ b/crates/owlen-mcp-code-server/src/sandbox.rs @@ -1,12 +1,12 @@ //! 
Docker-based sandboxing for secure code execution use anyhow::{Context, Result}; +use bollard::Docker; use bollard::container::{ Config, CreateContainerOptions, RemoveContainerOptions, StartContainerOptions, WaitContainerOptions, }; use bollard::models::{HostConfig, Mount, MountTypeEnum}; -use bollard::Docker; use std::collections::HashMap; use std::path::Path; diff --git a/crates/owlen-mcp-code-server/src/tools.rs b/crates/owlen-mcp-code-server/src/tools.rs index 441aba0..d91ad9e 100644 --- a/crates/owlen-mcp-code-server/src/tools.rs +++ b/crates/owlen-mcp-code-server/src/tools.rs @@ -2,9 +2,9 @@ use crate::sandbox::Sandbox; use async_trait::async_trait; -use owlen_core::tools::{Tool, ToolResult}; use owlen_core::Result; -use serde_json::{json, Value}; +use owlen_core::tools::{Tool, ToolResult}; +use serde_json::{Value, json}; use std::path::PathBuf; /// Tool for compiling projects (Rust, Node.js, Python) diff --git a/crates/owlen-mcp-llm-server/Cargo.toml b/crates/owlen-mcp-llm-server/Cargo.toml index af3a68a..b2e63e7 100644 --- a/crates/owlen-mcp-llm-server/Cargo.toml +++ b/crates/owlen-mcp-llm-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "owlen-mcp-llm-server" version = "0.1.0" -edition = "2021" +edition.workspace = true [dependencies] owlen-core = { path = "../owlen-core" } diff --git a/crates/owlen-mcp-llm-server/src/main.rs b/crates/owlen-mcp-llm-server/src/main.rs index 3e0d02e..b34dec3 100644 --- a/crates/owlen-mcp-llm-server/src/main.rs +++ b/crates/owlen-mcp-llm-server/src/main.rs @@ -7,18 +7,19 @@ clippy::empty_line_after_outer_attr )] -use owlen_core::config::{ensure_provider_config, Config as OwlenConfig}; +use owlen_core::Provider; +use owlen_core::ProviderConfig; +use owlen_core::config::{Config as OwlenConfig, ensure_provider_config}; use owlen_core::mcp::protocol::{ - methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, - RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, + ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError, + RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, + methods, }; use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse}; -use owlen_core::provider::ProviderConfig; use owlen_core::providers::OllamaProvider; use owlen_core::types::{ChatParameters, ChatRequest, Message}; -use owlen_core::Provider; use serde::Deserialize; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use std::collections::HashMap; use std::env; use std::sync::Arc; @@ -178,7 +179,7 @@ async fn handle_generate_text(args: GenerateTextArgs) -> Result Result { supports_streaming: Some(true), }, }; - Ok(serde_json::to_value(result).unwrap()) + serde_json::to_value(result).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize init result: {}", e)) + }) } methods::TOOLS_LIST => { let tools = vec![ @@ -245,7 +248,9 @@ async fn handle_request(req: &RpcRequest) -> Result<Value, RpcError> { .list_models() .await .map_err(|e| RpcError::internal_error(format!("Failed to list models: {}", e)))?; - Ok(serde_json::to_value(models).unwrap()) + serde_json::to_value(models).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize model list: {}", e)) + }) } methods::TOOLS_CALL => { // For streaming we will send incremental notifications directly from here. 
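Both server loops in this hunk series replace `serde_json::to_value(...).unwrap()` with a `map_err` into an RPC-level internal error, so a serialization failure now produces an error response on stdout instead of killing the process. The recurring shape, as a minimal sketch (the `respond` helper is illustrative, not part of the patch; the protocol types are the ones imported above):

```rust
use owlen_core::mcp::protocol::{RequestId, RpcError, RpcResponse};
use serde::Serialize;

// Serialize a payload, converting failure into a JSON-RPC internal error
// that the caller can ship back as an RpcErrorResponse.
fn respond<T: Serialize>(id: RequestId, value: &T) -> Result<RpcResponse, RpcError> {
    let payload = serde_json::to_value(value)
        .map_err(|e| RpcError::internal_error(format!("Failed to serialize response: {}", e)))?;
    Ok(RpcResponse::new(id, payload))
}
```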
@@ -331,10 +336,24 @@ async fn main() -> anyhow::Result<()> { metadata: HashMap::new(), duration_ms: 0, }; - let final_resp = RpcResponse::new( - id.clone(), - serde_json::to_value(response).unwrap(), - ); + let payload = match serde_json::to_value(&response) { + Ok(value) => value, + Err(e) => { + let err_resp = RpcErrorResponse::new( + id.clone(), + RpcError::internal_error(format!( + "Failed to serialize resource response: {}", + e + )), + ); + let s = serde_json::to_string(&err_resp)?; + stdout.write_all(s.as_bytes()).await?; + stdout.write_all(b"\n").await?; + stdout.flush().await?; + continue; + } + }; + let final_resp = RpcResponse::new(id.clone(), payload); let s = serde_json::to_string(&final_resp)?; stdout.write_all(s.as_bytes()).await?; stdout.write_all(b"\n").await?; @@ -375,10 +394,24 @@ async fn main() -> anyhow::Result<()> { metadata: HashMap::new(), duration_ms: 0, }; - let final_resp = RpcResponse::new( - id.clone(), - serde_json::to_value(response).unwrap(), - ); + let payload = match serde_json::to_value(&response) { + Ok(value) => value, + Err(e) => { + let err_resp = RpcErrorResponse::new( + id.clone(), + RpcError::internal_error(format!( + "Failed to serialize directory listing: {}", + e + )), + ); + let s = serde_json::to_string(&err_resp)?; + stdout.write_all(s.as_bytes()).await?; + stdout.write_all(b"\n").await?; + stdout.flush().await?; + continue; + } + }; + let final_resp = RpcResponse::new(id.clone(), payload); let s = serde_json::to_string(&final_resp)?; stdout.write_all(s.as_bytes()).await?; stdout.write_all(b"\n").await?; @@ -454,7 +487,7 @@ async fn main() -> anyhow::Result<()> { parameters, tools: None, }; - let mut stream = match provider.chat_stream(request).await { + let mut stream = match provider.stream_prompt(request).await { Ok(s) => s, Err(e) => { let err_resp = RpcErrorResponse::new( @@ -510,8 +543,24 @@ async fn main() -> anyhow::Result<()> { metadata: HashMap::new(), duration_ms: 0, }; - let final_resp = - RpcResponse::new(id.clone(), serde_json::to_value(response).unwrap()); + let payload = match serde_json::to_value(&response) { + Ok(value) => value, + Err(e) => { + let err_resp = RpcErrorResponse::new( + id.clone(), + RpcError::internal_error(format!( + "Failed to serialize final streaming response: {}", + e + )), + ); + let s = serde_json::to_string(&err_resp)?; + stdout.write_all(s.as_bytes()).await?; + stdout.write_all(b"\n").await?; + stdout.flush().await?; + continue; + } + }; + let final_resp = RpcResponse::new(id.clone(), payload); let s = serde_json::to_string(&final_resp)?; stdout.write_all(s.as_bytes()).await?; stdout.write_all(b"\n").await?; diff --git a/crates/owlen-mcp-prompt-server/Cargo.toml b/crates/owlen-mcp-prompt-server/Cargo.toml index ac2e03c..4440cac 100644 --- a/crates/owlen-mcp-prompt-server/Cargo.toml +++ b/crates/owlen-mcp-prompt-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "owlen-mcp-prompt-server" version = "0.1.0" -edition = "2021" +edition.workspace = true description = "MCP server that renders prompt templates (YAML) for Owlen" license = "AGPL-3.0" diff --git a/crates/owlen-mcp-prompt-server/src/lib.rs b/crates/owlen-mcp-prompt-server/src/lib.rs index ea75f7e..90b89db 100644 --- a/crates/owlen-mcp-prompt-server/src/lib.rs +++ b/crates/owlen-mcp-prompt-server/src/lib.rs @@ -6,7 +6,7 @@ use anyhow::{Context, Result}; use handlebars::Handlebars; use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; +use serde_json::{Value, json}; use std::collections::HashMap; use std::fs; use std::path::{Path, 
PathBuf}; @@ -14,8 +14,8 @@ use std::sync::Arc; use tokio::sync::RwLock; use owlen_core::mcp::protocol::{ - methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse, - RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, + ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError, + RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, methods, }; use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse}; use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt}; @@ -148,7 +148,7 @@ FINAL_ANSWER: Summary of what was done"# template.name, e ); } else { - let mut templates = futures::executor::block_on(self.templates.write()); + let mut templates = self.templates.blocking_write(); templates.insert(template.name.clone(), template); } } @@ -284,10 +284,10 @@ async fn handle_request( supports_streaming: Some(false), }, }; - Ok(RpcResponse::new( - req.id, - serde_json::to_value(result).unwrap(), - )) + let payload = serde_json::to_value(result).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize initialize result: {}", e)) + })?; + Ok(RpcResponse::new(req.id, payload)) } methods::TOOLS_LIST => { let tools = vec![ @@ -349,9 +349,17 @@ async fn handle_request( let srv = server.lock().await; match srv.get_template(name).await { - Some(template) => { - json!({"success": true, "template": serde_json::to_value(template).unwrap()}) - } + Some(template) => match serde_json::to_value(template) { + Ok(serialized) => { + json!({"success": true, "template": serialized}) + } + Err(e) => { + return Err(RpcError::internal_error(format!( + "Failed to serialize template '{}': {}", + name, e + ))); + } + }, None => json!({"success": false, "error": "Template not found"}), } } @@ -397,10 +405,10 @@ async fn handle_request( duration_ms: 0, }; - Ok(RpcResponse::new( - req.id, - serde_json::to_value(resp).unwrap(), - )) + let payload = serde_json::to_value(resp).map_err(|e| { + RpcError::internal_error(format!("Failed to serialize tool response: {}", e)) + })?; + Ok(RpcResponse::new(req.id, payload)) } _ => Err(RpcError::method_not_found(&req.method)), } diff --git a/crates/owlen-mcp-server/Cargo.toml b/crates/owlen-mcp-server/Cargo.toml index 81246f5..b15a081 100644 --- a/crates/owlen-mcp-server/Cargo.toml +++ b/crates/owlen-mcp-server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "owlen-mcp-server" version = "0.1.0" -edition = "2021" +edition.workspace = true [dependencies] tokio = { workspace = true } diff --git a/crates/owlen-mcp-server/src/main.rs b/crates/owlen-mcp-server/src/main.rs index e5b31a2..92cdd37 100644 --- a/crates/owlen-mcp-server/src/main.rs +++ b/crates/owlen-mcp-server/src/main.rs @@ -1,6 +1,6 @@ use owlen_core::mcp::protocol::{ - is_compatible, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, - RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION, + ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError, + RpcErrorResponse, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, is_compatible, }; use path_clean::PathClean; use serde::Deserialize; diff --git a/crates/owlen-tui/src/chat_app.rs b/crates/owlen-tui/src/chat_app.rs index 97dbc83..a6f131e 100644 --- a/crates/owlen-tui/src/chat_app.rs +++ b/crates/owlen-tui/src/chat_app.rs @@ -1,8 +1,8 @@ -use anyhow::{anyhow, Result}; +use anyhow::{Result, anyhow}; use owlen_core::mcp::remote_client::RemoteMcpClient; use owlen_core::{ 
+ Provider, ProviderConfig, model::DetailedModelInfo, - provider::{Provider, ProviderConfig}, session::{SessionController, SessionOutcome}, storage::SessionMeta, theme::Theme, @@ -10,13 +10,16 @@ use owlen_core::{ ui::{AppState, AutoScroll, FocusedPanel, InputMode}, }; use ratatui::style::{Color, Modifier, Style}; -use tokio::sync::mpsc; +use textwrap::wrap; +use tokio::{sync::mpsc, task::JoinHandle}; use tui_textarea::{Input, TextArea}; use uuid::Uuid; use crate::config; use crate::events::Event; use crate::model_info_panel::ModelInfoPanel; +use crate::state::CommandPalette; +use crate::ui::format_tool_output; // Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly // imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`. use std::collections::{BTreeSet, HashMap, HashSet}; @@ -107,6 +110,7 @@ pub enum SessionEvent { response: ChatResponse, }, StreamError { + message_id: Option<Uuid>, message: String, }, ToolExecutionNeeded { @@ -120,17 +124,11 @@ pub enum SessionEvent { callback_id: Uuid, }, /// Agent iteration update (shows THOUGHT/ACTION/OBSERVATION) - AgentUpdate { - content: String, - }, + AgentUpdate { content: String }, /// Agent execution completed with final answer - AgentCompleted { - answer: String, - }, + AgentCompleted { answer: String }, /// Agent execution failed - AgentFailed { - error: String, - }, + AgentFailed { error: String }, } pub const HELP_TAB_COUNT: usize = 7; @@ -159,6 +157,7 @@ pub struct ChatApp { content_width: usize, // Track the content width for line wrapping calculations session_tx: mpsc::UnboundedSender<SessionEvent>, streaming: HashSet<Uuid>, + stream_tasks: HashMap<Uuid, JoinHandle<()>>, textarea: TextArea<'static>, // Advanced text input widget pending_llm_request: bool, // Flag to indicate LLM request needs to be processed pending_tool_execution: Option<(Uuid, Vec<ToolCall>)>, // Pending tool execution (message_id, tool_calls) @@ -167,16 +166,18 @@ pub struct ChatApp { current_thinking: Option<String>, // Current thinking content from last assistant message // Holds the latest formatted Agentic ReAct actions (thought/action/observation) agent_actions: Option<String>, - pending_key: Option<char>, // For multi-key sequences like gg, dd - clipboard: String, // Vim-style clipboard for yank/paste - command_buffer: String, // Buffer for command mode input - command_suggestions: Vec<String>, // Filtered command suggestions based on current input - selected_suggestion: usize, // Index of selected suggestion + pending_key: Option<char>, // For multi-key sequences like gg, dd + clipboard: String, // Vim-style clipboard for yank/paste + command_palette: CommandPalette, // Command mode state (buffer + suggestions) visual_start: Option<(usize, usize)>, // Visual mode selection start (row, col) for Input panel visual_end: Option<(usize, usize)>, // Visual mode selection end (row, col) for scrollable panels focused_panel: FocusedPanel, // Currently focused panel for scrolling chat_cursor: (usize, usize), // Cursor position in Chat panel (row, col) thinking_cursor: (usize, usize), // Cursor position in Thinking panel (row, col) + code_view_path: Option<String>, // Active code view file path + code_view_lines: Vec<String>, // Cached lines for code view rendering + code_view_scroll: AutoScroll, // Scroll state for code view + code_view_viewport_height: usize, // Viewport height for code view panel saved_sessions: Vec<SessionMeta>, // Cached list of saved sessions selected_session_index: usize, // Index of selected session in browser help_tab_index: usize, // Currently selected help tab (0-(HELP_TAB_COUNT-1)) @@ -250,6 +251,7 @@ impl ChatApp { 
content_width: 80, // Default content width, will be updated during rendering session_tx, streaming: std::collections::HashSet::new(), + stream_tasks: HashMap::new(), textarea, pending_llm_request: false, pending_tool_execution: None, @@ -259,14 +261,16 @@ impl ChatApp { agent_actions: None, pending_key: None, clipboard: String::new(), - command_buffer: String::new(), - command_suggestions: Vec::new(), - selected_suggestion: 0, + command_palette: CommandPalette::new(), visual_start: None, visual_end: None, focused_panel: FocusedPanel::Input, chat_cursor: (0, 0), thinking_cursor: (0, 0), + code_view_path: None, + code_view_lines: Vec::new(), + code_view_scroll: AutoScroll::default(), + code_view_viewport_height: 0, saved_sessions: Vec::new(), selected_session_index: 0, help_tab_index: 0, @@ -328,6 +332,34 @@ impl ChatApp { self.controller.selected_model() } + pub fn current_provider(&self) -> &str { + &self.current_provider + } + + pub fn should_show_code_view(&self) -> bool { + matches!(self.operating_mode, owlen_core::mode::Mode::Code) && self.code_view_path.is_some() + } + + pub fn code_view_path(&self) -> Option<&str> { + self.code_view_path.as_deref() + } + + pub fn code_view_lines(&self) -> &[String] { + &self.code_view_lines + } + + pub fn code_view_scroll(&self) -> &AutoScroll { + &self.code_view_scroll + } + + pub fn code_view_scroll_mut(&mut self) -> &mut AutoScroll { + &mut self.code_view_scroll + } + + pub fn set_code_view_viewport_height(&mut self, height: usize) { + self.code_view_viewport_height = height; + } + // Synchronous access for UI rendering and other callers that expect an immediate Config. pub fn config(&self) -> tokio::sync::MutexGuard<'_, owlen_core::config::Config> { self.controller.config() @@ -345,9 +377,19 @@ impl ChatApp { /// Set the operating mode pub async fn set_mode(&mut self, mode: owlen_core::mode::Mode) { + if let Err(err) = self.controller.set_operating_mode(mode).await { + self.error = Some(format!("Failed to switch mode: {}", err)); + return; + } + + if !matches!(mode, owlen_core::mode::Mode::Code) { + self.close_code_view(); + self.set_system_status(String::new()); + } + self.operating_mode = mode; self.status = format!("Switched to {} mode", mode); - // Mode switching is handled by the SessionController's tool filtering + self.error = None; } /// Override the status line with a custom message. 
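`ChatApp::set_mode` above now routes the whole switch through the controller rather than only toggling a local flag. Condensed, and assuming a `SessionController` in hand (the `enter_code_mode` wrapper is illustrative):

```rust
use owlen_core::mode::Mode;
use owlen_core::session::SessionController;

async fn enter_code_mode(controller: &mut SessionController) -> owlen_core::Result<()> {
    // set_operating_mode records current_mode, then:
    //   -> set_code_tools_enabled(true)  (no-op if the flag is unchanged)
    //   -> rebuild_tools()               (recreates the factory + PermissionLayer client)
    //   -> mcp_client.set_mode(Mode::Code)
    controller.set_operating_mode(Mode::Code).await?;
    Ok(())
}
```

Note that the TUI only assigns `self.operating_mode` after the `await` succeeds, so a failed switch leaves the previous mode (and its tool set) intact.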
@@ -468,10 +510,9 @@ impl ChatApp { .model_info_panel .current_model_name() .map(|s| s.to_string()) + && let Some(updated) = self.model_details_cache.get(¤t).cloned() { - if let Some(updated) = self.model_details_cache.get(¤t).cloned() { - self.model_info_panel.set_model_info(updated); - } + self.model_info_panel.set_model_info(updated); } let total = self.model_details_cache.len(); self.status = format!("Cached model details for {} model(s)", total); @@ -575,98 +616,22 @@ impl ChatApp { } pub fn command_buffer(&self) -> &str { - &self.command_buffer + self.command_palette.buffer() } pub fn command_suggestions(&self) -> &[String] { - &self.command_suggestions + self.command_palette.suggestions() } pub fn selected_suggestion(&self) -> usize { - self.selected_suggestion + self.command_palette.selected_index() } /// Returns all available commands with their aliases - fn get_all_commands() -> Vec<(&'static str, &'static str)> { - vec![ - ("quit", "Exit the application"), - ("q", "Alias for quit"), - ("clear", "Clear the conversation"), - ("c", "Alias for clear"), - ("w", "Alias for write"), - ("save", "Alias for write"), - ("load", "Load a saved conversation"), - ("open", "Alias for load"), - ("o", "Alias for load"), - ("mode", "Switch operating mode (chat/code)"), - ("code", "Switch to code mode"), - ("chat", "Switch to chat mode"), - ("tools", "List available tools in current mode"), - ("sessions", "List saved sessions"), - ("help", "Show help documentation"), - ("h", "Alias for help"), - ("model", "Select a model"), - ("model info", "Show detailed information for a model"), - ("model refresh", "Refresh cached model information"), - ("model details", "Show details for the active model"), - ("m", "Alias for model"), - ( - "models info", - "Prefetch detailed information for all models", - ), - ("new", "Start a new conversation"), - ("n", "Alias for new"), - ("theme", "Switch theme"), - ("themes", "List available themes"), - ("tutorial", "Show keybinding tutorial"), - ("reload", "Reload configuration and themes"), - ("e", "Edit a file"), - ("edit", "Alias for edit"), - ("ls", "List directory contents"), - ("privacy-enable", "Enable a privacy-sensitive tool"), - ("privacy-disable", "Disable a privacy-sensitive tool"), - ("privacy-clear", "Clear stored secure data"), - ("agent", "Enable agent mode for autonomous task execution"), - ("stop-agent", "Stop the running agent"), - ] - } - - /// Update command suggestions based on current input - fn update_command_suggestions(&mut self) { - let input = self.command_buffer.trim(); - - if input.is_empty() { - // Show all commands when input is empty - self.command_suggestions = Self::get_all_commands() - .iter() - .map(|(cmd, _)| cmd.to_string()) - .collect(); - } else { - // Filter commands that start with the input - self.command_suggestions = Self::get_all_commands() - .iter() - .filter_map(|(cmd, _)| { - if cmd.starts_with(input) { - Some(cmd.to_string()) - } else { - None - } - }) - .collect(); - } - - // Reset selection if out of bounds - if self.selected_suggestion >= self.command_suggestions.len() { - self.selected_suggestion = 0; - } - } - /// Complete the current command with the selected suggestion fn complete_command(&mut self) { - if let Some(suggestion) = self.command_suggestions.get(self.selected_suggestion) { - self.command_buffer = suggestion.clone(); - self.update_command_suggestions(); - self.status = format!(":{}", self.command_buffer); + if let Some(suggestion) = self.command_palette.apply_selected() { + self.status = format!(":{}", 
suggestion); } } @@ -735,32 +700,63 @@ impl ChatApp { } } + fn focus_sequence(&self) -> Vec<FocusedPanel> { + let mut order = vec![FocusedPanel::Chat]; + if self.should_show_code_view() { + order.push(FocusedPanel::Code); + } + if self.current_thinking.is_some() { + order.push(FocusedPanel::Thinking); + } + order.push(FocusedPanel::Input); + order + } + + fn ensure_focus_valid(&mut self) { + let order = self.focus_sequence(); + if order.is_empty() { + self.focused_panel = FocusedPanel::Chat; + } else if !order.contains(&self.focused_panel) { + self.focused_panel = order[0]; + } + } + pub fn cycle_focus_forward(&mut self) { - self.focused_panel = match self.focused_panel { - FocusedPanel::Chat => { - if self.current_thinking.is_some() { - FocusedPanel::Thinking - } else { - FocusedPanel::Input - } - } - FocusedPanel::Thinking => FocusedPanel::Input, - FocusedPanel::Input => FocusedPanel::Chat, - }; + let order = self.focus_sequence(); + if order.is_empty() { + self.focused_panel = FocusedPanel::Chat; + return; + } + if !order.contains(&self.focused_panel) { + self.focused_panel = order[0]; + } + let current_index = order + .iter() + .position(|panel| *panel == self.focused_panel) + .unwrap_or(0); + let next_index = (current_index + 1) % order.len(); + self.focused_panel = order[next_index]; } pub fn cycle_focus_backward(&mut self) { - self.focused_panel = match self.focused_panel { - FocusedPanel::Chat => FocusedPanel::Input, - FocusedPanel::Thinking => FocusedPanel::Chat, - FocusedPanel::Input => { - if self.current_thinking.is_some() { - FocusedPanel::Thinking - } else { - FocusedPanel::Chat - } - } + let order = self.focus_sequence(); + if order.is_empty() { + self.focused_panel = FocusedPanel::Chat; + return; + } + if !order.contains(&self.focused_panel) { + self.focused_panel = order[0]; + } + let current_index = order + .iter() + .position(|panel| *panel == self.focused_panel) + .unwrap_or(0); + let prev_index = if current_index == 0 { + order.len().saturating_sub(1) + } else { + current_index - 1 }; + self.focused_panel = order[prev_index]; } /// Sync textarea content to input buffer @@ -777,6 +773,39 @@ impl ChatApp { configure_textarea_defaults(&mut self.textarea); } + fn set_code_view_content(&mut self, path: impl Into<String>, content: String) { + let mut lines: Vec<String> = content.lines().map(|line| line.to_string()).collect(); + if content.ends_with('\n') { + lines.push(String::new()); + } + self.code_view_path = Some(path.into()); + self.code_view_lines = lines; + self.code_view_scroll = AutoScroll::default(); + self.code_view_scroll.content_len = self.code_view_lines.len(); + self.code_view_scroll.stick_to_bottom = false; + self.code_view_scroll.scroll = 0; + self.ensure_focus_valid(); + } + + fn close_code_view(&mut self) { + self.code_view_path = None; + self.code_view_lines.clear(); + self.code_view_scroll = AutoScroll::default(); + self.code_view_viewport_height = 0; + if matches!(self.focused_panel, FocusedPanel::Code) { + self.focused_panel = FocusedPanel::Chat; + } + self.ensure_focus_valid(); + } + + fn handle_resize(&mut self, width: u16, _height: u16) { + let approx_content_width = usize::from(width.saturating_sub(6)); + self.content_width = approx_content_width.max(20); + self.auto_scroll.stick_to_bottom = true; + self.thinking_scroll.stick_to_bottom = true; + self.code_view_scroll.stick_to_bottom = false; + } + pub async fn initialize_models(&mut self) -> Result<()> { let config_model_name = self.controller.config().general.default_model.clone(); let config_model_provider = 
self.controller.config().general.default_provider.clone(); @@ -833,6 +862,9 @@ impl ChatApp { Event::Tick => { // Future: update streaming timers } + Event::Resize(width, height) => { + self.handle_resize(width, height); + } Event::Paste(text) => { // Handle paste events - insert text directly without triggering sends if matches!(self.mode, InputMode::Editing) { @@ -974,8 +1006,15 @@ impl ChatApp { } match (key.code, key.modifiers) { - (KeyCode::Char('q'), KeyModifiers::NONE) - | (KeyCode::Char('c'), KeyModifiers::CONTROL) => { + (KeyCode::Char('q'), KeyModifiers::NONE) => { + return Ok(AppState::Quit); + } + (KeyCode::Char('c'), modifiers) + if modifiers.contains(KeyModifiers::CONTROL) => + { + if self.cancel_active_generation()? { + return Ok(AppState::Running); + } return Ok(AppState::Quit); } (KeyCode::Char('j'), modifiers) @@ -995,6 +1034,12 @@ impl ChatApp { } // Mode switches (KeyCode::Char('v'), KeyModifiers::NONE) => { + if matches!(self.focused_panel, FocusedPanel::Code) { + self.status = + "Code view is read-only; yank text with :open and copy manually." + .to_string(); + return Ok(AppState::Running); + } self.mode = InputMode::Visual; match self.focused_panel { @@ -1020,15 +1065,15 @@ impl ChatApp { self.visual_start = Some(cursor); self.visual_end = Some(cursor); } + FocusedPanel::Code => {} } self.status = "-- VISUAL -- (move with j/k, yank with y)".to_string(); } (KeyCode::Char(':'), KeyModifiers::NONE) => { self.mode = InputMode::Command; - self.command_buffer.clear(); - self.selected_suggestion = 0; - self.update_command_suggestions(); + self.command_palette.clear(); + self.command_palette.ensure_suggestions(); self.status = ":".to_string(); } // Enter editing mode @@ -1092,6 +1137,12 @@ impl ChatApp { } } } + FocusedPanel::Code => { + let viewport = self.code_view_viewport_height.max(1); + if self.code_view_scroll.scroll > 0 { + self.code_view_scroll.on_user_scroll(-1, viewport); + } + } FocusedPanel::Input => { self.on_scroll(-1); } @@ -1123,6 +1174,13 @@ impl ChatApp { } } } + FocusedPanel::Code => { + let viewport = self.code_view_viewport_height.max(1); + let max_lines = self.code_view_scroll.content_len; + if self.code_view_scroll.scroll + viewport < max_lines { + self.code_view_scroll.on_user_scroll(1, viewport); + } + } FocusedPanel::Input => { self.on_scroll(1); } @@ -1142,6 +1200,7 @@ impl ChatApp { self.thinking_cursor.1 -= 1; } } + FocusedPanel::Code => {} _ => {} } } @@ -1167,6 +1226,7 @@ impl ChatApp { } } } + FocusedPanel::Code => {} _ => {} } } @@ -1188,6 +1248,7 @@ impl ChatApp { self.thinking_cursor.1 = new_col; } } + FocusedPanel::Code => {} _ => {} }, (KeyCode::Char('e'), KeyModifiers::NONE) => match self.focused_panel { @@ -1206,6 +1267,7 @@ impl ChatApp { self.thinking_cursor.1 = new_col; } } + FocusedPanel::Code => {} _ => {} }, (KeyCode::Char('b'), KeyModifiers::NONE) => match self.focused_panel { @@ -1225,6 +1287,7 @@ impl ChatApp { self.thinking_cursor.1 = new_col; } } + FocusedPanel::Code => {} _ => {} }, (KeyCode::Char('^'), KeyModifiers::SHIFT) => match self.focused_panel { @@ -1247,6 +1310,7 @@ impl ChatApp { self.thinking_cursor.1 = first_non_blank; } } + FocusedPanel::Code => {} _ => {} }, // Line start/end navigation @@ -1258,6 +1322,7 @@ impl ChatApp { FocusedPanel::Thinking => { self.thinking_cursor.1 = 0; } + FocusedPanel::Code => {} _ => {} }, (KeyCode::Char('$'), KeyModifiers::NONE) @@ -1273,6 +1338,7 @@ impl ChatApp { self.thinking_cursor.1 = line.chars().count(); } } + FocusedPanel::Code => {} _ => {} }, // Half-page scrolling @@ 
-1335,6 +1401,7 @@ impl ChatApp { FocusedPanel::Chat => "Chat", FocusedPanel::Thinking => "Thinking", FocusedPanel::Input => "Input", + FocusedPanel::Code => "Code", }; self.status = format!("Focus: {}", panel_name); } @@ -1344,6 +1411,7 @@ impl ChatApp { FocusedPanel::Chat => "Chat", FocusedPanel::Thinking => "Thinking", FocusedPanel::Input => "Input", + FocusedPanel::Code => "Code", }; self.status = format!("Focus: {}", panel_name); } @@ -1357,12 +1425,27 @@ impl ChatApp { } } InputMode::Editing => match (key.code, key.modifiers) { + (KeyCode::Char('c'), modifiers) + if modifiers.contains(KeyModifiers::CONTROL) => + { + let _ = self.cancel_active_generation()?; + self.sync_textarea_to_buffer(); + self.mode = InputMode::Normal; + self.reset_status(); + } (KeyCode::Esc, KeyModifiers::NONE) => { // Sync textarea content to input buffer before leaving edit mode self.sync_textarea_to_buffer(); self.mode = InputMode::Normal; self.reset_status(); } + (KeyCode::Char('['), modifiers) + if modifiers.contains(KeyModifiers::CONTROL) => + { + self.sync_textarea_to_buffer(); + self.mode = InputMode::Normal; + self.reset_status(); + } (KeyCode::Char('j' | 'J'), m) if m.contains(KeyModifiers::CONTROL) => { self.textarea.insert_newline(); } @@ -1458,6 +1541,7 @@ impl ChatApp { self.status = "Nothing to yank".to_string(); } } + FocusedPanel::Code => {} } self.mode = InputMode::Normal; self.visual_start = None; @@ -1490,6 +1574,7 @@ impl ChatApp { self.status = "Nothing to yank".to_string(); } } + FocusedPanel::Code => {} } self.mode = InputMode::Normal; self.visual_start = None; @@ -1503,12 +1588,13 @@ impl ChatApp { } FocusedPanel::Chat | FocusedPanel::Thinking => { // Move selection left (decrease column) - if let Some((row, col)) = self.visual_end { - if col > 0 { - self.visual_end = Some((row, col - 1)); - } + if let Some((row, col)) = self.visual_end + && col > 0 + { + self.visual_end = Some((row, col - 1)); } } + FocusedPanel::Code => {} } } (KeyCode::Right, _) | (KeyCode::Char('l'), KeyModifiers::NONE) => { @@ -1522,6 +1608,7 @@ impl ChatApp { self.visual_end = Some((row, col + 1)); } } + FocusedPanel::Code => {} } } (KeyCode::Up, _) | (KeyCode::Char('k'), KeyModifiers::NONE) => { @@ -1531,14 +1618,15 @@ impl ChatApp { } FocusedPanel::Chat | FocusedPanel::Thinking => { // Move selection up (decrease end row) - if let Some((row, col)) = self.visual_end { - if row > 0 { - self.visual_end = Some((row - 1, col)); - // Scroll if needed to keep selection visible - self.on_scroll(-1); - } + if let Some((row, col)) = self.visual_end + && row > 0 + { + self.visual_end = Some((row - 1, col)); + // Scroll if needed to keep selection visible + self.on_scroll(-1); } } + FocusedPanel::Code => {} } } (KeyCode::Down, _) | (KeyCode::Char('j'), KeyModifiers::NONE) => { @@ -1563,6 +1651,7 @@ impl ChatApp { } } } + FocusedPanel::Code => {} } } (KeyCode::Char('w'), KeyModifiers::NONE) => { @@ -1573,14 +1662,14 @@ impl ChatApp { } FocusedPanel::Chat | FocusedPanel::Thinking => { // Move selection forward by word - if let Some((row, col)) = self.visual_end { - if let Some(new_col) = + if let Some((row, col)) = self.visual_end + && let Some(new_col) = self.find_next_word_boundary(row, col) - { - self.visual_end = Some((row, new_col)); - } + { + self.visual_end = Some((row, new_col)); } } + FocusedPanel::Code => {} } } (KeyCode::Char('b'), KeyModifiers::NONE) => { @@ -1591,14 +1680,14 @@ impl ChatApp { } FocusedPanel::Chat | FocusedPanel::Thinking => { // Move selection backward by word - if let Some((row, col)) = 
self.visual_end { - if let Some(new_col) = + if let Some((row, col)) = self.visual_end + && let Some(new_col) = self.find_prev_word_boundary(row, col) - { - self.visual_end = Some((row, new_col)); - } + { + self.visual_end = Some((row, new_col)); } } + FocusedPanel::Code => {} } } (KeyCode::Char('0'), KeyModifiers::NONE) | (KeyCode::Home, _) => { @@ -1612,6 +1701,7 @@ impl ChatApp { self.visual_end = Some((row, 0)); } } + FocusedPanel::Code => {} } } (KeyCode::Char('$'), KeyModifiers::NONE) | (KeyCode::End, _) => { @@ -1621,13 +1711,14 @@ impl ChatApp { } FocusedPanel::Chat | FocusedPanel::Thinking => { // Move selection to end of line - if let Some((row, _)) = self.visual_end { - if let Some(line) = self.get_line_at_row(row) { - let line_len = line.chars().count(); - self.visual_end = Some((row, line_len)); - } + if let Some((row, _)) = self.visual_end + && let Some(line) = self.get_line_at_row(row) + { + let line_len = line.chars().count(); + self.visual_end = Some((row, line_len)); } } + FocusedPanel::Code => {} } } _ => { @@ -1637,8 +1728,7 @@ impl ChatApp { InputMode::Command => match (key.code, key.modifiers) { (KeyCode::Esc, _) => { self.mode = InputMode::Normal; - self.command_buffer.clear(); - self.command_suggestions.clear(); + self.command_palette.clear(); self.reset_status(); } (KeyCode::Tab, _) => { @@ -1647,22 +1737,16 @@ impl ChatApp { } (KeyCode::Up, _) | (KeyCode::Char('k'), KeyModifiers::CONTROL) => { // Navigate up in suggestions - if !self.command_suggestions.is_empty() { - self.selected_suggestion = - self.selected_suggestion.saturating_sub(1); - } + self.command_palette.select_previous(); } (KeyCode::Down, _) | (KeyCode::Char('j'), KeyModifiers::CONTROL) => { // Navigate down in suggestions - if !self.command_suggestions.is_empty() { - self.selected_suggestion = (self.selected_suggestion + 1) - .min(self.command_suggestions.len().saturating_sub(1)); - } + self.command_palette.select_next(); } (KeyCode::Enter, _) => { // Execute command - let cmd = self.command_buffer.trim(); - let parts: Vec<&str> = cmd.split_whitespace().collect(); + let cmd_owned = self.command_palette.buffer().trim().to_string(); + let parts: Vec<&str> = cmd_owned.split_whitespace().collect(); let command = parts.first().copied().unwrap_or(""); let args = &parts[1..]; @@ -1715,15 +1799,14 @@ impl ChatApp { } } } - "load" | "open" | "o" => { + "load" | "o" => { // Load saved sessions and enter browser mode match self.controller.list_saved_sessions().await { Ok(sessions) => { self.saved_sessions = sessions; self.selected_session_index = 0; self.mode = InputMode::SessionBrowser; - self.command_buffer.clear(); - self.command_suggestions.clear(); + self.command_palette.clear(); return Ok(AppState::Running); } Err(e) => { @@ -1732,6 +1815,52 @@ impl ChatApp { } } } + "open" => { + if let Some(path) = args.first() { + if !matches!( + self.operating_mode, + owlen_core::mode::Mode::Code + ) { + self.error = Some( + "Code view requires code mode. Run :mode code first." 
+                                                    .to_string(),
+                                            );
+                                        } else {
+                                            match self.controller.read_file_with_tools(path).await {
+                                                Ok(content) => {
+                                                    self.set_code_view_content(
+                                                        path.to_string(),
+                                                        content,
+                                                    );
+                                                    self.focused_panel = FocusedPanel::Code;
+                                                    self.ensure_focus_valid();
+                                                    self.status = format!("Opened {}", path);
+                                                    self.set_system_status(format!(
+                                                        "Viewing {}",
+                                                        path
+                                                    ));
+                                                    self.error = None;
+                                                }
+                                                Err(e) => {
+                                                    self.error =
+                                                        Some(format!("Failed to open file: {}", e));
+                                                }
+                                            }
+                                        }
+                                    } else {
+                                        self.error = Some("Usage: :open <path>".to_string());
+                                    }
+                                }
+                                "close" => {
+                                    if self.code_view_path.is_some() {
+                                        self.close_code_view();
+                                        self.status = "Closed code view".to_string();
+                                        self.set_system_status(String::new());
+                                        self.error = None;
+                                    } else {
+                                        self.status = "No code view active".to_string();
+                                    }
+                                }
                                 "sessions" => {
                                     // List saved sessions
                                     match self.controller.list_saved_sessions().await {
@@ -1739,8 +1868,7 @@ impl ChatApp {
                                             self.saved_sessions = sessions;
                                             self.selected_session_index = 0;
                                             self.mode = InputMode::SessionBrowser;
-                                            self.command_buffer.clear();
-                                            self.command_suggestions.clear();
+                                            self.command_palette.clear();
                                             return Ok(AppState::Running);
                                         }
                                         Err(e) => {
@@ -1807,16 +1935,14 @@ impl ChatApp {
                                     }
                                 }
                                 "h" | "help" => {
                                     self.mode = InputMode::Help;
-                                    self.command_buffer.clear();
-                                    self.command_suggestions.clear();
+                                    self.command_palette.clear();
                                     return Ok(AppState::Running);
                                 }
                                 "m" | "model" => {
                                     if args.is_empty() {
                                         self.refresh_models().await?;
                                         self.mode = InputMode::ProviderSelection;
-                                        self.command_buffer.clear();
-                                        self.command_suggestions.clear();
+                                        self.command_palette.clear();
                                         return Ok(AppState::Running);
                                     }
@@ -1869,8 +1995,7 @@ impl ChatApp {
                                     }
 
                                     self.mode = InputMode::Normal;
-                                    self.command_buffer.clear();
-                                    self.command_suggestions.clear();
+                                    self.command_palette.clear();
                                     return Ok(AppState::Running);
                                 }
                                 "models" => {
@@ -1892,8 +2017,7 @@ impl ChatApp {
                                     }
 
                                     self.mode = InputMode::Normal;
-                                    self.command_buffer.clear();
-                                    self.command_suggestions.clear();
+                                    self.command_palette.clear();
                                     return Ok(AppState::Running);
                                 }
                                 // "run-agent" command removed to break circular dependency on owlen-cli.
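The dispatch in the hunks above clones the palette buffer into `cmd_owned` before matching: a `&str` borrowed from `self.command_palette.buffer()` would keep `self` immutably borrowed across the match arms, which need `&mut self`. A minimal sketch of that borrow pattern, using hypothetical simplified types rather than the real `ChatApp`:

```rust
struct Palette {
    buffer: String,
}

impl Palette {
    fn buffer(&self) -> &str {
        &self.buffer
    }
    fn clear(&mut self) {
        self.buffer.clear();
    }
}

struct App {
    palette: Palette,
    status: String,
}

impl App {
    fn dispatch(&mut self) {
        // Clone first: a `&str` pointing into `self.palette` would still be
        // live inside the match arms below, where `self` is mutated.
        let cmd_owned = self.palette.buffer().trim().to_string();
        match cmd_owned.split_whitespace().next().unwrap_or("") {
            "clear" => self.palette.clear(),
            other => self.status = format!("Unknown command: {}", other),
        }
    }
}

fn main() {
    let mut app = App {
        palette: Palette { buffer: "oops".to_string() },
        status: String::new(),
    };
    app.dispatch();
    println!("{}", app.status); // "Unknown command: oops"
}
```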
@@ -1917,7 +2041,9 @@ impl ChatApp { } "n" | "new" => { self.controller.start_new_conversation(None, None); + self.reset_after_new_conversation()?; self.status = "Started new conversation".to_string(); + self.error = None; } "e" | "edit" => { if let Some(path) = args.first() { @@ -1996,8 +2122,7 @@ impl ChatApp { .unwrap_or(0); self.mode = InputMode::ThemeBrowser; - self.command_buffer.clear(); - self.command_suggestions.clear(); + self.command_palette.clear(); return Ok(AppState::Running); } "reload" => { @@ -2036,8 +2161,8 @@ impl ChatApp { config::save_config(&self.controller.config()) { self.error = Some(format!( - "Enabled {tool}, but failed to save config: {err}" - )); + "Enabled {tool}, but failed to save config: {err}" + )); } else { self.status = format!("Enabled tool: {tool}"); self.error = None; @@ -2061,8 +2186,8 @@ impl ChatApp { config::save_config(&self.controller.config()) { self.error = Some(format!( - "Disabled {tool}, but failed to save config: {err}" - )); + "Disabled {tool}, but failed to save config: {err}" + )); } else { self.status = format!("Disabled tool: {tool}"); self.error = None; @@ -2091,23 +2216,20 @@ impl ChatApp { } } _ => { - self.error = Some(format!("Unknown command: {}", cmd)); + self.error = Some(format!("Unknown command: {}", cmd_owned)); } } - self.command_buffer.clear(); - self.command_suggestions.clear(); + self.command_palette.clear(); self.mode = InputMode::Normal; } (KeyCode::Char(c), KeyModifiers::NONE) | (KeyCode::Char(c), KeyModifiers::SHIFT) => { - self.command_buffer.push(c); - self.update_command_suggestions(); - self.status = format!(":{}", self.command_buffer); + self.command_palette.push_char(c); + self.status = format!(":{}", self.command_palette.buffer()); } (KeyCode::Backspace, _) => { - self.command_buffer.pop(); - self.update_command_suggestions(); - self.status = format!(":{}", self.command_buffer); + self.command_palette.pop_char(); + self.status = format!(":{}", self.command_palette.buffer()); } _ => {} }, @@ -2326,23 +2448,20 @@ impl ChatApp { } } KeyCode::Char(' ') => { - if let Some(item) = self.current_model_selector_item() { - if let ModelSelectorItemKind::Header { provider, expanded } = + if let Some(item) = self.current_model_selector_item() + && let ModelSelectorItemKind::Header { provider, expanded } = item.kind() - { - if *expanded { - let provider_name = provider.clone(); - self.collapse_provider(&provider_name); - self.status = - format!("Collapsed provider: {}", provider_name); - } else { - let provider_name = provider.clone(); - self.expand_provider(&provider_name, true); - self.status = - format!("Expanded provider: {}", provider_name); - } - self.error = None; + { + if *expanded { + let provider_name = provider.clone(); + self.collapse_provider(&provider_name); + self.status = format!("Collapsed provider: {}", provider_name); + } else { + let provider_name = provider.clone(); + self.expand_provider(&provider_name, true); + self.status = format!("Expanded provider: {}", provider_name); } + self.error = None; } } _ => {} @@ -2365,10 +2484,11 @@ impl ChatApp { } } KeyCode::Char(ch) if ch.is_ascii_digit() => { - if let Some(idx) = ch.to_digit(10) { - if idx >= 1 && (idx as usize) <= HELP_TAB_COUNT { - self.help_tab_index = (idx - 1) as usize; - } + if let Some(idx) = ch.to_digit(10) + && idx >= 1 + && (idx as usize) <= HELP_TAB_COUNT + { + self.help_tab_index = (idx - 1) as usize; } } _ => {} @@ -2477,7 +2597,6 @@ impl ChatApp { }, } } - _ => {} } Ok(AppState::Running) @@ -2494,6 +2613,12 @@ impl ChatApp { let 
viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.on_user_scroll(delta, viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.on_user_scroll(delta, viewport_height); + } + } FocusedPanel::Input => { // Input panel doesn't scroll } @@ -2510,6 +2635,12 @@ impl ChatApp { let viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.scroll_half_page_down(viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.scroll_half_page_down(viewport_height); + } + } FocusedPanel::Input => {} } } @@ -2524,6 +2655,12 @@ impl ChatApp { let viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.scroll_half_page_up(viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.scroll_half_page_up(viewport_height); + } + } FocusedPanel::Input => {} } } @@ -2538,6 +2675,12 @@ impl ChatApp { let viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.scroll_full_page_down(viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.scroll_full_page_down(viewport_height); + } + } FocusedPanel::Input => {} } } @@ -2552,6 +2695,12 @@ impl ChatApp { let viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.scroll_full_page_up(viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.scroll_full_page_up(viewport_height); + } + } FocusedPanel::Input => {} } } @@ -2565,6 +2714,11 @@ impl ChatApp { FocusedPanel::Thinking => { self.thinking_scroll.jump_to_top(); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + self.code_view_scroll.jump_to_top(); + } + } FocusedPanel::Input => {} } } @@ -2579,6 +2733,12 @@ impl ChatApp { let viewport_height = self.thinking_viewport_height.max(1); self.thinking_scroll.jump_to_bottom(viewport_height); } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + let viewport_height = self.code_view_viewport_height.max(1); + self.code_view_scroll.jump_to_bottom(viewport_height); + } + } FocusedPanel::Input => {} } } @@ -2597,6 +2757,7 @@ impl ChatApp { // Auto-scroll will handle this in the render loop if response.is_final { self.streaming.remove(&message_id); + self.stream_tasks.remove(&message_id); self.stop_loading_animation(); // Check if the completed stream has tool calls that need execution @@ -2613,8 +2774,18 @@ impl ChatApp { } } } - SessionEvent::StreamError { message } => { + SessionEvent::StreamError { + message_id, + message, + } => { self.stop_loading_animation(); + if let Some(id) = message_id { + self.streaming.remove(&id); + self.stream_tasks.remove(&id); + } else { + self.streaming.clear(); + self.stream_tasks.clear(); + } self.error = Some(message); } SessionEvent::ToolExecutionNeeded { @@ -2730,26 +2901,7 @@ impl ChatApp { RemoteMcpClient::new_with_config(&config) } else { // Fallback to legacy discovery: temporarily set env vars while spawning. 
-                let backups: Vec<(String, Option<String>)> = env_vars
-                    .keys()
-                    .map(|key| (key.clone(), std::env::var(key).ok()))
-                    .collect();
-
-                for (key, value) in env_vars.iter() {
-                    std::env::set_var(key, value);
-                }
-
-                let result = RemoteMcpClient::new();
-
-                for (key, original) in backups {
-                    if let Some(value) = original {
-                        std::env::set_var(&key, value);
-                    } else {
-                        std::env::remove_var(&key);
-                    }
-                }
-
-                result
+                Self::with_temp_env_vars(&env_vars, RemoteMcpClient::new)
             };
 
             match client_result {
@@ -2788,6 +2940,38 @@ impl ChatApp {
         self.available_providers = providers.into_iter().collect();
     }
 
+    fn with_temp_env_vars<F, T>(env_vars: &HashMap<String, String>, action: F) -> T
+    where
+        F: FnOnce() -> T,
+    {
+        let backups: Vec<(String, Option<String>)> = env_vars
+            .keys()
+            .map(|key| (key.clone(), std::env::var(key).ok()))
+            .collect();
+
+        for (key, value) in env_vars {
+            // Safety: environment mutations are scoped to this synchronous call and restored
+            // immediately afterwards, so no other threads observe inconsistent state.
+            unsafe {
+                std::env::set_var(key, value);
+            }
+        }
+
+        let result = action();
+
+        for (key, original) in backups {
+            unsafe {
+                if let Some(value) = original {
+                    std::env::set_var(&key, value);
+                } else {
+                    std::env::remove_var(&key);
+                }
+            }
+        }
+
+        result
+    }
+
     fn rebuild_model_selector_items(&mut self) {
         let mut items = Vec::new();
 
@@ -2835,7 +3019,7 @@ impl ChatApp {
                     .entry(canonical)
                     .and_modify(|entry| {
                         if priority > entry.0
-                            || (priority == entry.0 && model.id < entry.1 .1.id)
+                            || (priority == entry.0 && model.id < entry.1.1.id)
                         {
                             *entry = (priority, (idx, model));
                         }
@@ -2887,7 +3071,7 @@ impl ChatApp {
             .find(|(_, item)| {
                 matches!(
                     item.kind(),
-                    ModelSelectorItemKind::Model { provider: ref p, .. } if p == provider
+                    ModelSelectorItemKind::Model { provider: p, .. } if p == provider
                 )
             })
             .map(|(idx, _)| idx)
@@ -2985,6 +3169,8 @@ impl ChatApp {
         } else if !self.available_providers.is_empty() {
             self.selected_provider_index = 0;
             self.selected_provider = self.available_providers[0].clone();
+        } else {
+            self.selected_provider_index = 0;
         }
     }
 
@@ -3055,7 +3241,7 @@ impl ChatApp {
         let mut env_vars = HashMap::new();
         env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.to_string());
 
-        let provider: Arc<dyn LLMProvider> = if let Some(path) = server_binary {
+        let provider: Arc<dyn LlmProvider> = if let Some(path) = server_binary {
             let config = McpServerConfig {
                 name: canonical_name.to_string(),
                 command: path.to_string_lossy().into_owned(),
             };
             Arc::new(RemoteMcpClient::new_with_config(&config)?)
         } else {
-            let backups: Vec<(String, Option<String>)> = env_vars
-                .keys()
-                .map(|key| (key.clone(), std::env::var(key).ok()))
-                .collect();
-
-            for (key, value) in env_vars.iter() {
-                std::env::set_var(key, value);
-            }
-
-            let result = RemoteMcpClient::new();
-
-            for (key, original) in backups {
-                if let Some(value) = original {
-                    std::env::set_var(&key, value);
-                } else {
-                    std::env::remove_var(&key);
-                }
-            }
-
-            Arc::new(result?)
+            Arc::new(Self::with_temp_env_vars(&env_vars, RemoteMcpClient::new)?)
         };
 
         self.controller.switch_provider(provider).await?;
@@ -3195,6 +3362,102 @@ impl ChatApp {
         self.error = None;
     }
 
+    pub fn has_active_generation(&self) -> bool {
+        self.pending_llm_request || !self.streaming.is_empty()
+    }
+
+    pub fn cancel_active_generation(&mut self) -> Result<bool> {
+        let mut cancelled = false;
+        if self.pending_llm_request {
+            self.pending_llm_request = false;
+            cancelled = true;
+        }
+
+        let mut cancel_error: Option<String> = None;
+
+        if !self.streaming.is_empty() {
+            let active_ids: Vec<Uuid> = self.streaming.iter().copied().collect();
+            for message_id in active_ids {
+                if let Some(handle) = self.stream_tasks.remove(&message_id) {
+                    handle.abort();
+                }
+                if let Err(err) = self
+                    .controller
+                    .cancel_stream(message_id, "Generation cancelled by user.")
+                {
+                    cancel_error = Some(err.to_string());
+                }
+                self.streaming.remove(&message_id);
+                cancelled = true;
+            }
+        }
+
+        if cancelled {
+            if let Some(err) = cancel_error {
+                self.error = Some(format!("Failed to finalize cancelled stream: {}", err));
+            } else {
+                self.error = None;
+            }
+            self.stop_loading_animation();
+            self.pending_tool_execution = None;
+            self.pending_consent = None;
+            self.current_thinking = None;
+            self.agent_actions = None;
+            self.status = "Generation cancelled".to_string();
+            self.set_system_status("Generation cancelled".to_string());
+            self.update_thinking_from_last_message();
+        }
+
+        Ok(cancelled)
+    }
+
+    fn reset_after_new_conversation(&mut self) -> Result<()> {
+        let _ = self.cancel_active_generation()?;
+        self.close_code_view();
+        self.set_system_status(String::new());
+        self.pending_llm_request = false;
+        self.pending_tool_execution = None;
+        self.pending_consent = None;
+        self.pending_key = None;
+        self.visual_start = None;
+        self.visual_end = None;
+        self.clipboard.clear();
+
+        {
+            let buffer = self.controller.input_buffer_mut();
+            buffer.clear();
+            buffer.clear_history();
+        }
+
+        self.textarea = TextArea::default();
+        configure_textarea_defaults(&mut self.textarea);
+
+        self.auto_scroll = AutoScroll::default();
+        self.thinking_scroll = AutoScroll::default();
+        self.code_view_scroll = AutoScroll::default();
+        self.code_view_viewport_height = 0;
+
+        self.chat_cursor = (0, 0);
+        self.thinking_cursor = (0, 0);
+
+        self.current_thinking = None;
+        self.agent_actions = None;
+        self.agent_mode = false;
+        self.agent_running = false;
+        self.is_loading = false;
+
+        // Ensure no orphaned stream tasks remain
+        for (_, handle) in self.stream_tasks.drain() {
+            handle.abort();
+        }
+        self.streaming.clear();
+
+        self.focused_panel = FocusedPanel::Input;
+        self.ensure_focus_valid();
+
+        Ok(())
+    }
+
     pub async fn process_pending_llm_request(&mut self) -> Result<()> {
         if !self.pending_llm_request {
             return Ok(());
@@ -3341,11 +3604,9 @@ impl ChatApp {
     }
 
     pub async fn process_pending_tool_execution(&mut self) -> Result<()> {
-        if self.pending_tool_execution.is_none() {
+        let Some((message_id, tool_calls)) = self.pending_tool_execution.take() else {
             return Ok(());
-        }
-
-        let (message_id, tool_calls) = self.pending_tool_execution.take().unwrap();
+        };
 
         // Check if consent is needed for any of these tools
         let consent_needed = self.controller.check_tools_consent_needed(&tool_calls);
@@ -3360,18 +3621,29 @@ impl ChatApp {
 
         // Show consent for the first tool that needs it
         // After consent is granted, the next iteration will check remaining tools
-        let (tool_name, data_types, endpoints) = consent_needed.into_iter().next().unwrap();
-        let callback_id = Uuid::new_v4();
-        let sender = self.session_tx.clone();
-        let _ = sender.send(SessionEvent::ConsentNeeded {
-            tool_name,
-            data_types,
-            endpoints,
-            callback_id,
-        });
-        // Re-queue the tool execution for after consent is granted
-        self.pending_tool_execution = Some((message_id, tool_calls));
-        return Ok(());
+        if let Some((tool_name, data_types, endpoints)) = consent_needed.into_iter().next() {
+            let callback_id = Uuid::new_v4();
+            let sender = self.session_tx.clone();
+            let _ = sender.send(SessionEvent::ConsentNeeded {
+                tool_name: tool_name.clone(),
+                data_types: data_types.clone(),
+                endpoints: endpoints.clone(),
+                callback_id,
+            });
+            self.pending_consent = Some(ConsentDialogState {
+                tool_name,
+                data_types,
+                endpoints,
+                callback_id,
+            });
+            // Re-queue the tool execution for after consent is granted
+            self.pending_tool_execution = Some((message_id, tool_calls));
+            return Ok(());
+        } else {
+            // Defensive fallback: consent was reported as needed but produced no
+            // entry; re-queue so the next tick can re-evaluate.
+            self.pending_tool_execution = Some((message_id, tool_calls));
+            return Ok(());
+        }
         }
 
         // Show tool execution status
@@ -3536,9 +3808,12 @@ impl ChatApp {
     pub fn get_rendered_lines(&self) -> Vec<String> {
         match self.focused_panel {
             FocusedPanel::Chat => {
-                // This should match exactly what render_messages produces
                 let conversation = self.conversation();
-                let formatter = self.formatter();
+                let mut formatter = self.formatter().clone();
+                let wrap_width = self.content_width.max(20);
+                formatter.set_wrap_width(wrap_width);
+                let show_role_labels = formatter.show_role_labels();
+
                 let mut lines = Vec::new();
 
                 for (message_index, message) in conversation.messages.iter().enumerate() {
@@ -3554,24 +3829,74 @@ impl ChatApp {
                         let (content_without_think, _) =
                             formatter.extract_thinking(&message.content);
                         content_without_think
+                    } else if matches!(role, Role::Tool) {
+                        format_tool_output(&message.content)
                     } else {
                         message.content.clone()
                     };
 
-                    // Add role label line
-                    lines.push(format!("{}{}", emoji, name));
+                    let is_streaming = message
+                        .metadata
+                        .get("streaming")
+                        .and_then(|v| v.as_bool())
+                        .unwrap_or(false);
 
-                    // Add content lines with indent
-                    for line in content_to_display.trim().lines() {
-                        lines.push(format!("  {}", line));
+                    let formatted: Vec<String> = content_to_display
+                        .trim()
+                        .lines()
+                        .map(|s| s.to_string())
+                        .collect();
+                    let content = formatted.join("\n");
+
+                    if show_role_labels {
+                        lines.push(format!("{}{}", emoji, name));
+                        let indent = "  ";
+                        let available_width = wrap_width.saturating_sub(2);
+                        let chunks = if available_width > 0 {
+                            wrap(content.as_str(), available_width)
+                        } else {
+                            Vec::new()
+                        };
+                        let last_index = chunks.len().saturating_sub(1);
+                        for (chunk_idx, seg) in chunks.into_iter().enumerate() {
+                            let seg_owned = seg.into_owned();
+                            let mut line = format!("{indent}{seg_owned}");
+                            if chunk_idx == last_index && is_streaming {
+                                line.push_str(" β–Œ");
+                            }
+                            lines.push(line);
+                        }
+                    } else {
+                        let chunks = wrap(content.as_str(), wrap_width);
+                        let last_index = chunks.len().saturating_sub(1);
+                        for (chunk_idx, seg) in chunks.into_iter().enumerate() {
+                            let mut line = seg.into_owned();
+                            if chunk_idx == last_index && is_streaming {
+                                line.push_str(" β–Œ");
+                            }
+                            lines.push(line);
+                        }
                     }
 
-                    // Add separator except after last message
                     if message_index < conversation.messages.len() - 1 {
                         lines.push(String::new());
                     }
                 }
 
+                let last_message_is_user = conversation
+                    .messages
+                    .last()
+                    .map(|msg| matches!(msg.role, Role::User))
+                    .unwrap_or(true);
+
+                if self.get_loading_indicator() != "" && last_message_is_user {
+                    lines.push(format!("πŸ€– Assistant: {}", self.get_loading_indicator()));
+ } + + if lines.is_empty() { + lines.push("No messages yet. Press 'i' to start typing.".to_string()); + } + lines } FocusedPanel::Thinking => { @@ -3581,6 +3906,17 @@ impl ChatApp { Vec::new() } } + FocusedPanel::Code => { + if self.code_view_path.is_some() { + self.code_view_lines + .iter() + .enumerate() + .map(|(idx, line)| format!("{:>4} {}", idx + 1, line)) + .collect() + } else { + Vec::new() + } + } FocusedPanel::Input => Vec::new(), } } @@ -3647,11 +3983,11 @@ impl ChatApp { } } - fn spawn_stream(&mut self, message_id: Uuid, mut stream: owlen_core::provider::ChatStream) { + fn spawn_stream(&mut self, message_id: Uuid, mut stream: owlen_core::ChatStream) { let sender = self.session_tx.clone(); self.streaming.insert(message_id); - tokio::spawn(async move { + let handle = tokio::spawn(async move { use futures_util::StreamExt; while let Some(item) = stream.next().await { @@ -3669,6 +4005,7 @@ impl ChatApp { } Err(e) => { let _ = sender.send(SessionEvent::StreamError { + message_id: Some(message_id), message: e.to_string(), }); break; @@ -3676,6 +4013,8 @@ impl ChatApp { } } }); + + self.stream_tasks.insert(message_id, handle); } } diff --git a/crates/owlen-tui/src/commands/mod.rs b/crates/owlen-tui/src/commands/mod.rs new file mode 100644 index 0000000..d8fca3b --- /dev/null +++ b/crates/owlen-tui/src/commands/mod.rs @@ -0,0 +1,191 @@ +//! Command catalog and lookup utilities for the command palette. + +/// Metadata describing a single command keyword. +#[derive(Debug, Clone, Copy)] +pub struct CommandSpec { + pub keyword: &'static str, + pub description: &'static str, +} + +const COMMANDS: &[CommandSpec] = &[ + CommandSpec { + keyword: "quit", + description: "Exit the application", + }, + CommandSpec { + keyword: "q", + description: "Alias for quit", + }, + CommandSpec { + keyword: "clear", + description: "Clear the conversation", + }, + CommandSpec { + keyword: "c", + description: "Alias for clear", + }, + CommandSpec { + keyword: "w", + description: "Alias for write", + }, + CommandSpec { + keyword: "save", + description: "Alias for write", + }, + CommandSpec { + keyword: "load", + description: "Load a saved conversation", + }, + CommandSpec { + keyword: "o", + description: "Alias for load", + }, + CommandSpec { + keyword: "open", + description: "Open a file in the code view", + }, + CommandSpec { + keyword: "close", + description: "Close the active code view", + }, + CommandSpec { + keyword: "mode", + description: "Switch operating mode (chat/code)", + }, + CommandSpec { + keyword: "code", + description: "Switch to code mode", + }, + CommandSpec { + keyword: "chat", + description: "Switch to chat mode", + }, + CommandSpec { + keyword: "tools", + description: "List available tools in current mode", + }, + CommandSpec { + keyword: "sessions", + description: "List saved sessions", + }, + CommandSpec { + keyword: "help", + description: "Show help documentation", + }, + CommandSpec { + keyword: "h", + description: "Alias for help", + }, + CommandSpec { + keyword: "model", + description: "Select a model", + }, + CommandSpec { + keyword: "model info", + description: "Show detailed information for a model", + }, + CommandSpec { + keyword: "model refresh", + description: "Refresh cached model information", + }, + CommandSpec { + keyword: "model details", + description: "Show details for the active model", + }, + CommandSpec { + keyword: "m", + description: "Alias for model", + }, + CommandSpec { + keyword: "models info", + description: "Prefetch detailed information for all models", + }, + 
+    CommandSpec {
+        keyword: "new",
+        description: "Start a new conversation",
+    },
+    CommandSpec {
+        keyword: "n",
+        description: "Alias for new",
+    },
+    CommandSpec {
+        keyword: "theme",
+        description: "Switch theme",
+    },
+    CommandSpec {
+        keyword: "themes",
+        description: "List available themes",
+    },
+    CommandSpec {
+        keyword: "tutorial",
+        description: "Show keybinding tutorial",
+    },
+    CommandSpec {
+        keyword: "reload",
+        description: "Reload configuration and themes",
+    },
+    CommandSpec {
+        keyword: "e",
+        description: "Edit a file",
+    },
+    CommandSpec {
+        keyword: "edit",
+        description: "Alias for edit",
+    },
+    CommandSpec {
+        keyword: "ls",
+        description: "List directory contents",
+    },
+    CommandSpec {
+        keyword: "privacy-enable",
+        description: "Enable a privacy-sensitive tool",
+    },
+    CommandSpec {
+        keyword: "privacy-disable",
+        description: "Disable a privacy-sensitive tool",
+    },
+    CommandSpec {
+        keyword: "privacy-clear",
+        description: "Clear stored secure data",
+    },
+    CommandSpec {
+        keyword: "agent",
+        description: "Enable agent mode for autonomous task execution",
+    },
+    CommandSpec {
+        keyword: "stop-agent",
+        description: "Stop the running agent",
+    },
+];
+
+/// Return the static catalog of commands.
+pub fn all() -> &'static [CommandSpec] {
+    COMMANDS
+}
+
+/// Return the default suggestion list (all command keywords).
+pub fn default_suggestions() -> Vec<String> {
+    COMMANDS
+        .iter()
+        .map(|spec| spec.keyword.to_string())
+        .collect()
+}
+
+/// Generate keyword suggestions for the given input.
+pub fn suggestions(input: &str) -> Vec<String> {
+    let trimmed = input.trim();
+    if trimmed.is_empty() {
+        return default_suggestions();
+    }
+
+    COMMANDS
+        .iter()
+        .filter_map(|spec| {
+            if spec.keyword.starts_with(trimmed) {
+                Some(spec.keyword.to_string())
+            } else {
+                None
+            }
+        })
+        .collect()
+}
diff --git a/crates/owlen-tui/src/config.rs b/crates/owlen-tui/src/config.rs
index 45527b4..1253341 100644
--- a/crates/owlen-tui/src/config.rs
+++ b/crates/owlen-tui/src/config.rs
@@ -1,6 +1,6 @@
 pub use owlen_core::config::{
-    default_config_path, ensure_ollama_config, ensure_provider_config, session_timeout, Config,
-    GeneralSettings, InputSettings, StorageSettings, UiSettings, DEFAULT_CONFIG_PATH,
+    Config, DEFAULT_CONFIG_PATH, GeneralSettings, InputSettings, StorageSettings, UiSettings,
+    default_config_path, ensure_ollama_config, ensure_provider_config, session_timeout,
 };
 
 /// Attempt to load configuration from default location
diff --git a/crates/owlen-tui/src/lib.rs b/crates/owlen-tui/src/lib.rs
index 1a4719b..ad437b3 100644
--- a/crates/owlen-tui/src/lib.rs
+++ b/crates/owlen-tui/src/lib.rs
@@ -14,9 +14,11 @@
 pub mod chat_app;
 pub mod code_app;
+pub mod commands;
 pub mod config;
 pub mod events;
 pub mod model_info_panel;
+pub mod state;
 pub mod tui_controller;
 pub mod ui;
 
diff --git a/crates/owlen-tui/src/model_info_panel.rs b/crates/owlen-tui/src/model_info_panel.rs
index 22126c7..07bce55 100644
--- a/crates/owlen-tui/src/model_info_panel.rs
+++ b/crates/owlen-tui/src/model_info_panel.rs
@@ -1,10 +1,10 @@
 use owlen_core::model::DetailedModelInfo;
 use owlen_core::theme::Theme;
 use ratatui::{
+    Frame,
     layout::Rect,
     style::{Color, Modifier, Style},
     widgets::{Block, Borders, Paragraph, Wrap},
-    Frame,
 };
 
 /// Dedicated panel for presenting detailed model information.
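For reference, the prefix matching in `suggestions` above can be exercised directly. A hypothetical standalone usage sketch (not part of the diff), assuming the `owlen_tui` crate is a dependency:

```rust
use owlen_tui::commands;

fn main() {
    // An empty buffer falls back to the full keyword catalog.
    assert_eq!(commands::suggestions("").len(), commands::all().len());

    // A prefix narrows the list: "mod" matches "mode", "model",
    // and the other model-related keywords via `starts_with`.
    let matches = commands::suggestions("mod");
    assert!(matches.iter().any(|k| k == "mode"));
    assert!(matches.iter().any(|k| k == "model"));
    println!("{:?}", matches);
}
```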
diff --git a/crates/owlen-tui/src/state/command_palette.rs b/crates/owlen-tui/src/state/command_palette.rs
new file mode 100644
index 0000000..9b7d3df
--- /dev/null
+++ b/crates/owlen-tui/src/state/command_palette.rs
@@ -0,0 +1,92 @@
+use crate::commands;
+
+/// Encapsulates the command-line style palette used in command mode.
+///
+/// The palette keeps track of the raw buffer, matching suggestions, and the
+/// currently highlighted suggestion index. It contains no terminal-specific
+/// logic, which makes it straightforward to unit test.
+#[derive(Debug, Clone, Default)]
+pub struct CommandPalette {
+    buffer: String,
+    suggestions: Vec<String>,
+    selected: usize,
+}
+
+impl CommandPalette {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn buffer(&self) -> &str {
+        &self.buffer
+    }
+
+    pub fn suggestions(&self) -> &[String] {
+        &self.suggestions
+    }
+
+    pub fn selected_index(&self) -> usize {
+        self.selected
+    }
+
+    pub fn clear(&mut self) {
+        self.buffer.clear();
+        self.suggestions.clear();
+        self.selected = 0;
+    }
+
+    pub fn set_buffer(&mut self, value: impl Into<String>) {
+        self.buffer = value.into();
+        self.refresh_suggestions();
+    }
+
+    pub fn push_char(&mut self, ch: char) {
+        self.buffer.push(ch);
+        self.refresh_suggestions();
+    }
+
+    pub fn pop_char(&mut self) {
+        self.buffer.pop();
+        self.refresh_suggestions();
+    }
+
+    pub fn select_previous(&mut self) {
+        if !self.suggestions.is_empty() {
+            self.selected = self.selected.saturating_sub(1);
+        }
+    }
+
+    pub fn select_next(&mut self) {
+        if !self.suggestions.is_empty() {
+            let max_index = self.suggestions.len().saturating_sub(1);
+            self.selected = (self.selected + 1).min(max_index);
+        }
+    }
+
+    pub fn apply_selected(&mut self) -> Option<String> {
+        let selected = self
+            .suggestions
+            .get(self.selected)
+            .cloned()
+            .or_else(|| self.suggestions.first().cloned());
+        if let Some(value) = selected.clone() {
+            self.buffer = value;
+            self.refresh_suggestions();
+        }
+        selected
+    }
+
+    pub fn refresh_suggestions(&mut self) {
+        let trimmed = self.buffer.trim();
+        self.suggestions = commands::suggestions(trimmed);
+        if self.selected >= self.suggestions.len() {
+            self.selected = 0;
+        }
+    }
+
+    pub fn ensure_suggestions(&mut self) {
+        if self.suggestions.is_empty() {
+            self.refresh_suggestions();
+        }
+    }
+}
diff --git a/crates/owlen-tui/src/state/mod.rs b/crates/owlen-tui/src/state/mod.rs
new file mode 100644
index 0000000..920b259
--- /dev/null
+++ b/crates/owlen-tui/src/state/mod.rs
@@ -0,0 +1,10 @@
+//! State helpers shared across TUI components.
+//!
+//! The `state` module contains lightweight wrappers that encapsulate UI state
+//! shared between widgets. Keeping these helpers out of the main `chat_app`
+//! implementation makes the command palette and other stateful widgets easier
+//! to test in isolation.
+ +mod command_palette; + +pub use command_palette::CommandPalette; diff --git a/crates/owlen-tui/src/ui.rs b/crates/owlen-tui/src/ui.rs index 6e79cef..14814ad 100644 --- a/crates/owlen-tui/src/ui.rs +++ b/crates/owlen-tui/src/ui.rs @@ -1,14 +1,14 @@ +use ratatui::Frame; use ratatui::layout::{Alignment, Constraint, Direction, Layout, Rect}; use ratatui::style::{Color, Modifier, Style}; use ratatui::text::{Line, Span}; use ratatui::widgets::{Block, Borders, Clear, List, ListItem, ListState, Paragraph, Wrap}; -use ratatui::Frame; use serde_json; -use textwrap::{wrap, Options}; +use textwrap::{Options, wrap}; use tui_textarea::TextArea; use unicode_width::UnicodeWidthStr; -use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT}; +use crate::chat_app::{ChatApp, HELP_TAB_COUNT, ModelSelectorItemKind}; use owlen_core::model::DetailedModelInfo; use owlen_core::types::{ModelInfo, Role}; use owlen_core::ui::{FocusedPanel, InputMode}; @@ -22,10 +22,21 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) { // Set terminal background color let theme = app.theme().clone(); let background_block = Block::default().style(Style::default().bg(theme.background)); - frame.render_widget(background_block, frame.area()); + let full_area = frame.area(); + frame.render_widget(background_block, full_area); + + let (chat_area, code_area) = if app.should_show_code_view() { + let segments = Layout::default() + .direction(Direction::Horizontal) + .constraints([Constraint::Percentage(65), Constraint::Percentage(35)]) + .split(full_area); + (segments[0], Some(segments[1])) + } else { + (full_area, None) + }; // Calculate dynamic input height based on textarea content - let available_width = frame.area().width; + let available_width = chat_area.width; let input_height = if matches!(app.mode(), InputMode::Editing) { let visual_lines = calculate_wrapped_line_count( app.textarea().lines().iter().map(|s| s.as_str()), @@ -81,7 +92,7 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) { let layout = Layout::default() .direction(Direction::Vertical) .constraints(constraints) - .split(frame.area()); + .split(chat_area); let mut idx = 0; render_header(frame, layout[idx], app); @@ -124,19 +135,22 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) { } if app.is_model_info_visible() { - let panel_width = frame - .area() + let panel_width = full_area .width .saturating_div(3) .max(30) - .min(frame.area().width.saturating_sub(20).max(30)); - let x = frame.area().x + frame.area().width.saturating_sub(panel_width); - let area = Rect::new(x, frame.area().y, panel_width, frame.area().height); + .min(full_area.width.saturating_sub(20).max(30)); + let x = full_area.x + full_area.width.saturating_sub(panel_width); + let area = Rect::new(x, full_area.y, panel_width, full_area.height); frame.render_widget(Clear, area); let viewport_height = area.height.saturating_sub(2) as usize; app.set_model_info_viewport_height(viewport_height); app.model_info_panel_mut().render(frame, area, &theme); } + + if let Some(area) = code_area { + render_code_view(frame, area, app); + } } fn render_editable_textarea( @@ -219,10 +233,10 @@ fn render_editable_textarea( let metrics = compute_cursor_metrics(lines_slice, cursor, mask_char, inner, wrap_lines); - if let Some(ref metrics) = metrics { - if metrics.scroll_top > 0 { - paragraph = paragraph.scroll((metrics.scroll_top, 0)); - } + if let Some(ref metrics) = metrics + && metrics.scroll_top > 0 + { + paragraph = paragraph.scroll((metrics.scroll_top, 0)); } if let 
Some(block) = block { @@ -374,12 +388,10 @@ fn compute_cursor_metrics( break; } - if !cursor_found { - if let Some(last_segment) = segments.last() { - cursor_visual_row = total_visual_rows + segments.len().saturating_sub(1); - cursor_col_width = UnicodeWidthStr::width(last_segment.as_str()); - cursor_found = true; - } + if !cursor_found && let Some(last_segment) = segments.last() { + cursor_visual_row = total_visual_rows + segments.len().saturating_sub(1); + cursor_col_width = UnicodeWidthStr::width(last_segment.as_str()); + cursor_found = true; } } @@ -469,9 +481,15 @@ fn render_header(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) { .fg(theme.focused_panel_border) .add_modifier(Modifier::BOLD), ); + let provider_span = Span::styled( + app.current_provider().to_string(), + Style::default().fg(theme.text), + ); let model_span = Span::styled( - format!("Model: {}", app.selected_model()), - Style::default().fg(theme.user_message_role), + app.selected_model().to_string(), + Style::default() + .fg(theme.user_message_role) + .add_modifier(Modifier::BOLD), ); let header_block = Block::default() @@ -482,7 +500,17 @@ fn render_header(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) { let inner_area = header_block.inner(area); - let header_text = vec![Line::from(""), Line::from(format!(" {model_span} "))]; + let header_text = vec![ + Line::default(), + Line::from(vec![ + Span::raw(" "), + Span::styled("Provider: ", Style::default().fg(theme.placeholder)), + provider_span, + Span::raw(" "), + Span::styled("Model: ", Style::default().fg(theme.placeholder)), + model_span, + ]), + ]; let paragraph = Paragraph::new(header_text) .style(Style::default().bg(theme.background).fg(theme.text)) @@ -776,11 +804,11 @@ fn render_messages(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) { } // Apply visual selection highlighting if in visual mode and Chat panel is focused - if matches!(app.mode(), InputMode::Visual) && matches!(app.focused_panel(), FocusedPanel::Chat) + if matches!(app.mode(), InputMode::Visual) + && matches!(app.focused_panel(), FocusedPanel::Chat) + && let Some(selection) = app.visual_selection() { - if let Some(selection) = app.visual_selection() { - lines = apply_visual_selection(lines, Some(selection), &theme); - } + lines = apply_visual_selection(lines, Some(selection), &theme); } // Update AutoScroll state with accurate content length @@ -864,10 +892,9 @@ fn render_thinking(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) { // Apply visual selection highlighting if in visual mode and Thinking panel is focused if matches!(app.mode(), InputMode::Visual) && matches!(app.focused_panel(), FocusedPanel::Thinking) + && let Some(selection) = app.visual_selection() { - if let Some(selection) = app.visual_selection() { - lines = apply_visual_selection(lines, Some(selection), &theme); - } + lines = apply_visual_selection(lines, Some(selection), &theme); } // Update AutoScroll state with accurate content length @@ -1264,11 +1291,7 @@ where total += wrapped.len().max(1); } - if !seen { - 1 - } else { - total.max(1) - } + if !seen { 1 } else { total.max(1) } } fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) { @@ -1328,6 +1351,30 @@ fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) { .add_modifier(Modifier::BOLD), )); + spans.push(Span::styled(" ", Style::default().fg(theme.text))); + spans.push(Span::styled( + "Provider: ", + Style::default() + .fg(theme.placeholder) + .add_modifier(Modifier::ITALIC), + )); + spans.push(Span::styled( + 
+        app.current_provider().to_string(),
+        Style::default().fg(theme.text),
+    ));
+    spans.push(Span::styled("  ", Style::default().fg(theme.text)));
+    spans.push(Span::styled(
+        "Model: ",
+        Style::default()
+            .fg(theme.placeholder)
+            .add_modifier(Modifier::ITALIC),
+    ));
+    spans.push(Span::styled(
+        app.selected_model().to_string(),
+        Style::default()
+            .fg(theme.user_message_role)
+            .add_modifier(Modifier::BOLD),
+    ));
     spans.push(Span::styled("  ", Style::default().fg(theme.text)));
     spans.push(Span::styled(help_text, Style::default().fg(theme.info)));
 
@@ -1344,6 +1391,76 @@ fn render_status(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
     frame.render_widget(paragraph, area);
 }
 
+fn render_code_view(frame: &mut Frame<'_>, area: Rect, app: &mut ChatApp) {
+    let path = match app.code_view_path() {
+        Some(p) => p.to_string(),
+        None => {
+            frame.render_widget(Clear, area);
+            return;
+        }
+    };
+
+    let theme = app.theme().clone();
+    frame.render_widget(Clear, area);
+
+    let viewport_height = area.height.saturating_sub(2) as usize;
+    app.set_code_view_viewport_height(viewport_height);
+
+    let mut lines: Vec<Line> = Vec::new();
+    if app.code_view_lines().is_empty() {
+        lines.push(Line::from(Span::styled(
+            "(empty file)",
+            Style::default()
+                .fg(theme.placeholder)
+                .add_modifier(Modifier::ITALIC),
+        )));
+    } else {
+        for (idx, content) in app.code_view_lines().iter().enumerate() {
+            let number = format!("{:>4} ", idx + 1);
+            let spans = vec![
+                Span::styled(
+                    number,
+                    Style::default()
+                        .fg(theme.placeholder)
+                        .add_modifier(Modifier::DIM),
+                ),
+                Span::styled(content.clone(), Style::default().fg(theme.text)),
+            ];
+            lines.push(Line::from(spans));
+        }
+    }
+
+    let scroll_state = app.code_view_scroll_mut();
+    scroll_state.content_len = lines.len();
+    scroll_state.on_viewport(viewport_height);
+    let scroll_position = scroll_state.scroll.min(u16::MAX as usize) as u16;
+
+    let border_color = if matches!(app.focused_panel(), FocusedPanel::Code) {
+        theme.focused_panel_border
+    } else {
+        theme.unfocused_panel_border
+    };
+
+    let block = Block::default()
+        .borders(Borders::ALL)
+        .border_style(Style::default().fg(border_color))
+        .style(Style::default().bg(theme.background).fg(theme.text))
+        .title(Span::styled(
+            path,
+            Style::default()
+                .fg(theme.focused_panel_border)
+                .add_modifier(Modifier::BOLD),
+        ));
+
+    let paragraph = Paragraph::new(lines)
+        .style(Style::default().bg(theme.background).fg(theme.text))
+        .block(block)
+        .scroll((scroll_position, 0))
+        .wrap(Wrap { trim: false });
+
+    frame.render_widget(paragraph, area);
+}
+
 fn render_provider_selector(frame: &mut Frame<'_>, app: &ChatApp) {
     let theme = app.theme();
     let area = centered_rect(60, 60, frame.area());
@@ -1510,10 +1627,9 @@ fn build_model_selector_label(
             .parameter_size
             .as_ref()
             .or(detail.parameters.as_ref())
+            && !parameters.trim().is_empty()
         {
-            if !parameters.trim().is_empty() {
-                parts.push(parameters.trim().to_string());
-            }
+            parts.push(parameters.trim().to_string());
         }
 
         if let Some(size) = detail.size {
@@ -2032,8 +2148,17 @@ fn render_help(frame: &mut Frame<'_>, app: &ChatApp) {
         )]),
         Line::from("  :save [name]      β†’ save current session (with optional name)"),
         Line::from("  :w [name]         β†’ alias for :save"),
-        Line::from("  :load, :o, :open  β†’ browse and load saved sessions"),
+        Line::from("  :load, :o         β†’ browse and load saved sessions"),
         Line::from("  :sessions, :ls    β†’ browse saved sessions"),
+        Line::from(""),
+        Line::from(vec![Span::styled(
+            "CODE VIEW",
+            Style::default()
+                .add_modifier(Modifier::BOLD)
+                .fg(theme.user_message_role),
+        )]),
+        Line::from("  :open <path>      β†’ open file in code side panel"),
+        Line::from("  :close            β†’ close the code side panel"),
         // New mode and tool commands added in phases 0‑5
         Line::from("  :code             β†’ switch to code mode (CLI: owlen --code)"),
         Line::from("  :mode             β†’ change current mode explicitly"),
@@ -2066,7 +2191,7 @@ fn render_help(frame: &mut Frame<'_>, app: &ChatApp) {
             .add_modifier(Modifier::BOLD)
             .fg(theme.user_message_role),
         )]),
-        Line::from("  :load, :o, :open  β†’ browse and select session"),
+        Line::from("  :load, :o         β†’ browse and select session"),
         Line::from("  :sessions, :ls    β†’ browse saved sessions"),
         Line::from(""),
         Line::from(vec![Span::styled(
@@ -2291,13 +2416,13 @@ fn render_session_browser(frame: &mut Frame<'_>, app: &ChatApp) {
             let mut lines = vec![Line::from(Span::styled(name, style))];
 
             // Add description if available and not empty
-            if let Some(description) = &session.description {
-                if !description.is_empty() {
-                    lines.push(Line::from(Span::styled(
-                        format!("    \"{}\"", description),
-                        desc_style,
-                    )));
-                }
+            if let Some(description) = &session.description
+                && !description.is_empty()
+            {
+                lines.push(Line::from(Span::styled(
+                    format!("    \"{}\"", description),
+                    desc_style,
+                )));
             }
 
             // Add metadata line
@@ -2548,7 +2673,7 @@ fn role_color(role: &Role, theme: &owlen_core::theme::Theme) -> Style {
 }
 
 /// Format tool output JSON into a nice human-readable format
-fn format_tool_output(content: &str) -> String {
+pub(crate) fn format_tool_output(content: &str) -> String {
     // Try to parse as JSON
     if let Ok(json) = serde_json::from_str::<serde_json::Value>(content) {
         let mut output = String::new();
@@ -2592,23 +2717,23 @@ fn format_tool_output(content: &str) -> String {
             }
 
             // Snippet (truncated if too long)
-            if let Some(snippet) = result.get("snippet").and_then(|v| v.as_str()) {
-                if !snippet.is_empty() {
-                    // Strip HTML tags
-                    let clean_snippet = snippet
-                        .replace("<b>", "")
-                        .replace("</b>", "")
-                        .replace("&#39;", "'")
-                        .replace("&quot;", "\"");
+            if let Some(snippet) = result.get("snippet").and_then(|v| v.as_str())
+                && !snippet.is_empty()
+            {
+                // Strip HTML tags
+                let clean_snippet = snippet
+                    .replace("<b>", "")
+                    .replace("</b>", "")
+                    .replace("&#39;", "'")
+                    .replace("&quot;", "\"");
 
-                    // Truncate if too long
-                    let truncated = if clean_snippet.len() > 200 {
-                        format!("{}...", &clean_snippet[..197])
-                    } else {
-                        clean_snippet
-                    };
-                    output.push_str(&format!("   {}\n", truncated));
-                }
+                // Truncate if too long
+                let truncated = if clean_snippet.len() > 200 {
+                    format!("{}...", &clean_snippet[..197])
+                } else {
+                    clean_snippet
+                };
+                output.push_str(&format!("   {}\n", truncated));
             }
 
             // URL (shortened if too long)
diff --git a/crates/owlen-tui/tests/state_tests.rs b/crates/owlen-tui/tests/state_tests.rs
new file mode 100644
index 0000000..3caeace
--- /dev/null
+++ b/crates/owlen-tui/tests/state_tests.rs
@@ -0,0 +1,56 @@
+use owlen_tui::commands;
+use owlen_tui::state::CommandPalette;
+
+#[test]
+fn palette_tracks_buffer_and_suggestions() {
+    let mut palette = CommandPalette::new();
+    assert_eq!(palette.buffer(), "");
+    assert!(palette.suggestions().is_empty());
+
+    palette.set_buffer("mo");
+    assert_eq!(palette.buffer(), "mo");
+    assert!(palette.suggestions().iter().all(|s| s.starts_with("mo")));
+
+    palette.push_char('d');
+    assert_eq!(palette.buffer(), "mod");
+    assert!(palette.suggestions().iter().all(|s| s.starts_with("mod")));
+
+    palette.pop_char();
+    assert_eq!(palette.buffer(), "mo");
+}
+
+#[test]
+fn palette_selection_wraps_safely() {
+    let mut palette = CommandPalette::new();
palette.set_buffer("m"); + let suggestions = palette.suggestions().len(); + assert!(suggestions > 0); + + palette.select_previous(); + assert_eq!(palette.selected_index(), 0); + + for _ in 0..suggestions * 2 { + palette.select_next(); + } + assert!(palette.selected_index() < palette.suggestions().len()); +} + +#[test] +fn palette_apply_selected_updates_buffer() { + let mut palette = CommandPalette::new(); + palette.set_buffer("mo"); + palette.select_next(); + let selected = palette.apply_selected().expect("suggestion"); + assert_eq!(palette.buffer(), selected); + assert!(selected.starts_with("m")); +} + +#[test] +fn command_catalog_contains_expected_aliases() { + let keywords: Vec<_> = commands::all().iter().map(|spec| spec.keyword).collect(); + assert!(keywords.contains(&"model")); + assert!(keywords.contains(&"open")); + assert!(keywords.contains(&"close")); + assert!(keywords.contains(&"sessions")); + assert!(keywords.contains(&"new")); +} diff --git a/docs/architecture.md b/docs/architecture.md index 567ed62..ce42118 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -31,7 +31,7 @@ A simplified diagram of how components interact: ## Crate Breakdown -- `owlen-core`: Defines the `LLMProvider` abstraction, routing, configuration, session state, encryption, and the MCP client layer. This crate is UI-agnostic and must not depend on concrete providers, terminals, or blocking I/O. +- `owlen-core`: Defines the `LlmProvider` abstraction, routing, configuration, session state, encryption, and the MCP client layer. This crate is UI-agnostic and must not depend on concrete providers, terminals, or blocking I/O. - `owlen-tui`: Hosts all terminal UI behaviour (event loop, rendering, input modes) while delegating business logic and provider access back to `owlen-core`. - `owlen-cli`: Small entry point that parses command-line options, resolves configuration, selects providers, and launches either the TUI or headless agent flows by calling into `owlen-core`. - `owlen-mcp-llm-server`: Runs concrete providers (e.g., Ollama) behind an MCP boundary, exposing them as `generate_text` tools. This crate owns provider-specific wiring and process sandboxing. @@ -131,3 +131,5 @@ The TUI is rendered on each iteration of the main application loop in `owlen-tui 3. **UI Composition**: Inside the closure, the UI is built by composing `ratatui` widgets. The root UI is defined in `owlen_tui::ui::render`, which builds the main layout and calls other functions to render specific components (like the chat panel, input box, etc.). 4. **State-Driven Rendering**: Each rendering function takes the current application state as an argument. It uses this state to decide what and how to render. For example, the border color of a panel might change if it is focused. 5. **Buffer and Diff**: `ratatui` does not draw directly to the terminal. Instead, it renders the widgets to an in-memory buffer. It then compares this buffer to the previous buffer and only sends the necessary changes to the terminal. This is highly efficient and prevents flickering. + +The command palette and other modal helpers expose lightweight state structs in `owlen_tui::state`. These components keep business logic (suggestion filtering, selection state, etc.) independent from rendering, which in turn makes them straightforward to unit test. 
diff --git a/docs/provider-implementation.md b/docs/provider-implementation.md index fbc1c53..f6f7863 100644 --- a/docs/provider-implementation.md +++ b/docs/provider-implementation.md @@ -36,7 +36,7 @@ In your new crate's `lib.rs`, you will define a struct for your provider and imp ```rust use async_trait::async_trait; use owlen_core::model::Model; -use owlen_core::provider::Provider; +use owlen_core::Provider; use owlen_core::session::Session; pub struct MyProvider; diff --git a/examples/mcp_chat.rs b/examples/mcp_chat.rs index 0d806b6..e8c5cb4 100644 --- a/examples/mcp_chat.rs +++ b/examples/mcp_chat.rs @@ -8,9 +8,9 @@ //! - Ensure Ollama is running with a model available use owlen_core::{ + Provider, mcp::remote_client::RemoteMcpClient, types::{ChatParameters, ChatRequest, Message, Role}, - Provider, }; use std::sync::Arc; @@ -57,7 +57,7 @@ async fn main() -> Result<(), anyhow::Error> { // Send request and get response println!("\nAssistant: "); - let response = client.chat(request).await?; + let response = client.send_prompt(request).await?; println!("{}", response.message.content); if let Some(usage) = response.usage {