Add App core struct with event-handling and initialization logic for TUI.

This commit is contained in:
2025-09-27 05:41:46 +02:00
commit 5bc0e02cd3
32 changed files with 7205 additions and 0 deletions

View File

@@ -0,0 +1,342 @@
use crate::provider::ProviderConfig;
use crate::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
use std::time::Duration;
/// Default location for the OWLEN configuration file (the `~` is expanded at
/// load time by `default_config_path`)
pub const DEFAULT_CONFIG_PATH: &str = "~/.config/owlen/config.toml";
/// Core configuration shared by all OWLEN clients.
///
/// Every section except `general` is marked `#[serde(default)]`, so older
/// configuration files that predate a section still deserialize cleanly.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// General application settings
    pub general: GeneralSettings,
    /// Provider specific configuration keyed by provider name
    #[serde(default)]
    pub providers: HashMap<String, ProviderConfig>,
    /// UI preferences that frontends can opt into
    #[serde(default)]
    pub ui: UiSettings,
    /// Storage related options
    #[serde(default)]
    pub storage: StorageSettings,
    /// Input handling preferences
    #[serde(default)]
    pub input: InputSettings,
}
impl Default for Config {
    /// Build a configuration pre-populated with a local Ollama provider entry.
    fn default() -> Self {
        let ollama = ProviderConfig {
            provider_type: "ollama".to_string(),
            base_url: Some("http://localhost:11434".to_string()),
            api_key: None,
            extra: HashMap::new(),
        };
        Self {
            general: GeneralSettings::default(),
            providers: HashMap::from([("ollama".to_string(), ollama)]),
            ui: UiSettings::default(),
            storage: StorageSettings::default(),
            input: InputSettings::default(),
        }
    }
}
impl Config {
    /// Load configuration from disk, falling back to defaults when the file
    /// does not exist.
    ///
    /// # Errors
    /// Returns an I/O error when the file exists but cannot be read, or a
    /// [`crate::Error::Config`] when its contents are not valid TOML.
    pub fn load(path: Option<&Path>) -> Result<Self> {
        let path = match path {
            Some(path) => path.to_path_buf(),
            None => default_config_path(),
        };
        if path.exists() {
            let content = fs::read_to_string(&path)?;
            let mut config: Config =
                toml::from_str(&content).map_err(|e| crate::Error::Config(e.to_string()))?;
            // Older config files may predate some settings; backfill them.
            config.ensure_defaults();
            Ok(config)
        } else {
            Ok(Config::default())
        }
    }

    /// Persist configuration to disk, creating parent directories as needed.
    ///
    /// # Errors
    /// Returns an I/O error when the directory or file cannot be written, or a
    /// [`crate::Error::Config`] when serialization fails.
    pub fn save(&self, path: Option<&Path>) -> Result<()> {
        let path = match path {
            Some(path) => path.to_path_buf(),
            None => default_config_path(),
        };
        if let Some(dir) = path.parent() {
            fs::create_dir_all(dir)?;
        }
        let content =
            toml::to_string_pretty(self).map_err(|e| crate::Error::Config(e.to_string()))?;
        fs::write(path, content)?;
        Ok(())
    }

    /// Get provider configuration by provider name.
    pub fn provider(&self, name: &str) -> Option<&ProviderConfig> {
        self.providers.get(name)
    }

    /// Update or insert a provider configuration.
    pub fn upsert_provider(&mut self, name: impl Into<String>, config: ProviderConfig) {
        self.providers.insert(name.into(), config);
    }

    /// Resolve the default model in priority order: the configured default
    /// (when present in `models`), then the first entry of `models`, then the
    /// configured default as a last resort even when unverified.
    pub fn resolve_default_model<'a>(
        &'a self,
        models: &'a [crate::types::ModelInfo],
    ) -> Option<&'a str> {
        if let Some(model) = self.general.default_model.as_deref() {
            if models.iter().any(|m| m.id == model || m.name == model) {
                return Some(model);
            }
        }
        if let Some(first) = models.first() {
            return Some(&first.id);
        }
        self.general.default_model.as_deref()
    }

    /// Backfill settings that older configuration files may be missing.
    fn ensure_defaults(&mut self) {
        if self.general.default_provider.is_empty() {
            self.general.default_provider = "ollama".to_string();
        }
        // Entry API: single map lookup instead of contains_key + insert.
        self.providers
            .entry("ollama".to_string())
            .or_insert_with(|| ProviderConfig {
                provider_type: "ollama".to_string(),
                base_url: Some("http://localhost:11434".to_string()),
                api_key: None,
                extra: HashMap::new(),
            });
    }
}
/// Default configuration path with the user's home directory (`~`) expanded.
pub fn default_config_path() -> PathBuf {
    let expanded = shellexpand::tilde(DEFAULT_CONFIG_PATH);
    PathBuf::from(expanded.as_ref())
}
/// General behaviour settings shared across clients
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneralSettings {
    /// Default provider name for routing (backfilled to "ollama" when empty)
    pub default_provider: String,
    /// Optional default model id; `None` lets clients pick from available models
    #[serde(default)]
    pub default_model: Option<String>,
    /// Whether streaming responses are preferred (defaults to `true`)
    #[serde(default = "GeneralSettings::default_streaming")]
    pub enable_streaming: bool,
    /// Optional path to a project context file automatically injected as system prompt
    #[serde(default)]
    pub project_context_file: Option<String>,
    /// TTL for cached model listings in seconds (default 60; `model_cache_ttl`
    /// clamps it to a minimum of 5)
    #[serde(default = "GeneralSettings::default_model_cache_ttl")]
    pub model_cache_ttl_secs: u64,
}
impl GeneralSettings {
    /// Serde default: prefer streaming responses.
    fn default_streaming() -> bool {
        true
    }

    /// Serde default: cache model listings for one minute.
    fn default_model_cache_ttl() -> u64 {
        60
    }

    /// Model-cache TTL as a [`Duration`], clamped to a minimum of five seconds.
    pub fn model_cache_ttl(&self) -> Duration {
        let secs = self.model_cache_ttl_secs.max(5);
        Duration::from_secs(secs)
    }
}

impl Default for GeneralSettings {
    fn default() -> Self {
        Self {
            default_provider: String::from("ollama"),
            default_model: Some(String::from("llama3.2:latest")),
            enable_streaming: Self::default_streaming(),
            project_context_file: Some(String::from("OWLEN.md")),
            model_cache_ttl_secs: Self::default_model_cache_ttl(),
        }
    }
}
/// UI preferences that consumers can respect as needed
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UiSettings {
    /// Theme name (defaults to "default")
    #[serde(default = "UiSettings::default_theme")]
    pub theme: String,
    /// Whether word wrapping is enabled (defaults to `true`)
    #[serde(default = "UiSettings::default_word_wrap")]
    pub word_wrap: bool,
    /// Maximum number of history lines retained for display (defaults to 2000)
    #[serde(default = "UiSettings::default_max_history_lines")]
    pub max_history_lines: usize,
    /// Whether role labels are prefixed to rendered messages (defaults to `true`)
    #[serde(default = "UiSettings::default_show_role_labels")]
    pub show_role_labels: bool,
    /// Column at which rendered text is wrapped (defaults to 100)
    #[serde(default = "UiSettings::default_wrap_column")]
    pub wrap_column: u16,
}
impl UiSettings {
    /// Serde default: theme name.
    fn default_theme() -> String {
        String::from("default")
    }

    /// Serde default: word wrapping on.
    fn default_word_wrap() -> bool {
        true
    }

    /// Serde default: keep 2000 history lines.
    fn default_max_history_lines() -> usize {
        2000
    }

    /// Serde default: show role labels.
    fn default_show_role_labels() -> bool {
        true
    }

    /// Serde default: wrap at column 100.
    fn default_wrap_column() -> u16 {
        100
    }
}

impl Default for UiSettings {
    fn default() -> Self {
        Self {
            theme: Self::default_theme(),
            word_wrap: Self::default_word_wrap(),
            max_history_lines: Self::default_max_history_lines(),
            show_role_labels: Self::default_show_role_labels(),
            wrap_column: Self::default_wrap_column(),
        }
    }
}
/// Storage related preferences
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StorageSettings {
    /// Directory where conversations are stored; `~` is expanded by
    /// `conversation_path`
    #[serde(default = "StorageSettings::default_conversation_dir")]
    pub conversation_dir: String,
    /// Whether sessions are saved automatically (defaults to `true`)
    #[serde(default = "StorageSettings::default_auto_save")]
    pub auto_save_sessions: bool,
    /// Maximum number of saved sessions to retain (defaults to 25)
    #[serde(default = "StorageSettings::default_max_sessions")]
    pub max_saved_sessions: usize,
    /// Session timeout in minutes (defaults to 120; `session_timeout` treats
    /// 0 as 1)
    #[serde(default = "StorageSettings::default_session_timeout")]
    pub session_timeout_minutes: u64,
}
impl StorageSettings {
    /// Serde default: conversations live under the XDG data directory.
    fn default_conversation_dir() -> String {
        String::from("~/.local/share/owlen/conversations")
    }

    /// Serde default: auto-save enabled.
    fn default_auto_save() -> bool {
        true
    }

    /// Serde default: keep at most 25 saved sessions.
    fn default_max_sessions() -> usize {
        25
    }

    /// Serde default: two-hour session timeout.
    fn default_session_timeout() -> u64 {
        120
    }

    /// Conversation directory as a filesystem path, with `~` expanded.
    pub fn conversation_path(&self) -> PathBuf {
        let expanded = shellexpand::tilde(&self.conversation_dir);
        PathBuf::from(expanded.as_ref())
    }
}

impl Default for StorageSettings {
    fn default() -> Self {
        Self {
            conversation_dir: Self::default_conversation_dir(),
            auto_save_sessions: Self::default_auto_save(),
            max_saved_sessions: Self::default_max_sessions(),
            session_timeout_minutes: Self::default_session_timeout(),
        }
    }
}
/// Input handling preferences shared across clients
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InputSettings {
    /// Whether multi-line input is enabled (defaults to `true`)
    #[serde(default = "InputSettings::default_multiline")]
    pub multiline: bool,
    /// Number of input-history entries to retain (defaults to 100)
    #[serde(default = "InputSettings::default_history_size")]
    pub history_size: usize,
    /// Number of spaces inserted per tab press (defaults to 4)
    #[serde(default = "InputSettings::default_tab_width")]
    pub tab_width: u8,
    /// Whether sending a message requires confirmation (defaults to `false`)
    #[serde(default = "InputSettings::default_confirm_send")]
    pub confirm_send: bool,
}
impl InputSettings {
    /// Serde default: multi-line input enabled.
    fn default_multiline() -> bool {
        true
    }

    /// Serde default: remember 100 input entries.
    fn default_history_size() -> usize {
        100
    }

    /// Serde default: a tab inserts four spaces.
    fn default_tab_width() -> u8 {
        4
    }

    /// Serde default: send without confirmation.
    fn default_confirm_send() -> bool {
        false
    }
}

impl Default for InputSettings {
    fn default() -> Self {
        Self {
            multiline: Self::default_multiline(),
            history_size: Self::default_history_size(),
            tab_width: Self::default_tab_width(),
            confirm_send: Self::default_confirm_send(),
        }
    }
}
/// Convenience accessor for an Ollama provider entry, creating a default
/// (local `http://localhost:11434`, no API key) when the entry is missing.
pub fn ensure_ollama_config(config: &mut Config) -> &ProviderConfig {
    let make_default = || ProviderConfig {
        provider_type: "ollama".to_string(),
        base_url: Some("http://localhost:11434".to_string()),
        api_key: None,
        extra: HashMap::new(),
    };
    config
        .providers
        .entry("ollama".to_string())
        .or_insert_with(make_default)
}
/// Calculate the absolute session timeout from configuration.
///
/// A configured value of zero is treated as one minute, and the
/// minutes-to-seconds conversion saturates instead of overflowing on
/// absurdly large values.
pub fn session_timeout(config: &Config) -> Duration {
    let minutes = config.storage.session_timeout_minutes.max(1);
    Duration::from_secs(minutes.saturating_mul(60))
}

View File

@@ -0,0 +1,289 @@
use crate::types::{Conversation, Message};
use crate::Result;
use serde_json::{Number, Value};
use std::collections::{HashMap, VecDeque};
use std::time::{Duration, Instant};
use uuid::Uuid;
/// Message-metadata key: marks an assistant message as currently streaming.
const STREAMING_FLAG: &str = "streaming";
/// Message-metadata key: unix-epoch milliseconds of the most recent chunk.
const LAST_CHUNK_TS: &str = "last_chunk_ts";
/// Message-metadata key: marks placeholder text replaced by the first chunk.
const PLACEHOLDER_FLAG: &str = "placeholder";
/// Manage active and historical conversations, including streaming updates.
pub struct ConversationManager {
    // Conversation currently receiving messages.
    active: Conversation,
    // Archived conversations, most recent first (bounded by `max_history`).
    history: VecDeque<Conversation>,
    // Maps message ids to their index in `active.messages` for O(1) updates.
    message_index: HashMap<Uuid, usize>,
    // Timing bookkeeping for in-flight streaming responses.
    streaming: HashMap<Uuid, StreamingMetadata>,
    // Maximum number of archived conversations to retain (>= 1).
    max_history: usize,
}
/// Timing information for a single in-flight streaming response.
#[derive(Debug, Clone)]
pub struct StreamingMetadata {
    // When the stream was started.
    started: Instant,
    // When the last chunk was appended.
    last_update: Instant,
}
impl ConversationManager {
    /// Create a new conversation manager with a default model and a history
    /// capacity of 32 archived conversations.
    pub fn new(model: impl Into<String>) -> Self {
        Self::with_history_capacity(model, 32)
    }

    /// Create with an explicit history capacity (clamped to at least 1).
    pub fn with_history_capacity(model: impl Into<String>, max_history: usize) -> Self {
        let conversation = Conversation::new(model.into());
        Self {
            active: conversation,
            history: VecDeque::new(),
            message_index: HashMap::new(),
            streaming: HashMap::new(),
            max_history: max_history.max(1),
        }
    }

    /// Access the active conversation.
    pub fn active(&self) -> &Conversation {
        &self.active
    }

    /// Mutable access to the active conversation.
    fn active_mut(&mut self) -> &mut Conversation {
        &mut self.active
    }

    /// Replace the active conversation with a provided one, archiving the
    /// existing conversation first when it contains any messages.
    pub fn load(&mut self, conversation: Conversation) {
        if !self.active.messages.is_empty() {
            self.archive_active();
        }
        // Rebuild the id -> index map for the incoming conversation.
        self.message_index.clear();
        for (idx, message) in conversation.messages.iter().enumerate() {
            self.message_index.insert(message.id, idx);
        }
        // Any in-flight streams belonged to the previous conversation.
        self.stream_reset();
        self.active = conversation;
    }

    /// Start a brand new conversation, archiving the previous one. When
    /// `model` is `None`, the previous conversation's model is reused.
    pub fn start_new(&mut self, model: Option<String>, name: Option<String>) {
        self.archive_active();
        let model = model.unwrap_or_else(|| self.active.model.clone());
        self.active = Conversation::new(model);
        self.active.name = name;
        self.message_index.clear();
        self.stream_reset();
    }

    /// Archive the active conversation into history (no-op when empty).
    pub fn archive_active(&mut self) {
        if self.active.messages.is_empty() {
            return;
        }
        let mut archived = self.active.clone();
        archived.updated_at = std::time::SystemTime::now();
        // Most recent first; evict the oldest entries beyond capacity.
        self.history.push_front(archived);
        while self.history.len() > self.max_history {
            self.history.pop_back();
        }
    }

    /// Iterate archived conversations, most recent first.
    pub fn history(&self) -> impl Iterator<Item = &Conversation> {
        self.history.iter()
    }

    /// Add a user message and return its identifier.
    pub fn push_user_message(&mut self, content: impl Into<String>) -> Uuid {
        let message = Message::user(content.into());
        self.register_message(message)
    }

    /// Add a system message and return its identifier.
    pub fn push_system_message(&mut self, content: impl Into<String>) -> Uuid {
        let message = Message::system(content.into());
        self.register_message(message)
    }

    /// Add an assistant message (non-streaming) and return its identifier.
    pub fn push_assistant_message(&mut self, content: impl Into<String>) -> Uuid {
        let message = Message::assistant(content.into());
        self.register_message(message)
    }

    /// Push an arbitrary message into the active conversation.
    pub fn push_message(&mut self, message: Message) -> Uuid {
        self.register_message(message)
    }

    /// Start tracking a streaming assistant response, returning the id of the
    /// (initially empty) message that chunks should be appended to.
    pub fn start_streaming_response(&mut self) -> Uuid {
        let mut message = Message::assistant(String::new());
        message
            .metadata
            .insert(STREAMING_FLAG.to_string(), Value::Bool(true));
        let id = message.id;
        self.register_message(message);
        self.streaming.insert(
            id,
            StreamingMetadata {
                started: Instant::now(),
                last_update: Instant::now(),
            },
        );
        id
    }

    /// Append streaming content to an assistant message.
    ///
    /// Clears placeholder text on the first real chunk, stamps the last-chunk
    /// time into the message metadata, and finalizes the streaming
    /// bookkeeping when `is_final` is set.
    ///
    /// # Errors
    /// Returns [`crate::Error::Unknown`] when `message_id` is not part of the
    /// active conversation.
    pub fn append_stream_chunk(
        &mut self,
        message_id: Uuid,
        chunk: &str,
        is_final: bool,
    ) -> Result<()> {
        let index = self
            .message_index
            .get(&message_id)
            .copied()
            .ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?;
        let conversation = self.active_mut();
        if let Some(message) = conversation.messages.get_mut(index) {
            let was_placeholder = message
                .metadata
                .remove(PLACEHOLDER_FLAG)
                .and_then(|v| v.as_bool())
                .unwrap_or(false);
            if was_placeholder {
                // Placeholder text must not survive into the real response.
                message.content.clear();
            }
            if !chunk.is_empty() {
                message.content.push_str(chunk);
            }
            message.timestamp = std::time::SystemTime::now();
            let millis = std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap_or_default()
                .as_millis() as u64;
            message.metadata.insert(
                LAST_CHUNK_TS.to_string(),
                Value::Number(Number::from(millis)),
            );
            if is_final {
                message
                    .metadata
                    .insert(STREAMING_FLAG.to_string(), Value::Bool(false));
                self.streaming.remove(&message_id);
            } else if let Some(info) = self.streaming.get_mut(&message_id) {
                info.last_update = Instant::now();
            }
        }
        Ok(())
    }

    /// Set placeholder text for a streaming message (replaced by the first
    /// real chunk).
    ///
    /// # Errors
    /// Returns [`crate::Error::Unknown`] when `message_id` is not part of the
    /// active conversation.
    pub fn set_stream_placeholder(
        &mut self,
        message_id: Uuid,
        text: impl Into<String>,
    ) -> Result<()> {
        let index = self
            .message_index
            .get(&message_id)
            .copied()
            .ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?;
        if let Some(message) = self.active_mut().messages.get_mut(index) {
            message.content = text.into();
            message.timestamp = std::time::SystemTime::now();
            message
                .metadata
                .insert(PLACEHOLDER_FLAG.to_string(), Value::Bool(true));
        }
        Ok(())
    }

    /// Update the active model (used when the user changes model mid-session).
    pub fn set_model(&mut self, model: impl Into<String>) {
        self.active.model = model.into();
        self.active.updated_at = std::time::SystemTime::now();
    }

    /// Read access to the streaming metadata for a message, if any.
    pub fn streaming_metadata(&self, message_id: &Uuid) -> Option<StreamingMetadata> {
        self.streaming.get(message_id).cloned()
    }

    /// Drop bookkeeping for streams idle longer than `idle_timeout`,
    /// returning the ids of the expired messages.
    pub fn expire_stalled_streams(&mut self, idle_timeout: Duration) -> Vec<Uuid> {
        let mut expired = Vec::new();
        self.streaming.retain(|id, meta| {
            // Compare elapsed idle time directly. The previous form,
            // `Instant::now() - idle_timeout`, panics on underflow when the
            // timeout exceeds the process's lifetime so far.
            if meta.last_update.elapsed() > idle_timeout {
                expired.push(*id);
                false
            } else {
                true
            }
        });
        expired
    }

    /// Clear all state: active conversation, history, indexes, and streams.
    pub fn clear(&mut self) {
        self.active.clear();
        self.history.clear();
        self.message_index.clear();
        self.streaming.clear();
    }

    /// Append `message` to the active conversation and index its id.
    fn register_message(&mut self, message: Message) -> Uuid {
        let id = message.id;
        let idx;
        {
            let conversation = self.active_mut();
            idx = conversation.messages.len();
            conversation.messages.push(message);
            conversation.updated_at = std::time::SystemTime::now();
        }
        self.message_index.insert(id, idx);
        id
    }

    /// Forget all in-flight streaming bookkeeping.
    fn stream_reset(&mut self) {
        self.streaming.clear();
    }
}
impl StreamingMetadata {
    /// Time elapsed since the stream was started.
    pub fn elapsed(&self) -> Duration {
        self.started.elapsed()
    }

    /// Time elapsed since the most recent chunk arrived.
    pub fn idle_duration(&self) -> Duration {
        self.last_update.elapsed()
    }

    /// Instant at which streaming began.
    pub fn started_at(&self) -> Instant {
        self.started
    }

    /// Instant of the most recent update.
    pub fn last_update_at(&self) -> Instant {
        self.last_update
    }
}

View File

@@ -0,0 +1,61 @@
use crate::types::Message;
use textwrap::{wrap, Options};
/// Formats messages for display across different clients.
#[derive(Debug, Clone)]
pub struct MessageFormatter {
    // Column at which text is wrapped (clamped to >= 20 in `new`).
    wrap_width: usize,
    // Whether an uppercase role label prefixes the first line of a message.
    show_role_labels: bool,
    // Whether empty messages render as a single blank line (default true).
    preserve_empty_lines: bool,
}
impl MessageFormatter {
    /// Create a formatter; wrap widths below 20 columns are clamped to 20.
    pub fn new(wrap_width: usize, show_role_labels: bool) -> Self {
        Self {
            wrap_width: wrap_width.max(20),
            show_role_labels,
            preserve_empty_lines: true,
        }
    }

    /// Builder-style override for whether empty messages keep a blank line.
    pub fn with_preserve_empty(mut self, preserve: bool) -> Self {
        self.preserve_empty_lines = preserve;
        self
    }

    /// Render a message into display-ready lines wrapped to the configured
    /// width, optionally prefixed with an uppercase role label.
    pub fn format_message(&self, message: &Message) -> Vec<String> {
        let mut content = message.content.trim_end().to_string();
        if content.is_empty() && self.preserve_empty_lines {
            // Keep a visible (blank) line for empty messages.
            content.push(' ');
        }
        let options = Options::new(self.wrap_width)
            .break_words(true)
            .word_separator(textwrap::WordSeparator::UnicodeBreakProperties);
        let wrapped = wrap(&content, &options);
        if !self.show_role_labels {
            return wrapped.into_iter().map(|line| line.into_owned()).collect();
        }
        let label = format!("{}:", message.role.to_string().to_uppercase());
        let mut lines = Vec::with_capacity(wrapped.len());
        if let Some((first, rest)) = wrapped.split_first() {
            // First visual line carries the label; continuation lines are
            // padded so the text columns align.
            lines.push(format!("{label} {first}"));
            let pad = " ".repeat(label.len());
            for line in rest {
                lines.push(format!("{pad} {line}"));
            }
        } else {
            // No wrapped content at all: emit the bare label.
            lines.push(label);
        }
        lines
    }
}

View File

@@ -0,0 +1,217 @@
use std::collections::VecDeque;
/// Text input buffer with history and cursor management.
///
/// The cursor is a byte offset into `buffer` and is always kept on a UTF-8
/// character boundary.
#[derive(Debug, Clone)]
pub struct InputBuffer {
    // Current (uncommitted) text.
    buffer: String,
    // Byte offset of the cursor; always a char boundary.
    cursor: usize,
    // Committed entries, most recent first (bounded by `max_history`).
    history: VecDeque<String>,
    // Position while navigating history; `None` while editing a fresh draft.
    history_index: Option<usize>,
    // Maximum retained history entries (>= 1).
    max_history: usize,
    /// Whether multi-line input is enabled (interpreted by the frontend).
    pub multiline: bool,
    // Number of spaces inserted per tab press (>= 1).
    tab_width: u8,
}
impl InputBuffer {
    /// Create a new input buffer. `max_history` and `tab_width` are clamped
    /// to at least 1.
    pub fn new(max_history: usize, multiline: bool, tab_width: u8) -> Self {
        Self {
            buffer: String::new(),
            cursor: 0,
            history: VecDeque::with_capacity(max_history.max(1)),
            history_index: None,
            max_history: max_history.max(1),
            multiline,
            tab_width: tab_width.max(1),
        }
    }

    /// Get the current text.
    pub fn text(&self) -> &str {
        &self.buffer
    }

    /// Current cursor position as a byte offset.
    pub fn cursor(&self) -> usize {
        self.cursor
    }

    /// Replace buffer contents, placing the cursor at the end and leaving
    /// history-navigation mode.
    pub fn set_text(&mut self, text: impl Into<String>) {
        self.buffer = text.into();
        self.cursor = self.buffer.len();
        self.history_index = None;
    }

    /// Clear buffer, reset cursor, and leave history-navigation mode.
    pub fn clear(&mut self) {
        self.buffer.clear();
        self.cursor = 0;
        self.history_index = None;
    }

    /// Insert a character at the cursor position (a tab expands to spaces).
    pub fn insert_char(&mut self, ch: char) {
        if ch == '\t' {
            self.insert_tab();
            return;
        }
        self.buffer.insert(self.cursor, ch);
        self.cursor += ch.len_utf8();
    }

    /// Insert text at the cursor, advancing it past the inserted bytes.
    pub fn insert_text(&mut self, text: &str) {
        self.buffer.insert_str(self.cursor, text);
        self.cursor += text.len();
    }

    /// Insert `tab_width` spaces representing a tab.
    pub fn insert_tab(&mut self) {
        let spaces = " ".repeat(self.tab_width as usize);
        self.insert_text(&spaces);
    }

    /// Remove the character before the cursor (no-op at the start).
    pub fn backspace(&mut self) {
        if self.cursor == 0 {
            return;
        }
        let prev_index = prev_char_boundary(&self.buffer, self.cursor);
        self.buffer.drain(prev_index..self.cursor);
        self.cursor = prev_index;
    }

    /// Remove the character at the cursor (no-op at the end).
    pub fn delete(&mut self) {
        if self.cursor >= self.buffer.len() {
            return;
        }
        let next_index = next_char_boundary(&self.buffer, self.cursor);
        self.buffer.drain(self.cursor..next_index);
    }

    /// Move the cursor left by one character.
    pub fn move_left(&mut self) {
        if self.cursor == 0 {
            return;
        }
        self.cursor = prev_char_boundary(&self.buffer, self.cursor);
    }

    /// Move the cursor right by one character.
    pub fn move_right(&mut self) {
        if self.cursor >= self.buffer.len() {
            return;
        }
        self.cursor = next_char_boundary(&self.buffer, self.cursor);
    }

    /// Move the cursor to the start of the buffer.
    pub fn move_home(&mut self) {
        self.cursor = 0;
    }

    /// Move the cursor to the end of the buffer.
    pub fn move_end(&mut self) {
        self.cursor = self.buffer.len();
    }

    /// Take the current buffer contents, pushing non-blank text into history,
    /// and return the taken text. The buffer is left empty.
    pub fn commit_to_history(&mut self) -> String {
        let text = std::mem::take(&mut self.buffer);
        if !text.trim().is_empty() {
            self.push_history_entry(text.clone());
        }
        self.cursor = 0;
        self.history_index = None;
        text
    }

    /// Navigate to the previous (older) history entry, replacing the buffer.
    pub fn history_previous(&mut self) {
        if self.history.is_empty() {
            return;
        }
        let new_index = match self.history_index {
            Some(idx) if idx + 1 < self.history.len() => idx + 1,
            None => 0,
            // Already at the oldest entry.
            _ => return,
        };
        self.history_index = Some(new_index);
        if let Some(entry) = self.history.get(new_index) {
            self.buffer = entry.clone();
            self.cursor = self.buffer.len();
        }
    }

    /// Navigate to the next (newer) history entry; stepping past the newest
    /// entry clears the buffer and leaves history-navigation mode.
    pub fn history_next(&mut self) {
        if self.history.is_empty() {
            return;
        }
        if let Some(idx) = self.history_index {
            if idx > 0 {
                let new_idx = idx - 1;
                self.history_index = Some(new_idx);
                if let Some(entry) = self.history.get(new_idx) {
                    self.buffer = entry.clone();
                    self.cursor = self.buffer.len();
                }
            } else {
                self.history_index = None;
                self.buffer.clear();
                self.cursor = 0;
            }
        } else {
            self.buffer.clear();
            self.cursor = 0;
        }
    }

    /// Push a new entry into the history buffer, skipping consecutive
    /// duplicates and enforcing the capacity limit.
    pub fn push_history_entry(&mut self, entry: String) {
        if self
            .history
            .front()
            .map(|existing| existing == &entry)
            .unwrap_or(false)
        {
            return;
        }
        self.history.push_front(entry);
        while self.history.len() > self.max_history {
            self.history.pop_back();
        }
    }
}
/// Byte index of the character boundary immediately before `cursor`
/// (0 when the cursor is already at the start).
fn prev_char_boundary(buffer: &str, cursor: usize) -> usize {
    // Walk back at most 3 bytes (the longest possible run of UTF-8
    // continuation bytes) instead of rescanning the whole prefix with
    // char_indices, which made this O(cursor).
    let mut idx = cursor.saturating_sub(1);
    while idx > 0 && !buffer.is_char_boundary(idx) {
        idx -= 1;
    }
    idx
}
/// Byte index of the character boundary immediately after `cursor`
/// (`buffer.len()` when the cursor is at or past the end).
fn next_char_boundary(buffer: &str, cursor: usize) -> usize {
    if cursor >= buffer.len() {
        return buffer.len();
    }
    match buffer[cursor..].chars().next() {
        Some(ch) => cursor + ch.len_utf8(),
        None => buffer.len(),
    }
}

View File

@@ -0,0 +1,58 @@
//! Core traits and types for OWLEN LLM client
//!
//! This crate provides the foundational abstractions for building
//! LLM providers, routers, and MCP (Model Context Protocol) adapters.
pub mod config;
pub mod conversation;
pub mod formatting;
pub mod input;
pub mod model;
pub mod provider;
pub mod router;
pub mod session;
pub mod types;
pub use config::*;
pub use conversation::*;
pub use formatting::*;
pub use input::*;
pub use model::*;
pub use provider::*;
pub use router::*;
pub use session::*;
pub use types::*;
/// Result type used throughout the OWLEN ecosystem
pub type Result<T> = std::result::Result<T, Error>;
/// Core error types for OWLEN
#[derive(thiserror::Error, Debug)]
pub enum Error {
    /// Failure raised by an LLM provider (wraps an `anyhow::Error`)
    #[error("Provider error: {0}")]
    Provider(#[from] anyhow::Error),
    /// Network/transport-level failure
    #[error("Network error: {0}")]
    Network(String),
    /// Authentication or authorization failure
    #[error("Authentication error: {0}")]
    Auth(String),
    /// Invalid or unparsable configuration
    #[error("Configuration error: {0}")]
    Config(String),
    /// Underlying I/O failure
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
    /// Caller-supplied input was rejected
    #[error("Invalid input: {0}")]
    InvalidInput(String),
    /// An operation exceeded its deadline
    #[error("Operation timed out: {0}")]
    Timeout(String),
    /// JSON (de)serialization failure
    #[error("Serialization error: {0}")]
    Serialization(#[from] serde_json::Error),
    /// Catch-all for errors that fit no other variant
    #[error("Unknown error: {0}")]
    Unknown(String),
}

View File

@@ -0,0 +1,84 @@
use crate::types::ModelInfo;
use crate::Result;
use std::future::Future;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;
// Cached model listing plus the instant it was last refreshed.
#[derive(Default, Debug)]
struct ModelCache {
    // Most recently fetched models (empty when never fetched or invalidated).
    models: Vec<ModelInfo>,
    // When the cache was last refreshed; `None` when never refreshed.
    last_refresh: Option<Instant>,
}
/// Caches model listings for improved selection performance
#[derive(Clone, Debug)]
pub struct ModelManager {
    // Shared cache; cloning a ModelManager shares the same underlying cache.
    cache: Arc<RwLock<ModelCache>>,
    // How long a cached listing stays fresh.
    ttl: Duration,
}
impl ModelManager {
    /// Create a new manager with the desired cache TTL.
    pub fn new(ttl: Duration) -> Self {
        let cache = Arc::new(RwLock::new(ModelCache::default()));
        Self { cache, ttl }
    }

    /// Return up-to-date models, consulting the cache first unless
    /// `force_refresh` is set; a stale or empty cache triggers `fetcher`.
    pub async fn get_or_refresh<F, Fut>(
        &self,
        force_refresh: bool,
        fetcher: F,
    ) -> Result<Vec<ModelInfo>>
    where
        F: FnOnce() -> Fut,
        Fut: Future<Output = Result<Vec<ModelInfo>>>,
    {
        if !force_refresh {
            if let Some(models) = self.cached_if_fresh().await {
                return Ok(models);
            }
        }
        let fetched = fetcher().await?;
        let mut guard = self.cache.write().await;
        guard.models = fetched.clone();
        guard.last_refresh = Some(Instant::now());
        Ok(fetched)
    }

    /// Snapshot of the cached models without refreshing.
    pub async fn cached(&self) -> Vec<ModelInfo> {
        self.cache.read().await.models.clone()
    }

    /// Drop cached models, forcing the next call to refresh.
    pub async fn invalidate(&self) {
        let mut guard = self.cache.write().await;
        guard.models.clear();
        guard.last_refresh = None;
    }

    /// Look up a model by id or name in the cache.
    pub async fn select(&self, identifier: &str) -> Option<ModelInfo> {
        let guard = self.cache.read().await;
        guard
            .models
            .iter()
            .find(|m| m.id == identifier || m.name == identifier)
            .cloned()
    }

    /// Cached models when the cache is both non-empty and within its TTL.
    async fn cached_if_fresh(&self) -> Option<Vec<ModelInfo>> {
        let guard = self.cache.read().await;
        let fresh = matches!(guard.last_refresh, Some(ts) if ts.elapsed() < self.ttl);
        if fresh && !guard.models.is_empty() {
            Some(guard.models.clone())
        } else {
            None
        }
    }
}

View File

@@ -0,0 +1,105 @@
//! Provider trait and related types
use crate::{types::*, Result};
use futures::Stream;
use std::pin::Pin;
use std::sync::Arc;
/// A stream of chat responses, yielded incrementally as chunks arrive
pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
/// Trait for LLM providers (Ollama, OpenAI, Anthropic, etc.)
#[async_trait::async_trait]
pub trait Provider: Send + Sync {
    /// Get the name of this provider (used as the registry key)
    fn name(&self) -> &str;
    /// List available models from this provider
    async fn list_models(&self) -> Result<Vec<ModelInfo>>;
    /// Send a chat completion request and wait for the full response
    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse>;
    /// Send a streaming chat completion request
    async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream>;
    /// Check if the provider is available/healthy
    async fn health_check(&self) -> Result<()>;
    /// Get provider-specific configuration schema (defaults to an empty object)
    fn config_schema(&self) -> serde_json::Value {
        serde_json::json!({})
    }
}
/// Configuration for a provider
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProviderConfig {
    /// Provider type identifier (e.g. "ollama")
    pub provider_type: String,
    /// Base URL for API calls
    pub base_url: Option<String>,
    /// API key or token, when the provider requires one
    pub api_key: Option<String>,
    /// Additional provider-specific configuration; flattened so unknown TOML
    /// keys in the provider section land here instead of failing to parse
    #[serde(flatten)]
    pub extra: std::collections::HashMap<String, serde_json::Value>,
}
/// A registry of providers keyed by provider name.
pub struct ProviderRegistry {
    providers: std::collections::HashMap<String, Arc<dyn Provider>>,
}
impl ProviderRegistry {
    /// Create an empty registry.
    pub fn new() -> Self {
        Self {
            providers: std::collections::HashMap::new(),
        }
    }

    /// Register a provider by value.
    pub fn register<P: Provider + 'static>(&mut self, provider: P) {
        self.register_arc(Arc::new(provider));
    }

    /// Register an already reference-counted provider, keyed by its `name()`.
    pub fn register_arc(&mut self, provider: Arc<dyn Provider>) {
        let key = provider.name().to_string();
        self.providers.insert(key, provider);
    }

    /// Look up a provider by name.
    pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> {
        self.providers.get(name).cloned()
    }

    /// Names of all registered providers.
    pub fn list_providers(&self) -> Vec<String> {
        self.providers.keys().cloned().collect()
    }

    /// Collect models from every registered provider, skipping providers
    /// whose listing fails (the failure is reported to stderr).
    pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> {
        let mut all_models = Vec::new();
        for provider in self.providers.values() {
            match provider.list_models().await {
                Ok(mut models) => all_models.append(&mut models),
                Err(err) => {
                    // Best-effort: a single failing provider must not hide
                    // the models of the others.
                    eprintln!("Failed to get models from {}: {}", provider.name(), err);
                }
            }
        }
        Ok(all_models)
    }
}
impl Default for ProviderRegistry {
    fn default() -> Self {
        Self::new()
    }
}

View File

@@ -0,0 +1,155 @@
//! Router for managing multiple providers and routing requests
use crate::{provider::*, types::*, Result};
use std::sync::Arc;
/// A router that can distribute requests across multiple providers
pub struct Router {
    // Registered providers, keyed by name.
    registry: ProviderRegistry,
    // Rules kept sorted by descending priority (see `add_routing_rule`).
    routing_rules: Vec<RoutingRule>,
    // Provider used when no rule matches.
    default_provider: Option<String>,
}
/// A rule for routing requests to specific providers
#[derive(Debug, Clone)]
pub struct RoutingRule {
    /// Pattern to match against model names (`*`, `prefix*`, `*suffix`, or exact)
    pub model_pattern: String,
    /// Provider to route to
    pub provider: String,
    /// Priority (higher numbers are checked first)
    pub priority: u32,
}
impl Router {
    /// Create a new router with no providers, rules, or default.
    pub fn new() -> Self {
        Self {
            registry: ProviderRegistry::new(),
            routing_rules: Vec::new(),
            default_provider: None,
        }
    }

    /// Register a provider with the router.
    pub fn register_provider<P: Provider + 'static>(&mut self, provider: P) {
        self.registry.register(provider);
    }

    /// Set the default provider used when no routing rule matches.
    pub fn set_default_provider(&mut self, provider_name: String) {
        self.default_provider = Some(provider_name);
    }

    /// Add a routing rule, keeping the rule list sorted by descending priority.
    pub fn add_routing_rule(&mut self, rule: RoutingRule) {
        self.routing_rules.push(rule);
        self.routing_rules
            .sort_by(|a, b| b.priority.cmp(&a.priority));
    }

    /// Route a chat request to the appropriate provider.
    pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> {
        let provider = self.find_provider_for_model(&request.model)?;
        provider.chat(request).await
    }

    /// Route a streaming chat request to the appropriate provider.
    pub async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> {
        let provider = self.find_provider_for_model(&request.model)?;
        provider.chat_stream(request).await
    }

    /// List all available models from all providers.
    pub async fn list_models(&self) -> Result<Vec<ModelInfo>> {
        self.registry.list_all_models().await
    }

    /// Find the provider for a model: first matching rule, then the default
    /// provider, then any registered provider.
    ///
    /// # Errors
    /// Returns a provider error when no provider is registered at all.
    fn find_provider_for_model(&self, model: &str) -> Result<Arc<dyn Provider>> {
        // Rules are pre-sorted by descending priority in add_routing_rule.
        for rule in &self.routing_rules {
            if self.matches_pattern(&rule.model_pattern, model) {
                if let Some(provider) = self.registry.get(&rule.provider) {
                    return Ok(provider);
                }
            }
        }
        // Fall back to the configured default provider.
        if let Some(default) = &self.default_provider {
            if let Some(provider) = self.registry.get(default) {
                return Ok(provider);
            }
        }
        // Last resort: return an arbitrary registered provider. NOTE: this
        // does not verify that the provider actually serves `model`; it only
        // guarantees some provider is returned when at least one exists.
        for provider_name in self.registry.list_providers() {
            if let Some(provider) = self.registry.get(&provider_name) {
                return Ok(provider);
            }
        }
        Err(crate::Error::Provider(anyhow::anyhow!(
            "No provider found for model: {}",
            model
        )))
    }

    /// Glob-lite matching: `*` matches everything, `prefix*` and `*suffix`
    /// match by prefix/suffix, and anything else requires exact equality.
    fn matches_pattern(&self, pattern: &str, model: &str) -> bool {
        if pattern == "*" {
            return true;
        }
        if let Some(prefix) = pattern.strip_suffix('*') {
            return model.starts_with(prefix);
        }
        if let Some(suffix) = pattern.strip_prefix('*') {
            return model.ends_with(suffix);
        }
        pattern == model
    }

    /// Current routing rules (sorted by descending priority).
    pub fn get_routing_rules(&self) -> &[RoutingRule] {
        &self.routing_rules
    }

    /// Name of the default provider, if configured.
    pub fn get_default_provider(&self) -> Option<&str> {
        self.default_provider.as_deref()
    }
}

impl Default for Router {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers all four pattern forms: wildcard-all, `prefix*`, `*suffix`,
    // and exact equality, plus the corresponding negative cases.
    #[test]
    fn test_pattern_matching() {
        let router = Router::new();
        assert!(router.matches_pattern("*", "any-model"));
        assert!(router.matches_pattern("gpt*", "gpt-4"));
        assert!(router.matches_pattern("gpt*", "gpt-3.5-turbo"));
        assert!(!router.matches_pattern("gpt*", "claude-3"));
        assert!(router.matches_pattern("*:latest", "llama2:latest"));
        assert!(router.matches_pattern("exact-match", "exact-match"));
        assert!(!router.matches_pattern("exact-match", "different-model"));
    }
}

View File

@@ -0,0 +1,204 @@
use crate::config::Config;
use crate::conversation::ConversationManager;
use crate::formatting::MessageFormatter;
use crate::input::InputBuffer;
use crate::model::ModelManager;
use crate::provider::{ChatStream, Provider};
use crate::types::{ChatParameters, ChatRequest, ChatResponse, Conversation, ModelInfo};
use crate::Result;
use std::sync::Arc;
use uuid::Uuid;
/// Outcome of submitting a chat request
pub enum SessionOutcome {
    /// Immediate response received (non-streaming)
    Complete(ChatResponse),
    /// Streaming response where chunks will arrive asynchronously
    Streaming {
        // Presumably the id of the placeholder assistant message created by
        // `ConversationManager::start_streaming_response`, to which chunks are
        // later appended — confirm against `SessionController::send_message`.
        response_id: Uuid,
        // Provider-supplied stream that yields response chunks.
        stream: ChatStream,
    },
}
/// High-level controller encapsulating session state and provider interactions
pub struct SessionController {
    // Backend used to list models and execute chat / chat-stream requests.
    provider: Arc<dyn Provider>,
    // Active conversation plus a bounded history of saved sessions
    // (capacity comes from `config.storage.max_saved_sessions`).
    conversation: ConversationManager,
    // Cached model list; refresh interval comes from
    // `config.general.model_cache_ttl()`.
    model_manager: ModelManager,
    // Editable user-input state (history size, multiline, tab width).
    input_buffer: InputBuffer,
    // Renders messages for display (wrap column, role labels).
    formatter: MessageFormatter,
    // Full application configuration; mutated when the model changes
    // (see `set_model`).
    config: Config,
}
impl SessionController {
    /// Create a new controller with the given provider and configuration.
    ///
    /// Every sub-component is sized/configured from `config`; when no default
    /// model is configured, the placeholder id "ollama/default" is used.
    pub fn new(provider: Arc<dyn Provider>, config: Config) -> Self {
        // Fall back to a placeholder model id when none is configured.
        let model = config
            .general
            .default_model
            .clone()
            .unwrap_or_else(|| "ollama/default".to_string());
        let conversation =
            ConversationManager::with_history_capacity(model, config.storage.max_saved_sessions);
        // NOTE(review): `ui.word_wrap` is fed into `with_preserve_empty` —
        // the setting and parameter names don't obviously line up; confirm
        // this mapping is intended.
        let formatter =
            MessageFormatter::new(config.ui.wrap_column as usize, config.ui.show_role_labels)
                .with_preserve_empty(config.ui.word_wrap);
        let input_buffer = InputBuffer::new(
            config.input.history_size,
            config.input.multiline,
            config.input.tab_width,
        );
        // Model list cache; TTL comes from the general settings.
        let model_manager = ModelManager::new(config.general.model_cache_ttl());
        Self {
            provider,
            conversation,
            model_manager,
            input_buffer,
            formatter,
            config,
        }
    }

    /// Access the active conversation
    pub fn conversation(&self) -> &Conversation {
        self.conversation.active()
    }

    /// Mutable access to the conversation manager
    pub fn conversation_mut(&mut self) -> &mut ConversationManager {
        &mut self.conversation
    }

    /// Access input buffer
    pub fn input_buffer(&self) -> &InputBuffer {
        &self.input_buffer
    }

    /// Mutable input buffer access
    pub fn input_buffer_mut(&mut self) -> &mut InputBuffer {
        &mut self.input_buffer
    }

    /// Formatter for rendering messages
    pub fn formatter(&self) -> &MessageFormatter {
        &self.formatter
    }

    /// Access configuration
    pub fn config(&self) -> &Config {
        &self.config
    }

    /// Mutable configuration access
    pub fn config_mut(&mut self) -> &mut Config {
        &mut self.config
    }

    /// Currently selected model identifier (the active conversation's model).
    pub fn selected_model(&self) -> &str {
        &self.conversation.active().model
    }

    /// Change current model for upcoming requests.
    ///
    /// Also records the choice as `general.default_model` so it persists if
    /// the configuration is later saved.
    pub fn set_model(&mut self, model: String) {
        self.conversation.set_model(model.clone());
        self.config.general.default_model = Some(model);
    }

    /// Retrieve cached models, refreshing from provider as needed.
    ///
    /// `force_refresh` bypasses the cache and always queries the provider.
    pub async fn models(&self, force_refresh: bool) -> Result<Vec<ModelInfo>> {
        self.model_manager
            .get_or_refresh(force_refresh, || async {
                self.provider.list_models().await
            })
            .await
    }

    /// Attempt to select the configured default model from cached models.
    ///
    /// A configured default is applied only when it appears in `models`
    /// (matched by id or name); with no configured default, the first listed
    /// model is selected instead.
    pub fn ensure_default_model(&mut self, models: &[ModelInfo]) {
        if let Some(default) = self.config.general.default_model.clone() {
            if models.iter().any(|m| m.id == default || m.name == default) {
                self.set_model(default);
            }
        } else if let Some(model) = models.first() {
            self.set_model(model.id.clone());
        }
    }

    /// Submit a user message; optionally stream the response.
    ///
    /// The user message is appended to the conversation *before* dispatch, so
    /// it is part of the request snapshot. Streaming is used when either the
    /// caller requests it via `parameters` or `general.enable_streaming` is
    /// set. On provider failure an error message is recorded in the
    /// conversation transcript *and* the error is returned to the caller.
    pub async fn send_message(
        &mut self,
        content: String,
        mut parameters: ChatParameters,
    ) -> Result<SessionOutcome> {
        // Config can force streaming even when the caller did not ask for it.
        let streaming = parameters.stream || self.config.general.enable_streaming;
        parameters.stream = streaming;
        self.conversation.push_user_message(content);
        // Snapshot the full conversation (including the new user message).
        let request = ChatRequest {
            model: self.conversation.active().model.clone(),
            messages: self.conversation.active().messages.clone(),
            parameters,
        };
        if streaming {
            match self.provider.chat_stream(request).await {
                Ok(stream) => {
                    // Placeholder assistant message that chunks will be
                    // appended to via `apply_stream_chunk`.
                    let response_id = self.conversation.start_streaming_response();
                    Ok(SessionOutcome::Streaming {
                        response_id,
                        stream,
                    })
                }
                Err(err) => {
                    // Surface the failure in the transcript as well as to the
                    // caller.
                    self.conversation
                        .push_assistant_message(format!("Error starting stream: {}", err));
                    Err(err)
                }
            }
        } else {
            match self.provider.chat(request).await {
                Ok(response) => {
                    self.conversation.push_message(response.message.clone());
                    Ok(SessionOutcome::Complete(response))
                }
                Err(err) => {
                    self.conversation
                        .push_assistant_message(format!("Error: {}", err));
                    Err(err)
                }
            }
        }
    }

    /// Mark a streaming response message with placeholder content
    pub fn mark_stream_placeholder(&mut self, message_id: Uuid, text: &str) -> Result<()> {
        self.conversation
            .set_stream_placeholder(message_id, text.to_string())
    }

    /// Apply streaming chunk to the conversation
    pub fn apply_stream_chunk(&mut self, message_id: Uuid, chunk: &ChatResponse) -> Result<()> {
        self.conversation
            .append_stream_chunk(message_id, &chunk.message.content, chunk.is_final)
    }

    /// Access conversation history (cloned snapshots of saved conversations).
    pub fn history(&self) -> Vec<Conversation> {
        self.conversation.history().cloned().collect()
    }

    /// Start a new conversation optionally targeting a specific model
    pub fn start_new_conversation(&mut self, model: Option<String>, name: Option<String>) {
        self.conversation.start_new(model, name);
    }

    /// Clear current conversation messages
    pub fn clear(&mut self) {
        self.conversation.clear();
    }
}

View File

@@ -0,0 +1,203 @@
//! Core types used across OWLEN
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt;
use uuid::Uuid;
/// A message in a conversation
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Message {
    /// Unique identifier for this message
    pub id: Uuid,
    /// Role of the message sender (user, assistant, system)
    pub role: Role,
    /// Content of the message
    pub content: String,
    /// Optional metadata
    // NOTE(review): no `#[serde(default)]` here — deserialization requires
    // `metadata` (and `timestamp`) to be present; confirm all payload sources
    // include them.
    pub metadata: HashMap<String, serde_json::Value>,
    /// Timestamp when the message was created
    pub timestamp: std::time::SystemTime,
}
/// Role of a message sender
//
// `Copy`, `Eq`, and `Hash` are derived in addition to the original traits:
// the enum is fieldless, so roles can be copied, compared for full equality,
// and used as map/set keys without cloning. Serialized form is unchanged
// (lowercase variant names).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    /// Message from the user
    User,
    /// Message from the AI assistant
    Assistant,
    /// System message (prompts, context, etc.)
    System,
}
impl fmt::Display for Role {
    /// Writes the lowercase name of the role ("user", "assistant", "system"),
    /// matching the serde wire format.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Role::User => write!(f, "user"),
            Role::Assistant => write!(f, "assistant"),
            Role::System => write!(f, "system"),
        }
    }
}
/// A conversation containing multiple messages
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conversation {
    /// Unique identifier for this conversation
    pub id: Uuid,
    /// Optional name/title for the conversation
    pub name: Option<String>,
    /// Messages in chronological order
    pub messages: Vec<Message>,
    /// Model used for this conversation
    pub model: String,
    /// When the conversation was created
    pub created_at: std::time::SystemTime,
    /// When the conversation was last updated
    // Bumped by `add_message` and `clear`; see `impl Conversation` below.
    pub updated_at: std::time::SystemTime,
}
/// Configuration for a chat completion request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatRequest {
    /// The model to use for completion
    pub model: String,
    /// The conversation messages (full history, in chronological order)
    pub messages: Vec<Message>,
    /// Optional parameters for the request
    pub parameters: ChatParameters,
}
/// Parameters for chat completion
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatParameters {
    /// Temperature for randomness (0.0 to 2.0)
    // Omitted from the serialized payload when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Maximum tokens to generate
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Whether to stream the response
    // Defaults to `false` when absent during deserialization.
    #[serde(default)]
    pub stream: bool,
    /// Additional provider-specific parameters
    // `flatten` merges these keys directly into the surrounding object
    // instead of nesting them under an "extra" key.
    #[serde(flatten)]
    pub extra: HashMap<String, serde_json::Value>,
}
impl Default for ChatParameters {
fn default() -> Self {
Self {
temperature: None,
max_tokens: None,
stream: false,
extra: HashMap::new(),
}
}
}
/// Response from a chat completion request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatResponse {
    /// The generated message
    // For streaming, presumably each chunk carries an incremental `content`
    // fragment (see `SessionController::apply_stream_chunk`) — confirm
    // against the provider implementations.
    pub message: Message,
    /// Token usage information
    pub usage: Option<TokenUsage>,
    /// Whether this is a streaming chunk
    #[serde(default)]
    pub is_streaming: bool,
    /// Whether this is the final chunk in a stream
    #[serde(default)]
    pub is_final: bool,
}
/// Token usage information
//
// Plain-old-data counters, so `Copy`, `PartialEq`/`Eq`, and `Default`
// (all-zero usage) are derived in addition to the original traits; the
// serialized form is unchanged.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
pub struct TokenUsage {
    /// Tokens in the prompt
    pub prompt_tokens: u32,
    /// Tokens in the completion
    pub completion_tokens: u32,
    /// Total tokens used
    pub total_tokens: u32,
}
/// Information about an available model
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    /// Model identifier
    pub id: String,
    /// Human-readable name
    pub name: String,
    /// Model description
    pub description: Option<String>,
    /// Provider that hosts this model
    pub provider: String,
    /// Context window size
    // Presumably measured in tokens — confirm against provider docs.
    pub context_window: Option<u32>,
    /// Additional capabilities
    pub capabilities: Vec<String>,
}
impl Message {
    /// Create a new message with a fresh random id and the current time.
    ///
    /// `content` accepts anything convertible into `String` (e.g. `&str`),
    /// which is backward compatible with existing `String` callers.
    pub fn new(role: Role, content: impl Into<String>) -> Self {
        Self {
            id: Uuid::new_v4(),
            role,
            content: content.into(),
            metadata: HashMap::new(),
            timestamp: std::time::SystemTime::now(),
        }
    }

    /// Create a user message
    pub fn user(content: impl Into<String>) -> Self {
        Self::new(Role::User, content)
    }

    /// Create an assistant message
    pub fn assistant(content: impl Into<String>) -> Self {
        Self::new(Role::Assistant, content)
    }

    /// Create a system message
    pub fn system(content: impl Into<String>) -> Self {
        Self::new(Role::System, content)
    }
}
impl Conversation {
    /// Create a new, empty conversation targeting the given model.
    ///
    /// `model` accepts anything convertible into `String` (e.g. `&str`),
    /// which is backward compatible with existing `String` callers.
    /// `created_at` and `updated_at` start out equal.
    pub fn new(model: impl Into<String>) -> Self {
        let now = std::time::SystemTime::now();
        Self {
            id: Uuid::new_v4(),
            name: None,
            messages: Vec::new(),
            model: model.into(),
            created_at: now,
            updated_at: now,
        }
    }

    /// Add a message to the conversation and bump `updated_at`.
    pub fn add_message(&mut self, message: Message) {
        self.messages.push(message);
        self.updated_at = std::time::SystemTime::now();
    }

    /// Get the last message in the conversation, if any.
    pub fn last_message(&self) -> Option<&Message> {
        self.messages.last()
    }

    /// Remove all messages and bump `updated_at`.
    pub fn clear(&mut self) {
        self.messages.clear();
        self.updated_at = std::time::SystemTime::now();
    }
}