feat(model): add rich model metadata, caching, and UI panel for inspection

Introduce `DetailedModelInfo` and `ModelInfoRetrievalError` structs for richer model data.
Add `ModelDetailsCache` with TTL‑based storage and async API for get/insert/invalidate.
Extend `OllamaProvider` to fetch, cache, refresh, and list detailed model info.
Expose model-detail methods on `SessionController` for on-demand and bulk retrieval.
Add `ModelInfoPanel` widget to display detailed info with scrolling support.
Update TUI rendering to show the panel, compute viewport height, and render model selector labels with parameters, size, and context length.
Adjust imports and module re‑exports accordingly.
2025-10-12 09:45:16 +02:00
parent fab63d224b
commit c2f5ccea3b
10 changed files with 1168 additions and 14 deletions
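As a rough illustration of the new inspection API introduced by this commit (not part of the diff itself), the sketch below shows how a caller holding a `SessionController` might fetch and print details for one model. The `controller` value, the helper name `print_model_details`, the model name "llama3:latest", and the `owlen_core::Result` alias are placeholders/assumptions, not code from this change.

use owlen_core::model::DetailedModelInfo;
use owlen_core::session::SessionController;

// Sketch only: `controller` is an already-constructed SessionController backed by an
// OllamaProvider; for other providers `model_details` returns Error::NotImplemented.
async fn print_model_details(controller: &SessionController) -> owlen_core::Result<()> {
    // `false` serves from the TTL-based details cache when fresh; `true` forces a refetch.
    let info: DetailedModelInfo = controller.model_details("llama3:latest", false).await?;
    println!(
        "{}: ctx={:?} quant={:?} size={:?} bytes",
        info.name, info.context_length, info.quantization, info.size
    );
    Ok(())
}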

View File

@@ -20,6 +20,7 @@ use owlen_core::{
};
use owlen_tui::tui_controller::{TuiController, TuiRequest};
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
use std::any::Any;
use std::borrow::Cow;
use std::io;
use std::sync::Arc;
@@ -346,6 +347,10 @@ impl Provider for OfflineProvider {
"offline provider cannot reach any backing models" "offline provider cannot reach any backing models"
))) )))
} }
fn as_any(&self) -> &(dyn Any + Send + Sync) {
self
}
} }
#[tokio::main(flavor = "multi_thread")] #[tokio::main(flavor = "multi_thread")]

View File

@@ -1,5 +1,10 @@
pub mod details;
pub use details::{DetailedModelInfo, ModelInfoRetrievalError};
use crate::types::ModelInfo;
use crate::Result;
use std::collections::HashMap;
use std::future::Future;
use std::sync::Arc;
use std::time::{Duration, Instant};
@@ -82,3 +87,125 @@ impl ModelManager {
}
}
}
#[derive(Default, Debug)]
struct ModelDetailsCacheInner {
by_key: HashMap<String, DetailedModelInfo>,
name_to_key: HashMap<String, String>,
fetched_at: HashMap<String, Instant>,
}
/// Cache for rich model details, indexed by digest when available.
#[derive(Clone, Debug)]
pub struct ModelDetailsCache {
inner: Arc<RwLock<ModelDetailsCacheInner>>,
ttl: Duration,
}
impl ModelDetailsCache {
/// Create a new details cache with the provided TTL.
pub fn new(ttl: Duration) -> Self {
Self {
inner: Arc::new(RwLock::new(ModelDetailsCacheInner::default())),
ttl,
}
}
/// Try to read cached details for the provided model name.
pub async fn get(&self, name: &str) -> Option<DetailedModelInfo> {
let mut inner = self.inner.write().await;
let key = inner.name_to_key.get(name).cloned()?;
let stale = inner
.fetched_at
.get(&key)
.is_some_and(|ts| ts.elapsed() >= self.ttl);
if stale {
inner.by_key.remove(&key);
inner.name_to_key.remove(name);
inner.fetched_at.remove(&key);
return None;
}
inner.by_key.get(&key).cloned()
}
/// Cache the provided details, overwriting existing entries.
pub async fn insert(&self, info: DetailedModelInfo) {
let key = info.digest.clone().unwrap_or_else(|| info.name.clone());
let mut inner = self.inner.write().await;
// Remove prior mappings for this model name (possibly different digest).
if let Some(previous_key) = inner.name_to_key.get(&info.name).cloned() {
if previous_key != key {
inner.by_key.remove(&previous_key);
inner.fetched_at.remove(&previous_key);
}
}
inner.fetched_at.insert(key.clone(), Instant::now());
inner.name_to_key.insert(info.name.clone(), key.clone());
inner.by_key.insert(key, info);
}
/// Remove a specific model from the cache.
pub async fn invalidate(&self, name: &str) {
let mut inner = self.inner.write().await;
if let Some(key) = inner.name_to_key.remove(name) {
inner.by_key.remove(&key);
inner.fetched_at.remove(&key);
}
}
/// Clear the entire cache.
pub async fn invalidate_all(&self) {
let mut inner = self.inner.write().await;
inner.by_key.clear();
inner.name_to_key.clear();
inner.fetched_at.clear();
}
/// Return all cached values regardless of freshness.
pub async fn cached(&self) -> Vec<DetailedModelInfo> {
let inner = self.inner.read().await;
inner.by_key.values().cloned().collect()
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::time::Duration;
use tokio::time::sleep;
fn sample_details(name: &str) -> DetailedModelInfo {
DetailedModelInfo {
name: name.to_string(),
..Default::default()
}
}
#[tokio::test]
async fn model_details_cache_returns_cached_entry() {
let cache = ModelDetailsCache::new(Duration::from_millis(50));
let info = sample_details("llama");
cache.insert(info.clone()).await;
let cached = cache.get("llama").await;
assert!(cached.is_some());
assert_eq!(cached.unwrap().name, "llama");
}
#[tokio::test]
async fn model_details_cache_expires_based_on_ttl() {
let cache = ModelDetailsCache::new(Duration::from_millis(10));
cache.insert(sample_details("phi")).await;
sleep(Duration::from_millis(30)).await;
assert!(cache.get("phi").await.is_none());
}
#[tokio::test]
async fn model_details_cache_invalidate_removes_entry() {
let cache = ModelDetailsCache::new(Duration::from_secs(1));
cache.insert(sample_details("mistral")).await;
cache.invalidate("mistral").await;
assert!(cache.get("mistral").await.is_none());
}
}

View File

@@ -0,0 +1,105 @@
//! Detailed model metadata for provider inspection features.
//!
//! These types capture richer information about locally available models
//! than the lightweight [`crate::types::ModelInfo`] listing and back the
//! higher-level inspection UI exposed in the Owlen TUI.
use serde::{Deserialize, Serialize};
/// Rich metadata about an Ollama model.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct DetailedModelInfo {
/// Canonical model name (including tag).
pub name: String,
/// Reported architecture or model format.
#[serde(skip_serializing_if = "Option::is_none")]
pub architecture: Option<String>,
/// Human-readable parameter / quantisation summary.
#[serde(skip_serializing_if = "Option::is_none")]
pub parameters: Option<String>,
/// Context window length, if provided.
#[serde(skip_serializing_if = "Option::is_none")]
pub context_length: Option<u64>,
/// Embedding vector length for embedding-capable models.
#[serde(skip_serializing_if = "Option::is_none")]
pub embedding_length: Option<u64>,
/// Quantisation level (e.g., Q4_0, Q5_K_M).
#[serde(skip_serializing_if = "Option::is_none")]
pub quantization: Option<String>,
/// Primary family identifier (e.g., llama3).
#[serde(skip_serializing_if = "Option::is_none")]
pub family: Option<String>,
/// Additional family tags reported by Ollama.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub families: Vec<String>,
/// Verbose parameter size description (e.g., 70B parameters).
#[serde(skip_serializing_if = "Option::is_none")]
pub parameter_size: Option<String>,
/// Default prompt template packaged with the model.
#[serde(skip_serializing_if = "Option::is_none")]
pub template: Option<String>,
/// Default system prompt packaged with the model.
#[serde(skip_serializing_if = "Option::is_none")]
pub system: Option<String>,
/// License string provided by the model.
#[serde(skip_serializing_if = "Option::is_none")]
pub license: Option<String>,
/// Raw modelfile contents (if available).
#[serde(skip_serializing_if = "Option::is_none")]
pub modelfile: Option<String>,
/// Modification timestamp (ISO-8601) if reported.
#[serde(skip_serializing_if = "Option::is_none")]
pub modified_at: Option<String>,
/// Approximate model size in bytes.
#[serde(skip_serializing_if = "Option::is_none")]
pub size: Option<u64>,
/// Digest / checksum used by Ollama (sha256).
#[serde(skip_serializing_if = "Option::is_none")]
pub digest: Option<String>,
}
impl DetailedModelInfo {
/// Convenience helper that normalises empty strings to `None`.
pub fn with_normalised_strings(mut self) -> Self {
if self.architecture.as_ref().is_some_and(String::is_empty) {
self.architecture = None;
}
if self.parameters.as_ref().is_some_and(String::is_empty) {
self.parameters = None;
}
if self.quantization.as_ref().is_some_and(String::is_empty) {
self.quantization = None;
}
if self.family.as_ref().is_some_and(String::is_empty) {
self.family = None;
}
if self.parameter_size.as_ref().is_some_and(String::is_empty) {
self.parameter_size = None;
}
if self.template.as_ref().is_some_and(String::is_empty) {
self.template = None;
}
if self.system.as_ref().is_some_and(String::is_empty) {
self.system = None;
}
if self.license.as_ref().is_some_and(String::is_empty) {
self.license = None;
}
if self.modelfile.as_ref().is_some_and(String::is_empty) {
self.modelfile = None;
}
if self.digest.as_ref().is_some_and(String::is_empty) {
self.digest = None;
}
self
}
}
/// Error payload returned when model inspection fails for a specific model.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfoRetrievalError {
/// Model that failed to resolve.
pub model_name: String,
/// Human-readable description of the failure.
pub error_message: String,
}

View File

@@ -3,6 +3,7 @@
use crate::{types::*, Error, Result};
use anyhow::anyhow;
use futures::{Stream, StreamExt};
use std::any::Any;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
@@ -11,7 +12,7 @@ use std::sync::Arc;
pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
/// Trait for LLM providers (Ollama, OpenAI, Anthropic, etc.) with zero-cost static dispatch.
pub trait LLMProvider: Send + Sync + 'static + Any + Sized {
type Stream: Stream<Item = Result<ChatResponse>> + Send + 'static;
type ListModelsFuture<'a>: Future<Output = Result<Vec<ModelInfo>>> + Send
@@ -40,6 +41,10 @@ pub trait LLMProvider: Send + Sync + 'static {
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
self
}
}
/// Helper that implements [`LLMProvider::chat`] in terms of [`LLMProvider::chat_stream`].
@@ -81,6 +86,8 @@ pub trait Provider: Send + Sync {
fn config_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
fn as_any(&self) -> &(dyn Any + Send + Sync);
}
#[async_trait::async_trait]
@@ -112,6 +119,10 @@ where
fn config_schema(&self) -> serde_json::Value {
LLMProvider::config_schema(self)
}
fn as_any(&self) -> &(dyn Any + Send + Sync) {
LLMProvider::as_any(self)
}
}
/// Configuration for a provider

View File

@@ -27,7 +27,7 @@ use uuid::Uuid;
use crate::{
config::GeneralSettings,
mcp::McpToolDescriptor,
model::{DetailedModelInfo, ModelDetailsCache, ModelManager},
provider::{LLMProvider, ProviderConfig},
types::{
ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role, TokenUsage, ToolCall,
@@ -88,6 +88,7 @@ pub struct OllamaProvider {
http_client: Client,
base_url: String,
model_manager: ModelManager,
model_details_cache: ModelDetailsCache,
}
impl OllamaProvider {
@@ -191,6 +192,7 @@ impl OllamaProvider {
http_client,
base_url: base_url.trim_end_matches('/').to_string(),
model_manager: ModelManager::new(model_cache_ttl),
model_details_cache: ModelDetailsCache::new(model_cache_ttl),
})
}
@@ -198,6 +200,84 @@ impl OllamaProvider {
build_api_endpoint(&self.base_url, endpoint)
}
/// Attempt to resolve detailed model information for the given model, using the local cache when possible.
pub async fn get_model_info(&self, model_name: &str) -> Result<DetailedModelInfo> {
if let Some(info) = self.model_details_cache.get(model_name).await {
return Ok(info);
}
self.fetch_and_cache_model_info(model_name, None).await
}
/// Force-refresh model information for the specified model.
pub async fn refresh_model_info(&self, model_name: &str) -> Result<DetailedModelInfo> {
self.model_details_cache.invalidate(model_name).await;
self.fetch_and_cache_model_info(model_name, None).await
}
/// Retrieve detailed information for all locally available models.
pub async fn get_all_models_info(&self) -> Result<Vec<DetailedModelInfo>> {
let models = self
.client
.list_local_models()
.await
.map_err(|err| self.map_ollama_error("list models", err, None))?;
let mut details = Vec::with_capacity(models.len());
for local in &models {
match self
.fetch_and_cache_model_info(&local.name, Some(local))
.await
{
Ok(info) => details.push(info),
Err(err) => warn!("Failed to gather model info for '{}': {}", local.name, err),
}
}
Ok(details)
}
/// Return any cached model information without touching the Ollama daemon.
pub async fn cached_model_info(&self) -> Vec<DetailedModelInfo> {
self.model_details_cache.cached().await
}
/// Remove a single model's cached information.
pub async fn invalidate_model_info(&self, model_name: &str) {
self.model_details_cache.invalidate(model_name).await;
}
/// Clear the entire model information cache.
pub async fn clear_model_info_cache(&self) {
self.model_details_cache.invalidate_all().await;
}
async fn fetch_and_cache_model_info(
&self,
model_name: &str,
local: Option<&LocalModel>,
) -> Result<DetailedModelInfo> {
let detail = self
.client
.show_model_info(model_name.to_string())
.await
.map_err(|err| self.map_ollama_error("show_model_info", err, Some(model_name)))?;
let local_owned = if let Some(local) = local {
Some(local.clone())
} else {
let models = self
.client
.list_local_models()
.await
.map_err(|err| self.map_ollama_error("list models", err, None))?;
models.into_iter().find(|m| m.name == model_name)
};
let detailed =
Self::convert_detailed_model_info(self.mode, model_name, local_owned.as_ref(), &detail);
self.model_details_cache.insert(detailed.clone()).await;
Ok(detailed)
}
fn prepare_chat_request(
&self,
model: String,
@@ -239,12 +319,24 @@ impl OllamaProvider {
.map_err(|err| self.map_ollama_error("list models", err, None))?;
let client = self.client.clone();
let cache = self.model_details_cache.clone();
let mode = self.mode;
let fetched = join_all(models.into_iter().map(|local| {
let client = client.clone();
let cache = cache.clone();
async move {
let name = local.name.clone();
let detail = match client.show_model_info(name.clone()).await {
Ok(info) => {
let detailed = OllamaProvider::convert_detailed_model_info(
mode,
&name,
Some(&local),
&info,
);
cache.insert(detailed).await;
Some(info)
}
Err(err) => {
debug!("Failed to fetch Ollama model info for '{name}': {err}");
None
@@ -261,6 +353,85 @@ impl OllamaProvider {
.collect())
}
fn convert_detailed_model_info(
mode: OllamaMode,
model_name: &str,
local: Option<&LocalModel>,
detail: &OllamaModelInfo,
) -> DetailedModelInfo {
let map = &detail.model_info;
let architecture =
pick_first_string(map, &["architecture", "model_format", "model_type", "arch"]);
let parameters = non_empty(detail.parameters.clone())
.or_else(|| pick_first_string(map, &["parameters"]));
let parameter_size = pick_first_string(map, &["parameter_size"]);
let context_length = pick_first_u64(map, &["context_length", "num_ctx", "max_context"]);
let embedding_length = pick_first_u64(map, &["embedding_length"]);
let quantization =
pick_first_string(map, &["quantization_level", "quantization", "quantize"]);
let family = pick_first_string(map, &["family", "model_family"]);
let mut families = pick_string_list(map, &["families", "model_families"]);
if families.is_empty() {
if let Some(single) = family.clone() {
families.push(single);
}
}
let system = pick_first_string(map, &["system"]);
let mut modified_at = local
.and_then(|entry| non_empty(entry.modified_at.clone()))
.or_else(|| pick_first_string(map, &["modified_at", "created_at"]));
if modified_at.is_none() && mode == OllamaMode::Cloud {
modified_at = pick_first_string(map, &["updated_at"]);
}
let size = local
.and_then(|entry| {
if entry.size > 0 {
Some(entry.size)
} else {
None
}
})
.or_else(|| pick_first_u64(map, &["size", "model_size", "download_size"]));
let digest = pick_first_string(map, &["digest", "sha256", "checksum"]);
let mut info = DetailedModelInfo {
name: model_name.to_string(),
architecture,
parameters,
context_length,
embedding_length,
quantization,
family,
families,
parameter_size,
template: non_empty(detail.template.clone()),
system,
license: non_empty(detail.license.clone()),
modelfile: non_empty(detail.modelfile.clone()),
modified_at,
size,
digest,
};
if info.parameter_size.is_none() {
info.parameter_size = info.parameters.clone();
}
info.with_normalised_strings()
}
fn convert_model(&self, model: LocalModel, detail: Option<OllamaModelInfo>) -> ModelInfo {
let scope = match self.mode {
OllamaMode::Local => "local",
@@ -682,6 +853,93 @@ fn build_model_description(scope: &str, detail: Option<&OllamaModelInfo>) -> Str
format!("Ollama ({scope}) model") format!("Ollama ({scope}) model")
} }
fn non_empty(value: String) -> Option<String> {
let trimmed = value.trim();
if trimmed.is_empty() {
None
} else {
Some(value)
}
}
fn pick_first_string(map: &JsonMap<String, Value>, keys: &[&str]) -> Option<String> {
keys.iter()
.filter_map(|key| map.get(*key))
.find_map(value_to_string)
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
}
fn pick_first_u64(map: &JsonMap<String, Value>, keys: &[&str]) -> Option<u64> {
keys.iter()
.filter_map(|key| map.get(*key))
.find_map(value_to_u64)
}
fn pick_string_list(map: &JsonMap<String, Value>, keys: &[&str]) -> Vec<String> {
for key in keys {
if let Some(value) = map.get(*key) {
match value {
Value::Array(items) => {
let collected: Vec<String> = items
.iter()
.filter_map(value_to_string)
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
if !collected.is_empty() {
return collected;
}
}
Value::String(text) => {
let collected: Vec<String> = text
.split(',')
.map(|part| part.trim())
.filter(|part| !part.is_empty())
.map(|part| part.to_string())
.collect();
if !collected.is_empty() {
return collected;
}
}
_ => {}
}
}
}
Vec::new()
}
fn value_to_string(value: &Value) -> Option<String> {
match value {
Value::String(text) => Some(text.clone()),
Value::Number(num) => Some(num.to_string()),
Value::Bool(flag) => Some(flag.to_string()),
_ => None,
}
}
fn value_to_u64(value: &Value) -> Option<u64> {
match value {
Value::Number(num) => {
if let Some(v) = num.as_u64() {
Some(v)
} else if let Some(v) = num.as_i64() {
v.try_into().ok()
} else if let Some(v) = num.as_f64() {
if v >= 0.0 {
Some(v as u64)
} else {
None
}
} else {
None
}
}
Value::String(text) => text.trim().parse::<u64>().ok(),
_ => None,
}
}
fn env_var_non_empty(name: &str) -> Option<String> {
env::var(name)
.ok()

View File

@@ -9,8 +9,9 @@ use crate::mcp::client::McpClient;
use crate::mcp::factory::McpClientFactory;
use crate::mcp::permission::PermissionLayer;
use crate::mcp::McpToolCall;
use crate::model::{DetailedModelInfo, ModelManager};
use crate::provider::{ChatStream, Provider};
use crate::providers::OllamaProvider;
use crate::storage::{SessionMeta, StorageManager};
use crate::types::{
ChatParameters, ChatRequest, ChatResponse, Conversation, Message, ModelInfo, ToolCall,
@@ -609,6 +610,66 @@ impl SessionController {
.await
}
fn as_ollama(&self) -> Option<&OllamaProvider> {
self.provider
.as_ref()
.as_any()
.downcast_ref::<OllamaProvider>()
}
pub async fn model_details(
&self,
model_name: &str,
force_refresh: bool,
) -> Result<DetailedModelInfo> {
if let Some(ollama) = self.as_ollama() {
if force_refresh {
ollama.refresh_model_info(model_name).await
} else {
ollama.get_model_info(model_name).await
}
} else {
Err(Error::NotImplemented(format!(
"Provider '{}' does not expose model inspection",
self.provider.name()
)))
}
}
pub async fn all_model_details(&self, force_refresh: bool) -> Result<Vec<DetailedModelInfo>> {
if let Some(ollama) = self.as_ollama() {
if force_refresh {
ollama.clear_model_info_cache().await;
}
ollama.get_all_models_info().await
} else {
Err(Error::NotImplemented(format!(
"Provider '{}' does not expose model inspection",
self.provider.name()
)))
}
}
pub async fn cached_model_details(&self) -> Vec<DetailedModelInfo> {
if let Some(ollama) = self.as_ollama() {
ollama.cached_model_info().await
} else {
Vec::new()
}
}
pub async fn invalidate_model_details(&self, model_name: &str) {
if let Some(ollama) = self.as_ollama() {
ollama.invalidate_model_info(model_name).await;
}
}
pub async fn clear_model_details_cache(&self) {
if let Some(ollama) = self.as_ollama() {
ollama.clear_model_info_cache().await;
}
}
pub async fn ensure_default_model(&mut self, models: &[ModelInfo]) {
let mut config = self.config.lock().await;
if let Some(default) = config.general.default_model.clone() {

View File

@@ -1,6 +1,7 @@
use anyhow::{anyhow, Result};
use owlen_core::mcp::remote_client::RemoteMcpClient;
use owlen_core::{
model::DetailedModelInfo,
provider::{Provider, ProviderConfig},
session::{SessionController, SessionOutcome},
storage::SessionMeta,
@@ -15,6 +16,7 @@ use uuid::Uuid;
use crate::config;
use crate::events::Event;
use crate::model_info_panel::ModelInfoPanel;
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
use std::collections::{BTreeSet, HashMap, HashSet};
@@ -144,6 +146,10 @@ pub struct ChatApp {
pub selected_provider_index: usize, // Index into the available_providers list
pub selected_model_item: Option<usize>, // Index into the flattened model selector list
model_selector_items: Vec<ModelSelectorItem>, // Flattened provider/model list for selector
model_info_panel: ModelInfoPanel, // Dedicated model information viewer
model_details_cache: HashMap<String, DetailedModelInfo>, // Cached detailed metadata per model
show_model_info: bool, // Whether the model info panel is visible
model_info_viewport_height: usize, // Cached viewport height for the info panel
expanded_provider: Option<String>, // Which provider group is currently expanded
current_provider: String, // Provider backing the active session
auto_scroll: AutoScroll, // Auto-scroll state for message rendering
@@ -231,6 +237,10 @@ impl ChatApp {
selected_provider_index: 0,
selected_model_item: None,
model_selector_items: Vec::new(),
model_info_panel: ModelInfoPanel::new(),
model_details_cache: HashMap::new(),
show_model_info: false,
model_info_viewport_height: 0,
expanded_provider: None,
current_provider,
auto_scroll: AutoScroll::default(),
@@ -357,6 +367,124 @@ impl ChatApp {
self.models.get(index)
}
pub fn cached_model_detail(&self, model_name: &str) -> Option<&DetailedModelInfo> {
self.model_details_cache.get(model_name)
}
pub fn model_info_panel_mut(&mut self) -> &mut ModelInfoPanel {
&mut self.model_info_panel
}
pub fn is_model_info_visible(&self) -> bool {
self.show_model_info
}
pub fn set_model_info_visible(&mut self, visible: bool) {
self.show_model_info = visible;
if !visible {
self.model_info_panel.reset_scroll();
self.model_info_viewport_height = 0;
}
}
pub fn set_model_info_viewport_height(&mut self, height: usize) {
self.model_info_viewport_height = height;
}
pub fn model_info_viewport_height(&self) -> usize {
self.model_info_viewport_height
}
pub async fn ensure_model_details(
&mut self,
model_name: &str,
force_refresh: bool,
) -> Result<()> {
if !force_refresh
&& self.show_model_info
&& self
.model_info_panel
.current_model_name()
.is_some_and(|name| name == model_name)
{
self.set_model_info_visible(false);
self.status = "Closed model info panel".to_string();
self.error = None;
return Ok(());
}
if !force_refresh {
if let Some(info) = self.model_details_cache.get(model_name).cloned() {
self.model_info_panel.set_model_info(info);
self.set_model_info_visible(true);
self.status = format!("Showing model info for {}", model_name);
self.error = None;
return Ok(());
}
} else {
self.model_details_cache.remove(model_name);
self.controller.invalidate_model_details(model_name).await;
}
match self
.controller
.model_details(model_name, force_refresh)
.await
{
Ok(details) => {
self.model_details_cache
.insert(model_name.to_string(), details.clone());
self.model_info_panel.set_model_info(details);
self.set_model_info_visible(true);
self.status = if force_refresh {
format!("Refreshed model info for {}", model_name)
} else {
format!("Showing model info for {}", model_name)
};
self.error = None;
Ok(())
}
Err(err) => {
self.error = Some(format!("Failed to load model info: {}", err));
Err(err.into())
}
}
}
pub async fn prefetch_all_model_details(&mut self, force_refresh: bool) -> Result<()> {
if force_refresh {
self.controller.clear_model_details_cache().await;
}
match self.controller.all_model_details(force_refresh).await {
Ok(details) => {
if force_refresh {
self.model_details_cache.clear();
}
for info in details {
self.model_details_cache.insert(info.name.clone(), info);
}
if let Some(current) = self
.model_info_panel
.current_model_name()
.map(|s| s.to_string())
{
if let Some(updated) = self.model_details_cache.get(&current).cloned() {
self.model_info_panel.set_model_info(updated);
}
}
let total = self.model_details_cache.len();
self.status = format!("Cached model details for {} model(s)", total);
self.error = None;
Ok(())
}
Err(err) => {
self.error = Some(format!("Failed to prefetch model info: {}", err));
Err(err.into())
}
}
}
pub fn auto_scroll(&self) -> &AutoScroll {
&self.auto_scroll
}
@@ -478,7 +606,14 @@ impl ChatApp {
("help", "Show help documentation"), ("help", "Show help documentation"),
("h", "Alias for help"), ("h", "Alias for help"),
("model", "Select a model"), ("model", "Select a model"),
("model info", "Show detailed information for a model"),
("model refresh", "Refresh cached model information"),
("model details", "Show details for the active model"),
("m", "Alias for model"), ("m", "Alias for model"),
(
"models info",
"Prefetch detailed information for all models",
),
("new", "Start a new conversation"), ("new", "Start a new conversation"),
("n", "Alias for new"), ("n", "Alias for new"),
("theme", "Switch theme"), ("theme", "Switch theme"),
@@ -648,6 +783,9 @@ impl ChatApp {
let (all_models, errors) = self.collect_models_from_all_providers().await;
self.models = all_models;
self.model_details_cache.clear();
self.model_info_panel.clear();
self.show_model_info = false;
self.recompute_available_providers();
@@ -804,6 +942,17 @@ impl ChatApp {
match self.mode {
InputMode::Normal => {
// Handle multi-key sequences first
if self.show_model_info
&& matches!(
(key.code, key.modifiers),
(KeyCode::Esc, KeyModifiers::NONE)
)
{
self.set_model_info_visible(false);
self.status = "Closed model info panel".to_string();
return Ok(AppState::Running);
}
if let Some(pending) = self.pending_key {
self.pending_key = None;
match (pending, key.code) {
@@ -829,6 +978,21 @@ impl ChatApp {
| (KeyCode::Char('c'), KeyModifiers::CONTROL) => {
return Ok(AppState::Quit);
}
(KeyCode::Char('j'), modifiers)
if modifiers.contains(KeyModifiers::CONTROL) =>
{
if self.show_model_info && self.model_info_viewport_height > 0 {
self.model_info_panel
.scroll_down(self.model_info_viewport_height);
}
}
(KeyCode::Char('k'), modifiers)
if modifiers.contains(KeyModifiers::CONTROL) =>
{
if self.show_model_info && self.model_info_viewport_height > 0 {
self.model_info_panel.scroll_up();
}
}
// Mode switches
(KeyCode::Char('v'), KeyModifiers::NONE) => {
self.mode = InputMode::Visual;
@@ -1648,12 +1812,90 @@ impl ChatApp {
return Ok(AppState::Running);
}
"m" | "model" => {
if args.is_empty() {
self.refresh_models().await?;
self.mode = InputMode::ProviderSelection;
self.command_buffer.clear();
self.command_suggestions.clear();
return Ok(AppState::Running);
}
let subcommand = args[0];
let outcome: Result<()> = match subcommand {
"info" => {
let target = if args.len() > 1 {
args[1..].join(" ")
} else {
self.controller.selected_model().to_string()
};
if target.trim().is_empty() {
Err(anyhow!("Usage: :model info <name>"))
} else {
self.ensure_model_details(&target, false).await
}
}
"details" => {
let target =
self.controller.selected_model().to_string();
if target.trim().is_empty() {
Err(anyhow!(
"No active model set. Use :model to choose one first"
))
} else {
self.ensure_model_details(&target, false).await
}
}
"refresh" => {
let target = if args.len() > 1 {
args[1..].join(" ")
} else {
self.controller.selected_model().to_string()
};
if target.trim().is_empty() {
Err(anyhow!("Usage: :model refresh <name>"))
} else {
self.ensure_model_details(&target, true).await
}
}
_ => Err(anyhow!(format!(
"Unknown model subcommand: {}",
subcommand
))),
};
match outcome {
Ok(_) => self.error = None,
Err(err) => self.error = Some(err.to_string()),
}
self.mode = InputMode::Normal;
self.command_buffer.clear();
self.command_suggestions.clear();
return Ok(AppState::Running);
}
"models" => {
let outcome = if let Some(&"info") = args.first() {
let force_refresh = args
.get(1)
.map(|flag| {
matches!(*flag, "refresh" | "-r" | "--refresh")
})
.unwrap_or(false);
self.prefetch_all_model_details(force_refresh).await
} else {
Err(anyhow!("Usage: :models info [refresh]"))
};
match outcome {
Ok(_) => self.error = None,
Err(err) => self.error = Some(err.to_string()),
}
self.mode = InputMode::Normal;
self.command_buffer.clear();
self.command_suggestions.clear();
return Ok(AppState::Running);
}
// "run-agent" command removed to break circular dependency on owlen-cli. // "run-agent" command removed to break circular dependency on owlen-cli.
"agent" => { "agent" => {
if self.agent_running { if self.agent_running {
@@ -1897,8 +2139,13 @@ impl ChatApp {
},
InputMode::ModelSelection => match key.code {
KeyCode::Esc => {
if self.show_model_info {
self.set_model_info_visible(false);
self.status = "Closed model info panel".to_string();
} else {
self.mode = InputMode::Normal;
}
}
KeyCode::Enter => {
if let Some(item) = self.current_model_selector_item() {
match item.kind() {
@@ -1960,6 +2207,7 @@ impl ChatApp {
}
}
self.mode = InputMode::Normal;
self.set_model_info_visible(false);
} else {
self.error = Some(
"No model available for the selected provider"
@@ -1977,6 +2225,50 @@ impl ChatApp {
}
}
}
KeyCode::Char('q') => {
if self.show_model_info {
self.set_model_info_visible(false);
self.status = "Closed model info panel".to_string();
} else {
self.mode = InputMode::Normal;
}
}
KeyCode::Char('i') => {
if let Some(model) = self.selected_model_info() {
let model_id = model.id.clone();
if let Err(err) = self.ensure_model_details(&model_id, false).await
{
self.error =
Some(format!("Failed to load model info: {}", err));
}
}
}
KeyCode::Char('r') => {
if let Some(model) = self.selected_model_info() {
let model_id = model.id.clone();
if let Err(err) = self.ensure_model_details(&model_id, true).await {
self.error =
Some(format!("Failed to refresh model info: {}", err));
} else {
self.error = None;
}
}
}
KeyCode::Char('j') => {
if self.show_model_info && self.model_info_viewport_height > 0 {
self.model_info_panel
.scroll_down(self.model_info_viewport_height);
} else {
self.move_model_selection(1);
}
}
KeyCode::Char('k') => {
if self.show_model_info && self.model_info_viewport_height > 0 {
self.model_info_panel.scroll_up();
} else {
self.move_model_selection(-1);
}
}
KeyCode::Up => {
self.move_model_selection(-1);
}
@@ -2797,6 +3089,9 @@ impl ChatApp {
self.controller.switch_provider(provider).await?;
self.current_provider = provider_name.to_string();
self.model_details_cache.clear();
self.model_info_panel.clear();
self.set_model_info_visible(false);
Ok(())
}
@@ -2813,6 +3108,9 @@ impl ChatApp {
Some(errors.join("; ")) Some(errors.join("; "))
}; };
self.models.clear(); self.models.clear();
self.model_details_cache.clear();
self.model_info_panel.clear();
self.set_model_info_visible(false);
self.recompute_available_providers(); self.recompute_available_providers();
if self.available_providers.is_empty() { if self.available_providers.is_empty() {
self.available_providers.push("ollama".to_string()); self.available_providers.push("ollama".to_string());
@@ -2825,6 +3123,9 @@ impl ChatApp {
}
self.models = all_models;
self.model_details_cache.clear();
self.model_info_panel.clear();
self.set_model_info_visible(false);
self.recompute_available_providers();

View File

@@ -16,6 +16,7 @@ pub mod chat_app;
pub mod code_app;
pub mod config;
pub mod events;
pub mod model_info_panel;
pub mod tui_controller;
pub mod ui;

View File

@@ -0,0 +1,226 @@
use owlen_core::model::DetailedModelInfo;
use owlen_core::theme::Theme;
use ratatui::{
layout::Rect,
style::{Color, Modifier, Style},
widgets::{Block, Borders, Paragraph, Wrap},
Frame,
};
/// Dedicated panel for presenting detailed model information.
pub struct ModelInfoPanel {
info: Option<DetailedModelInfo>,
scroll_offset: usize,
total_lines: usize,
}
impl ModelInfoPanel {
pub fn new() -> Self {
Self {
info: None,
scroll_offset: 0,
total_lines: 0,
}
}
pub fn set_model_info(&mut self, info: DetailedModelInfo) {
self.info = Some(info);
self.scroll_offset = 0;
self.total_lines = 0;
}
pub fn clear(&mut self) {
self.info = None;
self.scroll_offset = 0;
self.total_lines = 0;
}
pub fn render(&mut self, frame: &mut Frame<'_>, area: Rect, theme: &Theme) {
let block = Block::default()
.title("Model Information")
.borders(Borders::ALL)
.style(Style::default().bg(theme.background).fg(theme.text))
.border_style(Style::default().fg(theme.focused_panel_border));
if let Some(info) = &self.info {
let body = self.format_info(info);
self.total_lines = body.lines().count();
let paragraph = Paragraph::new(body)
.block(block)
.style(Style::default().fg(theme.text))
.wrap(Wrap { trim: true })
.scroll((self.scroll_offset as u16, 0));
frame.render_widget(paragraph, area);
} else {
self.total_lines = 0;
let paragraph = Paragraph::new("Select a model to inspect its details.")
.block(block)
.style(
Style::default()
.fg(Color::DarkGray)
.add_modifier(Modifier::ITALIC),
)
.wrap(Wrap { trim: true });
frame.render_widget(paragraph, area);
}
}
pub fn scroll_up(&mut self) {
if self.scroll_offset > 0 {
self.scroll_offset -= 1;
}
}
pub fn scroll_down(&mut self, viewport_height: usize) {
if viewport_height == 0 {
return;
}
let max_offset = self.total_lines.saturating_sub(viewport_height);
if self.scroll_offset < max_offset {
self.scroll_offset += 1;
}
}
pub fn reset_scroll(&mut self) {
self.scroll_offset = 0;
}
pub fn scroll_offset(&self) -> usize {
self.scroll_offset
}
pub fn total_lines(&self) -> usize {
self.total_lines
}
pub fn current_model_name(&self) -> Option<&str> {
self.info.as_ref().map(|info| info.name.as_str())
}
fn format_info(&self, info: &DetailedModelInfo) -> String {
let mut lines = Vec::new();
lines.push(format!("Name: {}", info.name));
lines.push(format!(
"Architecture: {}",
display_option(info.architecture.as_deref())
));
lines.push(format!(
"Parameters: {}",
display_option(info.parameters.as_deref())
));
lines.push(format!(
"Context Length: {}",
display_u64(info.context_length)
));
lines.push(format!(
"Embedding Length: {}",
display_u64(info.embedding_length)
));
lines.push(format!(
"Quantization: {}",
display_option(info.quantization.as_deref())
));
lines.push(format!(
"Family: {}",
display_option(info.family.as_deref())
));
if !info.families.is_empty() {
lines.push(format!("Families: {}", info.families.join(", ")));
}
lines.push(format!(
"Parameter Size: {}",
display_option(info.parameter_size.as_deref())
));
lines.push(format!("Size: {}", format_size(info.size)));
lines.push(format!(
"Modified: {}",
display_option(info.modified_at.as_deref())
));
lines.push(format!(
"License: {}",
display_option(info.license.as_deref())
));
lines.push(format!(
"Digest: {}",
display_option(info.digest.as_deref())
));
if let Some(template) = info.template.as_deref() {
lines.push(format!("Template: {}", snippet(template)));
}
if let Some(system) = info.system.as_deref() {
lines.push(format!("System Prompt: {}", snippet(system)));
}
if let Some(modelfile) = info.modelfile.as_deref() {
lines.push("Modelfile:".to_string());
lines.push(snippet_multiline(modelfile, 8));
}
lines.join("\n")
}
}
impl Default for ModelInfoPanel {
fn default() -> Self {
Self::new()
}
}
fn display_option(value: Option<&str>) -> String {
value
.map(|s| s.to_string())
.filter(|s| !s.trim().is_empty())
.unwrap_or_else(|| "N/A".to_string())
}
fn display_u64(value: Option<u64>) -> String {
value
.map(|v| v.to_string())
.unwrap_or_else(|| "N/A".to_string())
}
fn format_size(value: Option<u64>) -> String {
if let Some(bytes) = value {
if bytes >= 1_000_000_000 {
let human = bytes as f64 / 1_000_000_000_f64;
format!("{human:.2} GB ({} bytes)", bytes)
} else if bytes >= 1_000_000 {
let human = bytes as f64 / 1_000_000_f64;
format!("{human:.2} MB ({} bytes)", bytes)
} else if bytes >= 1_000 {
let human = bytes as f64 / 1_000_f64;
format!("{human:.2} KB ({} bytes)", bytes)
} else {
format!("{bytes} bytes")
}
} else {
"N/A".to_string()
}
}
fn snippet(text: &str) -> String {
const MAX_LEN: usize = 160;
if text.len() > MAX_LEN {
format!("{}", text.chars().take(MAX_LEN).collect::<String>())
} else {
text.to_string()
}
}
fn snippet_multiline(text: &str, max_lines: usize) -> String {
let mut lines = Vec::new();
for (idx, line) in text.lines().enumerate() {
if idx >= max_lines {
lines.push("".to_string());
break;
}
lines.push(snippet(line));
}
if lines.is_empty() {
"N/A".to_string()
} else {
lines.join("\n")
}
}

View File

@@ -9,6 +9,7 @@ use tui_textarea::TextArea;
use unicode_width::UnicodeWidthStr;
use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT};
use owlen_core::model::DetailedModelInfo;
use owlen_core::types::{ModelInfo, Role};
use owlen_core::ui::{FocusedPanel, InputMode};
@@ -121,6 +122,21 @@ pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) {
_ => {}
}
}
if app.is_model_info_visible() {
let panel_width = frame
.area()
.width
.saturating_div(3)
.max(30)
.min(frame.area().width.saturating_sub(20).max(30));
let x = frame.area().x + frame.area().width.saturating_sub(panel_width);
let area = Rect::new(x, frame.area().y, panel_width, frame.area().height);
frame.render_widget(Clear, area);
let viewport_height = area.height.saturating_sub(2) as usize;
app.set_model_info_viewport_height(viewport_height);
app.model_info_panel_mut().render(frame, area, &theme);
}
}
fn render_editable_textarea(
@@ -1434,12 +1450,8 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
ModelSelectorItemKind::Model { model_index, .. } => {
if let Some(model) = app.model_info_by_index(*model_index) {
let badges = model_badge_icons(model);
let label = if badges.is_empty() {
format!(" {}", model.id)
} else {
format!(" {} - {}", model.id, badges.join(" "))
};
let detail = app.cached_model_detail(&model.id);
let label = build_model_selector_label(model, detail, &badges);
ListItem::new(Span::styled(
label,
Style::default()
@@ -1486,6 +1498,53 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
frame.render_stateful_widget(list, area, &mut state);
}
fn build_model_selector_label(
model: &ModelInfo,
detail: Option<&DetailedModelInfo>,
badges: &[&'static str],
) -> String {
let mut parts = vec![model.id.clone()];
if let Some(detail) = detail {
if let Some(parameters) = detail
.parameter_size
.as_ref()
.or(detail.parameters.as_ref())
{
if !parameters.trim().is_empty() {
parts.push(parameters.trim().to_string());
}
}
if let Some(size) = detail.size {
parts.push(format_short_size(size));
}
if let Some(ctx) = detail.context_length {
parts.push(format!("ctx {}", ctx));
}
}
let mut label = format!(" {}", parts.join(""));
if !badges.is_empty() {
label.push(' ');
label.push_str(&badges.join(" "));
}
label
}
fn format_short_size(bytes: u64) -> String {
if bytes >= 1_000_000_000 {
format!("{:.1} GB", bytes as f64 / 1_000_000_000_f64)
} else if bytes >= 1_000_000 {
format!("{:.1} MB", bytes as f64 / 1_000_000_f64)
} else if bytes >= 1_000 {
format!("{:.1} KB", bytes as f64 / 1_000_f64)
} else {
format!("{} B", bytes)
}
}
fn render_consent_dialog(frame: &mut Frame<'_>, app: &ChatApp) {
let theme = app.theme();