refactor(ollama): replace handcrafted HTTP logic with ollama-rs client and simplify request handling

- Switch to the `ollama-rs` crate for chat, model listing, and streaming.
- Remove custom request building, authentication handling, and debug logging.
- Drop unsupported tool conversion; tool descriptors are now ignored with a warning.
- Refactor model fetching to use local model info and optional cloud details (see the sketch below).
- Consolidate error mapping via `map_ollama_error`.
- Update the health check to use the new HTTP client.
- Delete the obsolete `provider_interface.rs` test, as the provider interface has changed.
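The diff shown below covers only the TUI side of this change; the provider refactor itself is not included here. As a rough, hypothetical sketch of what the ollama-rs-based model fetching could look like, assuming the default local endpoint and a `ModelInfo` type with exactly the fields visible in the tests in this diff (`fetch_local_models` is an illustrative name, not the repository's actual function):

```rust
use ollama_rs::Ollama;
use owlen_core::types::ModelInfo;

// Sketch only: list locally installed Ollama models and map them into the
// ModelInfo records the model selector consumes. Everything beyond the model
// name is a placeholder; the optional cloud details mentioned in the commit
// message would fill in description, context window, and capabilities.
async fn fetch_local_models() -> Result<Vec<ModelInfo>, ollama_rs::error::OllamaError> {
    let ollama = Ollama::default(); // assumes the default endpoint, http://localhost:11434

    let local_models = ollama.list_local_models().await?;

    Ok(local_models
        .into_iter()
        .map(|m| ModelInfo {
            id: m.name.clone(),
            name: m.name,
            description: None,
            provider: "ollama".into(),
            context_window: None,
            capabilities: Vec::new(),
            supports_tools: false,
        })
        .collect())
}
```

Errors from `ollama-rs` would then be funneled through the `map_ollama_error` helper mentioned above before reaching callers.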
@@ -9,7 +9,7 @@ use tui_textarea::TextArea;
 use unicode_width::UnicodeWidthStr;

 use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT};
-use owlen_core::types::Role;
+use owlen_core::types::{ModelInfo, Role};
 use owlen_core::ui::{FocusedPanel, InputMode};

 const PRIVACY_TAB_INDEX: usize = HELP_TAB_COUNT - 1;
@@ -1371,6 +1371,47 @@ fn render_provider_selector(frame: &mut Frame<'_>, app: &ChatApp) {
     frame.render_stateful_widget(list, area, &mut state);
 }

+fn model_badge_icons(model: &ModelInfo) -> Vec<&'static str> {
+    let mut badges = Vec::new();
+
+    if model.supports_tools {
+        badges.push("🔧");
+    }
+
+    if model_has_feature(model, &["think", "reason"]) {
+        badges.push("🧠");
+    }
+
+    if model_has_feature(model, &["vision", "multimodal", "image"]) {
+        badges.push("👁️");
+    }
+
+    if model_has_feature(model, &["audio", "speech", "voice"]) {
+        badges.push("🎧");
+    }
+
+    badges
+}
+
+fn model_has_feature(model: &ModelInfo, keywords: &[&str]) -> bool {
+    let name_lower = model.name.to_ascii_lowercase();
+    if keywords.iter().any(|kw| name_lower.contains(kw)) {
+        return true;
+    }
+
+    if let Some(description) = &model.description {
+        let description_lower = description.to_ascii_lowercase();
+        if keywords.iter().any(|kw| description_lower.contains(kw)) {
+            return true;
+        }
+    }
+
+    model.capabilities.iter().any(|cap| {
+        let lower = cap.to_ascii_lowercase();
+        keywords.iter().any(|kw| lower.contains(kw))
+    })
+}
+
 fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
     let theme = app.theme();
     let area = centered_rect(60, 60, frame.area());
@@ -1392,10 +1433,7 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
             }
             ModelSelectorItemKind::Model { model_index, .. } => {
                 if let Some(model) = app.model_info_by_index(*model_index) {
-                    let mut badges = Vec::new();
-                    if model.supports_tools {
-                        badges.push("🔧");
-                    }
+                    let badges = model_badge_icons(model);

                     let label = if badges.is_empty() {
                         format!(" {}", model.id)
@@ -1428,7 +1466,7 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
         .block(
             Block::default()
                 .title(Span::styled(
-                    "Select Model — 🔧 = Tool Support",
+                    "Select Model — 🔧 tools • 🧠 thinking • 👁️ vision • 🎧 audio",
                     Style::default()
                         .fg(theme.focused_panel_border)
                         .add_modifier(Modifier::BOLD),
@@ -1602,6 +1640,67 @@ fn render_consent_dialog(frame: &mut Frame<'_>, app: &ChatApp) {
     frame.render_widget(paragraph, area);
 }

+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    fn model_with(capabilities: Vec<&str>, description: Option<&str>) -> ModelInfo {
+        ModelInfo {
+            id: "model".into(),
+            name: "model".into(),
+            description: description.map(|s| s.to_string()),
+            provider: "test".into(),
+            context_window: None,
+            capabilities: capabilities.into_iter().map(|s| s.to_string()).collect(),
+            supports_tools: false,
+        }
+    }
+
+    #[test]
+    fn badges_include_tool_icon() {
+        let model = ModelInfo {
+            id: "tool-model".into(),
+            name: "tool-model".into(),
+            description: None,
+            provider: "test".into(),
+            context_window: None,
+            capabilities: vec![],
+            supports_tools: true,
+        };
+
+        assert!(model_badge_icons(&model).contains(&"🔧"));
+    }
+
+    #[test]
+    fn badges_detect_thinking_capability() {
+        let model = model_with(vec!["Thinking"], None);
+        let icons = model_badge_icons(&model);
+        assert!(icons.contains(&"🧠"));
+    }
+
+    #[test]
+    fn badges_detect_vision_from_description() {
+        let model = model_with(vec!["chat"], Some("Supports multimodal vision"));
+        let icons = model_badge_icons(&model);
+        assert!(icons.contains(&"👁️"));
+    }
+
+    #[test]
+    fn badges_detect_audio_from_name() {
+        let model = ModelInfo {
+            id: "voice-specialist".into(),
+            name: "Voice-Specialist".into(),
+            description: None,
+            provider: "test".into(),
+            context_window: None,
+            capabilities: vec![],
+            supports_tools: false,
+        };
+        let icons = model_badge_icons(&model);
+        assert!(icons.contains(&"🎧"));
+    }
+}
+
 fn render_privacy_settings(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
     let theme = app.theme();
     let config = app.config();