refactor(ollama)!: remove Ollama provider crate and implementation

Deletes the `owlen-ollama` Cargo.toml and source files, fully removing the Ollama provider from the workspace. This aligns the project with the MCP‑only architecture and eliminates direct provider dependencies.
This commit is contained in:
2025-10-12 06:38:21 +02:00
parent 38aba1a6bb
commit 15e5c1206b
19 changed files with 1280 additions and 741 deletions

View File

@@ -26,7 +26,6 @@ required-features = ["chat-client"]
owlen-core = { path = "../owlen-core" }
# Optional TUI dependency, enabled by the "chat-client" feature.
owlen-tui = { path = "../owlen-tui", optional = true }
owlen-ollama = { path = "../owlen-ollama" }
log = { workspace = true }
async-trait = { workspace = true }
futures = { workspace = true }

View File

@@ -0,0 +1,401 @@
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::{anyhow, bail, Context, Result};
use clap::Subcommand;
use owlen_core::config as core_config;
use owlen_core::config::Config;
use owlen_core::credentials::{ApiCredentials, CredentialManager, OLLAMA_CLOUD_CREDENTIAL_ID};
use owlen_core::encryption;
use owlen_core::provider::{LLMProvider, ProviderConfig};
use owlen_core::providers::OllamaProvider;
use owlen_core::storage::StorageManager;
/// Default Ollama Cloud endpoint, used when `--endpoint` is not supplied.
const DEFAULT_CLOUD_ENDPOINT: &str = "https://ollama.com";
/// Subcommands under `owlen cloud` for managing Ollama Cloud access.
///
/// Every variant takes `--provider` (default `"ollama"`); aliases such as
/// `"ollama-cloud"` are canonicalised by the handlers before use.
#[derive(Debug, Subcommand)]
pub enum CloudCommand {
    /// Configure Ollama Cloud credentials
    Setup {
        /// API key passed directly on the command line (prompted when omitted)
        #[arg(long)]
        api_key: Option<String>,
        /// Override the cloud endpoint (default: https://ollama.com)
        #[arg(long)]
        endpoint: Option<String>,
        /// Provider name to configure (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// Check connectivity to Ollama Cloud
    Status {
        /// Provider name to check (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// List available cloud-hosted models
    Models {
        /// Provider name to query (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// Remove stored Ollama Cloud credentials
    Logout {
        /// Provider name to clear (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
}
/// Dispatches a parsed `owlen cloud …` subcommand to its handler.
pub async fn run_cloud_command(command: CloudCommand) -> Result<()> {
    match command {
        CloudCommand::Status { provider } => status(provider).await,
        CloudCommand::Models { provider } => models(provider).await,
        CloudCommand::Logout { provider } => logout(provider).await,
        CloudCommand::Setup {
            provider,
            api_key,
            endpoint,
        } => setup(provider, api_key, endpoint).await,
    }
}
/// Stores Ollama Cloud credentials for `provider`, prompting when needed.
///
/// When `privacy.encrypt_local_data` is enabled the key is written to the
/// encrypted credential vault and scrubbed from the plaintext config;
/// otherwise it is stored in the config file directly. The endpoint always
/// ends up in the provider's `base_url`.
async fn setup(provider: String, api_key: Option<String>, endpoint: Option<String>) -> Result<()> {
    // Fold aliases such as "ollama-cloud" into the canonical "ollama" entry.
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let endpoint = endpoint.unwrap_or_else(|| DEFAULT_CLOUD_ENDPOINT.to_string());
    ensure_provider_entry(&mut config, &provider, &endpoint);
    let key = match api_key {
        Some(value) if !value.trim().is_empty() => value,
        _ => {
            // Blank or missing --api-key: fall back to an interactive prompt.
            let prompt = format!("Enter API key for {provider}: ");
            encryption::prompt_password(&prompt)?
        }
    };
    if config.privacy.encrypt_local_data {
        let storage = Arc::new(StorageManager::new().await?);
        let manager = unlock_credential_manager(&config, storage.clone())?;
        let credentials = ApiCredentials {
            api_key: key.clone(),
            endpoint: endpoint.clone(),
        };
        manager
            .store_credentials(OLLAMA_CLOUD_CREDENTIAL_ID, &credentials)
            .await?;
        // Ensure plaintext key is not persisted to disk.
        if let Some(entry) = config.providers.get_mut(&provider) {
            entry.api_key = None;
        }
    } else if let Some(entry) = config.providers.get_mut(&provider) {
        // Encryption disabled: the config file is the only store available.
        entry.api_key = Some(key.clone());
    }
    if let Some(entry) = config.providers.get_mut(&provider) {
        entry.base_url = Some(endpoint.clone());
    }
    crate::config::save_config(&config)?;
    println!("Saved Ollama configuration for provider '{provider}'.");
    if config.privacy.encrypt_local_data {
        println!("API key stored securely in the encrypted credential vault.");
    } else {
        println!("API key stored in plaintext configuration (encryption disabled).");
    }
    Ok(())
}
/// Reports connectivity to the configured Ollama endpoint for `provider`.
///
/// Connectivity failures are printed rather than returned as errors, so the
/// command exits successfully either way.
async fn status(provider: String) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let storage = Arc::new(StorageManager::new().await?);
    // Only unlock the vault when encryption is enabled; otherwise keys come
    // from the plaintext config or environment variables.
    let manager = if config.privacy.encrypt_local_data {
        Some(unlock_credential_manager(&config, storage.clone())?)
    } else {
        None
    };
    // Also exports OLLAMA_API_KEY / OLLAMA_CLOUD_API_KEY when a key is found.
    let api_key = hydrate_api_key(&mut config, manager.as_ref()).await?;
    ensure_provider_entry(&mut config, &provider, DEFAULT_CLOUD_ENDPOINT);
    let provider_cfg = config
        .provider(&provider)
        .cloned()
        .ok_or_else(|| anyhow!("Provider '{provider}' is not configured"))?;
    let ollama = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
        .with_context(|| "Failed to construct Ollama provider. Run `owlen cloud setup` first.")?;
    match ollama.health_check().await {
        Ok(_) => {
            println!(
                "✓ Connected to {provider} ({})",
                provider_cfg
                    .base_url
                    .as_deref()
                    .unwrap_or(DEFAULT_CLOUD_ENDPOINT)
            );
            if api_key.is_none() && config.privacy.encrypt_local_data {
                // Reachable without a stored key — presumably an env-var key.
                println!(
                    "Warning: No API key stored; connection succeeded via environment variables."
                );
            }
        }
        Err(err) => {
            println!("✗ Failed to reach {provider}: {err}");
        }
    }
    Ok(())
}
/// Lists the models reported by the configured Ollama endpoint for `provider`.
///
/// Fails with an error when the model listing itself fails; prints a notice
/// when the endpoint reports no models.
async fn models(provider: String) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let storage = Arc::new(StorageManager::new().await?);
    let manager = match config.privacy.encrypt_local_data {
        true => Some(unlock_credential_manager(&config, storage.clone())?),
        false => None,
    };
    hydrate_api_key(&mut config, manager.as_ref()).await?;
    ensure_provider_entry(&mut config, &provider, DEFAULT_CLOUD_ENDPOINT);
    let provider_cfg = config
        .provider(&provider)
        .cloned()
        .ok_or_else(|| anyhow!("Provider '{provider}' is not configured"))?;
    let ollama = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
        .with_context(|| "Failed to construct Ollama provider. Run `owlen cloud setup` first.")?;
    let listed = ollama
        .list_models()
        .await
        .map_err(|err| anyhow!("Failed to list models: {err}"))?;
    if listed.is_empty() {
        println!("No cloud models reported by '{}'.", provider);
        return Ok(());
    }
    println!("Models available via '{}':", provider);
    for model in listed {
        match &model.description {
            Some(description) => println!(" - {} ({})", model.id, description),
            None => println!(" - {}", model.id),
        }
    }
    Ok(())
}
/// Removes stored credentials for `provider` from the vault (when encryption
/// is enabled) and from the plaintext configuration, then saves the config.
async fn logout(provider: String) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let storage = Arc::new(StorageManager::new().await?);
    if config.privacy.encrypt_local_data {
        unlock_credential_manager(&config, storage.clone())?
            .delete_credentials(OLLAMA_CLOUD_CREDENTIAL_ID)
            .await?;
    }
    if let Some(entry) = provider_entry_mut(&mut config) {
        entry.api_key = None;
    }
    crate::config::save_config(&config)?;
    println!("Cleared credentials for provider '{provider}'.");
    Ok(())
}
/// Guarantees that `config.providers[provider]` exists and points at an
/// Ollama backend with a base URL.
///
/// When the caller asks for "ollama" and only a legacy "ollama-cloud" entry
/// exists, that entry is migrated first — ordering matters, because
/// `ensure_provider_config` below would otherwise create a fresh default
/// entry instead of reusing the legacy one.
fn ensure_provider_entry(config: &mut Config, provider: &str, endpoint: &str) {
    if provider == "ollama"
        && config.providers.contains_key("ollama-cloud")
        && !config.providers.contains_key("ollama")
    {
        if let Some(mut legacy) = config.providers.remove("ollama-cloud") {
            legacy.provider_type = "ollama".to_string();
            config.providers.insert("ollama".to_string(), legacy);
        }
    }
    core_config::ensure_provider_config(config, provider);
    if let Some(cfg) = config.providers.get_mut(provider) {
        // Normalise the type and backfill the endpoint without clobbering an
        // explicitly configured base URL.
        if cfg.provider_type != "ollama" {
            cfg.provider_type = "ollama".to_string();
        }
        if cfg.base_url.is_none() {
            cfg.base_url = Some(endpoint.to_string());
        }
    }
}
/// Normalises a user-supplied provider name: trims whitespace, lower-cases,
/// converts underscores to hyphens, and maps the legacy "ollama-cloud" alias
/// (and the empty string) to "ollama".
fn canonical_provider_name(provider: &str) -> String {
    let normalized = provider.trim().replace('_', "-").to_ascii_lowercase();
    if normalized.is_empty() || normalized == "ollama-cloud" {
        "ollama".to_string()
    } else {
        normalized
    }
}
/// Sets environment variable `var` to `value` unless it already holds a
/// non-blank value (unset or whitespace-only counts as missing).
fn set_env_if_missing(var: &str, value: &str) {
    let already_set = matches!(std::env::var(var), Ok(v) if !v.trim().is_empty());
    if !already_set {
        std::env::set_var(var, value);
    }
}
/// Mutable access to the Ollama provider entry, preferring the canonical
/// "ollama" key over the legacy "ollama-cloud" one.
fn provider_entry_mut(config: &mut Config) -> Option<&mut ProviderConfig> {
    let key = if config.providers.contains_key("ollama") {
        "ollama"
    } else {
        "ollama-cloud"
    };
    config.providers.get_mut(key)
}
/// Shared access to the Ollama provider entry, preferring the canonical
/// "ollama" key over the legacy "ollama-cloud" one.
fn provider_entry(config: &Config) -> Option<&ProviderConfig> {
    config
        .providers
        .get("ollama")
        .or_else(|| config.providers.get("ollama-cloud"))
}
/// Unlocks the encrypted credential vault and wraps it in a
/// [`CredentialManager`].
///
/// Fails when encryption is disabled (callers must check
/// `privacy.encrypt_local_data` first) or when the vault cannot be unlocked.
fn unlock_credential_manager(
    config: &Config,
    storage: Arc<StorageManager>,
) -> Result<Arc<CredentialManager>> {
    if !config.privacy.encrypt_local_data {
        bail!("Credential manager requested but encryption is disabled");
    }
    let secure_path = vault_path(&storage)?;
    let handle = unlock_vault(&secure_path)?;
    // The vault handle owns the key material, so one copy into an Arc is
    // required; the previous extra `Arc::clone` was redundant (last use).
    let master_key = Arc::new(handle.data.master_key.clone());
    Ok(Arc::new(CredentialManager::new(storage, master_key)))
}
/// Computes the on-disk location of the encrypted vault file: next to the
/// storage database when possible, otherwise in the local data directory,
/// falling back to the current directory.
fn vault_path(storage: &StorageManager) -> Result<PathBuf> {
    let base_dir = match storage.database_path().parent() {
        Some(parent) => parent.to_path_buf(),
        None => dirs::data_local_dir().unwrap_or_else(|| PathBuf::from(".")),
    };
    Ok(base_dir.join("encrypted_data.json"))
}
/// Opens the encrypted credential vault at `path`, prompting for the master
/// password when it is not already cached in `OWLEN_MASTER_PASSWORD`.
///
/// Existing vault: try the env var first, then up to three interactive
/// attempts; a successful password is cached back into the env var so later
/// commands in this process do not re-prompt. Missing vault: run the
/// interactive create/unlock flow, then cache the password the same way.
///
/// NOTE(review): caching the master password in a process environment
/// variable means child processes inherit it — confirm this trade-off is
/// intentional.
fn unlock_vault(path: &Path) -> Result<encryption::VaultHandle> {
    use std::env;
    if path.exists() {
        if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD") {
            if !password.trim().is_empty() {
                return encryption::unlock_with_password(path.to_path_buf(), &password)
                    .context("Failed to unlock vault with OWLEN_MASTER_PASSWORD");
            }
        }
        for attempt in 0..3 {
            let password = encryption::prompt_password("Enter master password: ")?;
            match encryption::unlock_with_password(path.to_path_buf(), &password) {
                Ok(handle) => {
                    // Cache for the rest of this process so we only prompt once.
                    env::set_var("OWLEN_MASTER_PASSWORD", password);
                    return Ok(handle);
                }
                Err(err) => {
                    eprintln!("Failed to unlock vault: {err}");
                    if attempt == 2 {
                        // Third failure: give up and surface the last error.
                        return Err(err);
                    }
                }
            }
        }
        // Unreachable in practice (the loop returns on success or third
        // failure), kept as a defensive terminator.
        bail!("Unable to unlock encrypted credential vault");
    }
    // No vault yet: the interactive flow creates one.
    let handle = encryption::unlock_interactive(path.to_path_buf())?;
    if env::var("OWLEN_MASTER_PASSWORD")
        .map(|v| v.trim().is_empty())
        .unwrap_or(true)
    {
        let password = encryption::prompt_password("Cache master password for this session: ")?;
        env::set_var("OWLEN_MASTER_PASSWORD", password);
    }
    Ok(handle)
}
/// Loads the stored API key (vault first, then plaintext config) and exports
/// it via `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` for downstream consumers.
///
/// Also backfills the provider's `base_url` from the vaulted endpoint when
/// the config does not set one. Returns the key that was found, or `None`
/// when no usable (non-blank) key is stored anywhere.
async fn hydrate_api_key(
    config: &mut Config,
    manager: Option<&Arc<CredentialManager>>,
) -> Result<Option<String>> {
    if let Some(manager) = manager {
        if let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await? {
            let key = credentials.api_key.trim().to_string();
            if !key.is_empty() {
                set_env_if_missing("OLLAMA_API_KEY", &key);
                set_env_if_missing("OLLAMA_CLOUD_API_KEY", &key);
            }
            if let Some(cfg) = provider_entry_mut(config) {
                if cfg.base_url.is_none() && !credentials.endpoint.trim().is_empty() {
                    cfg.base_url = Some(credentials.endpoint);
                }
            }
            if !key.is_empty() {
                return Ok(Some(key));
            }
            // Fix: a vault entry with an empty/whitespace key used to return
            // Ok(Some("")), short-circuiting the plaintext-config fallback
            // below and suppressing the "no API key stored" warning. Treat an
            // empty vaulted key as absent instead.
        }
    }
    if let Some(cfg) = provider_entry(config) {
        if let Some(key) = cfg
            .api_key
            .as_ref()
            .map(|value| value.trim())
            .filter(|value| !value.is_empty())
        {
            set_env_if_missing("OLLAMA_API_KEY", key);
            set_env_if_missing("OLLAMA_CLOUD_API_KEY", key);
            return Ok(Some(key.to_string()));
        }
    }
    Ok(None)
}
/// Hydrates runtime credentials at startup: unlocks the vault when encryption
/// is enabled, then exports the stored API key into the environment via
/// [`hydrate_api_key`].
pub async fn load_runtime_credentials(
    config: &mut Config,
    storage: Arc<StorageManager>,
) -> Result<()> {
    // `storage` is moved on its last use; the previous `storage.clone()`
    // bumped the Arc refcount for nothing (clippy: redundant_clone).
    if config.privacy.encrypt_local_data {
        let manager = unlock_credential_manager(config, storage)?;
        hydrate_api_key(config, Some(&manager)).await?;
    } else {
        hydrate_api_key(config, None).await?;
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Covers trimming, case folding, underscore aliasing, and the
    /// empty/legacy-alias fallbacks of `canonical_provider_name`.
    #[test]
    fn canonicalises_provider_names() {
        assert_eq!(canonical_provider_name("OLLAMA_CLOUD"), "ollama");
        assert_eq!(canonical_provider_name(" ollama-cloud"), "ollama");
        assert_eq!(canonical_provider_name(""), "ollama");
        // Whitespace-only input trims down to empty and falls back too.
        assert_eq!(canonical_provider_name("   "), "ollama");
        // Case folding applies to arbitrary provider names.
        assert_eq!(canonical_provider_name("Ollama"), "ollama");
        // Underscores become hyphens for non-alias names as well.
        assert_eq!(canonical_provider_name("custom_provider"), "custom-provider");
    }
}

View File

@@ -1,20 +1,23 @@
//! OWLEN CLI - Chat TUI client
mod cloud;
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use clap::{Parser, Subcommand};
use cloud::{load_runtime_credentials, CloudCommand};
use owlen_core::config as core_config;
use owlen_core::{
config::{Config, McpMode},
mcp::remote_client::RemoteMcpClient,
mode::Mode,
provider::ChatStream,
providers::OllamaProvider,
session::SessionController,
storage::StorageManager,
types::{ChatRequest, ChatResponse, Message, ModelInfo},
Error, Provider,
};
use owlen_ollama::OllamaProvider;
use owlen_tui::tui_controller::{TuiController, TuiRequest};
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
use std::borrow::Cow;
@@ -48,6 +51,9 @@ enum OwlenCommand {
/// Inspect or upgrade configuration files
#[command(subcommand)]
Config(ConfigCommand),
/// Manage Ollama Cloud credentials
#[command(subcommand)]
Cloud(CloudCommand),
/// Show manual steps for updating Owlen to the latest revision
Upgrade,
}
@@ -112,8 +118,7 @@ fn build_local_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
match provider_cfg.provider_type.as_str() {
"ollama" | "ollama-cloud" => {
let provider = OllamaProvider::from_config(provider_cfg, Some(&cfg.general))?;
let provider: Arc<dyn Provider> = Arc::new(provider);
Ok(provider)
Ok(Arc::new(provider) as Arc<dyn Provider>)
}
other => Err(anyhow::anyhow!(format!(
"Provider type '{other}' is not supported in legacy/local MCP mode"
@@ -121,9 +126,10 @@ fn build_local_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
}
}
fn run_command(command: OwlenCommand) -> Result<()> {
async fn run_command(command: OwlenCommand) -> Result<()> {
match command {
OwlenCommand::Config(config_cmd) => run_config_command(config_cmd),
OwlenCommand::Cloud(cloud_cmd) => cloud::run_cloud_command(cloud_cmd).await,
OwlenCommand::Upgrade => {
println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
println!(
@@ -163,16 +169,34 @@ fn run_config_doctor() -> Result<()> {
changes.push("default provider missing; reset to 'ollama'".to_string());
}
if let Some(mut legacy) = config.providers.remove("ollama-cloud") {
legacy.provider_type = "ollama".to_string();
use std::collections::hash_map::Entry;
match config.providers.entry("ollama".to_string()) {
Entry::Occupied(mut existing) => {
let entry = existing.get_mut();
if entry.api_key.is_none() {
entry.api_key = legacy.api_key.take();
}
if entry.base_url.is_none() && legacy.base_url.is_some() {
entry.base_url = legacy.base_url.take();
}
entry.extra.extend(legacy.extra);
}
Entry::Vacant(slot) => {
slot.insert(legacy);
}
}
changes.push(
"migrated legacy 'ollama-cloud' provider into unified 'ollama' entry".to_string(),
);
}
if !config.providers.contains_key("ollama") {
core_config::ensure_provider_config(&mut config, "ollama");
changes.push("added default ollama provider configuration".to_string());
}
if !config.providers.contains_key("ollama-cloud") {
core_config::ensure_provider_config(&mut config, "ollama-cloud");
changes.push("added default ollama-cloud provider configuration".to_string());
}
match config.mcp.mode {
McpMode::Legacy => {
config.mcp.mode = McpMode::LocalOnly;
@@ -329,7 +353,7 @@ async fn main() -> Result<()> {
// Parse command-line arguments
let Args { code, command } = Args::parse();
if let Some(command) = command {
return run_command(command);
return run_command(command).await;
}
let initial_mode = if code { Mode::Code } else { Mode::Chat };
@@ -339,8 +363,6 @@ async fn main() -> Result<()> {
let color_support = detect_terminal_color_support();
// Load configuration (or fall back to defaults) for the session controller.
let mut cfg = config::try_load_config().unwrap_or_default();
// Disable encryption for CLI to avoid password prompts in this environment.
cfg.privacy.encrypt_local_data = false;
if let Some(previous_theme) = apply_terminal_theme(&mut cfg, &color_support) {
let term_label = match &color_support {
TerminalColorSupport::Limited { term } => Cow::from(term.as_str()),
@@ -357,6 +379,8 @@ async fn main() -> Result<()> {
);
}
cfg.validate()?;
let storage = Arc::new(StorageManager::new().await?);
load_runtime_credentials(&mut cfg, storage.clone()).await?;
let (tui_tx, _tui_rx) = mpsc::unbounded_channel::<TuiRequest>();
let tui_controller = Arc::new(TuiController::new(tui_tx));
@@ -387,7 +411,6 @@ async fn main() -> Result<()> {
}
};
let storage = Arc::new(StorageManager::new().await?);
let controller =
SessionController::new(provider, cfg, storage.clone(), tui_controller, false).await?;
let (mut app, mut session_rx) = ChatApp::new(controller).await?;

View File

@@ -21,6 +21,7 @@ unicode-width = "0.1"
uuid = { workspace = true }
textwrap = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
async-trait = { workspace = true }
toml = { workspace = true }
shellexpand = { workspace = true }

View File

@@ -57,10 +57,6 @@ impl Default for Config {
fn default() -> Self {
let mut providers = HashMap::new();
providers.insert("ollama".to_string(), default_ollama_provider_config());
providers.insert(
"ollama-cloud".to_string(),
default_ollama_cloud_provider_config(),
);
Self {
schema_version: Self::default_schema_version(),
@@ -200,7 +196,6 @@ impl Config {
}
ensure_provider_config(self, "ollama");
ensure_provider_config(self, "ollama-cloud");
if self.schema_version.is_empty() {
self.schema_version = Self::default_schema_version();
}
@@ -222,9 +217,42 @@ impl Config {
CONFIG_SCHEMA_VERSION
);
}
if let Some(legacy_cloud) = self.providers.remove("ollama_cloud") {
self.merge_legacy_ollama_provider(legacy_cloud);
}
if let Some(legacy_cloud) = self.providers.remove("ollama-cloud") {
self.merge_legacy_ollama_provider(legacy_cloud);
}
self.schema_version = CONFIG_SCHEMA_VERSION.to_string();
}
fn merge_legacy_ollama_provider(&mut self, mut legacy_cloud: ProviderConfig) {
use std::collections::hash_map::Entry;
legacy_cloud.provider_type = "ollama".to_string();
match self.providers.entry("ollama".to_string()) {
Entry::Occupied(mut entry) => {
let target = entry.get_mut();
if target.base_url.is_none() {
target.base_url = legacy_cloud.base_url.take();
}
if target.api_key.is_none() {
target.api_key = legacy_cloud.api_key.take();
}
if target.extra.is_empty() && !legacy_cloud.extra.is_empty() {
target.extra = legacy_cloud.extra;
}
}
Entry::Vacant(entry) => {
entry.insert(legacy_cloud);
}
}
}
fn validate_default_provider(&self) -> Result<()> {
if self.general.default_provider.trim().is_empty() {
return Err(crate::Error::Config(
@@ -308,15 +336,6 @@ fn default_ollama_provider_config() -> ProviderConfig {
}
}
fn default_ollama_cloud_provider_config() -> ProviderConfig {
ProviderConfig {
provider_type: "ollama-cloud".to_string(),
base_url: Some("https://ollama.com".to_string()),
api_key: None,
extra: HashMap::new(),
}
}
/// Default configuration path with user home expansion
pub fn default_config_path() -> PathBuf {
if let Some(config_dir) = dirs::config_dir() {
@@ -787,11 +806,14 @@ pub fn ensure_provider_config<'a>(
) -> &'a ProviderConfig {
use std::collections::hash_map::Entry;
if matches!(provider_name, "ollama_cloud" | "ollama-cloud") {
return ensure_provider_config(config, "ollama");
}
match config.providers.entry(provider_name.to_string()) {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => {
let default = match provider_name {
"ollama-cloud" => default_ollama_cloud_provider_config(),
"ollama" => default_ollama_provider_config(),
other => ProviderConfig {
provider_type: other.to_string(),
@@ -857,20 +879,44 @@ mod tests {
}
#[test]
fn default_config_contains_local_and_cloud_providers() {
fn default_config_contains_local_provider() {
let config = Config::default();
assert!(config.providers.contains_key("ollama"));
assert!(config.providers.contains_key("ollama-cloud"));
}
#[test]
fn ensure_provider_config_backfills_cloud_defaults() {
fn ensure_provider_config_aliases_cloud_defaults() {
let mut config = Config::default();
config.providers.remove("ollama-cloud");
config.providers.clear();
let cloud = ensure_provider_config(&mut config, "ollama-cloud");
assert_eq!(cloud.provider_type, "ollama-cloud");
assert_eq!(cloud.base_url.as_deref(), Some("https://ollama.com"));
assert_eq!(cloud.provider_type, "ollama");
assert_eq!(cloud.base_url.as_deref(), Some("http://localhost:11434"));
assert!(config.providers.contains_key("ollama"));
assert!(!config.providers.contains_key("ollama-cloud"));
}
#[test]
fn migrate_ollama_cloud_underscore_key() {
let mut config = Config::default();
config.providers.clear();
config.providers.insert(
"ollama_cloud".to_string(),
ProviderConfig {
provider_type: "ollama_cloud".to_string(),
base_url: Some("https://api.ollama.com".to_string()),
api_key: Some("secret".to_string()),
extra: HashMap::new(),
},
);
config.apply_schema_migrations("1.0.0");
assert!(config.providers.get("ollama_cloud").is_none());
assert!(config.providers.get("ollama-cloud").is_none());
let cloud = config.providers.get("ollama").expect("migrated config");
assert_eq!(cloud.provider_type, "ollama");
assert_eq!(cloud.base_url.as_deref(), Some("https://api.ollama.com"));
assert_eq!(cloud.api_key.as_deref(), Some("secret"));
}
#[test]

View File

@@ -10,6 +10,8 @@ pub struct ApiCredentials {
pub endpoint: String,
}
pub const OLLAMA_CLOUD_CREDENTIAL_ID: &str = "provider_ollama_cloud";
pub struct CredentialManager {
storage: Arc<StorageManager>,
master_key: Arc<Vec<u8>>,

View File

@@ -15,6 +15,7 @@ pub mod mcp;
pub mod mode;
pub mod model;
pub mod provider;
pub mod providers;
pub mod router;
pub mod sandbox;
pub mod session;
@@ -43,6 +44,7 @@ pub use mode::*;
pub use model::*;
// Export provider types but exclude test_utils to avoid ambiguity
pub use provider::{ChatStream, LLMProvider, Provider, ProviderConfig, ProviderRegistry};
pub use providers::*;
pub use router::*;
pub use sandbox::*;
pub use session::*;

View File

@@ -0,0 +1,8 @@
//! Built-in LLM provider implementations.
//!
//! Each provider integration lives in its own module so that maintenance
//! stays focused and configuration remains clear.
pub mod ollama;
pub use ollama::OllamaProvider;

View File

@@ -5,7 +5,6 @@ edition = "2021"
[dependencies]
owlen-core = { path = "../owlen-core" }
owlen-ollama = { path = "../owlen-ollama" }
tokio = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

View File

@@ -14,13 +14,14 @@ use owlen_core::mcp::protocol::{
};
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
use owlen_core::provider::ProviderConfig;
use owlen_core::providers::OllamaProvider;
use owlen_core::types::{ChatParameters, ChatRequest, Message};
use owlen_core::Provider;
use owlen_ollama::OllamaProvider;
use serde::Deserialize;
use serde_json::{json, Value};
use std::collections::HashMap;
use std::env;
use std::sync::Arc;
use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt};
use tokio_stream::StreamExt;
@@ -108,42 +109,56 @@ fn resources_list_descriptor() -> McpToolDescriptor {
}
}
fn provider_from_config() -> Result<OllamaProvider, RpcError> {
fn provider_from_config() -> Result<Arc<dyn Provider>, RpcError> {
let mut config = OwlenConfig::load(None).unwrap_or_default();
let provider_name =
let requested_name =
env::var("OWLEN_PROVIDER").unwrap_or_else(|_| config.general.default_provider.clone());
if config.provider(&provider_name).is_none() {
ensure_provider_config(&mut config, &provider_name);
let provider_key = canonical_provider_name(&requested_name);
if config.provider(&provider_key).is_none() {
ensure_provider_config(&mut config, &provider_key);
}
let provider_cfg: ProviderConfig =
config.provider(&provider_name).cloned().ok_or_else(|| {
config.provider(&provider_key).cloned().ok_or_else(|| {
RpcError::internal_error(format!(
"Provider '{provider_name}' not found in configuration"
"Provider '{provider_key}' not found in configuration"
))
})?;
if provider_cfg.provider_type != "ollama" && provider_cfg.provider_type != "ollama-cloud" {
return Err(RpcError::internal_error(format!(
"Unsupported provider type '{}' for MCP LLM server",
provider_cfg.provider_type
)));
match provider_cfg.provider_type.as_str() {
"ollama" | "ollama-cloud" => {
let provider = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
.map_err(|e| {
RpcError::internal_error(format!(
"Failed to init Ollama provider from config: {e}"
))
})?;
Ok(Arc::new(provider) as Arc<dyn Provider>)
}
other => Err(RpcError::internal_error(format!(
"Unsupported provider type '{other}' for MCP LLM server"
))),
}
OllamaProvider::from_config(&provider_cfg, Some(&config.general)).map_err(|e| {
RpcError::internal_error(format!("Failed to init OllamaProvider from config: {}", e))
})
}
fn create_provider() -> Result<OllamaProvider, RpcError> {
fn create_provider() -> Result<Arc<dyn Provider>, RpcError> {
if let Ok(url) = env::var("OLLAMA_URL") {
return OllamaProvider::new(&url).map_err(|e| {
RpcError::internal_error(format!("Failed to init OllamaProvider: {}", e))
});
let provider = OllamaProvider::new(&url).map_err(|e| {
RpcError::internal_error(format!("Failed to init Ollama provider: {e}"))
})?;
return Ok(Arc::new(provider) as Arc<dyn Provider>);
}
provider_from_config()
}
fn canonical_provider_name(name: &str) -> String {
if name.eq_ignore_ascii_case("ollama-cloud") {
"ollama".to_string()
} else {
name.to_string()
}
}
async fn handle_generate_text(args: GenerateTextArgs) -> Result<String, RpcError> {
let provider = create_provider()?;
@@ -409,16 +424,14 @@ async fn main() -> anyhow::Result<()> {
}
};
// Initialize Ollama provider and start streaming
let ollama_url = env::var("OLLAMA_URL")
.unwrap_or_else(|_| "http://localhost:11434".to_string());
let provider = match OllamaProvider::new(&ollama_url) {
// Initialize provider and start streaming
let provider = match create_provider() {
Ok(p) => p,
Err(e) => {
let err_resp = RpcErrorResponse::new(
id.clone(),
RpcError::internal_error(format!(
"Failed to init OllamaProvider: {}",
"Failed to initialize provider: {:?}",
e
)),
);

View File

@@ -1,34 +0,0 @@
[package]
name = "owlen-ollama"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
homepage.workspace = true
description = "Ollama provider for OWLEN LLM client"
[dependencies]
owlen-core = { path = "../owlen-core" }
# HTTP client
reqwest = { workspace = true }
# Async runtime
tokio = { workspace = true }
tokio-stream = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
# Serialization
serde = { workspace = true }
serde_json = { workspace = true }
# Utilities
anyhow = { workspace = true }
thiserror = { workspace = true }
uuid = { workspace = true }
async-trait = { workspace = true }
[dev-dependencies]
tokio-test = { workspace = true }

View File

@@ -1,9 +0,0 @@
# Owlen Ollama
This crate provides an implementation of the `owlen-core::Provider` trait for the [Ollama](https://ollama.ai) backend.
It allows Owlen to communicate with a local Ollama instance, sending requests and receiving responses from locally-run large language models. You can also target [Ollama Cloud](https://docs.ollama.com/cloud) by pointing the provider at `https://ollama.com` (or `https://api.ollama.com`) and providing an API key through your Owlen configuration, or via the `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` environment variables. When a key is supplied, the client automatically adds the required Bearer authorization header, accepts either host without rewriting, and expands inline environment references such as `$OLLAMA_API_KEY` if you prefer not to check the secret into your config file. The generated configuration now includes both `providers.ollama` and `providers.ollama-cloud` entries; switch between them by updating `general.default_provider`.
## Configuration
To use this provider, you need to have Ollama installed and running. The default address is `http://localhost:11434`. You can configure this in your `config.toml` if your Ollama instance is running elsewhere.

View File

@@ -17,7 +17,7 @@ use crate::config;
use crate::events::Event;
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
use std::collections::{BTreeSet, HashSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::sync::Arc;
const ONBOARDING_STATUS_LINE: &str =
@@ -2392,70 +2392,85 @@ impl ChatApp {
let mut models = Vec::new();
let mut errors = Vec::new();
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();
let server_binary = workspace_root.as_ref().and_then(|root| {
let candidates = [
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
];
candidates
.iter()
.map(|rel| root.join(rel))
.find(|p| p.exists())
.map(|p| p.to_string_lossy().into_owned())
});
for (name, provider_cfg) in provider_entries {
let provider_type = provider_cfg.provider_type.to_ascii_lowercase();
if provider_type != "ollama" && provider_type != "ollama-cloud" {
continue;
}
// All providers communicate via MCP LLM server (Phase 10).
// For cloud providers, the URL is passed via the provider config.
let client_result = if provider_type == "ollama-cloud" {
// Cloud Ollama - create MCP client with custom URL via env var
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();
let binary_path = workspace_root.and_then(|root| {
let candidates = [
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
];
candidates
.iter()
.map(|rel| root.join(rel))
.find(|p| p.exists())
});
if let Some(path) = binary_path {
let mut env_vars = HashMap::new();
if let Some(url) = &provider_cfg.base_url {
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
}
let config = McpServerConfig {
name: name.clone(),
command: path.to_string_lossy().into_owned(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars,
};
RemoteMcpClient::new_with_config(&config)
} else {
Err(owlen_core::Error::NotImplemented(
"MCP server binary not found".into(),
))
}
let canonical_name = if name.eq_ignore_ascii_case("ollama-cloud") {
"ollama".to_string()
} else {
// Local Ollama - use default MCP client
RemoteMcpClient::new()
name.clone()
};
// All providers communicate via MCP LLM server (Phase 10).
// Select provider by name via OWLEN_PROVIDER so per-provider settings apply.
let mut env_vars = HashMap::new();
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.clone());
let client_result = if let Some(binary_path) = server_binary.as_ref() {
use owlen_core::config::McpServerConfig;
let config = McpServerConfig {
name: format!("provider::{canonical_name}"),
command: binary_path.clone(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars.clone(),
};
RemoteMcpClient::new_with_config(&config)
} else {
// Fallback to legacy discovery: temporarily set env vars while spawning.
let backups: Vec<(String, Option<String>)> = env_vars
.keys()
.map(|key| (key.clone(), std::env::var(key).ok()))
.collect();
for (key, value) in env_vars.iter() {
std::env::set_var(key, value);
}
let result = RemoteMcpClient::new();
for (key, original) in backups {
if let Some(value) = original {
std::env::set_var(&key, value);
} else {
std::env::remove_var(&key);
}
}
result
};
match client_result {
Ok(client) => match client.list_models().await {
Ok(mut provider_models) => {
for model in &mut provider_models {
model.provider = name.clone();
model.provider = canonical_name.clone();
}
models.extend(provider_models);
}
Err(err) => errors.push(format!("{}: {}", name, err)),
},
Err(err) => errors.push(format!("{}: {}", name, err)),
Err(err) => errors.push(format!("{}: {}", canonical_name, err)),
}
}
@@ -2497,13 +2512,50 @@ impl ChatApp {
items.push(ModelSelectorItem::header(provider.clone(), is_expanded));
if is_expanded {
let mut matches: Vec<(usize, &ModelInfo)> = self
let relevant: Vec<(usize, &ModelInfo)> = self
.models
.iter()
.enumerate()
.filter(|(_, model)| &model.provider == provider)
.collect();
let mut best_by_canonical: HashMap<String, (i8, (usize, &ModelInfo))> =
HashMap::new();
let provider_lower = provider.to_ascii_lowercase();
for (idx, model) in relevant {
let canonical = model.id.to_string();
let is_cloud_id = model.id.ends_with("-cloud");
let priority = match provider_lower.as_str() {
"ollama" | "ollama-cloud" => {
if is_cloud_id {
1
} else {
2
}
}
_ => 1,
};
best_by_canonical
.entry(canonical)
.and_modify(|entry| {
if priority > entry.0
|| (priority == entry.0 && model.id < entry.1 .1.id)
{
*entry = (priority, (idx, model));
}
})
.or_insert((priority, (idx, model)));
}
let mut matches: Vec<(usize, &ModelInfo)> = best_by_canonical
.into_values()
.map(|entry| entry.1)
.collect();
matches.sort_by(|(_, a), (_, b)| a.id.cmp(&b.id));
if matches.is_empty() {
@@ -2680,54 +2732,67 @@ impl ChatApp {
return Ok(());
}
let provider_cfg = if let Some(cfg) = self.controller.config().provider(provider_name) {
cfg.clone()
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;
let canonical_name = if provider_name.eq_ignore_ascii_case("ollama-cloud") {
"ollama"
} else {
let mut guard = self.controller.config_mut();
// Pass a mutable reference directly; avoid unnecessary deref
let cfg = config::ensure_provider_config(&mut guard, provider_name);
cfg.clone()
provider_name
};
// All providers use MCP architecture (Phase 10).
// For cloud providers, pass the URL via environment variable.
let provider: Arc<dyn owlen_core::provider::Provider> = if provider_cfg
.provider_type
.eq_ignore_ascii_case("ollama-cloud")
{
// Cloud Ollama - create MCP client with custom URL
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;
if self.controller.config().provider(canonical_name).is_none() {
let mut guard = self.controller.config_mut();
config::ensure_provider_config(&mut guard, canonical_name);
}
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()?;
let binary_path = [
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();
let server_binary = workspace_root.as_ref().and_then(|root| {
[
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
]
.iter()
.map(|rel| workspace_root.join(rel))
.map(|rel| root.join(rel))
.find(|p| p.exists())
.ok_or_else(|| anyhow::anyhow!("MCP LLM server binary not found"))?;
});
let mut env_vars = HashMap::new();
if let Some(url) = &provider_cfg.base_url {
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
}
let mut env_vars = HashMap::new();
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.to_string());
let provider: Arc<dyn owlen_core::provider::Provider> = if let Some(path) = server_binary {
let config = McpServerConfig {
name: provider_name.to_string(),
command: binary_path.to_string_lossy().into_owned(),
name: canonical_name.to_string(),
command: path.to_string_lossy().into_owned(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars,
};
Arc::new(RemoteMcpClient::new_with_config(&config)?)
} else {
// Local Ollama via default MCP client
Arc::new(RemoteMcpClient::new()?)
let backups: Vec<(String, Option<String>)> = env_vars
.keys()
.map(|key| (key.clone(), std::env::var(key).ok()))
.collect();
for (key, value) in env_vars.iter() {
std::env::set_var(key, value);
}
let result = RemoteMcpClient::new();
for (key, original) in backups {
if let Some(value) = original {
std::env::set_var(&key, value);
} else {
std::env::remove_var(&key);
}
}
Arc::new(result?)
};
self.controller.switch_provider(provider).await?;

View File

@@ -1390,16 +1390,17 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
.add_modifier(Modifier::BOLD),
))
}
ModelSelectorItemKind::Model {
provider: _,
model_index,
} => {
ModelSelectorItemKind::Model { model_index, .. } => {
if let Some(model) = app.model_info_by_index(*model_index) {
let tool_indicator = if model.supports_tools { "🔧 " } else { " " };
let label = if model.name.is_empty() {
format!(" {}{}", tool_indicator, model.id)
let mut badges = Vec::new();
if model.supports_tools {
badges.push("🔧");
}
let label = if badges.is_empty() {
format!(" {}", model.id)
} else {
format!(" {}{} {}", tool_indicator, model.id, model.name)
format!(" {} - {}", model.id, badges.join(" "))
};
ListItem::new(Span::styled(
label,