refactor(ollama)!: remove Ollama provider crate and implementation
Deletes the `owlen-ollama` crate (its `Cargo.toml` and source files), removing the Ollama provider implementation from the workspace entirely. All providers now go through the MCP LLM server, aligning the project with the MCP-only architecture and eliminating the TUI's direct provider dependencies.
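For orientation, a minimal sketch of the wiring the TUI relies on instead of the deleted crate, based on the `McpServerConfig` fields, the `RemoteMcpClient` constructors, and the `OWLEN_PROVIDER`/`OLLAMA_URL` environment variables visible in the diff below. The helper function, its signature, and the import path for `RemoteMcpClient` are illustrative assumptions, not code from this commit.

```rust
// Sketch only: spawn the MCP LLM server over stdio and select a provider by name.
use std::collections::HashMap;
use std::sync::Arc;

use owlen_core::config::McpServerConfig;
// `RemoteMcpClient` presumably lives in the workspace's MCP client crate; import
// it from wherever the TUI does (the exact path is not shown in this diff).

fn spawn_provider_client(
    canonical_name: &str,   // e.g. "ollama"; "ollama-cloud" collapses to "ollama"
    server_binary: &str,    // a built target/{debug,release}/owlen-mcp-llm-server
    base_url: Option<&str>, // cloud endpoints pass their URL through OLLAMA_URL
) -> Result<Arc<RemoteMcpClient>, owlen_core::Error> {
    // The MCP LLM server picks the provider from OWLEN_PROVIDER, so per-provider
    // settings apply without any direct provider dependency in the TUI.
    let mut env = HashMap::new();
    env.insert("OWLEN_PROVIDER".to_string(), canonical_name.to_string());
    if let Some(url) = base_url {
        env.insert("OLLAMA_URL".to_string(), url.to_string());
    }

    let config = McpServerConfig {
        name: format!("provider::{canonical_name}"),
        command: server_binary.to_string(),
        args: Vec::new(),
        transport: "stdio".to_string(),
        env,
    };
    Ok(Arc::new(RemoteMcpClient::new_with_config(&config)?))
}
```

When no prebuilt server binary is found, the diff instead falls back to `RemoteMcpClient::new()` with the environment variables set temporarily around the spawn.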
@@ -17,7 +17,7 @@ use crate::config;
use crate::events::Event;
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
use std::collections::{BTreeSet, HashSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::sync::Arc;

const ONBOARDING_STATUS_LINE: &str =
@@ -2392,70 +2392,85 @@ impl ChatApp {
let mut models = Vec::new();
let mut errors = Vec::new();

let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();
let server_binary = workspace_root.as_ref().and_then(|root| {
let candidates = [
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
];
candidates
.iter()
.map(|rel| root.join(rel))
.find(|p| p.exists())
.map(|p| p.to_string_lossy().into_owned())
});

for (name, provider_cfg) in provider_entries {
let provider_type = provider_cfg.provider_type.to_ascii_lowercase();
if provider_type != "ollama" && provider_type != "ollama-cloud" {
continue;
}

// All providers communicate via MCP LLM server (Phase 10).
// For cloud providers, the URL is passed via the provider config.
let client_result = if provider_type == "ollama-cloud" {
// Cloud Ollama - create MCP client with custom URL via env var
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;

let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();

let binary_path = workspace_root.and_then(|root| {
let candidates = [
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
];
candidates
.iter()
.map(|rel| root.join(rel))
.find(|p| p.exists())
});

if let Some(path) = binary_path {
let mut env_vars = HashMap::new();
if let Some(url) = &provider_cfg.base_url {
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
}

let config = McpServerConfig {
name: name.clone(),
command: path.to_string_lossy().into_owned(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars,
};
RemoteMcpClient::new_with_config(&config)
} else {
Err(owlen_core::Error::NotImplemented(
"MCP server binary not found".into(),
))
}
let canonical_name = if name.eq_ignore_ascii_case("ollama-cloud") {
"ollama".to_string()
} else {
// Local Ollama - use default MCP client
RemoteMcpClient::new()
name.clone()
};

// All providers communicate via MCP LLM server (Phase 10).
// Select provider by name via OWLEN_PROVIDER so per-provider settings apply.
let mut env_vars = HashMap::new();
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.clone());

let client_result = if let Some(binary_path) = server_binary.as_ref() {
use owlen_core::config::McpServerConfig;

let config = McpServerConfig {
name: format!("provider::{canonical_name}"),
command: binary_path.clone(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars.clone(),
};
RemoteMcpClient::new_with_config(&config)
} else {
// Fallback to legacy discovery: temporarily set env vars while spawning.
let backups: Vec<(String, Option<String>)> = env_vars
.keys()
.map(|key| (key.clone(), std::env::var(key).ok()))
.collect();

for (key, value) in env_vars.iter() {
std::env::set_var(key, value);
}

let result = RemoteMcpClient::new();

for (key, original) in backups {
if let Some(value) = original {
std::env::set_var(&key, value);
} else {
std::env::remove_var(&key);
}
}

result
};

match client_result {
Ok(client) => match client.list_models().await {
Ok(mut provider_models) => {
for model in &mut provider_models {
model.provider = name.clone();
model.provider = canonical_name.clone();
}
models.extend(provider_models);
}
Err(err) => errors.push(format!("{}: {}", name, err)),
},
Err(err) => errors.push(format!("{}: {}", name, err)),
Err(err) => errors.push(format!("{}: {}", canonical_name, err)),
}
}

@@ -2497,13 +2512,50 @@ impl ChatApp {
items.push(ModelSelectorItem::header(provider.clone(), is_expanded));

if is_expanded {
let mut matches: Vec<(usize, &ModelInfo)> = self
let relevant: Vec<(usize, &ModelInfo)> = self
.models
.iter()
.enumerate()
.filter(|(_, model)| &model.provider == provider)
.collect();

let mut best_by_canonical: HashMap<String, (i8, (usize, &ModelInfo))> =
HashMap::new();

let provider_lower = provider.to_ascii_lowercase();

for (idx, model) in relevant {
let canonical = model.id.to_string();

let is_cloud_id = model.id.ends_with("-cloud");
let priority = match provider_lower.as_str() {
"ollama" | "ollama-cloud" => {
if is_cloud_id {
1
} else {
2
}
}
_ => 1,
};

best_by_canonical
.entry(canonical)
.and_modify(|entry| {
if priority > entry.0
|| (priority == entry.0 && model.id < entry.1 .1.id)
{
*entry = (priority, (idx, model));
}
})
.or_insert((priority, (idx, model)));
}

let mut matches: Vec<(usize, &ModelInfo)> = best_by_canonical
.into_values()
.map(|entry| entry.1)
.collect();

matches.sort_by(|(_, a), (_, b)| a.id.cmp(&b.id));

if matches.is_empty() {
@@ -2680,54 +2732,67 @@ impl ChatApp {
return Ok(());
}

let provider_cfg = if let Some(cfg) = self.controller.config().provider(provider_name) {
cfg.clone()
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;

let canonical_name = if provider_name.eq_ignore_ascii_case("ollama-cloud") {
"ollama"
} else {
let mut guard = self.controller.config_mut();
// Pass a mutable reference directly; avoid unnecessary deref
let cfg = config::ensure_provider_config(&mut guard, provider_name);
cfg.clone()
provider_name
};

// All providers use MCP architecture (Phase 10).
// For cloud providers, pass the URL via environment variable.
let provider: Arc<dyn owlen_core::provider::Provider> = if provider_cfg
.provider_type
.eq_ignore_ascii_case("ollama-cloud")
{
// Cloud Ollama - create MCP client with custom URL
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;
if self.controller.config().provider(canonical_name).is_none() {
let mut guard = self.controller.config_mut();
config::ensure_provider_config(&mut guard, canonical_name);
}

let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()?;

let binary_path = [
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
.join("../..")
.canonicalize()
.ok();
let server_binary = workspace_root.as_ref().and_then(|root| {
[
"target/debug/owlen-mcp-llm-server",
"target/release/owlen-mcp-llm-server",
]
.iter()
.map(|rel| workspace_root.join(rel))
.map(|rel| root.join(rel))
.find(|p| p.exists())
.ok_or_else(|| anyhow::anyhow!("MCP LLM server binary not found"))?;
});

let mut env_vars = HashMap::new();
if let Some(url) = &provider_cfg.base_url {
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
}
let mut env_vars = HashMap::new();
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.to_string());

let provider: Arc<dyn owlen_core::provider::Provider> = if let Some(path) = server_binary {
let config = McpServerConfig {
name: provider_name.to_string(),
command: binary_path.to_string_lossy().into_owned(),
name: canonical_name.to_string(),
command: path.to_string_lossy().into_owned(),
args: Vec::new(),
transport: "stdio".to_string(),
env: env_vars,
};
Arc::new(RemoteMcpClient::new_with_config(&config)?)
} else {
// Local Ollama via default MCP client
Arc::new(RemoteMcpClient::new()?)
let backups: Vec<(String, Option<String>)> = env_vars
.keys()
.map(|key| (key.clone(), std::env::var(key).ok()))
.collect();

for (key, value) in env_vars.iter() {
std::env::set_var(key, value);
}

let result = RemoteMcpClient::new();

for (key, original) in backups {
if let Some(value) = original {
std::env::set_var(&key, value);
} else {
std::env::remove_var(&key);
}
}

Arc::new(result?)
};

self.controller.switch_provider(provider).await?;