feat(phase10): complete MCP-only architecture migration
Phase 10 "Cleanup & Production Polish" is now complete. All LLM interactions now go through the Model Context Protocol (MCP), removing direct provider dependencies from CLI/TUI. ## Major Changes ### MCP Architecture - All providers (local and cloud Ollama) now use RemoteMcpClient - Removed owlen-ollama dependency from owlen-tui - MCP LLM server accepts OLLAMA_URL environment variable for cloud providers - Proper notification handling for streaming responses - Fixed response deserialization (McpToolResponse unwrapping) ### Code Cleanup - Removed direct OllamaProvider instantiation from TUI - Updated collect_models_from_all_providers() to use MCP for all providers - Updated switch_provider() to use MCP with environment configuration - Removed unused general config variable ### Documentation - Added comprehensive MCP Architecture section to docs/architecture.md - Documented MCP communication flow and cloud provider support - Updated crate breakdown to reflect MCP servers ### Security & Performance - Path traversal protection verified for all resource operations - Process isolation via separate MCP server processes - Tool permissions controlled via consent manager - Clean release build of entire workspace verified ## Benefits of MCP Architecture 1. **Separation of Concerns**: TUI/CLI never directly instantiates providers 2. **Process Isolation**: LLM interactions run in separate processes 3. **Extensibility**: New providers can be added as MCP servers 4. **Multi-Transport**: Supports STDIO, HTTP, and WebSocket 5. **Tool Integration**: MCP servers expose tools to LLMs This completes Phase 10 and establishes a clean, production-ready architecture for future development. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
@@ -2320,14 +2320,14 @@ impl ChatApp {
     }

     async fn collect_models_from_all_providers(&self) -> (Vec<ModelInfo>, Vec<String>) {
-        let (provider_entries, general) = {
+        let provider_entries = {
             let config = self.controller.config();
             let entries: Vec<(String, ProviderConfig)> = config
                 .providers
                 .iter()
                 .map(|(name, cfg)| (name.clone(), cfg.clone()))
                 .collect();
-            (entries, config.general.clone())
+            entries
         };

         let mut models = Vec::new();
@@ -2339,36 +2339,64 @@ impl ChatApp {
                 continue;
             }

-            // Separate handling based on provider type.
-            if provider_type == "ollama" {
-                // Local Ollama – communicate via the MCP LLM server.
-                match RemoteMcpClient::new() {
-                    Ok(client) => match client.list_models().await {
-                        Ok(mut provider_models) => {
-                            for model in &mut provider_models {
-                                model.provider = name.clone();
-                            }
-                            models.extend(provider_models);
-                        }
-                        Err(err) => errors.push(format!("{}: {}", name, err)),
-                    },
-                    Err(err) => errors.push(format!("{}: {}", name, err)),
-                }
-            } else {
-                // Ollama Cloud – use the direct Ollama provider implementation.
-                use owlen_ollama::OllamaProvider;
-                match OllamaProvider::from_config(&provider_cfg, Some(&general)) {
-                    Ok(provider) => match provider.list_models().await {
-                        Ok(mut cloud_models) => {
-                            for model in &mut cloud_models {
-                                model.provider = name.clone();
-                            }
-                            models.extend(cloud_models);
-                        }
-                        Err(err) => errors.push(format!("{}: {}", name, err)),
-                    },
-                    Err(err) => errors.push(format!("{}: {}", name, err)),
-                }
-            }
+            // All providers communicate via MCP LLM server (Phase 10).
+            // For cloud providers, the URL is passed via the provider config.
+            let client_result = if provider_type == "ollama-cloud" {
+                // Cloud Ollama - create MCP client with custom URL via env var
+                use owlen_core::config::McpServerConfig;
+                use std::collections::HashMap;
+
+                let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
+                    .join("../..")
+                    .canonicalize()
+                    .ok();
+
+                let binary_path = workspace_root.and_then(|root| {
+                    let candidates = [
+                        "target/debug/owlen-mcp-llm-server",
+                        "target/release/owlen-mcp-llm-server",
+                    ];
+                    candidates
+                        .iter()
+                        .map(|rel| root.join(rel))
+                        .find(|p| p.exists())
+                });
+
+                if let Some(path) = binary_path {
+                    let mut env_vars = HashMap::new();
+                    if let Some(url) = &provider_cfg.base_url {
+                        env_vars.insert("OLLAMA_URL".to_string(), url.clone());
+                    }
+
+                    let config = McpServerConfig {
+                        name: name.clone(),
+                        command: path.to_string_lossy().into_owned(),
+                        args: Vec::new(),
+                        transport: "stdio".to_string(),
+                        env: env_vars,
+                    };
+                    RemoteMcpClient::new_with_config(&config)
+                } else {
+                    Err(owlen_core::Error::NotImplemented(
+                        "MCP server binary not found".into(),
+                    ))
+                }
+            } else {
+                // Local Ollama - use default MCP client
+                RemoteMcpClient::new()
+            };
+
+            match client_result {
+                Ok(client) => match client.list_models().await {
+                    Ok(mut provider_models) => {
+                        for model in &mut provider_models {
+                            model.provider = name.clone();
+                        }
+                        models.extend(provider_models);
+                    }
+                    Err(err) => errors.push(format!("{}: {}", name, err)),
+                },
+                Err(err) => errors.push(format!("{}: {}", name, err)),
+            }
         }

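The `(Vec<ModelInfo>, Vec<String>)` return keeps model listing best-effort: a provider whose MCP server fails to spawn contributes an error string instead of aborting the whole scan. A hypothetical caller (the method is private to `ChatApp`, and the exact error text depends on the `Error` type's `Display` impl):

```rust
// Best-effort listing: models from every reachable provider, plus one
// formatted error string per provider that failed.
let (models, errors) = app.collect_models_from_all_providers().await;
for err in &errors {
    // e.g. "ollama-cloud: MCP server binary not found"
    eprintln!("provider unavailable: {err}");
}
```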
@@ -2602,18 +2630,46 @@ impl ChatApp {
             cfg.clone()
         };

-        let general = self.controller.config().general.clone();
-        // Choose the appropriate provider implementation based on its type.
-        let provider: Arc<dyn owlen_core::provider::Provider> =
-            if provider_cfg.provider_type.eq_ignore_ascii_case("ollama") {
-                // Local Ollama via MCP server.
-                Arc::new(RemoteMcpClient::new()?)
-            } else {
-                // Ollama Cloud – instantiate the direct provider.
-                use owlen_ollama::OllamaProvider;
-                let ollama = OllamaProvider::from_config(&provider_cfg, Some(&general))?;
-                Arc::new(ollama)
-            };
+        // All providers use MCP architecture (Phase 10).
+        // For cloud providers, pass the URL via environment variable.
+        let provider: Arc<dyn owlen_core::provider::Provider> = if provider_cfg
+            .provider_type
+            .eq_ignore_ascii_case("ollama-cloud")
+        {
+            // Cloud Ollama - create MCP client with custom URL
+            use owlen_core::config::McpServerConfig;
+            use std::collections::HashMap;
+
+            let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
+                .join("../..")
+                .canonicalize()?;
+
+            let binary_path = [
+                "target/debug/owlen-mcp-llm-server",
+                "target/release/owlen-mcp-llm-server",
+            ]
+            .iter()
+            .map(|rel| workspace_root.join(rel))
+            .find(|p| p.exists())
+            .ok_or_else(|| anyhow::anyhow!("MCP LLM server binary not found"))?;
+
+            let mut env_vars = HashMap::new();
+            if let Some(url) = &provider_cfg.base_url {
+                env_vars.insert("OLLAMA_URL".to_string(), url.clone());
+            }
+
+            let config = McpServerConfig {
+                name: provider_name.to_string(),
+                command: binary_path.to_string_lossy().into_owned(),
+                args: Vec::new(),
+                transport: "stdio".to_string(),
+                env: env_vars,
+            };
+            Arc::new(RemoteMcpClient::new_with_config(&config)?)
+        } else {
+            // Local Ollama via default MCP client
+            Arc::new(RemoteMcpClient::new()?)
+        };

         self.controller.switch_provider(provider).await?;
         self.current_provider = provider_name.to_string();
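On the extensibility point from the commit message: wiring in a further backend would reuse exactly the shape above. A purely hypothetical sketch — the `owlen-mcp-anthropic-server` binary does not exist in this commit; only `McpServerConfig` and `RemoteMcpClient::new_with_config()` are real, as used in the diff.

```rust
use owlen_core::config::McpServerConfig;
use std::collections::HashMap;

// Hypothetical: a new provider ships as its own MCP server binary.
// This binary name is illustrative only.
let config = McpServerConfig {
    name: "anthropic".to_string(),
    command: "target/release/owlen-mcp-anthropic-server".to_string(),
    args: Vec::new(),
    transport: "stdio".to_string(), // HTTP and WebSocket transports also exist
    env: HashMap::new(),            // e.g. an API-key variable for the new backend
};
let provider: Arc<dyn owlen_core::provider::Provider> =
    Arc::new(RemoteMcpClient::new_with_config(&config)?);
```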