feat(mcp): add LLM server crate and remote client integration
- Introduce `owlen-mcp-llm-server` crate with RPC handling, a `generate_text` tool, model listing, and streaming notifications.
- Add an `RpcNotification` struct and a `MODELS_LIST` method to the MCP protocol (a rough sketch follows below).
- Update `owlen-core` to depend on `tokio-stream`.
- Adjust the Ollama provider to omit an empty `tools` field for compatibility.
- Enhance `RemoteMcpClient` to locate the renamed server binary, handle resource tools locally, and implement the `Provider` trait (model listing, chat, streaming, health check).
- Add the new crate to the workspace `Cargo.toml`.
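The `RpcNotification` and `MODELS_LIST` additions mentioned above are not part of the hunks shown below. The following is a rough sketch of what such protocol pieces typically look like; the field layout, the `"models/list"` wire string, and the serde attributes are assumptions for illustration, not the crate's actual definitions.

```rust
// Illustrative sketch only – shapes and names beyond `RpcNotification` and
// `MODELS_LIST` are assumptions, not owlen's real protocol definitions.
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// JSON-RPC 2.0 style notification: like a request but without an `id`,
/// so no response is expected (useful for streaming progress chunks).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcNotification {
    pub jsonrpc: String, // always "2.0"
    pub method: String,  // e.g. a streaming-progress method name
    #[serde(skip_serializing_if = "Option::is_none")]
    pub params: Option<Value>,
}

/// Method name a client sends to ask the MCP LLM server for its models.
pub const MODELS_LIST: &str = "models/list"; // assumed wire name
```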
@@ -14,7 +14,7 @@ use uuid::Uuid;
 use crate::config;
 use crate::events::Event;
-use owlen_ollama::OllamaProvider;
+use owlen_core::mcp::remote_client::RemoteMcpClient;
 use std::collections::{BTreeSet, HashSet};
 use std::sync::Arc;
 
@@ -2195,20 +2195,41 @@ impl ChatApp {
                 continue;
             }
 
-            match OllamaProvider::from_config(&provider_cfg, Some(&general)) {
-                Ok(provider) => match provider.list_models().await {
-                    Ok(mut provider_models) => {
-                        for model in &mut provider_models {
-                            model.provider = name.clone();
-                        }
-                        models.extend(provider_models);
-                    }
-                    Err(err) => errors.push(format!("{}: {}", name, err)),
-                },
-                Err(err) => errors.push(format!("{}: {}", name, err)),
-            }
+            // Separate handling based on provider type.
+            if provider_type == "ollama" {
+                // Local Ollama – communicate via the MCP LLM server.
+                match RemoteMcpClient::new() {
+                    Ok(client) => match client.list_models().await {
+                        Ok(mut provider_models) => {
+                            for model in &mut provider_models {
+                                model.provider = name.clone();
+                            }
+                            models.extend(provider_models);
+                        }
+                        Err(err) => errors.push(format!("{}: {}", name, err)),
+                    },
+                    Err(err) => errors.push(format!("{}: {}", name, err)),
+                }
+            } else {
+                // Ollama Cloud – use the direct Ollama provider implementation.
+                use owlen_ollama::OllamaProvider;
+                match OllamaProvider::from_config(&provider_cfg, Some(&general)) {
+                    Ok(provider) => match provider.list_models().await {
+                        Ok(mut cloud_models) => {
+                            for model in &mut cloud_models {
+                                model.provider = name.clone();
+                            }
+                            models.extend(cloud_models);
+                        }
+                        Err(err) => errors.push(format!("{}: {}", name, err)),
+                    },
+                    Err(err) => errors.push(format!("{}: {}", name, err)),
+                }
+            }
         }
 
         // Sort models alphabetically by name for a predictable UI order
         models.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
         (models, errors)
     }
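The `client.list_models()` call above works because, per the commit message, `RemoteMcpClient` now implements the same `Provider` trait as `OllamaProvider`. A condensed sketch of that arrangement follows; the trait signature, the `ModelInfo` type, and the `call` helper are simplified assumptions rather than owlen-core's actual API.

```rust
// Sketch only – trait shape, ModelInfo, and the RPC helper are assumed.
use async_trait::async_trait;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
pub struct ModelInfo {
    pub name: String,
    pub provider: String,
}

#[async_trait]
pub trait Provider: Send + Sync {
    async fn list_models(&self) -> anyhow::Result<Vec<ModelInfo>>;
    // chat, streaming, and health-check methods elided.
}

pub struct RemoteMcpClient;

impl RemoteMcpClient {
    // Stand-in for the client's RPC plumbing (stdio to the MCP LLM server).
    async fn call(&self, _method: &str) -> anyhow::Result<serde_json::Value> {
        unimplemented!("illustrative stub")
    }
}

#[async_trait]
impl Provider for RemoteMcpClient {
    async fn list_models(&self) -> anyhow::Result<Vec<ModelInfo>> {
        // Forward the request to the MCP LLM server and decode the reply.
        let raw = self.call("models/list").await?;
        Ok(serde_json::from_value(raw)?)
    }
}
```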
@@ -2438,7 +2459,17 @@ impl ChatApp {
         };
 
         let general = self.controller.config().general.clone();
-        let provider = Arc::new(OllamaProvider::from_config(&provider_cfg, Some(&general))?);
+        // Choose the appropriate provider implementation based on its type.
+        let provider: Arc<dyn owlen_core::provider::Provider> =
+            if provider_cfg.provider_type.eq_ignore_ascii_case("ollama") {
+                // Local Ollama via MCP server.
+                Arc::new(RemoteMcpClient::new()?)
+            } else {
+                // Ollama Cloud – instantiate the direct provider.
+                use owlen_ollama::OllamaProvider;
+                let ollama = OllamaProvider::from_config(&provider_cfg, Some(&general))?;
+                Arc::new(ollama)
+            };
 
         self.controller.switch_provider(provider).await?;
         self.current_provider = provider_name.to_string();
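The `tools`-field compatibility fix mentioned in the commit message does not appear in these hunks. For reference, one common serde pattern for dropping an empty array from a serialized request looks like the sketch below; the struct and field layout here are assumptions, not the actual owlen-ollama request type.

```rust
use serde::Serialize;

// Hypothetical request shape – only the `tools` handling is the point here.
#[derive(Serialize)]
struct ChatRequest {
    model: String,
    // Omit the key entirely when no tools are supplied, so Ollama builds
    // that reject an empty `tools` array still accept the request.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<serde_json::Value>,
}
```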