feat(mcp): add LLM server crate and remote client integration

- Introduce the `owlen-mcp-llm-server` crate with JSON-RPC request handling, a `generate_text` tool, model listing, and streaming notifications.
- Add an `RpcNotification` struct and a `MODELS_LIST` method to the MCP protocol (sketched after this list).
- Update `owlen-core` to depend on `tokio-stream`.
- Adjust the Ollama provider to omit the `tools` field when it is empty, for compatibility (see the serde sketch below).
- Enhance `RemoteMcpClient` to locate the renamed server binary, handle resource tools locally, and implement the `Provider` trait (model listing, chat, streaming, health check); sketches of the binary lookup and the streaming bridge follow this list.
- Add new crate to workspace `Cargo.toml`.
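
The protocol additions are small. Below is a minimal sketch of what `RpcNotification` and `MODELS_LIST` plausibly look like: the field layout follows JSON-RPC 2.0 notifications (no `id`, so the receiver must not reply), but the exact method string and field names are assumptions, not taken from the diff.

```rust
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// A JSON-RPC 2.0 notification: shaped like a request but without an
/// `id`, so no response is expected. The server uses these to push
/// streaming chunks to the client.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcNotification {
    pub jsonrpc: String, // always "2.0"
    pub method: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub params: Option<Value>,
}

/// Method name for the model-listing request (exact string assumed).
pub const MODELS_LIST: &str = "models/list";
```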
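The Ollama compatibility fix most likely reduces to a serde attribute on the request struct. A sketch, assuming a `ChatRequest` type that serializes directly into the `/api/chat` request body; the struct and field names here are assumptions:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct ChatMessage {
    role: String,
    content: String,
}

#[derive(Serialize)]
struct ToolDefinition {
    name: String,
}

#[derive(Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<ChatMessage>,
    // Serialize `tools` only when non-empty; some Ollama versions
    // reject a request that carries `"tools": []`.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tools: Vec<ToolDefinition>,
}
```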
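On the client side, two pieces are worth sketching: how `RemoteMcpClient` might locate the renamed server binary, and how `tokio-stream` bridges an mpsc channel of streamed chunks into a `Stream` for the `Provider` streaming API. Both the lookup order and the environment-variable name are assumptions:

```rust
use std::env;
use std::path::PathBuf;

use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use tokio_stream::StreamExt;

/// Locate the `owlen-mcp-llm-server` binary: explicit override first,
/// then a sibling of the current executable (the usual cargo target
/// layout), then fall back to $PATH resolution at spawn time.
fn locate_server_binary() -> PathBuf {
    if let Ok(path) = env::var("OWLEN_MCP_LLM_SERVER") {
        return PathBuf::from(path);
    }
    if let Ok(exe) = env::current_exe() {
        let sibling = exe.with_file_name("owlen-mcp-llm-server");
        if sibling.exists() {
            return sibling;
        }
    }
    PathBuf::from("owlen-mcp-llm-server")
}

/// Wrap a channel of streamed text chunks in a `Stream` — the shape a
/// `Provider`-style streaming chat method would hand back to callers.
fn chunk_stream(rx: mpsc::Receiver<String>) -> impl tokio_stream::Stream<Item = String> {
    ReceiverStream::new(rx)
}

#[tokio::main]
async fn main() {
    println!("server binary: {}", locate_server_binary().display());

    let (tx, rx) = mpsc::channel(16);
    tokio::spawn(async move {
        for chunk in ["Hel", "lo"] {
            let _ = tx.send(chunk.to_string()).await;
        }
    });
    let mut stream = chunk_stream(rx);
    while let Some(chunk) = stream.next().await {
        print!("{chunk}");
    }
    println!();
}
```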
Commit 05e90d3e2b (parent fe414d49e6), 2025-10-09 13:46:33 +02:00
8 changed files with 689 additions and 20 deletions

owlen-mcp-llm-server/Cargo.toml
@@ -0,0 +1,20 @@
[package]
name = "owlen-mcp-llm-server"
version = "0.1.0"
edition = "2021"

[dependencies]
owlen-core = { path = "../owlen-core" }
owlen-ollama = { path = "../owlen-ollama" }
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
tokio-stream = "0.1"

[[bin]]
name = "owlen-mcp-llm-server"
path = "src/lib.rs"

[lib]
path = "src/lib.rs"
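
Pointing both the `[[bin]]` and `[lib]` targets at `src/lib.rs` is unusual but legal: Cargo compiles the file twice, once per target, so the file must carry a binary entry point. A minimal sketch of how such a shared `src/lib.rs` could be laid out (the function name is an assumption):

```rust
// src/lib.rs — compiled both as the library and as the binary target.

/// Run the stdio JSON-RPC server loop (body elided in this sketch).
pub async fn serve_stdio() -> anyhow::Result<()> {
    Ok(())
}

// Entry point for the `[[bin]]` target; dead code in the lib build.
#[allow(dead_code)]
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    serve_stdio().await
}
```

A more conventional layout would keep a separate `src/main.rs` that simply calls into the library.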