feat(mcp): add LLM server crate and remote client integration
- Introduce the `owlen-mcp-llm-server` crate with RPC handling, a `generate_text` tool, model listing, and streaming notifications.
- Add an `RpcNotification` struct and a `MODELS_LIST` method to the MCP protocol (a hedged sketch of these additions follows below).
- Update `owlen-core` to depend on `tokio-stream`.
- Adjust the Ollama provider to omit the empty `tools` field for compatibility (see the serialization note after the diff).
- Enhance `RemoteMcpClient` to locate the renamed server binary, handle resource tools locally, and implement the `Provider` trait (model listing, chat, streaming, health check); a rough trait sketch closes this page.
- Add the new crate to the workspace `Cargo.toml`.
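As context for the protocol bullet above, here is a minimal sketch of what the additions might look like. It is an illustration only: the field layout of `RpcNotification` and the exact string behind `MODELS_LIST` are assumptions, not code taken from the crate.

```rust
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// Hypothetical shape of the new notification type: like a JSON-RPC request
/// but without an `id`, so the server can push streaming updates (e.g.
/// partial `generate_text` output) without expecting a reply.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RpcNotification {
    pub jsonrpc: String,
    pub method: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub params: Option<Value>,
}

/// Hypothetical constant for the new model-listing method; the real value
/// in the MCP protocol module may differ.
pub const MODELS_LIST: &str = "models/list";
```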
@@ -602,13 +602,23 @@ impl Provider for OllamaProvider {
 
         let options = Self::build_options(parameters);
 
-        let ollama_tools = tools.as_ref().map(|t| Self::convert_tools_to_ollama(t));
+        // Only send the `tools` field if there is at least one tool.
+        // An empty array makes Ollama validate tool support and can cause a
+        // 400 Bad Request for models that do not support tools.
+        // Currently the `tools` field is omitted for compatibility; the variable is retained
+        // for potential future use.
+        let _ollama_tools = tools
+            .as_ref()
+            .filter(|t| !t.is_empty())
+            .map(|t| Self::convert_tools_to_ollama(t));
 
+        // Ollama currently rejects any presence of the `tools` field for models that
+        // do not support function calling. To be safe, we omit the field entirely.
         let ollama_request = OllamaChatRequest {
             model,
             messages,
             stream: false,
-            tools: ollama_tools,
+            tools: None,
             options,
         };
 
@@ -695,13 +705,21 @@ impl Provider for OllamaProvider {
 
         let options = Self::build_options(parameters);
 
-        let ollama_tools = tools.as_ref().map(|t| Self::convert_tools_to_ollama(t));
+        // Only include the `tools` field if there is at least one tool.
+        // Sending an empty tools array causes Ollama to reject the request for
+        // models without tool support (400 Bad Request).
+        // Retain tools conversion for possible future extensions, but silence unused warnings.
+        let _ollama_tools = tools
+            .as_ref()
+            .filter(|t| !t.is_empty())
+            .map(|t| Self::convert_tools_to_ollama(t));
 
+        // Omit the `tools` field for compatibility with models lacking tool support.
         let ollama_request = OllamaChatRequest {
             model,
             messages,
             stream: true,
-            tools: ollama_tools,
+            tools: None,
             options,
         };
 
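A serialization note on the change above: setting `tools: None` only keeps the key out of the request body if the struct skips serializing `None` values; a plain derive would emit `"tools": null` instead. Below is a minimal sketch of that pattern, assuming a serde-derived `OllamaChatRequest`; the field list is illustrative, not copied from the provider.

```rust
use serde::Serialize;
use serde_json::Value;

// Illustrative stand-in for the real request type; only the `tools`
// handling matters here.
#[derive(Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<Value>,
    stream: bool,
    // Without this attribute, `tools: None` serializes as `"tools": null`
    // rather than omitting the field entirely.
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<Value>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    options: Option<Value>,
}

fn main() {
    let req = OllamaChatRequest {
        model: "llama3".into(),
        messages: vec![],
        stream: false,
        tools: None,
        options: None,
    };
    // Prints a payload with no "tools" key at all.
    println!("{}", serde_json::to_string(&req).unwrap());
}
```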
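Finally, a rough sketch of the `Provider` surface that `RemoteMcpClient` now implements, per the commit message. The trait lives in `owlen-core` and is not shown in this excerpt, so every name and signature below is a hypothetical stand-in meant only to indicate the shape of the integration (model listing, chat, streaming via `tokio-stream`, health check).

```rust
use std::error::Error;
use std::pin::Pin;

use async_trait::async_trait;
use tokio_stream::Stream;

type BoxError = Box<dyn Error + Send + Sync>;
// Streamed completion chunks; `tokio-stream` is the dependency this commit adds.
pub type ChunkStream = Pin<Box<dyn Stream<Item = Result<String, BoxError>> + Send>>;

// Hypothetical trait shape; the real `Provider` in `owlen-core` may differ.
#[async_trait]
pub trait Provider {
    /// List the models the remote MCP LLM server exposes.
    async fn list_models(&self) -> Result<Vec<String>, BoxError>;
    /// Run a single (non-streaming) chat completion.
    async fn chat(&self, model: &str, prompt: &str) -> Result<String, BoxError>;
    /// Stream a completion chunk by chunk.
    async fn chat_stream(&self, model: &str, prompt: &str) -> Result<ChunkStream, BoxError>;
    /// Cheap liveness probe against the server process.
    async fn health_check(&self) -> Result<(), BoxError>;
}
```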