refactor(core): remove provider module and migrate to LLMProvider

- add client mode handling
- improve serialization error handling
- update workspace edition
- clean up conditionals and imports

2025-10-12 12:38:55 +02:00
parent c2f5ccea3b
commit 7851af14a9
63 changed files with 2221 additions and 1236 deletions

View File

@@ -1,7 +1,7 @@
[package]
name = "owlen-mcp-llm-server"
version = "0.1.0"
edition = "2021"
edition.workspace = true
[dependencies]
owlen-core = { path = "../owlen-core" }

View File

@@ -7,18 +7,19 @@
clippy::empty_line_after_outer_attr
)]
-use owlen_core::config::{ensure_provider_config, Config as OwlenConfig};
+use owlen_core::Provider;
+use owlen_core::ProviderConfig;
+use owlen_core::config::{Config as OwlenConfig, ensure_provider_config};
use owlen_core::mcp::protocol::{
-    methods, ErrorCode, InitializeParams, InitializeResult, RequestId, RpcError, RpcErrorResponse,
-    RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo, PROTOCOL_VERSION,
+    ErrorCode, InitializeParams, InitializeResult, PROTOCOL_VERSION, RequestId, RpcError,
+    RpcErrorResponse, RpcNotification, RpcRequest, RpcResponse, ServerCapabilities, ServerInfo,
+    methods,
};
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
-use owlen_core::provider::ProviderConfig;
use owlen_core::providers::OllamaProvider;
use owlen_core::types::{ChatParameters, ChatRequest, Message};
-use owlen_core::Provider;
use serde::Deserialize;
-use serde_json::{json, Value};
+use serde_json::{Value, json};
use std::collections::HashMap;
use std::env;
use std::sync::Arc;
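
A rough sketch (not the owlen-core source) of what the crate-root exports might look like once the old provider module is gone and Provider / ProviderConfig come straight from owlen_core: only the method names visible in this diff (stream_prompt, list_models) are taken from the code; every type, field, and signature below is an assumption.

use std::pin::Pin;

use futures::Stream;

// Assumed configuration shape; the real ProviderConfig fields are not shown in the diff.
pub struct ProviderConfig {
    pub base_url: String,
    pub default_model: Option<String>,
}

// Stand-ins for owlen_core::types; the real definitions are not part of this commit view.
pub struct ChatRequest;
pub struct StreamChunk;

pub type ChatStream = Pin<Box<dyn Stream<Item = anyhow::Result<StreamChunk>> + Send>>;

#[async_trait::async_trait]
pub trait Provider: Send + Sync {
    // Start a streaming chat completion for the request (see the stream_prompt calls below).
    async fn stream_prompt(&self, request: ChatRequest) -> anyhow::Result<ChatStream>;
    // Enumerate the models the backend exposes (used by the models/list handler).
    async fn list_models(&self) -> anyhow::Result<Vec<String>>;
}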
@@ -178,7 +179,7 @@ async fn handle_generate_text(args: GenerateTextArgs) -> Result<String, RpcError
// Use streaming API and collect output
let mut stream = provider
-    .chat_stream(request)
+    .stream_prompt(request)
.await
.map_err(|e| RpcError::internal_error(format!("Chat request failed: {}", e)))?;
let mut content = String::new();
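
A minimal sketch of the collect-from-stream pattern this handler uses after the switch to stream_prompt: drain the stream chunk by chunk and append each fragment to one String. The item type is simplified to String here; the real chunk type in owlen-core is not visible in this diff and is an assumption.

use anyhow::Result;
use futures::{Stream, StreamExt};

// Drain a streaming response and return the concatenated text.
async fn collect_stream_text(
    mut stream: impl Stream<Item = Result<String>> + Unpin,
) -> Result<String> {
    let mut content = String::new();
    while let Some(chunk) = stream.next().await {
        // Propagate provider errors instead of silently dropping chunks.
        content.push_str(&chunk?);
    }
    Ok(content)
}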
@@ -228,7 +229,9 @@ async fn handle_request(req: &RpcRequest) -> Result<Value, RpcError> {
supports_streaming: Some(true),
},
};
-Ok(serde_json::to_value(result).unwrap())
+serde_json::to_value(result).map_err(|e| {
+    RpcError::internal_error(format!("Failed to serialize init result: {}", e))
+})
}
methods::TOOLS_LIST => {
let tools = vec![
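
The serialization changes in this commit all follow the same shape: replace serde_json::to_value(..).unwrap(), which would panic the server process on a serialization failure, with map_err into an RPC internal error that is returned to the client. A condensed sketch of that pattern, using a simplified stand-in for the RpcError type imported from owlen_core::mcp::protocol:

use serde::Serialize;
use serde_json::Value;

// Simplified stand-in for owlen_core::mcp::protocol::RpcError.
#[derive(Debug)]
struct RpcError {
    message: String,
}

impl RpcError {
    fn internal_error(message: impl Into<String>) -> Self {
        RpcError { message: message.into() }
    }
}

// Serialize a value for an RPC result, turning failures into an RpcError
// instead of panicking.
fn to_rpc_value<T: Serialize>(value: &T, what: &str) -> Result<Value, RpcError> {
    serde_json::to_value(value)
        .map_err(|e| RpcError::internal_error(format!("Failed to serialize {}: {}", what, e)))
}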
@@ -245,7 +248,9 @@ async fn handle_request(req: &RpcRequest) -> Result<Value, RpcError> {
.list_models()
.await
.map_err(|e| RpcError::internal_error(format!("Failed to list models: {}", e)))?;
-Ok(serde_json::to_value(models).unwrap())
+serde_json::to_value(models).map_err(|e| {
+    RpcError::internal_error(format!("Failed to serialize model list: {}", e))
+})
}
methods::TOOLS_CALL => {
// For streaming we will send incremental notifications directly from here.
@@ -331,10 +336,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(),
duration_ms: 0,
};
-let final_resp = RpcResponse::new(
-    id.clone(),
-    serde_json::to_value(response).unwrap(),
-);
+let payload = match serde_json::to_value(&response) {
+    Ok(value) => value,
+    Err(e) => {
+        let err_resp = RpcErrorResponse::new(
+            id.clone(),
+            RpcError::internal_error(format!(
+                "Failed to serialize resource response: {}",
+                e
+            )),
+        );
+        let s = serde_json::to_string(&err_resp)?;
+        stdout.write_all(s.as_bytes()).await?;
+        stdout.write_all(b"\n").await?;
+        stdout.flush().await?;
+        continue;
+    }
+};
+let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;
@@ -375,10 +394,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(),
duration_ms: 0,
};
-let final_resp = RpcResponse::new(
-    id.clone(),
-    serde_json::to_value(response).unwrap(),
-);
+let payload = match serde_json::to_value(&response) {
+    Ok(value) => value,
+    Err(e) => {
+        let err_resp = RpcErrorResponse::new(
+            id.clone(),
+            RpcError::internal_error(format!(
+                "Failed to serialize directory listing: {}",
+                e
+            )),
+        );
+        let s = serde_json::to_string(&err_resp)?;
+        stdout.write_all(s.as_bytes()).await?;
+        stdout.write_all(b"\n").await?;
+        stdout.flush().await?;
+        continue;
+    }
+};
+let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;
@@ -454,7 +487,7 @@ async fn main() -> anyhow::Result<()> {
parameters,
tools: None,
};
-let mut stream = match provider.chat_stream(request).await {
+let mut stream = match provider.stream_prompt(request).await {
Ok(s) => s,
Err(e) => {
let err_resp = RpcErrorResponse::new(
@@ -510,8 +543,24 @@ async fn main() -> anyhow::Result<()> {
metadata: HashMap::new(),
duration_ms: 0,
};
-let final_resp =
-    RpcResponse::new(id.clone(), serde_json::to_value(response).unwrap());
+let payload = match serde_json::to_value(&response) {
+    Ok(value) => value,
+    Err(e) => {
+        let err_resp = RpcErrorResponse::new(
+            id.clone(),
+            RpcError::internal_error(format!(
+                "Failed to serialize final streaming response: {}",
+                e
+            )),
+        );
+        let s = serde_json::to_string(&err_resp)?;
+        stdout.write_all(s.as_bytes()).await?;
+        stdout.write_all(b"\n").await?;
+        stdout.flush().await?;
+        continue;
+    }
+};
+let final_resp = RpcResponse::new(id.clone(), payload);
let s = serde_json::to_string(&final_resp)?;
stdout.write_all(s.as_bytes()).await?;
stdout.write_all(b"\n").await?;