refactor(core): add LLMClient facade trait; decouple TUI from Provider/MCP details

This commit is contained in:
2025-10-17 01:52:10 +02:00
parent 5182f86133
commit 7f987737f9
5 changed files with 82 additions and 16 deletions

View File

@@ -0,0 +1,32 @@
use std::sync::Arc;
use async_trait::async_trait;
use crate::{
Result,
llm::ChatStream,
mcp::{McpToolCall, McpToolDescriptor, McpToolResponse},
types::{ChatRequest, ChatResponse, ModelInfo},
};
/// Object-safe facade for interacting with LLM backends.
///
/// Bundles model discovery, chat (one-shot and streaming), and MCP tool
/// access behind a single `Send + Sync` trait so front-ends (e.g. the TUI)
/// can hold one trait object instead of depending on concrete
/// `LlmProvider`/MCP client types. See [`DynLlmClient`] for the usual
/// shared-ownership form.
#[async_trait]
pub trait LlmClient: Send + Sync {
    /// List the models exposed by this client.
    ///
    /// # Errors
    /// Propagates any failure reported by the underlying backend.
    async fn list_models(&self) -> Result<Vec<ModelInfo>>;
    /// Issue a one-shot chat request and wait for the complete response.
    ///
    /// # Errors
    /// Propagates any failure reported by the underlying backend.
    async fn send_chat(&self, request: ChatRequest) -> Result<ChatResponse>;
    /// Stream chat responses incrementally.
    ///
    /// # Errors
    /// Returns an error if the stream cannot be established; errors that
    /// occur mid-stream are surfaced through the returned [`ChatStream`].
    async fn stream_chat(&self, request: ChatRequest) -> Result<ChatStream>;
    /// Enumerate tools exposed by the backing provider.
    ///
    /// # Errors
    /// Propagates any failure reported by the underlying backend.
    async fn list_tools(&self) -> Result<Vec<McpToolDescriptor>>;
    /// Invoke a tool exposed by the provider.
    ///
    /// # Errors
    /// Propagates any failure reported by the underlying backend.
    async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse>;
}
/// Convenience alias for trait-object clients.
///
/// `Arc` allows the same client to be shared across tasks/threads cheaply
/// (clone is a refcount bump).
pub type DynLlmClient = Arc<dyn LlmClient>;

View File

@@ -0,0 +1 @@
pub mod llm_client;

View File

@@ -11,6 +11,7 @@ pub mod consent;
pub mod conversation;
pub mod credentials;
pub mod encryption;
pub mod facade;
pub mod formatting;
pub mod input;
pub mod llm;
@@ -42,6 +43,7 @@ pub use formatting::*;
pub use input::*;
pub use oauth::*;
// Export MCP types but exclude test_utils to avoid ambiguity
pub use facade::llm_client::*;
pub use llm::{
    ChatStream, LlmProvider, Provider, ProviderConfig, ProviderRegistry, send_via_stream,
};

View File

@@ -7,7 +7,10 @@ use crate::consent::{ConsentManager, ConsentScope};
use crate::tools::{Tool, WebScrapeTool, WebSearchTool};
use crate::types::ModelInfo;
use crate::types::{ChatResponse, Message, Role};
use crate::{
    ChatStream, Error, LlmProvider, Result, facade::llm_client::LlmClient, mode::Mode,
    send_via_stream,
};
use anyhow::anyhow;
use futures::{StreamExt, future::BoxFuture, stream};
use reqwest::Client as HttpClient;
@@ -564,3 +567,27 @@ impl LlmProvider for RemoteMcpClient {
        })
    }
}
/// Adapter: expose `RemoteMcpClient` through the [`LlmClient`] facade by
/// forwarding each method to the corresponding `LlmProvider`/`McpClient`
/// implementation. Fully-qualified syntax (`<Self as Trait>::method`) is
/// used throughout to avoid ambiguity between same-named methods on the
/// different traits (e.g. `list_models`, `list_tools`, `call_tool`).
#[async_trait::async_trait]
impl LlmClient for RemoteMcpClient {
    async fn list_models(&self) -> Result<Vec<ModelInfo>> {
        // Delegate to the provider-side model listing.
        <Self as LlmProvider>::list_models(self).await
    }
    async fn send_chat(&self, request: crate::types::ChatRequest) -> Result<ChatResponse> {
        // Facade renames `send_prompt` -> `send_chat`; behavior is identical.
        <Self as LlmProvider>::send_prompt(self, request).await
    }
    async fn stream_chat(&self, request: crate::types::ChatRequest) -> Result<ChatStream> {
        // Pin the provider stream on the heap to fit the `ChatStream`
        // trait-object type expected by the facade.
        let stream = <Self as LlmProvider>::stream_prompt(self, request).await?;
        Ok(Box::pin(stream))
    }
    async fn list_tools(&self) -> Result<Vec<McpToolDescriptor>> {
        // Tool enumeration comes from the MCP side of this client.
        <Self as McpClient>::list_tools(self).await
    }
    async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse> {
        // Tool invocation likewise forwards to the MCP implementation.
        <Self as McpClient>::call_tool(self, call).await
    }
}

View File

@@ -2,6 +2,7 @@ use anyhow::{Context, Result, anyhow};
use async_trait::async_trait;
use chrono::{DateTime, Local, Utc};
use crossterm::terminal::{disable_raw_mode, enable_raw_mode};
use owlen_core::facade::llm_client::LlmClient;
use owlen_core::mcp::remote_client::RemoteMcpClient;
use owlen_core::mcp::{McpToolDescriptor, McpToolResponse};
use owlen_core::provider::{
@@ -9,7 +10,7 @@ use owlen_core::provider::{
    ProviderType,
};
use owlen_core::{
    ProviderConfig,
    config::McpResourceConfig,
    model::DetailedModelInfo,
    oauth::{DeviceAuthorization, DevicePollState},
@@ -7581,7 +7582,9 @@ impl ChatApp {
        };
        match client_result {
            Ok(client) => {
                let client: Arc<dyn LlmClient> = Arc::new(client);
                match client.list_models().await {
                    Ok(mut provider_models) => {
                        for model in &mut provider_models {
                            model.provider = canonical_name.clone();
@@ -7596,7 +7599,8 @@ impl ChatApp {
                            .insert(canonical_name.clone(), ProviderScopeStatus::default());
                        errors.push(format!("{}: {}", name, err))
                    }
                }
            }
            Err(err) => {
                scope_status_map.insert(canonical_name.clone(), ProviderScopeStatus::default());
                errors.push(format!("{}: {}", canonical_name, err));