refactor(core): add LlmClient facade trait; decouple TUI from Provider/MCP details
This commit is contained in:
32
crates/owlen-core/src/facade/llm_client.rs
Normal file
32
crates/owlen-core/src/facade/llm_client.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::{
|
||||
Result,
|
||||
llm::ChatStream,
|
||||
mcp::{McpToolCall, McpToolDescriptor, McpToolResponse},
|
||||
types::{ChatRequest, ChatResponse, ModelInfo},
|
||||
};
|
||||
|
||||
/// Object-safe facade for interacting with LLM backends.
///
/// Decouples front-ends (e.g. the TUI) from concrete provider/MCP client
/// types: callers depend on this trait object instead of `LlmProvider` or
/// `McpClient` directly. The `Send + Sync` bound lets implementors be
/// shared across async tasks behind an `Arc` (see [`DynLlmClient`]).
#[async_trait]
pub trait LlmClient: Send + Sync {
    /// List the models exposed by this client.
    async fn list_models(&self) -> Result<Vec<ModelInfo>>;

    /// Issue a one-shot chat request and wait for the complete response.
    async fn send_chat(&self, request: ChatRequest) -> Result<ChatResponse>;

    /// Stream chat responses incrementally.
    ///
    /// Returns a [`ChatStream`] yielding partial responses as they arrive.
    async fn stream_chat(&self, request: ChatRequest) -> Result<ChatStream>;

    /// Enumerate tools exposed by the backing provider.
    async fn list_tools(&self) -> Result<Vec<McpToolDescriptor>>;

    /// Invoke a tool exposed by the provider.
    async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse>;
}

/// Convenience alias for trait-object clients.
pub type DynLlmClient = Arc<dyn LlmClient>;
|
||||
1
crates/owlen-core/src/facade/mod.rs
Normal file
1
crates/owlen-core/src/facade/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod llm_client;
|
||||
@@ -11,6 +11,7 @@ pub mod consent;
|
||||
pub mod conversation;
|
||||
pub mod credentials;
|
||||
pub mod encryption;
|
||||
pub mod facade;
|
||||
pub mod formatting;
|
||||
pub mod input;
|
||||
pub mod llm;
|
||||
@@ -42,6 +43,7 @@ pub use formatting::*;
|
||||
pub use input::*;
|
||||
pub use oauth::*;
|
||||
// Export MCP types but exclude test_utils to avoid ambiguity
|
||||
pub use facade::llm_client::*;
|
||||
pub use llm::{
|
||||
ChatStream, LlmProvider, Provider, ProviderConfig, ProviderRegistry, send_via_stream,
|
||||
};
|
||||
|
||||
@@ -7,7 +7,10 @@ use crate::consent::{ConsentManager, ConsentScope};
|
||||
use crate::tools::{Tool, WebScrapeTool, WebSearchTool};
|
||||
use crate::types::ModelInfo;
|
||||
use crate::types::{ChatResponse, Message, Role};
|
||||
use crate::{Error, LlmProvider, Result, mode::Mode, send_via_stream};
|
||||
use crate::{
|
||||
ChatStream, Error, LlmProvider, Result, facade::llm_client::LlmClient, mode::Mode,
|
||||
send_via_stream,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use futures::{StreamExt, future::BoxFuture, stream};
|
||||
use reqwest::Client as HttpClient;
|
||||
@@ -564,3 +567,27 @@ impl LlmProvider for RemoteMcpClient {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Bridge the object-safe `LlmClient` facade onto `RemoteMcpClient` by
// delegating to its existing `LlmProvider` and `McpClient` implementations.
// Fully-qualified syntax (`<Self as Trait>::method`) disambiguates between
// the facade methods and the same-named trait methods being delegated to.
#[async_trait::async_trait]
impl LlmClient for RemoteMcpClient {
    async fn list_models(&self) -> Result<Vec<ModelInfo>> {
        // Model listing comes from the provider side of this client.
        <Self as LlmProvider>::list_models(self).await
    }

    async fn send_chat(&self, request: crate::types::ChatRequest) -> Result<ChatResponse> {
        // Facade `send_chat` maps onto the provider's `send_prompt`.
        <Self as LlmProvider>::send_prompt(self, request).await
    }

    async fn stream_chat(&self, request: crate::types::ChatRequest) -> Result<ChatStream> {
        // Pin the provider's stream so it satisfies the boxed `ChatStream` alias.
        let stream = <Self as LlmProvider>::stream_prompt(self, request).await?;
        Ok(Box::pin(stream))
    }

    async fn list_tools(&self) -> Result<Vec<McpToolDescriptor>> {
        // Tool discovery comes from the MCP side of this client.
        <Self as McpClient>::list_tools(self).await
    }

    async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse> {
        // Tool invocation is likewise forwarded to the MCP client impl.
        <Self as McpClient>::call_tool(self, call).await
    }
}
|
||||
|
||||
@@ -2,6 +2,7 @@ use anyhow::{Context, Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime, Local, Utc};
|
||||
use crossterm::terminal::{disable_raw_mode, enable_raw_mode};
|
||||
use owlen_core::facade::llm_client::LlmClient;
|
||||
use owlen_core::mcp::remote_client::RemoteMcpClient;
|
||||
use owlen_core::mcp::{McpToolDescriptor, McpToolResponse};
|
||||
use owlen_core::provider::{
|
||||
@@ -9,7 +10,7 @@ use owlen_core::provider::{
|
||||
ProviderType,
|
||||
};
|
||||
use owlen_core::{
|
||||
Provider, ProviderConfig,
|
||||
ProviderConfig,
|
||||
config::McpResourceConfig,
|
||||
model::DetailedModelInfo,
|
||||
oauth::{DeviceAuthorization, DevicePollState},
|
||||
@@ -7581,22 +7582,25 @@ impl ChatApp {
|
||||
};
|
||||
|
||||
match client_result {
|
||||
Ok(client) => match client.list_models().await {
|
||||
Ok(mut provider_models) => {
|
||||
for model in &mut provider_models {
|
||||
model.provider = canonical_name.clone();
|
||||
Ok(client) => {
|
||||
let client: Arc<dyn LlmClient> = Arc::new(client);
|
||||
match client.list_models().await {
|
||||
Ok(mut provider_models) => {
|
||||
for model in &mut provider_models {
|
||||
model.provider = canonical_name.clone();
|
||||
}
|
||||
let statuses = Self::extract_scope_status(&provider_models);
|
||||
Self::accumulate_scope_errors(&mut errors, &canonical_name, &statuses);
|
||||
scope_status_map.insert(canonical_name.clone(), statuses);
|
||||
models.extend(provider_models);
|
||||
}
|
||||
Err(err) => {
|
||||
scope_status_map
|
||||
.insert(canonical_name.clone(), ProviderScopeStatus::default());
|
||||
errors.push(format!("{}: {}", name, err))
|
||||
}
|
||||
let statuses = Self::extract_scope_status(&provider_models);
|
||||
Self::accumulate_scope_errors(&mut errors, &canonical_name, &statuses);
|
||||
scope_status_map.insert(canonical_name.clone(), statuses);
|
||||
models.extend(provider_models);
|
||||
}
|
||||
Err(err) => {
|
||||
scope_status_map
|
||||
.insert(canonical_name.clone(), ProviderScopeStatus::default());
|
||||
errors.push(format!("{}: {}", name, err))
|
||||
}
|
||||
},
|
||||
}
|
||||
Err(err) => {
|
||||
scope_status_map.insert(canonical_name.clone(), ProviderScopeStatus::default());
|
||||
errors.push(format!("{}: {}", canonical_name, err));
|
||||
|
||||
Reference in New Issue
Block a user