refactor(core): add LlmClient facade trait; decouple TUI from Provider/MCP details
This commit is contained in:
32
crates/owlen-core/src/facade/llm_client.rs
Normal file
32
crates/owlen-core/src/facade/llm_client.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
use crate::{
|
||||
Result,
|
||||
llm::ChatStream,
|
||||
mcp::{McpToolCall, McpToolDescriptor, McpToolResponse},
|
||||
types::{ChatRequest, ChatResponse, ModelInfo},
|
||||
};
|
||||
|
||||
/// Object-safe facade for interacting with LLM backends.
|
||||
#[async_trait]
|
||||
pub trait LlmClient: Send + Sync {
|
||||
/// List the models exposed by this client.
|
||||
async fn list_models(&self) -> Result<Vec<ModelInfo>>;
|
||||
|
||||
/// Issue a one-shot chat request and wait for the complete response.
|
||||
async fn send_chat(&self, request: ChatRequest) -> Result<ChatResponse>;
|
||||
|
||||
/// Stream chat responses incrementally.
|
||||
async fn stream_chat(&self, request: ChatRequest) -> Result<ChatStream>;
|
||||
|
||||
/// Enumerate tools exposed by the backing provider.
|
||||
async fn list_tools(&self) -> Result<Vec<McpToolDescriptor>>;
|
||||
|
||||
/// Invoke a tool exposed by the provider.
|
||||
async fn call_tool(&self, call: McpToolCall) -> Result<McpToolResponse>;
|
||||
}
|
||||
|
||||
/// Convenience alias for trait-object clients: a shared, reference-counted
/// [`LlmClient`] that can be cloned cheaply and passed across threads/tasks.
pub type DynLlmClient = Arc<dyn LlmClient>;
|
||||
Reference in New Issue
Block a user