Add App core struct with event-handling and initialization logic for TUI.
This commit is contained in:
204  crates/owlen-core/src/session.rs  Normal file
@@ -0,0 +1,204 @@
use crate::config::Config;
use crate::conversation::ConversationManager;
use crate::formatting::MessageFormatter;
use crate::input::InputBuffer;
use crate::model::ModelManager;
use crate::provider::{ChatStream, Provider};
use crate::types::{ChatParameters, ChatRequest, ChatResponse, Conversation, ModelInfo};
use crate::Result;
use std::sync::Arc;
use uuid::Uuid;

/// Outcome of submitting a chat request
pub enum SessionOutcome {
    /// Immediate response received (non-streaming)
    Complete(ChatResponse),
    /// Streaming response where chunks will arrive asynchronously
    Streaming {
        response_id: Uuid,
        stream: ChatStream,
    },
}

/// High-level controller encapsulating session state and provider interactions
pub struct SessionController {
    provider: Arc<dyn Provider>,
    conversation: ConversationManager,
    model_manager: ModelManager,
    input_buffer: InputBuffer,
    formatter: MessageFormatter,
    config: Config,
}

impl SessionController {
    /// Create a new controller with the given provider and configuration
    pub fn new(provider: Arc<dyn Provider>, config: Config) -> Self {
        let model = config
            .general
            .default_model
            .clone()
            .unwrap_or_else(|| "ollama/default".to_string());

        let conversation =
            ConversationManager::with_history_capacity(model, config.storage.max_saved_sessions);
        let formatter =
            MessageFormatter::new(config.ui.wrap_column as usize, config.ui.show_role_labels)
                .with_preserve_empty(config.ui.word_wrap);
        let input_buffer = InputBuffer::new(
            config.input.history_size,
            config.input.multiline,
            config.input.tab_width,
        );

        let model_manager = ModelManager::new(config.general.model_cache_ttl());

        Self {
            provider,
            conversation,
            model_manager,
            input_buffer,
            formatter,
            config,
        }
    }

    /// Access the active conversation
    pub fn conversation(&self) -> &Conversation {
        self.conversation.active()
    }

    /// Mutable access to the conversation manager
    pub fn conversation_mut(&mut self) -> &mut ConversationManager {
        &mut self.conversation
    }

    /// Access input buffer
    pub fn input_buffer(&self) -> &InputBuffer {
        &self.input_buffer
    }

    /// Mutable input buffer access
    pub fn input_buffer_mut(&mut self) -> &mut InputBuffer {
        &mut self.input_buffer
    }

    /// Formatter for rendering messages
    pub fn formatter(&self) -> &MessageFormatter {
        &self.formatter
    }

    /// Access configuration
    pub fn config(&self) -> &Config {
        &self.config
    }

    /// Mutable configuration access
    pub fn config_mut(&mut self) -> &mut Config {
        &mut self.config
    }

    /// Currently selected model identifier
    pub fn selected_model(&self) -> &str {
        &self.conversation.active().model
    }

    /// Change current model for upcoming requests
    pub fn set_model(&mut self, model: String) {
        self.conversation.set_model(model.clone());
        self.config.general.default_model = Some(model);
    }

    /// Retrieve cached models, refreshing from provider as needed
    pub async fn models(&self, force_refresh: bool) -> Result<Vec<ModelInfo>> {
        self.model_manager
            .get_or_refresh(force_refresh, || async {
                self.provider.list_models().await
            })
            .await
    }

    /// Attempt to select the configured default model from cached models
    pub fn ensure_default_model(&mut self, models: &[ModelInfo]) {
        if let Some(default) = self.config.general.default_model.clone() {
            if models.iter().any(|m| m.id == default || m.name == default) {
                self.set_model(default);
            }
        } else if let Some(model) = models.first() {
            self.set_model(model.id.clone());
        }
    }

    /// Submit a user message; optionally stream the response
    pub async fn send_message(
        &mut self,
        content: String,
        mut parameters: ChatParameters,
    ) -> Result<SessionOutcome> {
        let streaming = parameters.stream || self.config.general.enable_streaming;
        parameters.stream = streaming;

        self.conversation.push_user_message(content);

        let request = ChatRequest {
            model: self.conversation.active().model.clone(),
            messages: self.conversation.active().messages.clone(),
            parameters,
        };

        if streaming {
            match self.provider.chat_stream(request).await {
                Ok(stream) => {
                    let response_id = self.conversation.start_streaming_response();
                    Ok(SessionOutcome::Streaming {
                        response_id,
                        stream,
                    })
                }
                Err(err) => {
                    self.conversation
                        .push_assistant_message(format!("Error starting stream: {}", err));
                    Err(err)
                }
            }
        } else {
            match self.provider.chat(request).await {
                Ok(response) => {
                    self.conversation.push_message(response.message.clone());
                    Ok(SessionOutcome::Complete(response))
                }
                Err(err) => {
                    self.conversation
                        .push_assistant_message(format!("Error: {}", err));
                    Err(err)
                }
            }
        }
    }

    /// Mark a streaming response message with placeholder content
    pub fn mark_stream_placeholder(&mut self, message_id: Uuid, text: &str) -> Result<()> {
        self.conversation
            .set_stream_placeholder(message_id, text.to_string())
    }

    /// Apply streaming chunk to the conversation
    pub fn apply_stream_chunk(&mut self, message_id: Uuid, chunk: &ChatResponse) -> Result<()> {
        self.conversation
            .append_stream_chunk(message_id, &chunk.message.content, chunk.is_final)
    }

    /// Access conversation history
    pub fn history(&self) -> Vec<Conversation> {
        self.conversation.history().cloned().collect()
    }

    /// Start a new conversation optionally targeting a specific model
    pub fn start_new_conversation(&mut self, model: Option<String>, name: Option<String>) {
        self.conversation.start_new(model, name);
    }

    /// Clear current conversation messages
    pub fn clear(&mut self) {
        self.conversation.clear();
    }
}
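
For reference, a minimal sketch of how a caller (such as the TUI event loop this commit targets) might drive the SessionController added above. Everything outside this diff is an assumption: the provider is passed in as Arc<dyn Provider>, Config and ChatParameters are assumed to implement Default, and ChatStream is assumed to be a futures::Stream yielding Result<ChatResponse>.

// Illustrative sketch (not part of this commit); see assumptions above.
use std::sync::Arc;

use futures::StreamExt;

use crate::config::Config;
use crate::provider::Provider;
use crate::session::{SessionController, SessionOutcome};
use crate::types::ChatParameters;
use crate::Result;

pub async fn run_single_turn(provider: Arc<dyn Provider>, prompt: String) -> Result<()> {
    // Build a controller from the provider and an (assumed) default configuration.
    let mut session = SessionController::new(provider, Config::default());

    match session.send_message(prompt, ChatParameters::default()).await? {
        // Non-streaming: the full assistant reply is already recorded in the conversation.
        SessionOutcome::Complete(response) => {
            println!("{}", response.message.content);
        }
        // Streaming: show a placeholder, then append chunks as they arrive.
        SessionOutcome::Streaming { response_id, mut stream } => {
            session.mark_stream_placeholder(response_id, "...")?;
            while let Some(chunk) = stream.next().await {
                let chunk = chunk?; // assumes the stream yields Result<ChatResponse>
                session.apply_stream_chunk(response_id, &chunk)?;
            }
        }
    }

    Ok(())
}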