Remove App implementation: delete TUI application logic, event handling, and related structures.

This commit is contained in:
2025-09-30 02:40:20 +02:00
parent a5727c0a1d
commit 54bcabd53d
12 changed files with 450 additions and 3299 deletions

View File

@@ -12,6 +12,8 @@ pub mod provider;
pub mod router;
pub mod session;
pub mod types;
pub mod ui;
pub mod wrap_cursor;
pub use config::*;
pub use conversation::*;
@@ -21,7 +23,6 @@ pub use model::*;
pub use provider::*;
pub use router::*;
pub use session::*;
pub mod wrap_cursor;
/// Result type used throughout the OWLEN ecosystem
pub type Result<T> = std::result::Result<T, Error>;

425
crates/owlen-core/src/ui.rs Normal file
View File

@@ -0,0 +1,425 @@
//! Shared UI components and state management for TUI applications
//!
//! This module contains reusable UI components that can be shared between
//! different TUI applications (chat, code, etc.)
use std::fmt;
/// Application state
///
/// Returned/tracked by the event loop to decide whether to keep
/// processing events or shut the application down.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AppState {
    /// Keep processing events.
    Running,
    /// Exit the application.
    Quit,
}
/// Input modes for TUI applications
///
/// Each variant corresponds to a distinct key-handling state; the
/// `Display` impl renders the short label shown to the user.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InputMode {
    Normal,
    Editing,
    ProviderSelection,
    ModelSelection,
    Help,
    Visual,
    Command,
}

impl fmt::Display for InputMode {
    /// Writes the short human-readable label for the mode.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Normal => "Normal",
            Self::Editing => "Editing",
            Self::ProviderSelection => "Provider",
            Self::ModelSelection => "Model",
            Self::Help => "Help",
            Self::Visual => "Visual",
            Self::Command => "Command",
        })
    }
}
/// Represents which panel is currently focused
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FocusedPanel {
    /// The conversation/message panel.
    Chat,
    /// The model "thinking" output panel (presumed from the name — confirm with the renderer).
    Thinking,
    /// The user text-input panel.
    Input,
}
/// Auto-scroll state manager for scrollable panels
///
/// Tracks a scroll offset into `content_len` rows of content and whether
/// the view should stay pinned to the bottom as new content arrives.
#[derive(Debug, Clone)]
pub struct AutoScroll {
    /// Current scroll offset (first visible row).
    pub scroll: usize,
    /// Total number of content rows.
    pub content_len: usize,
    /// When true, the view follows the bottom of the content.
    pub stick_to_bottom: bool,
}

impl Default for AutoScroll {
    /// Starts empty, at the top, and pinned to the bottom.
    fn default() -> Self {
        AutoScroll {
            scroll: 0,
            content_len: 0,
            stick_to_bottom: true,
        }
    }
}

impl AutoScroll {
    /// Re-clamp the offset for a viewport of `viewport_h` rows,
    /// pinning to the bottom when sticky.
    pub fn on_viewport(&mut self, viewport_h: usize) {
        let bottom = self.content_len.saturating_sub(viewport_h);
        self.scroll = if self.stick_to_bottom {
            bottom
        } else {
            self.scroll.min(bottom)
        };
    }

    /// Apply a user-initiated scroll of `delta` rows (negative = up).
    /// Stickiness is re-enabled exactly when the view lands on the bottom.
    pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) {
        let bottom = self.content_len.saturating_sub(viewport_h) as isize;
        let next = (self.scroll as isize + delta).clamp(0, bottom);
        self.scroll = next as usize;
        self.stick_to_bottom = next == bottom;
    }

    /// Scroll down by half a viewport.
    pub fn scroll_half_page_down(&mut self, viewport_h: usize) {
        self.on_user_scroll((viewport_h / 2) as isize, viewport_h);
    }

    /// Scroll up by half a viewport.
    pub fn scroll_half_page_up(&mut self, viewport_h: usize) {
        self.on_user_scroll(-((viewport_h / 2) as isize), viewport_h);
    }

    /// Scroll down by a full viewport.
    pub fn scroll_full_page_down(&mut self, viewport_h: usize) {
        self.on_user_scroll(viewport_h as isize, viewport_h);
    }

    /// Scroll up by a full viewport.
    pub fn scroll_full_page_up(&mut self, viewport_h: usize) {
        self.on_user_scroll(-(viewport_h as isize), viewport_h);
    }

    /// Jump to the first row and stop following the bottom.
    pub fn jump_to_top(&mut self) {
        self.scroll = 0;
        self.stick_to_bottom = false;
    }

    /// Jump to the last page and resume following the bottom.
    pub fn jump_to_bottom(&mut self, viewport_h: usize) {
        self.stick_to_bottom = true;
        self.on_viewport(viewport_h);
    }
}
/// Visual selection state for text selection
///
/// Both endpoints are `(row, col)` positions; the selection is active
/// only while both are set.
#[derive(Debug, Clone, Default)]
pub struct VisualSelection {
    /// Anchor position, set when the selection begins.
    pub start: Option<(usize, usize)>, // (row, col)
    /// Moving end of the selection.
    pub end: Option<(usize, usize)>, // (row, col)
}

impl VisualSelection {
    /// Create an empty (inactive) selection.
    pub fn new() -> Self {
        Self::default()
    }

    /// Begin a selection: both endpoints collapse onto `pos`.
    pub fn start_at(&mut self, pos: (usize, usize)) {
        self.start = Some(pos);
        self.end = Some(pos);
    }

    /// Move the selection's end to `pos`, keeping the anchor fixed.
    pub fn extend_to(&mut self, pos: (usize, usize)) {
        self.end = Some(pos);
    }

    /// Deactivate the selection.
    pub fn clear(&mut self) {
        self.start = None;
        self.end = None;
    }

    /// True when both endpoints are set.
    pub fn is_active(&self) -> bool {
        self.start.is_some() && self.end.is_some()
    }

    /// Return `(first, last)` ordered document-wise (by row, then column),
    /// or `None` when the selection is inactive.
    pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> {
        let (s, e) = self.start.zip(self.end)?;
        // Tuple comparison is lexicographic: row first, then column.
        if s <= e { Some((s, e)) } else { Some((e, s)) }
    }
}
/// Extract text from a selection range in a list of lines
///
/// `start`/`end` are `(row, col)` positions with an exclusive end column;
/// the endpoints are expected to be normalized (start before end).
/// Returns `None` when the selection is empty or starts out of range.
pub fn extract_text_from_selection(
    lines: &[String],
    start: (usize, usize),
    end: (usize, usize),
) -> Option<String> {
    let (start_row, start_col) = start;
    if lines.is_empty() || start_row >= lines.len() {
        return None;
    }
    let end_row = end.0.min(lines.len() - 1);
    let end_col = end.1;

    if start_row == end_row {
        // Single-line selection: slice the char range [start_col, end_col).
        let chars: Vec<char> = lines[start_row].chars().collect();
        let lo = start_col.min(chars.len());
        let hi = end_col.min(chars.len());
        (lo < hi).then(|| chars[lo..hi].iter().collect())
    } else {
        let mut pieces: Vec<String> = Vec::new();

        // Head: tail of the first line starting at start_col.
        let head: Vec<char> = lines[start_row].chars().collect();
        let lo = start_col.min(head.len());
        if lo < head.len() {
            pieces.push(head[lo..].iter().collect());
        }

        // Body: complete middle lines (empty range when rows are adjacent
        // or the endpoints are unnormalized).
        for row in start_row + 1..end_row {
            if let Some(line) = lines.get(row) {
                pieces.push(line.clone());
            }
        }

        // Tail: head of the last line up to end_col (exclusive).
        if end_row > start_row && end_row < lines.len() {
            let tail: Vec<char> = lines[end_row].chars().collect();
            let hi = end_col.min(tail.len());
            if hi > 0 {
                pieces.push(tail[..hi].iter().collect());
            }
        }

        if pieces.is_empty() {
            None
        } else {
            Some(pieces.join("\n"))
        }
    }
}
/// Cursor position for navigating scrollable content
#[derive(Debug, Clone, Copy, Default)]
pub struct CursorPosition {
    /// Zero-based row.
    pub row: usize,
    /// Zero-based column.
    pub col: usize,
}

impl CursorPosition {
    /// Build a cursor at `(row, col)`.
    pub fn new(row: usize, col: usize) -> Self {
        CursorPosition { row, col }
    }

    /// Move up `amount` rows, stopping at row 0.
    pub fn move_up(&mut self, amount: usize) {
        self.row = self.row.saturating_sub(amount);
    }

    /// Move down `amount` rows, clamped to `max`.
    pub fn move_down(&mut self, amount: usize, max: usize) {
        self.row = max.min(self.row + amount);
    }

    /// Move left `amount` columns, stopping at column 0.
    pub fn move_left(&mut self, amount: usize) {
        self.col = self.col.saturating_sub(amount);
    }

    /// Move right `amount` columns, clamped to `max`.
    pub fn move_right(&mut self, amount: usize, max: usize) {
        self.col = max.min(self.col + amount);
    }

    /// Current position as a `(row, col)` pair.
    pub fn as_tuple(&self) -> (usize, usize) {
        (self.row, self.col)
    }
}
/// Word boundary detection for navigation
///
/// Returns the column just past the run of characters (word or non-word)
/// under `col`; `line` length when the cursor is at or past the end.
/// A "word" character is alphanumeric or `_`.
pub fn find_next_word_boundary(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();
    if col >= chars.len() {
        return Some(chars.len());
    }
    let word = |c: char| c.is_alphanumeric() || c == '_';
    // Advance past every character sharing the class of the one under the cursor.
    let same_class = word(chars[col]);
    let mut pos = col;
    while pos < chars.len() && word(chars[pos]) == same_class {
        pos += 1;
    }
    Some(pos)
}
/// Find the column of the last character of the word at or after `col`.
///
/// When the cursor sits on a non-word character, the next word is located
/// first. Returns `line` length when the cursor is at or past the end.
pub fn find_word_end(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();
    if col >= chars.len() {
        return Some(chars.len());
    }
    let word = |c: char| c.is_alphanumeric() || c == '_';
    let mut pos = col;
    // Not on a word: first skip ahead to the next word (if any).
    if !word(chars[pos]) {
        while pos < chars.len() && !word(chars[pos]) {
            pos += 1;
        }
    }
    // Advance past the word, then step back onto its last character.
    while pos < chars.len() && word(chars[pos]) {
        pos += 1;
    }
    Some(pos.saturating_sub(1))
}
/// Find the column of the start of the word at or before `col`.
///
/// Steps left of the cursor, skips any non-word run, then walks back to
/// the first character of the word found. Returns `Some(0)` for column 0
/// or an empty line.
pub fn find_prev_word_boundary(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();
    if chars.is_empty() || col == 0 {
        return Some(0);
    }
    let word = |c: char| c.is_alphanumeric() || c == '_';
    // Start one position left of the (clamped) cursor.
    let mut pos = col.min(chars.len()) - 1;
    // Skip any trailing non-word characters.
    while pos > 0 && !word(chars[pos]) {
        pos -= 1;
    }
    // Walk back to the first character of this word.
    while pos > 0 && word(chars[pos - 1]) {
        pos -= 1;
    }
    Some(pos)
}
#[cfg(test)]
mod tests {
    use super::*;

    // Exercises viewport clamping, user scrolling (which breaks bottom
    // stickiness), and jump_to_bottom (which restores it).
    #[test]
    fn test_auto_scroll() {
        let mut scroll = AutoScroll::default();
        scroll.content_len = 100;
        // Test on_viewport with stick_to_bottom
        scroll.on_viewport(10);
        assert_eq!(scroll.scroll, 90);
        // Test user scroll up
        scroll.on_user_scroll(-10, 10);
        assert_eq!(scroll.scroll, 80);
        assert!(!scroll.stick_to_bottom);
        // Test jump to bottom
        scroll.jump_to_bottom(10);
        assert!(scroll.stick_to_bottom);
        assert_eq!(scroll.scroll, 90);
    }

    // Selection lifecycle: inactive -> anchored -> extended -> cleared;
    // get_normalized keeps the already-ordered endpoints as-is.
    #[test]
    fn test_visual_selection() {
        let mut selection = VisualSelection::new();
        assert!(!selection.is_active());
        selection.start_at((0, 0));
        assert!(selection.is_active());
        selection.extend_to((2, 5));
        let normalized = selection.get_normalized();
        assert_eq!(normalized, Some(((0, 0), (2, 5))));
        selection.clear();
        assert!(!selection.is_active());
    }

    // Single-line extraction uses an exclusive end column.
    #[test]
    fn test_extract_text_single_line() {
        let lines = vec!["Hello World".to_string()];
        let result = extract_text_from_selection(&lines, (0, 0), (0, 5));
        assert_eq!(result, Some("Hello".to_string()));
    }

    // Multi-line extraction: tail of first line, whole middle line,
    // head of last line, joined with newlines.
    #[test]
    fn test_extract_text_multi_line() {
        let lines = vec![
            "First line".to_string(),
            "Second line".to_string(),
            "Third line".to_string(),
        ];
        let result = extract_text_from_selection(&lines, (0, 6), (2, 5));
        assert_eq!(result, Some("line\nSecond line\nThird".to_string()));
    }

    // Forward boundaries advance past the current run; backward boundaries
    // land on the start of the previous word.
    #[test]
    fn test_word_boundaries() {
        let line = "hello world test";
        assert_eq!(find_next_word_boundary(line, 0), Some(5));
        assert_eq!(find_next_word_boundary(line, 5), Some(6));
        assert_eq!(find_next_word_boundary(line, 6), Some(11));
        assert_eq!(find_prev_word_boundary(line, 16), Some(12));
        assert_eq!(find_prev_word_boundary(line, 11), Some(6));
        assert_eq!(find_prev_word_boundary(line, 6), Some(0));
    }
}

View File

@@ -1,910 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tokio::sync::mpsc;
use uuid::Uuid;
use crate::config::Config;
use crate::database::Database;
use crate::events::Event;
use crate::files::FileManager;
use crate::ollama::{Message, OllamaClient, OllamaEvent};
/// Result alias used by the application's fallible operations; boxes the
/// error so handlers can propagate any `std::error::Error` with `?`.
pub type AppResult<T> = Result<T, Box<dyn std::error::Error>>;
/// The main application state
///
/// Returned by the event handlers to tell the main loop whether to keep
/// running or exit.
#[derive(Debug)]
pub enum AppState {
    /// Keep processing events.
    Running,
    /// Exit the application.
    Quit,
}
/// Current input mode for the application
///
/// Each variant selects a dedicated event handler in `App::handle_event`.
#[derive(Debug, Clone, PartialEq)]
pub enum InputMode {
    /// User is in the initialization process
    Init,
    /// User is typing a message
    Editing,
    /// User is browsing the conversation
    Normal,
    /// User is selecting a model
    ModelSelection,
    /// User is viewing stats
    StatsMenu,
    /// User is viewing help
    Help,
    /// User is browsing files
    FileBrowser,
    /// User is managing sessions
    SessionManager,
    /// User is typing a filename for operations
    FileInput,
    /// User is typing a session name
    SessionInput,
}
/// Different steps of the initialization process
///
/// Sub-state used only while `InputMode::Init` is active; transitions are
/// driven both by key events and by incoming Ollama events.
#[derive(Debug, Clone, PartialEq)]
pub enum InitState {
    /// Asking user to select a backend
    BackendSelection,
    /// Asking user to input custom host
    CustomHostInput,
    /// Checking for Ollama connection
    CheckingOllama,
    /// Fetching available models
    #[allow(dead_code)]
    FetchingModels,
    /// Asking user to select a model
    ModelSelection,
    /// Configuration is complete
    Complete,
}
/// A conversation message with metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationMessage {
    /// Speaker role: "user", "assistant", or "system".
    pub role: String,
    /// Message text; appended to chunk-by-chunk while streaming.
    pub content: String,
    /// Ollama request this message answers (assistant responses only).
    pub request_id: Option<Uuid>,
    /// True while response chunks for this message are still arriving.
    pub is_streaming: bool,
}
/// Session statistics
///
/// Counters accumulated over the lifetime of one application session and
/// printed as a summary on exit.
#[derive(Debug, Clone, Default)]
pub struct SessionStats {
    /// When the session began; used to compute the session duration.
    pub session_start: Option<std::time::Instant>,
    /// Number of user messages sent.
    pub messages_sent: u32,
    /// Number of completed assistant responses received.
    pub messages_received: u32,
    /// Total characters across all sent messages.
    pub total_characters_sent: u32,
    /// Total characters across all received responses.
    pub total_characters_received: u32,
    /// Distinct model names used during the session.
    pub models_used: std::collections::HashSet<String>,
    /// Number of Ollama errors encountered.
    pub errors_encountered: u32,
}
/// Main application structure
///
/// Owns all mutable TUI state: the input buffer, conversation history,
/// model/file/session selection state, session statistics, and the
/// handles used to talk to Ollama, the filesystem, and the database.
pub struct App {
    /// Current input mode
    pub input_mode: InputMode,
    /// Current state of the initialization process
    pub init_state: InitState,
    /// Selected backend type
    pub backend_type: crate::config::BackendType,
    /// Current input buffer
    pub input: String,
    /// Cursor position in input (byte index into `input` — TODO confirm
    /// multi-byte characters are handled by the caller)
    pub input_cursor_position: usize,
    /// Conversation history
    pub messages: Vec<ConversationMessage>,
    /// Current selected model
    pub selected_model: String,
    /// Available models from Ollama
    pub available_models: Vec<String>,
    /// Selected model index for model selection UI
    pub model_selection_index: usize,
    /// Ollama client for making API requests
    ollama_client: OllamaClient,
    /// Currently active requests ( for tracking streaming responses)
    active_requests: HashMap<Uuid, usize>, // UUID -> message index
    /// Status message to show at the bottom
    pub status_message: String,
    /// Scroll position in the message list
    pub message_scroll: usize,
    /// Error message to display
    pub error_message: Option<String>,
    /// Session statistics
    pub stats: SessionStats,
    /// File manager for file operations
    file_manager: FileManager,
    /// Current file path for operations
    pub current_file_path: String,
    /// Available files in current directory
    pub available_files: Vec<crate::files::FileInfo>,
    /// Selected file index
    pub file_selection_index: usize,
    /// Available sessions
    pub available_sessions: Vec<crate::database::SessionSummary>,
    /// Selected session index
    pub session_selection_index: usize,
    /// Input buffer for file operations
    pub file_input: String,
    /// Session name input
    pub session_name_input: String,
    /// Database for session storage
    database: Database,
}
impl App {
pub fn new(ollama_sender: mpsc::UnboundedSender<OllamaEvent>, config: Config, db: &Database, is_init: bool) -> Self {
let ollama_client = OllamaClient::new(
config.general.ollama_host.clone(),
ollama_sender,
);
// Initialize file manager
let file_manager = FileManager::new(config.clone());
// Load project context if available
let mut messages = Vec::new();
if let Ok(Some(context)) = file_manager.load_project_context() {
messages.push(ConversationMessage {
role: "system".to_string(),
content: format!("Project Context:\n{}", context),
request_id: None,
is_streaming: false,
});
}
let (input_mode, init_state, backend_type) = if is_init {
(InputMode::Init, InitState::BackendSelection, crate::config::BackendType::Ollama)
} else {
(InputMode::Normal, InitState::Complete, crate::config::BackendType::Ollama)
};
let mut stats = SessionStats::default();
stats.session_start = Some(std::time::Instant::now());
stats.models_used.insert(config.general.default_model.clone());
let app = Self {
input_mode,
init_state,
backend_type,
input: String::new(),
input_cursor_position: 0,
messages, // Use loaded messages (including project context)
selected_model: config.general.default_model.clone(), // Default model
available_models: vec![config.general.default_model.clone()],
model_selection_index: 0,
ollama_client,
active_requests: HashMap::new(),
status_message: "Press 'h' for help or 'q' to quit".to_string(),
message_scroll: 0,
error_message: None,
stats,
file_manager,
current_file_path: ".".to_string(),
available_files: Vec::new(),
file_selection_index: 0,
available_sessions: Vec::new(),
session_selection_index: 0,
file_input: String::new(),
session_name_input: String::new(),
database: db.clone(),
};
if is_init {
let ollama_client = app.ollama_client.clone();
tokio::spawn(async move {
let _ = ollama_client.get_models().await;
});
}
app
}
/// Handle terminal events
pub async fn handle_event(&mut self, event: Event) -> AppResult<AppState> {
self.error_message = None; // Clear error message on new input
match self.input_mode {
InputMode::Init => self.handle_init_mode_event(event).await,
InputMode::Normal => self.handle_normal_mode_event(event).await,
InputMode::Editing => self.handle_editing_mode_event(event).await,
InputMode::ModelSelection => self.handle_model_selection_event(event).await,
InputMode::StatsMenu => self.handle_stats_menu_event(event).await,
InputMode::Help => self.handle_help_event(event).await,
InputMode::FileBrowser => self.handle_file_browser_event(event).await,
InputMode::SessionManager => self.handle_session_manager_event(event).await,
InputMode::FileInput => self.handle_file_input_event(event).await,
InputMode::SessionInput => self.handle_session_input_event(event).await,
}
}
/// Handle events in initialization mode
async fn handle_init_mode_event(&mut self, event: Event) -> AppResult<AppState> {
match self.init_state {
InitState::BackendSelection => {
if event.is_up() {
self.backend_type = crate::config::BackendType::Ollama;
}
if event.is_down() {
self.backend_type = crate::config::BackendType::Custom;
}
if event.is_enter() {
match self.backend_type {
crate::config::BackendType::Ollama => {
self.init_state = InitState::CheckingOllama;
let ollama_client = self.ollama_client.clone();
tokio::spawn(async move {
let _ = ollama_client.get_models().await;
});
}
crate::config::BackendType::Custom => {
self.init_state = InitState::CustomHostInput;
self.input.clear();
self.input_cursor_position = 0;
}
}
}
}
InitState::CustomHostInput => {
if event.is_escape() {
self.init_state = InitState::BackendSelection;
self.input.clear();
self.input_cursor_position = 0;
}
if event.is_enter() && !self.input.trim().is_empty() {
// Update ollama_client with custom host
self.ollama_client = OllamaClient::new(self.input.trim().to_string(), self.ollama_client.event_sender.clone());
self.init_state = InitState::CheckingOllama;
let ollama_client = self.ollama_client.clone();
tokio::spawn(async move {
let _ = ollama_client.get_models().await;
});
}
if event.is_backspace() && self.input_cursor_position > 0 {
let current_index = self.input_cursor_position;
self.input.remove(current_index - 1);
self.input_cursor_position -= 1;
}
if event.is_left() && self.input_cursor_position > 0 {
self.input_cursor_position -= 1;
}
if event.is_right() && self.input_cursor_position < self.input.len() {
self.input_cursor_position += 1;
}
if let Some(c) = event.as_char() {
self.input.insert(self.input_cursor_position, c);
self.input_cursor_position += 1;
}
}
InitState::CheckingOllama => {
// This state is handled by the initial ollama call in `App::new`
// We transition to the next state in `handle_ollama_event`
}
InitState::FetchingModels => {
// This state is handled by the initial ollama call in `App::new`
// We transition to the next state in `handle_ollama_event`
}
InitState::ModelSelection => {
if event.is_up() && self.model_selection_index > 0 {
self.model_selection_index -= 1;
}
if event.is_down() && self.model_selection_index < self.available_models.len().saturating_sub(1) {
self.model_selection_index += 1;
}
if event.is_enter() && !self.available_models.is_empty() && self.model_selection_index < self.available_models.len() {
self.selected_model = self.available_models[self.model_selection_index].clone();
self.status_message = format!("Selected model: {}", self.selected_model);
self.init_state = InitState::Complete;
// Track model change in stats
self.stats.models_used.insert(self.selected_model.clone());
// Save config
let config = Config {
general: crate::config::GeneralConfig {
default_model: self.selected_model.clone(),
ollama_host: self.ollama_client.base_url.clone(),
backend_type: self.backend_type.clone(),
project_context_file: "OWLEN.md".to_string(),
},
..Default::default()
};
crate::config::save_config(&config)?;
self.input_mode = InputMode::Normal;
}
}
InitState::Complete => {
self.input_mode = InputMode::Normal;
}
}
Ok(AppState::Running)
}
/// Handle events in normal (browsing) mode
async fn handle_normal_mode_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_quit() {
return Ok(AppState::Quit);
}
if let Some(c) = event.as_char() {
match c {
'i' => {
self.input_mode = InputMode::Editing;
self.status_message = "Type your message... (Esc to cancel, Enter to send)".to_string();
}
'm' => {
self.input_mode = InputMode::ModelSelection;
self.status_message = "Select model... (Enter to confirm, Esc to cancel)".to_string();
// Refresh model list
let _ = self.ollama_client.get_models().await;
// Set model_selection_index to the currently selected model
if let Some(index) = self.available_models.iter().position(|m| m == &self.selected_model) {
self.model_selection_index = index;
}
}
's' => {
self.input_mode = InputMode::StatsMenu;
self.status_message = "Session Statistics (Esc to close)".to_string();
}
'h' => {
self.input_mode = InputMode::Help;
self.status_message = "Help - All Available Commands (Esc to close)".to_string();
}
'f' => {
self.input_mode = InputMode::FileBrowser;
self.status_message = "File Browser - ↑/↓:Navigate Enter:Read r:Refresh Esc:Close".to_string();
self.refresh_file_list();
}
'l' => {
self.input_mode = InputMode::SessionManager;
self.status_message = "Session Manager - ↑/↓:Navigate Enter:Load s:Save d:Delete Esc:Close".to_string();
self.refresh_session_list();
}
'j' => {
// Scroll down in messages
if self.message_scroll > 0 {
self.message_scroll -= 1;
}
}
'k' => {
// Scroll up in messages
self.message_scroll += 1;
}
'c' => {
// Clear conversation
self.messages.clear();
self.active_requests.clear();
self.message_scroll = 0;
self.status_message = "Conversation cleared".to_string();
}
'r' => {
// Refresh models
let _ = self.ollama_client.get_models().await;
self.status_message = "Refreshing models...".to_string();
}
_ => {}
}
}
if event.is_up() && self.message_scroll > 0 {
self.message_scroll -= 1;
}
if event.is_down() {
self.message_scroll += 1;
}
Ok(AppState::Running)
}
/// Handle events in editing mode
async fn handle_editing_mode_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.input.clear();
self.input_cursor_position = 0;
self.status_message = "Message cancelled".to_string();
return Ok(AppState::Running);
}
if event.is_enter() && !self.input.trim().is_empty() {
let message = self.input.trim().to_string();
self.input.clear();
self.input_cursor_position = 0;
self.input_mode = InputMode::Normal;
// Add user message to conversation
self.messages.push(ConversationMessage {
role: "user".to_string(),
content: message.clone(),
request_id: None,
is_streaming: false,
});
// Update stats
self.stats.messages_sent += 1;
self.stats.total_characters_sent += message.len() as u32;
// Prepare messages for Ollama API (convert to API format)
let api_messages: Vec<Message> = self.messages
.iter()
.filter(|m| !m.is_streaming) // Don't include streaming messages
.map(|m| Message {
role: m.role.clone(),
content: m.content.clone(),
})
.collect();
// Send to Ollama
match self.ollama_client.chat(self.selected_model.clone(), api_messages).await {
Ok(request_id) => {
// Add placeholder for assistant response
let message_index = self.messages.len();
self.messages.push(ConversationMessage {
role: "assistant".to_string(),
content: String::new(),
request_id: Some(request_id),
is_streaming: true,
});
self.active_requests.insert(request_id, message_index);
self.status_message = format!("Sending message to {}...", self.selected_model);
}
Err(e) => {
self.error_message = Some(format!("Failed to send message: {}", e));
self.status_message = "Ready".to_string();
}
}
return Ok(AppState::Running);
}
if event.is_backspace() && self.input_cursor_position > 0 {
let current_index = self.input_cursor_position;
self.input.remove(current_index - 1);
self.input_cursor_position -= 1;
}
if event.is_left() && self.input_cursor_position > 0 {
self.input_cursor_position -= 1;
}
if event.is_right() && self.input_cursor_position < self.input.len() {
self.input_cursor_position += 1;
}
if let Some(c) = event.as_char() {
self.input.insert(self.input_cursor_position, c);
self.input_cursor_position += 1;
}
Ok(AppState::Running)
}
/// Handle events in model selection mode
async fn handle_model_selection_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.status_message = "Model selection cancelled".to_string();
return Ok(AppState::Running);
}
if event.is_enter() {
if !self.available_models.is_empty() && self.model_selection_index < self.available_models.len() {
self.selected_model = self.available_models[self.model_selection_index].clone();
self.status_message = format!("Selected model: {}", self.selected_model);
// Track model change in stats
self.stats.models_used.insert(self.selected_model.clone());
// Save config
let config = Config {
general: crate::config::GeneralConfig {
default_model: self.selected_model.clone(),
ollama_host: self.ollama_client.base_url.clone(),
backend_type: self.backend_type.clone(),
project_context_file: "OWLEN.md".to_string(),
},
..Default::default()
};
if let Err(e) = crate::config::save_config(&config) {
self.status_message = format!("Failed to save config: {}", e);
}
}
self.input_mode = InputMode::Normal;
return Ok(AppState::Running);
}
if event.is_up() && self.model_selection_index > 0 {
self.model_selection_index -= 1;
}
if event.is_down() && self.model_selection_index < self.available_models.len().saturating_sub(1) {
self.model_selection_index += 1;
}
Ok(AppState::Running)
}
/// Handle events in stats menu mode
async fn handle_stats_menu_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.status_message = "Press 'h' for help or 'q' to quit".to_string();
return Ok(AppState::Running);
}
Ok(AppState::Running)
}
/// Handle events in help mode
async fn handle_help_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.status_message = "Press 'h' for help or 'q' to quit".to_string();
return Ok(AppState::Running);
}
Ok(AppState::Running)
}
/// Handle events in file browser mode
async fn handle_file_browser_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.status_message = "Press 'h' for help or 'q' to quit".to_string();
return Ok(AppState::Running);
}
if event.is_up() && self.file_selection_index > 0 {
self.file_selection_index -= 1;
}
if event.is_down() && self.file_selection_index < self.available_files.len().saturating_sub(1) {
self.file_selection_index += 1;
}
if event.is_enter() && !self.available_files.is_empty() {
let selected_file = &self.available_files[self.file_selection_index];
if let Ok(content) = self.file_manager.read_file(&selected_file.path) {
// Add file content as a system message
self.messages.push(ConversationMessage {
role: "system".to_string(),
content: format!("File: {}\n\n{}", selected_file.name, content),
request_id: None,
is_streaming: false,
});
self.status_message = format!("Loaded file: {}", selected_file.name);
self.input_mode = InputMode::Normal;
} else {
self.status_message = format!("Failed to read file: {}", selected_file.name);
}
}
if let Some(c) = event.as_char() {
match c {
'r' => {
self.refresh_file_list();
self.status_message = "File list refreshed".to_string();
}
_ => {}
}
}
Ok(AppState::Running)
}
/// Handle events in session manager mode
async fn handle_session_manager_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::Normal;
self.status_message = "Press 'h' for help or 'q' to quit".to_string();
return Ok(AppState::Running);
}
if event.is_up() && self.session_selection_index > 0 {
self.session_selection_index -= 1;
}
if event.is_down() && self.session_selection_index < self.available_sessions.len().saturating_sub(1) {
self.session_selection_index += 1;
}
if event.is_enter() && !self.available_sessions.is_empty() {
// Load selected session
let session_id = &self.available_sessions[self.session_selection_index].id;
self.load_session(session_id.clone());
}
if let Some(c) = event.as_char() {
match c {
's' => {
// Save current session
self.input_mode = InputMode::SessionInput;
self.session_name_input.clear();
self.status_message = "Enter session name:".to_string();
}
'd' => {
// Delete selected session
if !self.available_sessions.is_empty() {
let session_id = &self.available_sessions[self.session_selection_index].id;
self.delete_session(session_id.clone());
}
}
'r' => {
self.refresh_session_list();
self.status_message = "Session list refreshed".to_string();
}
_ => {}
}
}
Ok(AppState::Running)
}
/// Handle events in file input mode
async fn handle_file_input_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::FileBrowser;
self.file_input.clear();
self.status_message = "File operation cancelled".to_string();
return Ok(AppState::Running);
}
if event.is_enter() {
// Process file input
self.input_mode = InputMode::FileBrowser;
self.status_message = "File operation completed".to_string();
self.file_input.clear();
return Ok(AppState::Running);
}
// Handle text input
if let Some(c) = event.as_char() {
if c.is_ascii_graphic() || c == ' ' {
self.file_input.push(c);
}
}
if event.is_backspace() && !self.file_input.is_empty() {
self.file_input.pop();
}
Ok(AppState::Running)
}
/// Handle events in session input mode
async fn handle_session_input_event(&mut self, event: Event) -> AppResult<AppState> {
if event.is_escape() {
self.input_mode = InputMode::SessionManager;
self.session_name_input.clear();
self.status_message = "Session save cancelled".to_string();
return Ok(AppState::Running);
}
if event.is_enter() && !self.session_name_input.trim().is_empty() {
// Save session with the given name
let session_name = self.session_name_input.trim().to_string();
self.save_current_session(session_name);
self.input_mode = InputMode::SessionManager;
self.session_name_input.clear();
return Ok(AppState::Running);
}
// Handle text input
if let Some(c) = event.as_char() {
if c.is_ascii_graphic() || c == ' ' {
self.session_name_input.push(c);
}
}
if event.is_backspace() && !self.session_name_input.is_empty() {
self.session_name_input.pop();
}
Ok(AppState::Running)
}
/// Handle events from Ollama client
pub async fn handle_ollama_event(&mut self, event: OllamaEvent) -> AppResult<()> {
match event {
OllamaEvent::MessageChunk { request_id, content, done } => {
if let Some(&message_index) = self.active_requests.get(&request_id) {
if let Some(message) = self.messages.get_mut(message_index) {
message.content.push_str(&content);
if done {
message.is_streaming = false;
self.active_requests.remove(&request_id);
self.status_message = "Response completed".to_string();
// Update stats
self.stats.messages_received += 1;
self.stats.total_characters_received += message.content.len() as u32;
}
}
}
}
OllamaEvent::Error { request_id, error } => {
if let Some(&message_index) = self.active_requests.get(&request_id) {
if let Some(message) = self.messages.get_mut(message_index) {
message.content = format!("Error: {}", error);
message.is_streaming = false;
}
self.active_requests.remove(&request_id);
// Update error stats
self.stats.errors_encountered += 1;
}
self.error_message = Some(error);
self.status_message = "Error occurred".to_string();
}
OllamaEvent::ModelsAvailable(models) => {
if self.input_mode == InputMode::Init {
if !models.is_empty() {
self.available_models = models;
self.model_selection_index = 0;
self.init_state = InitState::ModelSelection;
} else {
self.error_message = Some("No models found. Please make sure Ollama is running and you have downloaded some models.".to_string());
}
} else if !models.is_empty() {
self.available_models = models;
self.model_selection_index = 0;
// If current selected model is not in the list, use the first one
if !self.available_models.contains(&self.selected_model) && !self.available_models.is_empty() {
self.selected_model = self.available_models[0].clone();
}
self.status_message = format!("Found {} models", self.available_models.len());
} else {
self.status_message = "No models available".to_string();
}
}
}
Ok(())
}
    /// Get the current conversation as a displayable format
    ///
    /// Returns a borrow of the full message list; callers render it directly.
    pub fn get_display_messages(&self) -> &Vec<ConversationMessage> {
        &self.messages
    }
/// Check if currently in editing mode
pub fn is_editing(&self) -> bool {
self.input_mode == InputMode::Editing
}
/// Check if currently in stats menu mode
pub fn is_stats_menu(&self) -> bool {
self.input_mode == InputMode::StatsMenu
}
/// Check if currently in help mode
pub fn is_help(&self) -> bool {
self.input_mode == InputMode::Help
}
/// Check if currently in model selection mode
pub fn is_model_selecting(&self) -> bool {
self.input_mode == InputMode::ModelSelection
}
    /// Print final session statistics
    ///
    /// Writes a boxed summary to stdout; intended to run once after the TUI
    /// has been torn down.
    /// NOTE(review): stat rows print only the left border and use differing
    /// label widths, so the right edge of the box is open/misaligned —
    /// presumably accepted as-is; confirm before reformatting.
    pub fn print_final_stats(&self) {
        // Duration is zero when no session start was ever recorded.
        let elapsed = if let Some(start) = self.stats.session_start {
            start.elapsed()
        } else {
            std::time::Duration::from_secs(0)
        };
        let session_duration = format!("{}m {}s", elapsed.as_secs() / 60, elapsed.as_secs() % 60);
        let models_used: Vec<String> = self.stats.models_used.iter().cloned().collect();
        println!("\n┌──────────────────────────────────────┐");
        println!("│ Session Summary │");
        println!("├──────────────────────────────────────┤");
        println!("│ Duration: {:>16}", session_duration);
        println!("│ Messages Sent: {:>16}", self.stats.messages_sent);
        println!("│ Messages Received: {:>16}", self.stats.messages_received);
        println!("│ Characters Sent: {:>16}", self.stats.total_characters_sent);
        println!("│ Characters Rcvd: {:>16}", self.stats.total_characters_received);
        println!("│ Errors: {:>16}", self.stats.errors_encountered);
        println!("│ Models Used: {:>16}", models_used.len());
        if !models_used.is_empty() {
            println!("│ Models: {:25}", models_used.join(", "));
        }
        println!("└──────────────────────────────────────┘");
    }
    /// Refresh the file list for the file browser
    ///
    /// On success replaces the cached listing and resets the selection cursor.
    /// NOTE(review): a listing error is silently ignored and the stale list is
    /// kept — confirm this best-effort behavior is intended.
    fn refresh_file_list(&mut self) {
        if let Ok(files) = self.file_manager.list_files(&self.current_file_path) {
            self.available_files = files;
            self.file_selection_index = 0;
        }
    }
/// Refresh the session list for the session manager
fn refresh_session_list(&mut self) {
if let Ok(sessions) = self.database.get_session_summaries() {
self.available_sessions = sessions;
} else {
self.available_sessions = Vec::new();
}
self.session_selection_index = 0;
}
/// Load a session by ID
fn load_session(&mut self, session_id: String) {
if let Ok(Some(session)) = self.database.load_session(&session_id) {
self.messages = session.messages;
self.selected_model = session.model_used;
self.status_message = format!("Loaded session: {}", session.name);
} else {
self.status_message = "Failed to load session".to_string();
}
self.input_mode = InputMode::Normal;
}
/// Save the current session
fn save_current_session(&mut self, session_name: String) {
use std::time::SystemTime;
use uuid::Uuid;
let session = crate::database::Session {
id: Uuid::new_v4().to_string(),
name: session_name.clone(),
messages: self.messages.clone(),
created_at: SystemTime::now(),
updated_at: SystemTime::now(),
model_used: self.selected_model.clone(),
};
if let Ok(()) = self.database.save_session(&session) {
self.status_message = format!("Saved session: {}", session_name);
} else {
self.status_message = "Failed to save session".to_string();
}
self.refresh_session_list();
}
/// Delete a session by ID
fn delete_session(&mut self, session_id: String) {
if let Ok(()) = self.database.delete_session(&session_id) {
self.status_message = "Session deleted successfully".to_string();
} else {
self.status_message = "Failed to delete session".to_string();
}
self.refresh_session_list();
}
/// Check if currently in file browser mode
pub fn is_file_browser(&self) -> bool {
self.input_mode == InputMode::FileBrowser
}
/// Check if currently in session manager mode
pub fn is_session_manager(&self) -> bool {
self.input_mode == InputMode::SessionManager
}
/// Check if currently in file input mode
pub fn is_file_input(&self) -> bool {
self.input_mode == InputMode::FileInput
}
/// Check if currently in session input mode
pub fn is_session_input(&self) -> bool {
self.input_mode == InputMode::SessionInput
}
}

View File

@@ -1,644 +0,0 @@
//! Chat-specific TUI implementation
use anyhow::Result;
use owlen_core::provider::Provider;
use owlen_core::types::{ChatRequest, Message, Role, ChatParameters};
use owlen_core::Error as CoreError;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tokio::sync::mpsc;
use uuid::Uuid;
use crate::config::Config;
use crate::database::Database;
use crate::events::Event;
use crate::ui::ChatRenderer;
pub type ChatResult<T> = Result<T, Box<dyn std::error::Error>>;
/// Chat application state
///
/// Coarse run state returned from the event loop: keep running or exit.
#[derive(Debug)]
pub enum ChatState {
    Running,
    Quit,
}
/// Chat input modes
///
/// Selects which keymap/overlay is active in the chat TUI.
#[derive(Debug, Clone, PartialEq)]
pub enum ChatInputMode {
    Normal,
    Editing,
    ModelSelection,
    ConversationSelection,
    Help,
}
/// A conversation message with metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationMessage {
    /// Stable identifier for this message
    pub id: Uuid,
    /// Free-form role tag ("user", "assistant", "system")
    pub role: String,
    /// Message body; appended to while streaming
    pub content: String,
    /// Creation time of the message
    pub timestamp: std::time::SystemTime,
    /// Model that produced the message (assistant turns only)
    pub model_used: Option<String>,
    /// True while chunks are still arriving for this message
    pub is_streaming: bool,
}
impl ConversationMessage {
pub fn new(role: String, content: String) -> Self {
Self {
id: Uuid::new_v4(),
role,
content,
timestamp: std::time::SystemTime::now(),
model_used: None,
is_streaming: false,
}
}
pub fn to_core_message(&self) -> Message {
let role = match self.role.as_str() {
"user" => Role::User,
"assistant" => Role::Assistant,
"system" => Role::System,
_ => Role::User,
};
Message::new(role, self.content.clone())
}
}
/// Conversation metadata
///
/// Summary row describing a stored conversation (used by the picker).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conversation {
    pub id: Uuid,
    pub title: String,
    pub created_at: std::time::SystemTime,
    pub updated_at: std::time::SystemTime,
    /// Number of messages stored for this conversation
    pub message_count: usize,
    /// Model active when the conversation was last saved
    pub model_used: String,
}
/// Session statistics
///
/// Counters accumulated over one chat session, printed at exit.
#[derive(Debug, Clone, Default)]
pub struct ChatStats {
    /// When the session began; `None` until initialized
    pub session_start: Option<std::time::Instant>,
    pub messages_sent: u32,
    pub messages_received: u32,
    pub total_characters_sent: u32,
    pub total_characters_received: u32,
    /// Distinct model names used during the session
    pub models_used: std::collections::HashSet<String>,
    pub errors_encountered: u32,
    /// Conversation these stats currently refer to
    pub current_conversation_id: Option<Uuid>,
}
/// Chat application
///
/// Owns all TUI state for chat mode: the multi-line input editor, the
/// conversation history, the model/conversation pickers, session stats, and
/// the backing provider, database and renderer.
pub struct ChatApp {
    /// Current input mode
    pub input_mode: ChatInputMode,
    /// Current input buffer
    pub input: String,
    /// Multi-line input buffer (one entry per line)
    pub input_lines: Vec<String>,
    /// Current line in multi-line input
    pub current_input_line: usize,
    /// Cursor position in current line
    pub input_cursor_position: usize,
    /// Conversation messages
    pub messages: Vec<ConversationMessage>,
    /// Available models
    pub available_models: Vec<String>,
    /// Selected model index
    pub model_selection_index: usize,
    /// Current selected model
    pub selected_model: String,
    /// Available conversations
    pub conversations: Vec<Conversation>,
    /// Selected conversation index
    pub conversation_selection_index: usize,
    /// Current conversation ID
    pub current_conversation_id: Option<Uuid>,
    /// Message scroll position
    pub message_scroll: usize,
    /// Status message
    pub status_message: String,
    /// Error message
    pub error_message: Option<String>,
    /// Session statistics
    pub stats: ChatStats,
    /// Provider for LLM requests
    provider: Box<dyn Provider>,
    /// Active streaming requests (request id -> index into `messages`)
    active_requests: HashMap<Uuid, usize>,
    /// Database for persistence
    database: Database,
    /// Configuration
    config: Config,
    /// UI renderer
    renderer: ChatRenderer,
}
impl ChatApp {
pub fn new(provider: Box<dyn Provider>, config: Config, database: Database) -> ChatResult<Self> {
let renderer = ChatRenderer::new(&config);
let current_conversation_id = Uuid::new_v4();
let mut stats = ChatStats::default();
stats.session_start = Some(std::time::Instant::now());
stats.current_conversation_id = Some(current_conversation_id);
Ok(Self {
input_mode: ChatInputMode::Normal,
input: String::new(),
input_lines: vec![String::new()],
current_input_line: 0,
input_cursor_position: 0,
messages: Vec::new(),
available_models: Vec::new(),
model_selection_index: 0,
selected_model: config.general.default_model.clone(),
conversations: Vec::new(),
conversation_selection_index: 0,
current_conversation_id: Some(current_conversation_id),
message_scroll: 0,
status_message: "Press 'h' for help".to_string(),
error_message: None,
stats,
provider,
active_requests: HashMap::new(),
database,
config,
renderer,
})
}
    /// Handle user input events
    ///
    /// Central keymap dispatch: interprets a terminal event according to the
    /// current `ChatInputMode` and returns whether the app should keep running.
    pub async fn handle_event(&mut self, event: Event) -> ChatResult<ChatState> {
        use crossterm::event::{KeyCode, KeyModifiers};
        match event {
            Event::Key(key) => {
                // Any keypress dismisses a previously shown error.
                self.clear_error();
                match self.input_mode {
                    // Normal mode: single-key commands.
                    ChatInputMode::Normal => {
                        match (key.code, key.modifiers) {
                            (KeyCode::Char('q'), KeyModifiers::NONE) => {
                                return Ok(ChatState::Quit);
                            }
                            (KeyCode::Char('h'), KeyModifiers::NONE) => {
                                self.input_mode = ChatInputMode::Help;
                            }
                            (KeyCode::Char('m'), KeyModifiers::NONE) => {
                                // Fetch the model list before showing the picker.
                                self.refresh_models().await?;
                                self.input_mode = ChatInputMode::ModelSelection;
                            }
                            (KeyCode::Char('c'), KeyModifiers::NONE) => {
                                self.refresh_conversations().await?;
                                self.input_mode = ChatInputMode::ConversationSelection;
                            }
                            (KeyCode::Char('n'), KeyModifiers::NONE) => {
                                self.new_conversation().await?;
                            }
                            (KeyCode::Char('i'), KeyModifiers::NONE) | (KeyCode::Enter, KeyModifiers::NONE) => {
                                self.input_mode = ChatInputMode::Editing;
                            }
                            (KeyCode::Up, KeyModifiers::NONE) => {
                                self.scroll_up();
                            }
                            (KeyCode::Down, KeyModifiers::NONE) => {
                                self.scroll_down();
                            }
                            (KeyCode::PageUp, KeyModifiers::NONE) => {
                                self.page_up();
                            }
                            (KeyCode::PageDown, KeyModifiers::NONE) => {
                                self.page_down();
                            }
                            _ => {}
                        }
                    }
                    // Editing mode: multi-line text entry.
                    // Ctrl+Enter sends; plain Enter inserts a new line.
                    ChatInputMode::Editing => {
                        match (key.code, key.modifiers) {
                            (KeyCode::Esc, KeyModifiers::NONE) => {
                                self.input_mode = ChatInputMode::Normal;
                                self.clear_input();
                            }
                            (KeyCode::Enter, KeyModifiers::CTRL) => {
                                let message = self.get_input_content();
                                // Whitespace-only input is not sent.
                                if !message.trim().is_empty() {
                                    self.send_message(message).await?;
                                    self.clear_input();
                                    self.input_mode = ChatInputMode::Normal;
                                }
                            }
                            (KeyCode::Enter, KeyModifiers::NONE) => {
                                self.add_input_line();
                            }
                            (KeyCode::Backspace, KeyModifiers::NONE) => {
                                self.handle_backspace();
                            }
                            (KeyCode::Delete, KeyModifiers::NONE) => {
                                self.handle_delete();
                            }
                            (KeyCode::Left, KeyModifiers::NONE) => {
                                self.move_cursor_left();
                            }
                            (KeyCode::Right, KeyModifiers::NONE) => {
                                self.move_cursor_right();
                            }
                            (KeyCode::Up, KeyModifiers::NONE) => {
                                self.move_cursor_up();
                            }
                            (KeyCode::Down, KeyModifiers::NONE) => {
                                self.move_cursor_down();
                            }
                            (KeyCode::Home, KeyModifiers::NONE) => {
                                self.input_cursor_position = 0;
                            }
                            (KeyCode::End, KeyModifiers::NONE) => {
                                self.input_cursor_position = self.current_line().len();
                            }
                            (KeyCode::Char(c), KeyModifiers::NONE) | (KeyCode::Char(c), KeyModifiers::SHIFT) => {
                                self.insert_char(c);
                            }
                            _ => {}
                        }
                    }
                    // Model picker: Up/Down navigates, Enter confirms, Esc cancels.
                    ChatInputMode::ModelSelection => {
                        match key.code {
                            KeyCode::Esc => {
                                self.input_mode = ChatInputMode::Normal;
                            }
                            KeyCode::Enter => {
                                if !self.available_models.is_empty() {
                                    self.selected_model = self.available_models[self.model_selection_index].clone();
                                    self.stats.models_used.insert(self.selected_model.clone());
                                    self.status_message = format!("Selected model: {}", self.selected_model);
                                }
                                self.input_mode = ChatInputMode::Normal;
                            }
                            KeyCode::Up => {
                                if self.model_selection_index > 0 {
                                    self.model_selection_index -= 1;
                                }
                            }
                            KeyCode::Down => {
                                if self.model_selection_index < self.available_models.len().saturating_sub(1) {
                                    self.model_selection_index += 1;
                                }
                            }
                            _ => {}
                        }
                    }
                    // Conversation picker: Enter loads, 'd' deletes the highlighted entry.
                    ChatInputMode::ConversationSelection => {
                        match key.code {
                            KeyCode::Esc => {
                                self.input_mode = ChatInputMode::Normal;
                            }
                            KeyCode::Enter => {
                                if !self.conversations.is_empty() {
                                    self.load_conversation(self.conversations[self.conversation_selection_index].id).await?;
                                }
                                self.input_mode = ChatInputMode::Normal;
                            }
                            KeyCode::Up => {
                                if self.conversation_selection_index > 0 {
                                    self.conversation_selection_index -= 1;
                                }
                            }
                            KeyCode::Down => {
                                if self.conversation_selection_index < self.conversations.len().saturating_sub(1) {
                                    self.conversation_selection_index += 1;
                                }
                            }
                            KeyCode::Char('d') => {
                                if !self.conversations.is_empty() {
                                    let conv_id = self.conversations[self.conversation_selection_index].id;
                                    self.delete_conversation(conv_id).await?;
                                }
                            }
                            _ => {}
                        }
                    }
                    // Help overlay: any key dismisses it.
                    ChatInputMode::Help => {
                        self.input_mode = ChatInputMode::Normal;
                    }
                }
            }
            Event::Resize(_, _) => {
                // Handle terminal resize
            }
        }
        Ok(ChatState::Running)
    }
    /// Send a message to the LLM
    ///
    /// Appends the user's message, streams the assistant's reply into a
    /// placeholder message, updates stats, and persists the conversation both
    /// before the request and once the stream completes.
    async fn send_message(&mut self, content: String) -> ChatResult<()> {
        let user_message = ConversationMessage::new("user".to_string(), content.clone());
        self.messages.push(user_message);
        self.stats.messages_sent += 1;
        self.stats.total_characters_sent += content.len() as u32;
        // Save message to database
        self.save_current_conversation().await?;
        // Convert messages to core format
        let core_messages: Vec<Message> = self.messages.iter()
            .map(|m| m.to_core_message())
            .collect();
        let request = ChatRequest {
            model: self.selected_model.clone(),
            messages: core_messages,
            parameters: ChatParameters {
                temperature: Some(self.config.llm.temperature),
                max_tokens: self.config.llm.max_tokens,
                extra: HashMap::new(),
            },
        };
        // Add placeholder for assistant response
        let mut assistant_message = ConversationMessage::new("assistant".to_string(), String::new());
        assistant_message.is_streaming = true;
        assistant_message.model_used = Some(self.selected_model.clone());
        let message_index = self.messages.len();
        self.messages.push(assistant_message);
        match self.provider.chat_stream(request).await {
            Ok(mut stream) => {
                use futures_util::StreamExt;
                // Append each chunk into the placeholder until the final chunk
                // arrives or a streaming error aborts the loop.
                while let Some(response) = stream.next().await {
                    match response {
                        Ok(chat_response) => {
                            if let Some(message) = self.messages.get_mut(message_index) {
                                message.content.push_str(&chat_response.message.content);
                                message.is_streaming = !chat_response.is_final;
                                if chat_response.is_final {
                                    self.stats.messages_received += 1;
                                    self.stats.total_characters_received += message.content.len() as u32;
                                    // Save completed conversation
                                    self.save_current_conversation().await?;
                                }
                            }
                        }
                        Err(e) => {
                            self.handle_error(format!("Streaming error: {}", e));
                            break;
                        }
                    }
                }
            }
            Err(e) => {
                // Remove the placeholder message on error
                self.messages.pop();
                self.handle_error(format!("Failed to send message: {}", e));
            }
        }
        self.scroll_to_bottom();
        Ok(())
    }
/// Refresh available models
async fn refresh_models(&mut self) -> ChatResult<()> {
match self.provider.list_models().await {
Ok(models) => {
self.available_models = models.into_iter().map(|m| m.id).collect();
self.model_selection_index = 0;
// Update selection index if current model is in the list
if let Some(index) = self.available_models.iter().position(|m| m == &self.selected_model) {
self.model_selection_index = index;
}
}
Err(e) => {
self.handle_error(format!("Failed to fetch models: {}", e));
}
}
Ok(())
}
/// Refresh available conversations
async fn refresh_conversations(&mut self) -> ChatResult<()> {
match self.database.list_conversations().await {
Ok(conversations) => {
self.conversations = conversations;
self.conversation_selection_index = 0;
}
Err(e) => {
self.handle_error(format!("Failed to load conversations: {}", e));
}
}
Ok(())
}
    /// Create a new conversation
    ///
    /// Persists the current conversation first, then starts an empty one under
    /// a fresh UUID and resets the scroll position.
    async fn new_conversation(&mut self) -> ChatResult<()> {
        self.save_current_conversation().await?;
        self.messages.clear();
        self.current_conversation_id = Some(Uuid::new_v4());
        self.stats.current_conversation_id = self.current_conversation_id;
        self.message_scroll = 0;
        self.status_message = "Started new conversation".to_string();
        Ok(())
    }
    /// Load a conversation
    ///
    /// Saves the active conversation, then replaces the message list with the
    /// stored one and scrolls to the most recent message.
    async fn load_conversation(&mut self, conversation_id: Uuid) -> ChatResult<()> {
        // Save current conversation first
        self.save_current_conversation().await?;
        match self.database.load_conversation(conversation_id).await {
            Ok(messages) => {
                self.messages = messages;
                self.current_conversation_id = Some(conversation_id);
                self.stats.current_conversation_id = Some(conversation_id);
                self.message_scroll = 0;
                self.scroll_to_bottom();
                self.status_message = "Conversation loaded".to_string();
            }
            Err(e) => {
                self.handle_error(format!("Failed to load conversation: {}", e));
            }
        }
        Ok(())
    }
    /// Delete a conversation
    ///
    /// Removes the stored conversation, refreshes the picker, and starts a new
    /// conversation if the deleted one was active.
    async fn delete_conversation(&mut self, conversation_id: Uuid) -> ChatResult<()> {
        match self.database.delete_conversation(conversation_id).await {
            Ok(_) => {
                self.refresh_conversations().await?;
                self.status_message = "Conversation deleted".to_string();
                // If we deleted the current conversation, start a new one
                if self.current_conversation_id == Some(conversation_id) {
                    self.new_conversation().await?;
                }
            }
            Err(e) => {
                self.handle_error(format!("Failed to delete conversation: {}", e));
            }
        }
        Ok(())
    }
/// Save current conversation to database
async fn save_current_conversation(&mut self) -> ChatResult<()> {
if let Some(conversation_id) = self.current_conversation_id {
if !self.messages.is_empty() {
let _ = self.database.save_conversation(conversation_id, &self.messages).await;
}
}
Ok(())
}
    // Input handling methods
    //
    // NOTE(review): `input_cursor_position` is advanced one step per typed
    // char but is used as a byte index into the line for `String::insert` /
    // `String::remove` and compared against `len()` (bytes). This is only
    // consistent for ASCII input; multi-byte characters would desync the
    // cursor or panic on a non-boundary index — confirm ASCII-only input is
    // assumed.

    /// Join all input lines into the final message text.
    fn get_input_content(&self) -> String {
        self.input_lines.join("\n")
    }
    /// Reset the editor to a single empty line with the cursor at the origin.
    fn clear_input(&mut self) {
        self.input_lines = vec![String::new()];
        self.current_input_line = 0;
        self.input_cursor_position = 0;
    }
    /// Insert a new empty line below the current one and move onto it.
    fn add_input_line(&mut self) {
        self.input_lines.insert(self.current_input_line + 1, String::new());
        self.current_input_line += 1;
        self.input_cursor_position = 0;
    }
    /// Borrow the line the cursor is on.
    fn current_line(&self) -> &String {
        &self.input_lines[self.current_input_line]
    }
    /// Mutably borrow the line the cursor is on.
    fn current_line_mut(&mut self) -> &mut String {
        &mut self.input_lines[self.current_input_line]
    }
    /// Insert a character at the cursor and advance it.
    fn insert_char(&mut self, c: char) {
        self.current_line_mut().insert(self.input_cursor_position, c);
        self.input_cursor_position += 1;
    }
    /// Delete the character before the cursor; at column 0, join the current
    /// line onto the previous one.
    fn handle_backspace(&mut self) {
        if self.input_cursor_position > 0 {
            self.current_line_mut().remove(self.input_cursor_position - 1);
            self.input_cursor_position -= 1;
        } else if self.current_input_line > 0 {
            // Join current line with previous line
            let current_content = self.input_lines.remove(self.current_input_line);
            self.current_input_line -= 1;
            self.input_cursor_position = self.current_line().len();
            self.current_line_mut().push_str(&current_content);
        }
    }
    /// Delete the character under the cursor; at end of line, join the next
    /// line onto the current one.
    fn handle_delete(&mut self) {
        if self.input_cursor_position < self.current_line().len() {
            self.current_line_mut().remove(self.input_cursor_position);
        } else if self.current_input_line < self.input_lines.len() - 1 {
            // Join next line with current line
            let next_content = self.input_lines.remove(self.current_input_line + 1);
            self.current_line_mut().push_str(&next_content);
        }
    }
    /// Move left one position, wrapping to the end of the previous line.
    fn move_cursor_left(&mut self) {
        if self.input_cursor_position > 0 {
            self.input_cursor_position -= 1;
        } else if self.current_input_line > 0 {
            self.current_input_line -= 1;
            self.input_cursor_position = self.current_line().len();
        }
    }
    /// Move right one position, wrapping to the start of the next line.
    fn move_cursor_right(&mut self) {
        if self.input_cursor_position < self.current_line().len() {
            self.input_cursor_position += 1;
        } else if self.current_input_line < self.input_lines.len() - 1 {
            self.current_input_line += 1;
            self.input_cursor_position = 0;
        }
    }
    /// Move up one line, clamping the column to the new line's length.
    fn move_cursor_up(&mut self) {
        if self.current_input_line > 0 {
            self.current_input_line -= 1;
            self.input_cursor_position = self.input_cursor_position.min(self.current_line().len());
        }
    }
    /// Move down one line, clamping the column to the new line's length.
    fn move_cursor_down(&mut self) {
        if self.current_input_line < self.input_lines.len() - 1 {
            self.current_input_line += 1;
            self.input_cursor_position = self.input_cursor_position.min(self.current_line().len());
        }
    }
    // Scrolling methods
    //
    // `message_scroll` is an index into `messages`, clamped to [0, len - 1].

    /// Scroll up one message.
    fn scroll_up(&mut self) {
        if self.message_scroll > 0 {
            self.message_scroll -= 1;
        }
    }
    /// Scroll down one message, stopping at the last one.
    fn scroll_down(&mut self) {
        if self.message_scroll < self.messages.len().saturating_sub(1) {
            self.message_scroll += 1;
        }
    }
    /// Scroll up by a fixed page of 10 messages.
    fn page_up(&mut self) {
        self.message_scroll = self.message_scroll.saturating_sub(10);
    }
    /// Scroll down by a fixed page of 10 messages, clamped to the last one.
    fn page_down(&mut self) {
        self.message_scroll = (self.message_scroll + 10).min(self.messages.len().saturating_sub(1));
    }
    /// Jump to the newest message.
    fn scroll_to_bottom(&mut self) {
        self.message_scroll = self.messages.len().saturating_sub(1);
    }
    // Error handling

    /// Record an error: show it on the error line and bump the error counter.
    fn handle_error<S: Into<String>>(&mut self, error: S) {
        let error_msg = error.into();
        self.error_message = Some(error_msg);
        self.stats.errors_encountered += 1;
    }
    /// Clear any currently displayed error.
    fn clear_error(&mut self) {
        self.error_message = None;
    }
    /// Get renderer for UI drawing
    pub fn renderer(&self) -> &ChatRenderer {
        &self.renderer
    }
    /// Print final statistics
    ///
    /// Writes a plain-text session summary to stdout after the TUI exits.
    /// Prints nothing when no session start was ever recorded.
    pub fn print_final_stats(&self) {
        if let Some(start_time) = self.stats.session_start {
            let duration = start_time.elapsed();
            println!("\n=== Chat Session Statistics ===");
            println!("Session duration: {:?}", duration);
            println!("Messages sent: {}", self.stats.messages_sent);
            println!("Messages received: {}", self.stats.messages_received);
            println!("Characters sent: {}", self.stats.total_characters_sent);
            println!("Characters received: {}", self.stats.total_characters_received);
            println!("Models used: {:?}", self.stats.models_used);
            println!("Errors encountered: {}", self.stats.errors_encountered);
        }
    }
}

View File

@@ -2,6 +2,7 @@ use anyhow::Result;
use owlen_core::{
session::{SessionController, SessionOutcome},
types::{ChatParameters, ChatResponse, Conversation, ModelInfo, Role},
ui::{AppState, AutoScroll, FocusedPanel, InputMode},
};
use ratatui::style::{Color, Modifier, Style};
use tokio::sync::mpsc;
@@ -11,80 +12,6 @@ use uuid::Uuid;
use crate::config;
use crate::events::Event;
use std::collections::HashSet;
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AppState {
Running,
Quit,
}
pub struct AutoScroll {
pub scroll: usize,
pub content_len: usize,
pub stick_to_bottom: bool,
}
impl Default for AutoScroll {
fn default() -> Self {
Self {
scroll: 0,
content_len: 0,
stick_to_bottom: true,
}
}
}
impl AutoScroll {
pub fn on_viewport(&mut self, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h);
if self.stick_to_bottom {
self.scroll = max;
} else {
self.scroll = self.scroll.min(max);
}
}
pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) {
let max = self.content_len.saturating_sub(viewport_h) as isize;
let s = (self.scroll as isize + delta).clamp(0, max) as usize;
self.scroll = s;
self.stick_to_bottom = s as isize == max;
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InputMode {
Normal,
Editing,
ProviderSelection,
ModelSelection,
Help,
Visual,
Command,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FocusedPanel {
Chat,
Thinking,
Input,
}
impl fmt::Display for InputMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let label = match self {
InputMode::Normal => "Normal",
InputMode::Editing => "Editing",
InputMode::ModelSelection => "Model",
InputMode::ProviderSelection => "Provider",
InputMode::Help => "Help",
InputMode::Visual => "Visual",
InputMode::Command => "Command",
};
f.write_str(label)
}
}
/// Messages emitted by asynchronous streaming tasks
#[derive(Debug)]
@@ -1184,13 +1111,11 @@ impl ChatApp {
pub fn scroll_half_page_down(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
let delta = (self.viewport_height / 2) as isize;
self.auto_scroll.on_user_scroll(delta, self.viewport_height);
self.auto_scroll.scroll_half_page_down(self.viewport_height);
}
FocusedPanel::Thinking => {
let viewport_height = self.thinking_viewport_height.max(1);
let delta = (viewport_height / 2) as isize;
self.thinking_scroll.on_user_scroll(delta, viewport_height);
self.thinking_scroll.scroll_half_page_down(viewport_height);
}
FocusedPanel::Input => {}
}
@@ -1200,13 +1125,11 @@ impl ChatApp {
pub fn scroll_half_page_up(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
let delta = -((self.viewport_height / 2) as isize);
self.auto_scroll.on_user_scroll(delta, self.viewport_height);
self.auto_scroll.scroll_half_page_up(self.viewport_height);
}
FocusedPanel::Thinking => {
let viewport_height = self.thinking_viewport_height.max(1);
let delta = -((viewport_height / 2) as isize);
self.thinking_scroll.on_user_scroll(delta, viewport_height);
self.thinking_scroll.scroll_half_page_up(viewport_height);
}
FocusedPanel::Input => {}
}
@@ -1216,13 +1139,11 @@ impl ChatApp {
pub fn scroll_full_page_down(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
let delta = self.viewport_height as isize;
self.auto_scroll.on_user_scroll(delta, self.viewport_height);
self.auto_scroll.scroll_full_page_down(self.viewport_height);
}
FocusedPanel::Thinking => {
let viewport_height = self.thinking_viewport_height.max(1);
let delta = viewport_height as isize;
self.thinking_scroll.on_user_scroll(delta, viewport_height);
self.thinking_scroll.scroll_full_page_down(viewport_height);
}
FocusedPanel::Input => {}
}
@@ -1232,13 +1153,11 @@ impl ChatApp {
pub fn scroll_full_page_up(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
let delta = -(self.viewport_height as isize);
self.auto_scroll.on_user_scroll(delta, self.viewport_height);
self.auto_scroll.scroll_full_page_up(self.viewport_height);
}
FocusedPanel::Thinking => {
let viewport_height = self.thinking_viewport_height.max(1);
let delta = -(viewport_height as isize);
self.thinking_scroll.on_user_scroll(delta, viewport_height);
self.thinking_scroll.scroll_full_page_up(viewport_height);
}
FocusedPanel::Input => {}
}
@@ -1248,12 +1167,10 @@ impl ChatApp {
pub fn jump_to_top(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
self.auto_scroll.scroll = 0;
self.auto_scroll.stick_to_bottom = false;
self.auto_scroll.jump_to_top();
}
FocusedPanel::Thinking => {
self.thinking_scroll.scroll = 0;
self.thinking_scroll.stick_to_bottom = false;
self.thinking_scroll.jump_to_top();
}
FocusedPanel::Input => {}
}
@@ -1263,13 +1180,11 @@ impl ChatApp {
pub fn jump_to_bottom(&mut self) {
match self.focused_panel {
FocusedPanel::Chat => {
self.auto_scroll.stick_to_bottom = true;
self.auto_scroll.on_viewport(self.viewport_height);
self.auto_scroll.jump_to_bottom(self.viewport_height);
}
FocusedPanel::Thinking => {
let viewport_height = self.thinking_viewport_height.max(1);
self.thinking_scroll.stick_to_bottom = true;
self.thinking_scroll.on_viewport(viewport_height);
self.thinking_scroll.jump_to_bottom(viewport_height);
}
FocusedPanel::Input => {}
}
@@ -1576,94 +1491,17 @@ impl ChatApp {
fn find_next_word_boundary(&self, row: usize, col: usize) -> Option<usize> {
let line = self.get_line_at_row(row)?;
let chars: Vec<char> = line.chars().collect();
if col >= chars.len() {
return Some(chars.len());
}
let mut pos = col;
let is_word_char = |c: char| c.is_alphanumeric() || c == '_';
// Skip current word
if is_word_char(chars[pos]) {
while pos < chars.len() && is_word_char(chars[pos]) {
pos += 1;
}
} else {
// Skip non-word characters
while pos < chars.len() && !is_word_char(chars[pos]) {
pos += 1;
}
}
Some(pos)
owlen_core::ui::find_next_word_boundary(&line, col)
}
fn find_word_end(&self, row: usize, col: usize) -> Option<usize> {
let line = self.get_line_at_row(row)?;
let chars: Vec<char> = line.chars().collect();
if col >= chars.len() {
return Some(chars.len());
}
let mut pos = col;
let is_word_char = |c: char| c.is_alphanumeric() || c == '_';
// If on a word character, move to end of current word
if is_word_char(chars[pos]) {
while pos < chars.len() && is_word_char(chars[pos]) {
pos += 1;
}
// Move back one to be ON the last character
if pos > 0 {
pos -= 1;
}
} else {
// Skip non-word characters
while pos < chars.len() && !is_word_char(chars[pos]) {
pos += 1;
}
// Now on first char of next word, move to its end
while pos < chars.len() && is_word_char(chars[pos]) {
pos += 1;
}
if pos > 0 {
pos -= 1;
}
}
Some(pos)
owlen_core::ui::find_word_end(&line, col)
}
fn find_prev_word_boundary(&self, row: usize, col: usize) -> Option<usize> {
let line = self.get_line_at_row(row)?;
let chars: Vec<char> = line.chars().collect();
if col == 0 || chars.is_empty() {
return Some(0);
}
let mut pos = col.min(chars.len());
let is_word_char = |c: char| c.is_alphanumeric() || c == '_';
// Move back one position first
if pos > 0 {
pos -= 1;
}
// Skip non-word characters
while pos > 0 && !is_word_char(chars[pos]) {
pos -= 1;
}
// Skip word characters to find start of word
while pos > 0 && is_word_char(chars[pos - 1]) {
pos -= 1;
}
Some(pos)
owlen_core::ui::find_prev_word_boundary(&line, col)
}
fn yank_from_panel(&self) -> Option<String> {
@@ -1679,7 +1517,7 @@ impl ChatApp {
};
let lines = self.get_rendered_lines();
extract_text_from_selection(&lines, start_pos, end_pos)
owlen_core::ui::extract_text_from_selection(&lines, start_pos, end_pos)
}
pub fn update_thinking_from_last_message(&mut self) {
@@ -1736,66 +1574,6 @@ impl ChatApp {
}
}
fn extract_text_from_selection(lines: &[String], start: (usize, usize), end: (usize, usize)) -> Option<String> {
if lines.is_empty() || start.0 >= lines.len() {
return None;
}
let start_row = start.0;
let start_col = start.1;
let end_row = end.0.min(lines.len() - 1);
let end_col = end.1;
if start_row == end_row {
// Single line selection
let line = &lines[start_row];
let chars: Vec<char> = line.chars().collect();
let start_c = start_col.min(chars.len());
let end_c = end_col.min(chars.len());
if start_c >= end_c {
return None;
}
let selected: String = chars[start_c..end_c].iter().collect();
Some(selected)
} else {
// Multi-line selection
let mut result = Vec::new();
// First line: from start_col to end
let first_line = &lines[start_row];
let first_chars: Vec<char> = first_line.chars().collect();
let start_c = start_col.min(first_chars.len());
if start_c < first_chars.len() {
result.push(first_chars[start_c..].iter().collect::<String>());
}
// Middle lines: entire lines
for row in (start_row + 1)..end_row {
if row < lines.len() {
result.push(lines[row].clone());
}
}
// Last line: from start to end_col
if end_row < lines.len() && end_row > start_row {
let last_line = &lines[end_row];
let last_chars: Vec<char> = last_line.chars().collect();
let end_c = end_col.min(last_chars.len());
if end_c > 0 {
result.push(last_chars[..end_c].iter().collect::<String>());
}
}
if result.is_empty() {
None
} else {
Some(result.join("\n"))
}
}
}
fn configure_textarea_defaults(textarea: &mut TextArea<'static>) {
textarea.set_placeholder_text("Type your message here...");
textarea.set_tab_length(4);

View File

@@ -1,787 +0,0 @@
//! Code-specific TUI implementation
use anyhow::Result;
use owlen_core::provider::Provider;
use owlen_core::types::{ChatRequest, Message, Role, ChatParameters};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use uuid::Uuid;
use crate::config::Config;
use crate::database::Database;
use crate::events::Event;
use crate::files::FileManager;
use crate::ui::CodeRenderer;
use crate::chat::{ConversationMessage, ChatResult, ChatStats};
/// Code application state
///
/// Coarse run state for the code-assistant event loop.
#[derive(Debug)]
pub enum CodeState {
    Running,
    Quit,
}
/// Code-specific input modes
///
/// Selects which keymap/overlay is active in the code-assistant TUI.
#[derive(Debug, Clone, PartialEq)]
pub enum CodeInputMode {
    Normal,
    Editing,
    ModelSelection,
    FileBrowser,
    FileSearch,
    ProjectExplorer,
    Help,
}
/// File context information
///
/// A file's contents plus metadata, attached to the conversation as context.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileContext {
    pub path: String,
    pub content: String,
    /// Detected language, when known
    pub language: Option<String>,
    pub line_count: usize,
    /// Size in bytes
    pub size: usize,
}
/// Code session with project context
///
/// Tracks the project the assistant is working in and which files are loaded.
#[derive(Debug, Clone)]
pub struct CodeSession {
    /// Project root directory, when detectable
    pub project_root: Option<String>,
    /// Files currently attached as context
    pub active_files: Vec<FileContext>,
    /// Recently opened file paths
    pub recent_files: Vec<String>,
    /// Dominant language of the project, when known
    pub language_context: Option<String>,
}
/// Code application optimized for programming assistance
///
/// Owns all TUI state for code mode: the multi-line editor, conversation,
/// model picker, file browser/search state, session stats, and the backing
/// provider, database and renderer.
pub struct CodeApp {
    /// Current input mode
    pub input_mode: CodeInputMode,
    /// Multi-line input with syntax awareness
    pub input_lines: Vec<String>,
    /// Current line in input
    pub current_input_line: usize,
    /// Cursor position in current line
    pub input_cursor_position: usize,
    /// Conversation messages with code context
    pub messages: Vec<ConversationMessage>,
    /// Available models
    pub available_models: Vec<String>,
    /// Selected model index
    pub model_selection_index: usize,
    /// Current selected model
    pub selected_model: String,
    /// Code session information
    pub session: CodeSession,
    /// File manager for project operations
    file_manager: FileManager,
    /// Available files in current directory
    pub available_files: Vec<crate::files::FileInfo>,
    /// Selected file index
    pub file_selection_index: usize,
    /// Current file path for operations
    pub current_file_path: String,
    /// File search query
    pub file_search_query: String,
    /// Filtered file results
    pub filtered_files: Vec<crate::files::FileInfo>,
    /// Message scroll position
    pub message_scroll: usize,
    /// Status message
    pub status_message: String,
    /// Error message
    pub error_message: Option<String>,
    /// Session statistics
    pub stats: ChatStats,
    /// Provider for LLM requests
    provider: Box<dyn Provider>,
    /// Active streaming requests (request id -> index into `messages`)
    active_requests: HashMap<Uuid, usize>,
    /// Database for persistence
    database: Database,
    /// Configuration
    config: Config,
    /// UI renderer for code mode
    renderer: CodeRenderer,
}
impl CodeApp {
pub fn new(provider: Box<dyn Provider>, config: Config, database: Database) -> ChatResult<Self> {
let file_manager = FileManager::new(config.clone());
let renderer = CodeRenderer::new(&config);
let session = CodeSession {
project_root: std::env::current_dir().ok().map(|p| p.to_string_lossy().to_string()),
active_files: Vec::new(),
recent_files: Vec::new(),
language_context: None,
};
let mut stats = ChatStats::default();
stats.session_start = Some(std::time::Instant::now());
stats.current_conversation_id = Some(Uuid::new_v4());
let mut app = Self {
input_mode: CodeInputMode::Normal,
input_lines: vec![String::new()],
current_input_line: 0,
input_cursor_position: 0,
messages: Vec::new(),
available_models: Vec::new(),
model_selection_index: 0,
selected_model: config.general.default_model.clone(),
session,
file_manager,
available_files: Vec::new(),
file_selection_index: 0,
current_file_path: ".".to_string(),
file_search_query: String::new(),
filtered_files: Vec::new(),
message_scroll: 0,
status_message: "Press 'h' for help | Code Assistant Mode".to_string(),
error_message: None,
stats,
provider,
active_requests: HashMap::new(),
database,
config,
renderer,
};
// Add code assistant system message
app.add_code_context();
// Load project context if available
app.load_project_context();
Ok(app)
}
/// Add code assistant context to the conversation
///
/// Seeds the message list with the system prompt framing the model as a
/// programming assistant. Called once from `new`, before any user input.
fn add_code_context(&mut self) {
    let code_context = ConversationMessage::new(
        "system".to_string(),
        r#"You are OWLEN Code Assistant, an AI programming helper. You excel at:
- Explaining code and programming concepts clearly
- Helping debug issues and providing solutions
- Suggesting improvements and optimizations
- Writing clean, efficient code in various languages
- Code reviews and best practices
- Architecture and design patterns
- Refactoring and modernization
When helping with code:
- Provide clear, well-commented examples
- Explain your reasoning and approach
- Suggest multiple solutions when appropriate
- Consider performance, readability, and maintainability
- Ask clarifying questions when context is needed
- Use proper syntax highlighting and formatting
Current mode: Code Assistance
Available project context will be provided when files are opened or referenced."#.to_string(),
    );
    self.messages.push(code_context);
}
/// Load project context from current directory
///
/// When a project context file is present and readable, its contents are
/// appended to the conversation as a system message; the file listing is
/// refreshed either way.
fn load_project_context(&mut self) {
    match self.file_manager.load_project_context() {
        Ok(Some(context)) => {
            let project_message = ConversationMessage::new(
                "system".to_string(),
                format!("Project Context:\n{}", context),
            );
            self.messages.push(project_message);
        }
        // Missing or unreadable context is not an error here.
        _ => {}
    }
    self.refresh_files();
}
/// Handle user input events
///
/// Routes a terminal event through the current `CodeInputMode` and returns
/// `CodeState::Quit` when the user asked to exit, `CodeState::Running`
/// otherwise. Errors from async sub-operations (sending, opening files,
/// listing models) are surfaced via `?`.
pub async fn handle_event(&mut self, event: Event) -> ChatResult<CodeState> {
    use crossterm::event::{KeyCode, KeyModifiers};
    match event {
        Event::Key(key) => {
            // Any keypress dismisses a previously displayed error message.
            self.clear_error();
            match self.input_mode {
                CodeInputMode::Normal => {
                    match (key.code, key.modifiers) {
                        (KeyCode::Char('q'), KeyModifiers::NONE) => {
                            return Ok(CodeState::Quit);
                        }
                        (KeyCode::Char('h'), KeyModifiers::NONE) => {
                            self.input_mode = CodeInputMode::Help;
                        }
                        (KeyCode::Char('m'), KeyModifiers::NONE) => {
                            // Fetch the model list before showing the picker.
                            self.refresh_models().await?;
                            self.input_mode = CodeInputMode::ModelSelection;
                        }
                        (KeyCode::Char('f'), KeyModifiers::NONE) => {
                            self.refresh_files();
                            self.input_mode = CodeInputMode::FileBrowser;
                        }
                        (KeyCode::Char('p'), KeyModifiers::NONE) => {
                            self.input_mode = CodeInputMode::ProjectExplorer;
                        }
                        (KeyCode::Char('/'), KeyModifiers::NONE) => {
                            self.input_mode = CodeInputMode::FileSearch;
                            self.file_search_query.clear();
                        }
                        (KeyCode::Char('i'), KeyModifiers::NONE) | (KeyCode::Enter, KeyModifiers::NONE) => {
                            self.input_mode = CodeInputMode::Editing;
                        }
                        (KeyCode::Up, KeyModifiers::NONE) => {
                            self.scroll_up();
                        }
                        (KeyCode::Down, KeyModifiers::NONE) => {
                            self.scroll_down();
                        }
                        (KeyCode::PageUp, KeyModifiers::NONE) => {
                            self.page_up();
                        }
                        (KeyCode::PageDown, KeyModifiers::NONE) => {
                            self.page_down();
                        }
                        (KeyCode::Char('o'), KeyModifiers::NONE) => {
                            // Open the currently highlighted file from the last listing.
                            if !self.available_files.is_empty() && self.file_selection_index < self.available_files.len() {
                                self.open_file(self.available_files[self.file_selection_index].path.clone()).await?;
                            }
                        }
                        _ => {}
                    }
                }
                CodeInputMode::Editing => {
                    match (key.code, key.modifiers) {
                        (KeyCode::Esc, KeyModifiers::NONE) => {
                            self.input_mode = CodeInputMode::Normal;
                            self.clear_input();
                        }
                        // Ctrl+Enter submits the buffer; plain Enter adds a line.
                        (KeyCode::Enter, KeyModifiers::CTRL) => {
                            let message = self.get_input_content();
                            if !message.trim().is_empty() {
                                self.send_message(message).await?;
                                self.clear_input();
                                self.input_mode = CodeInputMode::Normal;
                            }
                        }
                        (KeyCode::Enter, KeyModifiers::NONE) => {
                            self.add_input_line();
                        }
                        (KeyCode::Tab, KeyModifiers::NONE) => {
                            // Add code indentation
                            self.insert_string(" ");
                        }
                        (KeyCode::Backspace, KeyModifiers::NONE) => {
                            self.handle_backspace();
                        }
                        (KeyCode::Delete, KeyModifiers::NONE) => {
                            self.handle_delete();
                        }
                        (KeyCode::Left, KeyModifiers::NONE) => {
                            self.move_cursor_left();
                        }
                        (KeyCode::Right, KeyModifiers::NONE) => {
                            self.move_cursor_right();
                        }
                        (KeyCode::Up, KeyModifiers::NONE) => {
                            self.move_cursor_up();
                        }
                        (KeyCode::Down, KeyModifiers::NONE) => {
                            self.move_cursor_down();
                        }
                        (KeyCode::Home, KeyModifiers::NONE) => {
                            self.input_cursor_position = 0;
                        }
                        (KeyCode::End, KeyModifiers::NONE) => {
                            self.input_cursor_position = self.current_line().len();
                        }
                        (KeyCode::Char(c), KeyModifiers::NONE) | (KeyCode::Char(c), KeyModifiers::SHIFT) => {
                            self.insert_char(c);
                        }
                        _ => {}
                    }
                }
                CodeInputMode::ModelSelection => {
                    match key.code {
                        KeyCode::Esc => {
                            self.input_mode = CodeInputMode::Normal;
                        }
                        KeyCode::Enter => {
                            if !self.available_models.is_empty() {
                                self.selected_model = self.available_models[self.model_selection_index].clone();
                                self.stats.models_used.insert(self.selected_model.clone());
                                self.status_message = format!("Selected model: {}", self.selected_model);
                            }
                            self.input_mode = CodeInputMode::Normal;
                        }
                        KeyCode::Up => {
                            if self.model_selection_index > 0 {
                                self.model_selection_index -= 1;
                            }
                        }
                        KeyCode::Down => {
                            if self.model_selection_index < self.available_models.len().saturating_sub(1) {
                                self.model_selection_index += 1;
                            }
                        }
                        _ => {}
                    }
                }
                CodeInputMode::FileBrowser => {
                    match key.code {
                        KeyCode::Esc => {
                            self.input_mode = CodeInputMode::Normal;
                        }
                        KeyCode::Enter => {
                            if !self.available_files.is_empty() {
                                let file_info = &self.available_files[self.file_selection_index];
                                // Directories are entered; plain files are opened into context.
                                if file_info.is_dir {
                                    self.current_file_path = file_info.path.clone();
                                    self.refresh_files();
                                } else {
                                    self.open_file(file_info.path.clone()).await?;
                                    self.input_mode = CodeInputMode::Normal;
                                }
                            }
                        }
                        KeyCode::Up => {
                            if self.file_selection_index > 0 {
                                self.file_selection_index -= 1;
                            }
                        }
                        KeyCode::Down => {
                            if self.file_selection_index < self.available_files.len().saturating_sub(1) {
                                self.file_selection_index += 1;
                            }
                        }
                        KeyCode::Backspace => {
                            // Go to parent directory
                            if let Some(parent) = std::path::Path::new(&self.current_file_path).parent() {
                                self.current_file_path = parent.to_string_lossy().to_string();
                                self.refresh_files();
                            }
                        }
                        _ => {}
                    }
                }
                CodeInputMode::FileSearch => {
                    match key.code {
                        KeyCode::Esc => {
                            self.input_mode = CodeInputMode::Normal;
                            self.file_search_query.clear();
                        }
                        KeyCode::Enter => {
                            // Open the first (best) match of the current query.
                            if !self.filtered_files.is_empty() {
                                self.open_file(self.filtered_files[0].path.clone()).await?;
                                self.input_mode = CodeInputMode::Normal;
                                self.file_search_query.clear();
                            }
                        }
                        KeyCode::Backspace => {
                            self.file_search_query.pop();
                            self.filter_files();
                        }
                        KeyCode::Char(c) => {
                            self.file_search_query.push(c);
                            self.filter_files();
                        }
                        _ => {}
                    }
                }
                CodeInputMode::ProjectExplorer => {
                    match key.code {
                        KeyCode::Esc => {
                            self.input_mode = CodeInputMode::Normal;
                        }
                        KeyCode::Char('r') => {
                            self.load_project_context();
                            self.status_message = "Project context refreshed".to_string();
                        }
                        _ => {}
                    }
                }
                CodeInputMode::Help => {
                    // Any key leaves the help screen.
                    self.input_mode = CodeInputMode::Normal;
                }
            }
        }
        Event::Resize(_, _) => {
            // Handle terminal resize
        }
    }
    Ok(CodeState::Running)
}
/// Send a message to the LLM with code context
///
/// Appends the user message (augmented with the contents of any active
/// files), streams the assistant reply into a placeholder message, and
/// updates session statistics. The placeholder is removed if the request
/// fails before streaming starts.
async fn send_message(&mut self, content: String) -> ChatResult<()> {
    // Add file context if relevant files are active
    let enhanced_content = if !self.session.active_files.is_empty() {
        let mut context = String::new();
        context.push_str(&content);
        context.push_str("\n\n--- Active File Context ---\n");
        for file_ctx in &self.session.active_files {
            context.push_str(&format!("File: {}\n", file_ctx.path));
            if let Some(lang) = &file_ctx.language {
                context.push_str(&format!("Language: {}\n", lang));
            }
            context.push_str("```\n");
            context.push_str(&file_ctx.content);
            context.push_str("\n```\n\n");
        }
        context
    } else {
        content.clone()
    };
    let user_message = ConversationMessage::new("user".to_string(), enhanced_content);
    self.messages.push(user_message);
    self.stats.messages_sent += 1;
    // Stats count only the user's own text, not the injected file context.
    self.stats.total_characters_sent += content.len() as u32;
    // Convert messages to core format
    let core_messages: Vec<Message> = self.messages.iter()
        .map(|m| {
            let role = match m.role.as_str() {
                "user" => Role::User,
                "assistant" => Role::Assistant,
                "system" => Role::System,
                // Unknown role strings are treated as user input.
                _ => Role::User,
            };
            Message::new(role, m.content.clone())
        })
        .collect();
    let request = ChatRequest {
        model: self.selected_model.clone(),
        messages: core_messages,
        parameters: ChatParameters {
            temperature: Some(self.config.llm.temperature),
            max_tokens: self.config.llm.max_tokens,
            extra: HashMap::new(),
        },
    };
    // Add placeholder for assistant response
    let mut assistant_message = ConversationMessage::new("assistant".to_string(), String::new());
    assistant_message.is_streaming = true;
    assistant_message.model_used = Some(self.selected_model.clone());
    let message_index = self.messages.len();
    self.messages.push(assistant_message);
    match self.provider.chat_stream(request).await {
        Ok(mut stream) => {
            use futures_util::StreamExt;
            // Append chunks into the placeholder until the provider marks
            // the response as final (or an error ends the stream early).
            while let Some(response) = stream.next().await {
                match response {
                    Ok(chat_response) => {
                        if let Some(message) = self.messages.get_mut(message_index) {
                            message.content.push_str(&chat_response.message.content);
                            message.is_streaming = !chat_response.is_final;
                            if chat_response.is_final {
                                self.stats.messages_received += 1;
                                self.stats.total_characters_received += message.content.len() as u32;
                            }
                        }
                    }
                    Err(e) => {
                        self.handle_error(format!("Streaming error: {}", e));
                        break;
                    }
                }
            }
        }
        Err(e) => {
            // Remove the placeholder message on error
            self.messages.pop();
            self.handle_error(format!("Failed to send message: {}", e));
        }
    }
    self.scroll_to_bottom();
    Ok(())
}
/// Open a file and add it to the active context
///
/// Reads the file, records it as a `FileContext` (keeping at most the
/// 5 most recently opened), tracks it in the recent-files list (cap 10),
/// and updates the session's language context. Read failures are reported
/// via the status/error mechanism rather than returned.
async fn open_file(&mut self, file_path: String) -> ChatResult<()> {
    match self.file_manager.read_file(&file_path) {
        Ok(content) => {
            let language = self.detect_language(&file_path);
            // Compute derived values before moving `content` into the
            // context: avoids the previous full clone of the file body
            // and the trailing `last().unwrap()` lookup.
            let line_count = content.lines().count();
            let size = content.len();
            let file_context = FileContext {
                path: file_path.clone(),
                content,
                language: language.clone(),
                line_count,
                size,
            };
            // Add to active files (limit to last 5 files)
            self.session.active_files.push(file_context);
            if self.session.active_files.len() > 5 {
                self.session.active_files.remove(0);
            }
            // Update recent files
            if !self.session.recent_files.contains(&file_path) {
                self.session.recent_files.push(file_path.clone());
                if self.session.recent_files.len() > 10 {
                    self.session.recent_files.remove(0);
                }
            }
            // Set language context
            if let Some(lang) = language {
                self.session.language_context = Some(lang);
            }
            self.status_message = format!("Opened: {} ({} lines)", file_path, line_count);
        }
        Err(e) => {
            self.handle_error(format!("Failed to open file: {}", e));
        }
    }
    Ok(())
}
/// Detect programming language from file extension
///
/// Returns the language identifier used for context tagging, or `None`
/// for paths with no extension, a non-UTF-8 extension, or an unknown one.
fn detect_language(&self, file_path: &str) -> Option<String> {
    // `?` bails out for missing or non-UTF-8 extensions, matching the
    // previous nested `if let` / `to_str()?` behavior.
    let extension = std::path::Path::new(file_path).extension()?.to_str()?;
    let language = match extension {
        "rs" => "rust",
        "py" => "python",
        "js" | "jsx" => "javascript",
        "ts" | "tsx" => "typescript",
        "go" => "go",
        "java" => "java",
        // Fix: `.hpp` is a C++ header — it was previously tagged "c".
        "cpp" | "cxx" | "cc" | "hpp" => "cpp",
        "c" | "h" => "c",
        "cs" => "csharp",
        "rb" => "ruby",
        "php" => "php",
        "swift" => "swift",
        "kt" => "kotlin",
        "scala" => "scala",
        "sh" | "bash" => "bash",
        "sql" => "sql",
        "html" => "html",
        "css" => "css",
        "scss" => "scss",
        "json" => "json",
        "yaml" | "yml" => "yaml",
        "toml" => "toml",
        "xml" => "xml",
        "md" => "markdown",
        _ => return None,
    };
    Some(language.to_string())
}
/// Refresh available models
///
/// Replaces the model list from the provider and repositions the
/// selection cursor on the currently selected model when it is still
/// present (falling back to the first entry otherwise). Fetch failures
/// are recorded as UI errors, not returned.
async fn refresh_models(&mut self) -> ChatResult<()> {
    let models = match self.provider.list_models().await {
        Ok(models) => models,
        Err(e) => {
            self.handle_error(format!("Failed to fetch models: {}", e));
            return Ok(());
        }
    };
    self.available_models = models.into_iter().map(|m| m.id).collect();
    self.model_selection_index = self
        .available_models
        .iter()
        .position(|m| m == &self.selected_model)
        .unwrap_or(0);
    Ok(())
}
/// Refresh file list in current directory
///
/// Re-reads `current_file_path` and resets the selection cursor; listing
/// failures are surfaced through the error banner.
fn refresh_files(&mut self) {
    let listing = self.file_manager.list_files(&self.current_file_path);
    match listing {
        Ok(files) => {
            self.available_files = files;
            self.file_selection_index = 0;
        }
        Err(e) => self.handle_error(format!("Failed to list files: {}", e)),
    }
}
/// Filter files based on search query
///
/// Case-insensitive substring match of the query against each file name;
/// an empty query yields the full listing.
fn filter_files(&mut self) {
    if self.file_search_query.is_empty() {
        self.filtered_files = self.available_files.clone();
    } else {
        // Lowercase the query once, not once per file as before.
        let query = self.file_search_query.to_lowercase();
        self.filtered_files = self.available_files
            .iter()
            .filter(|file| file.name.to_lowercase().contains(&query))
            .cloned()
            .collect();
    }
}
// Input handling methods (similar to chat.rs but optimized for code)
//
// NOTE(review): `input_cursor_position` is advanced one step per char but
// used as a *byte* index into the line (`String::insert`/`remove`,
// `len()`). These only agree for ASCII input; a multi-byte character
// could leave the cursor off a char boundary and panic — confirm intended
// input range.
/// Join the edit buffer into the message text that will be sent.
fn get_input_content(&self) -> String {
    self.input_lines.join("\n")
}
/// Reset the edit buffer to a single empty line.
fn clear_input(&mut self) {
    self.input_lines = vec![String::new()];
    self.current_input_line = 0;
    self.input_cursor_position = 0;
}
/// Insert a fresh empty line below the cursor and move onto it.
fn add_input_line(&mut self) {
    self.input_lines.insert(self.current_input_line + 1, String::new());
    self.current_input_line += 1;
    self.input_cursor_position = 0;
}
/// Line currently under the cursor (read-only).
fn current_line(&self) -> &String {
    &self.input_lines[self.current_input_line]
}
/// Line currently under the cursor (mutable).
fn current_line_mut(&mut self) -> &mut String {
    &mut self.input_lines[self.current_input_line]
}
/// Insert one character at the cursor and advance the cursor.
fn insert_char(&mut self, c: char) {
    self.current_line_mut().insert(self.input_cursor_position, c);
    self.input_cursor_position += 1;
}
/// Insert a string character by character at the cursor.
fn insert_string(&mut self, s: &str) {
    for c in s.chars() {
        self.insert_char(c);
    }
}
/// Delete the char before the cursor; at column 0, join with the
/// previous line instead.
fn handle_backspace(&mut self) {
    if self.input_cursor_position > 0 {
        self.current_line_mut().remove(self.input_cursor_position - 1);
        self.input_cursor_position -= 1;
    } else if self.current_input_line > 0 {
        let current_content = self.input_lines.remove(self.current_input_line);
        self.current_input_line -= 1;
        self.input_cursor_position = self.current_line().len();
        self.current_line_mut().push_str(&current_content);
    }
}
/// Delete the char under the cursor; at end of line, join with the
/// next line instead.
fn handle_delete(&mut self) {
    if self.input_cursor_position < self.current_line().len() {
        self.current_line_mut().remove(self.input_cursor_position);
    } else if self.current_input_line < self.input_lines.len() - 1 {
        let next_content = self.input_lines.remove(self.current_input_line + 1);
        self.current_line_mut().push_str(&next_content);
    }
}
/// Move left, wrapping to the end of the previous line.
fn move_cursor_left(&mut self) {
    if self.input_cursor_position > 0 {
        self.input_cursor_position -= 1;
    } else if self.current_input_line > 0 {
        self.current_input_line -= 1;
        self.input_cursor_position = self.current_line().len();
    }
}
/// Move right, wrapping to the start of the next line.
fn move_cursor_right(&mut self) {
    if self.input_cursor_position < self.current_line().len() {
        self.input_cursor_position += 1;
    } else if self.current_input_line < self.input_lines.len() - 1 {
        self.current_input_line += 1;
        self.input_cursor_position = 0;
    }
}
/// Move up one line, clamping the column to the new line's length.
fn move_cursor_up(&mut self) {
    if self.current_input_line > 0 {
        self.current_input_line -= 1;
        self.input_cursor_position = self.input_cursor_position.min(self.current_line().len());
    }
}
/// Move down one line, clamping the column to the new line's length.
fn move_cursor_down(&mut self) {
    if self.current_input_line < self.input_lines.len() - 1 {
        self.current_input_line += 1;
        self.input_cursor_position = self.input_cursor_position.min(self.current_line().len());
    }
}
// Scrolling methods — `message_scroll` indexes into `messages` and is
// always clamped to the last message.
/// Scroll one message up (towards older messages).
fn scroll_up(&mut self) {
    self.message_scroll = self.message_scroll.saturating_sub(1);
}
/// Scroll one message down, never past the last message.
fn scroll_down(&mut self) {
    let last_index = self.messages.len().saturating_sub(1);
    if self.message_scroll < last_index {
        self.message_scroll += 1;
    }
}
/// Jump ten messages up.
fn page_up(&mut self) {
    self.message_scroll = self.message_scroll.checked_sub(10).unwrap_or(0);
}
/// Jump ten messages down, clamped to the last message.
fn page_down(&mut self) {
    let last_index = self.messages.len().saturating_sub(1);
    self.message_scroll = usize::min(self.message_scroll + 10, last_index);
}
/// Snap the view to the newest message.
fn scroll_to_bottom(&mut self) {
    self.message_scroll = self.messages.len().saturating_sub(1);
}
// Error handling
/// Record an error for display and bump the session error counter.
fn handle_error<S: Into<String>>(&mut self, error: S) {
    self.error_message = Some(error.into());
    self.stats.errors_encountered += 1;
}
/// Dismiss any currently displayed error.
fn clear_error(&mut self) {
    self.error_message = None;
}
/// Get renderer for UI drawing
pub fn renderer(&self) -> &CodeRenderer {
    &self.renderer
}
/// Print final statistics
///
/// Dumps a session summary to stdout; does nothing if the session never
/// recorded a start time.
pub fn print_final_stats(&self) {
    let start_time = match self.stats.session_start {
        Some(start) => start,
        None => return,
    };
    let duration = start_time.elapsed();
    println!("\n=== Code Assistant Session Statistics ===");
    println!("Session duration: {:?}", duration);
    println!("Messages sent: {}", self.stats.messages_sent);
    println!("Messages received: {}", self.stats.messages_received);
    println!("Characters sent: {}", self.stats.total_characters_sent);
    println!("Characters received: {}", self.stats.total_characters_received);
    println!("Models used: {:?}", self.stats.models_used);
    println!("Errors encountered: {}", self.stats.errors_encountered);
    println!("Active files: {}", self.session.active_files.len());
    println!("Language context: {:?}", self.session.language_context);
}
}

View File

@@ -1,8 +1,9 @@
use anyhow::Result;
use owlen_core::session::SessionController;
use owlen_core::ui::{AppState, InputMode};
use tokio::sync::mpsc;
use crate::chat_app::{AppState, ChatApp, InputMode, SessionEvent};
use crate::chat_app::{ChatApp, SessionEvent};
use crate::events::Event;
const DEFAULT_SYSTEM_PROMPT: &str =

View File

@@ -1,152 +0,0 @@
use sled::Db;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::time::SystemTime;
use crate::app::ConversationMessage;
const DB_PATH: &str = "~/.config/owlen/sessions.db";
/// A named, persisted conversation.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Session {
    /// Unique identifier; forms the sled key `session_<id>`.
    pub id: String,
    /// Human-readable display name.
    pub name: String,
    /// Full message history of the conversation.
    pub messages: Vec<ConversationMessage>,
    /// Creation timestamp.
    pub created_at: SystemTime,
    /// Last-modification timestamp; sessions are listed newest-first by this.
    pub updated_at: SystemTime,
    /// Model that was active in this session.
    pub model_used: String,
}
/// Handle to the sled key-value store; clones share the same database.
#[derive(Clone)]
pub struct Database {
    db: Db,
}
impl Database {
    /// Open (or create) the sled database.
    ///
    /// The location defaults to `~/.config/owlen/sessions.db` (tilde
    /// expanded) and can be overridden via the `OWLEN_DB_PATH`
    /// environment variable.
    pub fn new() -> Result<Self> {
        let path = if let Ok(custom_path) = std::env::var("OWLEN_DB_PATH") {
            custom_path
        } else {
            shellexpand::tilde(DB_PATH).to_string()
        };
        let db = sled::open(path)?;
        Ok(Self { db })
    }
    /// Persist the most recent conversation under the fixed `last_session` key.
    pub fn save_conversation(&self, messages: &[ConversationMessage]) -> Result<()> {
        let serialized = serde_json::to_string(messages)?;
        self.db.insert("last_session", serialized.as_bytes())?;
        Ok(())
    }
    /// Load the conversation stored by `save_conversation`, if any.
    pub fn load_conversation(&self) -> Result<Option<Vec<ConversationMessage>>> {
        if let Some(serialized) = self.db.get("last_session")? {
            let serialized: &[u8] = &serialized;
            let messages: Vec<ConversationMessage> = serde_json::from_slice(serialized)?;
            Ok(Some(messages))
        } else {
            Ok(None)
        }
    }
    /// Save a named session
    ///
    /// Writes the record under `session_<id>` and registers the id in the
    /// session index.
    pub fn save_session(&self, session: &Session) -> Result<()> {
        let key = format!("session_{}", session.id);
        let serialized = serde_json::to_string(session)?;
        self.db.insert(key.as_bytes(), serialized.as_bytes())?;
        // Also update the list of session IDs
        self.add_session_to_list(&session.id)?;
        Ok(())
    }
    /// Load a specific session by ID
    pub fn load_session(&self, session_id: &str) -> Result<Option<Session>> {
        let key = format!("session_{}", session_id);
        if let Some(serialized) = self.db.get(key.as_bytes())? {
            let serialized: &[u8] = &serialized;
            let session: Session = serde_json::from_slice(serialized)?;
            Ok(Some(session))
        } else {
            Ok(None)
        }
    }
    /// Delete a session
    ///
    /// Removes both the session record and its entry in the index.
    pub fn delete_session(&self, session_id: &str) -> Result<()> {
        let key = format!("session_{}", session_id);
        self.db.remove(key.as_bytes())?;
        self.remove_session_from_list(session_id)?;
        Ok(())
    }
    /// List all saved sessions
    ///
    /// Loads every session referenced by the index; ids whose record is
    /// missing are silently skipped.
    pub fn list_sessions(&self) -> Result<Vec<Session>> {
        let session_ids = self.get_session_list()?;
        let mut sessions = Vec::new();
        for session_id in session_ids {
            if let Some(session) = self.load_session(&session_id)? {
                sessions.push(session);
            }
        }
        // Sort by updated_at (most recent first)
        sessions.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
        Ok(sessions)
    }
    /// Get summary of sessions (id, name, message count, last updated)
    pub fn get_session_summaries(&self) -> Result<Vec<SessionSummary>> {
        let sessions = self.list_sessions()?;
        let summaries = sessions.into_iter().map(|s| SessionSummary {
            id: s.id,
            name: s.name,
            message_count: s.messages.len(),
            last_updated: s.updated_at,
            model_used: s.model_used,
        }).collect();
        Ok(summaries)
    }
    /// Internal method to maintain session list
    // NOTE(review): this read-modify-write of `session_list` is not
    // atomic; concurrent writers could drop an id — confirm single-writer
    // usage.
    fn add_session_to_list(&self, session_id: &str) -> Result<()> {
        let mut session_ids = self.get_session_list()?;
        if !session_ids.contains(&session_id.to_string()) {
            session_ids.push(session_id.to_string());
            let serialized = serde_json::to_string(&session_ids)?;
            self.db.insert("session_list", serialized.as_bytes())?;
        }
        Ok(())
    }
    /// Internal method to remove session from list
    fn remove_session_from_list(&self, session_id: &str) -> Result<()> {
        let mut session_ids = self.get_session_list()?;
        session_ids.retain(|id| id != session_id);
        let serialized = serde_json::to_string(&session_ids)?;
        self.db.insert("session_list", serialized.as_bytes())?;
        Ok(())
    }
    /// Get list of session IDs
    fn get_session_list(&self) -> Result<Vec<String>> {
        if let Some(serialized) = self.db.get("session_list")? {
            let serialized: &[u8] = &serialized;
            let session_ids: Vec<String> = serde_json::from_slice(serialized)?;
            Ok(session_ids)
        } else {
            Ok(Vec::new())
        }
    }
}
/// Lightweight view of a `Session` for listings — no message bodies.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SessionSummary {
    pub id: String,
    pub name: String,
    /// Number of messages in the full session.
    pub message_count: usize,
    /// `updated_at` of the underlying session.
    pub last_updated: SystemTime,
    pub model_used: String,
}

View File

@@ -1,269 +0,0 @@
use anyhow::{Result, Context};
use std::fs;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
use crate::config::Config;
/// Metadata for one regular file returned by `FileManager::list_files`.
#[derive(Debug, Clone)]
pub struct FileInfo {
    /// Full path of the entry as discovered.
    pub path: PathBuf,
    /// Bare file name.
    pub name: String,
    /// Size in bytes.
    pub size: u64,
    /// Last-modified timestamp.
    pub modified: SystemTime,
    /// Whether the file could actually be opened for reading when probed.
    pub is_readable: bool,
    /// Whether the file's permissions allow writing.
    pub is_writable: bool,
}
/// File-system facade that enforces configuration limits (size caps,
/// optional backups) for all project file operations.
pub struct FileManager {
    config: Config,
}
impl FileManager {
/// Construct a manager bound to the given configuration.
pub fn new(config: Config) -> Self {
    Self { config }
}
/// Read a file and return its contents
///
/// Enforces the configured size limit (`files.max_file_size_mb`) before
/// reading.
///
/// # Errors
/// Fails when metadata cannot be read, the file exceeds the size limit,
/// or the file is not valid UTF-8 text.
pub fn read_file<P: AsRef<Path>>(&self, path: P) -> Result<String> {
    let path = path.as_ref();
    let metadata = fs::metadata(path)
        .with_context(|| format!("Failed to get metadata for {}", path.display()))?;
    // Compare in bytes. The previous MB integer division truncated, so
    // files up to (limit + 1 MB - 1 byte) slipped past the check.
    let max_bytes = self.config.files.max_file_size_mb.saturating_mul(1024 * 1024);
    if metadata.len() > max_bytes {
        let size_mb = metadata.len() / (1024 * 1024);
        return Err(anyhow::anyhow!(
            "File {} is too large ({} MB > {} MB limit)",
            path.display(),
            size_mb,
            self.config.files.max_file_size_mb
        ));
    }
    let content = fs::read_to_string(path)
        .with_context(|| format!("Failed to read file {}", path.display()))?;
    Ok(content)
}
/// Write content to a file
///
/// Snapshots the existing file first when `files.backup_files` is
/// enabled, and creates any missing parent directories before writing.
pub fn write_file<P: AsRef<Path>>(&self, path: P, content: &str) -> Result<()> {
    let path = path.as_ref();
    // Create backup if enabled
    if self.config.files.backup_files && path.exists() {
        self.create_backup(path)?;
    }
    // Ensure parent directory exists
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)
            .with_context(|| format!("Failed to create directory {}", parent.display()))?;
    }
    fs::write(path, content)
        .with_context(|| format!("Failed to write file {}", path.display()))?;
    Ok(())
}
/// Create a backup of the file
///
/// Copies `foo.ext` to `foo.ext.backup` alongside the original; files
/// without an extension get a `txt.backup` suffix.
fn create_backup<P: AsRef<Path>>(&self, path: P) -> Result<()> {
    let path = path.as_ref();
    let original_ext = path.extension().and_then(|s| s.to_str()).unwrap_or("txt");
    let backup_path = path.with_extension(format!("{}.backup", original_ext));
    fs::copy(path, &backup_path)
        .with_context(|| format!("Failed to create backup at {}", backup_path.display()))?;
    Ok(())
}
/// List files in a directory
///
/// Returns metadata for every *regular file* directly inside `dir`,
/// sorted by name. Subdirectories are skipped entirely (not recursed
/// into, not listed).
// NOTE(review): `is_readable` is probed by actually opening every file,
// which costs one open per entry — may be slow in large directories.
pub fn list_files<P: AsRef<Path>>(&self, dir: P) -> Result<Vec<FileInfo>> {
    let dir = dir.as_ref();
    let entries = fs::read_dir(dir)
        .with_context(|| format!("Failed to read directory {}", dir.display()))?;
    let mut files = Vec::new();
    for entry in entries {
        let entry = entry?;
        let path = entry.path();
        if path.is_file() {
            let metadata = entry.metadata()?;
            let name = entry.file_name().to_string_lossy().to_string();
            files.push(FileInfo {
                path: path.clone(),
                name,
                size: metadata.len(),
                modified: metadata.modified()?,
                is_readable: path.exists() && fs::File::open(&path).is_ok(),
                is_writable: !metadata.permissions().readonly(),
            });
        }
    }
    // Sort by name
    files.sort_by(|a, b| a.name.cmp(&b.name));
    Ok(files)
}
/// Check whether a path exists on disk.
pub fn file_exists<P: AsRef<Path>>(&self, path: P) -> bool {
    let target = path.as_ref();
    target.exists()
}
/// Get file info
///
/// Builds a `FileInfo` for a single path, probing readability by
/// attempting to open the file.
pub fn get_file_info<P: AsRef<Path>>(&self, path: P) -> Result<FileInfo> {
    let path = path.as_ref();
    let metadata = fs::metadata(path)
        .with_context(|| format!("Failed to get metadata for {}", path.display()))?;
    Ok(FileInfo {
        path: path.to_path_buf(),
        name: path.file_name().unwrap_or_default().to_string_lossy().to_string(),
        size: metadata.len(),
        modified: metadata.modified()?,
        is_readable: path.exists() && fs::File::open(&path).is_ok(),
        is_writable: !metadata.permissions().readonly(),
    })
}
/// Append content to a file
///
/// Creates the file if it does not exist, then appends `content` verbatim.
pub fn append_file<P: AsRef<Path>>(&self, path: P, content: &str) -> Result<()> {
    use std::io::Write;
    let path = path.as_ref();
    let mut handle = std::fs::OpenOptions::new()
        .create(true)
        .append(true)
        .open(path)
        .with_context(|| format!("Failed to open file for appending {}", path.display()))?;
    handle.write_all(content.as_bytes())
        .with_context(|| format!("Failed to append to file {}", path.display()))?;
    Ok(())
}
/// Load project context file (OWLEN.md)
///
/// Returns `Ok(None)` both when the configured context file is absent
/// and when it exists but cannot be read — a broken context file never
/// aborts startup.
pub fn load_project_context(&self) -> Result<Option<String>> {
    let context_file = &self.config.general.project_context_file;
    if !self.file_exists(context_file) {
        return Ok(None);
    }
    // Read errors are deliberately swallowed into `None`.
    Ok(self.read_file(context_file).ok())
}
/// Create a default project context file
///
/// Writes a starter template to the configured context file path; a
/// no-op when the file already exists.
pub fn create_default_project_context(&self) -> Result<()> {
    let context_file = &self.config.general.project_context_file;
    if !self.file_exists(context_file) {
        let default_content = r#"# Project Context - OWLlama
This file provides context about your project to the AI assistant.
## Project Description
Describe your project here.
## Key Files and Structure
List important files, directories, and their purposes.
## Technologies Used
- Programming languages
- Frameworks
- Tools and dependencies
## Development Guidelines
- Coding standards
- Best practices
- Testing approach
## Current Focus
What you're currently working on or need help with.
---
*This file is automatically loaded as context for AI conversations.*
"#;
        self.write_file(context_file, default_content)?;
    }
    Ok(())
}
}
/// Utility functions for common file operations
pub mod utils {
use super::*;
/// Get the current working directory
pub fn get_current_dir() -> Result<PathBuf> {
std::env::current_dir()
.context("Failed to get current directory")
}
/// Expand tilde in path
///
/// Resolves a leading `~` via `shellexpand`; paths without a tilde pass
/// through unchanged.
pub fn expand_path<P: AsRef<Path>>(path: P) -> PathBuf {
    let path_str = path.as_ref().to_string_lossy();
    let expanded = shellexpand::tilde(&path_str);
    PathBuf::from(expanded.as_ref())
}
/// Get relative path from current directory
///
/// Canonicalizes `path` and strips the current-directory prefix. When
/// the path lies outside the current directory, the absolute path is
/// returned instead of an error.
pub fn get_relative_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
    let current_dir = get_current_dir()?;
    let absolute_path = path.as_ref().canonicalize()
        .context("Failed to canonicalize path")?;
    absolute_path.strip_prefix(&current_dir)
        .map(|p| p.to_path_buf())
        .or_else(|_| Ok(absolute_path))
}
/// Check if path is a text file based on extension
///
/// Known text extensions match case-insensitively. Files without any
/// extension fall back to a heuristic: an all-ASCII file name (e.g.
/// `Makefile`, `README`) is assumed to be text.
///
/// TypeScript/JSX extensions (`ts`, `tsx`, `jsx`) are now included,
/// matching the language set recognized elsewhere in the workspace.
pub fn is_text_file<P: AsRef<Path>>(path: P) -> bool {
    let path = path.as_ref();
    if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
        matches!(ext.to_lowercase().as_str(),
            "txt" | "md" | "rs" | "py" | "js" | "jsx" | "ts" | "tsx" | "html" | "css" | "json" |
            "toml" | "yaml" | "yml" | "xml" | "csv" | "log" | "sh" | "bash" |
            "c" | "cpp" | "h" | "hpp" | "java" | "go" | "php" | "rb" | "swift" |
            "kt" | "scala" | "r" | "sql" | "dockerfile" | "makefile"
        )
    } else {
        // Files without extensions might be text (like Makefile, README, etc.)
        path.file_name()
            .and_then(|name| name.to_str())
            .map(|name| name.chars().all(|c| c.is_ascii()))
            .unwrap_or(false)
    }
}
/// Format file size in human readable format
///
/// Scales by powers of 1024 up to TB. Byte counts are printed as whole
/// numbers ("512 B"); larger units get one decimal place ("1.5 KB").
pub fn format_file_size(size: u64) -> String {
    const UNITS: &[&str] = &["B", "KB", "MB", "GB", "TB"];
    let mut value = size as f64;
    let mut unit = 0usize;
    while value >= 1024.0 && unit + 1 < UNITS.len() {
        value /= 1024.0;
        unit += 1;
    }
    match unit {
        0 => format!("{} {}", value as u64, UNITS[unit]),
        _ => format!("{:.1} {}", value, UNITS[unit]),
    }
}
}

View File

@@ -4,6 +4,7 @@ pub mod config;
pub mod events;
pub mod ui;
pub use chat_app::{AppState, ChatApp, InputMode, SessionEvent};
pub use chat_app::{ChatApp, SessionEvent};
pub use code_app::CodeApp;
pub use events::{Event, EventHandler};
pub use owlen_core::ui::{AppState, FocusedPanel, InputMode};

View File

@@ -1,293 +0,0 @@
use anyhow::Result;
use futures_util::StreamExt;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tokio::sync::mpsc;
use uuid::Uuid;
/// Events that can be sent from the Ollama client
///
/// Delivered over the client's unbounded channel; chunks are correlated
/// to their originating request via `request_id`.
#[derive(Debug, Clone)]
pub enum OllamaEvent {
    /// Streaming response chunk
    MessageChunk {
        /// Id returned by the request that produced this chunk.
        request_id: Uuid,
        /// Text delta for this chunk.
        content: String,
        /// True on the final chunk of a response.
        done: bool,
    },
    /// Error occurred during request
    Error {
        request_id: Uuid,
        error: String,
    },
    /// Available models list
    ModelsAvailable(Vec<String>),
}
/// Message in the conversation
///
/// Serialized as-is into the Ollama API payload.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Speaker role label, sent verbatim to the API.
    pub role: String,
    pub content: String,
}
/// Request to Ollama's chat API
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<Message>,
    /// Always `true` in this client; responses arrive as streamed chunks.
    stream: bool,
}
/// Response from Ollama's chat API (streaming)
#[derive(Debug, Deserialize)]
struct ChatResponse {
    message: MessageResponse,
    /// True on the final chunk of the stream.
    done: bool,
}
/// Inner `message` object of a streaming chat chunk.
#[derive(Debug, Deserialize)]
struct MessageResponse {
    content: String,
}
/// Response from models endpoint
#[derive(Debug, Deserialize)]
struct ModelsResponse {
    models: Vec<ModelInfo>,
}
/// One entry in the models listing.
#[derive(Debug, Deserialize)]
struct ModelInfo {
    name: String,
}
/// Generate request for single completion
#[derive(Debug, Serialize)]
struct GenerateRequest {
    model: String,
    prompt: String,
    stream: bool,
}
/// Generate response (streaming)
#[derive(Debug, Deserialize)]
struct GenerateResponse {
    response: String,
    /// True on the final chunk of the stream.
    done: bool,
}
/// Async HTTP client for an Ollama server.
///
/// Cheap to clone; streaming results and errors are delivered through
/// `event_sender` rather than returned directly.
#[derive(Clone)]
pub struct OllamaClient {
    client: Client,
    /// Base URL of the Ollama server.
    pub base_url: String,
    /// Channel on which streaming chunks and errors are emitted.
    pub event_sender: mpsc::UnboundedSender<OllamaEvent>,
}
impl OllamaClient {
/// Construct a client for the given server URL.
///
/// Events produced by subsequent requests are pushed onto `event_sender`.
pub fn new(
    base_url: String,
    event_sender: mpsc::UnboundedSender<OllamaEvent>,
) -> Self {
    Self {
        client: Client::new(),
        base_url,
        event_sender,
    }
}
/// Start a chat conversation with streaming response
///
/// POSTs the conversation to `/api/chat` and spawns a background task
/// that forwards each streamed JSON line to `event_sender` as a
/// `MessageChunk` tagged with the returned request id. A 404 from the
/// server triggers a fallback to the `generate` endpoint with a
/// flattened "role: content" prompt; other HTTP failures are reported as
/// an `Error` event and the id is still returned.
pub async fn chat(&self, model: String, messages: Vec<Message>) -> Result<Uuid> {
    let request_id = Uuid::new_v4();
    let url = format!("{}/api/chat", self.base_url);
    let request = ChatRequest {
        model: model.clone(), // Clone model for potential fallback
        messages,
        stream: true,
    };
    let response = self.client
        .post(&url)
        .json(&request)
        .send()
        .await?;
    if response.status() == reqwest::StatusCode::NOT_FOUND {
        // Fallback to generate endpoint
        let prompt = request.messages.into_iter().map(|m| format!("{}: {}", m.role, m.content)).collect::<Vec<String>>().join("\n");
        return self.generate(model, prompt).await;
    }
    if !response.status().is_success() {
        let error = format!("HTTP error: {}", response.status());
        self.send_error(request_id, error).await;
        return Ok(request_id);
    }
    let mut stream = response.bytes_stream();
    let sender = self.event_sender.clone();
    // Spawn task to handle streaming response
    tokio::spawn(async move {
        while let Some(chunk) = stream.next().await {
            match chunk {
                Ok(bytes) => {
                    let text = String::from_utf8_lossy(&bytes);
                    // Parse each line as potential JSON
                    // NOTE(review): there is no partial-line buffering, so a
                    // JSON object split across two network chunks fails to
                    // parse and is reported as an Error event — confirm
                    // chunks always align with line boundaries.
                    for line in text.lines() {
                        if line.trim().is_empty() {
                            continue;
                        }
                        match serde_json::from_str::<ChatResponse>(line) {
                            Ok(response) => {
                                let _ = sender.send(OllamaEvent::MessageChunk {
                                    request_id,
                                    content: response.message.content,
                                    done: response.done,
                                });
                                if response.done {
                                    break;
                                }
                            }
                            Err(e) => {
                                let _ = sender.send(OllamaEvent::Error {
                                    request_id,
                                    error: format!("JSON parse error: {}", e),
                                });
                            }
                        }
                    }
                }
                Err(e) => {
                    let _ = sender.send(OllamaEvent::Error {
                        request_id,
                        error: format!("Stream error: {}", e),
                    });
                    break;
                }
            }
        }
    });
    Ok(request_id)
}
/// Generate a single completion (alternative to chat)
pub async fn generate(&self, model: String, prompt: String) -> Result<Uuid> {
let request_id = Uuid::new_v4();
let url = format!("{}/api/generate", self.base_url);
let request = GenerateRequest {
model,
prompt,
stream: true,
};
let response = self.client
.post(&url)
.json(&request)
.send()
.await?;
if !response.status().is_success() {
let error = format!("HTTP error: {}", response.status());
self.send_error(request_id, error).await;
return Ok(request_id);
}
let mut stream = response.bytes_stream();
let sender = self.event_sender.clone();
tokio::spawn(async move {
while let Some(chunk) = stream.next().await {
match chunk {
Ok(bytes) => {
let text = String::from_utf8_lossy(&bytes);
for line in text.lines() {
if line.trim().is_empty() {
continue;
}
match serde_json::from_str::<GenerateResponse>(line) {
Ok(response) => {
let _ = sender.send(OllamaEvent::MessageChunk {
request_id,
content: response.response,
done: response.done,
});
if response.done {
break;
}
}
Err(e) => {
let _ = sender.send(OllamaEvent::Error {
request_id,
error: format!("JSON parse error: {}", e),
});
}
}
}
}
Err(e) => {
let _ = sender.send(OllamaEvent::Error {
request_id,
error: format!("Stream error: {}", e),
});
break;
}
}
}
});
Ok(request_id)
}
/// Get list of available models
pub async fn get_models(&self) -> Result<()> {
let url = format!("{}/api/tags", self.base_url);
let response = self.client
.get(&url)
.send()
.await?;
if response.status().is_success() {
let models_response: ModelsResponse = response.json().await?;
let model_names = models_response
.models
.into_iter()
.map(|m| m.name)
.collect();
let _ = self.event_sender.send(OllamaEvent::ModelsAvailable(model_names));
} else {
let error = format!("Failed to fetch models: {}", response.status());
// We don't have a specific request_id for this, so we'll use a nil UUID
let _ = self.event_sender.send(OllamaEvent::Error {
request_id: Uuid::nil(),
error,
});
}
Ok(())
}
async fn send_error(&self, request_id: Uuid, error: String) {
let _ = self.event_sender.send(OllamaEvent::Error {
request_id,
error,
});
}
}
/// Default Ollama configuration: localhost server, detached event channel.
impl Default for OllamaClient {
    fn default() -> Self {
        // The receiver is dropped immediately, so events emitted by this
        // default client are silently discarded until a real channel is wired.
        let (sender, _receiver) = mpsc::unbounded_channel();
        Self::new(String::from("http://localhost:11434"), sender)
    }
}

View File

@@ -7,7 +7,8 @@ use textwrap::{wrap, Options};
use tui_textarea::TextArea;
use unicode_width::UnicodeWidthStr;
use crate::chat_app::{ChatApp, InputMode};
use crate::chat_app::ChatApp;
use owlen_core::ui::{FocusedPanel, InputMode};
use owlen_core::types::Role;
pub fn render_chat(frame: &mut Frame<'_>, app: &mut ChatApp) {
@@ -436,7 +437,6 @@ fn render_header(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
frame.render_widget(paragraph, inner_area);
}
use crate::chat_app::FocusedPanel;
fn apply_visual_selection(lines: Vec<Line>, selection: Option<((usize, usize), (usize, usize))>) -> Vec<Line> {
if let Some(((start_row, start_col), (end_row, end_col))) = selection {