Enhance loading feedback: add loading animation for Assistant replies, improve TUI message updates, and refine response handling logic. Update README to reflect roadmap progress.
@@ -69,6 +69,9 @@ pub struct ChatApp {
     session_tx: mpsc::UnboundedSender<SessionEvent>,
     streaming: HashSet<Uuid>,
     textarea: TextArea<'static>, // Advanced text input widget
+    pending_llm_request: bool, // Flag to indicate LLM request needs to be processed
+    loading_animation_frame: usize, // Frame counter for loading animation
+    is_loading: bool, // Whether we're currently loading a response
 }
 
 impl ChatApp {
@@ -93,6 +96,9 @@ impl ChatApp {
             session_tx,
             streaming: std::collections::HashSet::new(),
             textarea,
+            pending_llm_request: false,
+            loading_animation_frame: 0,
+            is_loading: false,
         };
 
         (app, session_rx)
@@ -286,7 +292,7 @@ impl ChatApp {
             KeyCode::Enter if key.modifiers.is_empty() => {
                 // Send message and return to normal mode
                 self.sync_textarea_to_buffer();
-                self.try_send_message().await?;
+                self.send_user_message_and_request_response();
                 // Clear the textarea by setting it to empty
                 self.textarea = TextArea::default();
                 configure_textarea_defaults(&mut self.textarea);
@@ -407,10 +413,12 @@ impl ChatApp {
                 }
                 if response.is_final {
                     self.streaming.remove(&message_id);
-                    self.status = "Response complete".to_string();
+                    self.stop_loading_animation();
+                    self.status = "Ready".to_string();
                 }
             }
             SessionEvent::StreamError { message } => {
+                self.stop_loading_animation();
                 self.error = Some(message);
             }
         }
@@ -474,22 +482,43 @@ impl ChatApp {
         Ok(())
     }
 
-    async fn try_send_message(&mut self) -> Result<()> {
+    fn send_user_message_and_request_response(&mut self) {
         let content = self.controller.input_buffer().text().trim().to_string();
         if content.is_empty() {
            self.error = Some("Cannot send empty message".to_string());
            return;
         }
 
+        // Step 1: Add user message to conversation immediately (synchronous)
+        let message = self.controller.input_buffer_mut().commit_to_history();
+        self.controller.conversation_mut().push_user_message(message.clone());
+        self.scroll_to_bottom();
+
+        // Step 2: Set flag to process LLM request on next event loop iteration
+        self.pending_llm_request = true;
+        self.status = "Message sent".to_string();
+        self.error = None;
+    }
+
+    pub async fn process_pending_llm_request(&mut self) -> Result<()> {
+        if !self.pending_llm_request {
+            return Ok(());
+        }
+
+        self.scroll_to_bottom();
+        self.pending_llm_request = false;
+
+        // Step 1: Show loading model status and start animation
+        self.status = format!("Loading model '{}'...", self.controller.selected_model());
+        self.start_loading_animation();
 
-        let message = self.controller.input_buffer_mut().commit_to_history();
         let mut parameters = ChatParameters::default();
         parameters.stream = self.controller.config().general.enable_streaming;
 
-        match self.controller.send_message(message, parameters).await {
+        // Step 2: Start the actual request
+        match self.controller.send_request_with_current_conversation(parameters).await {
             Ok(SessionOutcome::Complete(_response)) => {
-                self.status = "Response received".to_string();
+                self.stop_loading_animation();
+                self.status = "Ready".to_string();
                 self.error = None;
                 Ok(())
             }
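For context (not part of this diff), here is a minimal sketch of how the TUI's main loop could drive this two-step flow, assuming a ratatui + crossterm setup. `run_loop`, `render`, and `handle_key_event` are placeholder names rather than items from this repository, and the 100 ms tick is an arbitrary choice:

```rust
use std::time::Duration;

use anyhow::Result;
use crossterm::event::{self, Event};
use ratatui::{backend::Backend, Terminal};

// Sketch only: drive the deferred LLM request and the spinner from the main loop.
async fn run_loop<B: Backend>(terminal: &mut Terminal<B>, app: &mut ChatApp) -> Result<()> {
    loop {
        // Redraw every tick so the current spinner frame is shown.
        terminal.draw(|frame| render(frame, app))?; // `render` is a placeholder

        // Poll briefly instead of blocking, so the loop keeps ticking while idle.
        if event::poll(Duration::from_millis(100))? {
            if let Event::Key(key) = event::read()? {
                app.handle_key_event(key).await?; // placeholder for the app's key handler
            }
        }

        // Advance the animation and start any request queued by Enter.
        app.advance_loading_animation();
        app.process_pending_llm_request().await?;
    }
}
```

Deferring the request behind `pending_llm_request` lets the user's message render immediately on the next frame instead of waiting for the model call to start.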
@@ -497,17 +526,19 @@ impl ChatApp {
                 response_id,
                 stream,
             }) => {
+                // Step 3: Model loaded, now generating response
+                self.status = "Generating response...".to_string();
+
                 self.spawn_stream(response_id, stream);
                 match self
                     .controller
-                    .mark_stream_placeholder(response_id, "Loading...")
+                    .mark_stream_placeholder(response_id, "▌")
                 {
                     Ok(_) => self.error = None,
                     Err(err) => {
-                        self.error = Some(format!("Could not set loading placeholder: {}", err));
+                        self.error = Some(format!("Could not set response placeholder: {}", err));
                     }
                 }
-                self.status = "Waiting for response...".to_string();
                 Ok(())
             }
             Err(err) => {
@@ -522,13 +553,15 @@ impl ChatApp {
                     self.mode = InputMode::ProviderSelection;
                 } else {
                     self.error = Some(message);
-                    self.status = "Send failed".to_string();
+                    self.status = "Request failed".to_string();
                 }
+                self.stop_loading_animation();
                 Ok(())
             }
         }
     }
+
 
     fn sync_selected_model_index(&mut self) {
         let current_model_id = self.controller.selected_model().to_string();
         let filtered_models: Vec<&ModelInfo> = self
@@ -568,6 +601,39 @@ impl ChatApp {
         self.content_width = content_width;
     }
 
+    pub fn start_loading_animation(&mut self) {
+        self.is_loading = true;
+        self.loading_animation_frame = 0;
+    }
+
+    pub fn stop_loading_animation(&mut self) {
+        self.is_loading = false;
+    }
+
+    pub fn advance_loading_animation(&mut self) {
+        if self.is_loading {
+            self.loading_animation_frame = (self.loading_animation_frame + 1) % 8; // 8-frame animation
+        }
+    }
+
+    pub fn get_loading_indicator(&self) -> &'static str {
+        if !self.is_loading {
+            return "";
+        }
+
+        match self.loading_animation_frame {
+            0 => "⠋",
+            1 => "⠙",
+            2 => "⠹",
+            3 => "⠸",
+            4 => "⠼",
+            5 => "⠴",
+            6 => "⠦",
+            7 => "⠧",
+            _ => "⠋",
+        }
+    }
+
     fn is_at_bottom(&self) -> bool {
         let total_lines = self.calculate_total_content_lines();
         let max_scroll = total_lines.saturating_sub(self.viewport_height);
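One way the new indicator could end up on screen (again a sketch, not code from this commit) is to prepend `get_loading_indicator()` to the status text when drawing the status bar. `draw_status_bar`, the `Rect` argument, and the yellow styling are illustrative, and this assumes the `status` string is visible to the rendering code:

```rust
use ratatui::{
    layout::Rect,
    style::{Color, Style},
    text::{Line, Span},
    widgets::{Block, Borders, Paragraph},
    Frame,
};

// Sketch only: render "<spinner> <status>" in a bordered status bar.
fn draw_status_bar(frame: &mut Frame, app: &ChatApp, area: Rect) {
    let indicator = app.get_loading_indicator(); // "" when not loading
    let line = Line::from(vec![
        Span::styled(indicator, Style::default().fg(Color::Yellow)),
        Span::raw(if indicator.is_empty() { "" } else { " " }),
        Span::raw(app.status.as_str()), // assumes the status String is accessible here
    ]);
    frame.render_widget(
        Paragraph::new(line).block(Block::default().borders(Borders::ALL)),
        area,
    );
}
```

Drawing the status bar in the same pass that redraws the conversation keeps the braille spinner in step with `advance_loading_animation()` each tick.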