From f29f30669235db7acb5d2d7137cbc4c55140a0c3 Mon Sep 17 00:00:00 2001
From: vikingowl
Date: Sat, 25 Oct 2025 07:19:05 +0200
Subject: [PATCH] test(tui): add golden streaming flows for chat + tool calls

Acceptance-Criteria:
- Snapshot coverage for idle chat and tool-call streaming states protects header, toast, and transcript rendering.
- Tests use deterministic settings so reruns pass without manual snapshot acceptance.

Test-Notes:
- INSTA_UPDATE=always cargo test -p owlen-tui --test chat_snapshots
- cargo test
---
 crates/owlen-tui/Cargo.toml                   |   1 +
 crates/owlen-tui/tests/chat_snapshots.rs      | 199 ++++++++++++++++++
 ..._snapshots__chat_idle_snapshot@100x35.snap |  39 ++++
 ...pshots__chat_tool_call_snapshot@80x24.snap |  28 +++
 4 files changed, 267 insertions(+)
 create mode 100644 crates/owlen-tui/tests/chat_snapshots.rs
 create mode 100644 crates/owlen-tui/tests/snapshots/chat_snapshots__chat_idle_snapshot@100x35.snap
 create mode 100644 crates/owlen-tui/tests/snapshots/chat_snapshots__chat_tool_call_snapshot@80x24.snap

diff --git a/crates/owlen-tui/Cargo.toml b/crates/owlen-tui/Cargo.toml
index 326f6fd..6ea2d06 100644
--- a/crates/owlen-tui/Cargo.toml
+++ b/crates/owlen-tui/Cargo.toml
@@ -49,3 +49,4 @@ log = { workspace = true }
 [dev-dependencies]
 tokio-test = { workspace = true }
 tempfile = { workspace = true }
+insta = { version = "1.40", features = ["glob"] }
diff --git a/crates/owlen-tui/tests/chat_snapshots.rs b/crates/owlen-tui/tests/chat_snapshots.rs
new file mode 100644
index 0000000..5574fdb
--- /dev/null
+++ b/crates/owlen-tui/tests/chat_snapshots.rs
@@ -0,0 +1,199 @@
+use std::sync::Arc;
+
+use async_trait::async_trait;
+use insta::{assert_snapshot, with_settings};
+use owlen_core::{
+    Config, Mode, Provider,
+    session::SessionController,
+    storage::StorageManager,
+    types::{Message, ToolCall},
+    ui::{NoOpUiController, UiController},
+};
+use owlen_tui::ChatApp;
+use owlen_tui::ui::render_chat;
+use ratatui::{Terminal, backend::TestBackend};
+use tempfile::tempdir;
+use tokio::sync::mpsc;
+
+struct StubProvider;
+
+#[async_trait]
+impl Provider for StubProvider {
+    fn name(&self) -> &str {
+        "stub-provider"
+    }
+
+    async fn list_models(&self) -> owlen_core::Result<Vec<owlen_core::types::ModelInfo>> {
+        Ok(vec![owlen_core::types::ModelInfo {
+            id: "stub-model".into(),
+            name: "Stub Model".into(),
+            description: Some("Stub model for golden snapshot tests".into()),
+            provider: self.name().into(),
+            context_window: Some(8192),
+            capabilities: vec!["chat".into(), "tool-use".into()],
+            supports_tools: true,
+        }])
+    }
+
+    async fn send_prompt(
+        &self,
+        _request: owlen_core::types::ChatRequest,
+    ) -> owlen_core::Result<owlen_core::types::ChatResponse> {
+        Ok(owlen_core::types::ChatResponse {
+            message: Message::assistant("stub completion".into()),
+            usage: None,
+            is_streaming: false,
+            is_final: true,
+        })
+    }
+
+    async fn stream_prompt(
+        &self,
+        _request: owlen_core::types::ChatRequest,
+    ) -> owlen_core::Result {
+        Ok(Box::pin(futures_util::stream::empty()))
+    }
+
+    async fn health_check(&self) -> owlen_core::Result<()> {
+        Ok(())
+    }
+
+    fn as_any(&self) -> &(dyn std::any::Any + Send + Sync) {
+        self
+    }
+}
+
+fn buffer_to_string(buffer: &ratatui::buffer::Buffer) -> String {
+    let mut output = String::new();
+
+    for y in 0..buffer.area.height {
+        output.push('"');
+        for x in 0..buffer.area.width {
+            output.push_str(buffer[(x, y)].symbol());
+        }
+        output.push('"');
+        output.push('\n');
+    }
+
+    output
+}
+
+async fn build_chat_app<F>(configure: F) -> ChatApp
+where
+    F: FnOnce(&mut SessionController),
+{
+    let temp_dir = tempdir().expect("temp dir");
+    let storage =
+        StorageManager::with_database_path(temp_dir.path().join("owlen-tui-snapshots.db"))
+            .await
+            .expect("storage");
+    let storage = Arc::new(storage);
+
+    let mut config = Config::default();
+    config.general.default_model = Some("stub-model".into());
+    config.general.enable_streaming = true;
+    config.privacy.encrypt_local_data = false;
+    config.privacy.require_consent_per_session = false;
+    config.ui.show_onboarding = false;
+    config.ui.show_timestamps = false;
+    let provider: Arc<dyn Provider> = Arc::new(StubProvider);
+    let ui: Arc<dyn UiController> = Arc::new(NoOpUiController);
+    let (event_tx, controller_event_rx) = mpsc::unbounded_channel();
+
+    let mut session = SessionController::new(
+        Arc::clone(&provider),
+        config,
+        Arc::clone(&storage),
+        ui,
+        true,
+        Some(event_tx),
+    )
+    .await
+    .expect("session controller");
+
+    session
+        .set_operating_mode(Mode::Chat)
+        .await
+        .expect("chat mode");
+
+    configure(&mut session);
+
+    let (app, mut session_rx) = ChatApp::new(session, controller_event_rx)
+        .await
+        .expect("chat app");
+    session_rx.close();
+
+    app
+}
+
+fn render_snapshot(app: &mut ChatApp, width: u16, height: u16) -> String {
+    let backend = TestBackend::new(width, height);
+    let mut terminal = Terminal::new(backend).expect("terminal");
+
+    terminal
+        .draw(|frame| render_chat(frame, app))
+        .expect("render chat");
+
+    let buffer = terminal.backend().buffer();
+    buffer_to_string(buffer)
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn render_chat_idle_snapshot() {
+    let mut app = build_chat_app(|_| {}).await;
+
+    with_settings!({ snapshot_suffix => "100x35" }, {
+        let snapshot = render_snapshot(&mut app, 100, 35);
+        assert_snapshot!("chat_idle_snapshot", snapshot);
+    });
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn render_chat_tool_call_snapshot() {
+    let mut app = build_chat_app(|session| {
+        let conversation = session.conversation_mut();
+        conversation.push_user_message("What happened in the Rust ecosystem today?");
+
+        let stream_id = conversation.start_streaming_response();
+        conversation
+            .set_stream_placeholder(stream_id, "Consulting the knowledge base…")
+            .expect("placeholder");
+
+        let tool_call = ToolCall {
+            id: "call-search-1".into(),
+            name: "web_search".into(),
+            arguments: serde_json::json!({ "query": "Rust language news" }),
+        };
+        conversation
+            .set_tool_calls_on_message(stream_id, vec![tool_call.clone()])
+            .expect("tool call metadata");
+        conversation
+            .append_stream_chunk(stream_id, "Found multiple articles…", false)
+            .expect("stream chunk");
+
+        let tool_message = Message::tool(
+            tool_call.id.clone(),
+            "Rust 1.85 released with generics cleanups and faster async compilation.".to_string(),
+        );
+        conversation.push_message(tool_message);
+
+        let assistant_summary = Message::assistant(
+            "Summarising the latest Rust release and the async runtime updates.".into(),
+        );
+        conversation.push_message(assistant_summary);
+    })
+    .await;
+
+    // Surface quota toast to exercise header/status rendering.
+    app.push_toast(
+        owlen_tui::toast::ToastLevel::Warning,
+        "Cloud usage is at 82% of the hourly quota.",
+    );
+
+    with_settings!({
+        snapshot_suffix => "80x24"
+    }, {
+        let snapshot = render_snapshot(&mut app, 80, 24);
+        assert_snapshot!("chat_tool_call_snapshot", snapshot);
+    });
+}
diff --git a/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_idle_snapshot@100x35.snap b/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_idle_snapshot@100x35.snap
new file mode 100644
index 0000000..c94fabf
--- /dev/null
+++ b/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_idle_snapshot@100x35.snap
@@ -0,0 +1,39 @@
+---
+source: crates/owlen-tui/tests/chat_snapshots.rs
+expression: snapshot
+---
+" "
+" πŸ¦‰ OWLEN v0.2.0 Β· Mode Chat Β· Focus Input ollama_local Β· stub-model "
+" "
+" Context metrics not available Cloud usage pending "
+" "
+" "
+" "
+" β–Œ Chat Β· stub-model PgUp/PgDn scroll Β· g/G jump Β· s save Β· Ctrl+2 focus "
+" "
+" No messages yet. Press 'i' to start typing. "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" "
+" Input Press i to start typing Β· Ctrl+5 focus "
+" "
+" "
+" System/Status "
+" "
+" "
+" NORMAL β”‚ CHAT β”‚ INPUowlen-tui Β· 1:1 Β· UTF-8 ollama_local β–Έ stub-model Β· LSP:βœ“ "
+" "
+" "
+" "
diff --git a/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_tool_call_snapshot@80x24.snap b/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_tool_call_snapshot@80x24.snap
new file mode 100644
index 0000000..be7f3c2
--- /dev/null
+++ b/crates/owlen-tui/tests/snapshots/chat_snapshots__chat_tool_call_snapshot@80x24.snap
@@ -0,0 +1,28 @@
+---
+source: crates/owlen-tui/tests/chat_snapshots.rs
+expression: snapshot
+---
+" "
+" πŸ¦‰ OWLEN v0.2.0 Β· Mode Chat Β· Focus Input ollama_local Β· stub-model "
+" "
+" Context metrics not available Cloud usage pending "
+" "
+" "
+" "
+" β–Œ Chat Β· stub-model PgUp/PgDn scroll Β· g/G jump Β· s save Β· Ctrl+2 focus "
+" β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” "
+" β”‚ lation. β”‚ WARN Cloud usage is at 82% of the hourly β”‚ "
+" β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ "
+" β”Œ πŸ€– Assistant ────────────────────────────────────────────────────────┐ "
+" β”‚ Summarising the latest Rust release and the async runtime update β”‚ "
+" β”‚ s. β”‚ "
+" "
+" Input Press i to start typing Β· Ctrl+5 focus "
+" "
+" "
+" System/Status "
+" "
+" NORMAL β”‚ CHAT β”‚ INPUowlen-tui Β· 1:1 Β· UTF-8 ollama_local β–Έ stub-mode "
+" "
+" "
+" "