Files
owlen/crates/owlen-tui/tests/chat_snapshots.rs
vikingowl f29f306692 test(tui): add golden streaming flows for chat + tool calls
Acceptance-Criteria:
- Snapshot coverage for idle chat and tool-call streaming states protects header, toast, and transcript rendering.
- Tests use deterministic settings so reruns pass without manual snapshot acceptance.

Test-Notes:
- INSTA_UPDATE=always cargo test -p owlen-tui --test chat_snapshots
- cargo test
2025-10-25 07:19:05 +02:00

200 lines
5.9 KiB
Rust

use std::sync::Arc;
use async_trait::async_trait;
use insta::{assert_snapshot, with_settings};
use owlen_core::{
Config, Mode, Provider,
session::SessionController,
storage::StorageManager,
types::{Message, ToolCall},
ui::{NoOpUiController, UiController},
};
use owlen_tui::ChatApp;
use owlen_tui::ui::render_chat;
use ratatui::{Terminal, backend::TestBackend};
use tempfile::tempdir;
use tokio::sync::mpsc;
/// Deterministic in-process provider so snapshot tests never touch a network
/// or a real model backend.
struct StubProvider;

#[async_trait]
impl Provider for StubProvider {
    fn name(&self) -> &str {
        "stub-provider"
    }

    /// Advertises a single fixed model so model selection is stable across runs.
    async fn list_models(&self) -> owlen_core::Result<Vec<owlen_core::types::ModelInfo>> {
        let model = owlen_core::types::ModelInfo {
            id: "stub-model".into(),
            name: "Stub Model".into(),
            description: Some("Stub model for golden snapshot tests".into()),
            provider: self.name().into(),
            context_window: Some(8192),
            capabilities: vec!["chat".into(), "tool-use".into()],
            supports_tools: true,
        };
        Ok(vec![model])
    }

    /// Returns a canned, final (non-streaming) completion regardless of input.
    async fn send_prompt(
        &self,
        _request: owlen_core::types::ChatRequest,
    ) -> owlen_core::Result<owlen_core::types::ChatResponse> {
        let response = owlen_core::types::ChatResponse {
            message: Message::assistant("stub completion".into()),
            usage: None,
            is_streaming: false,
            is_final: true,
        };
        Ok(response)
    }

    /// Streams nothing: tests seed conversation state directly instead.
    async fn stream_prompt(
        &self,
        _request: owlen_core::types::ChatRequest,
    ) -> owlen_core::Result<owlen_core::ChatStream> {
        Ok(Box::pin(futures_util::stream::empty()))
    }

    /// Always healthy — there is no real backend to probe.
    async fn health_check(&self) -> owlen_core::Result<()> {
        Ok(())
    }

    fn as_any(&self) -> &(dyn std::any::Any + Send + Sync) {
        self
    }
}
/// Serialises a rendered `ratatui` buffer as one double-quoted line per
/// terminal row (insta-friendly), preserving every cell symbol verbatim.
fn buffer_to_string(buffer: &ratatui::buffer::Buffer) -> String {
    let width = buffer.area.width as usize;
    let height = buffer.area.height as usize;
    // Preallocate: per row, two quotes + newline + at least one byte per cell.
    // Symbols may be multi-byte, so this is a lower bound, but it avoids the
    // repeated grow-and-copy of starting from an empty String.
    let mut output = String::with_capacity(height * (width + 3));
    for y in 0..buffer.area.height {
        output.push('"');
        for x in 0..buffer.area.width {
            output.push_str(buffer[(x, y)].symbol());
        }
        output.push('"');
        output.push('\n');
    }
    output
}
/// Builds a fully-wired `ChatApp` over stub infrastructure (stub provider,
/// temp SQLite storage, no-op UI) with deterministic config so snapshots are
/// stable. `configure` runs against the session just before the app is built,
/// letting each test seed conversation state.
async fn build_chat_app<F>(configure: F) -> ChatApp
where
    F: FnOnce(&mut SessionController),
{
    // NOTE(review): `temp_dir` is dropped when this function returns, which
    // deletes the directory backing the SQLite database while the returned
    // app still holds `storage`. This works on Unix because open file handles
    // keep the data reachable, but it is fragile (e.g. on Windows) — consider
    // returning the TempDir guard alongside the app. TODO confirm.
    let temp_dir = tempdir().expect("temp dir");
    let storage =
        StorageManager::with_database_path(temp_dir.path().join("owlen-tui-snapshots.db"))
            .await
            .expect("storage");
    let storage = Arc::new(storage);
    // Deterministic settings: fixed model, no onboarding/timestamps/consent
    // prompts, no encryption — anything time- or prompt-dependent would make
    // golden snapshots flaky.
    let mut config = Config::default();
    config.general.default_model = Some("stub-model".into());
    config.general.enable_streaming = true;
    config.privacy.encrypt_local_data = false;
    config.privacy.require_consent_per_session = false;
    config.ui.show_onboarding = false;
    config.ui.show_timestamps = false;
    let provider: Arc<dyn Provider> = Arc::new(StubProvider);
    let ui: Arc<dyn UiController> = Arc::new(NoOpUiController);
    // Controller events flow from the session into the app via this channel.
    let (event_tx, controller_event_rx) = mpsc::unbounded_channel();
    let mut session = SessionController::new(
        Arc::clone(&provider),
        config,
        Arc::clone(&storage),
        ui,
        true,
        Some(event_tx),
    )
    .await
    .expect("session controller");
    session
        .set_operating_mode(Mode::Chat)
        .await
        .expect("chat mode");
    // Let the caller seed messages/streams before the app snapshots state.
    configure(&mut session);
    let (app, mut session_rx) = ChatApp::new(session, controller_event_rx)
        .await
        .expect("chat app");
    // Close the session receiver: these tests render synchronously and never
    // pump the event loop.
    session_rx.close();
    app
}
/// Draws the chat UI into an off-screen `TestBackend` of the given size and
/// returns the resulting cell grid as a quoted-line string for snapshotting.
fn render_snapshot(app: &mut ChatApp, width: u16, height: u16) -> String {
    let mut terminal = Terminal::new(TestBackend::new(width, height)).expect("terminal");
    terminal
        .draw(|frame| render_chat(frame, app))
        .expect("render chat");
    buffer_to_string(terminal.backend().buffer())
}
/// Golden snapshot of a freshly-initialised chat screen with no messages,
/// pinning header and empty-transcript rendering at 100x35.
#[tokio::test(flavor = "multi_thread")]
async fn render_chat_idle_snapshot() {
    let mut app = build_chat_app(|_| {}).await;
    with_settings!({ snapshot_suffix => "100x35" }, {
        assert_snapshot!("chat_idle_snapshot", render_snapshot(&mut app, 100, 35));
    });
}
/// Golden snapshot of a mid-stream tool-call exchange plus a warning toast,
/// pinning transcript, tool-call metadata, and header/status rendering at
/// 80x24. The conversation is seeded directly (no provider round-trip) so the
/// rendered state is fully deterministic.
#[tokio::test(flavor = "multi_thread")]
async fn render_chat_tool_call_snapshot() {
    let mut app = build_chat_app(|session| {
        let conversation = session.conversation_mut();
        // User turn, then open a streaming assistant response with a
        // placeholder shown while the "tool" runs.
        conversation.push_user_message("What happened in the Rust ecosystem today?");
        let stream_id = conversation.start_streaming_response();
        conversation
            .set_stream_placeholder(stream_id, "Consulting the knowledge base…")
            .expect("placeholder");
        // Attach a tool call to the in-flight message, then a partial chunk —
        // ordering matters here: metadata before content, as a real stream
        // would deliver it.
        let tool_call = ToolCall {
            id: "call-search-1".into(),
            name: "web_search".into(),
            arguments: serde_json::json!({ "query": "Rust language news" }),
        };
        conversation
            .set_tool_calls_on_message(stream_id, vec![tool_call.clone()])
            .expect("tool call metadata");
        conversation
            .append_stream_chunk(stream_id, "Found multiple articles…", false)
            .expect("stream chunk");
        // Tool result message, correlated to the call via its id.
        let tool_message = Message::tool(
            tool_call.id.clone(),
            "Rust 1.85 released with generics cleanups and faster async compilation.".to_string(),
        );
        conversation.push_message(tool_message);
        // Final assistant summary closing out the exchange.
        let assistant_summary = Message::assistant(
            "Summarising the latest Rust release and the async runtime updates.".into(),
        );
        conversation.push_message(assistant_summary);
    })
    .await;
    // Surface quota toast to exercise header/status rendering.
    app.push_toast(
        owlen_tui::toast::ToastLevel::Warning,
        "Cloud usage is at 82% of the hourly quota.",
    );
    with_settings!({
        snapshot_suffix => "80x24"
    }, {
        let snapshot = render_snapshot(&mut app, 80, 24);
        assert_snapshot!("chat_tool_call_snapshot", snapshot);
    });
}