feat(ui): add TUI with streaming agent integration and theming

Add a new terminal UI crate (crates/app/ui) built with ratatui providing an
interactive chat interface with real-time LLM streaming and tool visualization.

Features:
- Chat panel with horizontal padding for improved readability
- Input box with cursor navigation and command history
- Status bar with session statistics and uniform background styling
- 7 theme presets: Tokyo Night (default), Dracula, Catppuccin, Nord,
  Synthwave, Rose Pine, and Midnight Ocean
- Theme switching via /theme <name> and /themes commands
- Streaming LLM responses that accumulate into single messages
- Real-time tool call visualization with success/error states
- Session tracking (messages, tokens, tool calls, duration)
- REPL commands: /help, /status, /cost, /checkpoint, /rewind, /clear, /exit

Integration:
- CLI automatically launches TUI mode when running interactively (no prompt)
- Falls back to legacy text REPL with --no-tui flag
- Uses existing agent loop with streaming support
- Supports all existing tools (read, write, edit, glob, grep, bash)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Date: 2025-11-01 22:57:25 +01:00
Parent: 5caf502009
Commit: 09c8c9d83e
14 changed files with 1614 additions and 3 deletions

View File

@@ -19,6 +19,8 @@ tools-slash = { path = "../../tools/slash" }
config-agent = { package = "config-agent", path = "../../platform/config" }
permissions = { path = "../../platform/permissions" }
hooks = { path = "../../platform/hooks" }
ui = { path = "../ui" }
atty = "0.2"
futures-util = "0.3.31"
[dev-dependencies]

View File

@@ -149,6 +149,9 @@ struct Args {
/// Output format (text, json, stream-json)
#[arg(long, value_enum, default_value = "text")]
output_format: OutputFormat,
/// Disable TUI and use legacy text-based REPL
#[arg(long)]
no_tui: bool,
#[arg()]
prompt: Vec<String>,
#[command(subcommand)]
@@ -434,15 +437,15 @@ async fn main() -> Result<()> {
}
}
let model = args.model.unwrap_or(settings.model);
let api_key = args.api_key.or(settings.api_key);
let model = args.model.unwrap_or(settings.model.clone());
let api_key = args.api_key.or(settings.api_key.clone());
// Use Ollama Cloud when model has "-cloud" suffix AND API key is set
let use_cloud = model.ends_with("-cloud") && api_key.is_some();
let client = if use_cloud {
OllamaClient::with_cloud().with_api_key(api_key.unwrap())
} else {
let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
let base_url = args.ollama_url.unwrap_or(settings.ollama_url.clone());
let mut client = OllamaClient::new(base_url);
if let Some(key) = api_key {
client = client.with_api_key(key);
@@ -456,6 +459,13 @@ async fn main() -> Result<()> {
// Check if interactive mode (no prompt provided)
if args.prompt.is_empty() {
// Use TUI mode unless --no-tui flag is set or not a TTY
if !args.no_tui && atty::is(atty::Stream::Stdout) {
// Launch TUI
return ui::run(client, opts, perms, settings).await;
}
// Legacy text-based REPL
println!("🤖 Owlen Interactive Mode");
println!("Model: {}", opts.model);
println!("Mode: {:?}", settings.mode);