diff --git a/crates/app/cli/src/main.rs b/crates/app/cli/src/main.rs
index 3136457..75e52c6 100644
--- a/crates/app/cli/src/main.rs
+++ b/crates/app/cli/src/main.rs
@@ -1,12 +1,11 @@
 use clap::{Parser, ValueEnum};
 use color_eyre::eyre::{Result, eyre};
 use config_agent::load_settings;
-use futures_util::TryStreamExt;
 use hooks::{HookEvent, HookManager, HookResult};
-use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
+use llm_ollama::{OllamaClient, OllamaOptions};
 use permissions::{PermissionDecision, Tool};
 use serde::Serialize;
-use std::io::{self, Write};
+use std::io::Write;
 use std::time::{SystemTime, UNIX_EPOCH};
 
 #[derive(Debug, Clone, Copy, ValueEnum)]
@@ -435,12 +434,6 @@ async fn main() -> Result<()> {
         }
     }
 
-    let prompt = if args.prompt.is_empty() {
-        "Say hello".to_string()
-    } else {
-        args.prompt.join(" ")
-    };
-
     let model = args.model.unwrap_or(settings.model);
     let api_key = args.api_key.or(settings.api_key);
 
@@ -461,6 +454,45 @@ async fn main() -> Result<()> {
         stream: true,
     };
 
+    // Check if interactive mode (no prompt provided)
+    if args.prompt.is_empty() {
+        println!("šŸ¤– Owlen Interactive Mode");
+        println!("Model: {}", opts.model);
+        println!("Type your message and press Enter. Press Ctrl+C to exit.\n");
+
+        use std::io::{stdin, BufRead};
+        let stdin = stdin();
+        let mut lines = stdin.lock().lines();
+
+        loop {
+            print!("\n> ");
+            std::io::stdout().flush().ok();
+
+            if let Some(Ok(line)) = lines.next() {
+                let prompt = line.trim();
+                if prompt.is_empty() {
+                    continue;
+                }
+
+                // Run agent loop for this prompt
+                match agent_core::run_agent_loop(&client, prompt, &opts, &perms).await {
+                    Ok(response) => {
+                        println!("\n{}", response);
+                    }
+                    Err(e) => {
+                        eprintln!("\nāŒ Error: {}", e);
+                    }
+                }
+            } else {
+                break;
+            }
+        }
+
+        return Ok(());
+    }
+
+    // Non-interactive mode - process single prompt
+    let prompt = args.prompt.join(" ");
     let start_time = SystemTime::now();
 
     // Handle different output formats