feat(cli): add interactive REPL mode with agent loop
Add proper interactive mode when no prompt is provided:

**Interactive REPL Features**:
- Starts when running `cargo run` with no arguments
- Shows welcome message with model name
- Prompts with `> ` for user input
- Each input runs through the full agent loop with tools
- Continues until Ctrl+C or EOF
- Displays tool calls and results in real-time

**Changes**:
- Detect empty prompt and enter interactive loop
- Use `stdin.lines()` for reading user input
- Call `agent_core::run_agent_loop` for each message
- Handle errors gracefully and continue
- Clean up unused imports

**Usage**:
```bash
# Interactive mode
cargo run

# Single prompt mode
cargo run -- --print "Find all Cargo.toml files"

# Tool subcommands
cargo run -- glob "**/*.rs"
```

Example session:
```
🤖 Owlen Interactive Mode
Model: qwen3:8b

> Find all markdown files
🔧 Tool call: glob with args: {"pattern":"**/*.md"}
✅ Tool result: ./README.md ./CLAUDE.md ./AGENTS.md ...

> exit
```

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
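Note: the REPL delegates each message to `agent_core::run_agent_loop`, whose definition is not part of this diff. Below is a minimal sketch of a signature compatible with the call site `agent_core::run_agent_loop(&client, prompt, &opts, &perms).await`, assuming a `Result<String>` return (the REPL prints the `Ok` value) and a placeholder `PermissionManager` type for the `perms` argument, since the concrete permissions type is not shown in this commit.

```rust
// Hypothetical sketch only: the real run_agent_loop lives in the agent_core crate
// and is not shown in this diff. Types are inferred from the call site and from
// the REPL printing the Ok value as the final response.
use color_eyre::eyre::Result;
use llm_ollama::{OllamaClient, OllamaOptions};

// Assumed placeholder: the diff only shows `&perms`, not the concrete type.
pub struct PermissionManager;

pub async fn run_agent_loop(
    client: &OllamaClient,     // Ollama chat client built earlier in main()
    prompt: &str,              // one user message from the REPL
    opts: &OllamaOptions,      // model name, streaming flag, etc.
    perms: &PermissionManager, // gates tool calls (glob, read, ...) before execution
) -> Result<String> {
    // Drives the model/tool-call loop until the model produces a final answer,
    // which the REPL then prints.
    let _ = (client, prompt, opts, perms);
    unimplemented!("see the agent_core crate for the actual implementation")
}
```

Returning the final assistant text (rather than printing inside `agent_core`) keeps the REPL in control of output formatting; per the commit message, tool calls and results are displayed while the loop runs.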
@@ -1,12 +1,11 @@
use clap::{Parser, ValueEnum};
use color_eyre::eyre::{Result, eyre};
use config_agent::load_settings;
use futures_util::TryStreamExt;
use hooks::{HookEvent, HookManager, HookResult};
use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
use llm_ollama::{OllamaClient, OllamaOptions};
use permissions::{PermissionDecision, Tool};
use serde::Serialize;
use std::io::{self, Write};
use std::io::Write;
use std::time::{SystemTime, UNIX_EPOCH};

#[derive(Debug, Clone, Copy, ValueEnum)]
@@ -435,12 +434,6 @@ async fn main() -> Result<()> {
}
}

let prompt = if args.prompt.is_empty() {
    "Say hello".to_string()
} else {
    args.prompt.join(" ")
};

let model = args.model.unwrap_or(settings.model);
let api_key = args.api_key.or(settings.api_key);

@@ -461,6 +454,45 @@ async fn main() -> Result<()> {
    stream: true,
};

// Check if interactive mode (no prompt provided)
if args.prompt.is_empty() {
    println!("🤖 Owlen Interactive Mode");
    println!("Model: {}", opts.model);
    println!("Type your message and press Enter. Press Ctrl+C to exit.\n");

    use std::io::{stdin, BufRead};
    let stdin = stdin();
    let mut lines = stdin.lock().lines();

    loop {
        print!("\n> ");
        std::io::stdout().flush().ok();

        if let Some(Ok(line)) = lines.next() {
            let prompt = line.trim();
            if prompt.is_empty() {
                continue;
            }

            // Run agent loop for this prompt
            match agent_core::run_agent_loop(&client, prompt, &opts, &perms).await {
                Ok(response) => {
                    println!("\n{}", response);
                }
                Err(e) => {
                    eprintln!("\n❌ Error: {}", e);
                }
            }
        } else {
            break;
        }
    }

    return Ok(());
}

// Non-interactive mode - process single prompt
let prompt = args.prompt.join(" ");
let start_time = SystemTime::now();

// Handle different output formats