Files
owlen/crates/app/ui/src/app.rs
vikingowl 84fa08ab45 feat(plan): Add plan execution system with external tool support
Plan Execution System:
- Add PlanStep, AccumulatedPlan types for multi-turn tool call accumulation
- Implement AccumulatedPlanStatus for tracking plan lifecycle
- Support selective approval of proposed tool calls before execution

External Tools Integration:
- Add ExternalToolDefinition and ExternalToolTransport to plugins crate
- Extend ToolContext with external_tools registry
- Add external_tool_to_llm_tool conversion for LLM compatibility

JSON-RPC Communication:
- Add jsonrpc crate for JSON-RPC 2.0 protocol support
- Enable stdio-based communication with external tool servers

UI & Engine Updates:
- Add plan_panel.rs component for displaying accumulated plans
- Wire plan mode into engine loop
- Add plan mode integration tests

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-12-26 22:47:54 +01:00

1928 lines
81 KiB
Rust

use crate::{
components::{
Autocomplete, AutocompleteResult, ChatMessage, ChatPanel, CommandHelp, InputBox,
ModelPicker, PermissionPopup, PickerResult, PlanPanel, ProviderTabs, StatusBar, TodoPanel,
},
events::{handle_key_event, AppEvent},
layout::AppLayout,
provider_manager::ProviderManager,
theme::{Provider, Theme, VimMode},
};
use tools_plan::AccumulatedPlanStatus;
use tools_todo::TodoList;
use agent_core::{CheckpointManager, SessionHistory, SessionStats, ToolContext, execute_tool, get_tool_definitions};
use color_eyre::eyre::Result;
use crossterm::{
event::{Event, EventStream, EnableMouseCapture, DisableMouseCapture, KeyCode},
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
ExecutableCommand,
};
use futures::StreamExt;
use llm_core::{ChatMessage as LLMChatMessage, ChatOptions, LlmProvider, ProviderType};
use permissions::{Action, PermissionDecision, PermissionManager, Tool as PermTool};
use ratatui::{
backend::CrosstermBackend,
layout::Rect,
style::Style,
text::{Line, Span},
widgets::Paragraph,
Terminal,
};
use serde_json::Value;
use std::{io::stdout, path::PathBuf, sync::Arc, time::SystemTime};
use tokio::sync::mpsc;
/// Holds information about a pending tool execution
///
/// Populated when the agent requests a tool call that needs user
/// approval; consumed when the permission popup is resolved (see the
/// `PermissionOption` handling in the event loop).
#[allow(dead_code)] // Fields used for permission popup display
struct PendingToolCall {
    // Name of the tool awaiting approval (e.g. "read", "write", "bash").
    tool_name: String,
    // Raw JSON arguments the tool would be invoked with.
    arguments: Value,
    // Permission-system identifier, used when adding "always allow" rules.
    perm_tool: PermTool,
    // Optional context the permission rule is scoped to (None = tool-wide).
    context: Option<String>,
}
/// Provider mode - single client or multi-provider manager
///
/// Determines how the app obtains its LLM client and whether provider
/// switching / model picking are available.
enum ProviderMode {
    /// Legacy single-provider mode; switching and model picking are disabled.
    Single(Arc<dyn LlmProvider>),
    /// Multi-provider with switching support (owned)
    Multi(ProviderManager),
    /// Multi-provider with shared manager (for engine integration).
    /// The mutex is shared with the engine; UI-side accessors either
    /// `block_on` the lock or use `try_lock`.
    Shared(Arc<tokio::sync::Mutex<ProviderManager>>),
}
use agent_core::messages::{Message, UserAction, AgentResponse};
use agent_core::state::{AppState as EngineState};
/// Top-level TUI application: owns every UI component, the session
/// state, the LLM provider handle, and (optionally) the channels used
/// to talk to a background agent engine.
pub struct TuiApp {
    // UI components
    chat_panel: ChatPanel,
    input_box: InputBox,
    status_bar: StatusBar,
    todo_panel: TodoPanel,
    plan_panel: PlanPanel,
    // Modal shown while a tool call awaits user approval; None when idle.
    permission_popup: Option<PermissionPopup>,
    autocomplete: Autocomplete,
    command_help: CommandHelp,
    provider_tabs: ProviderTabs,
    model_picker: ModelPicker,
    theme: Theme,
    // Session state
    stats: SessionStats,
    history: SessionHistory,
    checkpoint_mgr: CheckpointManager,
    todo_list: TodoList,
    // Provider state
    provider_mode: ProviderMode,
    opts: ChatOptions,
    perms: PermissionManager,
    #[allow(dead_code)] // Reserved for tool execution context
    ctx: ToolContext,
    #[allow(dead_code)]
    settings: config_agent::Settings,
    // Engine integration (all None until `set_engine` is called)
    engine_tx: Option<mpsc::Sender<Message>>,
    shared_state: Option<Arc<tokio::sync::Mutex<EngineState>>>,
    engine_rx: Option<mpsc::Receiver<Message>>,
    // Runtime state
    // Main loop runs while true; cleared to exit.
    running: bool,
    // True while an LLM response is streaming in.
    waiting_for_llm: bool,
    // Tool call currently awaiting a permission decision, if any.
    pending_tool: Option<PendingToolCall>,
    // One-shot channel used to answer an in-process permission request.
    permission_tx: Option<tokio::sync::oneshot::Sender<bool>>,
    vim_mode: VimMode,
}
impl TuiApp {
/// Set the engine components
/// Attach a running engine to this app: a sender for user actions, the
/// shared engine state, and a receiver for agent responses. All three
/// are stored as `Some(..)` and picked up by `run`.
pub fn set_engine(
    &mut self,
    tx: mpsc::Sender<Message>,
    state: Arc<tokio::sync::Mutex<EngineState>>,
    rx: mpsc::Receiver<Message>,
) {
    // The three assignments are independent; order is irrelevant.
    self.engine_rx = Some(rx);
    self.shared_state = Some(state);
    self.engine_tx = Some(tx);
}
/// Create a new TUI app with a single provider (legacy mode)
/// Create a new TUI app with a single provider (legacy mode)
///
/// The provider tab shown in the UI is derived from `client.name()`;
/// any unrecognized name falls back to the Ollama tab. Engine wiring is
/// left unset and may be attached later via `set_engine`.
pub fn new(
    client: Arc<dyn LlmProvider>,
    opts: ChatOptions,
    perms: PermissionManager,
    settings: config_agent::Settings,
) -> Result<Self> {
    let theme = Theme::default();
    let mode = perms.mode();
    // Determine provider from client name
    let provider = match client.name() {
        "anthropic" => Provider::Claude,
        "openai" => Provider::OpenAI,
        // Anything else is treated as a local Ollama provider.
        _ => Provider::Ollama,
    };
    Ok(Self {
        // All UI components share clones of the default theme.
        chat_panel: ChatPanel::new(theme.clone()),
        input_box: InputBox::new(theme.clone()),
        status_bar: StatusBar::new(opts.model.clone(), mode, theme.clone()),
        todo_panel: TodoPanel::new(theme.clone()),
        plan_panel: PlanPanel::new(theme.clone()),
        permission_popup: None,
        autocomplete: Autocomplete::new(theme.clone()),
        command_help: CommandHelp::new(theme.clone()),
        provider_tabs: ProviderTabs::with_provider(provider, theme.clone()),
        model_picker: ModelPicker::new(theme.clone()),
        theme,
        stats: SessionStats::new(),
        history: SessionHistory::new(),
        // Checkpoints live under the working directory.
        checkpoint_mgr: CheckpointManager::new(PathBuf::from(".owlen/checkpoints")),
        todo_list: TodoList::new(),
        provider_mode: ProviderMode::Single(client),
        opts,
        perms,
        ctx: ToolContext::new(),
        settings,
        // Engine channels are attached later (if at all) via `set_engine`.
        engine_tx: None,
        shared_state: None,
        engine_rx: None,
        running: true,
        waiting_for_llm: false,
        pending_tool: None,
        permission_tx: None,
        // Start in Insert mode so the user can type immediately.
        vim_mode: VimMode::Insert,
    })
}
/// Create a new TUI app with multi-provider support
/// Create a new TUI app with multi-provider support
///
/// Reads the initial provider and model from the (owned) manager, maps
/// the provider type onto a UI tab, and enables the status bar's
/// planning indicator when `settings.mode == "plan"`.
pub fn with_provider_manager(
    provider_manager: ProviderManager,
    perms: PermissionManager,
    settings: config_agent::Settings,
) -> Result<Self> {
    let theme = Theme::default();
    let mode = perms.mode();
    // Get initial provider and model
    let current_provider = provider_manager.current_provider_type();
    let current_model = provider_manager.current_model().to_string();
    let provider = match current_provider {
        ProviderType::Anthropic => Provider::Claude,
        ProviderType::OpenAI => Provider::OpenAI,
        ProviderType::Ollama => Provider::Ollama,
    };
    let opts = ChatOptions::new(&current_model);
    // Check if we're in plan mode from settings
    let is_plan_mode = settings.mode == "plan";
    let mut status_bar = StatusBar::new(current_model, mode, theme.clone());
    if is_plan_mode {
        status_bar.set_planning_mode(true);
    }
    Ok(Self {
        chat_panel: ChatPanel::new(theme.clone()),
        input_box: InputBox::new(theme.clone()),
        status_bar,
        todo_panel: TodoPanel::new(theme.clone()),
        plan_panel: PlanPanel::new(theme.clone()),
        permission_popup: None,
        autocomplete: Autocomplete::new(theme.clone()),
        command_help: CommandHelp::new(theme.clone()),
        provider_tabs: ProviderTabs::with_provider(provider, theme.clone()),
        model_picker: ModelPicker::new(theme.clone()),
        theme,
        stats: SessionStats::new(),
        history: SessionHistory::new(),
        checkpoint_mgr: CheckpointManager::new(PathBuf::from(".owlen/checkpoints")),
        todo_list: TodoList::new(),
        provider_mode: ProviderMode::Multi(provider_manager),
        opts,
        perms,
        ctx: ToolContext::new(),
        settings,
        // Engine channels are attached later (if at all) via `set_engine`.
        engine_tx: None,
        shared_state: None,
        engine_rx: None,
        running: true,
        waiting_for_llm: false,
        pending_tool: None,
        permission_tx: None,
        vim_mode: VimMode::Insert,
    })
}
/// Create a new TUI app with a shared ProviderManager (for engine integration)
/// Create a new TUI app with a shared ProviderManager (for engine integration)
///
/// Same as `with_provider_manager`, but the manager is shared with the
/// engine behind an async mutex.
///
/// NOTE(review): the initial provider/model are read by blocking the
/// current thread on the shared lock via `futures::executor::block_on`.
/// If this constructor runs on a runtime thread while the engine holds
/// the lock, it could stall — confirm it is called before the engine
/// starts contending for the manager.
pub fn with_shared_provider_manager(
    provider_manager: Arc<tokio::sync::Mutex<ProviderManager>>,
    perms: PermissionManager,
    settings: config_agent::Settings,
) -> Result<Self> {
    let theme = Theme::default();
    let mode = perms.mode();
    // Get initial provider and model (need to block to access shared manager)
    let (current_provider, current_model) = {
        let guard = futures::executor::block_on(provider_manager.lock());
        (guard.current_provider_type(), guard.current_model().to_string())
    };
    let provider = match current_provider {
        ProviderType::Anthropic => Provider::Claude,
        ProviderType::OpenAI => Provider::OpenAI,
        ProviderType::Ollama => Provider::Ollama,
    };
    let opts = ChatOptions::new(&current_model);
    // Check if we're in plan mode from settings
    let is_plan_mode = settings.mode == "plan";
    let mut status_bar = StatusBar::new(current_model, mode, theme.clone());
    if is_plan_mode {
        status_bar.set_planning_mode(true);
    }
    Ok(Self {
        chat_panel: ChatPanel::new(theme.clone()),
        input_box: InputBox::new(theme.clone()),
        status_bar,
        todo_panel: TodoPanel::new(theme.clone()),
        plan_panel: PlanPanel::new(theme.clone()),
        permission_popup: None,
        autocomplete: Autocomplete::new(theme.clone()),
        command_help: CommandHelp::new(theme.clone()),
        provider_tabs: ProviderTabs::with_provider(provider, theme.clone()),
        model_picker: ModelPicker::new(theme.clone()),
        theme,
        stats: SessionStats::new(),
        history: SessionHistory::new(),
        checkpoint_mgr: CheckpointManager::new(PathBuf::from(".owlen/checkpoints")),
        todo_list: TodoList::new(),
        provider_mode: ProviderMode::Shared(provider_manager),
        opts,
        perms,
        ctx: ToolContext::new(),
        settings,
        // Engine channels are attached later via `set_engine`.
        engine_tx: None,
        shared_state: None,
        engine_rx: None,
        running: true,
        waiting_for_llm: false,
        pending_tool: None,
        permission_tx: None,
        vim_mode: VimMode::Insert,
    })
}
/// Get the current LLM provider client
/// Get the current LLM provider client
///
/// `Single` returns a clone of the stored handle; the manager-backed
/// modes delegate to `ProviderManager::get_provider`, converting its
/// error into an eyre report.
///
/// NOTE(review): in `Shared` mode this blocks the current thread on the
/// async mutex via `futures::executor::block_on`; if called from a
/// runtime thread while the engine holds the lock it could stall —
/// confirm the lock is only ever held briefly.
fn get_client(&mut self) -> Result<Arc<dyn LlmProvider>> {
    match &mut self.provider_mode {
        ProviderMode::Single(client) => Ok(Arc::clone(client)),
        ProviderMode::Multi(manager) => manager
            .get_provider()
            .map_err(|e| color_eyre::eyre::eyre!("{}", e)),
        ProviderMode::Shared(manager) => {
            let mut guard = futures::executor::block_on(manager.lock());
            guard.get_provider()
                .map_err(|e| color_eyre::eyre::eyre!("{}", e))
        }
    }
}
/// Switch to a different provider (works in multi-provider and shared modes)
/// Switch to a different provider (works in multi-provider and shared modes)
///
/// On success: updates the provider tabs, chat options, and status bar,
/// and posts a system message. On failure: posts an error message and
/// propagates the error. In `Single` mode only an informational message
/// is posted and `Ok(())` is returned.
fn switch_provider(&mut self, provider_type: ProviderType) -> Result<()> {
    // Helper to update UI after successful switch. Takes `&mut Self`
    // explicitly: the match arms below finish borrowing
    // `self.provider_mode` before calling it, so the exclusive borrow
    // is legal under NLL.
    let update_ui = |s: &mut Self, model: String| {
        let provider = match provider_type {
            ProviderType::Anthropic => Provider::Claude,
            ProviderType::OpenAI => Provider::OpenAI,
            ProviderType::Ollama => Provider::Ollama,
        };
        s.provider_tabs.set_active(provider);
        s.opts.model = model.clone();
        // Status bar is rebuilt rather than mutated in place.
        s.status_bar = StatusBar::new(model.clone(), s.perms.mode(), s.theme.clone());
        s.chat_panel.add_message(ChatMessage::System(
            format!("Switched to {} (model: {})", provider_type, model)
        ));
    };
    match &mut self.provider_mode {
        ProviderMode::Multi(manager) => {
            match manager.switch_provider(provider_type) {
                Ok(_) => {
                    let model = manager.current_model().to_string();
                    update_ui(self, model);
                    Ok(())
                }
                Err(e) => {
                    self.chat_panel.add_message(ChatMessage::System(
                        format!("Failed to switch provider: {}", e)
                    ));
                    Err(color_eyre::eyre::eyre!("{}", e))
                }
            }
        }
        ProviderMode::Shared(manager) => {
            let mut guard = futures::executor::block_on(manager.lock());
            match guard.switch_provider(provider_type) {
                Ok(_) => {
                    let model = guard.current_model().to_string();
                    drop(guard); // Release lock before updating UI
                    update_ui(self, model);
                    Ok(())
                }
                Err(e) => {
                    self.chat_panel.add_message(ChatMessage::System(
                        format!("Failed to switch provider: {}", e)
                    ));
                    Err(color_eyre::eyre::eyre!("{}", e))
                }
            }
        }
        ProviderMode::Single(_) => {
            self.chat_panel.add_message(ChatMessage::System(
                "Provider switching requires multi-provider mode. Restart with 'owlen' to enable.".to_string()
            ));
            Ok(())
        }
    }
}
/// Apply a new theme to every UI component.
///
/// NOTE(review): `chat_panel`, `input_box`, and `status_bar` are rebuilt
/// from scratch rather than re-themed in place — rebuilding the chat
/// panel appears to discard the displayed message history, and the input
/// box loses any typed text. Confirm whether this is intentional, or
/// whether these components should gain a `set_theme` like the others
/// below.
fn set_theme(&mut self, theme: Theme) {
    self.theme = theme.clone();
    self.chat_panel = ChatPanel::new(theme.clone());
    self.input_box = InputBox::new(theme.clone());
    self.status_bar = StatusBar::new(self.opts.model.clone(), self.perms.mode(), theme.clone());
    // Remaining components support in-place theme updates.
    self.todo_panel.set_theme(theme.clone());
    self.plan_panel.set_theme(theme.clone());
    self.autocomplete.set_theme(theme.clone());
    self.command_help.set_theme(theme.clone());
    self.provider_tabs.set_theme(theme.clone());
    self.model_picker.set_theme(theme);
}
/// Open the model picker for the current provider
async fn open_model_picker(&mut self) {
match &self.provider_mode {
ProviderMode::Multi(manager) => {
let provider_type = manager.current_provider_type();
let current_model = manager.current_model().to_string();
// Show loading state immediately
self.model_picker.show_loading(provider_type);
// Fetch models from provider
match manager.list_models_for_provider(provider_type).await {
Ok(models) => {
if models.is_empty() {
self.model_picker.show_error("No models available".to_string());
} else {
self.model_picker.show(models, &provider_type.to_string(), &current_model);
}
}
Err(e) => {
self.model_picker.show_error(e.to_string());
}
}
}
ProviderMode::Shared(manager) => {
let guard = manager.lock().await;
let provider_type = guard.current_provider_type();
let current_model = guard.current_model().to_string();
// Show loading state immediately
self.model_picker.show_loading(provider_type);
// Fetch models from provider
match guard.list_models_for_provider(provider_type).await {
Ok(models) => {
drop(guard); // Release lock before updating UI
if models.is_empty() {
self.model_picker.show_error("No models available".to_string());
} else {
self.model_picker.show(models, &provider_type.to_string(), &current_model);
}
}
Err(e) => {
self.model_picker.show_error(e.to_string());
}
}
}
ProviderMode::Single(_) => {
self.chat_panel.add_message(ChatMessage::System(
"Model picker requires multi-provider mode. Use [1][2][3] to switch providers first.".to_string()
));
}
}
}
/// Set the model for the current provider
/// Set the active model on the current provider and refresh the UI.
///
/// No-op in single-provider mode. In the manager-backed modes the model
/// name is pushed into the provider manager, mirrored into the chat
/// options, and reflected by a rebuilt status bar plus a system chat
/// message.
fn set_current_model(&mut self, model: String) {
    // Push the new model into the underlying manager; bail out early in
    // single-provider mode, which has no model switching.
    match &mut self.provider_mode {
        ProviderMode::Multi(manager) => manager.set_current_model(model.clone()),
        ProviderMode::Shared(manager) => {
            // The temporary guard is dropped at the end of the statement,
            // releasing the lock before the UI is touched.
            futures::executor::block_on(manager.lock()).set_current_model(model.clone());
        }
        ProviderMode::Single(_) => return,
    }
    // Shared UI refresh for both multi-provider variants.
    self.opts.model = model.clone();
    self.status_bar = StatusBar::new(model.clone(), self.perms.mode(), self.theme.clone());
    self.chat_panel.add_message(ChatMessage::System(
        format!("Model changed to: {}", model)
    ));
}
/// Show keyboard shortcuts help
fn show_shortcuts_help(&mut self) {
let shortcuts = r#"
--- Keyboard Shortcuts ---
Provider Switching (Normal mode or empty input):
[1] [2] [3] Switch provider (Claude/Ollama/OpenAI)
Tab Cycle through providers
M Open model picker
Chat Navigation (Normal mode or empty input):
j / k Select next/prev message
J / K Scroll chat down/up (3 lines)
g / G Scroll to top/bottom
Esc Clear selection
Scrolling (works anytime):
PageUp/Down Scroll page up/down
Vim Modes:
Esc Normal mode (navigation)
i Insert mode (typing)
: Command mode
Input:
Enter Send message
Ctrl+c Quit
Commands: /help, /model <name>, /clear, /theme <name>
"#;
self.chat_panel.add_message(ChatMessage::System(shortcuts.trim().to_string()));
}
/// Get the public todo list for external updates
/// Get the public todo list for external updates
///
/// Read-only borrow of the session's todo list.
pub fn todo_list(&self) -> &TodoList {
    &self.todo_list
}
/// Get the current accumulated plan from shared state (if any)
fn get_current_plan(&self) -> Option<tools_plan::AccumulatedPlan> {
if let Some(state) = &self.shared_state {
if let Ok(guard) = state.try_lock() {
return guard.accumulated_plan.clone();
}
}
None
}
/// Show the plan panel
/// Show the plan panel
///
/// Thin wrapper so callers outside the UI can reveal the panel.
pub fn show_plan_panel(&mut self) {
    self.plan_panel.show();
}
/// Hide the plan panel
/// Hide the plan panel
///
/// Thin wrapper so callers outside the UI can dismiss the panel.
pub fn hide_plan_panel(&mut self) {
    self.plan_panel.hide();
}
/// Render the header line: OWLEN left, model + vim mode right
fn render_header(&self, frame: &mut ratatui::Frame, area: Rect) {
let vim_indicator = self.vim_mode.indicator(&self.theme.symbols);
// Calculate right side content
let right_content = format!("{} {}", self.opts.model, vim_indicator);
let right_len = right_content.len();
// Calculate padding
let name = "OWLEN";
let padding = area.width.saturating_sub(name.len() as u16 + right_len as u16 + 2);
let line = Line::from(vec![
Span::styled(" ", Style::default()),
Span::styled(name, self.theme.header_accent),
Span::raw(" ".repeat(padding as usize)),
Span::styled(&self.opts.model, self.theme.status_dim),
Span::styled(" ", Style::default()),
Span::styled(vim_indicator, self.theme.header_accent),
Span::styled(" ", Style::default()),
]);
let paragraph = Paragraph::new(line);
frame.render_widget(paragraph, area);
}
/// Render a horizontal rule divider
fn render_divider(&self, frame: &mut ratatui::Frame, area: Rect) {
let rule = self.theme.symbols.horizontal_rule.repeat(area.width as usize);
let line = Line::from(Span::styled(rule, Style::default().fg(self.theme.palette.border)));
let paragraph = Paragraph::new(line);
frame.render_widget(paragraph, area);
}
/// Render empty state placeholder (centered)
fn render_empty_state(&self, frame: &mut ratatui::Frame, area: Rect) {
let message = "Start a conversation...";
let padding = area.width.saturating_sub(message.len() as u16) / 2;
let vertical_center = area.height / 2;
// Create centered area
let centered_area = Rect {
x: area.x,
y: area.y + vertical_center,
width: area.width,
height: 1,
};
let line = Line::from(vec![
Span::raw(" ".repeat(padding as usize)),
Span::styled(message, self.theme.status_dim),
]);
let paragraph = Paragraph::new(line);
frame.render_widget(paragraph, centered_area);
}
/// Check if the chat panel is empty (no real messages)
/// Check if the chat panel is empty (no real messages)
///
/// Used by the render loop to decide between the chat view and the
/// empty-state placeholder.
fn is_chat_empty(&self) -> bool {
    self.chat_panel.messages().is_empty()
}
/// Run the main TUI loop until the user quits.
///
/// Owns the terminal lifecycle: enters raw mode, the alternate screen,
/// and mouse capture on entry, and restores all three on normal exit.
/// Spawns two background tasks that funnel crossterm events and engine
/// messages into a single unbounded channel, then draws and handles
/// events at a fixed ~60 fps cadence (16 ms sleep per iteration).
///
/// NOTE(review): an early `?` return (e.g. a draw error) skips the
/// terminal-restore block at the bottom, leaving the terminal in raw
/// mode — consider an RAII guard. TODO confirm acceptable.
pub async fn run(&mut self) -> Result<()> {
    // Setup terminal
    enable_raw_mode()?;
    stdout()
        .execute(EnterAlternateScreen)?
        .execute(EnableMouseCapture)?;
    let backend = CrosstermBackend::new(stdout());
    let mut terminal = Terminal::new(backend)?;
    terminal.clear()?;
    // Create event channel: single funnel for terminal + engine events.
    let (event_tx, mut event_rx) = mpsc::unbounded_channel();
    // Spawn terminal event listener: translates raw crossterm events
    // (keys, mouse wheel, resize) into AppEvents.
    let tx_clone = event_tx.clone();
    tokio::spawn(async move {
        let mut reader = EventStream::new();
        while let Some(event) = reader.next().await {
            match event {
                Ok(Event::Key(key)) => {
                    // Key translation lives in events::handle_key_event;
                    // keys it ignores are dropped here.
                    if let Some(app_event) = handle_key_event(key) {
                        let _ = tx_clone.send(app_event);
                    }
                }
                Ok(Event::Mouse(mouse)) => {
                    use crossterm::event::MouseEventKind;
                    match mouse.kind {
                        MouseEventKind::ScrollUp => {
                            let _ = tx_clone.send(AppEvent::ScrollUp);
                        }
                        MouseEventKind::ScrollDown => {
                            let _ = tx_clone.send(AppEvent::ScrollDown);
                        }
                        // Clicks/drags are intentionally ignored.
                        _ => {}
                    }
                }
                Ok(Event::Resize(w, h)) => {
                    let _ = tx_clone.send(AppEvent::Resize {
                        width: w,
                        height: h,
                    });
                }
                _ => {}
            }
        }
    });
    // Spawn engine event listener: forwards engine messages into the
    // same channel. `take()` means a second `run` call would not
    // re-attach the engine receiver.
    if let Some(mut rx) = self.engine_rx.take() {
        let tx_clone = event_tx.clone();
        tokio::spawn(async move {
            while let Some(msg) = rx.recv().await {
                let _ = tx_clone.send(AppEvent::EngineMessage(msg));
            }
        });
    }
    // Show first-run welcome message if this is the first time
    if config_agent::is_first_run() {
        self.chat_panel.add_message(ChatMessage::System(
            "Welcome to Owlen! 🦉\n\n\
            You're starting with:\n\
            • Provider: Ollama (local, free)\n\
            • Model: qwen3:8b (tool-capable)\n\n\
            Quick start:\n\
            • [1][2][3] - Switch providers (Claude/Ollama/OpenAI)\n\
            • [Tab] - Cycle through providers\n\
            • [m] - Open model picker (in Normal mode)\n\
            • [Esc] - Enter Normal mode\n\
            • /help - Show all commands\n\n\
            To authenticate with cloud providers:\n\
            • Run: owlen login anthropic\n\
            • Run: owlen login openai".to_string()
        ));
    }
    // Main event loop
    while self.running {
        // Render
        terminal.draw(|frame| {
            let size = frame.area();
            let todo_height = self.todo_panel.min_height();
            let layout = AppLayout::calculate_with_todo(size, todo_height);
            // Render header: OWLEN left, model + vim mode right
            self.render_header(frame, layout.header_area);
            // Render provider tabs
            self.provider_tabs.render(frame, layout.tabs_area);
            // Render top divider (horizontal rule)
            self.render_divider(frame, layout.top_divider);
            // Update scroll position before rendering
            self.chat_panel.update_scroll(layout.chat_area);
            // Render chat area or empty state
            if self.is_chat_empty() {
                self.render_empty_state(frame, layout.chat_area);
            } else {
                self.chat_panel.render(frame, layout.chat_area);
            }
            // Render todo panel if visible
            if todo_height > 0 {
                self.todo_panel.render(frame, layout.todo_area, &self.todo_list);
            }
            // Render bottom divider
            self.render_divider(frame, layout.bottom_divider);
            // Render input area
            self.input_box.render(frame, layout.input_area);
            // Render status bar
            self.status_bar.render(frame, layout.status_area);
            // Render overlays (in order of priority)
            // 1. Autocomplete dropdown (above input)
            if self.autocomplete.is_visible() {
                self.autocomplete.render(frame, layout.input_area);
            }
            // 2. Model picker (centered modal)
            if self.model_picker.is_visible() {
                self.model_picker.render(frame, size);
            }
            // 3. Plan panel (when accumulating or reviewing a plan)
            if self.plan_panel.is_visible() {
                let plan = self.get_current_plan();
                // Calculate centered area for plan panel (60% width, 50% height)
                let plan_width = (size.width * 3 / 5).max(60).min(size.width - 4);
                let plan_height = (size.height / 2).max(15).min(size.height - 4);
                let plan_area = AppLayout::center_popup(size, plan_width, plan_height);
                self.plan_panel.render(frame, plan_area, plan.as_ref());
            }
            // 4. Command help overlay (centered modal)
            if self.command_help.is_visible() {
                self.command_help.render(frame, size);
            }
            // 5. Permission popup (highest priority)
            if let Some(popup) = &self.permission_popup {
                popup.render(frame, size);
            }
        })?;
        // Handle events.
        // NOTE(review): `try_recv` processes at most one queued event per
        // 16 ms frame, so bursts of input are drained slowly — consider
        // looping until the channel is empty. TODO confirm intended.
        if let Ok(event) = event_rx.try_recv() {
            self.handle_event(event, &event_tx).await?;
        }
        // Small delay to prevent busy-waiting (~60 fps cadence)
        tokio::time::sleep(tokio::time::Duration::from_millis(16)).await;
    }
    // Cleanup terminal
    disable_raw_mode()?;
    stdout()
        .execute(LeaveAlternateScreen)?
        .execute(DisableMouseCapture)?;
    Ok(())
}
async fn handle_event(
&mut self,
event: AppEvent,
event_tx: &mpsc::UnboundedSender<AppEvent>,
) -> Result<()> {
match event {
AppEvent::Input(key) => {
// Handle overlays in priority order (highest first)
// 1. Permission popup (highest priority)
if let Some(popup) = &mut self.permission_popup {
if let Some(option) = popup.handle_key(key) {
use crate::components::PermissionOption;
match option {
PermissionOption::AllowOnce => {
self.chat_panel.add_message(ChatMessage::System(
"Permission granted once".to_string()
));
if let Some(tx) = self.permission_tx.take() {
let _ = tx.send(true);
}
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::PermissionResult(true))).await;
}
}
PermissionOption::AlwaysAllow => {
// Add rule to permission manager
if let Some(pending) = &self.pending_tool {
self.perms.add_rule(
pending.perm_tool,
pending.context.clone(),
Action::Allow,
);
self.chat_panel.add_message(ChatMessage::System(
format!("Always allowed: {}", pending.tool_name)
));
}
if let Some(tx) = self.permission_tx.take() {
let _ = tx.send(true);
}
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::PermissionResult(true))).await;
}
}
PermissionOption::Deny => {
self.chat_panel.add_message(ChatMessage::System(
"Permission denied".to_string()
));
if let Some(tx) = self.permission_tx.take() {
let _ = tx.send(false);
}
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::PermissionResult(false))).await;
}
}
PermissionOption::Explain => {
// Show explanation
if let Some(pending) = &self.pending_tool {
let explanation = format!(
"Tool '{}' requires permission. This operation will {}.",
pending.tool_name,
match pending.tool_name.as_str() {
"read" => "read a file from disk",
"write" => "write or overwrite a file",
"edit" => "modify an existing file",
"bash" => "execute a shell command",
"grep" => "search for patterns in files",
"glob" => "list files matching a pattern",
_ => "perform an operation",
}
);
self.chat_panel.add_message(ChatMessage::System(explanation));
}
// Don't close popup, let user choose again
return Ok(());
}
}
self.permission_popup = None;
self.pending_tool = None;
self.status_bar.set_state(crate::components::AppState::Idle);
self.status_bar.set_pending_permission(None);
}
return Ok(());
}
// 2. Plan panel (when reviewing accumulated plan)
if self.plan_panel.is_visible() {
match key.code {
// Navigation
KeyCode::Char('j') | KeyCode::Down => {
if let Some(plan) = self.get_current_plan() {
self.plan_panel.select_next(plan.steps.len());
}
}
KeyCode::Char('k') | KeyCode::Up => {
self.plan_panel.select_prev();
}
// Toggle step approval
KeyCode::Char(' ') => {
if let Some(state) = &self.shared_state {
if let Ok(mut guard) = state.try_lock() {
if let Some(plan) = guard.current_plan_mut() {
let idx = self.plan_panel.selected_index();
if let Some(step) = plan.steps.get_mut(idx) {
step.approved = match step.approved {
None => Some(true),
Some(true) => Some(false),
Some(false) => None,
};
}
}
}
}
}
// Finalize plan (stop accumulating, enter review)
KeyCode::Char('f') | KeyCode::Char('F') => {
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::FinalizePlan)).await;
}
}
// Approve all pending
KeyCode::Char('a') | KeyCode::Char('A') => {
if let Some(state) = &self.shared_state {
if let Ok(mut guard) = state.try_lock() {
if let Some(plan) = guard.current_plan_mut() {
for step in &mut plan.steps {
if step.approved.is_none() {
step.approved = Some(true);
}
}
}
}
}
}
// Reject all pending
KeyCode::Char('r') | KeyCode::Char('R') => {
if let Some(state) = &self.shared_state {
if let Ok(mut guard) = state.try_lock() {
if let Some(plan) = guard.current_plan_mut() {
for step in &mut plan.steps {
if step.approved.is_none() {
step.approved = Some(false);
}
}
}
}
}
}
// Execute approved steps
KeyCode::Enter => {
if let Some(plan) = self.get_current_plan() {
if plan.status == AccumulatedPlanStatus::Reviewing {
// Collect approval decisions
let approved: Vec<_> = plan.steps.iter()
.filter(|s| s.approved == Some(true))
.map(|s| s.id.clone())
.collect();
let rejected: Vec<_> = plan.steps.iter()
.filter(|s| s.approved == Some(false))
.map(|s| s.id.clone())
.collect();
let approval = tools_plan::PlanApproval {
approved_ids: approved,
rejected_ids: rejected,
};
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::PlanApproval(approval))).await;
}
self.plan_panel.hide();
}
}
}
// Save plan
KeyCode::Char('s') | KeyCode::Char('S') => {
if let Some(tx) = &self.engine_tx {
// TODO: Prompt for plan name
let name = format!("plan-{}", std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap_or_default()
.as_secs());
let _ = tx.send(Message::UserAction(UserAction::SavePlan(name))).await;
}
}
// Cancel/close
KeyCode::Esc | KeyCode::Char('c') | KeyCode::Char('C') => {
if let Some(plan) = self.get_current_plan() {
if plan.status == AccumulatedPlanStatus::Accumulating {
// Cancel the plan entirely
if let Some(tx) = &self.engine_tx {
let _ = tx.send(Message::UserAction(UserAction::CancelPlan)).await;
}
}
}
self.plan_panel.hide();
}
_ => {}
}
return Ok(());
}
// 3. Command help overlay
if self.command_help.is_visible() {
self.command_help.handle_key(key);
return Ok(());
}
// 4. Model picker
if self.model_picker.is_visible() {
let current_model = self.opts.model.clone();
match self.model_picker.handle_key(key, &current_model) {
PickerResult::Selected(model) => {
self.set_current_model(model);
}
PickerResult::Cancelled => {
// Just closed, no action
}
PickerResult::Handled | PickerResult::NotHandled => {
// Navigation or unhandled key
}
}
return Ok(());
}
// 4. Autocomplete dropdown
if self.autocomplete.is_visible() {
match self.autocomplete.handle_key(key) {
AutocompleteResult::Confirmed(cmd) => {
// Insert confirmed command into input box
self.input_box.set_text(cmd);
self.autocomplete.hide();
}
AutocompleteResult::Cancelled => {
self.autocomplete.hide();
}
AutocompleteResult::Handled => {
// Key was handled (navigation), do nothing
}
AutocompleteResult::NotHandled => {
// Pass through to input box
self.handle_input_key(key, event_tx).await?;
}
}
return Ok(());
}
// 4. Normal input handling
self.handle_input_key(key, event_tx).await?;
}
AppEvent::ScrollUp => {
self.chat_panel.scroll_up(3);
}
AppEvent::ScrollDown => {
self.chat_panel.scroll_down(3);
}
AppEvent::UserMessage(message) => {
self.chat_panel
.add_message(ChatMessage::User(message.clone()));
self.history.add_user_message(message);
}
AppEvent::StreamStart => {
// Streaming started - indicator will show in chat panel
self.waiting_for_llm = true;
self.chat_panel.set_streaming(true);
}
AppEvent::LlmChunk(chunk) => {
// APPEND to last assistant message (don't create new one each time)
self.chat_panel.append_to_assistant(&chunk);
}
AppEvent::StreamEnd { response } => {
// Streaming finished
self.waiting_for_llm = false;
self.chat_panel.set_streaming(false);
self.status_bar.set_state(crate::components::AppState::Idle);
self.history.add_assistant_message(response.clone());
// Update stats (rough estimate)
let tokens = response.len() / 4;
self.stats.record_message(tokens, std::time::Duration::from_secs(1));
}
AppEvent::StreamError(error) => {
// Streaming error
self.waiting_for_llm = false;
self.chat_panel.set_streaming(false);
self.status_bar.set_state(crate::components::AppState::Idle);
self.chat_panel.add_message(ChatMessage::System(
format!("Error: {}", error)
));
}
AppEvent::ToolCall { name, args } => {
self.chat_panel.add_message(ChatMessage::ToolCall {
name: name.clone(),
args: args.to_string(),
});
self.status_bar.set_last_tool(name);
self.stats.record_tool_call();
}
AppEvent::ToolResult { success, output } => {
self.chat_panel
.add_message(ChatMessage::ToolResult { success, output });
}
AppEvent::PermissionRequest { tool, context } => {
self.permission_popup =
Some(PermissionPopup::new(tool, context, self.theme.clone()));
}
AppEvent::StatusUpdate(stats) => {
self.stats = stats.clone();
self.status_bar.update_stats(stats);
}
AppEvent::Resize { .. } => {
// Terminal will automatically re-layout on next draw
}
AppEvent::ToggleTodo => {
self.todo_panel.toggle();
}
AppEvent::EngineMessage(msg) => {
match msg {
Message::AgentResponse(res) => {
match res {
AgentResponse::Token(t) => {
// Map to LlmChunk
if !self.waiting_for_llm {
self.waiting_for_llm = true;
self.chat_panel.set_streaming(true);
}
self.chat_panel.append_to_assistant(&t);
}
AgentResponse::Complete => {
self.waiting_for_llm = false;
self.chat_panel.set_streaming(false);
self.status_bar.set_state(crate::components::AppState::Idle);
}
AgentResponse::Error(e) => {
self.waiting_for_llm = false;
self.chat_panel.set_streaming(false);
self.status_bar.set_state(crate::components::AppState::Idle);
self.chat_panel.add_message(ChatMessage::System(format!("Error: {}", e)));
}
AgentResponse::PermissionRequest { tool, context } => {
self.permission_popup = Some(PermissionPopup::new(tool.clone(), context, self.theme.clone()));
self.status_bar.set_state(crate::components::AppState::WaitingPermission);
self.status_bar.set_pending_permission(Some(tool));
}
AgentResponse::PlanStaging(staging) => {
self.chat_panel.add_message(ChatMessage::System("--- PENDING PLAN ---".to_string()));
for tc in &staging {
self.chat_panel.add_message(ChatMessage::ToolCall { name: tc.name.clone(), args: tc.args.clone() });
}
self.chat_panel.add_message(ChatMessage::System("Approve plan in status bar? (y/n)".to_string()));
self.status_bar.set_state(crate::components::AppState::WaitingPermission);
self.status_bar.set_pending_permission(Some("PLAN".to_string()));
}
AgentResponse::ToolCall { name, args } => {
self.chat_panel.add_message(ChatMessage::ToolCall { name, args });
}
// Plan mode responses
AgentResponse::PlanStepAdded(step) => {
let msg = format!(
"[PLAN] Step {}: {} ({})",
step.turn,
step.tool,
if step.args.is_object() {
step.args.to_string().chars().take(50).collect::<String>()
} else {
step.args.to_string()
}
);
self.chat_panel.add_message(ChatMessage::System(msg));
// Auto-show plan panel when steps are being accumulated
if !self.plan_panel.is_visible() {
self.plan_panel.show();
}
}
AgentResponse::PlanComplete { total_steps, status: _ } => {
self.chat_panel.add_message(ChatMessage::System(
format!("--- PLAN COMPLETE ({} steps) ---", total_steps)
));
self.chat_panel.add_message(ChatMessage::System(
"Press [Enter] to execute or [Esc] to cancel".to_string()
));
// Show plan panel for review
self.plan_panel.show();
self.plan_panel.reset_selection();
self.status_bar.set_state(crate::components::AppState::WaitingPermission);
self.status_bar.set_pending_permission(Some("PLAN REVIEW".to_string()));
}
AgentResponse::PlanExecuting { step_id: _, step_index, total_steps } => {
self.chat_panel.add_message(ChatMessage::System(
format!("Executing step {}/{}", step_index + 1, total_steps)
));
}
AgentResponse::PlanExecutionComplete { executed, skipped } => {
self.chat_panel.add_message(ChatMessage::System(
format!("Plan execution complete: {} executed, {} skipped", executed, skipped)
));
// Hide plan panel after execution
self.plan_panel.hide();
self.status_bar.set_state(crate::components::AppState::Idle);
self.status_bar.set_pending_permission(None);
}
}
}
Message::System(sys) => {
match sys {
agent_core::messages::SystemNotification::Warning(w) => {
self.chat_panel.add_message(ChatMessage::System(format!("Warning: {}", w)));
}
agent_core::messages::SystemNotification::StateUpdate(_s) => {
// Handle state updates if needed
}
agent_core::messages::SystemNotification::PlanSaved { id, path } => {
self.chat_panel.add_message(ChatMessage::System(
format!("Plan saved: {} -> {}", id, path)
));
}
agent_core::messages::SystemNotification::PlanLoaded { id, name, steps } => {
let name_str = name.unwrap_or_else(|| "(unnamed)".to_string());
self.chat_panel.add_message(ChatMessage::System(
format!("Plan loaded: {} '{}' ({} steps)", id, name_str, steps)
));
}
}
}
_ => {}
}
}
AppEvent::SwitchProvider(provider_type) => {
let _ = self.switch_provider(provider_type);
}
AppEvent::CycleProvider => {
// Get current provider type and cycle to next
let current = match self.provider_tabs.active() {
Provider::Claude => ProviderType::Anthropic,
Provider::Ollama => ProviderType::Ollama,
Provider::OpenAI => ProviderType::OpenAI,
};
let next = match current {
ProviderType::Anthropic => ProviderType::Ollama,
ProviderType::Ollama => ProviderType::OpenAI,
ProviderType::OpenAI => ProviderType::Anthropic,
};
let _ = self.switch_provider(next);
}
AppEvent::OpenModelPicker => {
self.open_model_picker().await;
}
AppEvent::Quit => {
self.running = false;
}
}
Ok(())
}
/// Handle input keys with autocomplete integration.
///
/// Dispatch order matters and is deliberate:
/// 1. PageUp/PageDown always scroll the chat, in any mode.
/// 2. When the input buffer is empty (or vim Normal mode is active),
///    single-key shortcuts (provider switching, scrolling, help, model
///    picker) consume the key before it ever reaches the input box.
/// 3. Otherwise the key is forwarded to the input box, whose returned
///    `InputEvent` drives message submission, command execution, and
///    vim-mode changes.
/// 4. Finally the autocomplete popup is shown and filtered while the
///    input starts with '/', and hidden otherwise.
async fn handle_input_key(
    &mut self,
    key: crossterm::event::KeyEvent,
    event_tx: &mpsc::UnboundedSender<AppEvent>,
) -> Result<()> {
    use crate::components::InputEvent;
    // Global navigation keys that work in any mode
    match key.code {
        // PageUp - Scroll chat up one page (always works)
        KeyCode::PageUp => {
            self.chat_panel.page_up(20);
            return Ok(());
        }
        // PageDown - Scroll chat down one page (always works)
        KeyCode::PageDown => {
            self.chat_panel.page_down(20);
            return Ok(());
        }
        _ => {}
    }
    // Check for provider switching keys when input is empty or in Normal mode.
    // NOTE(review): these shortcuts also fire in Insert mode as long as the
    // buffer is empty, so typing a message that starts with '1'..'3', '?',
    // 'j', 'k', etc. is intercepted on the first keystroke — confirm intended.
    let input_empty = self.input_box.text().is_empty();
    if input_empty || self.vim_mode == VimMode::Normal {
        match key.code {
            // [1] - Switch to Claude (Anthropic)
            KeyCode::Char('1') => {
                let _ = event_tx.send(AppEvent::SwitchProvider(ProviderType::Anthropic));
                return Ok(());
            }
            // [2] - Switch to Ollama
            KeyCode::Char('2') => {
                let _ = event_tx.send(AppEvent::SwitchProvider(ProviderType::Ollama));
                return Ok(());
            }
            // [3] - Switch to OpenAI
            KeyCode::Char('3') => {
                let _ = event_tx.send(AppEvent::SwitchProvider(ProviderType::OpenAI));
                return Ok(());
            }
            // Tab - Cycle providers
            KeyCode::Tab => {
                let _ = event_tx.send(AppEvent::CycleProvider);
                return Ok(());
            }
            // '?' - Show shortcuts help
            KeyCode::Char('?') => {
                self.show_shortcuts_help();
                return Ok(());
            }
            // 'M' (Shift+m) - Open model picker
            KeyCode::Char('M') => {
                let _ = event_tx.send(AppEvent::OpenModelPicker);
                return Ok(());
            }
            // 'j' - Navigate to next message (focus)
            KeyCode::Char('j') => {
                self.chat_panel.focus_next();
                return Ok(());
            }
            // 'k' - Navigate to previous message (focus)
            KeyCode::Char('k') => {
                self.chat_panel.focus_previous();
                return Ok(());
            }
            // 'J' (Shift+j) - Scroll chat down
            KeyCode::Char('J') => {
                self.chat_panel.scroll_down(3);
                return Ok(());
            }
            // 'K' (Shift+k) - Scroll chat up
            KeyCode::Char('K') => {
                self.chat_panel.scroll_up(3);
                return Ok(());
            }
            // 'G' (Shift+g) - Scroll to bottom
            KeyCode::Char('G') => {
                self.chat_panel.scroll_to_bottom();
                return Ok(());
            }
            // 'g' - Scroll to top (vim-like gg, simplified to single g)
            KeyCode::Char('g') => {
                self.chat_panel.scroll_to_top();
                return Ok(());
            }
            // Esc also clears message focus.
            // Deliberately falls through (no `return`) so the input box can
            // additionally react to Esc, e.g. switching vim mode.
            KeyCode::Esc => {
                self.chat_panel.clear_focus();
                // Don't return - let it also handle vim mode change
            }
            _ => {}
        }
    }
    // Handle the key in input box
    if let Some(event) = self.input_box.handle_key(key) {
        match event {
            InputEvent::Message(message) => {
                // Hide autocomplete before processing
                self.autocomplete.hide();
                self.handle_user_message(message, event_tx).await?;
            }
            InputEvent::Command(cmd) => {
                // Commands from vim command mode (without /)
                self.autocomplete.hide();
                self.handle_command(&format!("/{}", cmd))?;
            }
            InputEvent::ModeChange(mode) => {
                // Keep the status bar's mode indicator in sync.
                self.vim_mode = mode;
                self.status_bar.set_vim_mode(mode);
            }
            InputEvent::Cancel => {
                self.autocomplete.hide();
                self.waiting_for_llm = false;
            }
            InputEvent::Expand => {
                // TODO: Expand to multiline input
            }
        }
    }
    // Check if we should show/update autocomplete
    let input = self.input_box.text();
    if input.starts_with('/') {
        // '/' is a single-byte char, so slicing at byte 1 is always valid.
        let query = &input[1..]; // Text after /
        if !self.autocomplete.is_visible() {
            self.autocomplete.show();
        }
        self.autocomplete.update_filter(query);
    } else {
        // Hide autocomplete if input doesn't start with /
        if self.autocomplete.is_visible() {
            self.autocomplete.hide();
        }
    }
    Ok(())
}
/// Process a user-submitted line: route slash commands to `handle_command`,
/// otherwise echo the message to the chat, record it in history, turn on the
/// streaming indicators, and hand the prompt to the engine (preferred) or a
/// legacy background stream (fallback when no engine is connected).
///
/// Errors only propagate from command handling; streaming failures are
/// reported asynchronously via `AppEvent::StreamError`.
async fn handle_user_message(
    &mut self,
    message: String,
    event_tx: &mpsc::UnboundedSender<AppEvent>,
) -> Result<()> {
    // Handle slash commands without touching the chat transcript.
    if message.starts_with('/') {
        self.handle_command(&message)?;
        return Ok(());
    }
    // Add user message to chat IMMEDIATELY so it shows before AI response.
    self.chat_panel
        .add_message(ChatMessage::User(message.clone()));
    self.history.add_user_message(message.clone());
    // Start streaming indicator.
    self.waiting_for_llm = true;
    self.chat_panel.set_streaming(true);
    self.status_bar.set_state(crate::components::AppState::Streaming);
    let _ = event_tx.send(AppEvent::StreamStart);
    // Check if we have an engine connection - use it for proper agent loop.
    if let Some(tx) = &self.engine_tx {
        // Last use of `message` on this branch: move it instead of cloning.
        let _ = tx.send(Message::UserAction(UserAction::Input(message))).await;
    } else {
        // Fallback to legacy background stream if no engine.
        // Only get client when needed for legacy path.
        let client = match self.get_client() {
            Ok(c) => c,
            Err(e) => {
                // Roll back the streaming indicators before reporting failure.
                self.waiting_for_llm = false;
                self.chat_panel.set_streaming(false);
                self.status_bar.set_state(crate::components::AppState::Idle);
                self.chat_panel.add_message(ChatMessage::System(
                    format!("Failed to get provider: {}", e)
                ));
                return Ok(());
            }
        };
        // Spawn streaming in background task; `message` is moved into the
        // task (its last use), and results come back as AppEvents.
        let opts = self.opts.clone();
        let tx = event_tx.clone();
        tokio::spawn(async move {
            match Self::run_background_stream(client, opts, message, tx.clone()).await {
                Ok(response) => {
                    let _ = tx.send(AppEvent::StreamEnd { response });
                }
                Err(e) => {
                    let _ = tx.send(AppEvent::StreamError(e.to_string()));
                }
            }
        });
    }
    Ok(())
}
/// Run streaming in background, sending chunks through channel
async fn run_background_stream(
client: Arc<dyn LlmProvider>,
opts: ChatOptions,
prompt: String,
tx: mpsc::UnboundedSender<AppEvent>,
) -> Result<String> {
let messages = vec![LLMChatMessage::user(&prompt)];
let tools = get_tool_definitions();
let mut stream = client
.chat_stream(&messages, &opts, Some(&tools))
.await
.map_err(|e| color_eyre::eyre::eyre!("LLM provider error: {}", e))?;
let mut response_content = String::new();
while let Some(chunk) = stream.next().await {
let chunk = chunk.map_err(|e| color_eyre::eyre::eyre!("Stream error: {}", e))?;
if let Some(content) = chunk.content {
response_content.push_str(&content);
// Send chunk to UI for immediate display
let _ = tx.send(AppEvent::LlmChunk(content));
}
// TODO: Handle tool calls in background streaming
// For now, tool calls are ignored in background mode
}
Ok(response_content)
}
/// Execute a tool with permission handling
///
/// This method checks permissions and either:
/// - Executes the tool immediately if allowed
/// - Returns an error if denied by policy
/// - Shows a permission popup and waits for user decision if permission is needed
///
/// The async wait for user decision works correctly because:
/// 1. The event loop continues running while we await the channel
/// 2. Keyboard events are processed by the separate event listener task
/// 3. When user responds to popup, the channel is signaled and we resume
///
/// NOTE(review): on the Allow/Deny-by-user paths, `permission_popup` and
/// `pending_tool` are presumably cleared by the handler that answers the
/// popup — only the timeout path clears them here. Confirm against the
/// popup key handler.
///
/// Returns Ok(result) if allowed and executed, Err if denied or failed
#[allow(dead_code)] // Reserved for interactive tool permission flow
async fn execute_tool_with_permission(
    &mut self,
    tool_name: &str,
    arguments: &Value,
) -> Result<String> {
    // Map tool name to permission tool enum.
    // NOTE(review): unknown tool names fall back to Read permissions — the
    // most permissive mapping here; confirm this is intended.
    let perm_tool = match tool_name {
        "read" => PermTool::Read,
        "write" => PermTool::Write,
        "edit" => PermTool::Edit,
        "bash" => PermTool::Bash,
        "grep" => PermTool::Grep,
        "glob" => PermTool::Glob,
        _ => PermTool::Read, // Default fallback
    };
    // Extract context from arguments: the file path for file tools, the
    // command line for bash. Missing/non-string values yield None.
    let context = match tool_name {
        "read" | "write" | "edit" => arguments.get("path").and_then(|v| v.as_str()).map(String::from),
        "bash" => arguments.get("command").and_then(|v| v.as_str()).map(String::from),
        _ => None,
    };
    // Check permission against the current policy.
    let decision = self.perms.check(perm_tool, context.as_deref());
    match decision {
        PermissionDecision::Allow => {
            // Execute directly
            execute_tool(tool_name, arguments, &self.perms, &self.ctx).await
        }
        PermissionDecision::Deny => {
            Err(color_eyre::eyre::eyre!("Permission denied by policy"))
        }
        PermissionDecision::Ask => {
            // Create a one-shot channel; the popup responder sends the
            // user's yes/no decision through `permission_tx`.
            let (tx, rx) = tokio::sync::oneshot::channel();
            self.permission_tx = Some(tx);
            // Store pending tool info for the responder to inspect.
            self.pending_tool = Some(PendingToolCall {
                tool_name: tool_name.to_string(),
                arguments: arguments.clone(),
                perm_tool,
                context: context.clone(),
            });
            // Show permission popup
            self.permission_popup = Some(PermissionPopup::new(
                tool_name.to_string(),
                context,
                self.theme.clone(),
            ));
            // Wait for user decision (with a 5-minute timeout).
            match tokio::time::timeout(std::time::Duration::from_secs(300), rx).await {
                Ok(Ok(true)) => {
                    // Permission granted, execute tool
                    execute_tool(tool_name, arguments, &self.perms, &self.ctx).await
                }
                Ok(Ok(false)) => {
                    // Permission denied
                    Err(color_eyre::eyre::eyre!("Permission denied by user"))
                }
                Ok(Err(_)) => {
                    // Channel closed without response (sender dropped)
                    Err(color_eyre::eyre::eyre!("Permission request cancelled"))
                }
                Err(_) => {
                    // Timeout: tear down the popup/pending state ourselves,
                    // since no responder will do it for us.
                    self.permission_popup = None;
                    self.pending_tool = None;
                    Err(color_eyre::eyre::eyre!("Permission request timed out"))
                }
            }
        }
    }
}
/// Run a streaming agent loop: call the LLM, execute any tool calls it
/// requests (with permission checks), feed the results back, and repeat
/// until the model responds without tool calls or `max_iterations` is hit.
///
/// Returns the text of the last non-empty assistant response.
///
/// Bug fix: partial tool-call arguments are now accumulated as raw text.
/// The previous code round-tripped the partial string through
/// `Value::to_string()`, which re-encodes a `Value::String` as a quoted,
/// escaped JSON literal — corrupting any arguments delivered over more
/// than one streaming delta so they could never parse.
#[allow(dead_code)] // Reserved for full agent loop integration
async fn run_streaming_agent_loop(&mut self, user_prompt: &str) -> Result<String> {
    let tools = get_tool_definitions();
    let mut messages = vec![LLMChatMessage::user(user_prompt)];
    let max_iterations = 10;
    let mut iteration = 0;
    let mut final_response = String::new();
    // Get the current provider client
    let client = self.get_client()?;
    loop {
        iteration += 1;
        if iteration > max_iterations {
            self.chat_panel.add_message(ChatMessage::System(
                "⚠️ Max iterations reached".to_string()
            ));
            break;
        }
        // Call LLM with streaming using the LlmProvider trait
        let mut stream = client
            .chat_stream(&messages, &self.opts, Some(&tools))
            .await
            .map_err(|e| color_eyre::eyre::eyre!("LLM provider error: {}", e))?;
        let mut response_content = String::new();
        let mut accumulated_tool_calls: Vec<llm_core::ToolCall> = Vec::new();
        // Collect the streamed response
        while let Some(chunk) = stream.next().await {
            let chunk = chunk.map_err(|e| color_eyre::eyre::eyre!("Stream error: {}", e))?;
            if let Some(content) = chunk.content {
                response_content.push_str(&content);
                // Stream chunks to UI - append to last assistant message
                self.chat_panel.append_to_assistant(&content);
            }
            // Accumulate tool calls from deltas
            if let Some(deltas) = chunk.tool_calls {
                for delta in deltas {
                    // Ensure the accumulated_tool_calls vec is large enough
                    while accumulated_tool_calls.len() <= delta.index {
                        accumulated_tool_calls.push(llm_core::ToolCall {
                            id: String::new(),
                            call_type: "function".to_string(),
                            function: llm_core::FunctionCall {
                                name: String::new(),
                                arguments: serde_json::Value::Null,
                            },
                        });
                    }
                    let tool_call = &mut accumulated_tool_calls[delta.index];
                    if let Some(id) = delta.id {
                        tool_call.id = id;
                    }
                    if let Some(name) = delta.function_name {
                        tool_call.function.name = name;
                    }
                    if let Some(args_delta) = delta.arguments_delta {
                        // Recover the text accumulated so far.
                        let current_args = match &tool_call.function.arguments {
                            // Nothing accumulated yet.
                            serde_json::Value::Null => String::new(),
                            // A previous delta left a partial raw string; take
                            // it as-is. `to_string()` here would wrap it in
                            // quotes and escape it, corrupting the buffer.
                            serde_json::Value::String(s) => s.clone(),
                            // Already-parsed JSON (a prior prefix happened to
                            // be valid); serialize it back to text.
                            other => other.to_string(),
                        };
                        let new_args = current_args + &args_delta;
                        // Try to parse as JSON, but keep as string if incomplete
                        tool_call.function.arguments = serde_json::from_str(&new_args)
                            .unwrap_or_else(|_| serde_json::Value::String(new_args));
                    }
                }
            }
        }
        drop(stream);
        // Save the response for final return
        if !response_content.is_empty() {
            final_response = response_content.clone();
        }
        // Filter out incomplete tool calls and check if we have valid ones
        let valid_tool_calls: Vec<_> = accumulated_tool_calls
            .into_iter()
            .filter(|tc| !tc.id.is_empty() && !tc.function.name.is_empty())
            .collect();
        // Check if LLM wants to call tools
        if !valid_tool_calls.is_empty() {
            // Add assistant message with tool calls to conversation
            messages.push(LLMChatMessage {
                role: llm_core::Role::Assistant,
                content: if response_content.is_empty() {
                    None
                } else {
                    Some(response_content.clone())
                },
                tool_calls: Some(valid_tool_calls.clone()),
                tool_call_id: None,
                name: None,
            });
            // Execute each tool call
            for call in valid_tool_calls {
                let tool_name = &call.function.name;
                let arguments = &call.function.arguments;
                // Show tool call in UI
                self.chat_panel.add_message(ChatMessage::ToolCall {
                    name: tool_name.clone(),
                    args: arguments.to_string(),
                });
                self.stats.record_tool_call();
                match self.execute_tool_with_permission(tool_name, arguments).await {
                    Ok(result) => {
                        // Show success in UI
                        self.chat_panel.add_message(ChatMessage::ToolResult {
                            success: true,
                            output: result.clone(),
                        });
                        // Add tool result to conversation
                        messages.push(LLMChatMessage::tool_result(&call.id, result));
                    }
                    Err(e) => {
                        let error_msg = format!("Error: {}", e);
                        // Show error in UI
                        self.chat_panel.add_message(ChatMessage::ToolResult {
                            success: false,
                            output: error_msg.clone(),
                        });
                        // Add error to conversation
                        messages.push(LLMChatMessage::tool_result(&call.id, error_msg));
                    }
                }
            }
            // Continue loop to get next response
            continue;
        }
        // No tool calls, we're done
        break;
    }
    Ok(final_response)
}
/// Dispatch a slash command (the leading '/' is included in `command`).
///
/// Exact-match arms must appear before the `starts_with` guard arms for the
/// same prefix (e.g. `"/model"` before `"/model "`), so a bare command shows
/// help/current state while the argument form performs the action.
/// Unknown commands produce a system message rather than an error.
///
/// Fix: checkpoint-id generation no longer panics if the system clock reads
/// before the Unix epoch — `unwrap_or_default()` yields a zero duration
/// instead.
fn handle_command(&mut self, command: &str) -> Result<()> {
    match command {
        "/help" | "/?" => {
            // Show command help overlay
            self.command_help.show();
        }
        "/status" => {
            // One-line session summary: model, permission mode, counters, uptime.
            let elapsed = self.stats.start_time.elapsed().unwrap_or_default();
            self.chat_panel.add_message(ChatMessage::System(format!(
                "Model: {} | Mode: {:?} | Messages: {} | Tools: {} | Uptime: {}",
                self.opts.model,
                self.perms.mode(),
                self.stats.total_messages,
                self.stats.total_tool_calls,
                SessionStats::format_duration(elapsed)
            )));
        }
        "/permissions" => {
            self.chat_panel.add_message(ChatMessage::System(format!(
                "Permission mode: {:?}",
                self.perms.mode()
            )));
        }
        "/cost" => {
            self.chat_panel.add_message(ChatMessage::System(format!(
                "Estimated tokens: ~{} | Total time: {} | Note: Ollama is free!",
                self.stats.estimated_tokens,
                SessionStats::format_duration(self.stats.total_duration)
            )));
        }
        "/history" => {
            // NOTE(review): counts user prompts only, though the message says
            // "messages" — confirm whether assistant turns should be included.
            let count = self.history.user_prompts.len();
            self.chat_panel.add_message(ChatMessage::System(format!(
                "Conversation has {} messages",
                count
            )));
        }
        "/checkpoint" => {
            // Derive an id from the current Unix timestamp. A clock set
            // before the epoch yields 0 instead of panicking.
            let checkpoint_id = format!(
                "checkpoint-{}",
                SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap_or_default()
                    .as_secs()
            );
            match self
                .checkpoint_mgr
                .save_checkpoint(checkpoint_id.clone(), self.stats.clone(), &self.history)
            {
                Ok(_) => {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "💾 Checkpoint saved: {}",
                        checkpoint_id
                    )));
                }
                Err(e) => {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "❌ Failed to save checkpoint: {}",
                        e
                    )));
                }
            }
        }
        "/checkpoints" => {
            match self.checkpoint_mgr.list_checkpoints() {
                Ok(checkpoints) => {
                    if checkpoints.is_empty() {
                        self.chat_panel
                            .add_message(ChatMessage::System("No checkpoints saved yet".to_string()));
                    } else {
                        self.chat_panel.add_message(ChatMessage::System(format!(
                            "Saved checkpoints: {}",
                            checkpoints.join(", ")
                        )));
                    }
                }
                Err(e) => {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "❌ Failed to list checkpoints: {}",
                        e
                    )));
                }
            }
        }
        "/clear" => {
            // Reset transcript, history, and stats for a fresh session.
            self.chat_panel.clear();
            self.history.clear();
            self.stats = SessionStats::new();
            self.chat_panel
                .add_message(ChatMessage::System("Session cleared".to_string()));
        }
        "/compact" => {
            self.chat_panel.add_message(ChatMessage::System(
                "Context compaction not yet implemented".to_string()
            ));
        }
        "/plan" => {
            // Toggle plan panel visibility; only show it when a plan exists.
            if self.plan_panel.is_visible() {
                self.plan_panel.hide();
            } else if self.get_current_plan().is_some() {
                self.plan_panel.show();
            } else {
                self.chat_panel.add_message(ChatMessage::System(
                    "No active plan. Start planning mode with a prompt.".to_string()
                ));
            }
        }
        "/provider" => {
            // Show available providers
            self.chat_panel.add_message(ChatMessage::System(
                "Available providers:".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • ollama - Local LLM (default)".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • anthropic - Claude API (requires ANTHROPIC_API_KEY)".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • openai - OpenAI API (requires OPENAI_API_KEY)".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                "Use '/provider <name>' to switch".to_string()
            ));
        }
        cmd if cmd.starts_with("/provider ") => {
            let provider_name = cmd.strip_prefix("/provider ").unwrap().trim();
            // Accept a couple of aliases per provider.
            let provider_type = match provider_name {
                "ollama" => Some(ProviderType::Ollama),
                "anthropic" | "claude" => Some(ProviderType::Anthropic),
                "openai" | "gpt" => Some(ProviderType::OpenAI),
                _ => None,
            };
            if let Some(pt) = provider_type {
                if let Err(e) = self.switch_provider(pt) {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "Failed to switch provider: {}", e
                    )));
                }
            } else {
                self.chat_panel.add_message(ChatMessage::System(format!(
                    "Unknown provider: {}. Available: ollama, anthropic, openai", provider_name
                )));
            }
        }
        "/model" => {
            // Show current model
            self.chat_panel.add_message(ChatMessage::System(format!(
                "Current model: {}", self.opts.model
            )));
            self.chat_panel.add_message(ChatMessage::System(
                "Use '/model <name>' to switch (e.g., /model llama3.2, /model qwen3:8b)".to_string()
            ));
        }
        cmd if cmd.starts_with("/model ") => {
            let model_name = cmd.strip_prefix("/model ").unwrap().trim();
            if model_name.is_empty() {
                self.chat_panel.add_message(ChatMessage::System(format!(
                    "Current model: {}", self.opts.model
                )));
            } else {
                // Use set_current_model to update both TUI and shared ProviderManager
                self.set_current_model(model_name.to_string());
            }
        }
        "/themes" => {
            self.chat_panel.add_message(ChatMessage::System(
                "Available themes:".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • tokyo-night - Modern and vibrant (default)".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • dracula - Classic dark theme".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • catppuccin - Warm and cozy".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • nord - Minimal and clean".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • synthwave - Vibrant retro".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • rose-pine - Elegant and muted".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                " • midnight-ocean - Deep and serene".to_string()
            ));
            self.chat_panel.add_message(ChatMessage::System(
                "Use '/theme <name>' to switch themes".to_string()
            ));
        }
        "/exit" => {
            // Stop the main loop; the run() teardown restores the terminal.
            self.running = false;
        }
        cmd if cmd.starts_with("/theme ") => {
            let theme_name = cmd.strip_prefix("/theme ").unwrap().trim();
            let new_theme = match theme_name {
                "tokyo-night" => Some(Theme::tokyo_night()),
                "dracula" => Some(Theme::dracula()),
                "catppuccin" => Some(Theme::catppuccin()),
                "nord" => Some(Theme::nord()),
                "synthwave" => Some(Theme::synthwave()),
                "rose-pine" => Some(Theme::rose_pine()),
                "midnight-ocean" => Some(Theme::midnight_ocean()),
                _ => None,
            };
            if let Some(theme) = new_theme {
                self.set_theme(theme);
                self.chat_panel.add_message(ChatMessage::System(
                    format!("🎨 Theme changed to: {}", theme_name)
                ));
            } else {
                self.chat_panel.add_message(ChatMessage::System(
                    format!("❌ Unknown theme: {}. Use '/themes' to see available themes.", theme_name)
                ));
            }
        }
        cmd if cmd.starts_with("/rewind ") => {
            let checkpoint_id = cmd.strip_prefix("/rewind ").unwrap().trim();
            match self.checkpoint_mgr.rewind_to(checkpoint_id) {
                Ok(restored_files) => {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "⏪ Rewound to checkpoint: {} ({} files restored)",
                        checkpoint_id,
                        restored_files.len()
                    )));
                }
                Err(e) => {
                    self.chat_panel.add_message(ChatMessage::System(format!(
                        "❌ Failed to rewind: {}",
                        e
                    )));
                }
            }
        }
        _ => {
            self.chat_panel.add_message(ChatMessage::System(format!(
                "❌ Unknown command: {}",
                command
            )));
        }
    }
    Ok(())
}
}