feat(agent): load configurable profiles from .owlen/agents

This commit is contained in:
2025-10-26 03:12:31 +01:00
parent 44b07c8e27
commit 353c0a8239
6 changed files with 780 additions and 25 deletions

View File

@@ -83,6 +83,7 @@ async fn test_agent_single_tool_scenario() {
model: "llama3.2".to_string(),
temperature: Some(0.7),
max_tokens: None,
..AgentConfig::default()
};
let executor = AgentExecutor::new(provider, mcp_client, config);
@@ -119,6 +120,7 @@ async fn test_agent_multi_step_workflow() {
model: "llama3.2".to_string(),
temperature: Some(0.5), // Lower temperature for more consistent behavior
max_tokens: None,
..AgentConfig::default()
};
let executor = AgentExecutor::new(provider, mcp_client, config);
@@ -150,6 +152,7 @@ async fn test_agent_iteration_limit() {
model: "llama3.2".to_string(),
temperature: Some(0.7),
max_tokens: None,
..AgentConfig::default()
};
let executor = AgentExecutor::new(provider, mcp_client, config);
@@ -191,6 +194,7 @@ async fn test_agent_tool_budget_enforcement() {
model: "llama3.2".to_string(),
temperature: Some(0.7),
max_tokens: None,
..AgentConfig::default()
};
let executor = AgentExecutor::new(provider, mcp_client, config);
@@ -248,6 +252,8 @@ fn test_agent_config_defaults() {
assert_eq!(config.max_iterations, 15);
assert_eq!(config.model, "llama3.2:latest");
assert_eq!(config.temperature, Some(0.7));
assert_eq!(config.system_prompt, None);
assert!(config.sub_agents.is_empty());
// max_tool_calls field removed - agent now tracks iterations instead
}
@@ -258,6 +264,8 @@ fn test_agent_config_custom() {
model: "custom-model".to_string(),
temperature: Some(0.5),
max_tokens: Some(2000),
system_prompt: Some("Custom prompt".to_string()),
sub_agents: Vec::new(),
};
assert_eq!(config.max_iterations, 15);

View File

@@ -6,7 +6,7 @@
use crate::Provider;
use crate::mcp::{McpClient, McpToolCall, McpToolDescriptor, McpToolResponse};
use crate::types::{ChatParameters, ChatRequest, Message};
use crate::{Error, Result};
use crate::{Error, Result, SubAgentSpec};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
@@ -28,6 +28,38 @@ pub enum LlmResponse {
Reasoning { thought: String },
}
/// Build a full system prompt from a caller-supplied base prompt, appending
/// the available tool listing and any sub-agent delegation guidance.
fn assemble_prompt_with_tools_and_subagents(
    base_prompt: &str,
    tools: &[McpToolDescriptor],
    sub_agents: &[SubAgentSpec],
) -> String {
    // Start from the trimmed custom prompt so stray surrounding whitespace
    // from configuration files does not leak into the final prompt.
    let mut prompt = String::from(base_prompt.trim());
    prompt.push_str("\n\nYou have access to the following tools:\n");
    for descriptor in tools.iter() {
        let line = format!("- {}: {}\n", descriptor.name, descriptor.description);
        prompt.push_str(&line);
    }
    append_subagent_guidance(&mut prompt, sub_agents);
    prompt
}
/// Append a delegation section describing each configured sub-agent.
/// Leaves `prompt` untouched when no sub-agents are configured.
fn append_subagent_guidance(prompt: &mut String, sub_agents: &[SubAgentSpec]) {
    if sub_agents.is_empty() {
        return;
    }
    prompt.push_str("\nYou may delegate focused tasks to the following specialised sub-agents:\n");
    for spec in sub_agents {
        // Prefer the human-readable name; fall back to the id.
        let title = match spec.name.as_deref() {
            Some(name) => name,
            None => spec.id.as_str(),
        };
        let summary = spec
            .description
            .as_deref()
            .unwrap_or("No description provided.");
        prompt.push_str(&format!("- {}: {}\n{}\n", title, summary, spec.prompt.trim()));
    }
}
/// Parse error when LLM response doesn't match expected format
#[derive(Debug, thiserror::Error)]
pub enum ParseError {
@@ -63,6 +95,10 @@ pub struct AgentConfig {
pub temperature: Option<f32>,
/// Max tokens per LLM call
pub max_tokens: Option<u32>,
/// Optional override for the system prompt presented to the LLM.
pub system_prompt: Option<String>,
/// Optional sub-agent prompts exposed to the executor.
pub sub_agents: Vec<SubAgentSpec>,
}
impl Default for AgentConfig {
@@ -72,6 +108,8 @@ impl Default for AgentConfig {
model: "llama3.2:latest".to_string(),
temperature: Some(0.7),
max_tokens: Some(4096),
system_prompt: None,
sub_agents: Vec::new(),
}
}
}
@@ -187,6 +225,14 @@ impl AgentExecutor {
/// Build the system prompt with ReAct format and tool descriptions
fn build_system_prompt(&self, tools: &[McpToolDescriptor]) -> String {
if let Some(custom) = &self.config.system_prompt {
return assemble_prompt_with_tools_and_subagents(
custom,
tools,
&self.config.sub_agents,
);
}
let mut prompt = String::from(
"You are an AI assistant that uses the ReAct (Reasoning and Acting) pattern to solve tasks.\n\n\
You have access to the following tools:\n\n",
@@ -213,6 +259,8 @@ impl AgentExecutor {
- Use FINAL_ANSWER only when you have sufficient information\n",
);
append_subagent_guidance(&mut prompt, &self.config.sub_agents);
prompt
}
@@ -233,7 +281,6 @@ impl AgentExecutor {
let response = self.llm_client.send_prompt(request).await?;
Ok(response.message.content)
}
/// Parse LLM response into structured format
pub fn parse_response(&self, text: &str) -> Result<LlmResponse> {
let lines: Vec<&str> = text.lines().collect();

View File

@@ -0,0 +1,462 @@
use crate::{Error, Result};
use serde::Deserialize;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
/// Maximum allowed size (bytes) for an agent prompt file.
const MAX_PROMPT_SIZE_BYTES: usize = 128 * 1024;
/// Definition of a sub-agent that can be referenced by the primary agent prompt.
#[derive(Debug, Clone)]
pub struct SubAgentSpec {
    /// Identifier of the sub-agent (the key of its `[agents.sub_agents.<id>]` table).
    pub id: String,
    /// Optional human-readable name; display falls back to `id` when absent.
    pub name: Option<String>,
    /// Optional one-line summary included in prompt guidance.
    pub description: Option<String>,
    /// Fully resolved prompt text (inline value or file contents, trimmed).
    pub prompt: String,
}
/// Fully resolved agent profile loaded from configuration files.
#[derive(Debug, Clone)]
pub struct AgentProfile {
    /// Unique identifier used for lookup and selection.
    pub id: String,
    /// Optional display name; `display_name()` falls back to `id`.
    pub name: Option<String>,
    /// Optional human-readable description of the profile's purpose.
    pub description: Option<String>,
    /// Resolved system prompt text (inline or loaded from a prompt file).
    pub system_prompt: String,
    /// Optional model override; `None` means use the caller's selected model.
    pub model: Option<String>,
    /// Optional sampling temperature override.
    pub temperature: Option<f32>,
    /// Optional cap on agent loop iterations.
    pub max_iterations: Option<usize>,
    /// Optional cap on tokens per LLM call.
    pub max_tokens: Option<u32>,
    /// Free-form tags from the configuration file.
    pub tags: Vec<String>,
    /// Sub-agent prompts declared under `[agents.sub_agents.*]`.
    pub sub_agents: Vec<SubAgentSpec>,
    /// Path of the TOML file this profile was loaded from.
    pub source_path: PathBuf,
}
impl AgentProfile {
pub fn display_name(&self) -> &str {
self.name.as_deref().unwrap_or(self.id.as_str())
}
}
/// Registry responsible for discovering and loading user-defined agent profiles.
#[derive(Debug, Clone, Default)]
pub struct AgentRegistry {
    /// Profiles in discovery order; overriding definitions replace in place.
    profiles: Vec<AgentProfile>,
    /// Maps profile id -> position in `profiles` for O(1) lookup.
    index: HashMap<String, usize>,
    /// Directories that were scanned; retained so `reload()` can re-scan them.
    search_paths: Vec<PathBuf>,
}
impl AgentRegistry {
    /// Build a registry by discovering configuration in standard locations.
    ///
    /// Search order: the user config directory (e.g. `~/.config/owlen/agents`),
    /// every `.owlen/agents` directory found while walking up from
    /// `project_hint` (or the current directory), and finally each entry of
    /// the `OWLEN_AGENTS_PATH` environment variable. When profile ids collide,
    /// later paths override earlier ones.
    pub fn discover(project_hint: Option<&Path>) -> Result<Self> {
        let mut search_paths = Vec::new();
        if let Some(config_dir) = dirs::config_dir() {
            search_paths.push(config_dir.join("owlen").join("agents"));
        }
        search_paths.extend(discover_project_agent_paths(project_hint));
        if let Ok(env) = std::env::var("OWLEN_AGENTS_PATH") {
            // Treat the variable as a PATH-style list and split on the
            // platform list separator (':' on Unix, ';' on Windows).
            // The previous code split on `std::path::MAIN_SEPARATOR`, which
            // is the *directory* separator ('/' on Unix) and therefore
            // shredded absolute paths like `/home/u/agents` into components.
            for path in std::env::split_paths(&env) {
                if !path.as_os_str().is_empty() {
                    search_paths.push(path);
                }
            }
        }
        Self::load_from_paths(search_paths)
    }

    /// Build the registry from explicit paths.
    pub fn load_from_paths(paths: Vec<PathBuf>) -> Result<Self> {
        let mut registry = Self {
            profiles: Vec::new(),
            index: HashMap::new(),
            // Keep a copy so `reload()` can re-scan the same locations later.
            search_paths: paths.clone(),
        };
        for path in paths {
            registry.load_directory(&path)?;
        }
        Ok(registry)
    }

    /// Return the list of discovered agent profiles, in discovery order.
    pub fn profiles(&self) -> &[AgentProfile] {
        &self.profiles
    }

    /// Return a profile by identifier (exact match).
    pub fn get(&self, id: &str) -> Option<&AgentProfile> {
        self.index.get(id).and_then(|idx| self.profiles.get(*idx))
    }

    /// Reload all search paths, replacing existing profiles.
    ///
    /// # Errors
    /// Returns the first load error encountered; earlier successfully
    /// re-loaded profiles are kept in that case.
    pub fn reload(&mut self) -> Result<()> {
        let paths = self.search_paths.clone();
        self.profiles.clear();
        self.index.clear();
        for path in paths {
            self.load_directory(&path)?;
        }
        Ok(())
    }

    /// Recursively load every `.toml` file beneath `dir` and merge the
    /// resulting profiles into the registry.
    fn load_directory(&mut self, dir: &Path) -> Result<()> {
        if !dir.exists() {
            return Ok(());
        }
        let mut files = Vec::new();
        collect_agent_files(dir, &mut files)?;
        // Sort so load order (and therefore override precedence within a
        // directory) is deterministic regardless of readdir order.
        files.sort();
        for file in files {
            match load_agent_file(&file) {
                Ok(mut profiles) => {
                    for profile in profiles.drain(..) {
                        let id = profile.id.clone();
                        if let Some(existing) = self.index.get(&id).copied() {
                            // Later search paths override earlier ones.
                            self.profiles[existing] = profile;
                        } else {
                            let idx = self.profiles.len();
                            self.profiles.push(profile);
                            self.index.insert(id, idx);
                        }
                    }
                }
                Err(err) => {
                    return Err(Error::Config(format!(
                        "Failed to load agent definition {}: {err}",
                        file.display()
                    )));
                }
            }
        }
        Ok(())
    }
}
fn collect_agent_files(dir: &Path, files: &mut Vec<PathBuf>) -> Result<()> {
if !dir.exists() {
return Ok(());
}
for entry in fs::read_dir(dir).map_err(Error::Io)? {
let entry = entry.map_err(Error::Io)?;
let path = entry.path();
if path.is_dir() {
collect_agent_files(&path, files)?;
} else if path
.extension()
.and_then(|ext| ext.to_str())
.map(|ext| ext.eq_ignore_ascii_case("toml"))
.unwrap_or(false)
{
files.push(path);
}
}
Ok(())
}
/// Collect every existing `.owlen/agents` directory found while walking up
/// from `project_hint` (or the current working directory) to the filesystem
/// root. The starting directory itself is checked first.
fn discover_project_agent_paths(project_hint: Option<&Path>) -> Vec<PathBuf> {
    let mut found = Vec::new();
    let start = project_hint
        .map(Path::to_path_buf)
        .or_else(|| std::env::current_dir().ok());
    if let Some(root) = start {
        // `ancestors()` yields the path itself, then each parent in turn.
        for dir in root.ancestors() {
            let candidate = dir.join(".owlen").join("agents");
            if candidate.exists() {
                found.push(candidate);
            }
        }
    }
    found
}
fn load_agent_file(path: &Path) -> Result<Vec<AgentProfile>> {
let raw = fs::read_to_string(path).map_err(Error::Io)?;
if raw.trim().is_empty() {
return Ok(Vec::new());
}
let document: AgentDocument = toml::from_str(&raw)
.map_err(|err| Error::Config(format!("Unable to parse {}: {err}", path.display())))?;
let mut profiles = Vec::new();
if document.agents.is_empty() {
let single: SingleAgentFile = toml::from_str(&raw).map_err(|err| {
Error::Config(format!(
"Agent definition {} must contain either [[agents]] tables or top-level id/prompt fields: {err}",
path.display()
))
})?;
profiles.push(resolve_agent_entry(path, &single.entry)?);
return Ok(profiles);
}
for entry in document.agents {
profiles.push(resolve_agent_entry(path, &entry)?);
}
Ok(profiles)
}
/// Convert a parsed `AgentEntry` into a fully resolved `AgentProfile`,
/// reading any referenced prompt files relative to the TOML file's directory.
///
/// # Errors
/// Returns `Error::Config` when the entry has no `prompt`, or when a prompt
/// file cannot be read/decoded.
fn resolve_agent_entry(path: &Path, entry: &AgentEntry) -> Result<AgentProfile> {
    // Prompt files are resolved relative to the directory of the TOML file.
    let base_dir = path
        .parent()
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("."));
    let system_prompt = entry
        .prompt
        .as_ref()
        .ok_or_else(|| {
            Error::Config(format!(
                "Agent '{}' in {} is missing a `prompt` value",
                entry.id,
                path.display()
            ))
        })?
        .resolve(&base_dir)?;
    // `entry.sub_agents` is a HashMap, whose iteration order is
    // nondeterministic. Sort by id so the resolved list — and therefore any
    // prompt text assembled from it — is stable across runs.
    let mut named: Vec<_> = entry.sub_agents.iter().collect();
    named.sort_by(|(a, _), (b, _)| a.cmp(b));
    let mut sub_agents = Vec::with_capacity(named.len());
    for (id, sub) in named {
        let prompt = sub.prompt.resolve(&base_dir)?;
        sub_agents.push(SubAgentSpec {
            id: id.clone(),
            name: sub.name.clone(),
            description: sub.description.clone(),
            prompt,
        });
    }
    Ok(AgentProfile {
        id: entry.id.clone(),
        name: entry.name.clone(),
        description: entry.description.clone(),
        system_prompt,
        model: entry.parameters.as_ref().and_then(|p| p.model.clone()),
        temperature: entry.parameters.as_ref().and_then(|p| p.temperature),
        max_iterations: entry.parameters.as_ref().and_then(|p| p.max_iterations),
        max_tokens: entry.parameters.as_ref().and_then(|p| p.max_tokens),
        tags: entry.tags.clone().unwrap_or_default(),
        sub_agents,
        source_path: path.to_path_buf(),
    })
}
/// On-disk schema for a multi-agent definition file (`[[agents]]` tables).
#[derive(Debug, Deserialize)]
struct AgentDocument {
    /// Schema version, declared on disk as `version = "…"`. The `rename` is
    /// required because the Rust field is `_version` while the TOML key is
    /// `version`; without it serde looked for a `_version` key and the
    /// declared version was silently ignored. Currently informational only.
    #[serde(rename = "version", default = "default_schema_version")]
    _version: String,
    /// Agent entries; may be empty, in which case the loader retries the
    /// file as a flat single-agent document.
    #[serde(default)]
    agents: Vec<AgentEntry>,
}
/// On-disk schema for a flat single-agent definition file (top-level
/// `id`/`prompt` fields instead of `[[agents]]` tables).
#[derive(Debug, Deserialize)]
struct SingleAgentFile {
    /// Schema version (`version = "…"` on disk); see `AgentDocument::_version`
    /// for why the serde rename is needed. Currently informational only.
    #[serde(rename = "version", default = "default_schema_version")]
    _version: String,
    /// All remaining top-level keys form the single agent entry.
    #[serde(flatten)]
    entry: AgentEntry,
}
fn default_schema_version() -> String {
"1".to_string()
}
/// One agent definition as it appears in a TOML file, before prompt resolution.
#[derive(Debug, Deserialize)]
struct AgentEntry {
    /// Required unique identifier.
    id: String,
    /// Optional display name.
    #[serde(default)]
    name: Option<String>,
    /// Optional human-readable description.
    #[serde(default)]
    description: Option<String>,
    /// Optional free-form tags.
    #[serde(default)]
    tags: Option<Vec<String>>,
    /// System prompt; optional at parse time but required by
    /// `resolve_agent_entry`, which errors when it is missing.
    #[serde(default)]
    prompt: Option<PromptSpec>,
    /// Optional model/sampling overrides.
    #[serde(default)]
    parameters: Option<AgentParameters>,
    /// Sub-agent definitions keyed by id (`[agents.sub_agents.<id>]`).
    #[serde(default)]
    sub_agents: HashMap<String, SubAgentEntry>,
}
/// Optional per-agent execution overrides (`[agents.parameters]` table).
#[derive(Debug, Deserialize)]
struct AgentParameters {
    /// Model identifier override.
    #[serde(default)]
    model: Option<String>,
    /// Sampling temperature override.
    #[serde(default)]
    temperature: Option<f32>,
    /// Cap on agent loop iterations.
    #[serde(default)]
    max_iterations: Option<usize>,
    /// Cap on tokens per LLM call.
    #[serde(default)]
    max_tokens: Option<u32>,
}
/// One sub-agent definition as it appears under `[agents.sub_agents.<id>]`.
#[derive(Debug, Deserialize)]
struct SubAgentEntry {
    /// Optional display name; falls back to the table key at resolution time.
    #[serde(default)]
    name: Option<String>,
    /// Optional one-line description.
    #[serde(default)]
    description: Option<String>,
    /// Required prompt (inline string or `{ file = "…" }`).
    prompt: PromptSpec,
}
/// A prompt value in a definition file: either an inline string or a
/// reference to an external file (`prompt = { file = "…" }`).
///
/// Being `untagged`, serde tries the variants in declaration order, so a bare
/// TOML string becomes `Inline` and a table with a `file` key becomes `Source`.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PromptSpec {
    Inline(String),
    Source { file: String },
}
impl PromptSpec {
fn resolve(&self, base_dir: &Path) -> Result<String> {
match self {
PromptSpec::Inline(value) => Ok(value.trim().to_string()),
PromptSpec::Source { file } => {
let path = if Path::new(file).is_absolute() {
PathBuf::from(file)
} else {
base_dir.join(file)
};
let data = fs::read(&path).map_err(Error::Io)?;
if data.len() > MAX_PROMPT_SIZE_BYTES {
return Err(Error::Config(format!(
"Prompt file {} exceeds the maximum supported size ({MAX_PROMPT_SIZE_BYTES} bytes)",
path.display()
)));
}
let text = String::from_utf8(data).map_err(|_| {
Error::Config(format!("Prompt file {} is not valid UTF-8", path.display()))
})?;
Ok(text.trim().to_string())
}
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;
    use tempfile::tempdir;

    /// A multi-agent document with `[[agents]]`, parameters, and one
    /// sub-agent loads into a complete profile.
    #[test]
    fn load_simple_agent() {
        let dir = tempdir().expect("temp dir");
        let agent_dir = dir.path().join("agents");
        fs::create_dir_all(&agent_dir).unwrap();
        let mut file = fs::File::create(agent_dir.join("support.toml")).unwrap();
        writeln!(
            file,
            r#"
version = "1"
[[agents]]
id = "support"
name = "Support Specialist"
description = "Handles user support tickets."
prompt = "You are a helpful support assistant."
[agents.parameters]
model = "gpt-4"
max_iterations = 8
temperature = 0.2
[agents.sub_agents.first_line]
name = "First-line support"
description = "Handles simple issues"
prompt = "Escalate complex issues."
"#
        )
        .unwrap();
        let registry = AgentRegistry::load_from_paths(vec![agent_dir]).unwrap();
        assert_eq!(registry.profiles.len(), 1);
        let profile = registry.get("support").unwrap();
        // Display name prefers the configured `name` over the id.
        assert_eq!(profile.display_name(), "Support Specialist");
        assert_eq!(
            profile.system_prompt,
            "You are a helpful support assistant."
        );
        assert_eq!(profile.model.as_deref(), Some("gpt-4"));
        assert_eq!(profile.max_iterations, Some(8));
        // The sub-agent id comes from the `[agents.sub_agents.<id>]` key.
        assert_eq!(profile.sub_agents.len(), 1);
        assert_eq!(profile.sub_agents[0].id, "first_line");
    }

    /// `prompt = { file = "…" }` with a relative path resolves against the
    /// directory containing the TOML file.
    #[test]
    fn prompt_from_file_resolves_relative_path() {
        let dir = tempdir().expect("temp dir");
        let agent_dir = dir.path().join(".owlen").join("agents");
        let prompt_dir = agent_dir.join("prompts");
        fs::create_dir_all(&prompt_dir).unwrap();
        fs::write(
            prompt_dir.join("researcher.md"),
            "Research the latest documentation updates.",
        )
        .unwrap();
        fs::write(
            agent_dir.join("doc.toml"),
            r#"
version = "1"
[[agents]]
id = "docs"
prompt = { file = "prompts/researcher.md" }
"#,
        )
        .unwrap();
        let registry = AgentRegistry::load_from_paths(vec![agent_dir]).unwrap();
        let profile = registry.get("docs").unwrap();
        assert_eq!(
            profile.system_prompt,
            "Research the latest documentation updates."
        );
    }

    /// A file with top-level id/prompt fields (no `[[agents]]` tables) is
    /// accepted via the flat single-agent fallback parse.
    #[test]
    fn load_agent_from_flat_document() {
        let dir = tempdir().expect("temp dir");
        let agent_dir = dir.path().join("agents");
        fs::create_dir_all(&agent_dir).unwrap();
        fs::write(
            agent_dir.join("flat.toml"),
            r#"
version = "1"
id = "flat"
name = "Flat Agent"
prompt = "Operate using flat configuration."
"#,
        )
        .unwrap();
        let registry = AgentRegistry::load_from_paths(vec![agent_dir]).unwrap();
        let profile = registry.get("flat").expect("profile present");
        assert_eq!(profile.display_name(), "Flat Agent");
        assert_eq!(profile.system_prompt, "Operate using flat configuration.");
    }
}

View File

@@ -6,6 +6,7 @@
//! LLM providers, routers, and MCP (Model Context Protocol) adapters.
pub mod agent;
pub mod agent_registry;
pub mod config;
pub mod consent;
pub mod conversation;
@@ -35,6 +36,7 @@ pub mod validation;
pub mod wrap_cursor;
pub use agent::*;
pub use agent_registry::*;
pub use config::*;
pub use consent::*;
pub use conversation::*;

View File

@@ -75,6 +75,7 @@ use owlen_core::config::{
OLLAMA_CLOUD_BASE_URL, OLLAMA_CLOUD_ENDPOINT_KEY, OLLAMA_MODE_KEY,
};
use owlen_core::credentials::{ApiCredentials, OLLAMA_CLOUD_CREDENTIAL_ID};
use owlen_core::{AgentProfile, AgentRegistry};
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
use std::collections::hash_map::DefaultHasher;
@@ -819,6 +820,10 @@ pub struct ChatApp {
agent_mode: bool,
/// Agent running flag
agent_running: bool,
/// Loaded agent profiles from configuration
agent_registry: AgentRegistry,
/// Currently selected agent profile identifier
active_agent_id: Option<String>,
/// Operating mode (Chat or Code)
operating_mode: owlen_core::mode::Mode,
/// Flag indicating new messages arrived while scrolled away from tail
@@ -1091,11 +1096,19 @@ impl ChatApp {
}
let workspace_root = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
let file_tree = FileTreeState::new(workspace_root);
let file_tree = FileTreeState::new(workspace_root.clone());
let file_icons = FileIconResolver::from_mode(icon_mode);
install_global_logger();
let agent_registry = AgentRegistry::discover(Some(&workspace_root)).unwrap_or_else(|err| {
eprintln!(
"Warning: failed to load agent configurations from {}: {err}",
workspace_root.display()
);
AgentRegistry::default()
});
let mut app = Self {
controller,
mode: if show_onboarding {
@@ -1202,6 +1215,8 @@ impl ChatApp {
_execution_budget: 50,
agent_mode: false,
agent_running: false,
agent_registry,
active_agent_id: None,
operating_mode: owlen_core::mode::Mode::default(),
new_message_alert: false,
show_cursor_outside_insert,
@@ -2978,6 +2993,90 @@ impl ChatApp {
}
}
/// Look up the currently selected agent profile, if any id is set and the
/// registry still knows it.
fn active_agent_profile(&self) -> Option<&AgentProfile> {
    let id = self.active_agent_id.as_deref()?;
    self.agent_registry.get(id)
}
/// Guarantee that some agent profile is selected before arming the agent.
///
/// If nothing is selected, the first discovered profile is adopted as the
/// default. Errors (and sets `self.error`/`self.status`) when the registry
/// is empty.
fn ensure_active_agent(&mut self) -> Result<()> {
    if self.active_agent_profile().is_some() {
        return Ok(());
    }
    match self.agent_registry.profiles().first() {
        Some(profile) => {
            let display_name = profile.display_name().to_string();
            let id = profile.id.clone();
            self.active_agent_id = Some(id);
            self.error = None;
            self.set_system_status(format!("🤖 Ready · {}", display_name));
            Ok(())
        }
        None => {
            let message = "No agent profiles found. Create .owlen/agents/*.toml or ~/.config/owlen/agents/*.toml";
            self.error = Some(message.to_string());
            self.status = message.to_string();
            Err(anyhow!(message))
        }
    }
}
/// Select an agent by user-supplied query (`:agent use <id>`).
///
/// Matches the profile id case-insensitively, or the display name
/// case-insensitively. Updates status/error state on success.
///
/// # Errors
/// Fails when the query is empty or matches no known profile.
fn set_active_agent_from_query(&mut self, query: &str) -> Result<()> {
    let trimmed = query.trim();
    if trimmed.is_empty() {
        return Err(anyhow!("Usage: :agent use <id>"));
    }
    let lookup = trimmed.to_ascii_lowercase();
    let profile = self
        .agent_registry
        .profiles()
        .iter()
        .find(|profile| {
            profile.id.eq_ignore_ascii_case(trimmed)
                || profile.display_name().to_ascii_lowercase() == lookup
        })
        .ok_or_else(|| {
            // `anyhow!` formats directly; wrapping `format!` in it was redundant.
            anyhow!("Unknown agent '{trimmed}'. Use :agent list to view available agents.")
        })?;
    let id = profile.id.clone();
    let display_name = profile.display_name().to_string();
    self.active_agent_id = Some(id);
    self.status = format!("Active agent: {}", display_name);
    self.error = None;
    self.set_system_status(format!("🤖 Ready · {}", display_name));
    Ok(())
}
/// Render a human-readable listing of all known agent profiles, one per
/// line, marking the active profile with `*`.
fn describe_agents(&self) -> String {
    if self.agent_registry.profiles().is_empty() {
        return "No agent profiles found. Add .toml files under ~/.config/owlen/agents or ./.owlen/agents.".to_string();
    }
    self.agent_registry
        .profiles()
        .iter()
        .map(|profile| {
            let is_active = self
                .active_agent_id
                .as_deref()
                .map(|id| id.eq_ignore_ascii_case(&profile.id))
                .unwrap_or(false);
            let marker = if is_active { '*' } else { ' ' };
            let label = profile.name.as_deref().unwrap_or("(unnamed)");
            let description = profile.description.as_deref().unwrap_or("");
            // Join id, label, and description with " · ". Previously they
            // were concatenated with no separator, producing run-together
            // output like "supportSupport Specialist".
            if description.is_empty() {
                format!("{marker} {} · {label}", profile.id)
            } else {
                format!("{marker} {} · {label} · {description}", profile.id)
            }
        })
        .collect::<Vec<_>>()
        .join("\n")
}
/// Prune the toast list by delegating to its `retain_active` method
/// (presumably dropping toasts whose display window has elapsed — the
/// semantics live in the toast type).
fn prune_toasts(&mut self) {
    self.toasts.retain_active();
}
@@ -8592,7 +8691,7 @@ impl ChatApp {
// "run-agent" command removed to break circular dependency on owlen-cli.
"agent" => {
if let Some(subcommand) = args.first() {
match subcommand.to_lowercase().as_str() {
match subcommand.to_ascii_lowercase().as_str() {
"status" => {
let armed =
if self.agent_mode { "armed" } else { "idle" };
@@ -8601,21 +8700,105 @@ impl ChatApp {
} else {
"stopped"
};
self.status =
format!("Agent status: {armed} · {running}");
let agent_label = self
.active_agent_profile()
.map(|profile| {
profile.display_name().to_string()
})
.unwrap_or_else(|| "(none)".to_string());
self.status = format!(
"Agent status: {armed} · {running} · active: {agent_label}"
);
self.error = None;
}
"list" => {
let listing = self.describe_agents();
self.status = listing
.lines()
.next()
.unwrap_or("No agent profiles found.")
.to_string();
self.error = None;
self.push_toast_with_hint(
ToastLevel::Info,
listing,
":agent use <id>",
);
}
"use" => {
if args.len() < 2 {
self.error =
Some("Usage: :agent use <id>".to_string());
} else {
let target = args[1..].join(" ");
if let Err(err) =
self.set_active_agent_from_query(&target)
{
self.error = Some(err.to_string());
self.status =
"Failed to select agent".to_string();
}
}
}
"reload" => match self.agent_registry.reload() {
Ok(()) => {
if self
.active_agent_id
.as_deref()
.and_then(|id| self.agent_registry.get(id))
.is_none()
{
self.active_agent_id = None;
self.set_system_status(
"🤖 Idle".to_string(),
);
} else if let Some(profile) =
self.active_agent_profile()
{
self.set_system_status(format!(
"🤖 Ready · {}",
profile.display_name()
));
}
let count =
self.agent_registry.profiles().len();
self.status = format!(
"Reloaded agent profiles ({count})"
);
self.error = None;
}
Err(err) => {
let message =
format!("Failed to reload agents: {err}");
self.error = Some(message.clone());
self.status = "Agent reload failed".to_string();
self.push_toast(ToastLevel::Error, message);
}
},
"start" | "arm" => {
if self.agent_running {
self.status =
"Agent is already running".to_string();
} else {
} else if let Err(err) = self.ensure_active_agent()
{
self.error = Some(err.to_string());
} else if let Some(display_name) = self
.active_agent_profile()
.map(|p| p.display_name().to_string())
{
self.agent_mode = true;
self.status = "Agent armed. Next message will be processed by the agent.".to_string();
self.status = format!(
"Agent '{}' armed. Next message will run it.",
display_name
);
self.error = None;
self.set_system_status(format!(
"🤖 Ready · {}",
display_name
));
}
}
"stop" => {
"stop" | "disarm" => {
if self.agent_running {
self.agent_running = false;
self.agent_mode = false;
@@ -8623,11 +8806,13 @@ impl ChatApp {
self.status =
"Agent execution stopped".to_string();
self.error = None;
self.set_system_status("🤖 Idle".to_string());
} else if self.agent_mode {
self.agent_mode = false;
self.agent_actions = None;
self.status = "Agent disarmed".to_string();
self.error = None;
self.set_system_status("🤖 Idle".to_string());
} else {
self.status =
"No agent is currently running".to_string();
@@ -8640,11 +8825,26 @@ impl ChatApp {
}
} else if self.agent_running {
self.status = "Agent is already running".to_string();
} else {
} else if let Err(err) = self.ensure_active_agent() {
self.error = Some(err.to_string());
} else if let Some(display_name) = self
.active_agent_profile()
.map(|p| p.display_name().to_string())
{
self.agent_mode = true;
self.status = "Agent mode enabled. Next message will be processed by agent.".to_string();
self.status = format!(
"Agent '{}' armed. Next message will be processed by the agent.",
display_name
);
self.error = None;
self.set_system_status(format!(
"🤖 Ready · {}",
display_name
));
}
self.set_input_mode(InputMode::Normal);
self.command_palette.clear();
return Ok(AppState::Running);
}
"stop-agent" => {
if self.agent_running {
@@ -12261,10 +12461,6 @@ impl ChatApp {
use owlen_core::mcp::remote_client::RemoteMcpClient;
use std::sync::Arc;
self.agent_running = true;
self.status = "Agent is running...".to_string();
self.start_loading_animation();
// Get the last user message
let user_message = self
.controller
@@ -12276,14 +12472,50 @@ impl ChatApp {
.map(|m| m.content.clone())
.unwrap_or_default();
// Create agent config
let config = AgentConfig {
max_iterations: 10,
model: self.controller.selected_model().to_string(),
temperature: Some(0.7),
max_tokens: None,
let profile = match self.active_agent_profile().cloned() {
Some(profile) => profile,
None => {
if self.agent_registry.profiles().is_empty() {
self.error = Some(
"No agent profiles configured. Add files under .owlen/agents or ~/.config/owlen/agents.".to_string(),
);
} else {
self.error = Some(
"No active agent selected. Use :agent use <id> to choose one.".to_string(),
);
}
self.agent_running = false;
self.agent_mode = false;
self.stop_loading_animation();
return Ok(());
}
};
let selected_model = self.controller.selected_model().to_string();
let mut config = AgentConfig {
model: profile.model.clone().unwrap_or(selected_model),
system_prompt: Some(profile.system_prompt.clone()),
sub_agents: profile.sub_agents.clone(),
..AgentConfig::default()
};
if let Some(iterations) = profile.max_iterations {
config.max_iterations = iterations;
}
if let Some(temp) = profile.temperature {
config.temperature = Some(temp);
}
if let Some(max_tokens) = profile.max_tokens {
config.max_tokens = Some(max_tokens);
}
let agent_label = profile.display_name().to_string();
self.agent_running = true;
self.status = format!("Agent '{}' is running...", agent_label);
self.error = None;
self.set_system_status(format!("🤖 Working · {}", agent_label));
self.start_loading_animation();
// Get the provider
let provider = self.controller.provider().clone();
@@ -12312,7 +12544,11 @@ impl ChatApp {
self.agent_running = false;
self.agent_mode = false;
self.agent_actions = None;
self.status = format!("Agent completed in {} iterations", result.iterations);
self.status = format!(
"Agent '{}' completed in {} iterations",
agent_label, result.iterations
);
self.set_system_status(format!("🤖 Complete · {}", agent_label));
self.stop_loading_animation();
if let Some(active) = self.active_command.as_mut() {
active.record_response(message_id);
@@ -12322,12 +12558,13 @@ impl ChatApp {
Ok(())
}
Err(e) => {
let message = format!("Agent failed: {}", e);
let message = format!("Agent '{}' failed: {}", agent_label, e);
self.error = Some(message.clone());
self.agent_running = false;
self.agent_mode = false;
self.agent_actions = None;
self.stop_loading_animation();
self.set_system_status(format!("🤖 Failed · {}", agent_label));
self.mark_active_command_failed(Some(message));
Ok(())
}