fix: restore mcp flexibility and improve cli tooling

This commit is contained in:
2025-10-11 06:11:22 +02:00
parent 40c44470e8
commit 5ac0d152cb
19 changed files with 998 additions and 162 deletions

View File

@@ -26,6 +26,8 @@ required-features = ["chat-client"]
owlen-core = { path = "../owlen-core" }
# Optional TUI dependency, enabled by the "chat-client" feature.
owlen-tui = { path = "../owlen-tui", optional = true }
owlen-ollama = { path = "../owlen-ollama" }
log = "0.4"
# CLI framework
clap = { version = "4.0", features = ["derive"] }
@@ -45,3 +47,7 @@ serde_json = { workspace = true }
regex = "1"
thiserror = "1"
dirs = "5"
[dev-dependencies]
tokio = { workspace = true }
tokio-test = { workspace = true }

31
crates/owlen-cli/build.rs Normal file
View File

@@ -0,0 +1,31 @@
use std::process::Command;
/// Extract the `(major, minor, patch)` triple from a rustc version string
/// such as "1.82.0" or "1.82.0-nightly". Components that fail to parse
/// default to 0, so garbage input compares as an old toolchain.
fn parse_version(version_str: &str) -> (u32, u32, u32) {
    // Strip any pre-release/channel suffix ("-nightly", "-beta.4", ...).
    let sanitized = version_str.split('-').next().unwrap_or(version_str);
    let mut parts = sanitized
        .split('.')
        .map(|part| part.parse::<u32>().unwrap_or(0));
    (
        parts.next().unwrap_or(0),
        parts.next().unwrap_or(0),
        parts.next().unwrap_or(0),
    )
}

/// Build-time guard: abort compilation with a clear message when the active
/// rustc is older than the minimum supported toolchain.
fn main() {
    const MIN_VERSION: (u32, u32, u32) = (1, 75, 0);
    // Only re-run this script when the toolchain selection changes,
    // instead of on every build.
    println!("cargo:rerun-if-env-changed=RUSTC");
    // Honour the RUSTC override used by cargo wrappers (sccache, cross, ...).
    let rustc = std::env::var("RUSTC").unwrap_or_else(|_| "rustc".into());
    let output = Command::new(&rustc)
        .arg("--version")
        .output()
        .expect("failed to invoke rustc");
    // Typical output: "rustc 1.82.0 (f6e511eec 2024-10-15)" — the version is
    // the second whitespace-separated token.
    let version_line = String::from_utf8_lossy(&output.stdout);
    let version_str = version_line.split_whitespace().nth(1).unwrap_or("0.0.0");
    // Tuple comparison is lexicographic, which matches semver ordering here.
    let current = parse_version(version_str);
    if current < MIN_VERSION {
        panic!(
            "owlen requires rustc {}.{}.{} or newer (found {version_line})",
            MIN_VERSION.0, MIN_VERSION.1, MIN_VERSION.2
        );
    }
}

View File

@@ -1,11 +1,17 @@
//! OWLEN CLI - Chat TUI client
use anyhow::Result;
use clap::Parser;
use clap::{Parser, Subcommand};
use owlen_core::config as core_config;
use owlen_core::{
mcp::remote_client::RemoteMcpClient, mode::Mode, session::SessionController,
storage::StorageManager, Provider,
config::{Config, McpMode},
mcp::remote_client::RemoteMcpClient,
mode::Mode,
session::SessionController,
storage::StorageManager,
Provider,
};
use owlen_ollama::OllamaProvider;
use owlen_tui::tui_controller::{TuiController, TuiRequest};
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
use std::io;
@@ -28,17 +34,216 @@ struct Args {
/// Start in code mode (enables all tools)
#[arg(long, short = 'c')]
code: bool,
#[command(subcommand)]
command: Option<OwlenCommand>,
}
// Top-level subcommands accepted by the `owlen` binary. When no subcommand is
// supplied, the TUI chat client starts instead.
// NOTE: the `///` doc comments below are surfaced verbatim by clap as CLI help
// text, so they are user-facing strings, not just documentation.
#[derive(Debug, Subcommand)]
enum OwlenCommand {
    /// Inspect or upgrade configuration files
    #[command(subcommand)]
    Config(ConfigCommand),
    /// Show manual steps for updating Owlen to the latest revision
    Upgrade,
}
// Subcommands under `owlen config`. As with the other clap enums, the `///`
// doc comments double as the CLI help text shown to users.
#[derive(Debug, Subcommand)]
enum ConfigCommand {
    /// Automatically upgrade legacy configuration values and ensure validity
    Doctor,
    /// Print the resolved configuration file path
    Path,
}
/// Construct the chat provider dictated by `[mcp].mode` in the configuration.
///
/// * `remote_preferred` — try the remote MCP client first; when
///   `allow_fallback` is set, degrade to a local provider on failure.
/// * `remote_only` — require a configured `[[mcp_servers]]` entry.
/// * `local_only` / `legacy` — always build the local provider.
/// * `disabled` — rejected, since the TUI cannot run without a provider.
fn build_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
    match cfg.mcp.mode {
        McpMode::RemotePreferred => {
            // Prefer an explicitly configured server; otherwise rely on the
            // client's default discovery.
            let attempt = match cfg.mcp_servers.first() {
                Some(server) => RemoteMcpClient::new_with_config(server),
                None => RemoteMcpClient::new(),
            };
            match attempt {
                Ok(client) => Ok(Arc::new(client) as Arc<dyn Provider>),
                Err(err) if cfg.mcp.allow_fallback => {
                    log::warn!(
                        "Remote MCP client unavailable ({}); falling back to local provider.",
                        err
                    );
                    build_local_provider(cfg)
                }
                Err(err) => Err(anyhow::Error::from(err)),
            }
        }
        McpMode::RemoteOnly => {
            // Remote-only mode refuses to start without at least one server.
            let server = cfg.mcp_servers.first().ok_or_else(|| {
                anyhow::anyhow!(
                    "[[mcp_servers]] must be configured when [mcp].mode = \"remote_only\""
                )
            })?;
            let client = RemoteMcpClient::new_with_config(server)?;
            Ok(Arc::new(client) as Arc<dyn Provider>)
        }
        McpMode::LocalOnly | McpMode::Legacy => build_local_provider(cfg),
        McpMode::Disabled => Err(anyhow::anyhow!(
            "MCP mode 'disabled' is not supported by the owlen TUI"
        )),
    }
}
/// Build a provider that talks to a local backend (currently Ollama only),
/// selected by `[general].default_provider`.
///
/// # Errors
/// Fails when the named provider has no `[providers]` entry or when its
/// `provider_type` is something other than the supported Ollama variants.
fn build_local_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
    // Borrow the name instead of cloning it; it is only read below.
    let provider_name = &cfg.general.default_provider;
    // anyhow! formats its arguments directly — wrapping the message in
    // format!() was redundant.
    let provider_cfg = cfg.provider(provider_name).ok_or_else(|| {
        anyhow::anyhow!("No provider configuration found for '{provider_name}' in [providers]")
    })?;
    match provider_cfg.provider_type.as_str() {
        "ollama" | "ollama-cloud" => {
            let provider = OllamaProvider::from_config(provider_cfg, Some(&cfg.general))?;
            Ok(Arc::new(provider) as Arc<dyn Provider>)
        }
        other => Err(anyhow::anyhow!(
            "Provider type '{other}' is not supported in legacy/local MCP mode"
        )),
    }
}
/// Dispatch a top-level CLI subcommand; the process exits without ever
/// starting the TUI when one of these runs.
fn run_command(command: OwlenCommand) -> Result<()> {
    match command {
        OwlenCommand::Config(cmd) => run_config_command(cmd),
        OwlenCommand::Upgrade => {
            // Owlen ships no self-updater — print the manual recipes instead.
            println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
            println!(
                "If you installed from the AUR, use your package manager (e.g., yay -S owlen-git)."
            );
            Ok(())
        }
    }
}
/// Handle the `owlen config …` family of subcommands.
fn run_config_command(command: ConfigCommand) -> Result<()> {
    match command {
        ConfigCommand::Doctor => run_config_doctor(),
        ConfigCommand::Path => {
            // Print where the resolved configuration file lives.
            println!("{}", core_config::default_config_path().display());
            Ok(())
        }
    }
}
/// `owlen config doctor`: load (or create) the configuration, repair legacy
/// or inconsistent values, validate, persist the result, and report every
/// change that was applied.
///
/// # Errors
/// Propagates validation or save failures from the config layer.
fn run_config_doctor() -> Result<()> {
    let config_path = core_config::default_config_path();
    let existed = config_path.exists();
    // A missing or unreadable file is repaired by starting from defaults.
    // (`unwrap_or_default()` replaces the redundant `|| Config::default()`.)
    let mut config = config::try_load_config().unwrap_or_default();
    let mut changes = Vec::new();
    if !existed {
        changes.push("created configuration file from defaults".to_string());
    }
    // The default provider must reference an existing [providers] entry.
    if !config
        .providers
        .contains_key(&config.general.default_provider)
    {
        config.general.default_provider = "ollama".to_string();
        changes.push("default provider missing; reset to 'ollama'".to_string());
    }
    // Guarantee that both built-in Ollama provider entries exist.
    if !config.providers.contains_key("ollama") {
        core_config::ensure_provider_config(&mut config, "ollama");
        changes.push("added default ollama provider configuration".to_string());
    }
    if !config.providers.contains_key("ollama-cloud") {
        core_config::ensure_provider_config(&mut config, "ollama-cloud");
        changes.push("added default ollama-cloud provider configuration".to_string());
    }
    // Migrate MCP settings that are legacy or internally inconsistent.
    match config.mcp.mode {
        McpMode::Legacy => {
            config.mcp.mode = McpMode::LocalOnly;
            config.mcp.warn_on_legacy = true;
            changes.push("converted [mcp].mode = 'legacy' to 'local_only'".to_string());
        }
        McpMode::RemoteOnly if config.mcp_servers.is_empty() => {
            // remote_only without servers can never start; soften it.
            config.mcp.mode = McpMode::RemotePreferred;
            config.mcp.allow_fallback = true;
            changes.push(
                "downgraded remote-only configuration to remote_preferred because no servers are defined"
                    .to_string(),
            );
        }
        McpMode::RemotePreferred if !config.mcp.allow_fallback && config.mcp_servers.is_empty() => {
            // Without fallback or servers the preferred mode would dead-end.
            config.mcp.allow_fallback = true;
            changes.push(
                "enabled [mcp].allow_fallback because no remote servers are configured".to_string(),
            );
        }
        _ => {}
    }
    config.validate()?;
    config::save_config(&config)?;
    if changes.is_empty() {
        println!(
            "Configuration already up to date: {}",
            config_path.display()
        );
    } else {
        println!("Updated {}:", config_path.display());
        for change in changes {
            println!(" - {change}");
        }
    }
    Ok(())
}
/// Emit a stderr hint when $TERM/$COLORTERM suggest the terminal lacks
/// 256-color support, which would degrade the TUI's themes.
fn warn_if_limited_terminal() {
    const FALLBACK_TERM: &str = "unknown";
    let term = std::env::var("TERM").unwrap_or_else(|_| FALLBACK_TERM.to_string());
    let colorterm = std::env::var("COLORTERM").unwrap_or_default();
    // Probe capabilities case-insensitively against common marker substrings.
    let term_l = term.to_lowercase();
    let color_l = colorterm.to_lowercase();
    let rich_colors = term_l.contains("256color")
        || ["truecolor", "24bit"].iter().any(|m| color_l.contains(m));
    if rich_colors {
        return;
    }
    eprintln!(
        "Warning: terminal '{}' may not fully support 256-color themes. \
        Consider using a terminal with truecolor support for the best experience.",
        term
    );
}
#[tokio::main(flavor = "multi_thread")]
async fn main() -> Result<()> {
// Parse command-line arguments
let args = Args::parse();
let initial_mode = if args.code { Mode::Code } else { Mode::Chat };
let Args { code, command } = Args::parse();
if let Some(command) = command {
return run_command(command);
}
let initial_mode = if code { Mode::Code } else { Mode::Chat };
// Set auto-consent for TUI mode to prevent blocking stdin reads
std::env::set_var("OWLEN_AUTO_CONSENT", "1");
warn_if_limited_terminal();
let (tui_tx, _tui_rx) = mpsc::unbounded_channel::<TuiRequest>();
let tui_controller = Arc::new(TuiController::new(tui_tx));
@@ -46,15 +251,23 @@ async fn main() -> Result<()> {
let mut cfg = config::try_load_config().unwrap_or_default();
// Disable encryption for CLI to avoid password prompts in this environment.
cfg.privacy.encrypt_local_data = false;
cfg.validate()?;
// Create MCP LLM client as the provider (replaces direct OllamaProvider usage)
let provider: Arc<dyn Provider> = if let Some(mcp_server) = cfg.mcp_servers.first() {
// Use configured MCP server if available
Arc::new(RemoteMcpClient::new_with_config(mcp_server)?)
} else {
// Fall back to default MCP LLM server discovery
Arc::new(RemoteMcpClient::new()?)
};
// Create provider according to MCP configuration (supports legacy/local fallback)
let provider = build_provider(&cfg)?;
if let Err(err) = provider.health_check().await {
let hint = if matches!(cfg.mcp.mode, McpMode::RemotePreferred | McpMode::RemoteOnly)
&& !cfg.mcp_servers.is_empty()
{
"Ensure the configured MCP server is running and reachable."
} else {
"Ensure Ollama is running (`ollama serve`) and reachable at the configured base_url."
};
return Err(anyhow::anyhow!(format!(
"Provider health check failed: {err}. {hint}"
)));
}
let storage = Arc::new(StorageManager::new().await?);
let controller =

View File

@@ -38,7 +38,7 @@ async fn test_react_parsing_tool_call() {
async fn test_react_parsing_final_answer() {
let executor = create_test_executor();
let text = "THOUGHT: I have enough information now\nACTION: final_answer\nACTION_INPUT: The answer is 42\n";
let text = "THOUGHT: I have enough information now\nFINAL_ANSWER: The answer is 42\n";
let result = executor.parse_response(text);
@@ -244,8 +244,8 @@ fn create_test_executor() -> AgentExecutor {
fn test_agent_config_defaults() {
let config = AgentConfig::default();
assert_eq!(config.max_iterations, 10);
assert_eq!(config.model, "ollama");
assert_eq!(config.max_iterations, 15);
assert_eq!(config.model, "llama3.2:latest");
assert_eq!(config.temperature, Some(0.7));
// max_tool_calls field removed - agent now tracks iterations instead
}