Compare commits
2 Commits
38aba1a6bb
...
fab63d224b
| Author | SHA1 | Date | |
|---|---|---|---|
| fab63d224b | |||
| 15e5c1206b |
@@ -19,6 +19,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
- Global F1 keybinding for the in-app help overlay and a clearer status hint on launch.
|
- Global F1 keybinding for the in-app help overlay and a clearer status hint on launch.
|
||||||
- Automatic fallback to the new `ansi_basic` theme when the active terminal only advertises 16-color support.
|
- Automatic fallback to the new `ansi_basic` theme when the active terminal only advertises 16-color support.
|
||||||
- Offline provider shim that keeps the TUI usable while primary providers are unreachable and communicates recovery steps inline.
|
- Offline provider shim that keeps the TUI usable while primary providers are unreachable and communicates recovery steps inline.
|
||||||
|
- `owlen cloud` subcommands (`setup`, `status`, `models`, `logout`) for managing Ollama Cloud credentials without hand-editing config files.
|
||||||
|
- Tabbed model selector that separates local and cloud providers, including cloud indicators in the UI.
|
||||||
|
- Footer status line includes provider connectivity/credential summaries (e.g., cloud auth failures, missing API keys).
|
||||||
|
- Secure credential vault integration for Ollama Cloud API keys when `privacy.encrypt_local_data = true`.
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
- The main `README.md` has been updated to be more concise and link to the new documentation.
|
- The main `README.md` has been updated to be more concise and link to the new documentation.
|
||||||
@@ -28,6 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||||||
- Ollama provider error handling now distinguishes timeouts, missing models, and authentication failures.
|
- Ollama provider error handling now distinguishes timeouts, missing models, and authentication failures.
|
||||||
- `owlen` warns when the active terminal likely lacks 256-color support.
|
- `owlen` warns when the active terminal likely lacks 256-color support.
|
||||||
- `config.toml` now carries a schema version (`1.1.0`) and is migrated automatically; deprecated keys such as `agent.max_tool_calls` trigger warnings instead of hard failures.
|
- `config.toml` now carries a schema version (`1.1.0`) and is migrated automatically; deprecated keys such as `agent.max_tool_calls` trigger warnings instead of hard failures.
|
||||||
|
- Model selector navigation (Tab/Shift-Tab) now switches between local and cloud tabs while preserving selection state.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ members = [
|
|||||||
"crates/owlen-core",
|
"crates/owlen-core",
|
||||||
"crates/owlen-tui",
|
"crates/owlen-tui",
|
||||||
"crates/owlen-cli",
|
"crates/owlen-cli",
|
||||||
"crates/owlen-ollama",
|
|
||||||
"crates/owlen-mcp-server",
|
"crates/owlen-mcp-server",
|
||||||
"crates/owlen-mcp-llm-server",
|
"crates/owlen-mcp-llm-server",
|
||||||
"crates/owlen-mcp-client",
|
"crates/owlen-mcp-client",
|
||||||
|
|||||||
@@ -26,7 +26,6 @@ required-features = ["chat-client"]
|
|||||||
owlen-core = { path = "../owlen-core" }
|
owlen-core = { path = "../owlen-core" }
|
||||||
# Optional TUI dependency, enabled by the "chat-client" feature.
|
# Optional TUI dependency, enabled by the "chat-client" feature.
|
||||||
owlen-tui = { path = "../owlen-tui", optional = true }
|
owlen-tui = { path = "../owlen-tui", optional = true }
|
||||||
owlen-ollama = { path = "../owlen-ollama" }
|
|
||||||
log = { workspace = true }
|
log = { workspace = true }
|
||||||
async-trait = { workspace = true }
|
async-trait = { workspace = true }
|
||||||
futures = { workspace = true }
|
futures = { workspace = true }
|
||||||
|
|||||||
401
crates/owlen-cli/src/cloud.rs
Normal file
401
crates/owlen-cli/src/cloud.rs
Normal file
@@ -0,0 +1,401 @@
|
|||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use anyhow::{anyhow, bail, Context, Result};
|
||||||
|
use clap::Subcommand;
|
||||||
|
use owlen_core::config as core_config;
|
||||||
|
use owlen_core::config::Config;
|
||||||
|
use owlen_core::credentials::{ApiCredentials, CredentialManager, OLLAMA_CLOUD_CREDENTIAL_ID};
|
||||||
|
use owlen_core::encryption;
|
||||||
|
use owlen_core::provider::{LLMProvider, ProviderConfig};
|
||||||
|
use owlen_core::providers::OllamaProvider;
|
||||||
|
use owlen_core::storage::StorageManager;
|
||||||
|
|
||||||
|
const DEFAULT_CLOUD_ENDPOINT: &str = "https://ollama.com";
|
||||||
|
|
||||||
|
/// Subcommands of `owlen cloud` for managing Ollama Cloud credentials
/// without hand-editing config files.
#[derive(Debug, Subcommand)]
pub enum CloudCommand {
    /// Configure Ollama Cloud credentials
    Setup {
        /// API key passed directly on the command line (prompted when omitted)
        #[arg(long)]
        api_key: Option<String>,
        /// Override the cloud endpoint (default: https://ollama.com)
        #[arg(long)]
        endpoint: Option<String>,
        /// Provider name to configure (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// Check connectivity to Ollama Cloud
    Status {
        /// Provider name to check (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// List available cloud-hosted models
    Models {
        /// Provider name to query (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
    /// Remove stored Ollama Cloud credentials
    Logout {
        /// Provider name to clear (default: ollama)
        #[arg(long, default_value = "ollama")]
        provider: String,
    },
}
|
||||||
|
|
||||||
|
pub async fn run_cloud_command(command: CloudCommand) -> Result<()> {
|
||||||
|
match command {
|
||||||
|
CloudCommand::Setup {
|
||||||
|
api_key,
|
||||||
|
endpoint,
|
||||||
|
provider,
|
||||||
|
} => setup(provider, api_key, endpoint).await,
|
||||||
|
CloudCommand::Status { provider } => status(provider).await,
|
||||||
|
CloudCommand::Models { provider } => models(provider).await,
|
||||||
|
CloudCommand::Logout { provider } => logout(provider).await,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle `owlen cloud setup`: record the endpoint and API key for `provider`.
///
/// The key comes from `--api-key` when given and non-blank, otherwise the user
/// is prompted. With `privacy.encrypt_local_data = true` the key goes into the
/// encrypted credential vault and is scrubbed from the on-disk config;
/// otherwise it is stored in plaintext config.
async fn setup(provider: String, api_key: Option<String>, endpoint: Option<String>) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let endpoint = endpoint.unwrap_or_else(|| DEFAULT_CLOUD_ENDPOINT.to_string());

    ensure_provider_entry(&mut config, &provider, &endpoint);

    // Prefer the CLI-supplied key; fall back to an interactive prompt when it
    // is absent or blank.
    let key = match api_key {
        Some(value) if !value.trim().is_empty() => value,
        _ => {
            let prompt = format!("Enter API key for {provider}: ");
            encryption::prompt_password(&prompt)?
        }
    };

    if config.privacy.encrypt_local_data {
        let storage = Arc::new(StorageManager::new().await?);
        let manager = unlock_credential_manager(&config, storage.clone())?;
        let credentials = ApiCredentials {
            api_key: key.clone(),
            endpoint: endpoint.clone(),
        };
        manager
            .store_credentials(OLLAMA_CLOUD_CREDENTIAL_ID, &credentials)
            .await?;
        // Ensure plaintext key is not persisted to disk.
        if let Some(entry) = config.providers.get_mut(&provider) {
            entry.api_key = None;
        }
    } else if let Some(entry) = config.providers.get_mut(&provider) {
        entry.api_key = Some(key.clone());
    }

    // The endpoint is always kept in the config, vault or not.
    if let Some(entry) = config.providers.get_mut(&provider) {
        entry.base_url = Some(endpoint.clone());
    }

    crate::config::save_config(&config)?;
    println!("Saved Ollama configuration for provider '{provider}'.");
    if config.privacy.encrypt_local_data {
        println!("API key stored securely in the encrypted credential vault.");
    } else {
        println!("API key stored in plaintext configuration (encryption disabled).");
    }
    Ok(())
}
|
||||||
|
|
||||||
|
/// Handle `owlen cloud status`: probe connectivity to the configured provider.
///
/// Loads stored credentials (unlocking the vault when encryption is enabled),
/// hydrates environment variables, then runs the provider's health check and
/// prints a human-readable result. A failed health check is reported, not
/// returned as an error.
async fn status(provider: String) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let storage = Arc::new(StorageManager::new().await?);
    let manager = if config.privacy.encrypt_local_data {
        Some(unlock_credential_manager(&config, storage.clone())?)
    } else {
        None
    };

    // Pull the stored key into env vars / config before constructing the provider.
    let api_key = hydrate_api_key(&mut config, manager.as_ref()).await?;
    ensure_provider_entry(&mut config, &provider, DEFAULT_CLOUD_ENDPOINT);

    let provider_cfg = config
        .provider(&provider)
        .cloned()
        .ok_or_else(|| anyhow!("Provider '{provider}' is not configured"))?;

    let ollama = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
        .with_context(|| "Failed to construct Ollama provider. Run `owlen cloud setup` first.")?;

    match ollama.health_check().await {
        Ok(_) => {
            println!(
                "✓ Connected to {provider} ({})",
                provider_cfg
                    .base_url
                    .as_deref()
                    .unwrap_or(DEFAULT_CLOUD_ENDPOINT)
            );
            // Success without a stored key means credentials came from the
            // environment — worth surfacing when the vault is in use.
            if api_key.is_none() && config.privacy.encrypt_local_data {
                println!(
                    "Warning: No API key stored; connection succeeded via environment variables."
                );
            }
        }
        Err(err) => {
            println!("✗ Failed to reach {provider}: {err}");
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
async fn models(provider: String) -> Result<()> {
|
||||||
|
let provider = canonical_provider_name(&provider);
|
||||||
|
let mut config = crate::config::try_load_config().unwrap_or_default();
|
||||||
|
let storage = Arc::new(StorageManager::new().await?);
|
||||||
|
let manager = if config.privacy.encrypt_local_data {
|
||||||
|
Some(unlock_credential_manager(&config, storage.clone())?)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
hydrate_api_key(&mut config, manager.as_ref()).await?;
|
||||||
|
|
||||||
|
ensure_provider_entry(&mut config, &provider, DEFAULT_CLOUD_ENDPOINT);
|
||||||
|
let provider_cfg = config
|
||||||
|
.provider(&provider)
|
||||||
|
.cloned()
|
||||||
|
.ok_or_else(|| anyhow!("Provider '{provider}' is not configured"))?;
|
||||||
|
|
||||||
|
let ollama = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
|
||||||
|
.with_context(|| "Failed to construct Ollama provider. Run `owlen cloud setup` first.")?;
|
||||||
|
|
||||||
|
match ollama.list_models().await {
|
||||||
|
Ok(models) => {
|
||||||
|
if models.is_empty() {
|
||||||
|
println!("No cloud models reported by '{}'.", provider);
|
||||||
|
} else {
|
||||||
|
println!("Models available via '{}':", provider);
|
||||||
|
for model in models {
|
||||||
|
if let Some(description) = &model.description {
|
||||||
|
println!(" - {} ({})", model.id, description);
|
||||||
|
} else {
|
||||||
|
println!(" - {}", model.id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
bail!("Failed to list models: {err}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Handle `owlen cloud logout`: remove stored credentials for `provider`.
///
/// Deletes the vault entry when encryption is enabled, clears any plaintext
/// `api_key` from the config, and persists the result.
async fn logout(provider: String) -> Result<()> {
    let provider = canonical_provider_name(&provider);
    let mut config = crate::config::try_load_config().unwrap_or_default();
    let storage = Arc::new(StorageManager::new().await?);

    if config.privacy.encrypt_local_data {
        let manager = unlock_credential_manager(&config, storage.clone())?;
        manager
            .delete_credentials(OLLAMA_CLOUD_CREDENTIAL_ID)
            .await?;
    }

    // Also scrub any plaintext key that may be in the config file.
    if let Some(entry) = provider_entry_mut(&mut config) {
        entry.api_key = None;
    }

    crate::config::save_config(&config)?;
    println!("Cleared credentials for provider '{provider}'.");
    Ok(())
}
|
||||||
|
|
||||||
|
/// Make sure `config.providers[provider]` exists and is shaped for Ollama.
///
/// For `provider == "ollama"`, a legacy `"ollama-cloud"` entry is renamed in
/// place when no `"ollama"` entry exists yet. The entry's `provider_type` is
/// forced to `"ollama"` and `base_url` defaults to `endpoint` when unset.
fn ensure_provider_entry(config: &mut Config, provider: &str, endpoint: &str) {
    // Migrate a legacy "ollama-cloud" entry to "ollama" — only when it would
    // not clobber an existing "ollama" entry.
    if provider == "ollama"
        && config.providers.contains_key("ollama-cloud")
        && !config.providers.contains_key("ollama")
    {
        if let Some(mut legacy) = config.providers.remove("ollama-cloud") {
            legacy.provider_type = "ollama".to_string();
            config.providers.insert("ollama".to_string(), legacy);
        }
    }

    // Creates the entry with defaults when it is missing.
    core_config::ensure_provider_config(config, provider);

    if let Some(cfg) = config.providers.get_mut(provider) {
        if cfg.provider_type != "ollama" {
            cfg.provider_type = "ollama".to_string();
        }
        if cfg.base_url.is_none() {
            cfg.base_url = Some(endpoint.to_string());
        }
    }
}
|
||||||
|
|
||||||
|
/// Normalize a user-supplied provider name to its canonical form.
///
/// Trims whitespace, lowercases, and converts underscores to hyphens. Both an
/// empty name and the legacy alias `ollama-cloud` collapse to `"ollama"`.
fn canonical_provider_name(provider: &str) -> String {
    let normalized = provider.trim().replace('_', "-").to_ascii_lowercase();
    if normalized.is_empty() || normalized == "ollama-cloud" {
        "ollama".to_string()
    } else {
        normalized
    }
}
|
||||||
|
|
||||||
|
/// Set environment variable `var` to `value` unless it already holds a
/// non-blank value. An unset or whitespace-only variable counts as missing.
fn set_env_if_missing(var: &str, value: &str) {
    let missing_or_blank = match std::env::var(var) {
        Ok(current) => current.trim().is_empty(),
        Err(_) => true,
    };
    if missing_or_blank {
        std::env::set_var(var, value);
    }
}
|
||||||
|
|
||||||
|
/// Mutable access to the active Ollama provider entry, preferring the
/// canonical `"ollama"` key and falling back to the legacy `"ollama-cloud"`.
fn provider_entry_mut(config: &mut Config) -> Option<&mut ProviderConfig> {
    // Pick the key first so only a single mutable borrow of the map is taken.
    let key = if config.providers.contains_key("ollama") {
        "ollama"
    } else {
        "ollama-cloud"
    };
    config.providers.get_mut(key)
}
|
||||||
|
|
||||||
|
/// Shared access to the active Ollama provider entry, preferring the
/// canonical `"ollama"` key and falling back to the legacy `"ollama-cloud"`.
fn provider_entry(config: &Config) -> Option<&ProviderConfig> {
    config
        .providers
        .get("ollama")
        .or_else(|| config.providers.get("ollama-cloud"))
}
|
||||||
|
|
||||||
|
fn unlock_credential_manager(
|
||||||
|
config: &Config,
|
||||||
|
storage: Arc<StorageManager>,
|
||||||
|
) -> Result<Arc<CredentialManager>> {
|
||||||
|
if !config.privacy.encrypt_local_data {
|
||||||
|
bail!("Credential manager requested but encryption is disabled");
|
||||||
|
}
|
||||||
|
|
||||||
|
let secure_path = vault_path(&storage)?;
|
||||||
|
let handle = unlock_vault(&secure_path)?;
|
||||||
|
let master_key = Arc::new(handle.data.master_key.clone());
|
||||||
|
Ok(Arc::new(CredentialManager::new(
|
||||||
|
storage,
|
||||||
|
master_key.clone(),
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn vault_path(storage: &StorageManager) -> Result<PathBuf> {
|
||||||
|
let base_dir = storage
|
||||||
|
.database_path()
|
||||||
|
.parent()
|
||||||
|
.map(|p| p.to_path_buf())
|
||||||
|
.or_else(dirs::data_local_dir)
|
||||||
|
.unwrap_or_else(|| PathBuf::from("."));
|
||||||
|
Ok(base_dir.join("encrypted_data.json"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Unlock (or interactively create) the encrypted vault at `path`.
///
/// Existing vault: tries `OWLEN_MASTER_PASSWORD` first, then prompts up to
/// three times. New vault: delegates to `unlock_interactive` and then caches
/// a password in the env var for the rest of the session.
///
/// NOTE(review): caching the master password in a process environment
/// variable makes it visible to child processes and /proc on some platforms —
/// confirm this trade-off is intended.
fn unlock_vault(path: &Path) -> Result<encryption::VaultHandle> {
    use std::env;

    if path.exists() {
        // Non-interactive path: honor a pre-set, non-blank env password.
        if let Ok(password) = env::var("OWLEN_MASTER_PASSWORD") {
            if !password.trim().is_empty() {
                return encryption::unlock_with_password(path.to_path_buf(), &password)
                    .context("Failed to unlock vault with OWLEN_MASTER_PASSWORD");
            }
        }

        // Interactive path: up to three prompt attempts; the last failure is
        // returned as-is so the caller sees the underlying error.
        for attempt in 0..3 {
            let password = encryption::prompt_password("Enter master password: ")?;
            match encryption::unlock_with_password(path.to_path_buf(), &password) {
                Ok(handle) => {
                    // Cache the successful password for this session.
                    env::set_var("OWLEN_MASTER_PASSWORD", password);
                    return Ok(handle);
                }
                Err(err) => {
                    eprintln!("Failed to unlock vault: {err}");
                    if attempt == 2 {
                        return Err(err);
                    }
                }
            }
        }

        // Unreachable in practice (the loop returns on attempt 2), kept as a
        // safety net.
        bail!("Unable to unlock encrypted credential vault");
    }

    // No vault yet: create/unlock interactively, then cache a password unless
    // one is already set and non-blank.
    let handle = encryption::unlock_interactive(path.to_path_buf())?;
    if env::var("OWLEN_MASTER_PASSWORD")
        .map(|v| v.trim().is_empty())
        .unwrap_or(true)
    {
        let password = encryption::prompt_password("Cache master password for this session: ")?;
        env::set_var("OWLEN_MASTER_PASSWORD", password);
    }
    Ok(handle)
}
|
||||||
|
|
||||||
|
/// Load the Ollama Cloud API key into the process environment and `config`.
///
/// Looks in the credential vault first (when a `manager` is supplied), then in
/// the provider entry's plaintext `api_key`. Found keys are exported to
/// `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` without overwriting existing
/// values; a vault endpoint also backfills a missing `base_url`.
///
/// Returns the key when one was found, otherwise `Ok(None)`.
/// NOTE(review): a vault entry whose key is all whitespace returns
/// `Ok(Some(""))` rather than falling through to the config — confirm intended.
async fn hydrate_api_key(
    config: &mut Config,
    manager: Option<&Arc<CredentialManager>>,
) -> Result<Option<String>> {
    if let Some(manager) = manager {
        if let Some(credentials) = manager.get_credentials(OLLAMA_CLOUD_CREDENTIAL_ID).await? {
            let key = credentials.api_key.trim().to_string();
            if !key.is_empty() {
                set_env_if_missing("OLLAMA_API_KEY", &key);
                set_env_if_missing("OLLAMA_CLOUD_API_KEY", &key);
            }

            // Backfill the endpoint from the vault when the config has none.
            if let Some(cfg) = provider_entry_mut(config) {
                if cfg.base_url.is_none() && !credentials.endpoint.trim().is_empty() {
                    cfg.base_url = Some(credentials.endpoint);
                }
            }
            return Ok(Some(key));
        }
    }

    // No vault hit: fall back to a non-blank plaintext key in the config.
    if let Some(cfg) = provider_entry(config) {
        if let Some(key) = cfg
            .api_key
            .as_ref()
            .map(|value| value.trim())
            .filter(|value| !value.is_empty())
        {
            set_env_if_missing("OLLAMA_API_KEY", key);
            set_env_if_missing("OLLAMA_CLOUD_API_KEY", key);
            return Ok(Some(key.to_string()));
        }
    }
    Ok(None)
}
|
||||||
|
|
||||||
|
pub async fn load_runtime_credentials(
|
||||||
|
config: &mut Config,
|
||||||
|
storage: Arc<StorageManager>,
|
||||||
|
) -> Result<()> {
|
||||||
|
if config.privacy.encrypt_local_data {
|
||||||
|
let manager = unlock_credential_manager(config, storage.clone())?;
|
||||||
|
hydrate_api_key(config, Some(&manager)).await?;
|
||||||
|
} else {
|
||||||
|
hydrate_api_key(config, None).await?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Aliases, underscores, casing, padding, and the empty string should all
    // collapse to the canonical provider name "ollama".
    #[test]
    fn canonicalises_provider_names() {
        assert_eq!(canonical_provider_name("OLLAMA_CLOUD"), "ollama");
        assert_eq!(canonical_provider_name(" ollama-cloud"), "ollama");
        assert_eq!(canonical_provider_name(""), "ollama");
    }
}
|
||||||
@@ -1,20 +1,23 @@
|
|||||||
//! OWLEN CLI - Chat TUI client
|
//! OWLEN CLI - Chat TUI client
|
||||||
|
|
||||||
|
mod cloud;
|
||||||
|
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use clap::{Parser, Subcommand};
|
use clap::{Parser, Subcommand};
|
||||||
|
use cloud::{load_runtime_credentials, CloudCommand};
|
||||||
use owlen_core::config as core_config;
|
use owlen_core::config as core_config;
|
||||||
use owlen_core::{
|
use owlen_core::{
|
||||||
config::{Config, McpMode},
|
config::{Config, McpMode},
|
||||||
mcp::remote_client::RemoteMcpClient,
|
mcp::remote_client::RemoteMcpClient,
|
||||||
mode::Mode,
|
mode::Mode,
|
||||||
provider::ChatStream,
|
provider::ChatStream,
|
||||||
|
providers::OllamaProvider,
|
||||||
session::SessionController,
|
session::SessionController,
|
||||||
storage::StorageManager,
|
storage::StorageManager,
|
||||||
types::{ChatRequest, ChatResponse, Message, ModelInfo},
|
types::{ChatRequest, ChatResponse, Message, ModelInfo},
|
||||||
Error, Provider,
|
Error, Provider,
|
||||||
};
|
};
|
||||||
use owlen_ollama::OllamaProvider;
|
|
||||||
use owlen_tui::tui_controller::{TuiController, TuiRequest};
|
use owlen_tui::tui_controller::{TuiController, TuiRequest};
|
||||||
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
|
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
@@ -48,6 +51,9 @@ enum OwlenCommand {
|
|||||||
/// Inspect or upgrade configuration files
|
/// Inspect or upgrade configuration files
|
||||||
#[command(subcommand)]
|
#[command(subcommand)]
|
||||||
Config(ConfigCommand),
|
Config(ConfigCommand),
|
||||||
|
/// Manage Ollama Cloud credentials
|
||||||
|
#[command(subcommand)]
|
||||||
|
Cloud(CloudCommand),
|
||||||
/// Show manual steps for updating Owlen to the latest revision
|
/// Show manual steps for updating Owlen to the latest revision
|
||||||
Upgrade,
|
Upgrade,
|
||||||
}
|
}
|
||||||
@@ -112,8 +118,7 @@ fn build_local_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
|
|||||||
match provider_cfg.provider_type.as_str() {
|
match provider_cfg.provider_type.as_str() {
|
||||||
"ollama" | "ollama-cloud" => {
|
"ollama" | "ollama-cloud" => {
|
||||||
let provider = OllamaProvider::from_config(provider_cfg, Some(&cfg.general))?;
|
let provider = OllamaProvider::from_config(provider_cfg, Some(&cfg.general))?;
|
||||||
let provider: Arc<dyn Provider> = Arc::new(provider);
|
Ok(Arc::new(provider) as Arc<dyn Provider>)
|
||||||
Ok(provider)
|
|
||||||
}
|
}
|
||||||
other => Err(anyhow::anyhow!(format!(
|
other => Err(anyhow::anyhow!(format!(
|
||||||
"Provider type '{other}' is not supported in legacy/local MCP mode"
|
"Provider type '{other}' is not supported in legacy/local MCP mode"
|
||||||
@@ -121,9 +126,10 @@ fn build_local_provider(cfg: &Config) -> anyhow::Result<Arc<dyn Provider>> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_command(command: OwlenCommand) -> Result<()> {
|
async fn run_command(command: OwlenCommand) -> Result<()> {
|
||||||
match command {
|
match command {
|
||||||
OwlenCommand::Config(config_cmd) => run_config_command(config_cmd),
|
OwlenCommand::Config(config_cmd) => run_config_command(config_cmd),
|
||||||
|
OwlenCommand::Cloud(cloud_cmd) => cloud::run_cloud_command(cloud_cmd).await,
|
||||||
OwlenCommand::Upgrade => {
|
OwlenCommand::Upgrade => {
|
||||||
println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
|
println!("To update Owlen from source:\n git pull\n cargo install --path crates/owlen-cli --force");
|
||||||
println!(
|
println!(
|
||||||
@@ -163,16 +169,34 @@ fn run_config_doctor() -> Result<()> {
|
|||||||
changes.push("default provider missing; reset to 'ollama'".to_string());
|
changes.push("default provider missing; reset to 'ollama'".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(mut legacy) = config.providers.remove("ollama-cloud") {
|
||||||
|
legacy.provider_type = "ollama".to_string();
|
||||||
|
use std::collections::hash_map::Entry;
|
||||||
|
match config.providers.entry("ollama".to_string()) {
|
||||||
|
Entry::Occupied(mut existing) => {
|
||||||
|
let entry = existing.get_mut();
|
||||||
|
if entry.api_key.is_none() {
|
||||||
|
entry.api_key = legacy.api_key.take();
|
||||||
|
}
|
||||||
|
if entry.base_url.is_none() && legacy.base_url.is_some() {
|
||||||
|
entry.base_url = legacy.base_url.take();
|
||||||
|
}
|
||||||
|
entry.extra.extend(legacy.extra);
|
||||||
|
}
|
||||||
|
Entry::Vacant(slot) => {
|
||||||
|
slot.insert(legacy);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
changes.push(
|
||||||
|
"migrated legacy 'ollama-cloud' provider into unified 'ollama' entry".to_string(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
if !config.providers.contains_key("ollama") {
|
if !config.providers.contains_key("ollama") {
|
||||||
core_config::ensure_provider_config(&mut config, "ollama");
|
core_config::ensure_provider_config(&mut config, "ollama");
|
||||||
changes.push("added default ollama provider configuration".to_string());
|
changes.push("added default ollama provider configuration".to_string());
|
||||||
}
|
}
|
||||||
|
|
||||||
if !config.providers.contains_key("ollama-cloud") {
|
|
||||||
core_config::ensure_provider_config(&mut config, "ollama-cloud");
|
|
||||||
changes.push("added default ollama-cloud provider configuration".to_string());
|
|
||||||
}
|
|
||||||
|
|
||||||
match config.mcp.mode {
|
match config.mcp.mode {
|
||||||
McpMode::Legacy => {
|
McpMode::Legacy => {
|
||||||
config.mcp.mode = McpMode::LocalOnly;
|
config.mcp.mode = McpMode::LocalOnly;
|
||||||
@@ -329,7 +353,7 @@ async fn main() -> Result<()> {
|
|||||||
// Parse command-line arguments
|
// Parse command-line arguments
|
||||||
let Args { code, command } = Args::parse();
|
let Args { code, command } = Args::parse();
|
||||||
if let Some(command) = command {
|
if let Some(command) = command {
|
||||||
return run_command(command);
|
return run_command(command).await;
|
||||||
}
|
}
|
||||||
let initial_mode = if code { Mode::Code } else { Mode::Chat };
|
let initial_mode = if code { Mode::Code } else { Mode::Chat };
|
||||||
|
|
||||||
@@ -339,8 +363,6 @@ async fn main() -> Result<()> {
|
|||||||
let color_support = detect_terminal_color_support();
|
let color_support = detect_terminal_color_support();
|
||||||
// Load configuration (or fall back to defaults) for the session controller.
|
// Load configuration (or fall back to defaults) for the session controller.
|
||||||
let mut cfg = config::try_load_config().unwrap_or_default();
|
let mut cfg = config::try_load_config().unwrap_or_default();
|
||||||
// Disable encryption for CLI to avoid password prompts in this environment.
|
|
||||||
cfg.privacy.encrypt_local_data = false;
|
|
||||||
if let Some(previous_theme) = apply_terminal_theme(&mut cfg, &color_support) {
|
if let Some(previous_theme) = apply_terminal_theme(&mut cfg, &color_support) {
|
||||||
let term_label = match &color_support {
|
let term_label = match &color_support {
|
||||||
TerminalColorSupport::Limited { term } => Cow::from(term.as_str()),
|
TerminalColorSupport::Limited { term } => Cow::from(term.as_str()),
|
||||||
@@ -357,6 +379,8 @@ async fn main() -> Result<()> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
cfg.validate()?;
|
cfg.validate()?;
|
||||||
|
let storage = Arc::new(StorageManager::new().await?);
|
||||||
|
load_runtime_credentials(&mut cfg, storage.clone()).await?;
|
||||||
|
|
||||||
let (tui_tx, _tui_rx) = mpsc::unbounded_channel::<TuiRequest>();
|
let (tui_tx, _tui_rx) = mpsc::unbounded_channel::<TuiRequest>();
|
||||||
let tui_controller = Arc::new(TuiController::new(tui_tx));
|
let tui_controller = Arc::new(TuiController::new(tui_tx));
|
||||||
@@ -387,7 +411,6 @@ async fn main() -> Result<()> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let storage = Arc::new(StorageManager::new().await?);
|
|
||||||
let controller =
|
let controller =
|
||||||
SessionController::new(provider, cfg, storage.clone(), tui_controller, false).await?;
|
SessionController::new(provider, cfg, storage.clone(), tui_controller, false).await?;
|
||||||
let (mut app, mut session_rx) = ChatApp::new(controller).await?;
|
let (mut app, mut session_rx) = ChatApp::new(controller).await?;
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ unicode-width = "0.1"
|
|||||||
uuid = { workspace = true }
|
uuid = { workspace = true }
|
||||||
textwrap = { workspace = true }
|
textwrap = { workspace = true }
|
||||||
futures = { workspace = true }
|
futures = { workspace = true }
|
||||||
|
futures-util = { workspace = true }
|
||||||
async-trait = { workspace = true }
|
async-trait = { workspace = true }
|
||||||
toml = { workspace = true }
|
toml = { workspace = true }
|
||||||
shellexpand = { workspace = true }
|
shellexpand = { workspace = true }
|
||||||
@@ -45,6 +46,7 @@ path-clean = "1.0"
|
|||||||
tokio-stream = { workspace = true }
|
tokio-stream = { workspace = true }
|
||||||
tokio-tungstenite = "0.21"
|
tokio-tungstenite = "0.21"
|
||||||
tungstenite = "0.21"
|
tungstenite = "0.21"
|
||||||
|
ollama-rs = { version = "0.3", features = ["stream", "headers"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tokio-test = { workspace = true }
|
tokio-test = { workspace = true }
|
||||||
|
|||||||
@@ -57,10 +57,6 @@ impl Default for Config {
|
|||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
let mut providers = HashMap::new();
|
let mut providers = HashMap::new();
|
||||||
providers.insert("ollama".to_string(), default_ollama_provider_config());
|
providers.insert("ollama".to_string(), default_ollama_provider_config());
|
||||||
providers.insert(
|
|
||||||
"ollama-cloud".to_string(),
|
|
||||||
default_ollama_cloud_provider_config(),
|
|
||||||
);
|
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
schema_version: Self::default_schema_version(),
|
schema_version: Self::default_schema_version(),
|
||||||
@@ -138,10 +134,13 @@ impl Config {
|
|||||||
config.ensure_defaults();
|
config.ensure_defaults();
|
||||||
config.mcp.apply_backward_compat();
|
config.mcp.apply_backward_compat();
|
||||||
config.apply_schema_migrations(&previous_version);
|
config.apply_schema_migrations(&previous_version);
|
||||||
|
config.expand_provider_env_vars()?;
|
||||||
config.validate()?;
|
config.validate()?;
|
||||||
Ok(config)
|
Ok(config)
|
||||||
} else {
|
} else {
|
||||||
Ok(Config::default())
|
let mut config = Config::default();
|
||||||
|
config.expand_provider_env_vars()?;
|
||||||
|
Ok(config)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -200,12 +199,18 @@ impl Config {
|
|||||||
}
|
}
|
||||||
|
|
||||||
ensure_provider_config(self, "ollama");
|
ensure_provider_config(self, "ollama");
|
||||||
ensure_provider_config(self, "ollama-cloud");
|
|
||||||
if self.schema_version.is_empty() {
|
if self.schema_version.is_empty() {
|
||||||
self.schema_version = Self::default_schema_version();
|
self.schema_version = Self::default_schema_version();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
    /// Expand environment-variable references in every provider entry
    /// (base_url, api_key, and string-valued extras), failing on the first
    /// entry that cannot be expanded.
    fn expand_provider_env_vars(&mut self) -> Result<()> {
        for (provider_name, provider) in self.providers.iter_mut() {
            expand_provider_entry(provider_name, provider)?;
        }
        Ok(())
    }
|
||||||
|
|
||||||
/// Validate configuration invariants and surface actionable error messages.
|
/// Validate configuration invariants and surface actionable error messages.
|
||||||
pub fn validate(&self) -> Result<()> {
|
pub fn validate(&self) -> Result<()> {
|
||||||
self.validate_default_provider()?;
|
self.validate_default_provider()?;
|
||||||
@@ -222,9 +227,42 @@ impl Config {
|
|||||||
CONFIG_SCHEMA_VERSION
|
CONFIG_SCHEMA_VERSION
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(legacy_cloud) = self.providers.remove("ollama_cloud") {
|
||||||
|
self.merge_legacy_ollama_provider(legacy_cloud);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(legacy_cloud) = self.providers.remove("ollama-cloud") {
|
||||||
|
self.merge_legacy_ollama_provider(legacy_cloud);
|
||||||
|
}
|
||||||
|
|
||||||
self.schema_version = CONFIG_SCHEMA_VERSION.to_string();
|
self.schema_version = CONFIG_SCHEMA_VERSION.to_string();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
    /// Fold a legacy cloud provider entry into the unified `"ollama"` entry.
    ///
    /// The legacy entry's type is forced to `"ollama"`; when an `"ollama"`
    /// entry already exists, only its missing fields (base_url, api_key, and
    /// an empty `extra` map) are filled from the legacy entry — existing
    /// values always win. Otherwise the legacy entry is inserted wholesale.
    fn merge_legacy_ollama_provider(&mut self, mut legacy_cloud: ProviderConfig) {
        use std::collections::hash_map::Entry;

        legacy_cloud.provider_type = "ollama".to_string();

        match self.providers.entry("ollama".to_string()) {
            Entry::Occupied(mut entry) => {
                let target = entry.get_mut();
                if target.base_url.is_none() {
                    target.base_url = legacy_cloud.base_url.take();
                }
                if target.api_key.is_none() {
                    target.api_key = legacy_cloud.api_key.take();
                }
                // Extras are taken all-or-nothing: only when the target has none.
                if target.extra.is_empty() && !legacy_cloud.extra.is_empty() {
                    target.extra = legacy_cloud.extra;
                }
            }
            Entry::Vacant(entry) => {
                entry.insert(legacy_cloud);
            }
        }
    }
|
||||||
|
|
||||||
fn validate_default_provider(&self) -> Result<()> {
|
fn validate_default_provider(&self) -> Result<()> {
|
||||||
if self.general.default_provider.trim().is_empty() {
|
if self.general.default_provider.trim().is_empty() {
|
||||||
return Err(crate::Error::Config(
|
return Err(crate::Error::Config(
|
||||||
@@ -308,12 +346,53 @@ fn default_ollama_provider_config() -> ProviderConfig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_ollama_cloud_provider_config() -> ProviderConfig {
|
fn expand_provider_entry(provider_name: &str, provider: &mut ProviderConfig) -> Result<()> {
|
||||||
ProviderConfig {
|
if let Some(ref mut base_url) = provider.base_url {
|
||||||
provider_type: "ollama-cloud".to_string(),
|
let expanded = expand_env_string(
|
||||||
base_url: Some("https://ollama.com".to_string()),
|
base_url.as_str(),
|
||||||
api_key: None,
|
&format!("providers.{provider_name}.base_url"),
|
||||||
extra: HashMap::new(),
|
)?;
|
||||||
|
*base_url = expanded;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref mut api_key) = provider.api_key {
|
||||||
|
let expanded = expand_env_string(
|
||||||
|
api_key.as_str(),
|
||||||
|
&format!("providers.{provider_name}.api_key"),
|
||||||
|
)?;
|
||||||
|
*api_key = expanded;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (extra_key, extra_value) in provider.extra.iter_mut() {
|
||||||
|
if let serde_json::Value::String(current) = extra_value {
|
||||||
|
let expanded = expand_env_string(
|
||||||
|
current.as_str(),
|
||||||
|
&format!("providers.{provider_name}.{}", extra_key),
|
||||||
|
)?;
|
||||||
|
*current = expanded;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_env_string(input: &str, field_path: &str) -> Result<String> {
|
||||||
|
if !input.contains('$') {
|
||||||
|
return Ok(input.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
match shellexpand::env(input) {
|
||||||
|
Ok(expanded) => Ok(expanded.into_owned()),
|
||||||
|
Err(err) => match err.cause {
|
||||||
|
std::env::VarError::NotPresent => Err(crate::Error::Config(format!(
|
||||||
|
"Environment variable {} referenced in {field_path} is not set",
|
||||||
|
err.var_name
|
||||||
|
))),
|
||||||
|
std::env::VarError::NotUnicode(_) => Err(crate::Error::Config(format!(
|
||||||
|
"Environment variable {} referenced in {field_path} contains invalid Unicode",
|
||||||
|
err.var_name
|
||||||
|
))),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -787,11 +866,14 @@ pub fn ensure_provider_config<'a>(
|
|||||||
) -> &'a ProviderConfig {
|
) -> &'a ProviderConfig {
|
||||||
use std::collections::hash_map::Entry;
|
use std::collections::hash_map::Entry;
|
||||||
|
|
||||||
|
if matches!(provider_name, "ollama_cloud" | "ollama-cloud") {
|
||||||
|
return ensure_provider_config(config, "ollama");
|
||||||
|
}
|
||||||
|
|
||||||
match config.providers.entry(provider_name.to_string()) {
|
match config.providers.entry(provider_name.to_string()) {
|
||||||
Entry::Occupied(entry) => entry.into_mut(),
|
Entry::Occupied(entry) => entry.into_mut(),
|
||||||
Entry::Vacant(entry) => {
|
Entry::Vacant(entry) => {
|
||||||
let default = match provider_name {
|
let default = match provider_name {
|
||||||
"ollama-cloud" => default_ollama_cloud_provider_config(),
|
|
||||||
"ollama" => default_ollama_provider_config(),
|
"ollama" => default_ollama_provider_config(),
|
||||||
other => ProviderConfig {
|
other => ProviderConfig {
|
||||||
provider_type: other.to_string(),
|
provider_type: other.to_string(),
|
||||||
@@ -814,6 +896,48 @@ pub fn session_timeout(config: &Config) -> Duration {
|
|||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn expand_provider_env_vars_resolves_api_key() {
|
||||||
|
std::env::set_var("OWLEN_TEST_API_KEY", "super-secret");
|
||||||
|
|
||||||
|
let mut config = Config::default();
|
||||||
|
if let Some(ollama) = config.providers.get_mut("ollama") {
|
||||||
|
ollama.api_key = Some("${OWLEN_TEST_API_KEY}".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
config
|
||||||
|
.expand_provider_env_vars()
|
||||||
|
.expect("environment expansion succeeded");
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
config.providers["ollama"].api_key.as_deref(),
|
||||||
|
Some("super-secret")
|
||||||
|
);
|
||||||
|
|
||||||
|
std::env::remove_var("OWLEN_TEST_API_KEY");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn expand_provider_env_vars_errors_for_missing_variable() {
|
||||||
|
std::env::remove_var("OWLEN_TEST_MISSING");
|
||||||
|
|
||||||
|
let mut config = Config::default();
|
||||||
|
if let Some(ollama) = config.providers.get_mut("ollama") {
|
||||||
|
ollama.api_key = Some("${OWLEN_TEST_MISSING}".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let error = config
|
||||||
|
.expand_provider_env_vars()
|
||||||
|
.expect_err("missing variables should error");
|
||||||
|
|
||||||
|
match error {
|
||||||
|
crate::Error::Config(message) => {
|
||||||
|
assert!(message.contains("OWLEN_TEST_MISSING"));
|
||||||
|
}
|
||||||
|
other => panic!("expected config error, got {other:?}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_storage_platform_specific_paths() {
|
fn test_storage_platform_specific_paths() {
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
@@ -857,20 +981,44 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn default_config_contains_local_and_cloud_providers() {
|
fn default_config_contains_local_provider() {
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
assert!(config.providers.contains_key("ollama"));
|
assert!(config.providers.contains_key("ollama"));
|
||||||
assert!(config.providers.contains_key("ollama-cloud"));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn ensure_provider_config_backfills_cloud_defaults() {
|
fn ensure_provider_config_aliases_cloud_defaults() {
|
||||||
let mut config = Config::default();
|
let mut config = Config::default();
|
||||||
config.providers.remove("ollama-cloud");
|
config.providers.clear();
|
||||||
|
|
||||||
let cloud = ensure_provider_config(&mut config, "ollama-cloud");
|
let cloud = ensure_provider_config(&mut config, "ollama-cloud");
|
||||||
assert_eq!(cloud.provider_type, "ollama-cloud");
|
assert_eq!(cloud.provider_type, "ollama");
|
||||||
assert_eq!(cloud.base_url.as_deref(), Some("https://ollama.com"));
|
assert_eq!(cloud.base_url.as_deref(), Some("http://localhost:11434"));
|
||||||
|
assert!(config.providers.contains_key("ollama"));
|
||||||
|
assert!(!config.providers.contains_key("ollama-cloud"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn migrate_ollama_cloud_underscore_key() {
|
||||||
|
let mut config = Config::default();
|
||||||
|
config.providers.clear();
|
||||||
|
config.providers.insert(
|
||||||
|
"ollama_cloud".to_string(),
|
||||||
|
ProviderConfig {
|
||||||
|
provider_type: "ollama_cloud".to_string(),
|
||||||
|
base_url: Some("https://api.ollama.com".to_string()),
|
||||||
|
api_key: Some("secret".to_string()),
|
||||||
|
extra: HashMap::new(),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
config.apply_schema_migrations("1.0.0");
|
||||||
|
|
||||||
|
assert!(config.providers.get("ollama_cloud").is_none());
|
||||||
|
assert!(config.providers.get("ollama-cloud").is_none());
|
||||||
|
let cloud = config.providers.get("ollama").expect("migrated config");
|
||||||
|
assert_eq!(cloud.provider_type, "ollama");
|
||||||
|
assert_eq!(cloud.base_url.as_deref(), Some("https://api.ollama.com"));
|
||||||
|
assert_eq!(cloud.api_key.as_deref(), Some("secret"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ pub struct ApiCredentials {
|
|||||||
pub endpoint: String,
|
pub endpoint: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub const OLLAMA_CLOUD_CREDENTIAL_ID: &str = "provider_ollama_cloud";
|
||||||
|
|
||||||
pub struct CredentialManager {
|
pub struct CredentialManager {
|
||||||
storage: Arc<StorageManager>,
|
storage: Arc<StorageManager>,
|
||||||
master_key: Arc<Vec<u8>>,
|
master_key: Arc<Vec<u8>>,
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ pub mod mcp;
|
|||||||
pub mod mode;
|
pub mod mode;
|
||||||
pub mod model;
|
pub mod model;
|
||||||
pub mod provider;
|
pub mod provider;
|
||||||
|
pub mod providers;
|
||||||
pub mod router;
|
pub mod router;
|
||||||
pub mod sandbox;
|
pub mod sandbox;
|
||||||
pub mod session;
|
pub mod session;
|
||||||
@@ -43,6 +44,7 @@ pub use mode::*;
|
|||||||
pub use model::*;
|
pub use model::*;
|
||||||
// Export provider types but exclude test_utils to avoid ambiguity
|
// Export provider types but exclude test_utils to avoid ambiguity
|
||||||
pub use provider::{ChatStream, LLMProvider, Provider, ProviderConfig, ProviderRegistry};
|
pub use provider::{ChatStream, LLMProvider, Provider, ProviderConfig, ProviderRegistry};
|
||||||
|
pub use providers::*;
|
||||||
pub use router::*;
|
pub use router::*;
|
||||||
pub use sandbox::*;
|
pub use sandbox::*;
|
||||||
pub use session::*;
|
pub use session::*;
|
||||||
|
|||||||
8
crates/owlen-core/src/providers/mod.rs
Normal file
8
crates/owlen-core/src/providers/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
//! Built-in LLM provider implementations.
|
||||||
|
//!
|
||||||
|
//! Each provider integration lives in its own module so that maintenance
|
||||||
|
//! stays focused and configuration remains clear.
|
||||||
|
|
||||||
|
pub mod ollama;
|
||||||
|
|
||||||
|
pub use ollama::OllamaProvider;
|
||||||
841
crates/owlen-core/src/providers/ollama.rs
Normal file
841
crates/owlen-core/src/providers/ollama.rs
Normal file
@@ -0,0 +1,841 @@
|
|||||||
|
//! Ollama provider built on top of the `ollama-rs` crate.
|
||||||
|
use std::{
|
||||||
|
collections::HashMap,
|
||||||
|
env,
|
||||||
|
pin::Pin,
|
||||||
|
time::{Duration, SystemTime},
|
||||||
|
};
|
||||||
|
|
||||||
|
use anyhow::anyhow;
|
||||||
|
use futures::{future::join_all, future::BoxFuture, Stream, StreamExt};
|
||||||
|
use log::{debug, warn};
|
||||||
|
use ollama_rs::{
|
||||||
|
error::OllamaError,
|
||||||
|
generation::chat::{
|
||||||
|
request::ChatMessageRequest as OllamaChatRequest, ChatMessage as OllamaMessage,
|
||||||
|
ChatMessageResponse as OllamaChatResponse, MessageRole as OllamaRole,
|
||||||
|
},
|
||||||
|
generation::tools::{ToolCall as OllamaToolCall, ToolCallFunction as OllamaToolCallFunction},
|
||||||
|
headers::{HeaderMap, HeaderValue, AUTHORIZATION},
|
||||||
|
models::{LocalModel, ModelInfo as OllamaModelInfo, ModelOptions},
|
||||||
|
Ollama,
|
||||||
|
};
|
||||||
|
use reqwest::{Client, StatusCode, Url};
|
||||||
|
use serde_json::{json, Map as JsonMap, Value};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
config::GeneralSettings,
|
||||||
|
mcp::McpToolDescriptor,
|
||||||
|
model::ModelManager,
|
||||||
|
provider::{LLMProvider, ProviderConfig},
|
||||||
|
types::{
|
||||||
|
ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role, TokenUsage, ToolCall,
|
||||||
|
},
|
||||||
|
Error, Result,
|
||||||
|
};
|
||||||
|
|
||||||
|
const DEFAULT_TIMEOUT_SECS: u64 = 120;
|
||||||
|
const DEFAULT_MODEL_CACHE_TTL_SECS: u64 = 60;
|
||||||
|
const CLOUD_BASE_URL: &str = "https://ollama.com";
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
enum OllamaMode {
|
||||||
|
Local,
|
||||||
|
Cloud,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OllamaMode {
|
||||||
|
fn default_base_url(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
Self::Local => "http://localhost:11434",
|
||||||
|
Self::Cloud => CLOUD_BASE_URL,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
struct OllamaOptions {
|
||||||
|
mode: OllamaMode,
|
||||||
|
base_url: String,
|
||||||
|
request_timeout: Duration,
|
||||||
|
model_cache_ttl: Duration,
|
||||||
|
api_key: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OllamaOptions {
|
||||||
|
fn new(mode: OllamaMode, base_url: impl Into<String>) -> Self {
|
||||||
|
Self {
|
||||||
|
mode,
|
||||||
|
base_url: base_url.into(),
|
||||||
|
request_timeout: Duration::from_secs(DEFAULT_TIMEOUT_SECS),
|
||||||
|
model_cache_ttl: Duration::from_secs(DEFAULT_MODEL_CACHE_TTL_SECS),
|
||||||
|
api_key: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_general(mut self, general: &GeneralSettings) -> Self {
|
||||||
|
self.model_cache_ttl = general.model_cache_ttl();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Ollama provider implementation backed by `ollama-rs`.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct OllamaProvider {
|
||||||
|
mode: OllamaMode,
|
||||||
|
client: Ollama,
|
||||||
|
http_client: Client,
|
||||||
|
base_url: String,
|
||||||
|
model_manager: ModelManager,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OllamaProvider {
|
||||||
|
/// Create a provider targeting an explicit base URL (local usage).
|
||||||
|
pub fn new(base_url: impl Into<String>) -> Result<Self> {
|
||||||
|
let input = base_url.into();
|
||||||
|
let normalized =
|
||||||
|
normalize_base_url(Some(&input), OllamaMode::Local).map_err(Error::Config)?;
|
||||||
|
Self::with_options(OllamaOptions::new(OllamaMode::Local, normalized))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a provider from configuration settings.
|
||||||
|
pub fn from_config(config: &ProviderConfig, general: Option<&GeneralSettings>) -> Result<Self> {
|
||||||
|
let mut api_key = resolve_api_key(config.api_key.clone())
|
||||||
|
.or_else(|| env_var_non_empty("OLLAMA_API_KEY"))
|
||||||
|
.or_else(|| env_var_non_empty("OLLAMA_CLOUD_API_KEY"));
|
||||||
|
|
||||||
|
let mode = if api_key.is_some() {
|
||||||
|
OllamaMode::Cloud
|
||||||
|
} else {
|
||||||
|
OllamaMode::Local
|
||||||
|
};
|
||||||
|
|
||||||
|
let base_candidate = if mode == OllamaMode::Cloud {
|
||||||
|
Some(CLOUD_BASE_URL)
|
||||||
|
} else {
|
||||||
|
config.base_url.as_deref()
|
||||||
|
};
|
||||||
|
|
||||||
|
let normalized_base_url =
|
||||||
|
normalize_base_url(base_candidate, mode).map_err(Error::Config)?;
|
||||||
|
|
||||||
|
let mut options = OllamaOptions::new(mode, normalized_base_url);
|
||||||
|
|
||||||
|
if let Some(timeout) = config
|
||||||
|
.extra
|
||||||
|
.get("timeout_secs")
|
||||||
|
.and_then(|value| value.as_u64())
|
||||||
|
{
|
||||||
|
options.request_timeout = Duration::from_secs(timeout.max(5));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(cache_ttl) = config
|
||||||
|
.extra
|
||||||
|
.get("model_cache_ttl_secs")
|
||||||
|
.and_then(|value| value.as_u64())
|
||||||
|
{
|
||||||
|
options.model_cache_ttl = Duration::from_secs(cache_ttl.max(5));
|
||||||
|
}
|
||||||
|
|
||||||
|
options.api_key = api_key.take();
|
||||||
|
|
||||||
|
if let Some(general) = general {
|
||||||
|
options = options.with_general(general);
|
||||||
|
}
|
||||||
|
|
||||||
|
Self::with_options(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn with_options(options: OllamaOptions) -> Result<Self> {
|
||||||
|
let OllamaOptions {
|
||||||
|
mode,
|
||||||
|
base_url,
|
||||||
|
request_timeout,
|
||||||
|
model_cache_ttl,
|
||||||
|
api_key,
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
let url = Url::parse(&base_url)
|
||||||
|
.map_err(|err| Error::Config(format!("Invalid Ollama base URL '{base_url}': {err}")))?;
|
||||||
|
|
||||||
|
let mut headers = HeaderMap::new();
|
||||||
|
if let Some(ref key) = api_key {
|
||||||
|
let value = HeaderValue::from_str(&format!("Bearer {key}")).map_err(|_| {
|
||||||
|
Error::Config("OLLAMA API key contains invalid characters".to_string())
|
||||||
|
})?;
|
||||||
|
headers.insert(AUTHORIZATION, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut client_builder = Client::builder().timeout(request_timeout);
|
||||||
|
if !headers.is_empty() {
|
||||||
|
client_builder = client_builder.default_headers(headers.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
let http_client = client_builder
|
||||||
|
.build()
|
||||||
|
.map_err(|err| Error::Config(format!("Failed to build HTTP client: {err}")))?;
|
||||||
|
|
||||||
|
let port = url.port_or_known_default().ok_or_else(|| {
|
||||||
|
Error::Config(format!("Unable to determine port for Ollama URL '{}'", url))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let mut ollama_client = Ollama::new_with_client(url.clone(), port, http_client.clone());
|
||||||
|
if !headers.is_empty() {
|
||||||
|
ollama_client.set_headers(Some(headers.clone()));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
mode,
|
||||||
|
client: ollama_client,
|
||||||
|
http_client,
|
||||||
|
base_url: base_url.trim_end_matches('/').to_string(),
|
||||||
|
model_manager: ModelManager::new(model_cache_ttl),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn api_url(&self, endpoint: &str) -> String {
|
||||||
|
build_api_endpoint(&self.base_url, endpoint)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn prepare_chat_request(
|
||||||
|
&self,
|
||||||
|
model: String,
|
||||||
|
messages: Vec<Message>,
|
||||||
|
parameters: ChatParameters,
|
||||||
|
tools: Option<Vec<McpToolDescriptor>>,
|
||||||
|
) -> Result<(String, OllamaChatRequest)> {
|
||||||
|
if self.mode == OllamaMode::Cloud && !model.contains("-cloud") {
|
||||||
|
warn!(
|
||||||
|
"Model '{}' does not use the '-cloud' suffix. Cloud-only models may fail to load.",
|
||||||
|
model
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(descriptors) = &tools {
|
||||||
|
if !descriptors.is_empty() {
|
||||||
|
debug!(
|
||||||
|
"Ignoring {} MCP tool descriptors for Ollama request (tool calling unsupported)",
|
||||||
|
descriptors.len()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let converted_messages = messages.into_iter().map(convert_message).collect();
|
||||||
|
let mut request = OllamaChatRequest::new(model.clone(), converted_messages);
|
||||||
|
|
||||||
|
if let Some(options) = build_model_options(¶meters)? {
|
||||||
|
request.options = Some(options);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((model, request))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch_models(&self) -> Result<Vec<ModelInfo>> {
|
||||||
|
let models = self
|
||||||
|
.client
|
||||||
|
.list_local_models()
|
||||||
|
.await
|
||||||
|
.map_err(|err| self.map_ollama_error("list models", err, None))?;
|
||||||
|
|
||||||
|
let client = self.client.clone();
|
||||||
|
let fetched = join_all(models.into_iter().map(|local| {
|
||||||
|
let client = client.clone();
|
||||||
|
async move {
|
||||||
|
let name = local.name.clone();
|
||||||
|
let detail = match client.show_model_info(name.clone()).await {
|
||||||
|
Ok(info) => Some(info),
|
||||||
|
Err(err) => {
|
||||||
|
debug!("Failed to fetch Ollama model info for '{name}': {err}");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
};
|
||||||
|
(local, detail)
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
.await;
|
||||||
|
|
||||||
|
Ok(fetched
|
||||||
|
.into_iter()
|
||||||
|
.map(|(local, detail)| self.convert_model(local, detail))
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert_model(&self, model: LocalModel, detail: Option<OllamaModelInfo>) -> ModelInfo {
|
||||||
|
let scope = match self.mode {
|
||||||
|
OllamaMode::Local => "local",
|
||||||
|
OllamaMode::Cloud => "cloud",
|
||||||
|
};
|
||||||
|
|
||||||
|
let name = model.name;
|
||||||
|
let mut capabilities: Vec<String> = detail
|
||||||
|
.as_ref()
|
||||||
|
.map(|info| {
|
||||||
|
info.capabilities
|
||||||
|
.iter()
|
||||||
|
.map(|cap| cap.to_ascii_lowercase())
|
||||||
|
.collect()
|
||||||
|
})
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
push_capability(&mut capabilities, "chat");
|
||||||
|
|
||||||
|
for heuristic in heuristic_capabilities(&name) {
|
||||||
|
push_capability(&mut capabilities, &heuristic);
|
||||||
|
}
|
||||||
|
|
||||||
|
let description = build_model_description(scope, detail.as_ref());
|
||||||
|
|
||||||
|
ModelInfo {
|
||||||
|
id: name.clone(),
|
||||||
|
name,
|
||||||
|
description: Some(description),
|
||||||
|
provider: "ollama".to_string(),
|
||||||
|
context_window: None,
|
||||||
|
capabilities,
|
||||||
|
supports_tools: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert_ollama_response(response: OllamaChatResponse, streaming: bool) -> ChatResponse {
|
||||||
|
let usage = response.final_data.as_ref().map(|data| {
|
||||||
|
let prompt = clamp_to_u32(data.prompt_eval_count);
|
||||||
|
let completion = clamp_to_u32(data.eval_count);
|
||||||
|
TokenUsage {
|
||||||
|
prompt_tokens: prompt,
|
||||||
|
completion_tokens: completion,
|
||||||
|
total_tokens: prompt.saturating_add(completion),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
ChatResponse {
|
||||||
|
message: convert_ollama_message(response.message),
|
||||||
|
usage,
|
||||||
|
is_streaming: streaming,
|
||||||
|
is_final: if streaming { response.done } else { true },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map_ollama_error(&self, action: &str, err: OllamaError, model: Option<&str>) -> Error {
|
||||||
|
match err {
|
||||||
|
OllamaError::ReqwestError(request_err) => {
|
||||||
|
if let Some(status) = request_err.status() {
|
||||||
|
self.map_http_failure(action, status, request_err.to_string(), model)
|
||||||
|
} else if request_err.is_timeout() {
|
||||||
|
Error::Timeout(format!("Ollama {action} timed out: {request_err}"))
|
||||||
|
} else {
|
||||||
|
Error::Network(format!("Ollama {action} request failed: {request_err}"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
OllamaError::InternalError(internal) => Error::Provider(anyhow!(internal.message)),
|
||||||
|
OllamaError::Other(message) => Error::Provider(anyhow!(message)),
|
||||||
|
OllamaError::JsonError(err) => Error::Serialization(err),
|
||||||
|
OllamaError::ToolCallError(err) => Error::Provider(anyhow!(err)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map_http_failure(
|
||||||
|
&self,
|
||||||
|
action: &str,
|
||||||
|
status: StatusCode,
|
||||||
|
detail: String,
|
||||||
|
model: Option<&str>,
|
||||||
|
) -> Error {
|
||||||
|
match status {
|
||||||
|
StatusCode::NOT_FOUND => {
|
||||||
|
if let Some(model) = model {
|
||||||
|
Error::InvalidInput(format!(
|
||||||
|
"Model '{model}' was not found at {}. Verify the name or pull it with `ollama pull`.",
|
||||||
|
self.base_url
|
||||||
|
))
|
||||||
|
} else {
|
||||||
|
Error::InvalidInput(format!(
|
||||||
|
"{action} returned 404 from {}: {detail}",
|
||||||
|
self.base_url
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => Error::Auth(format!(
|
||||||
|
"Ollama rejected the request ({status}): {detail}. Check your API key and account permissions."
|
||||||
|
)),
|
||||||
|
StatusCode::BAD_REQUEST => Error::InvalidInput(format!(
|
||||||
|
"{action} rejected by Ollama ({status}): {detail}"
|
||||||
|
)),
|
||||||
|
StatusCode::SERVICE_UNAVAILABLE | StatusCode::GATEWAY_TIMEOUT => Error::Timeout(
|
||||||
|
format!(
|
||||||
|
"Ollama {action} timed out ({status}). The model may still be loading."
|
||||||
|
),
|
||||||
|
),
|
||||||
|
_ => Error::Network(format!(
|
||||||
|
"Ollama {action} failed ({status}): {detail}"
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LLMProvider for OllamaProvider {
|
||||||
|
type Stream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;
|
||||||
|
type ListModelsFuture<'a>
|
||||||
|
= BoxFuture<'a, Result<Vec<ModelInfo>>>
|
||||||
|
where
|
||||||
|
Self: 'a;
|
||||||
|
type ChatFuture<'a>
|
||||||
|
= BoxFuture<'a, Result<ChatResponse>>
|
||||||
|
where
|
||||||
|
Self: 'a;
|
||||||
|
type ChatStreamFuture<'a>
|
||||||
|
= BoxFuture<'a, Result<Self::Stream>>
|
||||||
|
where
|
||||||
|
Self: 'a;
|
||||||
|
type HealthCheckFuture<'a>
|
||||||
|
= BoxFuture<'a, Result<()>>
|
||||||
|
where
|
||||||
|
Self: 'a;
|
||||||
|
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"ollama"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn list_models(&self) -> Self::ListModelsFuture<'_> {
|
||||||
|
Box::pin(async move {
|
||||||
|
self.model_manager
|
||||||
|
.get_or_refresh(false, || async { self.fetch_models().await })
|
||||||
|
.await
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn chat(&self, request: ChatRequest) -> Self::ChatFuture<'_> {
|
||||||
|
Box::pin(async move {
|
||||||
|
let ChatRequest {
|
||||||
|
model,
|
||||||
|
messages,
|
||||||
|
parameters,
|
||||||
|
tools,
|
||||||
|
} = request;
|
||||||
|
|
||||||
|
let (model_id, ollama_request) =
|
||||||
|
self.prepare_chat_request(model, messages, parameters, tools)?;
|
||||||
|
|
||||||
|
let response = self
|
||||||
|
.client
|
||||||
|
.send_chat_messages(ollama_request)
|
||||||
|
.await
|
||||||
|
.map_err(|err| self.map_ollama_error("chat", err, Some(&model_id)))?;
|
||||||
|
|
||||||
|
Ok(Self::convert_ollama_response(response, false))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn chat_stream(&self, request: ChatRequest) -> Self::ChatStreamFuture<'_> {
|
||||||
|
Box::pin(async move {
|
||||||
|
let ChatRequest {
|
||||||
|
model,
|
||||||
|
messages,
|
||||||
|
parameters,
|
||||||
|
tools,
|
||||||
|
} = request;
|
||||||
|
|
||||||
|
let (model_id, ollama_request) =
|
||||||
|
self.prepare_chat_request(model, messages, parameters, tools)?;
|
||||||
|
|
||||||
|
let stream = self
|
||||||
|
.client
|
||||||
|
.send_chat_messages_stream(ollama_request)
|
||||||
|
.await
|
||||||
|
.map_err(|err| self.map_ollama_error("chat_stream", err, Some(&model_id)))?;
|
||||||
|
|
||||||
|
let mapped = stream.map(|item| match item {
|
||||||
|
Ok(chunk) => Ok(Self::convert_ollama_response(chunk, true)),
|
||||||
|
Err(_) => Err(Error::Provider(anyhow!(
|
||||||
|
"Ollama returned a malformed streaming chunk"
|
||||||
|
))),
|
||||||
|
});
|
||||||
|
|
||||||
|
Ok(Box::pin(mapped) as Self::Stream)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn health_check(&self) -> Self::HealthCheckFuture<'_> {
|
||||||
|
Box::pin(async move {
|
||||||
|
let url = self.api_url("version");
|
||||||
|
let response = self
|
||||||
|
.http_client
|
||||||
|
.get(&url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|err| map_reqwest_error("health check", err))?;
|
||||||
|
|
||||||
|
if response.status().is_success() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let status = response.status();
|
||||||
|
let detail = response.text().await.unwrap_or_else(|err| err.to_string());
|
||||||
|
Err(self.map_http_failure("health check", status, detail, None))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn config_schema(&self) -> serde_json::Value {
|
||||||
|
serde_json::json!({
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"base_url": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Base URL for the Ollama API (ignored when api_key is provided)",
|
||||||
|
"default": self.mode.default_base_url()
|
||||||
|
},
|
||||||
|
"timeout_secs": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "HTTP request timeout in seconds",
|
||||||
|
"minimum": 5,
|
||||||
|
"default": DEFAULT_TIMEOUT_SECS
|
||||||
|
},
|
||||||
|
"model_cache_ttl_secs": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Seconds to cache model listings",
|
||||||
|
"minimum": 5,
|
||||||
|
"default": DEFAULT_MODEL_CACHE_TTL_SECS
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_model_options(parameters: &ChatParameters) -> Result<Option<ModelOptions>> {
|
||||||
|
let mut options = JsonMap::new();
|
||||||
|
|
||||||
|
for (key, value) in ¶meters.extra {
|
||||||
|
options.insert(key.clone(), value.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(temperature) = parameters.temperature {
|
||||||
|
options.insert("temperature".to_string(), json!(temperature));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(max_tokens) = parameters.max_tokens {
|
||||||
|
let capped = i32::try_from(max_tokens).unwrap_or(i32::MAX);
|
||||||
|
options.insert("num_predict".to_string(), json!(capped));
|
||||||
|
}
|
||||||
|
|
||||||
|
if options.is_empty() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
serde_json::from_value(Value::Object(options))
|
||||||
|
.map(Some)
|
||||||
|
.map_err(|err| Error::Config(format!("Invalid Ollama options: {err}")))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert_message(message: Message) -> OllamaMessage {
|
||||||
|
let Message {
|
||||||
|
role,
|
||||||
|
content,
|
||||||
|
metadata,
|
||||||
|
tool_calls,
|
||||||
|
..
|
||||||
|
} = message;
|
||||||
|
|
||||||
|
let role = match role {
|
||||||
|
Role::User => OllamaRole::User,
|
||||||
|
Role::Assistant => OllamaRole::Assistant,
|
||||||
|
Role::System => OllamaRole::System,
|
||||||
|
Role::Tool => OllamaRole::Tool,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tool_calls = tool_calls
|
||||||
|
.unwrap_or_default()
|
||||||
|
.into_iter()
|
||||||
|
.map(|tool_call| OllamaToolCall {
|
||||||
|
function: OllamaToolCallFunction {
|
||||||
|
name: tool_call.name,
|
||||||
|
arguments: tool_call.arguments,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let thinking = metadata
|
||||||
|
.get("thinking")
|
||||||
|
.and_then(|value| value.as_str().map(|s| s.to_owned()));
|
||||||
|
|
||||||
|
OllamaMessage {
|
||||||
|
role,
|
||||||
|
content,
|
||||||
|
tool_calls,
|
||||||
|
images: None,
|
||||||
|
thinking,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert_ollama_message(message: OllamaMessage) -> Message {
|
||||||
|
let role = match message.role {
|
||||||
|
OllamaRole::Assistant => Role::Assistant,
|
||||||
|
OllamaRole::System => Role::System,
|
||||||
|
OllamaRole::Tool => Role::Tool,
|
||||||
|
OllamaRole::User => Role::User,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tool_calls = if message.tool_calls.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(
|
||||||
|
message
|
||||||
|
.tool_calls
|
||||||
|
.into_iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(idx, tool_call)| ToolCall {
|
||||||
|
id: format!("tool-call-{idx}"),
|
||||||
|
name: tool_call.function.name,
|
||||||
|
arguments: tool_call.function.arguments,
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut metadata = HashMap::new();
|
||||||
|
if let Some(thinking) = message.thinking {
|
||||||
|
metadata.insert("thinking".to_string(), Value::String(thinking));
|
||||||
|
}
|
||||||
|
|
||||||
|
Message {
|
||||||
|
id: Uuid::new_v4(),
|
||||||
|
role,
|
||||||
|
content: message.content,
|
||||||
|
metadata,
|
||||||
|
timestamp: SystemTime::now(),
|
||||||
|
tool_calls,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn clamp_to_u32(value: u64) -> u32 {
|
||||||
|
u32::try_from(value).unwrap_or(u32::MAX)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push_capability(capabilities: &mut Vec<String>, capability: &str) {
|
||||||
|
let candidate = capability.to_ascii_lowercase();
|
||||||
|
if !capabilities
|
||||||
|
.iter()
|
||||||
|
.any(|existing| existing.eq_ignore_ascii_case(&candidate))
|
||||||
|
{
|
||||||
|
capabilities.push(candidate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn heuristic_capabilities(name: &str) -> Vec<String> {
|
||||||
|
let lowercase = name.to_ascii_lowercase();
|
||||||
|
let mut detected = Vec::new();
|
||||||
|
|
||||||
|
if lowercase.contains("vision")
|
||||||
|
|| lowercase.contains("multimodal")
|
||||||
|
|| lowercase.contains("image")
|
||||||
|
{
|
||||||
|
detected.push("vision".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if lowercase.contains("think")
|
||||||
|
|| lowercase.contains("reason")
|
||||||
|
|| lowercase.contains("deepseek-r1")
|
||||||
|
|| lowercase.contains("r1")
|
||||||
|
{
|
||||||
|
detected.push("thinking".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if lowercase.contains("audio") || lowercase.contains("speech") || lowercase.contains("voice") {
|
||||||
|
detected.push("audio".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
detected
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_model_description(scope: &str, detail: Option<&OllamaModelInfo>) -> String {
|
||||||
|
if let Some(info) = detail {
|
||||||
|
let mut parts = Vec::new();
|
||||||
|
|
||||||
|
if let Some(family) = info
|
||||||
|
.model_info
|
||||||
|
.get("family")
|
||||||
|
.and_then(|value| value.as_str())
|
||||||
|
{
|
||||||
|
parts.push(family.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(parameter_size) = info
|
||||||
|
.model_info
|
||||||
|
.get("parameter_size")
|
||||||
|
.and_then(|value| value.as_str())
|
||||||
|
{
|
||||||
|
parts.push(parameter_size.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(variant) = info
|
||||||
|
.model_info
|
||||||
|
.get("variant")
|
||||||
|
.and_then(|value| value.as_str())
|
||||||
|
{
|
||||||
|
parts.push(variant.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
if !parts.is_empty() {
|
||||||
|
return format!("Ollama ({scope}) – {}", parts.join(" · "));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
format!("Ollama ({scope}) model")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn env_var_non_empty(name: &str) -> Option<String> {
|
||||||
|
env::var(name)
|
||||||
|
.ok()
|
||||||
|
.map(|value| value.trim().to_string())
|
||||||
|
.filter(|value| !value.is_empty())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn resolve_api_key(configured: Option<String>) -> Option<String> {
|
||||||
|
let raw = configured?.trim().to_string();
|
||||||
|
if raw.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(variable) = raw
|
||||||
|
.strip_prefix("${")
|
||||||
|
.and_then(|value| value.strip_suffix('}'))
|
||||||
|
.or_else(|| raw.strip_prefix('$'))
|
||||||
|
{
|
||||||
|
let var_name = variable.trim();
|
||||||
|
if var_name.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
return env_var_non_empty(var_name);
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(raw)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map_reqwest_error(action: &str, err: reqwest::Error) -> Error {
|
||||||
|
if err.is_timeout() {
|
||||||
|
Error::Timeout(format!("Ollama {action} request timed out: {err}"))
|
||||||
|
} else {
|
||||||
|
Error::Network(format!("Ollama {action} request failed: {err}"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn normalize_base_url(
|
||||||
|
input: Option<&str>,
|
||||||
|
mode_hint: OllamaMode,
|
||||||
|
) -> std::result::Result<String, String> {
|
||||||
|
let mut candidate = input
|
||||||
|
.map(str::trim)
|
||||||
|
.filter(|value| !value.is_empty())
|
||||||
|
.map(|value| value.to_string())
|
||||||
|
.unwrap_or_else(|| mode_hint.default_base_url().to_string());
|
||||||
|
|
||||||
|
if !candidate.starts_with("http://") && !candidate.starts_with("https://") {
|
||||||
|
candidate = format!("https://{candidate}");
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut url =
|
||||||
|
Url::parse(&candidate).map_err(|err| format!("Invalid Ollama URL '{candidate}': {err}"))?;
|
||||||
|
|
||||||
|
if url.cannot_be_a_base() {
|
||||||
|
return Err(format!("URL '{candidate}' cannot be used as a base URL"));
|
||||||
|
}
|
||||||
|
|
||||||
|
if mode_hint == OllamaMode::Cloud && url.scheme() != "https" {
|
||||||
|
return Err("Ollama Cloud requires https:// base URLs".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let path = url.path().trim_end_matches('/');
|
||||||
|
if path == "/api" {
|
||||||
|
url.set_path("/");
|
||||||
|
} else if !path.is_empty() && path != "/" {
|
||||||
|
return Err("Ollama base URLs must not include additional path segments".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
url.set_query(None);
|
||||||
|
url.set_fragment(None);
|
||||||
|
|
||||||
|
Ok(url.to_string().trim_end_matches('/').to_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_api_endpoint(base_url: &str, endpoint: &str) -> String {
|
||||||
|
let trimmed_base = base_url.trim_end_matches('/');
|
||||||
|
let trimmed_endpoint = endpoint.trim_start_matches('/');
|
||||||
|
|
||||||
|
if trimmed_base.ends_with("/api") {
|
||||||
|
format!("{trimmed_base}/{trimmed_endpoint}")
|
||||||
|
} else {
|
||||||
|
format!("{trimmed_base}/api/{trimmed_endpoint}")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn resolve_api_key_prefers_literal_value() {
|
||||||
|
assert_eq!(
|
||||||
|
resolve_api_key(Some("direct-key".into())),
|
||||||
|
Some("direct-key".into())
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn resolve_api_key_expands_env_var() {
|
||||||
|
std::env::set_var("OLLAMA_TEST_KEY", "secret");
|
||||||
|
assert_eq!(
|
||||||
|
resolve_api_key(Some("${OLLAMA_TEST_KEY}".into())),
|
||||||
|
Some("secret".into())
|
||||||
|
);
|
||||||
|
std::env::remove_var("OLLAMA_TEST_KEY");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn normalize_base_url_removes_api_path() {
|
||||||
|
let url = normalize_base_url(Some("https://ollama.com/api"), OllamaMode::Cloud).unwrap();
|
||||||
|
assert_eq!(url, "https://ollama.com");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn normalize_base_url_rejects_cloud_without_https() {
|
||||||
|
let err = normalize_base_url(Some("http://ollama.com"), OllamaMode::Cloud).unwrap_err();
|
||||||
|
assert!(err.contains("https"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn build_model_options_merges_parameters() {
|
||||||
|
let mut parameters = ChatParameters::default();
|
||||||
|
parameters.temperature = Some(0.3);
|
||||||
|
parameters.max_tokens = Some(128);
|
||||||
|
parameters
|
||||||
|
.extra
|
||||||
|
.insert("num_ctx".into(), Value::from(4096_u64));
|
||||||
|
|
||||||
|
let options = build_model_options(¶meters)
|
||||||
|
.expect("options built")
|
||||||
|
.expect("options present");
|
||||||
|
let serialized = serde_json::to_value(&options).expect("serialize options");
|
||||||
|
let temperature = serialized["temperature"]
|
||||||
|
.as_f64()
|
||||||
|
.expect("temperature present");
|
||||||
|
assert!((temperature - 0.3).abs() < 1e-6);
|
||||||
|
assert_eq!(serialized["num_predict"], json!(128));
|
||||||
|
assert_eq!(serialized["num_ctx"], json!(4096));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn heuristic_capabilities_detects_thinking_models() {
|
||||||
|
let caps = heuristic_capabilities("deepseek-r1");
|
||||||
|
assert!(caps.iter().any(|cap| cap == "thinking"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_capability_avoids_duplicates() {
|
||||||
|
let mut caps = vec!["chat".to_string()];
|
||||||
|
push_capability(&mut caps, "Chat");
|
||||||
|
push_capability(&mut caps, "Vision");
|
||||||
|
push_capability(&mut caps, "vision");
|
||||||
|
|
||||||
|
assert_eq!(caps.len(), 2);
|
||||||
|
assert!(caps.iter().any(|cap| cap == "vision"));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
use futures::StreamExt;
|
|
||||||
use owlen_core::provider::test_utils::MockProvider;
|
|
||||||
use owlen_core::{provider::ProviderRegistry, types::*, Router};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
fn request(message: &str) -> ChatRequest {
|
|
||||||
ChatRequest {
|
|
||||||
model: "mock-model".to_string(),
|
|
||||||
messages: vec![Message::new(Role::User, message.to_string())],
|
|
||||||
parameters: ChatParameters::default(),
|
|
||||||
tools: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn router_routes_to_registered_provider() {
|
|
||||||
let mut router = Router::new();
|
|
||||||
router.register_provider(MockProvider::default());
|
|
||||||
router.set_default_provider("mock".to_string());
|
|
||||||
|
|
||||||
let resp = router.chat(request("ping")).await.expect("chat succeeded");
|
|
||||||
assert_eq!(resp.message.content, "Mock response to: ping");
|
|
||||||
|
|
||||||
let mut stream = router
|
|
||||||
.chat_stream(request("pong"))
|
|
||||||
.await
|
|
||||||
.expect("stream returned");
|
|
||||||
let first = stream.next().await.expect("stream item").expect("ok item");
|
|
||||||
assert_eq!(first.message.content, "Mock response to: pong");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn registry_lists_models_from_all_providers() {
|
|
||||||
let mut registry = ProviderRegistry::new();
|
|
||||||
registry.register(MockProvider::default());
|
|
||||||
registry.register_arc(Arc::new(MockProvider::default()));
|
|
||||||
|
|
||||||
let models = registry.list_all_models().await.expect("listed");
|
|
||||||
assert!(
|
|
||||||
models.iter().any(|m| m.name == "mock-model"),
|
|
||||||
"expected mock-model in model list"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -5,7 +5,6 @@ edition = "2021"
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
owlen-core = { path = "../owlen-core" }
|
owlen-core = { path = "../owlen-core" }
|
||||||
owlen-ollama = { path = "../owlen-ollama" }
|
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
serde = { workspace = true }
|
serde = { workspace = true }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
|
|||||||
@@ -14,13 +14,14 @@ use owlen_core::mcp::protocol::{
|
|||||||
};
|
};
|
||||||
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
|
use owlen_core::mcp::{McpToolCall, McpToolDescriptor, McpToolResponse};
|
||||||
use owlen_core::provider::ProviderConfig;
|
use owlen_core::provider::ProviderConfig;
|
||||||
|
use owlen_core::providers::OllamaProvider;
|
||||||
use owlen_core::types::{ChatParameters, ChatRequest, Message};
|
use owlen_core::types::{ChatParameters, ChatRequest, Message};
|
||||||
use owlen_core::Provider;
|
use owlen_core::Provider;
|
||||||
use owlen_ollama::OllamaProvider;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::env;
|
use std::env;
|
||||||
|
use std::sync::Arc;
|
||||||
use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt};
|
use tokio::io::{self, AsyncBufReadExt, AsyncWriteExt};
|
||||||
use tokio_stream::StreamExt;
|
use tokio_stream::StreamExt;
|
||||||
|
|
||||||
@@ -108,42 +109,56 @@ fn resources_list_descriptor() -> McpToolDescriptor {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn provider_from_config() -> Result<OllamaProvider, RpcError> {
|
fn provider_from_config() -> Result<Arc<dyn Provider>, RpcError> {
|
||||||
let mut config = OwlenConfig::load(None).unwrap_or_default();
|
let mut config = OwlenConfig::load(None).unwrap_or_default();
|
||||||
let provider_name =
|
let requested_name =
|
||||||
env::var("OWLEN_PROVIDER").unwrap_or_else(|_| config.general.default_provider.clone());
|
env::var("OWLEN_PROVIDER").unwrap_or_else(|_| config.general.default_provider.clone());
|
||||||
if config.provider(&provider_name).is_none() {
|
let provider_key = canonical_provider_name(&requested_name);
|
||||||
ensure_provider_config(&mut config, &provider_name);
|
if config.provider(&provider_key).is_none() {
|
||||||
|
ensure_provider_config(&mut config, &provider_key);
|
||||||
}
|
}
|
||||||
let provider_cfg: ProviderConfig =
|
let provider_cfg: ProviderConfig =
|
||||||
config.provider(&provider_name).cloned().ok_or_else(|| {
|
config.provider(&provider_key).cloned().ok_or_else(|| {
|
||||||
RpcError::internal_error(format!(
|
RpcError::internal_error(format!(
|
||||||
"Provider '{provider_name}' not found in configuration"
|
"Provider '{provider_key}' not found in configuration"
|
||||||
))
|
))
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
if provider_cfg.provider_type != "ollama" && provider_cfg.provider_type != "ollama-cloud" {
|
match provider_cfg.provider_type.as_str() {
|
||||||
return Err(RpcError::internal_error(format!(
|
"ollama" | "ollama-cloud" => {
|
||||||
"Unsupported provider type '{}' for MCP LLM server",
|
let provider = OllamaProvider::from_config(&provider_cfg, Some(&config.general))
|
||||||
provider_cfg.provider_type
|
.map_err(|e| {
|
||||||
)));
|
RpcError::internal_error(format!(
|
||||||
|
"Failed to init Ollama provider from config: {e}"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
Ok(Arc::new(provider) as Arc<dyn Provider>)
|
||||||
|
}
|
||||||
|
other => Err(RpcError::internal_error(format!(
|
||||||
|
"Unsupported provider type '{other}' for MCP LLM server"
|
||||||
|
))),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
OllamaProvider::from_config(&provider_cfg, Some(&config.general)).map_err(|e| {
|
fn create_provider() -> Result<Arc<dyn Provider>, RpcError> {
|
||||||
RpcError::internal_error(format!("Failed to init OllamaProvider from config: {}", e))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_provider() -> Result<OllamaProvider, RpcError> {
|
|
||||||
if let Ok(url) = env::var("OLLAMA_URL") {
|
if let Ok(url) = env::var("OLLAMA_URL") {
|
||||||
return OllamaProvider::new(&url).map_err(|e| {
|
let provider = OllamaProvider::new(&url).map_err(|e| {
|
||||||
RpcError::internal_error(format!("Failed to init OllamaProvider: {}", e))
|
RpcError::internal_error(format!("Failed to init Ollama provider: {e}"))
|
||||||
});
|
})?;
|
||||||
|
return Ok(Arc::new(provider) as Arc<dyn Provider>);
|
||||||
}
|
}
|
||||||
|
|
||||||
provider_from_config()
|
provider_from_config()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn canonical_provider_name(name: &str) -> String {
|
||||||
|
if name.eq_ignore_ascii_case("ollama-cloud") {
|
||||||
|
"ollama".to_string()
|
||||||
|
} else {
|
||||||
|
name.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async fn handle_generate_text(args: GenerateTextArgs) -> Result<String, RpcError> {
|
async fn handle_generate_text(args: GenerateTextArgs) -> Result<String, RpcError> {
|
||||||
let provider = create_provider()?;
|
let provider = create_provider()?;
|
||||||
|
|
||||||
@@ -409,16 +424,14 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Initialize Ollama provider and start streaming
|
// Initialize provider and start streaming
|
||||||
let ollama_url = env::var("OLLAMA_URL")
|
let provider = match create_provider() {
|
||||||
.unwrap_or_else(|_| "http://localhost:11434".to_string());
|
|
||||||
let provider = match OllamaProvider::new(&ollama_url) {
|
|
||||||
Ok(p) => p,
|
Ok(p) => p,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
let err_resp = RpcErrorResponse::new(
|
let err_resp = RpcErrorResponse::new(
|
||||||
id.clone(),
|
id.clone(),
|
||||||
RpcError::internal_error(format!(
|
RpcError::internal_error(format!(
|
||||||
"Failed to init OllamaProvider: {}",
|
"Failed to initialize provider: {:?}",
|
||||||
e
|
e
|
||||||
)),
|
)),
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,34 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "owlen-ollama"
|
|
||||||
version.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
description = "Ollama provider for OWLEN LLM client"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
owlen-core = { path = "../owlen-core" }
|
|
||||||
|
|
||||||
# HTTP client
|
|
||||||
reqwest = { workspace = true }
|
|
||||||
|
|
||||||
# Async runtime
|
|
||||||
tokio = { workspace = true }
|
|
||||||
tokio-stream = { workspace = true }
|
|
||||||
futures = { workspace = true }
|
|
||||||
futures-util = { workspace = true }
|
|
||||||
|
|
||||||
# Serialization
|
|
||||||
serde = { workspace = true }
|
|
||||||
serde_json = { workspace = true }
|
|
||||||
|
|
||||||
# Utilities
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
thiserror = { workspace = true }
|
|
||||||
uuid = { workspace = true }
|
|
||||||
async-trait = { workspace = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio-test = { workspace = true }
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# Owlen Ollama
|
|
||||||
|
|
||||||
This crate provides an implementation of the `owlen-core::Provider` trait for the [Ollama](https://ollama.ai) backend.
|
|
||||||
|
|
||||||
It allows Owlen to communicate with a local Ollama instance, sending requests and receiving responses from locally-run large language models. You can also target [Ollama Cloud](https://docs.ollama.com/cloud) by pointing the provider at `https://ollama.com` (or `https://api.ollama.com`) and providing an API key through your Owlen configuration (or the `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` environment variables). The client automatically adds the required Bearer authorization header when a key is supplied, accepts either host without rewriting, and expands inline environment references like `$OLLAMA_API_KEY` if you prefer not to check the secret into your config file. The generated configuration now includes both `providers.ollama` and `providers.ollama-cloud` entries—switch between them by updating `general.default_provider`.
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
To use this provider, you need to have Ollama installed and running. The default address is `http://localhost:11434`. You can configure this in your `config.toml` if your Ollama instance is running elsewhere.
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -17,7 +17,7 @@ use crate::config;
|
|||||||
use crate::events::Event;
|
use crate::events::Event;
|
||||||
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
|
// Agent executor moved to separate binary `owlen-agent`. The TUI no longer directly
|
||||||
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
|
// imports `AgentExecutor` to avoid a circular dependency on `owlen-cli`.
|
||||||
use std::collections::{BTreeSet, HashSet};
|
use std::collections::{BTreeSet, HashMap, HashSet};
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
const ONBOARDING_STATUS_LINE: &str =
|
const ONBOARDING_STATUS_LINE: &str =
|
||||||
@@ -2392,25 +2392,11 @@ impl ChatApp {
|
|||||||
let mut models = Vec::new();
|
let mut models = Vec::new();
|
||||||
let mut errors = Vec::new();
|
let mut errors = Vec::new();
|
||||||
|
|
||||||
for (name, provider_cfg) in provider_entries {
|
|
||||||
let provider_type = provider_cfg.provider_type.to_ascii_lowercase();
|
|
||||||
if provider_type != "ollama" && provider_type != "ollama-cloud" {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// All providers communicate via MCP LLM server (Phase 10).
|
|
||||||
// For cloud providers, the URL is passed via the provider config.
|
|
||||||
let client_result = if provider_type == "ollama-cloud" {
|
|
||||||
// Cloud Ollama - create MCP client with custom URL via env var
|
|
||||||
use owlen_core::config::McpServerConfig;
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
|
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||||
.join("../..")
|
.join("../..")
|
||||||
.canonicalize()
|
.canonicalize()
|
||||||
.ok();
|
.ok();
|
||||||
|
let server_binary = workspace_root.as_ref().and_then(|root| {
|
||||||
let binary_path = workspace_root.and_then(|root| {
|
|
||||||
let candidates = [
|
let candidates = [
|
||||||
"target/debug/owlen-mcp-llm-server",
|
"target/debug/owlen-mcp-llm-server",
|
||||||
"target/release/owlen-mcp-llm-server",
|
"target/release/owlen-mcp-llm-server",
|
||||||
@@ -2419,43 +2405,72 @@ impl ChatApp {
|
|||||||
.iter()
|
.iter()
|
||||||
.map(|rel| root.join(rel))
|
.map(|rel| root.join(rel))
|
||||||
.find(|p| p.exists())
|
.find(|p| p.exists())
|
||||||
|
.map(|p| p.to_string_lossy().into_owned())
|
||||||
});
|
});
|
||||||
|
|
||||||
if let Some(path) = binary_path {
|
for (name, provider_cfg) in provider_entries {
|
||||||
let mut env_vars = HashMap::new();
|
let provider_type = provider_cfg.provider_type.to_ascii_lowercase();
|
||||||
if let Some(url) = &provider_cfg.base_url {
|
if provider_type != "ollama" && provider_type != "ollama-cloud" {
|
||||||
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let canonical_name = if name.eq_ignore_ascii_case("ollama-cloud") {
|
||||||
|
"ollama".to_string()
|
||||||
|
} else {
|
||||||
|
name.clone()
|
||||||
|
};
|
||||||
|
|
||||||
|
// All providers communicate via MCP LLM server (Phase 10).
|
||||||
|
// Select provider by name via OWLEN_PROVIDER so per-provider settings apply.
|
||||||
|
let mut env_vars = HashMap::new();
|
||||||
|
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.clone());
|
||||||
|
|
||||||
|
let client_result = if let Some(binary_path) = server_binary.as_ref() {
|
||||||
|
use owlen_core::config::McpServerConfig;
|
||||||
|
|
||||||
let config = McpServerConfig {
|
let config = McpServerConfig {
|
||||||
name: name.clone(),
|
name: format!("provider::{canonical_name}"),
|
||||||
command: path.to_string_lossy().into_owned(),
|
command: binary_path.clone(),
|
||||||
args: Vec::new(),
|
args: Vec::new(),
|
||||||
transport: "stdio".to_string(),
|
transport: "stdio".to_string(),
|
||||||
env: env_vars,
|
env: env_vars.clone(),
|
||||||
};
|
};
|
||||||
RemoteMcpClient::new_with_config(&config)
|
RemoteMcpClient::new_with_config(&config)
|
||||||
} else {
|
} else {
|
||||||
Err(owlen_core::Error::NotImplemented(
|
// Fallback to legacy discovery: temporarily set env vars while spawning.
|
||||||
"MCP server binary not found".into(),
|
let backups: Vec<(String, Option<String>)> = env_vars
|
||||||
))
|
.keys()
|
||||||
|
.map(|key| (key.clone(), std::env::var(key).ok()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
for (key, value) in env_vars.iter() {
|
||||||
|
std::env::set_var(key, value);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let result = RemoteMcpClient::new();
|
||||||
|
|
||||||
|
for (key, original) in backups {
|
||||||
|
if let Some(value) = original {
|
||||||
|
std::env::set_var(&key, value);
|
||||||
} else {
|
} else {
|
||||||
// Local Ollama - use default MCP client
|
std::env::remove_var(&key);
|
||||||
RemoteMcpClient::new()
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result
|
||||||
};
|
};
|
||||||
|
|
||||||
match client_result {
|
match client_result {
|
||||||
Ok(client) => match client.list_models().await {
|
Ok(client) => match client.list_models().await {
|
||||||
Ok(mut provider_models) => {
|
Ok(mut provider_models) => {
|
||||||
for model in &mut provider_models {
|
for model in &mut provider_models {
|
||||||
model.provider = name.clone();
|
model.provider = canonical_name.clone();
|
||||||
}
|
}
|
||||||
models.extend(provider_models);
|
models.extend(provider_models);
|
||||||
}
|
}
|
||||||
Err(err) => errors.push(format!("{}: {}", name, err)),
|
Err(err) => errors.push(format!("{}: {}", name, err)),
|
||||||
},
|
},
|
||||||
Err(err) => errors.push(format!("{}: {}", name, err)),
|
Err(err) => errors.push(format!("{}: {}", canonical_name, err)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2497,13 +2512,50 @@ impl ChatApp {
|
|||||||
items.push(ModelSelectorItem::header(provider.clone(), is_expanded));
|
items.push(ModelSelectorItem::header(provider.clone(), is_expanded));
|
||||||
|
|
||||||
if is_expanded {
|
if is_expanded {
|
||||||
let mut matches: Vec<(usize, &ModelInfo)> = self
|
let relevant: Vec<(usize, &ModelInfo)> = self
|
||||||
.models
|
.models
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.filter(|(_, model)| &model.provider == provider)
|
.filter(|(_, model)| &model.provider == provider)
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
|
let mut best_by_canonical: HashMap<String, (i8, (usize, &ModelInfo))> =
|
||||||
|
HashMap::new();
|
||||||
|
|
||||||
|
let provider_lower = provider.to_ascii_lowercase();
|
||||||
|
|
||||||
|
for (idx, model) in relevant {
|
||||||
|
let canonical = model.id.to_string();
|
||||||
|
|
||||||
|
let is_cloud_id = model.id.ends_with("-cloud");
|
||||||
|
let priority = match provider_lower.as_str() {
|
||||||
|
"ollama" | "ollama-cloud" => {
|
||||||
|
if is_cloud_id {
|
||||||
|
1
|
||||||
|
} else {
|
||||||
|
2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => 1,
|
||||||
|
};
|
||||||
|
|
||||||
|
best_by_canonical
|
||||||
|
.entry(canonical)
|
||||||
|
.and_modify(|entry| {
|
||||||
|
if priority > entry.0
|
||||||
|
|| (priority == entry.0 && model.id < entry.1 .1.id)
|
||||||
|
{
|
||||||
|
*entry = (priority, (idx, model));
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.or_insert((priority, (idx, model)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut matches: Vec<(usize, &ModelInfo)> = best_by_canonical
|
||||||
|
.into_values()
|
||||||
|
.map(|entry| entry.1)
|
||||||
|
.collect();
|
||||||
|
|
||||||
matches.sort_by(|(_, a), (_, b)| a.id.cmp(&b.id));
|
matches.sort_by(|(_, a), (_, b)| a.id.cmp(&b.id));
|
||||||
|
|
||||||
if matches.is_empty() {
|
if matches.is_empty() {
|
||||||
@@ -2680,54 +2732,67 @@ impl ChatApp {
|
|||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let provider_cfg = if let Some(cfg) = self.controller.config().provider(provider_name) {
|
|
||||||
cfg.clone()
|
|
||||||
} else {
|
|
||||||
let mut guard = self.controller.config_mut();
|
|
||||||
// Pass a mutable reference directly; avoid unnecessary deref
|
|
||||||
let cfg = config::ensure_provider_config(&mut guard, provider_name);
|
|
||||||
cfg.clone()
|
|
||||||
};
|
|
||||||
|
|
||||||
// All providers use MCP architecture (Phase 10).
|
|
||||||
// For cloud providers, pass the URL via environment variable.
|
|
||||||
let provider: Arc<dyn owlen_core::provider::Provider> = if provider_cfg
|
|
||||||
.provider_type
|
|
||||||
.eq_ignore_ascii_case("ollama-cloud")
|
|
||||||
{
|
|
||||||
// Cloud Ollama - create MCP client with custom URL
|
|
||||||
use owlen_core::config::McpServerConfig;
|
use owlen_core::config::McpServerConfig;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
let canonical_name = if provider_name.eq_ignore_ascii_case("ollama-cloud") {
|
||||||
|
"ollama"
|
||||||
|
} else {
|
||||||
|
provider_name
|
||||||
|
};
|
||||||
|
|
||||||
|
if self.controller.config().provider(canonical_name).is_none() {
|
||||||
|
let mut guard = self.controller.config_mut();
|
||||||
|
config::ensure_provider_config(&mut guard, canonical_name);
|
||||||
|
}
|
||||||
|
|
||||||
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
|
let workspace_root = std::path::Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||||
.join("../..")
|
.join("../..")
|
||||||
.canonicalize()?;
|
.canonicalize()
|
||||||
|
.ok();
|
||||||
let binary_path = [
|
let server_binary = workspace_root.as_ref().and_then(|root| {
|
||||||
|
[
|
||||||
"target/debug/owlen-mcp-llm-server",
|
"target/debug/owlen-mcp-llm-server",
|
||||||
"target/release/owlen-mcp-llm-server",
|
"target/release/owlen-mcp-llm-server",
|
||||||
]
|
]
|
||||||
.iter()
|
.iter()
|
||||||
.map(|rel| workspace_root.join(rel))
|
.map(|rel| root.join(rel))
|
||||||
.find(|p| p.exists())
|
.find(|p| p.exists())
|
||||||
.ok_or_else(|| anyhow::anyhow!("MCP LLM server binary not found"))?;
|
});
|
||||||
|
|
||||||
let mut env_vars = HashMap::new();
|
let mut env_vars = HashMap::new();
|
||||||
if let Some(url) = &provider_cfg.base_url {
|
env_vars.insert("OWLEN_PROVIDER".to_string(), canonical_name.to_string());
|
||||||
env_vars.insert("OLLAMA_URL".to_string(), url.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
let provider: Arc<dyn owlen_core::provider::Provider> = if let Some(path) = server_binary {
|
||||||
let config = McpServerConfig {
|
let config = McpServerConfig {
|
||||||
name: provider_name.to_string(),
|
name: canonical_name.to_string(),
|
||||||
command: binary_path.to_string_lossy().into_owned(),
|
command: path.to_string_lossy().into_owned(),
|
||||||
args: Vec::new(),
|
args: Vec::new(),
|
||||||
transport: "stdio".to_string(),
|
transport: "stdio".to_string(),
|
||||||
env: env_vars,
|
env: env_vars,
|
||||||
};
|
};
|
||||||
Arc::new(RemoteMcpClient::new_with_config(&config)?)
|
Arc::new(RemoteMcpClient::new_with_config(&config)?)
|
||||||
} else {
|
} else {
|
||||||
// Local Ollama via default MCP client
|
let backups: Vec<(String, Option<String>)> = env_vars
|
||||||
Arc::new(RemoteMcpClient::new()?)
|
.keys()
|
||||||
|
.map(|key| (key.clone(), std::env::var(key).ok()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
for (key, value) in env_vars.iter() {
|
||||||
|
std::env::set_var(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
let result = RemoteMcpClient::new();
|
||||||
|
|
||||||
|
for (key, original) in backups {
|
||||||
|
if let Some(value) = original {
|
||||||
|
std::env::set_var(&key, value);
|
||||||
|
} else {
|
||||||
|
std::env::remove_var(&key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Arc::new(result?)
|
||||||
};
|
};
|
||||||
|
|
||||||
self.controller.switch_provider(provider).await?;
|
self.controller.switch_provider(provider).await?;
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ use tui_textarea::TextArea;
|
|||||||
use unicode_width::UnicodeWidthStr;
|
use unicode_width::UnicodeWidthStr;
|
||||||
|
|
||||||
use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT};
|
use crate::chat_app::{ChatApp, ModelSelectorItemKind, HELP_TAB_COUNT};
|
||||||
use owlen_core::types::Role;
|
use owlen_core::types::{ModelInfo, Role};
|
||||||
use owlen_core::ui::{FocusedPanel, InputMode};
|
use owlen_core::ui::{FocusedPanel, InputMode};
|
||||||
|
|
||||||
const PRIVACY_TAB_INDEX: usize = HELP_TAB_COUNT - 1;
|
const PRIVACY_TAB_INDEX: usize = HELP_TAB_COUNT - 1;
|
||||||
@@ -1371,6 +1371,47 @@ fn render_provider_selector(frame: &mut Frame<'_>, app: &ChatApp) {
|
|||||||
frame.render_stateful_widget(list, area, &mut state);
|
frame.render_stateful_widget(list, area, &mut state);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn model_badge_icons(model: &ModelInfo) -> Vec<&'static str> {
|
||||||
|
let mut badges = Vec::new();
|
||||||
|
|
||||||
|
if model.supports_tools {
|
||||||
|
badges.push("🔧");
|
||||||
|
}
|
||||||
|
|
||||||
|
if model_has_feature(model, &["think", "reason"]) {
|
||||||
|
badges.push("🧠");
|
||||||
|
}
|
||||||
|
|
||||||
|
if model_has_feature(model, &["vision", "multimodal", "image"]) {
|
||||||
|
badges.push("👁️");
|
||||||
|
}
|
||||||
|
|
||||||
|
if model_has_feature(model, &["audio", "speech", "voice"]) {
|
||||||
|
badges.push("🎧");
|
||||||
|
}
|
||||||
|
|
||||||
|
badges
|
||||||
|
}
|
||||||
|
|
||||||
|
fn model_has_feature(model: &ModelInfo, keywords: &[&str]) -> bool {
|
||||||
|
let name_lower = model.name.to_ascii_lowercase();
|
||||||
|
if keywords.iter().any(|kw| name_lower.contains(kw)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(description) = &model.description {
|
||||||
|
let description_lower = description.to_ascii_lowercase();
|
||||||
|
if keywords.iter().any(|kw| description_lower.contains(kw)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
model.capabilities.iter().any(|cap| {
|
||||||
|
let lower = cap.to_ascii_lowercase();
|
||||||
|
keywords.iter().any(|kw| lower.contains(kw))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
|
fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
|
||||||
let theme = app.theme();
|
let theme = app.theme();
|
||||||
let area = centered_rect(60, 60, frame.area());
|
let area = centered_rect(60, 60, frame.area());
|
||||||
@@ -1390,16 +1431,14 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
|
|||||||
.add_modifier(Modifier::BOLD),
|
.add_modifier(Modifier::BOLD),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
ModelSelectorItemKind::Model {
|
ModelSelectorItemKind::Model { model_index, .. } => {
|
||||||
provider: _,
|
|
||||||
model_index,
|
|
||||||
} => {
|
|
||||||
if let Some(model) = app.model_info_by_index(*model_index) {
|
if let Some(model) = app.model_info_by_index(*model_index) {
|
||||||
let tool_indicator = if model.supports_tools { "🔧 " } else { " " };
|
let badges = model_badge_icons(model);
|
||||||
let label = if model.name.is_empty() {
|
|
||||||
format!(" {}{}", tool_indicator, model.id)
|
let label = if badges.is_empty() {
|
||||||
|
format!(" {}", model.id)
|
||||||
} else {
|
} else {
|
||||||
format!(" {}{} — {}", tool_indicator, model.id, model.name)
|
format!(" {} - {}", model.id, badges.join(" "))
|
||||||
};
|
};
|
||||||
ListItem::new(Span::styled(
|
ListItem::new(Span::styled(
|
||||||
label,
|
label,
|
||||||
@@ -1427,7 +1466,7 @@ fn render_model_selector(frame: &mut Frame<'_>, app: &ChatApp) {
|
|||||||
.block(
|
.block(
|
||||||
Block::default()
|
Block::default()
|
||||||
.title(Span::styled(
|
.title(Span::styled(
|
||||||
"Select Model — 🔧 = Tool Support",
|
"Select Model — 🔧 tools • 🧠 thinking • 👁️ vision • 🎧 audio",
|
||||||
Style::default()
|
Style::default()
|
||||||
.fg(theme.focused_panel_border)
|
.fg(theme.focused_panel_border)
|
||||||
.add_modifier(Modifier::BOLD),
|
.add_modifier(Modifier::BOLD),
|
||||||
@@ -1601,6 +1640,67 @@ fn render_consent_dialog(frame: &mut Frame<'_>, app: &ChatApp) {
|
|||||||
frame.render_widget(paragraph, area);
|
frame.render_widget(paragraph, area);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
fn model_with(capabilities: Vec<&str>, description: Option<&str>) -> ModelInfo {
|
||||||
|
ModelInfo {
|
||||||
|
id: "model".into(),
|
||||||
|
name: "model".into(),
|
||||||
|
description: description.map(|s| s.to_string()),
|
||||||
|
provider: "test".into(),
|
||||||
|
context_window: None,
|
||||||
|
capabilities: capabilities.into_iter().map(|s| s.to_string()).collect(),
|
||||||
|
supports_tools: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn badges_include_tool_icon() {
|
||||||
|
let model = ModelInfo {
|
||||||
|
id: "tool-model".into(),
|
||||||
|
name: "tool-model".into(),
|
||||||
|
description: None,
|
||||||
|
provider: "test".into(),
|
||||||
|
context_window: None,
|
||||||
|
capabilities: vec![],
|
||||||
|
supports_tools: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert!(model_badge_icons(&model).contains(&"🔧"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn badges_detect_thinking_capability() {
|
||||||
|
let model = model_with(vec!["Thinking"], None);
|
||||||
|
let icons = model_badge_icons(&model);
|
||||||
|
assert!(icons.contains(&"🧠"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn badges_detect_vision_from_description() {
|
||||||
|
let model = model_with(vec!["chat"], Some("Supports multimodal vision"));
|
||||||
|
let icons = model_badge_icons(&model);
|
||||||
|
assert!(icons.contains(&"👁️"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn badges_detect_audio_from_name() {
|
||||||
|
let model = ModelInfo {
|
||||||
|
id: "voice-specialist".into(),
|
||||||
|
name: "Voice-Specialist".into(),
|
||||||
|
description: None,
|
||||||
|
provider: "test".into(),
|
||||||
|
context_window: None,
|
||||||
|
capabilities: vec![],
|
||||||
|
supports_tools: false,
|
||||||
|
};
|
||||||
|
let icons = model_badge_icons(&model);
|
||||||
|
assert!(icons.contains(&"🎧"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn render_privacy_settings(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
|
fn render_privacy_settings(frame: &mut Frame<'_>, area: Rect, app: &ChatApp) {
|
||||||
let theme = app.theme();
|
let theme = app.theme();
|
||||||
let config = app.config();
|
let config = app.config();
|
||||||
|
|||||||
@@ -133,13 +133,28 @@ base_url = "https://ollama.com"
|
|||||||
|
|
||||||
### Using Ollama Cloud
|
### Using Ollama Cloud
|
||||||
|
|
||||||
To talk to [Ollama Cloud](https://docs.ollama.com/cloud), point the base URL at the hosted endpoint and supply your API key:
|
Owlen now ships a single unified `ollama` provider. When an API key is present, Owlen automatically routes traffic to [Ollama Cloud](https://docs.ollama.com/cloud); otherwise it talks to the local daemon. A minimal configuration looks like this:
|
||||||
|
|
||||||
```toml
|
```toml
|
||||||
[providers.ollama-cloud]
|
[providers.ollama]
|
||||||
provider_type = "ollama-cloud"
|
provider_type = "ollama"
|
||||||
base_url = "https://ollama.com"
|
base_url = "http://localhost:11434" # ignored once an API key is supplied
|
||||||
api_key = "${OLLAMA_API_KEY}"
|
api_key = "${OLLAMA_API_KEY}"
|
||||||
```
|
```
|
||||||
|
|
||||||
Requests target the same `/api/chat` endpoint documented by Ollama and automatically include the API key using a `Bearer` authorization header. If you prefer not to store the key in the config file, you can leave `api_key` unset and provide it via the `OLLAMA_API_KEY` (or `OLLAMA_CLOUD_API_KEY`) environment variable instead. You can also reference an environment variable inline (for example `api_key = "$OLLAMA_API_KEY"` or `api_key = "${OLLAMA_API_KEY}"`), which Owlen expands when the configuration is loaded. The base URL is normalised automatically—Owlen enforces HTTPS, trims trailing slashes, and accepts both `https://ollama.com` and `https://api.ollama.com` without rewriting the host.
|
Requests target the same `/api/chat` endpoint documented by Ollama and automatically include the API key using a `Bearer` authorization header. If you prefer not to store the key in the config file, you can leave `api_key` unset and provide it via the `OLLAMA_API_KEY` (or `OLLAMA_CLOUD_API_KEY`) environment variable instead. You can also reference an environment variable inline (for example `api_key = "$OLLAMA_API_KEY"` or `api_key = "${OLLAMA_API_KEY}"`), which Owlen expands when the configuration is loaded. The base URL is normalised automatically—Owlen enforces HTTPS, trims trailing slashes, and accepts both `https://ollama.com` and `https://api.ollama.com` without rewriting the host.
|
||||||
|
|
||||||
|
> **Tip:** If the official `ollama signin` flow fails on Linux v0.12.3, follow the [Linux Ollama sign-in workaround](#linux-ollama-sign-in-workaround-v0123) in the troubleshooting guide to copy keys from a working machine or register them manually.
|
||||||
|
|
||||||
|
### Managing cloud credentials via CLI
|
||||||
|
|
||||||
|
Owlen now ships with an interactive helper for Ollama Cloud:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
owlen cloud setup # Prompt for your API key (or use --api-key)
|
||||||
|
owlen cloud status # Verify authentication/latency
|
||||||
|
owlen cloud models # List the hosted models your account can access
|
||||||
|
owlen cloud logout # Forget the stored API key
|
||||||
|
```
|
||||||
|
|
||||||
|
When `privacy.encrypt_local_data = true`, the API key is written to Owlen's encrypted credential vault instead of being persisted in plaintext. Subsequent invocations automatically load the key into the runtime environment so that the config file can remain redacted. If encryption is disabled, the key is stored under `[providers.ollama-cloud].api_key` as before.
|
||||||
|
|||||||
@@ -40,11 +40,68 @@ If Owlen is not behaving as you expect, there might be an issue with your config
|
|||||||
|
|
||||||
## Ollama Cloud Authentication Errors
|
## Ollama Cloud Authentication Errors
|
||||||
|
|
||||||
If you see `Auth` errors when using the `ollama-cloud` provider:
|
If you see `Auth` errors when using the hosted service:
|
||||||
|
|
||||||
1. Ensure `providers.ollama-cloud.api_key` is set **or** export `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` before launching Owlen.
|
1. Run `owlen cloud setup` to register your API key (with `--api-key` for non-interactive use).
|
||||||
2. Confirm the key has access to the requested models.
|
2. Use `owlen cloud status` to verify Owlen can authenticate against [Ollama Cloud](https://docs.ollama.com/cloud).
|
||||||
3. Avoid pasting extra quotes or whitespace into the config file—`owlen config doctor` will normalise the entry for you.
|
3. Ensure `providers.ollama.api_key` is set **or** export `OLLAMA_API_KEY` / `OLLAMA_CLOUD_API_KEY` when encryption is disabled. With `privacy.encrypt_local_data = true`, the key lives in the encrypted vault and is loaded automatically.
|
||||||
|
4. Confirm the key has access to the requested models.
|
||||||
|
5. Avoid pasting extra quotes or whitespace into the config file—`owlen config doctor` will normalise the entry for you.
|
||||||
|
|
||||||
|
### Linux Ollama Sign-In Workaround (v0.12.3)
|
||||||
|
|
||||||
|
Ollama v0.12.3 on Linux ships with a broken `ollama signin` command. Until you can upgrade to ≥0.12.4, use one of the manual workflows below to register your key pair.
|
||||||
|
|
||||||
|
#### 1. Manual key copy
|
||||||
|
|
||||||
|
1. **Locate (or generate) keys on Linux**
|
||||||
|
```bash
|
||||||
|
ls -la /usr/share/ollama/.ollama/
|
||||||
|
sudo systemctl start ollama # start the service if the directory is empty
|
||||||
|
```
|
||||||
|
2. **Copy keys from a working Mac**
|
||||||
|
```bash
|
||||||
|
# On macOS (source machine)
|
||||||
|
cat ~/.ollama/id_ed25519.pub
|
||||||
|
cat ~/.ollama/id_ed25519
|
||||||
|
```
|
||||||
|
```bash
|
||||||
|
# On Linux (target machine)
|
||||||
|
sudo systemctl stop ollama
|
||||||
|
sudo mkdir -p /usr/share/ollama/.ollama
|
||||||
|
sudo tee /usr/share/ollama/.ollama/id_ed25519.pub <<'EOF'
|
||||||
|
<paste mac public key>
|
||||||
|
EOF
|
||||||
|
sudo tee /usr/share/ollama/.ollama/id_ed25519 <<'EOF'
|
||||||
|
<paste mac private key>
|
||||||
|
EOF
|
||||||
|
sudo chown -R ollama:ollama /usr/share/ollama/.ollama/
|
||||||
|
sudo chmod 600 /usr/share/ollama/.ollama/id_ed25519
|
||||||
|
sudo chmod 644 /usr/share/ollama/.ollama/id_ed25519.pub
|
||||||
|
sudo systemctl start ollama
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2. Manual web registration
|
||||||
|
|
||||||
|
1. Read the Linux public key:
|
||||||
|
```bash
|
||||||
|
sudo cat /usr/share/ollama/.ollama/id_ed25519.pub
|
||||||
|
```
|
||||||
|
2. Open <https://ollama.com/settings/keys> and paste the public key.
|
||||||
|
|
||||||
|
After either method, confirm access:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
ollama list
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Troubleshooting
|
||||||
|
|
||||||
|
- Permissions: `sudo chown -R ollama:ollama /usr/share/ollama/.ollama/` then re-apply `chmod` (`600` private, `644` public).
|
||||||
|
- Service status: `sudo systemctl status ollama` and `sudo journalctl -u ollama -f`.
|
||||||
|
- Alternate paths: Some distros run Ollama as a user process (`~/.ollama`). Copy the keys into that directory if `/usr/share/ollama/.ollama` is unused.
|
||||||
|
|
||||||
|
This workaround mirrors what `ollama signin` should do—register the key pair with Ollama Cloud—without waiting for the patched release. Once you upgrade to v0.12.4 or newer, the interactive sign-in command works again.
|
||||||
|
|
||||||
## Performance Tuning
|
## Performance Tuning
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user