feat(ollama): add cloud support with api key and model suffix detection

Add support for Ollama Cloud by detecting model names with a "-cloud" suffix
and checking for the presence of an API key. Update the config to read the
OLLAMA_API_KEY environment variable. When both conditions are met, automatically
use the https://ollama.com endpoint; otherwise use the local/configured URL.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
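
The routing rule above reduces to one predicate over the model name and the key. A minimal sketch of that rule, assuming the stock local default of http://localhost:11434 and using made-up model names for illustration; the real client construction is in the diff below:

```rust
/// Sketch of the routing rule: cloud only when BOTH the "-cloud" suffix
/// and an API key are present; otherwise the local/configured URL wins.
fn endpoint(model: &str, api_key: Option<&str>, configured_url: &str) -> String {
    if model.ends_with("-cloud") && api_key.is_some() {
        "https://ollama.com".to_string() // Ollama Cloud
    } else {
        configured_url.to_string() // local or user-configured URL
    }
}

fn main() {
    let local = "http://localhost:11434"; // assumed default, see default_ollama_url()
    assert_eq!(endpoint("qwen3-coder:480b-cloud", Some("key"), local), "https://ollama.com");
    assert_eq!(endpoint("qwen3-coder:480b-cloud", None, local), local); // suffix alone is not enough
    assert_eq!(endpoint("llama3.2", Some("key"), local), local);        // key alone is not enough
}
```
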
Commit dcda8216dc (parent ff49e7ce93), 2025-11-01 18:20:33 +01:00
2 changed files with 53 additions and 11 deletions

File 1 of 2:

@@ -5,30 +5,57 @@ use futures_util::TryStreamExt;
 use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
 use std::io::{self, Write};
 
+#[derive(clap::Subcommand, Debug)]
+enum Cmd {
+    Read { path: String },
+    Glob { root: String },
+    Grep { root: String, pattern: String },
+}
+
 #[derive(Parser, Debug)]
-#[command(name = "code", version, about = "Rust code-agent (Ollama)")]
+#[command(name = "code", version)]
 struct Args {
-    /// Override Ollama base URL (local or cloud)
     #[arg(long)]
     ollama_url: Option<String>,
-    /// Model name
     #[arg(long)]
     model: Option<String>,
-    /// Print response only (headless-like)
+    #[arg(long)]
+    api_key: Option<String>,
     #[arg(long)]
     print: bool,
-    /// Prompt to send
     #[arg()]
     prompt: Vec<String>,
+    #[command(subcommand)]
+    cmd: Option<Cmd>,
 }
 
 #[tokio::main]
 async fn main() -> Result<()> {
     color_eyre::install()?;
     let args = Args::parse();
+    let settings = load_settings(None).unwrap_or_default();
+
+    if let Some(cmd) = args.cmd {
+        match cmd {
+            Cmd::Read { path } => {
+                let s = tools_fs::read_file(&path)?;
+                println!("{}", s);
+                return Ok(());
+            }
+            Cmd::Glob { root } => {
+                for p in tools_fs::glob_list(&root)? {
+                    println!("{}", p);
+                }
+                return Ok(());
+            }
+            Cmd::Grep { root, pattern } => {
+                for (path, line_number, text) in tools_fs::grep(&root, &pattern)? {
+                    println!("{path}:{line_number}:{text}")
+                }
+                return Ok(());
+            }
+        }
+    }
 
     let prompt = if args.prompt.is_empty() {
         "Say hello".to_string()
@@ -36,11 +63,21 @@ async fn main() -> Result<()> {
         args.prompt.join(" ")
     };
 
-    let settings = load_settings(None).unwrap_or_default();
-    let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
     let model = args.model.unwrap_or(settings.model);
+    let api_key = args.api_key.or(settings.api_key);
 
-    let client = OllamaClient::new(base_url);
+    // Use Ollama Cloud when model has "-cloud" suffix AND API key is set
+    let use_cloud = model.ends_with("-cloud") && api_key.is_some();
+    let client = if use_cloud {
+        OllamaClient::with_cloud().with_api_key(api_key.unwrap())
+    } else {
+        let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
+        let mut client = OllamaClient::new(base_url);
+        if let Some(key) = api_key {
+            client = client.with_api_key(key);
+        }
+        client
+    };
 
     let opts = OllamaOptions {
         model,
         stream: true,
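
One detail worth noting in the hunk above: the key is resolved with `Option::or`, so a `--api-key` flag always takes precedence over the settings value (which figment may in turn have filled from `OLLAMA_API_KEY`; see the next file). A stand-alone sketch of that precedence, with no crate types assumed:

```rust
// --api-key (CLI) first, then the settings value; Option::or keeps the first Some.
fn resolve_api_key(cli_flag: Option<String>, from_settings: Option<String>) -> Option<String> {
    cli_flag.or(from_settings)
}

fn main() {
    let both = resolve_api_key(Some("flag-key".into()), Some("settings-key".into()));
    assert_eq!(both.as_deref(), Some("flag-key")); // CLI wins

    let only_settings = resolve_api_key(None, Some("settings-key".into()));
    assert_eq!(only_settings.as_deref(), Some("settings-key")); // fallback to settings/env
}
```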

File 2 of 2:

@@ -14,6 +14,8 @@ pub struct Settings {
     pub model: String,
     #[serde(default = "default_mode")]
     pub mode: String, // "plan" (read-only) for now
+    #[serde(default)]
+    pub api_key: Option<String>, // For Ollama Cloud or other API authentication
 }
 
 fn default_ollama_url() -> String {
@@ -32,6 +34,7 @@ impl Default for Settings {
             ollama_url: default_ollama_url(),
             model: default_model(),
             mode: default_mode(),
+            api_key: None,
         }
     }
 }
@@ -52,6 +55,8 @@ pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Er
     // Environment variables have highest precedence
     fig = fig.merge(Env::prefixed("OWLEN_").split("__"));
+    // Support OLLAMA_API_KEY, OLLAMA_MODEL, etc. (without nesting)
+    fig = fig.merge(Env::prefixed("OLLAMA_"));
     fig.extract()
 }
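
For reference, the unprefixed `OLLAMA_` merge is expected to work because figment's `Env` provider strips the prefix and lowercases the remainder, so `OLLAMA_API_KEY` lands in `api_key`. A hedged sketch of that mapping using figment's test `Jail` (assumes figment's `test` feature and serde's `derive` feature; `Probe` is a stand-in for the real `Settings`):

```rust
use figment::{Figment, Jail, providers::Env};

// Stand-in for the real `Settings` struct; only the field under test.
#[derive(serde::Deserialize, Debug)]
struct Probe {
    api_key: Option<String>,
}

fn main() {
    Jail::expect_with(|jail| {
        jail.set_env("OLLAMA_API_KEY", "abc123");
        // Env::prefixed("OLLAMA_") strips the prefix and lowercases the key,
        // so the variable above is read as `api_key`.
        let probe: Probe = Figment::new().merge(Env::prefixed("OLLAMA_")).extract()?;
        assert_eq!(probe.api_key.as_deref(), Some("abc123"));
        Ok(())
    });
}
```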