feat(workspace): initialize Rust workspace structure for v2

Set up Cargo workspace with initial crates:
- cli: main application entry point with chat streaming tests
- config: configuration management
- llm/ollama: Ollama client integration with NDJSON support

Includes .gitignore for Rust and JetBrains IDEs.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
commit 2a651ebd7b (parent 491fd049b0)
Date: 2025-11-01 16:30:09 +01:00
15 changed files with 513 additions and 0 deletions
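
Only crates/cli/src/main.rs is reproduced below; the config crate named in the commit message is not part of this excerpt. The following is a minimal sketch of what config_agent plausibly provides, with the function and field names (load_settings, ollama_url, model) inferred from how main.rs uses them; the defaults, the TOML backing, and the error type are assumptions, not the committed code.

// Sketch only: config_agent is not shown in this diff.
use serde::Deserialize;
use std::path::Path;

#[derive(Debug, Deserialize)]
#[serde(default)]
pub struct Settings {
    pub ollama_url: String,
    pub model: String,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            // Assumed defaults: the local Ollama endpoint and an arbitrary model name.
            ollama_url: "http://localhost:11434".to_string(),
            model: "llama3.1".to_string(),
        }
    }
}

/// Load settings from an optional TOML file; main.rs calls this as
/// load_settings(None).unwrap_or_default(), so every failure can fall back to defaults.
pub fn load_settings(path: Option<&Path>) -> Result<Settings, Box<dyn std::error::Error>> {
    match path {
        Some(p) => Ok(toml::from_str(&std::fs::read_to_string(p)?)?),
        None => Ok(Settings::default()),
    }
}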

crates/cli/src/main.rs (new file, 67 lines)

@@ -0,0 +1,67 @@
use clap::Parser;
use color_eyre::eyre::Result;
use config_agent::load_settings;
use futures_util::TryStreamExt;
use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
use std::io::{self, Write};

#[derive(Parser, Debug)]
#[command(name = "code", version, about = "Rust code-agent (Ollama)")]
struct Args {
    /// Override Ollama base URL (local or cloud)
    #[arg(long)]
    ollama_url: Option<String>,

    /// Model name
    #[arg(long)]
    model: Option<String>,

    /// Print response only (headless-like)
    #[arg(long)]
    print: bool,

    /// Prompt to send
    #[arg()]
    prompt: Vec<String>,
}

#[tokio::main]
async fn main() -> Result<()> {
    color_eyre::install()?;
    let args = Args::parse();

    let prompt = if args.prompt.is_empty() {
        "Say hello".to_string()
    } else {
        args.prompt.join(" ")
    };

    let settings = load_settings(None).unwrap_or_default();
    let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
    let model = args.model.unwrap_or(settings.model);

    let client = OllamaClient::new(base_url);
    let opts = OllamaOptions {
        model,
        stream: true,
    };
    let msgs = vec![ChatMessage {
        role: "user".into(),
        content: prompt,
    }];

    let mut stream = client.chat_stream(&msgs, &opts).await?;
    while let Ok(Some(chunk)) = stream.try_next().await {
        if let Some(m) = chunk.message {
            if let Some(c) = m.content {
                print!("{c}");
                io::stdout().flush()?;
            }
        }
        if matches!(chunk.done, Some(true)) {
            break;
        }
    }

    Ok(())
}
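
The llm_ollama crate's internals are also outside this diff. The commit message calls out NDJSON support: Ollama's /api/chat endpoint streams one JSON object per line, and the final object carries "done": true. The sketch below shows one way to consume such a stream with reqwest and serde; the ChatChunk and ChunkMessage shapes are assumptions inferred from how main.rs reads chunk.message, m.content, and chunk.done, not the crate's actual types.

// Sketch only: not the llm_ollama implementation. Assumes reqwest with the
// "json" and "stream" features, plus serde, serde_json, and futures-util.
use color_eyre::eyre::Result;
use futures_util::StreamExt;
use serde::Deserialize;
use std::io::{self, Write};

// Field names assumed from main.rs usage.
#[derive(Deserialize)]
struct ChunkMessage {
    content: Option<String>,
}

#[derive(Deserialize)]
struct ChatChunk {
    message: Option<ChunkMessage>,
    done: Option<bool>,
}

async fn stream_chat(base_url: &str, model: &str, prompt: &str) -> Result<()> {
    let body = serde_json::json!({
        "model": model,
        "messages": [{ "role": "user", "content": prompt }],
        "stream": true,
    });

    let resp = reqwest::Client::new()
        .post(format!("{base_url}/api/chat"))
        .json(&body)
        .send()
        .await?
        .error_for_status()?;

    // NDJSON framing: buffer raw bytes, cut at newlines; each complete line is one JSON chunk.
    let mut buf: Vec<u8> = Vec::new();
    let mut bytes = resp.bytes_stream();
    while let Some(part) = bytes.next().await {
        buf.extend_from_slice(&part?);
        while let Some(pos) = buf.iter().position(|&b| b == b'\n') {
            let line: Vec<u8> = buf.drain(..=pos).collect();
            if line.iter().all(|b| b.is_ascii_whitespace()) {
                continue;
            }
            let chunk: ChatChunk = serde_json::from_slice(&line)?;
            if let Some(c) = chunk.message.and_then(|m| m.content) {
                print!("{c}");
                io::stdout().flush()?;
            }
            if chunk.done == Some(true) {
                return Ok(());
            }
        }
    }
    Ok(())
}

In the committed crate this logic presumably sits behind OllamaClient::chat_stream and is exposed as a TryStream of chunks, which is what main.rs drives with try_next().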