chore(git): ignore custom documentation files

Add AGENTS.md and CLAUDE.md to .gitignore to exclude project-specific documentation files.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
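For reference, the change described above amounts to two literal ignore entries; a minimal sketch, assuming they are appended to the repository-root .gitignore:

AGENTS.md
CLAUDE.md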
commit d21945dbc0
parent 7f39bf1eca
2025-11-01 18:49:44 +01:00
9 changed files with 7 additions and 3 deletions

crates/app/cli/.gitignore (vendored, new file, 22 lines added)

@@ -0,0 +1,22 @@
/target
### Rust template
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
### rust-analyzer template
# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
rust-project.json

crates/app/cli/Cargo.toml (new file, 23 lines added)

@@ -0,0 +1,23 @@
[package]
name = "owlen"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
clap = { version = "4.5", features = ["derive"] }
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
color-eyre = "0.6"
llm-ollama = { path = "../../llm/ollama" }
tools-fs = { path = "../../tools/fs" }
config-agent = { package = "config-agent", path = "../../config" }
futures-util = "0.3.31"

[dev-dependencies]
assert_cmd = "2.0"
predicates = "3.1"
httpmock = "0.7"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }

crates/app/cli/src/main.rs (new file, 105 lines added)

@@ -0,0 +1,105 @@
use clap::Parser;
use color_eyre::eyre::Result;
use config_agent::load_settings;
use futures_util::TryStreamExt;
use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
use std::io::{self, Write};

#[derive(clap::Subcommand, Debug)]
enum Cmd {
    Read { path: String },
    Glob { pattern: String },
    Grep { root: String, pattern: String },
}

#[derive(Parser, Debug)]
#[command(name = "code", version)]
struct Args {
    #[arg(long)]
    ollama_url: Option<String>,
    #[arg(long)]
    model: Option<String>,
    #[arg(long)]
    api_key: Option<String>,
    #[arg(long)]
    print: bool,
    #[arg()]
    prompt: Vec<String>,
    #[command(subcommand)]
    cmd: Option<Cmd>,
}

#[tokio::main]
async fn main() -> Result<()> {
    color_eyre::install()?;
    let args = Args::parse();
    let settings = load_settings(None).unwrap_or_default();

    // File-system subcommands short-circuit before any LLM call
    if let Some(cmd) = args.cmd {
        match cmd {
            Cmd::Read { path } => {
                let s = tools_fs::read_file(&path)?;
                println!("{}", s);
                return Ok(());
            }
            Cmd::Glob { pattern } => {
                for p in tools_fs::glob_list(&pattern)? {
                    println!("{}", p);
                }
                return Ok(());
            }
            Cmd::Grep { root, pattern } => {
                for (path, line_number, text) in tools_fs::grep(&root, &pattern)? {
                    println!("{path}:{line_number}:{text}");
                }
                return Ok(());
            }
        }
    }

    let prompt = if args.prompt.is_empty() {
        "Say hello".to_string()
    } else {
        args.prompt.join(" ")
    };
    let model = args.model.unwrap_or(settings.model);
    let api_key = args.api_key.or(settings.api_key);

    // Use Ollama Cloud when model has "-cloud" suffix AND API key is set
    let use_cloud = model.ends_with("-cloud") && api_key.is_some();
    let client = if use_cloud {
        OllamaClient::with_cloud().with_api_key(api_key.unwrap())
    } else {
        let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
        let mut client = OllamaClient::new(base_url);
        if let Some(key) = api_key {
            client = client.with_api_key(key);
        }
        client
    };

    let opts = OllamaOptions {
        model,
        stream: true,
    };
    let msgs = vec![ChatMessage {
        role: "user".into(),
        content: prompt,
    }];

    let mut stream = client.chat_stream(&msgs, &opts).await?;
    while let Some(chunk) = stream.try_next().await? {
        if let Some(m) = chunk.message {
            if let Some(c) = m.content {
                print!("{c}");
                io::stdout().flush()?;
            }
        }
        if matches!(chunk.done, Some(true)) {
            break;
        }
    }
    println!(); // Newline after response
    Ok(())
}
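Taken together, the clap definitions above imply a small set of invocations; a rough usage sketch, assuming the binary is on PATH under its package name owlen (subcommand names are the lowercased variant names clap derives):

owlen read src/main.rs                           # print a file via tools_fs::read_file
owlen glob "**/*.rs"                             # list files matching a glob pattern
owlen grep . TODO                                # print path:line:text matches under a root
owlen --model qwen2.5 --print explain this code  # trailing words become the chat prompt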

(new file, 39 lines added; CLI integration test, path not shown)

@@ -0,0 +1,39 @@
use assert_cmd::Command;
use httpmock::prelude::*;
use predicates::prelude::PredicateBooleanExt;

#[tokio::test]
async fn headless_streams_ndjson() {
    let server = MockServer::start_async().await;

    // Mock /api/chat with NDJSON lines
    let body = serde_json::json!({
        "model": "qwen2.5",
        "messages": [{"role": "user", "content": "hello"}],
        "stream": true
    });
    let response = concat!(
        r#"{"message":{"role":"assistant","content":"Hel"}}"#, "\n",
        r#"{"message":{"role":"assistant","content":"lo"}}"#, "\n",
        r#"{"done":true}"#, "\n",
    );
    let _m = server.mock(|when, then| {
        when.method(POST)
            .path("/api/chat")
            .json_body(body.clone());
        then.status(200)
            .header("content-type", "application/x-ndjson")
            .body(response);
    });

    let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
    cmd.arg("--ollama-url").arg(server.base_url())
        .arg("--model").arg("qwen2.5")
        .arg("--print")
        .arg("hello");

    cmd.assert().success().stdout(
        predicates::str::contains("Hello")
            .count(1)
            .or(predicates::str::contains("Hel").and(predicates::str::contains("lo"))),
    );
}