From 2a651ebd7b67fa4bc703a2380c115ec573ee7fd9 Mon Sep 17 00:00:00 2001
From: vikingowl
Date: Sat, 1 Nov 2025 16:30:09 +0100
Subject: [PATCH] feat(workspace): initialize Rust workspace structure for v2
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Set up Cargo workspace with initial crates:
- cli: main application entry point with chat streaming tests
- config: configuration management
- llm/ollama: Ollama client integration with NDJSON support

Includes .gitignore for Rust and JetBrains IDEs.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .gitignore                        | 100 ++++++++++++++++++++++++++++++
 Cargo.toml                        |  12 ++++
 crates/cli/.gitignore             |  22 +++++++
 crates/cli/Cargo.toml             |  22 +++++++
 crates/cli/src/main.rs            |  67 ++++++++++++++++++++
 crates/cli/tests/chat_stream.rs   |  39 ++++++++++++
 crates/config/.gitignore          |  22 +++++++
 crates/config/Cargo.toml          |  13 ++++
 crates/config/src/lib.rs          |  55 ++++++++++++++++
 crates/llm/ollama/.gitignore      |  22 +++++++
 crates/llm/ollama/Cargo.toml      |  16 +++++
 crates/llm/ollama/src/client.rs   |  84 +++++++++++++++++++++++++
 crates/llm/ollama/src/lib.rs      |   5 ++
 crates/llm/ollama/src/types.rs    |  22 +++++++
 crates/llm/ollama/tests/ndjson.rs |  12 ++++
 15 files changed, 513 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 Cargo.toml
 create mode 100644 crates/cli/.gitignore
 create mode 100644 crates/cli/Cargo.toml
 create mode 100644 crates/cli/src/main.rs
 create mode 100644 crates/cli/tests/chat_stream.rs
 create mode 100644 crates/config/.gitignore
 create mode 100644 crates/config/Cargo.toml
 create mode 100644 crates/config/src/lib.rs
 create mode 100644 crates/llm/ollama/.gitignore
 create mode 100644 crates/llm/ollama/Cargo.toml
 create mode 100644 crates/llm/ollama/src/client.rs
 create mode 100644 crates/llm/ollama/src/lib.rs
 create mode 100644 crates/llm/ollama/src/types.rs
 create mode 100644 crates/llm/ollama/tests/ndjson.rs

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..494fe19
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,100 @@
+### Rust template
+# Generated by Cargo
+# will have compiled files and executables
+debug/
+target/
+
+# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
+# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
+Cargo.lock
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
+
+# MSVC Windows builds of rustc generate these, which store debugging information
+*.pdb
+
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+.idea/artifacts
+.idea/compiler.xml
+.idea/jarRepositories.xml
+.idea/modules.xml
+.idea/*.iml
+.idea/modules
+*.iml
+*.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+### rust-analyzer template
+# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
+rust-project.json
+
+
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..8761c26
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,12 @@
+[workspace]
+members = [
+    "crates/cli",
+    "crates/llm/ollama",
+    "crates/config"
+]
+resolver = "2"
+
+[workspace.package]
+edition = "2024"
+license = "AGPL-3.0"
+rust-version = "1.91"
diff --git a/crates/cli/.gitignore b/crates/cli/.gitignore
new file mode 100644
index 0000000..34f5f2c
--- /dev/null
+++ b/crates/cli/.gitignore
@@ -0,0 +1,22 @@
+/target
+### Rust template
+# Generated by Cargo
+# will have compiled files and executables
+debug/
+target/
+
+# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
+# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
+Cargo.lock
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
+
+# MSVC Windows builds of rustc generate these, which store debugging information
+*.pdb
+
+### rust-analyzer template
+# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
+rust-project.json
+
+
diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml
new file mode 100644
index 0000000..1b5d046
--- /dev/null
+++ b/crates/cli/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "code"
+version = "0.1.0"
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+clap = { version = "4.5", features = ["derive"] }
+tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+color-eyre = "0.6"
+llm-ollama = { path = "../llm/ollama" }
+config-agent = { path = "../config" }
+futures-util = "0.3.31"
+
+[dev-dependencies]
+assert_cmd = "2.0"
+predicates = "3.1"
+httpmock = "0.7"
+tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs
new file mode 100644
index 0000000..27f7a30
--- /dev/null
+++ b/crates/cli/src/main.rs
@@ -0,0 +1,67 @@
+use clap::Parser;
+use color_eyre::eyre::Result;
+use config_agent::load_settings;
+use futures_util::TryStreamExt;
+use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
+use std::io::{self, Write};
+
+#[derive(Parser, Debug)]
+#[command(name = "code", version, about = "Rust code-agent (Ollama)")]
+struct Args {
+    /// Override Ollama base URL (local or cloud)
+    #[arg(long)]
+    ollama_url: Option<String>,
+
+    /// Model name
+    #[arg(long)]
+    model: Option<String>,
+
+    /// Print response only (headless-like)
+    #[arg(long)]
+    print: bool,
+
+    /// Prompt to send
+    #[arg()]
+    prompt: Vec<String>,
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    color_eyre::install()?;
+    let args = Args::parse();
+
+    let prompt = if args.prompt.is_empty() {
+        "Say hello".to_string()
+    } else {
+        args.prompt.join(" ")
+    };
+
+    let settings = load_settings(None).unwrap_or_default();
+    let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
+    let model = args.model.unwrap_or(settings.model);
+
+    let client = OllamaClient::new(base_url);
+    let opts = OllamaOptions {
+        model,
+        stream: true,
+    };
+
+    let msgs = vec![ChatMessage {
+        role: "user".into(),
+        content: prompt,
+    }];
+
+    let mut stream = Box::pin(client.chat_stream(&msgs, &opts).await?); // pin: try_next needs Unpin
+    while let Some(chunk) = stream.try_next().await? {
+        if let Some(m) = chunk.message {
+            if let Some(c) = m.content {
+                print!("{c}");
+                io::stdout().flush()?;
+            }
+        }
+        if matches!(chunk.done, Some(true)) {
+            break;
+        }
+    }
+    Ok(())
+}
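
Note: for reference, the intended invocation once the workspace builds looks like the line below (model and prompt are illustrative; omitted flags fall back to the config-agent defaults):

    cargo run -p code -- --model qwen2.5 "explain ownership in one paragraph"
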
diff --git a/crates/cli/tests/chat_stream.rs b/crates/cli/tests/chat_stream.rs
new file mode 100644
index 0000000..8a001f5
--- /dev/null
+++ b/crates/cli/tests/chat_stream.rs
@@ -0,0 +1,39 @@
+use assert_cmd::Command;
+use httpmock::prelude::*;
+
+#[tokio::test]
+async fn headless_streams_ndjson() {
+    let server = MockServer::start_async().await;
+    // Mock /api/chat with NDJSON lines
+    let body = serde_json::json!({
+        "model": "qwen2.5",
+        "messages": [{"role": "user", "content": "hello"}],
+        "stream": true
+    });
+
+    let response = concat!(
+        r#"{"message":{"role":"assistant","content":"Hel"}}"#, "\n",
+        r#"{"message":{"role":"assistant","content":"lo"}}"#, "\n",
+        r#"{"done":true}"#, "\n",
+    );
+
+    let _m = server.mock(|when, then| {
+        when.method(POST)
+            .path("/api/chat")
+            .json_body(body.clone());
+        then.status(200)
+            .header("content-type", "application/x-ndjson")
+            .body(response);
+    });
+
+    let mut cmd = Command::cargo_bin("code").unwrap();
+    cmd.arg("--ollama-url").arg(server.base_url())
+        .arg("--model").arg("qwen2.5")
+        .arg("--print")
+        .arg("hello");
+
+    // The streamed fragments concatenate to "Hello" on stdout.
+    cmd.assert()
+        .success()
+        .stdout(predicates::str::contains("Hello"));
+}
diff --git a/crates/config/.gitignore b/crates/config/.gitignore
new file mode 100644
index 0000000..34f5f2c
--- /dev/null
+++ b/crates/config/.gitignore
@@ -0,0 +1,22 @@
+/target
+### Rust template
+# Generated by Cargo
+# will have compiled files and executables
+debug/
+target/
+
+# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
+# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
+Cargo.lock
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
+
+# MSVC Windows builds of rustc generate these, which store debugging information
+*.pdb
+
+### rust-analyzer template
+# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
+rust-project.json
+
+
diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml
new file mode 100644
index 0000000..edb1c06
--- /dev/null
+++ b/crates/config/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "config-agent"
+version = "0.1.0"
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+directories = "5"
+figment = { version = "0.10", features = ["toml", "env"] }
+toml = "0.8"
diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs
new file mode 100644
index 0000000..80252bd
--- /dev/null
+++ b/crates/config/src/lib.rs
@@ -0,0 +1,55 @@
+use directories::ProjectDirs;
+use figment::{
+    Figment,
+    providers::{Env, Format, Serialized, Toml},
+};
+use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Settings {
+    #[serde(default = "default_ollama_url")]
+    pub ollama_url: String,
+    #[serde(default = "default_model")]
+    pub model: String,
+    #[serde(default = "default_mode")]
+    pub mode: String, // "plan" (read-only) for now
+}
+
+fn default_ollama_url() -> String {
+    "http://localhost:11434".into()
+}
+fn default_model() -> String {
+    "qwen2.5".into()
+}
+fn default_mode() -> String {
+    "plan".into()
+}
+
+impl Default for Settings {
+    fn default() -> Self {
+        Self {
+            ollama_url: default_ollama_url(),
+            model: default_model(),
+            mode: default_mode(),
+        }
+    }
+}
+
+pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Error> {
+    // Later merges take precedence in figment: defaults < env < user file < project file.
+    let mut fig = Figment::from(Serialized::defaults(Settings::default()))
+        .merge(Env::prefixed("CODE_").split("__"));
+    // User file: ~/.config/owlen/config.toml
+    if let Some(pd) = ProjectDirs::from("dev", "owlibou", "owlen") {
+        let user = pd.config_dir().join("config.toml");
+        fig = fig.merge(Toml::file(user));
+    }
+
+    // Project file: <root>/.owlen.toml
+    if let Some(root) = project_root {
+        fig = fig.merge(Toml::file(PathBuf::from(root).join(".owlen.toml")));
+    }
+
+    fig.extract()
+}
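
Note: a minimal project-level .owlen.toml that load_settings would pick up might look like this (values illustrative, mirroring the defaults above):

    ollama_url = "http://localhost:11434"
    model = "qwen2.5"
    mode = "plan"

The same keys can also come from the environment as CODE_OLLAMA_URL, CODE_MODEL, and CODE_MODE, though per the merge order above the TOML files override environment values.
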
diff --git a/crates/llm/ollama/.gitignore b/crates/llm/ollama/.gitignore
new file mode 100644
index 0000000..34f5f2c
--- /dev/null
+++ b/crates/llm/ollama/.gitignore
@@ -0,0 +1,22 @@
+/target
+### Rust template
+# Generated by Cargo
+# will have compiled files and executables
+debug/
+target/
+
+# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
+# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
+Cargo.lock
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
+
+# MSVC Windows builds of rustc generate these, which store debugging information
+*.pdb
+
+### rust-analyzer template
+# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
+rust-project.json
+
+
diff --git a/crates/llm/ollama/Cargo.toml b/crates/llm/ollama/Cargo.toml
new file mode 100644
index 0000000..70935fb
--- /dev/null
+++ b/crates/llm/ollama/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "llm-ollama"
+version = "0.1.0"
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[dependencies]
+reqwest = { version = "0.12", features = ["json", "stream"] }
+tokio = { version = "1.39", features = ["rt-multi-thread", "macros"] }
+futures = "0.3"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+thiserror = "1"
+bytes = "1"
+tokio-stream = "0.1.17"
diff --git a/crates/llm/ollama/src/client.rs b/crates/llm/ollama/src/client.rs
new file mode 100644
index 0000000..4a0c386
--- /dev/null
+++ b/crates/llm/ollama/src/client.rs
@@ -0,0 +1,84 @@
+use crate::types::{ChatMessage, ChatResponseChunk};
+use futures::{Stream, TryStreamExt};
+use reqwest::Client;
+use serde::Serialize;
+use thiserror::Error;
+
+#[derive(Debug, Clone)]
+pub struct OllamaClient {
+    http: Client,
+    base_url: String, // e.g. "http://localhost:11434"
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct OllamaOptions {
+    pub model: String,
+    pub stream: bool,
+}
+
+#[derive(Error, Debug)]
+pub enum OllamaError {
+    #[error("http: {0}")]
+    Http(#[from] reqwest::Error),
+    #[error("json: {0}")]
+    Json(#[from] serde_json::Error),
+    #[error("protocol: {0}")]
+    Protocol(String),
+}
+
+impl OllamaClient {
+    pub fn new(base_url: impl Into<String>) -> Self {
+        Self {
+            http: Client::new(),
+            base_url: base_url.into().trim_end_matches('/').to_string(),
+        }
+    }
+
+    pub fn with_cloud() -> Self {
+        // Same API, different base
+        Self::new("https://ollama.com")
+    }
+
+    pub async fn chat_stream(
+        &self,
+        messages: &[ChatMessage],
+        opts: &OllamaOptions,
+    ) -> Result<impl Stream<Item = Result<ChatResponseChunk, OllamaError>>, OllamaError> {
+        #[derive(Serialize)]
+        struct Body<'a> {
+            model: &'a str,
+            messages: &'a [ChatMessage],
+            stream: bool,
+        }
+        let url = format!("{}/api/chat", self.base_url);
+        let body = Body { model: &opts.model, messages, stream: opts.stream };
+        let resp = self.http.post(url).json(&body).send().await?;
+        let bytes_stream = resp.bytes_stream();
+
+        // NDJSON parser: split on '\n' and stream each parsed line (assumes chunk boundaries fall on newlines, as Ollama's line-buffered output provides).
+        let out = bytes_stream
+            .map_err(OllamaError::Http)
+            .map_ok(|bytes| {
+                // Convert the chunk to a UTF-8 string and own it
+                let txt = String::from_utf8_lossy(&bytes).into_owned();
+                // Parse each non-empty line into a ChatResponseChunk
+                let results: Vec<Result<ChatResponseChunk, OllamaError>> = txt
+                    .lines()
+                    .filter_map(|line| {
+                        let trimmed = line.trim();
+                        if trimmed.is_empty() {
+                            None
+                        } else {
+                            Some(
+                                serde_json::from_str::<ChatResponseChunk>(trimmed)
+                                    .map_err(OllamaError::Json),
+                            )
+                        }
+                    })
+                    .collect();
+                futures::stream::iter(results)
+            })
+            .try_flatten(); // Stream<Item = Result<ChatResponseChunk, OllamaError>>
+        Ok(out)
+    }
+}
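
Note: for context, a streaming /api/chat response is one JSON object per line, which is exactly what the parser above consumes; the frames below are the ones the cli test replays:

    {"message":{"role":"assistant","content":"Hel"}}
    {"message":{"role":"assistant","content":"lo"}}
    {"done":true}
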
"http://localhost:11434" +} + +#[derive(Debug, Clone, Default)] +pub struct OllamaOptions { + pub model: String, + pub stream: bool, +} + +#[derive(Error, Debug)] +pub enum OllamaError { + #[error("http: {0}")] + Http(#[from] reqwest::Error), + #[error("json: {0}")] + Json(#[from] serde_json::Error), + #[error("protocol: {0}")] + Protocol(String), +} + +impl OllamaClient { + pub fn new(base_url: impl Into) -> Self { + Self { + http: Client::new(), + base_url: base_url.into().trim_end_matches('/').to_string(), + } + } + + pub fn with_cloud() -> Self { + // Same API, different base + Self::new("https://ollama.com") + } + + pub async fn chat_stream( + &self, + messages: &[ChatMessage], + opts: &OllamaOptions, + ) -> Result>, OllamaError> { + #[derive(Serialize)] + struct Body<'a> { + model: &'a str, + messages: &'a [ChatMessage], + stream: bool, + } + let url = format!("{}/api/chat", self.base_url); + let body = Body {model: &opts.model, messages, stream: true}; + let resp = self.http.post(url).json(&body).send().await?; + let bytes_stream = resp.bytes_stream(); + + // NDJSON parser: split by '\n', parse each as JSON and stream the results + let out = bytes_stream + .map_err(OllamaError::Http) + .map_ok(|bytes| { + // Convert the chunk to a UTF‑8 string and own it + let txt = String::from_utf8_lossy(&bytes).into_owned(); + // Parse each non‑empty line into a ChatResponseChunk + let results: Vec> = txt + .lines() + .filter_map(|line| { + let trimmed = line.trim(); + if trimmed.is_empty() { + None + } else { + Some( + serde_json::from_str::(trimmed) + .map_err(OllamaError::Json), + ) + } + }) + .collect(); + futures::stream::iter(results) + }) + .try_flatten(); // Stream> + Ok(out) + } +} diff --git a/crates/llm/ollama/src/lib.rs b/crates/llm/ollama/src/lib.rs new file mode 100644 index 0000000..1b4af68 --- /dev/null +++ b/crates/llm/ollama/src/lib.rs @@ -0,0 +1,5 @@ +pub mod client; +pub mod types; + +pub use client::{OllamaClient, OllamaOptions}; +pub use types::{ChatMessage, ChatResponseChunk}; diff --git a/crates/llm/ollama/src/types.rs b/crates/llm/ollama/src/types.rs new file mode 100644 index 0000000..10ff880 --- /dev/null +++ b/crates/llm/ollama/src/types.rs @@ -0,0 +1,22 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChatMessage { + pub role: String, // "user", | "assistant" | "system" + pub content: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ChatResponseChunk { + pub model: Option, + pub created_at: Option, + pub message: Option, + pub done: Option, + pub total_duration: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ChunkMessage { + pub role: Option, + pub content: Option, +} diff --git a/crates/llm/ollama/tests/ndjson.rs b/crates/llm/ollama/tests/ndjson.rs new file mode 100644 index 0000000..63d8942 --- /dev/null +++ b/crates/llm/ollama/tests/ndjson.rs @@ -0,0 +1,12 @@ +use llm_ollama::{OllamaClient, OllamaOptions}; + +// This test stubs NDJSON by spinning a tiny local server is overkill for M0. +// Instead, test the line parser indirectly by mocking reqwest is complex. +// We'll smoke-test the client type compiles and leave end-to-end to cli tests. + +#[tokio::test] +async fn client_compiles_smoke() { + let _ = OllamaClient::new("http://localhost:11434"); + let _ = OllamaClient::with_cloud(); + let _ = OllamaOptions { model: "qwen2.5".into(), stream: true }; +}