Set up Cargo workspace with initial crates: - cli: main application entry point with chat streaming tests - config: configuration management - llm/ollama: Ollama client integration with NDJSON support Includes .gitignore for Rust and JetBrains IDEs. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
17 lines
401 B
TOML
[package]
name = "llm-ollama"
version = "0.1.0"
# edition, license, and rust-version are inherited from the workspace root
# (see [workspace.package] in the top-level Cargo.toml).
edition.workspace = true
license.workspace = true
rust-version.workspace = true

# Dependencies sorted alphabetically per Cargo convention.
[dependencies]
bytes = "1"
futures = "0.3"
# "stream" feature exposes the response body as a bytes stream (NDJSON parsing).
reqwest = { version = "0.12", features = ["json", "stream"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "1"
tokio = { version = "1.39", features = ["rt-multi-thread"] }
tokio-stream = "0.1.17"