- Include a detailed architecture overview in `docs/architecture.md`.
- Add `docs/configuration.md`, detailing configuration file structure and settings (an illustrative sketch follows this list).
- Provide a step-by-step provider implementation guide in `docs/provider-implementation.md` (see the provider sketch after the basic chat example below).
- Add a frequently asked questions (FAQ) document in `docs/faq.md`.
- Create `docs/migration-guide.md` for future breaking changes and version upgrades.
- Introduce new examples in `examples/` showcasing basic chat, custom providers, and theming.
- Add a changelog (`CHANGELOG.md`) for tracking significant changes.
- Provide contribution guidelines (`CONTRIBUTING.md`) and a Code of Conduct (`CODE_OF_CONDUCT.md`).
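To give a sense of what `docs/configuration.md` could cover, here is a minimal, purely illustrative sketch of how a configuration file might be modeled and loaded with `serde`. The `Config` struct and its fields (`provider`, `model`, `theme`) are assumptions for illustration only, not Owlen's actual configuration schema.

```rust
use serde::Deserialize;

// Hypothetical configuration shape; the real fields, defaults, and file
// location will be documented in `docs/configuration.md`.
#[derive(Debug, Deserialize)]
struct Config {
    /// Backend to use, e.g. "ollama".
    provider: String,
    /// Default model name, e.g. "llama2".
    model: String,
    /// Optional UI theme name.
    theme: Option<String>,
}

fn main() -> anyhow::Result<()> {
    // Parse a TOML snippet into the hypothetical Config struct.
    let raw = r#"
        provider = "ollama"
        model = "llama2"
        theme = "dark"
    "#;
    let cfg: Config = toml::from_str(raw)?;
    println!("{cfg:?}");
    Ok(())
}
```

(This sketch assumes the `serde`, `toml`, and `anyhow` crates.)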
The basic chat example (Rust):
```rust
// This example demonstrates a basic chat interaction without the TUI.

use owlen_core::model::Model;
use owlen_core::provider::Provider;
use owlen_core::session::Session;
use owlen_ollama::OllamaProvider; // Assuming you have an Ollama provider

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // This example requires a running Ollama instance.
    // Make sure you have a model available, e.g., `ollama pull llama2`

    let provider = OllamaProvider;
    let model = Model::new("llama2"); // Change to a model you have
    let mut session = Session::new("basic-chat-session");

    println!("Starting basic chat with model: {}", model.name);

    let user_message = "What is the capital of France?";
    session.add_message("user", user_message);
    println!("User: {}", user_message);

    // Send the chat to the provider
    let response = provider.chat(&session, &model).await?;

    session.add_message("bot", &response);
    println!("Bot: {}", response);

    Ok(())
}
```
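The custom provider example and `docs/provider-implementation.md` will cover adding new backends. As a rough, self-contained sketch of the idea, the code below defines a toy `EchoProvider` against a hypothetical `Provider` trait that mirrors how `provider.chat(&session, &model)` is called above; the trait signature, the stand-in `Session`/`Model` types, and `EchoProvider` itself are assumptions, not the actual `owlen_core` API.

```rust
use anyhow::Result;
use async_trait::async_trait;

// Stand-ins for owlen_core's Session and Model, kept minimal so this
// sketch compiles on its own; the real types live in owlen_core.
struct Model { name: String }
struct Session { messages: Vec<(String, String)> }

// Hypothetical trait mirroring how the basic chat example calls
// `provider.chat(&session, &model)`; the real trait is
// `owlen_core::provider::Provider`.
#[async_trait]
trait Provider {
    async fn chat(&self, session: &Session, model: &Model) -> Result<String>;
}

// A toy provider that echoes the last user message back, useful for
// exercising the chat flow without any running backend.
struct EchoProvider;

#[async_trait]
impl Provider for EchoProvider {
    async fn chat(&self, session: &Session, model: &Model) -> Result<String> {
        let last_user = session
            .messages
            .iter()
            .rev()
            .find(|(role, _)| role == "user")
            .map(|(_, text)| text.clone())
            .unwrap_or_default();
        Ok(format!("[{}] echo: {}", model.name, last_user))
    }
}

#[tokio::main]
async fn main() -> Result<()> {
    let provider = EchoProvider;
    let model = Model { name: "echo".into() };
    let session = Session {
        messages: vec![("user".into(), "What is the capital of France?".into())],
    };
    println!("{}", provider.chat(&session, &model).await?);
    Ok(())
}
```

A real provider would perform an HTTP or local API call inside `chat`, but the trait-based shape shown here is what lets the rest of the application stay backend-agnostic.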