Compare commits
219 Commits
v0.1.0...688d1fe58a
| SHA1 | Author | Date | |
|---|---|---|---|
| 688d1fe58a | |||
| b1b95a4560 | |||
| a024a764d6 | |||
| 686526bbd4 | |||
| 5134462deb | |||
| d7ddc365ec | |||
| 6108b9e3d1 | |||
| a6cf8585ef | |||
| baf833427a | |||
| d21945dbc0 | |||
| 7f39bf1eca | |||
| dcda8216dc | |||
| ff49e7ce93 | |||
| b63d26f0cd | |||
| 64fd3206a2 | |||
| 2a651ebd7b | |||
| 491fd049b0 | |||
| c9e2f9bae6 | |||
| 7b87459a72 | |||
| 4935a64a13 | |||
| a84c8a425d | |||
| 16c0e71147 | |||
| 0728262a9e | |||
| 7aa80fb0a4 | |||
| 28b6eb0a9a | |||
| 353c0a8239 | |||
| 44b07c8e27 | |||
| 76e59c2d0e | |||
| c92e07b866 | |||
| 9aa8722ec3 | |||
| 7daa4f4ebe | |||
| a788b8941e | |||
| 16bc534837 | |||
| eef0e3dea0 | |||
| 5d9ecec82c | |||
| 6980640324 | |||
| a0868a9b49 | |||
| 877ece07be | |||
| f6a3f235df | |||
| a4f7a45e56 | |||
| 94ef08db6b | |||
| 57942219a8 | |||
| 03244e8d24 | |||
| d7066d7d37 | |||
| 124db19e68 | |||
| e89da02d49 | |||
| cf0a8f21d5 | |||
| 2d45406982 | |||
| f592840d39 | |||
| 9090bddf68 | |||
| 4981a63224 | |||
| 1238bbe000 | |||
| f29f306692 | |||
| 9024e2b914 | |||
| 6849d5ef12 | |||
| 3c6e689de9 | |||
| 1994367a2e | |||
| c3a92a092b | |||
| 6a94373c4f | |||
| 83280f68cc | |||
| 21759898fb | |||
| 02df6d893c | |||
| 8f9d601fdc | |||
| 40e42c8918 | |||
| 6e12bb3acb | |||
| 16b6f24e3e | |||
| 25628d1d58 | |||
| e813736b47 | |||
| 7e2c6ea037 | |||
| 3f6d7d56f6 | |||
| bbb94367e1 | |||
| 79fdafce97 | |||
| 24671f5f2a | |||
| e0b14a42f2 | |||
| 3e8788dd44 | |||
| 38a4c55eaa | |||
| c7b7fe98ec | |||
| 4820a6706f | |||
| 3308b483f7 | |||
| 4ce4ac0b0e | |||
| 3722840d2c | |||
| 02f25b7bec | |||
| d86888704f | |||
| de6b6e20a5 | |||
| 1e8a5e08ed | |||
| 218ebbf32f | |||
| c49e7f4b22 | |||
| 9588c8c562 | |||
| 1948ac1284 | |||
| 3f92b7d963 | |||
| 5553e61dbf | |||
| 7f987737f9 | |||
| 5182f86133 | |||
| a50099ad74 | |||
| 20ba5523ee | |||
| 0b2b3701dc | |||
| 438b05b8a3 | |||
| e2a31b192f | |||
| b827d3d047 | |||
| 9c0cf274a3 | |||
| 85ae319690 | |||
| 449f133a1f | |||
| 2f6b03ef65 | |||
| d4030dc598 | |||
| 3271697f6b | |||
| cbfef5a5df | |||
| 52efd5f341 | |||
| 200cdbc4bd | |||
| 8525819ab4 | |||
| bcd52d526c | |||
| 7effade1d3 | |||
| dc0fee2ee3 | |||
| ea04a25ed6 | |||
| 282dcdce88 | |||
| b49f58bc16 | |||
| cdc425ae93 | |||
| 3525cb3949 | |||
| 9d85420bf6 | |||
| 641c95131f | |||
| 708c626176 | |||
| 5210e196f2 | |||
| 30c375b6c5 | |||
| baf49b1e69 | |||
| 96e0436d43 | |||
| 498e6e61b6 | |||
| 99064b6c41 | |||
| ee58b0ac32 | |||
| 990f93d467 | |||
| 44a00619b5 | |||
| 6923ee439f | |||
| c997b19b53 | |||
| c9daf68fea | |||
| ba9d083088 | |||
| 825dfc0722 | |||
| 3e4eacd1d3 | |||
| 23253219a3 | |||
| cc2b85a86d | |||
| 58dd6f3efa | |||
| c81d0f1593 | |||
| ae0dd3fc51 | |||
| 80dffa9f41 | |||
| ab0ae4fe04 | |||
| d31e068277 | |||
| 690f5c7056 | |||
| 0da8a3f193 | |||
| 15f81d9728 | |||
| b80db89391 | |||
| f413a63c5a | |||
| 33ad3797a1 | |||
| 55e6b0583d | |||
| ae9c3af096 | |||
| 0bd560b408 | |||
| 083b621b7d | |||
| d2a193e5c1 | |||
| acbfe47a4b | |||
| 60c859b3ab | |||
| 82078afd6d | |||
| 7851af14a9 | |||
| c2f5ccea3b | |||
| fab63d224b | |||
| 15e5c1206b | |||
| 38aba1a6bb | |||
| d0d3079df5 | |||
| 56de1170ee | |||
| 952e4819fe | |||
| 5ac0d152cb | |||
| 40c44470e8 | |||
| 5c37df1b22 | |||
| 5e81185df3 | |||
| 7534c9ef8d | |||
| 9545a4b3ad | |||
| e94df2c48a | |||
| cdf95002fc | |||
| 4c066bf2da | |||
| e57844e742 | |||
| 33d11ae223 | |||
| 05e90d3e2b | |||
| fe414d49e6 | |||
| d002d35bde | |||
| c9c3d17db0 | |||
| a909455f97 | |||
| 67381b02db | |||
| 235f84fa19 | |||
| 9c777c8429 | |||
| 0b17a0f4c8 | |||
| 2eabe55fe6 | |||
| 4d7ad2c330 | |||
| 13af046eff | |||
| 5b202fed4f | |||
| 979347bf53 | |||
| 76b55ccff5 | |||
| f0e162d551 | |||
| 6c4571804f | |||
| a0cdcfdf6c | |||
| 96e2482782 | |||
| 6a3f44f911 | |||
| e0e5a2a83d | |||
| 23e86591d1 | |||
| b60a317788 | |||
| 2788e8b7e2 | |||
| 7c186882dc | |||
| bdda669d4d | |||
| 108070db4b | |||
| 08ba04e99f | |||
| e58032deae | |||
| 5c59539120 | |||
| c725bb1ce6 | |||
| c4a6bb1c0f | |||
| dcbfe6ef06 | |||
| e468658d63 | |||
| 2ad801f0c1 | |||
| 1bfc6e5956 | |||
| 6b8774f0aa | |||
| ec6876727f | |||
| e3eb4d7a04 | |||
| 7234021014 | |||
| 662d5bd919 | |||
| 263b629257 | |||
| ff90b20baa | |||
@@ -1,149 +0,0 @@ (workflow file deleted; its former contents follow)
name: Release

on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    name: Build ${{ matrix.target }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        include:
          # Linux
          - os: ubuntu-latest
            target: x86_64-unknown-linux-gnu
            artifact_name: owlen-linux-x86_64-gnu
          - os: ubuntu-latest
            target: x86_64-unknown-linux-musl
            artifact_name: owlen-linux-x86_64-musl
          - os: ubuntu-latest
            target: aarch64-unknown-linux-gnu
            artifact_name: owlen-linux-aarch64-gnu
          - os: ubuntu-latest
            target: aarch64-unknown-linux-musl
            artifact_name: owlen-linux-aarch64-musl
          - os: ubuntu-latest
            target: armv7-unknown-linux-gnueabihf
            artifact_name: owlen-linux-armv7-gnu
          - os: ubuntu-latest
            target: armv7-unknown-linux-musleabihf
            artifact_name: owlen-linux-armv7-musl
          # Windows
          - os: windows-latest
            target: x86_64-pc-windows-msvc
            artifact_name: owlen-windows-x86_64
          - os: windows-latest
            target: aarch64-pc-windows-msvc
            artifact_name: owlen-windows-aarch64
          # macOS
          - os: macos-latest
            target: x86_64-apple-darwin
            artifact_name: owlen-macos-x86_64
          - os: macos-latest
            target: aarch64-apple-darwin
            artifact_name: owlen-macos-aarch64

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Install Rust
        uses: https://github.com/dtolnay/rust-toolchain@stable
        with:
          targets: ${{ matrix.target }}

      - name: Install cross-compilation tools (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo apt-get update
          sudo apt-get install -y musl-tools gcc-aarch64-linux-gnu gcc-arm-linux-gnueabihf

      - name: Build
        shell: bash
        run: |
          case "${{ matrix.target }}" in
            aarch64-unknown-linux-gnu)
              export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc
              ;;
            aarch64-unknown-linux-musl)
              export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=aarch64-linux-gnu-gcc
              export CC_aarch64_unknown_linux_musl=aarch64-linux-gnu-gcc
              ;;
            armv7-unknown-linux-gnueabihf)
              export CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc
              ;;
            armv7-unknown-linux-musleabihf)
              export CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=arm-linux-gnueabihf-gcc
              export CC_armv7_unknown_linux_musleabihf=arm-linux-gnueabihf-gcc
              ;;
          esac
          cargo build --release --all-features --target ${{ matrix.target }}

      - name: Package binaries (Unix)
        if: runner.os != 'Windows'
        run: |
          mkdir -p dist
          cp target/${{ matrix.target }}/release/owlen dist/owlen
          cp target/${{ matrix.target }}/release/owlen-code dist/owlen-code
          cd dist
          tar czf ${{ matrix.artifact_name }}.tar.gz owlen owlen-code
          cd ..
          mv dist/${{ matrix.artifact_name }}.tar.gz .

      - name: Package binaries (Windows)
        if: runner.os == 'Windows'
        shell: bash
        run: |
          mkdir -p dist
          cp target/${{ matrix.target }}/release/owlen.exe dist/owlen.exe
          cp target/${{ matrix.target }}/release/owlen-code.exe dist/owlen-code.exe
          cd dist
          7z a -tzip ../${{ matrix.artifact_name }}.zip owlen.exe owlen-code.exe

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.artifact_name }}
          path: |
            ${{ matrix.artifact_name }}.tar.gz
            ${{ matrix.artifact_name }}.zip

  release:
    name: Create Release
    needs: build
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Create source tarball
        run: |
          git archive --format=tar.gz --prefix=owlen/ -o owlen-${{ github.ref_name }}.tar.gz ${{ github.ref_name }}

      - name: Generate checksums
        shell: bash
        run: |
          cd artifacts
          find . -name "*.tar.gz" -exec mv {} . \; 2>/dev/null || true
          find . -name "*.zip" -exec mv {} . \; 2>/dev/null || true
          cd ..
          mv artifacts/*.tar.gz . 2>/dev/null || true
          mv artifacts/*.zip . 2>/dev/null || true
          sha256sum *.tar.gz *.zip > checksums.txt 2>/dev/null || sha256sum * > checksums.txt

      - name: Create Release
        uses: https://gitea.com/gitea/release-action@main
        with:
          files: |
            *.tar.gz
            *.zip
            checksums.txt
          api_key: ${{ secrets.RELEASE_TOKEN }}
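
The "Generate checksums" step above publishes a `checksums.txt` alongside the release archives. A minimal sketch of verifying a downloaded asset against that file on Linux (the archive name is one of the `artifact_name` values from the build matrix; substitute whichever asset was actually downloaded):

```bash
# Check the downloaded archives against the published checksum list.
# --ignore-missing skips checksum entries for assets that were not downloaded.
sha256sum -c --ignore-missing checksums.txt

# Unpack the archive produced by the "Package binaries (Unix)" step;
# it contains the owlen and owlen-code binaries.
tar xzf owlen-linux-x86_64-gnu.tar.gz
```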
34  .gitignore (vendored)

@@ -1,9 +1,12 @@
+### Custom
+AGENTS.md
+CLAUDE.md
+
 ### Rust template
 # Generated by Cargo
 # will have compiled files and executables
 debug/
 target/
-dev/
 
 # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
 # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
@@ -15,17 +18,10 @@ Cargo.lock
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
 
-# RustRover
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
 ### JetBrains template
 # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
 # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
 
-.idea/
 # User-specific stuff
 .idea/**/workspace.xml
 .idea/**/tasks.xml
@@ -56,14 +52,15 @@ Cargo.lock
 # When using Gradle or Maven with auto-import, you should exclude module files,
 # since they will be recreated, and may cause churn. Uncomment if using
 # auto-import.
-# .idea/artifacts
-# .idea/compiler.xml
-# .idea/jarRepositories.xml
-# .idea/modules.xml
-# .idea/*.iml
-# .idea/modules
-# *.iml
-# *.ipr
+.idea/artifacts
+.idea/compiler.xml
+.idea/jarRepositories.xml
+.idea/modules.xml
+.idea/*.iml
+.idea/modules
+*.iml
+*.ipr
+.idea
 
 # CMake
 cmake-build-*/
@@ -101,3 +98,8 @@ fabric.properties
 # Android studio 3.1+ serialized cache file
 .idea/caches/build_file_checksums.ser
 
+### rust-analyzer template
+# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
+rust-project.json
+
+
70  Cargo.toml

@@ -1,64 +1,18 @@
 [workspace]
-resolver = "2"
 members = [
-    "crates/owlen-core",
-    "crates/owlen-tui",
-    "crates/owlen-cli",
-    "crates/owlen-ollama",
+    "crates/app/cli",
+    "crates/llm/ollama",
+    "crates/platform/config",
+    "crates/platform/hooks",
+    "crates/platform/permissions",
+    "crates/tools/bash",
+    "crates/tools/fs",
+    "crates/tools/slash",
+    "crates/integration/mcp-client",
 ]
-exclude = []
+resolver = "2"
 
 [workspace.package]
-version = "0.1.0"
-edition = "2021"
-authors = ["Owlibou"]
+edition = "2024"
 license = "AGPL-3.0"
-repository = "https://somegit.dev/Owlibou/owlen"
-homepage = "https://somegit.dev/Owlibou/owlen"
-keywords = ["llm", "tui", "cli", "ollama", "chat"]
-categories = ["command-line-utilities"]
-
-[workspace.dependencies]
-# Async runtime and utilities
-tokio = { version = "1.0", features = ["full"] }
-tokio-stream = "0.1"
-tokio-util = { version = "0.7", features = ["rt"] }
-futures = "0.3"
-futures-util = "0.3"
-
-# TUI framework
-ratatui = "0.28"
-crossterm = "0.28"
-tui-textarea = "0.6"
-
-# HTTP client and JSON handling
-reqwest = { version = "0.12", features = ["json", "stream"] }
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-
-# Utilities
-uuid = { version = "1.0", features = ["v4", "serde"] }
-anyhow = "1.0"
-thiserror = "1.0"
-
-# Configuration
-toml = "0.8"
-shellexpand = "3.1"
-
-# Database
-sled = "0.34"
-
-# For better text handling
-textwrap = "0.16"
-
-# Async traits
-async-trait = "0.1"
-
-# CLI framework
-clap = { version = "4.0", features = ["derive"] }
-
-# Dev dependencies
-tempfile = "3.8"
-tokio-test = "0.4"
-
-# For more keys and their definitions, see https://doc.rust-lang.org/cargo/reference/manifest.html
+rust-version = "1.91"
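
The new manifest above only declares the workspace members and shared package metadata. Assuming the member crates build with the same flags the release workflow and the PKGBUILD use, the whole tree can be built and tested from the workspace root; a sketch:

```bash
# Build and test every workspace member with all features enabled,
# mirroring the flags used elsewhere in this diff (release workflow, PKGBUILD).
cargo build --workspace --release --all-features
cargo test --workspace --all-features
```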
662  LICENSE

@@ -1,662 +0,0 @@ (file deleted)
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
[... the remaining standard text of the AGPL-3.0 license, removed in this diff, is omitted here ...]
45  PKGBUILD

@@ -1,45 +0,0 @@ (file deleted; its former contents follow)
# Maintainer: Owlibou
pkgname=owlen
pkgver=0.1.0
pkgrel=1
pkgdesc="Terminal User Interface LLM client for Ollama with chat and code assistance features"
arch=('x86_64' 'aarch64')
url="https://somegit.dev/Owlibou/owlen"
license=('AGPL-3.0-only')
depends=('gcc-libs')
makedepends=('cargo' 'git')
source=("${pkgname}-${pkgver}.tar.gz::https://somegit.dev/Owlibou/owlen/archive/v${pkgver}.tar.gz")
sha256sums=('SKIP') # Update this after first release

prepare() {
  cd "$pkgname"
  export RUSTUP_TOOLCHAIN=stable
  cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')"
}

build() {
  cd "$pkgname"
  export RUSTUP_TOOLCHAIN=stable
  export CARGO_TARGET_DIR=target
  cargo build --frozen --release --all-features
}

check() {
  cd "$pkgname"
  export RUSTUP_TOOLCHAIN=stable
  cargo test --frozen --all-features
}

package() {
  cd "$pkgname"

  # Install binaries
  install -Dm755 "target/release/owlen" "$pkgdir/usr/bin/owlen"
  install -Dm755 "target/release/owlen-code" "$pkgdir/usr/bin/owlen-code"

  # Install license
  install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"

  # Install documentation
  install -Dm644 README.md "$pkgdir/usr/share/doc/$pkgname/README.md"
}
README.md (279 lines deleted)
@@ -1,279 +0,0 @@
|
|||||||
# OWLEN
|
|
||||||
|
|
||||||
> Terminal-native assistant for running local language models with a comfortable TUI.
|
|
||||||
|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||
|
|
||||||
## Alpha Status
|
|
||||||
|
|
||||||
- This project is currently in **alpha** (v0.1.0) and under active development.
|
|
||||||
- Core features are functional but expect occasional bugs and missing polish.
|
|
||||||
- Breaking changes may occur between releases as we refine the API.
|
|
||||||
- Feedback, bug reports, and contributions are very welcome!
|
|
||||||
|
|
||||||
## What Is OWLEN?
|
|
||||||
|
|
||||||
OWLEN is a Rust-powered, terminal-first interface for interacting with local large
|
|
||||||
language models. It provides a responsive chat workflow that runs against
|
|
||||||
[Ollama](https://ollama.com/) with a focus on developer productivity, vim-style navigation,
|
|
||||||
and seamless session management—all without leaving your terminal.
|
|
||||||
|
|
||||||
## Screenshots
|
|
||||||
|
|
||||||
### Initial Layout
|
|
||||||

|
|
||||||
|
|
||||||
The OWLEN interface features a clean, multi-panel layout with vim-inspired navigation. See more screenshots in the [`images/`](images/) directory, including:
|
|
||||||
- Full chat conversations (`chat_view.png`)
|
|
||||||
- Help menu (`help.png`)
|
|
||||||
- Model selection (`model_select.png`)
|
|
||||||
- Visual selection mode (`select_mode.png`)
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
### Chat Client (`owlen`)
|
|
||||||
- **Vim-style Navigation** - Normal, editing, visual, and command modes
|
|
||||||
- **Streaming Responses** - Real-time token streaming from Ollama
|
|
||||||
- **Multi-Panel Interface** - Separate panels for chat, thinking content, and input
|
|
||||||
- **Advanced Text Editing** - Multi-line input with `tui-textarea`, history navigation
|
|
||||||
- **Visual Selection & Clipboard** - Yank/paste text across panels
|
|
||||||
- **Flexible Scrolling** - Half-page, full-page, and cursor-based navigation
|
|
||||||
- **Model Management** - Interactive model and provider selection (press `m`)
|
|
||||||
- **Session Management** - Start new conversations, clear history
|
|
||||||
- **Thinking Mode Support** - Dedicated panel for extended reasoning content
|
|
||||||
- **Bracketed Paste** - Safe paste handling for multi-line content
|
|
||||||
|
|
||||||
### Code Client (`owlen-code`) [Experimental]
|
|
||||||
- All chat client features
|
|
||||||
- Optimized system prompt for programming assistance
|
|
||||||
- Foundation for future code-specific features
|
|
||||||
|
|
||||||
### Core Infrastructure
|
|
||||||
- **Modular Architecture** - Separated core logic, TUI components, and providers
|
|
||||||
- **Provider System** - Extensible provider trait (currently: Ollama); see the sketch after this list
|
|
||||||
- **Session Controller** - Unified conversation and state management
|
|
||||||
- **Configuration Management** - TOML-based config with sensible defaults
|
|
||||||
- **Message Formatting** - Markdown rendering, thinking content extraction
|
|
||||||
- **Async Runtime** - Built on Tokio for efficient streaming
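
The provider trait itself is not part of this diff; purely as an illustration of the shape such an extensible trait could take (every name below is hypothetical, not the actual owlen-core API):

```rust
// Hypothetical sketch only; the real trait in owlen-core may differ.
use std::pin::Pin;
use futures_util::Stream;

pub struct ChatMessage {
    pub role: String,
    pub content: String,
}

/// A chat backend the TUI can talk to (Ollama today, possibly others later).
pub trait Provider: Send + Sync {
    /// Short identifier shown in the model/provider selector, e.g. "ollama".
    fn name(&self) -> &str;

    /// Stream assistant tokens for the given conversation.
    fn chat_stream(
        &self,
        model: &str,
        messages: Vec<ChatMessage>,
    ) -> Pin<Box<dyn Stream<Item = Result<String, std::io::Error>> + Send>>;
}
```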
|
|
||||||
|
|
||||||
## Getting Started
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
- Rust 1.75+ and Cargo (`rustup` recommended)
|
|
||||||
- A running Ollama instance with at least one model pulled
|
|
||||||
(defaults to `http://localhost:11434`)
|
|
||||||
- A terminal that supports 256 colors
|
|
||||||
|
|
||||||
### Clone and Build
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://somegit.dev/Owlibou/owlen.git
|
|
||||||
cd owlen
|
|
||||||
cargo build --release
|
|
||||||
```
|
|
||||||
|
|
||||||
### Run the Chat Client
|
|
||||||
|
|
||||||
Make sure Ollama is running, then launch:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
./target/release/owlen
|
|
||||||
# or during development:
|
|
||||||
cargo run --bin owlen
|
|
||||||
```
|
|
||||||
|
|
||||||
### (Optional) Try the Code Client
|
|
||||||
|
|
||||||
The coding-focused TUI is experimental:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cargo build --release --bin owlen-code --features code-client
|
|
||||||
./target/release/owlen-code
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using the TUI
|
|
||||||
|
|
||||||
### Mode System (Vim-inspired)
|
|
||||||
|
|
||||||
**Normal Mode** (default):
|
|
||||||
- `i` / `Enter` - Enter editing mode
|
|
||||||
- `a` - Append (move right and enter editing mode)
|
|
||||||
- `A` - Append at end of line
|
|
||||||
- `I` - Insert at start of line
|
|
||||||
- `o` - Insert new line below
|
|
||||||
- `O` - Insert new line above
|
|
||||||
- `v` - Enter visual mode (text selection)
|
|
||||||
- `:` - Enter command mode
|
|
||||||
- `h/j/k/l` - Navigate left/down/up/right
|
|
||||||
- `w/b/e` - Word navigation
|
|
||||||
- `0/$` - Jump to line start/end
|
|
||||||
- `gg` - Jump to top
|
|
||||||
- `G` - Jump to bottom
|
|
||||||
- `Ctrl-d/u` - Half-page scroll
|
|
||||||
- `Ctrl-f/b` - Full-page scroll
|
|
||||||
- `Tab` - Cycle focus between panels
|
|
||||||
- `p` - Paste from clipboard
|
|
||||||
- `dd` - Clear input buffer
|
|
||||||
- `q` - Quit
|
|
||||||
|
|
||||||
**Editing Mode**:
|
|
||||||
- `Esc` - Return to normal mode
|
|
||||||
- `Enter` - Send message and return to normal mode
|
|
||||||
- `Ctrl-J` / `Shift-Enter` - Insert newline
|
|
||||||
- `Ctrl-↑/↓` - Navigate input history
|
|
||||||
- Paste events handled automatically
|
|
||||||
|
|
||||||
**Visual Mode**:
|
|
||||||
- `j/k/h/l` - Extend selection
|
|
||||||
- `w/b/e` - Word-based selection
|
|
||||||
- `y` - Yank (copy) selection
|
|
||||||
- `d` - Cut selection (Input panel only)
|
|
||||||
- `Esc` - Cancel selection
|
|
||||||
|
|
||||||
**Command Mode**:
|
|
||||||
- `:q` / `:quit` - Quit application
|
|
||||||
- `:c` / `:clear` - Clear conversation
|
|
||||||
- `:m` / `:model` - Open model selector
|
|
||||||
- `:n` / `:new` - Start new conversation
|
|
||||||
- `:h` / `:help` - Show help
|
|
||||||
|
|
||||||
### Panel Management
|
|
||||||
- Three panels: Chat, Thinking, and Input
|
|
||||||
- `Tab` / `Shift-Tab` - Cycle focus forward/backward
|
|
||||||
- Focused panel receives scroll and navigation commands
|
|
||||||
- Thinking panel appears when extended reasoning is available
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
OWLEN stores configuration in `~/.config/owlen/config.toml`. The file is created
|
|
||||||
on first run and can be edited to customize behavior:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
[general]
|
|
||||||
default_model = "llama3.2:latest"
|
|
||||||
default_provider = "ollama"
|
|
||||||
enable_streaming = true
|
|
||||||
project_context_file = "OWLEN.md"
|
|
||||||
|
|
||||||
[providers.ollama]
|
|
||||||
provider_type = "ollama"
|
|
||||||
base_url = "http://localhost:11434"
|
|
||||||
timeout = 300
|
|
||||||
```
|
|
||||||
|
|
||||||
Configuration is automatically saved when you change models or providers.
|
|
||||||
|
|
||||||
## Repository Layout
|
|
||||||
|
|
||||||
```
|
|
||||||
owlen/
|
|
||||||
├── crates/
|
|
||||||
│ ├── owlen-core/ # Core types, session management, shared UI components
|
|
||||||
│ ├── owlen-ollama/ # Ollama provider implementation
|
|
||||||
│ ├── owlen-tui/ # TUI components (chat_app, code_app, rendering)
|
|
||||||
│ └── owlen-cli/ # Binary entry points (owlen, owlen-code)
|
|
||||||
├── LICENSE # AGPL-3.0 License
|
|
||||||
├── Cargo.toml # Workspace configuration
|
|
||||||
└── README.md
|
|
||||||
```
|
|
||||||
|
|
||||||
### Architecture Highlights
|
|
||||||
- **owlen-core**: Provider-agnostic core with session controller, UI primitives (AutoScroll, InputMode, FocusedPanel), and shared utilities
|
|
||||||
- **owlen-tui**: Ratatui-based UI implementation with vim-style modal editing
|
|
||||||
- **Separation of Concerns**: Clean boundaries between business logic, presentation, and provider implementations
|
|
||||||
|
|
||||||
## Development
|
|
||||||
|
|
||||||
### Building
|
|
||||||
```bash
|
|
||||||
# Debug build
|
|
||||||
cargo build
|
|
||||||
|
|
||||||
# Release build
|
|
||||||
cargo build --release
|
|
||||||
|
|
||||||
# Build with all features
|
|
||||||
cargo build --all-features
|
|
||||||
|
|
||||||
# Run tests
|
|
||||||
cargo test
|
|
||||||
|
|
||||||
# Check code
|
|
||||||
cargo clippy
|
|
||||||
cargo fmt
|
|
||||||
```
|
|
||||||
|
|
||||||
### Development Notes
|
|
||||||
- Standard Rust workflows apply (`cargo fmt`, `cargo clippy`, `cargo test`)
|
|
||||||
- Codebase uses async Rust (`tokio`) for event handling and streaming
|
|
||||||
- Configuration is cached in `~/.config/owlen` (wipe to reset)
|
|
||||||
- UI components are extensively tested in `owlen-core/src/ui.rs`
|
|
||||||
|
|
||||||
## Roadmap
|
|
||||||
|
|
||||||
### Completed ✓
|
|
||||||
- [x] Streaming responses with real-time display
|
|
||||||
- [x] Autoscroll and viewport management
|
|
||||||
- [x] Push user message before loading LLM response
|
|
||||||
- [x] Thinking mode support with dedicated panel
|
|
||||||
- [x] Vim-style modal editing (Normal, Visual, Command modes)
|
|
||||||
- [x] Multi-panel focus management
|
|
||||||
- [x] Text selection and clipboard functionality
|
|
||||||
- [x] Comprehensive keyboard navigation
|
|
||||||
- [x] Bracketed paste support
|
|
||||||
|
|
||||||
### In Progress
|
|
||||||
- [ ] Theming options and color customization
|
|
||||||
- [ ] Enhanced configuration UX (in-app settings)
|
|
||||||
- [ ] Chat history management (save/load/export)
|
|
||||||
|
|
||||||
### Planned
|
|
||||||
- [ ] Code Client Enhancement
|
|
||||||
- [ ] In-project code navigation
|
|
||||||
- [ ] Syntax highlighting for code blocks
|
|
||||||
- [ ] File tree browser integration
|
|
||||||
- [ ] Project-aware context management
|
|
||||||
- [ ] Code snippets and templates
|
|
||||||
- [ ] Additional LLM Providers
|
|
||||||
- [ ] OpenAI API support
|
|
||||||
- [ ] Anthropic Claude support
|
|
||||||
- [ ] Local model providers (llama.cpp, etc.)
|
|
||||||
- [ ] Advanced Features
|
|
||||||
- [ ] Conversation search and filtering
|
|
||||||
- [ ] Multi-session management
|
|
||||||
- [ ] Export conversations (Markdown, JSON)
|
|
||||||
- [ ] Custom keybindings
|
|
||||||
- [ ] Plugin system
|
|
||||||
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
Contributions are welcome! Here's how to get started:
|
|
||||||
|
|
||||||
1. Fork the repository
|
|
||||||
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
|
||||||
3. Make your changes and add tests
|
|
||||||
4. Run `cargo fmt` and `cargo clippy`
|
|
||||||
5. Commit your changes (`git commit -m 'Add amazing feature'`)
|
|
||||||
6. Push to the branch (`git push origin feature/amazing-feature`)
|
|
||||||
7. Open a Pull Request
|
|
||||||
|
|
||||||
Please open an issue first for significant changes to discuss the approach.
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
This project is licensed under the GNU Affero General Public License v3.0 (AGPL-3.0) - see the [LICENSE](LICENSE) file for details.
|
|
||||||
|
|
||||||
## Acknowledgments
|
|
||||||
|
|
||||||
Built with:
|
|
||||||
- [ratatui](https://ratatui.rs/) - Terminal UI framework
|
|
||||||
- [crossterm](https://github.com/crossterm-rs/crossterm) - Cross-platform terminal manipulation
|
|
||||||
- [tokio](https://tokio.rs/) - Async runtime
|
|
||||||
- [Ollama](https://ollama.com/) - Local LLM runtime
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Status**: Alpha v0.1.0 | **License**: AGPL-3.0 | **Made with Rust** 🦀
|
|
||||||
crates/app/cli/.gitignore (new file, vendored, 22 lines)
@@ -0,0 +1,22 @@
/target
### Rust template
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

### rust-analyzer template
# Can be generated by build systems other than cargo (ex: bazelbuild/rust_rules)
rust-project.json
crates/app/cli/Cargo.toml (new file, 28 lines)
@@ -0,0 +1,28 @@
[package]
name = "owlen"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
clap = { version = "4.5", features = ["derive"] }
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
color-eyre = "0.6"
llm-ollama = { path = "../../llm/ollama" }
tools-fs = { path = "../../tools/fs" }
tools-bash = { path = "../../tools/bash" }
tools-slash = { path = "../../tools/slash" }
config-agent = { package = "config-agent", path = "../../platform/config" }
permissions = { path = "../../platform/permissions" }
hooks = { path = "../../platform/hooks" }
futures-util = "0.3.31"

[dev-dependencies]
assert_cmd = "2.0"
predicates = "3.1"
httpmock = "0.7"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
tempfile = "3.23.0"
crates/app/cli/src/main.rs (new file, 580 lines)
@@ -0,0 +1,580 @@
|
|||||||
|
use clap::{Parser, ValueEnum};
|
||||||
|
use color_eyre::eyre::{Result, eyre};
|
||||||
|
use config_agent::load_settings;
|
||||||
|
use futures_util::TryStreamExt;
|
||||||
|
use hooks::{HookEvent, HookManager, HookResult};
|
||||||
|
use llm_ollama::{OllamaClient, OllamaOptions, types::ChatMessage};
|
||||||
|
use permissions::{PermissionDecision, Tool};
|
||||||
|
use serde::Serialize;
|
||||||
|
use std::io::{self, Write};
|
||||||
|
use std::time::{SystemTime, UNIX_EPOCH};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, ValueEnum)]
|
||||||
|
enum OutputFormat {
|
||||||
|
Text,
|
||||||
|
Json,
|
||||||
|
StreamJson,
|
||||||
|
}
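// Selected via `--output-format {text,json,stream-json}` (clap's ValueEnum
// kebab-cases the variants); `text` is the default.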
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct SessionOutput {
|
||||||
|
session_id: String,
|
||||||
|
messages: Vec<serde_json::Value>,
|
||||||
|
stats: Stats,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
result: Option<serde_json::Value>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
tool: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct Stats {
|
||||||
|
total_tokens: u64,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
prompt_tokens: Option<u64>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
completion_tokens: Option<u64>,
|
||||||
|
duration_ms: u64,
|
||||||
|
}
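// Note: in this CLI the token counts are rough estimates (~ characters / 4),
// not provider-reported usage; see the estimate in the chat handling below.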
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
struct StreamEvent {
|
||||||
|
#[serde(rename = "type")]
|
||||||
|
event_type: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
session_id: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
content: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
stats: Option<Stats>,
|
||||||
|
}
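// Illustrative stream-json output (NDJSON, one event per line; values are examples only):
//   {"type":"session_start","session_id":"session-1717171717171"}
//   {"type":"chunk","content":"Hel"}
//   {"type":"chunk","content":"lo"}
//   {"type":"session_end","stats":{"total_tokens":3,"duration_ms":42}}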
|
||||||
|
|
||||||
|
fn generate_session_id() -> String {
|
||||||
|
let timestamp = SystemTime::now()
|
||||||
|
.duration_since(UNIX_EPOCH)
|
||||||
|
.unwrap()
|
||||||
|
.as_millis();
|
||||||
|
format!("session-{}", timestamp)
|
||||||
|
}
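// generate_session_id() above yields e.g. "session-1717171717171"
// (milliseconds since the Unix epoch).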
|
||||||
|
|
||||||
|
fn output_tool_result(
|
||||||
|
format: OutputFormat,
|
||||||
|
tool: &str,
|
||||||
|
result: serde_json::Value,
|
||||||
|
session_id: &str,
|
||||||
|
) -> Result<()> {
|
||||||
|
match format {
|
||||||
|
OutputFormat::Text => {
|
||||||
|
// For text, just print the result as-is
|
||||||
|
if let Some(s) = result.as_str() {
|
||||||
|
println!("{}", s);
|
||||||
|
} else {
|
||||||
|
println!("{}", serde_json::to_string_pretty(&result)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
OutputFormat::Json => {
|
||||||
|
let output = SessionOutput {
|
||||||
|
session_id: session_id.to_string(),
|
||||||
|
messages: vec![],
|
||||||
|
stats: Stats {
|
||||||
|
total_tokens: 0,
|
||||||
|
prompt_tokens: None,
|
||||||
|
completion_tokens: None,
|
||||||
|
duration_ms: 0,
|
||||||
|
},
|
||||||
|
result: Some(result),
|
||||||
|
tool: Some(tool.to_string()),
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
|
}
|
||||||
|
OutputFormat::StreamJson => {
|
||||||
|
// For stream-json, emit session_start, result, and session_end
|
||||||
|
let session_start = StreamEvent {
|
||||||
|
event_type: "session_start".to_string(),
|
||||||
|
session_id: Some(session_id.to_string()),
|
||||||
|
content: None,
|
||||||
|
stats: None,
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&session_start)?);
|
||||||
|
|
||||||
|
let result_event = StreamEvent {
|
||||||
|
event_type: "tool_result".to_string(),
|
||||||
|
session_id: None,
|
||||||
|
content: Some(serde_json::to_string(&result)?),
|
||||||
|
stats: None,
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&result_event)?);
|
||||||
|
|
||||||
|
let session_end = StreamEvent {
|
||||||
|
event_type: "session_end".to_string(),
|
||||||
|
session_id: None,
|
||||||
|
content: None,
|
||||||
|
stats: Some(Stats {
|
||||||
|
total_tokens: 0,
|
||||||
|
prompt_tokens: None,
|
||||||
|
completion_tokens: None,
|
||||||
|
duration_ms: 0,
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&session_end)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(clap::Subcommand, Debug)]
|
||||||
|
enum Cmd {
|
||||||
|
Read { path: String },
|
||||||
|
Glob { pattern: String },
|
||||||
|
Grep { root: String, pattern: String },
|
||||||
|
Write { path: String, content: String },
|
||||||
|
Edit { path: String, old_string: String, new_string: String },
|
||||||
|
Bash { command: String, #[arg(long)] timeout: Option<u64> },
|
||||||
|
Slash { command_name: String, args: Vec<String> },
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Parser, Debug)]
|
||||||
|
#[command(name = "code", version)]
|
||||||
|
struct Args {
|
||||||
|
#[arg(long)]
|
||||||
|
ollama_url: Option<String>,
|
||||||
|
#[arg(long)]
|
||||||
|
model: Option<String>,
|
||||||
|
#[arg(long)]
|
||||||
|
api_key: Option<String>,
|
||||||
|
#[arg(long)]
|
||||||
|
print: bool,
|
||||||
|
/// Override the permission mode (plan, acceptEdits, code)
|
||||||
|
#[arg(long)]
|
||||||
|
mode: Option<String>,
|
||||||
|
/// Output format (text, json, stream-json)
|
||||||
|
#[arg(long, value_enum, default_value = "text")]
|
||||||
|
output_format: OutputFormat,
|
||||||
|
#[arg()]
|
||||||
|
prompt: Vec<String>,
|
||||||
|
#[command(subcommand)]
|
||||||
|
cmd: Option<Cmd>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
color_eyre::install()?;
|
||||||
|
let args = Args::parse();
|
||||||
|
let mut settings = load_settings(None).unwrap_or_default();
|
||||||
|
|
||||||
|
// Override mode if specified via CLI
|
||||||
|
if let Some(mode) = args.mode {
|
||||||
|
settings.mode = mode;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create permission manager from settings
|
||||||
|
let perms = settings.create_permission_manager();
|
||||||
|
|
||||||
|
// Create hook manager
|
||||||
|
let hook_mgr = HookManager::new(".");
|
||||||
|
|
||||||
|
// Generate session ID
|
||||||
|
let session_id = generate_session_id();
|
||||||
|
let output_format = args.output_format;
|
||||||
|
|
||||||
|
if let Some(cmd) = args.cmd {
|
||||||
|
match cmd {
|
||||||
|
Cmd::Read { path } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::Read, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Read".to_string(),
|
||||||
|
args: serde_json::json!({"path": &path}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Read operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let s = tools_fs::read_file(&path)?;
|
||||||
|
output_tool_result(output_format, "Read", serde_json::json!(s), &session_id)?;
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Read operation requires approval. Use --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Read operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Glob { pattern } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::Glob, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Glob".to_string(),
|
||||||
|
args: serde_json::json!({"pattern": &pattern}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Glob operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
for p in tools_fs::glob_list(&pattern)? {
|
||||||
|
println!("{}", p);
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Glob operation requires approval. Use --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Glob operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Grep { root, pattern } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::Grep, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Grep".to_string(),
|
||||||
|
args: serde_json::json!({"root": &root, "pattern": &pattern}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Grep operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (path, line_number, text) in tools_fs::grep(&root, &pattern)? {
|
||||||
|
println!("{path}:{line_number}:{text}")
|
||||||
|
}
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Grep operation requires approval. Use --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Grep operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Write { path, content } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::Write, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Write".to_string(),
|
||||||
|
args: serde_json::json!({"path": &path, "content": &content}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Write operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
tools_fs::write_file(&path, &content)?;
|
||||||
|
println!("File written: {}", path);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Write operation requires approval. Use --mode acceptEdits or --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Write operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Edit { path, old_string, new_string } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::Edit, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Edit".to_string(),
|
||||||
|
args: serde_json::json!({"path": &path, "old_string": &old_string, "new_string": &new_string}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Edit operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
tools_fs::edit_file(&path, &old_string, &new_string)?;
|
||||||
|
println!("File edited: {}", path);
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Edit operation requires approval. Use --mode acceptEdits or --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Edit operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Bash { command, timeout } => {
|
||||||
|
// Check permission with command context for pattern matching
|
||||||
|
match perms.check(Tool::Bash, Some(&command)) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "Bash".to_string(),
|
||||||
|
args: serde_json::json!({"command": &command, "timeout": timeout}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied Bash operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut session = tools_bash::BashSession::new().await?;
|
||||||
|
let output = session.execute(&command, timeout).await?;
|
||||||
|
|
||||||
|
// Print stdout
|
||||||
|
if !output.stdout.is_empty() {
|
||||||
|
print!("{}", output.stdout);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Print stderr to stderr
|
||||||
|
if !output.stderr.is_empty() {
|
||||||
|
eprint!("{}", output.stderr);
|
||||||
|
}
|
||||||
|
|
||||||
|
session.close().await?;
|
||||||
|
|
||||||
|
// Exit with same code as command
|
||||||
|
if !output.success {
|
||||||
|
std::process::exit(output.exit_code);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Bash operation requires approval. Use --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Bash operation is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Cmd::Slash { command_name, args } => {
|
||||||
|
// Check permission
|
||||||
|
match perms.check(Tool::SlashCommand, None) {
|
||||||
|
PermissionDecision::Allow => {
|
||||||
|
// Check PreToolUse hook
|
||||||
|
let event = HookEvent::PreToolUse {
|
||||||
|
tool: "SlashCommand".to_string(),
|
||||||
|
args: serde_json::json!({"command_name": &command_name, "args": &args}),
|
||||||
|
};
|
||||||
|
match hook_mgr.execute(&event, Some(5000)).await? {
|
||||||
|
HookResult::Deny => {
|
||||||
|
return Err(eyre!("Hook denied SlashCommand operation"));
|
||||||
|
}
|
||||||
|
HookResult::Allow => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look for command file in .owlen/commands/
|
||||||
|
let command_path = format!(".owlen/commands/{}.md", command_name);
|
||||||
|
|
||||||
|
// Read the command file
|
||||||
|
let content = match tools_fs::read_file(&command_path) {
|
||||||
|
Ok(c) => c,
|
||||||
|
Err(_) => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Slash command '{}' not found at {}",
|
||||||
|
command_name,
|
||||||
|
command_path
|
||||||
|
));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse with arguments
|
||||||
|
let args_refs: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
|
||||||
|
let slash_cmd = tools_slash::parse_slash_command(&content, &args_refs)?;
|
||||||
|
|
||||||
|
// Resolve file references
|
||||||
|
let resolved_body = slash_cmd.resolve_file_refs()?;
|
||||||
|
|
||||||
|
// Print the resolved command body
|
||||||
|
println!("{}", resolved_body);
|
||||||
|
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
PermissionDecision::Ask => {
|
||||||
|
return Err(eyre!(
|
||||||
|
"Permission denied: Slash command requires approval. Use --mode code to allow."
|
||||||
|
));
|
||||||
|
}
|
||||||
|
PermissionDecision::Deny => {
|
||||||
|
return Err(eyre!("Permission denied: Slash command is blocked."));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let prompt = if args.prompt.is_empty() {
|
||||||
|
"Say hello".to_string()
|
||||||
|
} else {
|
||||||
|
args.prompt.join(" ")
|
||||||
|
};
|
||||||
|
|
||||||
|
let model = args.model.unwrap_or(settings.model);
|
||||||
|
let api_key = args.api_key.or(settings.api_key);
|
||||||
|
|
||||||
|
// Use Ollama Cloud when model has "-cloud" suffix AND API key is set
|
||||||
|
let use_cloud = model.ends_with("-cloud") && api_key.is_some();
|
||||||
|
let client = if use_cloud {
|
||||||
|
OllamaClient::with_cloud().with_api_key(api_key.unwrap())
|
||||||
|
} else {
|
||||||
|
let base_url = args.ollama_url.unwrap_or(settings.ollama_url);
|
||||||
|
let mut client = OllamaClient::new(base_url);
|
||||||
|
if let Some(key) = api_key {
|
||||||
|
client = client.with_api_key(key);
|
||||||
|
}
|
||||||
|
client
|
||||||
|
};
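// Example (model names illustrative): `--model qwen2.5-cloud --api-key <key>`
// routes to Ollama Cloud, while `--model qwen2.5` talks to the configured
// local endpoint (or the one passed via `--ollama-url`).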
|
||||||
|
let opts = OllamaOptions {
|
||||||
|
model,
|
||||||
|
stream: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
let msgs = vec![ChatMessage {
|
||||||
|
role: "user".into(),
|
||||||
|
content: prompt.clone(),
|
||||||
|
}];
|
||||||
|
|
||||||
|
let start_time = SystemTime::now();
|
||||||
|
|
||||||
|
// Handle different output formats
|
||||||
|
match output_format {
|
||||||
|
OutputFormat::Text => {
|
||||||
|
// Text format: stream to stdout as before
|
||||||
|
let mut stream = client.chat_stream(&msgs, &opts).await?;
|
||||||
|
while let Some(chunk) = stream.try_next().await? {
|
||||||
|
if let Some(m) = chunk.message {
|
||||||
|
if let Some(c) = m.content {
|
||||||
|
print!("{c}");
|
||||||
|
io::stdout().flush()?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if matches!(chunk.done, Some(true)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
println!(); // Newline after response
|
||||||
|
}
|
||||||
|
OutputFormat::Json => {
|
||||||
|
// JSON format: collect all chunks, then output final JSON
|
||||||
|
let mut stream = client.chat_stream(&msgs, &opts).await?;
|
||||||
|
let mut response = String::new();
|
||||||
|
|
||||||
|
while let Some(chunk) = stream.try_next().await? {
|
||||||
|
if let Some(m) = chunk.message {
|
||||||
|
if let Some(c) = m.content {
|
||||||
|
response.push_str(&c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if matches!(chunk.done, Some(true)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let duration_ms = start_time.elapsed().unwrap().as_millis() as u64;
|
||||||
|
|
||||||
|
// Rough token estimate (tokens ~= chars / 4)
|
||||||
|
let estimated_tokens = ((prompt.len() + response.len()) / 4) as u64;
|
||||||
|
|
||||||
|
let output = SessionOutput {
|
||||||
|
session_id,
|
||||||
|
messages: vec![
|
||||||
|
serde_json::json!({"role": "user", "content": prompt}),
|
||||||
|
serde_json::json!({"role": "assistant", "content": response}),
|
||||||
|
],
|
||||||
|
stats: Stats {
|
||||||
|
total_tokens: estimated_tokens,
|
||||||
|
prompt_tokens: Some((prompt.len() / 4) as u64),
|
||||||
|
completion_tokens: Some((response.len() / 4) as u64),
|
||||||
|
duration_ms,
|
||||||
|
},
|
||||||
|
result: None,
|
||||||
|
tool: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
println!("{}", serde_json::to_string(&output)?);
|
||||||
|
}
|
||||||
|
OutputFormat::StreamJson => {
|
||||||
|
// Stream-JSON format: emit session_start, chunks, and session_end
|
||||||
|
let session_start = StreamEvent {
|
||||||
|
event_type: "session_start".to_string(),
|
||||||
|
session_id: Some(session_id.clone()),
|
||||||
|
content: None,
|
||||||
|
stats: None,
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&session_start)?);
|
||||||
|
|
||||||
|
let mut stream = client.chat_stream(&msgs, &opts).await?;
|
||||||
|
let mut response = String::new();
|
||||||
|
|
||||||
|
while let Some(chunk) = stream.try_next().await? {
|
||||||
|
if let Some(m) = chunk.message {
|
||||||
|
if let Some(c) = m.content {
|
||||||
|
response.push_str(&c);
|
||||||
|
let chunk_event = StreamEvent {
|
||||||
|
event_type: "chunk".to_string(),
|
||||||
|
session_id: None,
|
||||||
|
content: Some(c),
|
||||||
|
stats: None,
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&chunk_event)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if matches!(chunk.done, Some(true)) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let duration_ms = start_time.elapsed().unwrap().as_millis() as u64;
|
||||||
|
|
||||||
|
// Rough token estimate
|
||||||
|
let estimated_tokens = ((prompt.len() + response.len()) / 4) as u64;
|
||||||
|
|
||||||
|
let session_end = StreamEvent {
|
||||||
|
event_type: "session_end".to_string(),
|
||||||
|
session_id: None,
|
||||||
|
content: None,
|
||||||
|
stats: Some(Stats {
|
||||||
|
total_tokens: estimated_tokens,
|
||||||
|
prompt_tokens: Some((prompt.len() / 4) as u64),
|
||||||
|
completion_tokens: Some((response.len() / 4) as u64),
|
||||||
|
duration_ms,
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
println!("{}", serde_json::to_string(&session_end)?);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
crates/app/cli/tests/chat_stream.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
|
|||||||
|
use assert_cmd::Command;
|
||||||
|
use httpmock::prelude::*;
|
||||||
|
use predicates::prelude::PredicateBooleanExt;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn headless_streams_ndjson() {
|
||||||
|
let server = MockServer::start_async().await;
|
||||||
|
// Mock /api/chat with NDJSON lines
|
||||||
|
let body = serde_json::json!({
|
||||||
|
"model": "qwen2.5",
|
||||||
|
"messages": [{"role": "user", "content": "hello"}],
|
||||||
|
"stream": true
|
||||||
|
});
|
||||||
|
|
||||||
|
let response = concat!(
|
||||||
|
r#"{"message":{"role":"assistant","content":"Hel"}}"#,"\n",
|
||||||
|
r#"{"message":{"role":"assistant","content":"lo"}}"#,"\n",
|
||||||
|
r#"{"done":true}"#,"\n",
|
||||||
|
);
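// This mirrors Ollama's /api/chat streaming format: one JSON object per line,
// with a final line carrying "done": true (the content above is illustrative).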
|
||||||
|
|
||||||
|
let _m = server.mock(|when, then| {
|
||||||
|
when.method(POST)
|
||||||
|
.path("/api/chat")
|
||||||
|
.json_body(body.clone());
|
||||||
|
then.status(200)
|
||||||
|
.header("content-type", "application/x-ndjson")
|
||||||
|
.body(response);
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--ollama-url").arg(server.base_url())
|
||||||
|
.arg("--model").arg("qwen2.5")
|
||||||
|
.arg("--print")
|
||||||
|
.arg("hello");
|
||||||
|
|
||||||
|
cmd.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(predicates::str::contains("Hello").count(1).or(predicates::str::contains("Hel").and(predicates::str::contains("lo"))));
|
||||||
|
}
|
||||||
crates/app/cli/tests/headless.rs (new file, 145 lines)
@@ -0,0 +1,145 @@
|
|||||||
|
use assert_cmd::Command;
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::fs;
|
||||||
|
use tempfile::tempdir;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn print_json_has_session_id_and_stats() {
|
||||||
|
let mut cmd = Command::cargo_bin("owlen").unwrap();
|
||||||
|
cmd.arg("--output-format")
|
||||||
|
.arg("json")
|
||||||
|
.arg("Say hello");
|
||||||
|
|
||||||
|
let output = cmd.assert().success();
|
||||||
|
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
|
||||||
|
|
||||||
|
// Parse JSON output
|
||||||
|
let json: Value = serde_json::from_str(&stdout).expect("Output should be valid JSON");
|
||||||
|
|
||||||
|
// Verify session_id exists
|
||||||
|
assert!(json.get("session_id").is_some(), "JSON output should have session_id");
|
||||||
|
let session_id = json["session_id"].as_str().unwrap();
|
||||||
|
assert!(!session_id.is_empty(), "session_id should not be empty");
|
||||||
|
|
||||||
|
// Verify stats exist
|
||||||
|
assert!(json.get("stats").is_some(), "JSON output should have stats");
|
||||||
|
let stats = &json["stats"];
|
||||||
|
|
||||||
|
// Check for token counts
|
||||||
|
assert!(stats.get("total_tokens").is_some(), "stats should have total_tokens");
|
||||||
|
|
||||||
|
// Check for messages
|
||||||
|
assert!(json.get("messages").is_some(), "JSON output should have messages");
|
||||||
|
}
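// Illustrative shape of the --output-format json payload (values are examples only):
//   {"session_id":"session-...","messages":[{"role":"user","content":"Say hello"},
//    {"role":"assistant","content":"..."}],"stats":{"total_tokens":12,
//    "prompt_tokens":2,"completion_tokens":10,"duration_ms":345}}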
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn stream_json_sequence_is_well_formed() {
|
||||||
|
let mut cmd = Command::cargo_bin("owlen").unwrap();
|
||||||
|
cmd.arg("--output-format")
|
||||||
|
.arg("stream-json")
|
||||||
|
.arg("Say hello");
|
||||||
|
|
||||||
|
let output = cmd.assert().success();
|
||||||
|
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
|
||||||
|
|
||||||
|
// Stream-JSON is NDJSON - each line should be valid JSON
|
||||||
|
let lines: Vec<&str> = stdout.lines().filter(|l| !l.is_empty()).collect();
|
||||||
|
|
||||||
|
assert!(!lines.is_empty(), "Stream-JSON should produce at least one event");
|
||||||
|
|
||||||
|
// Each line should be valid JSON
|
||||||
|
for (i, line) in lines.iter().enumerate() {
|
||||||
|
let json: Value = serde_json::from_str(line)
|
||||||
|
.expect(&format!("Line {} should be valid JSON: {}", i, line));
|
||||||
|
|
||||||
|
// Each event should have a type
|
||||||
|
assert!(json.get("type").is_some(), "Event should have a type field");
|
||||||
|
}
|
||||||
|
|
||||||
|
// First event should be session_start
|
||||||
|
let first: Value = serde_json::from_str(lines[0]).unwrap();
|
||||||
|
assert_eq!(first["type"].as_str().unwrap(), "session_start");
|
||||||
|
assert!(first.get("session_id").is_some());
|
||||||
|
|
||||||
|
// Last event should be session_end or complete
|
||||||
|
let last: Value = serde_json::from_str(lines[lines.len() - 1]).unwrap();
|
||||||
|
let last_type = last["type"].as_str().unwrap();
|
||||||
|
assert!(
|
||||||
|
last_type == "session_end" || last_type == "complete",
|
||||||
|
"Last event should be session_end or complete, got: {}",
|
||||||
|
last_type
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn text_format_is_default() {
|
||||||
|
let mut cmd = Command::cargo_bin("owlen").unwrap();
|
||||||
|
cmd.arg("Say hello");
|
||||||
|
|
||||||
|
let output = cmd.assert().success();
|
||||||
|
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
|
||||||
|
|
||||||
|
// Text format should not be JSON
|
||||||
|
assert!(serde_json::from_str::<Value>(&stdout).is_err(),
|
||||||
|
"Default output should be text, not JSON");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn json_format_with_tool_execution() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
fs::write(&file, "hello world").unwrap();
|
||||||
|
|
||||||
|
let mut cmd = Command::cargo_bin("owlen").unwrap();
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("--output-format")
|
||||||
|
.arg("json")
|
||||||
|
.arg("read")
|
||||||
|
.arg(file.to_str().unwrap());
|
||||||
|
|
||||||
|
let output = cmd.assert().success();
|
||||||
|
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
|
||||||
|
|
||||||
|
let json: Value = serde_json::from_str(&stdout).expect("Output should be valid JSON");
|
||||||
|
|
||||||
|
// Should have result
|
||||||
|
assert!(json.get("result").is_some());
|
||||||
|
|
||||||
|
// Should have tool info
|
||||||
|
assert!(json.get("tool").is_some());
|
||||||
|
assert_eq!(json["tool"].as_str().unwrap(), "Read");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn stream_json_includes_chunk_events() {
|
||||||
|
let mut cmd = Command::cargo_bin("owlen").unwrap();
|
||||||
|
cmd.arg("--output-format")
|
||||||
|
.arg("stream-json")
|
||||||
|
.arg("Say hello");
|
||||||
|
|
||||||
|
let output = cmd.assert().success();
|
||||||
|
let stdout = String::from_utf8_lossy(&output.get_output().stdout);
|
||||||
|
|
||||||
|
let lines: Vec<&str> = stdout.lines().filter(|l| !l.is_empty()).collect();
|
||||||
|
|
||||||
|
// Should have chunk events between session_start and session_end
|
||||||
|
let chunk_events: Vec<&str> = lines.iter()
|
||||||
|
.filter(|line| {
|
||||||
|
if let Ok(json) = serde_json::from_str::<Value>(line) {
|
||||||
|
json["type"].as_str() == Some("chunk")
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.copied()
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
assert!(!chunk_events.is_empty(), "Should have at least one chunk event");
|
||||||
|
|
||||||
|
// Each chunk should have content
|
||||||
|
for chunk_line in chunk_events {
|
||||||
|
let chunk: Value = serde_json::from_str(chunk_line).unwrap();
|
||||||
|
assert!(chunk.get("content").is_some(), "Chunk should have content");
|
||||||
|
}
|
||||||
|
}
|
||||||
crates/app/cli/tests/permissions.rs (new file, 255 lines)
@@ -0,0 +1,255 @@
|
|||||||
|
use assert_cmd::Command;
|
||||||
|
use std::fs;
|
||||||
|
use tempfile::tempdir;
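// These tests cover the three permission modes exposed via --mode:
// `plan` (the default) allows read-only tools, `acceptEdits` additionally
// allows Write/Edit, and `code` allows everything including Bash and slash commands.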
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_allows_read_operations() {
|
||||||
|
// Create a temp file to read
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
fs::write(&file, "hello world").unwrap();
|
||||||
|
|
||||||
|
// Read operation should work in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("read").arg(file.to_str().unwrap());
|
||||||
|
cmd.assert().success().stdout("hello world\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_allows_glob_operations() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
fs::write(dir.path().join("a.txt"), "test").unwrap();
|
||||||
|
fs::write(dir.path().join("b.txt"), "test").unwrap();
|
||||||
|
|
||||||
|
let pattern = format!("{}/*.txt", dir.path().display());
|
||||||
|
|
||||||
|
// Glob operation should work in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("glob").arg(&pattern);
|
||||||
|
cmd.assert().success();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_allows_grep_operations() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
fs::write(dir.path().join("test.txt"), "hello world\nfoo bar").unwrap();
|
||||||
|
|
||||||
|
// Grep operation should work in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("grep").arg(dir.path().to_str().unwrap()).arg("hello");
|
||||||
|
cmd.assert().success();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn mode_override_via_cli_flag() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
fs::write(&file, "content").unwrap();
|
||||||
|
|
||||||
|
// Test with --mode code (should also allow read)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("read")
|
||||||
|
.arg(file.to_str().unwrap());
|
||||||
|
cmd.assert().success().stdout("content\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_blocks_write_operations() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("new.txt");
|
||||||
|
|
||||||
|
// Write operation should be blocked in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("write").arg(file.to_str().unwrap()).arg("content");
|
||||||
|
cmd.assert().failure();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_blocks_edit_operations() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
fs::write(&file, "old content").unwrap();
|
||||||
|
|
||||||
|
// Edit operation should be blocked in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("edit")
|
||||||
|
.arg(file.to_str().unwrap())
|
||||||
|
.arg("old")
|
||||||
|
.arg("new");
|
||||||
|
cmd.assert().failure();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn accept_edits_mode_allows_write() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("new.txt");
|
||||||
|
|
||||||
|
// Write operation should work in acceptEdits mode
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("acceptEdits")
|
||||||
|
.arg("write")
|
||||||
|
.arg(file.to_str().unwrap())
|
||||||
|
.arg("new content");
|
||||||
|
cmd.assert().success();
|
||||||
|
|
||||||
|
// Verify file was written
|
||||||
|
assert_eq!(fs::read_to_string(&file).unwrap(), "new content");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn accept_edits_mode_allows_edit() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
fs::write(&file, "line 1\nline 2\nline 3").unwrap();
|
||||||
|
|
||||||
|
// Edit operation should work in acceptEdits mode
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("acceptEdits")
|
||||||
|
.arg("edit")
|
||||||
|
.arg(file.to_str().unwrap())
|
||||||
|
.arg("line 2")
|
||||||
|
.arg("modified line");
|
||||||
|
cmd.assert().success();
|
||||||
|
|
||||||
|
// Verify file was edited
|
||||||
|
assert_eq!(
|
||||||
|
fs::read_to_string(&file).unwrap(),
|
||||||
|
"line 1\nmodified line\nline 3"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn code_mode_allows_all_operations() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let file = dir.path().join("test.txt");
|
||||||
|
|
||||||
|
// Write in code mode
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("write")
|
||||||
|
.arg(file.to_str().unwrap())
|
||||||
|
.arg("initial content");
|
||||||
|
cmd.assert().success();
|
||||||
|
|
||||||
|
// Edit in code mode
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("edit")
|
||||||
|
.arg(file.to_str().unwrap())
|
||||||
|
.arg("initial")
|
||||||
|
.arg("modified");
|
||||||
|
cmd.assert().success();
|
||||||
|
|
||||||
|
assert_eq!(fs::read_to_string(&file).unwrap(), "modified content");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn plan_mode_blocks_bash_operations() {
|
||||||
|
// Bash operation should be blocked in plan mode (default)
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("bash").arg("echo hello");
|
||||||
|
cmd.assert().failure();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn code_mode_allows_bash() {
|
||||||
|
// Bash operation should work in code mode
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode").arg("code").arg("bash").arg("echo hello");
|
||||||
|
cmd.assert().success().stdout("hello\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bash_command_timeout_works() {
|
||||||
|
// Test that timeout works
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("bash")
|
||||||
|
.arg("sleep 10")
|
||||||
|
.arg("--timeout")
|
||||||
|
.arg("1000");
|
||||||
|
cmd.assert().failure();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn slash_command_works() {
|
||||||
|
// Create .owlen/commands directory in temp dir
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let commands_dir = dir.path().join(".owlen/commands");
|
||||||
|
fs::create_dir_all(&commands_dir).unwrap();
|
||||||
|
|
||||||
|
// Create a test slash command
|
||||||
|
let command_content = r#"---
|
||||||
|
description: "Test command"
|
||||||
|
---
|
||||||
|
Hello from slash command!
|
||||||
|
Args: $ARGUMENTS
|
||||||
|
First: $1
|
||||||
|
"#;
|
||||||
|
let command_file = commands_dir.join("test.md");
|
||||||
|
fs::write(&command_file, command_content).unwrap();
|
||||||
|
|
||||||
|
// Execute slash command with args from the temp directory
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.current_dir(dir.path())
|
||||||
|
.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("slash")
|
||||||
|
.arg("test")
|
||||||
|
.arg("arg1");
|
||||||
|
|
||||||
|
cmd.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(predicates::str::contains("Hello from slash command!"))
|
||||||
|
.stdout(predicates::str::contains("Args: arg1"))
|
||||||
|
.stdout(predicates::str::contains("First: arg1"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn slash_command_file_refs() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let commands_dir = dir.path().join(".owlen/commands");
|
||||||
|
fs::create_dir_all(&commands_dir).unwrap();
|
||||||
|
|
||||||
|
// Create a file to reference
|
||||||
|
let data_file = dir.path().join("data.txt");
|
||||||
|
fs::write(&data_file, "Referenced content").unwrap();
|
||||||
|
|
||||||
|
// Create slash command with file reference
|
||||||
|
let command_content = format!("File content: @{}", data_file.display());
|
||||||
|
fs::write(commands_dir.join("reftest.md"), command_content).unwrap();
|
||||||
|
|
||||||
|
// Execute slash command
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.current_dir(dir.path())
|
||||||
|
.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("slash")
|
||||||
|
.arg("reftest");
|
||||||
|
|
||||||
|
cmd.assert()
|
||||||
|
.success()
|
||||||
|
.stdout(predicates::str::contains("Referenced content"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn slash_command_not_found() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
|
||||||
|
// Try to execute non-existent slash command
|
||||||
|
let mut cmd = Command::new(assert_cmd::cargo::cargo_bin!("owlen"));
|
||||||
|
cmd.current_dir(dir.path())
|
||||||
|
.arg("--mode")
|
||||||
|
.arg("code")
|
||||||
|
.arg("slash")
|
||||||
|
.arg("nonexistent");
|
||||||
|
|
||||||
|
cmd.assert().failure();
|
||||||
|
}
|
||||||
crates/integration/mcp-client/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "mcp-client"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.39", features = ["process", "io-util", "sync", "time"] }
color-eyre = "0.6"

[dev-dependencies]
tempfile = "3.23.0"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
crates/integration/mcp-client/src/lib.rs (new file, 272 lines)
@@ -0,0 +1,272 @@
|
|||||||
|
use color_eyre::eyre::{Result, eyre};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use serde_json::Value;
|
||||||
|
use std::process::Stdio;
|
||||||
|
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
|
||||||
|
use tokio::process::{Child, Command};
|
||||||
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
|
/// JSON-RPC 2.0 request
|
||||||
|
#[derive(Debug, Serialize)]
|
||||||
|
struct JsonRpcRequest {
|
||||||
|
jsonrpc: String,
|
||||||
|
id: u64,
|
||||||
|
method: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
params: Option<Value>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// JSON-RPC 2.0 response
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
struct JsonRpcResponse {
|
||||||
|
jsonrpc: String,
|
||||||
|
id: u64,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
result: Option<Value>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
error: Option<JsonRpcError>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
struct JsonRpcError {
|
||||||
|
code: i32,
|
||||||
|
message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP server capabilities
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||||
|
pub struct ServerCapabilities {
|
||||||
|
#[serde(default)]
|
||||||
|
pub tools: Option<ToolsCapability>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub resources: Option<ResourcesCapability>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||||
|
pub struct ToolsCapability {
|
||||||
|
#[serde(default)]
|
||||||
|
pub list_changed: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||||
|
pub struct ResourcesCapability {
|
||||||
|
#[serde(default)]
|
||||||
|
pub subscribe: Option<bool>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub list_changed: Option<bool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP Tool definition
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||||
|
pub struct McpTool {
|
||||||
|
pub name: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub description: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub input_schema: Option<Value>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP Resource definition
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize)]
|
||||||
|
pub struct McpResource {
|
||||||
|
pub uri: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub name: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub description: Option<String>,
|
||||||
|
#[serde(default)]
|
||||||
|
pub mime_type: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// MCP Client over stdio transport
|
||||||
|
pub struct McpClient {
|
||||||
|
process: Mutex<Child>,
|
||||||
|
next_id: Mutex<u64>,
|
||||||
|
server_name: String,
|
||||||
|
}
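// Typical usage (sketch only; the server command here is hypothetical):
//   let client = McpClient::spawn("some-mcp-server", &["--stdio"], "files").await?;
//   let caps = client.initialize().await?;
//   let tools = client.list_tools().await?;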
|
||||||
|
|
||||||
|
impl McpClient {
|
||||||
|
/// Create a new MCP client by spawning a subprocess
|
||||||
|
pub async fn spawn(command: &str, args: &[&str], server_name: &str) -> Result<Self> {
|
||||||
|
let mut child = Command::new(command)
|
||||||
|
.args(args)
|
||||||
|
.stdin(Stdio::piped())
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.stderr(Stdio::piped())
|
||||||
|
.spawn()?;
|
||||||
|
|
||||||
|
// Verify process is running
|
||||||
|
if child.try_wait()?.is_some() {
|
||||||
|
return Err(eyre!("MCP server process exited immediately"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
process: Mutex::new(child),
|
||||||
|
next_id: Mutex::new(1),
|
||||||
|
server_name: server_name.to_string(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Initialize the MCP connection
|
||||||
|
pub async fn initialize(&self) -> Result<ServerCapabilities> {
|
||||||
|
let params = serde_json::json!({
|
||||||
|
"protocolVersion": "2024-11-05",
|
||||||
|
"capabilities": {
|
||||||
|
"roots": {
|
||||||
|
"listChanged": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"clientInfo": {
|
||||||
|
"name": "owlen",
|
||||||
|
"version": env!("CARGO_PKG_VERSION")
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
let response = self.send_request("initialize", Some(params)).await?;
|
||||||
|
|
||||||
|
let capabilities = response
|
||||||
|
.get("capabilities")
|
||||||
|
.ok_or_else(|| eyre!("No capabilities in initialize response"))?;
|
||||||
|
|
||||||
|
Ok(serde_json::from_value(capabilities.clone())?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List available tools
|
||||||
|
pub async fn list_tools(&self) -> Result<Vec<McpTool>> {
|
||||||
|
let response = self.send_request("tools/list", None).await?;
|
||||||
|
|
||||||
|
let tools = response
|
||||||
|
.get("tools")
|
||||||
|
.ok_or_else(|| eyre!("No tools in response"))?;
|
||||||
|
|
||||||
|
Ok(serde_json::from_value(tools.clone())?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Call a tool
|
||||||
|
pub async fn call_tool(&self, name: &str, arguments: Value) -> Result<Value> {
|
||||||
|
let params = serde_json::json!({
|
||||||
|
"name": name,
|
||||||
|
"arguments": arguments
|
||||||
|
});
|
||||||
|
|
||||||
|
let response = self.send_request("tools/call", Some(params)).await?;
|
||||||
|
|
||||||
|
response
|
||||||
|
.get("content")
|
||||||
|
.cloned()
|
||||||
|
.ok_or_else(|| eyre!("No content in tool call response"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List available resources
|
||||||
|
pub async fn list_resources(&self) -> Result<Vec<McpResource>> {
|
||||||
|
let response = self.send_request("resources/list", None).await?;
|
||||||
|
|
||||||
|
let resources = response
|
||||||
|
.get("resources")
|
||||||
|
.ok_or_else(|| eyre!("No resources in response"))?;
|
||||||
|
|
||||||
|
Ok(serde_json::from_value(resources.clone())?)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a resource
|
||||||
|
pub async fn read_resource(&self, uri: &str) -> Result<Value> {
|
||||||
|
let params = serde_json::json!({
|
||||||
|
"uri": uri
|
||||||
|
});
|
||||||
|
|
||||||
|
let response = self.send_request("resources/read", Some(params)).await?;
|
||||||
|
|
||||||
|
response
|
||||||
|
.get("contents")
|
||||||
|
.cloned()
|
||||||
|
.ok_or_else(|| eyre!("No contents in resource read response"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the server name
|
||||||
|
pub fn server_name(&self) -> &str {
|
||||||
|
&self.server_name
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a JSON-RPC request and get the response
|
||||||
|
async fn send_request(&self, method: &str, params: Option<Value>) -> Result<Value> {
|
||||||
|
let mut next_id = self.next_id.lock().await;
|
||||||
|
let id = *next_id;
|
||||||
|
*next_id += 1;
|
||||||
|
drop(next_id);
|
||||||
|
|
||||||
|
let request = JsonRpcRequest {
|
||||||
|
jsonrpc: "2.0".to_string(),
|
||||||
|
id,
|
||||||
|
method: method.to_string(),
|
||||||
|
params,
|
||||||
|
};
|
||||||
|
|
||||||
|
let request_json = serde_json::to_string(&request)?;
|
||||||
|
|
||||||
|
let mut process = self.process.lock().await;
|
||||||
|
|
||||||
|
// Write request
|
||||||
|
let stdin = process.stdin.as_mut().ok_or_else(|| eyre!("No stdin"))?;
|
||||||
|
stdin.write_all(request_json.as_bytes()).await?;
|
||||||
|
stdin.write_all(b"\n").await?;
|
||||||
|
stdin.flush().await?;
|
||||||
|
|
||||||
|
// Read response
|
||||||
|
let stdout = process.stdout.take().ok_or_else(|| eyre!("No stdout"))?;
|
||||||
|
let mut reader = BufReader::new(stdout);
|
||||||
|
let mut response_line = String::new();
|
||||||
|
reader.read_line(&mut response_line).await?;
|
||||||
|
|
||||||
|
// Put stdout back
|
||||||
|
process.stdout = Some(reader.into_inner());
|
||||||
|
|
||||||
|
drop(process);
|
||||||
|
|
||||||
|
let response: JsonRpcResponse = serde_json::from_str(&response_line)?;
|
||||||
|
|
||||||
|
if response.id != id {
|
||||||
|
return Err(eyre!("Response ID mismatch: expected {}, got {}", id, response.id));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(error) = response.error {
|
||||||
|
return Err(eyre!("MCP error {}: {}", error.code, error.message));
|
||||||
|
}
|
||||||
|
|
||||||
|
response.result.ok_or_else(|| eyre!("No result in response"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Close the MCP connection
|
||||||
|
pub async fn close(self) -> Result<()> {
|
||||||
|
let mut process = self.process.into_inner();
|
||||||
|
|
||||||
|
// Close stdin to signal the server to exit
|
||||||
|
drop(process.stdin.take());
|
||||||
|
|
||||||
|
// Wait for process to exit (with timeout)
|
||||||
|
tokio::time::timeout(
|
||||||
|
std::time::Duration::from_secs(5),
|
||||||
|
process.wait()
|
||||||
|
).await??;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn jsonrpc_request_serializes() {
|
||||||
|
let req = JsonRpcRequest {
|
||||||
|
jsonrpc: "2.0".to_string(),
|
||||||
|
id: 1,
|
||||||
|
method: "test".to_string(),
|
||||||
|
params: Some(serde_json::json!({"key": "value"})),
|
||||||
|
};
|
||||||
|
|
||||||
|
let json = serde_json::to_string(&req).unwrap();
|
||||||
|
assert!(json.contains("\"method\":\"test\""));
|
||||||
|
assert!(json.contains("\"id\":1"));
|
||||||
|
}
|
||||||
|
}
|
||||||
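For orientation, a minimal usage sketch of the client above (not part of the diff); the `python3 server.py` command, the `demo-server` name, and the `echo` tool mirror the mock server used in the tests below and are placeholders, not a fixed API:

// Sketch: spawn a stdio MCP server, negotiate capabilities, call a tool, shut down.
use mcp_client::McpClient;

async fn demo() -> color_eyre::eyre::Result<()> {
    // "python3 server.py" and "demo-server" are placeholders for a real server command.
    let client = McpClient::spawn("python3", &["server.py"], "demo-server").await?;
    let caps = client.initialize().await?;
    if caps.tools.is_some() {
        for tool in client.list_tools().await? {
            println!("tool: {}", tool.name);
        }
        // "echo" is the hypothetical tool exposed by the mock server in the tests.
        let reply = client.call_tool("echo", serde_json::json!({"message": "hi"})).await?;
        println!("content: {reply}");
    }
    client.close().await
}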
crates/integration/mcp-client/tests/mcp.rs (new file, 347 lines)
@@ -0,0 +1,347 @@
|
|||||||
|
use mcp_client::McpClient;
|
||||||
|
use std::fs;
|
||||||
|
use tempfile::tempdir;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn mcp_server_capability_negotiation() {
|
||||||
|
// Create a mock MCP server script
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let server_script = dir.path().join("mock_server.py");
|
||||||
|
|
||||||
|
let script_content = r#"#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
|
||||||
|
def read_request():
|
||||||
|
line = sys.stdin.readline()
|
||||||
|
return json.loads(line)
|
||||||
|
|
||||||
|
def send_response(response):
|
||||||
|
sys.stdout.write(json.dumps(response) + '\n')
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
|
# Main loop
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
req = read_request()
|
||||||
|
method = req.get('method')
|
||||||
|
req_id = req.get('id')
|
||||||
|
|
||||||
|
if method == 'initialize':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'protocolVersion': '2024-11-05',
|
||||||
|
'capabilities': {
|
||||||
|
'tools': {'list_changed': True},
|
||||||
|
'resources': {'subscribe': False}
|
||||||
|
},
|
||||||
|
'serverInfo': {
|
||||||
|
'name': 'test-server',
|
||||||
|
'version': '1.0.0'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
elif method == 'tools/list':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'tools': []
|
||||||
|
}
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'error': {
|
||||||
|
'code': -32601,
|
||||||
|
'message': f'Method not found: {method}'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
except EOFError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
sys.stderr.write(f'Error: {e}\n')
|
||||||
|
break
|
||||||
|
"#;
|
||||||
|
|
||||||
|
fs::write(&server_script, script_content).unwrap();
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
fs::set_permissions(&server_script, std::fs::Permissions::from_mode(0o755)).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Connect to the server
|
||||||
|
let client = McpClient::spawn(
|
||||||
|
"python3",
|
||||||
|
&[server_script.to_str().unwrap()],
|
||||||
|
"test-server"
|
||||||
|
).await.unwrap();
|
||||||
|
|
||||||
|
// Initialize
|
||||||
|
let capabilities = client.initialize().await.unwrap();
|
||||||
|
|
||||||
|
// Verify capabilities
|
||||||
|
assert!(capabilities.tools.is_some());
|
||||||
|
assert_eq!(capabilities.tools.unwrap().list_changed, Some(true));
|
||||||
|
|
||||||
|
client.close().await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn mcp_tool_invocation() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let server_script = dir.path().join("mock_server.py");
|
||||||
|
|
||||||
|
let script_content = r#"#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
|
||||||
|
def read_request():
|
||||||
|
line = sys.stdin.readline()
|
||||||
|
return json.loads(line)
|
||||||
|
|
||||||
|
def send_response(response):
|
||||||
|
sys.stdout.write(json.dumps(response) + '\n')
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
req = read_request()
|
||||||
|
method = req.get('method')
|
||||||
|
req_id = req.get('id')
|
||||||
|
params = req.get('params', {})
|
||||||
|
|
||||||
|
if method == 'initialize':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'protocolVersion': '2024-11-05',
|
||||||
|
'capabilities': {
|
||||||
|
'tools': {}
|
||||||
|
},
|
||||||
|
'serverInfo': {
|
||||||
|
'name': 'test-server',
|
||||||
|
'version': '1.0.0'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
elif method == 'tools/list':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'tools': [
|
||||||
|
{
|
||||||
|
'name': 'echo',
|
||||||
|
'description': 'Echo the input',
|
||||||
|
'input_schema': {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'message': {'type': 'string'}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
elif method == 'tools/call':
|
||||||
|
tool_name = params.get('name')
|
||||||
|
arguments = params.get('arguments', {})
|
||||||
|
if tool_name == 'echo':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'content': [
|
||||||
|
{
|
||||||
|
'type': 'text',
|
||||||
|
'text': arguments.get('message', '')
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'error': {
|
||||||
|
'code': -32602,
|
||||||
|
'message': f'Unknown tool: {tool_name}'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'error': {
|
||||||
|
'code': -32601,
|
||||||
|
'message': f'Method not found: {method}'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
except EOFError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
sys.stderr.write(f'Error: {e}\n')
|
||||||
|
break
|
||||||
|
"#;
|
||||||
|
|
||||||
|
fs::write(&server_script, script_content).unwrap();
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
fs::set_permissions(&server_script, std::fs::Permissions::from_mode(0o755)).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let client = McpClient::spawn(
|
||||||
|
"python3",
|
||||||
|
&[server_script.to_str().unwrap()],
|
||||||
|
"test-server"
|
||||||
|
).await.unwrap();
|
||||||
|
|
||||||
|
client.initialize().await.unwrap();
|
||||||
|
|
||||||
|
// List tools
|
||||||
|
let tools = client.list_tools().await.unwrap();
|
||||||
|
assert_eq!(tools.len(), 1);
|
||||||
|
assert_eq!(tools[0].name, "echo");
|
||||||
|
|
||||||
|
// Call tool
|
||||||
|
let result = client.call_tool(
|
||||||
|
"echo",
|
||||||
|
serde_json::json!({"message": "Hello, MCP!"})
|
||||||
|
).await.unwrap();
|
||||||
|
|
||||||
|
// Verify result
|
||||||
|
let content = result.as_array().unwrap();
|
||||||
|
assert_eq!(content[0]["text"].as_str().unwrap(), "Hello, MCP!");
|
||||||
|
|
||||||
|
client.close().await.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn mcp_resource_reads() {
|
||||||
|
let dir = tempdir().unwrap();
|
||||||
|
let server_script = dir.path().join("mock_server.py");
|
||||||
|
|
||||||
|
let script_content = r#"#!/usr/bin/env python3
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
|
||||||
|
def read_request():
|
||||||
|
line = sys.stdin.readline()
|
||||||
|
return json.loads(line)
|
||||||
|
|
||||||
|
def send_response(response):
|
||||||
|
sys.stdout.write(json.dumps(response) + '\n')
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
req = read_request()
|
||||||
|
method = req.get('method')
|
||||||
|
req_id = req.get('id')
|
||||||
|
params = req.get('params', {})
|
||||||
|
|
||||||
|
if method == 'initialize':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'protocolVersion': '2024-11-05',
|
||||||
|
'capabilities': {
|
||||||
|
'resources': {}
|
||||||
|
},
|
||||||
|
'serverInfo': {
|
||||||
|
'name': 'test-server',
|
||||||
|
'version': '1.0.0'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
elif method == 'resources/list':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'resources': [
|
||||||
|
{
|
||||||
|
'uri': 'file:///test.txt',
|
||||||
|
'name': 'Test File',
|
||||||
|
'description': 'A test file',
|
||||||
|
'mime_type': 'text/plain'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
elif method == 'resources/read':
|
||||||
|
uri = params.get('uri')
|
||||||
|
if uri == 'file:///test.txt':
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'result': {
|
||||||
|
'contents': [
|
||||||
|
{
|
||||||
|
'uri': uri,
|
||||||
|
'mime_type': 'text/plain',
|
||||||
|
'text': 'Hello from resource!'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'error': {
|
||||||
|
'code': -32602,
|
||||||
|
'message': f'Unknown resource: {uri}'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
send_response({
|
||||||
|
'jsonrpc': '2.0',
|
||||||
|
'id': req_id,
|
||||||
|
'error': {
|
||||||
|
'code': -32601,
|
||||||
|
'message': f'Method not found: {method}'
|
||||||
|
}
|
||||||
|
})
|
||||||
|
except EOFError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
sys.stderr.write(f'Error: {e}\n')
|
||||||
|
break
|
||||||
|
"#;
|
||||||
|
|
||||||
|
fs::write(&server_script, script_content).unwrap();
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
fs::set_permissions(&server_script, std::fs::Permissions::from_mode(0o755)).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let client = McpClient::spawn(
|
||||||
|
"python3",
|
||||||
|
&[server_script.to_str().unwrap()],
|
||||||
|
"test-server"
|
||||||
|
).await.unwrap();
|
||||||
|
|
||||||
|
client.initialize().await.unwrap();
|
||||||
|
|
||||||
|
// List resources
|
||||||
|
let resources = client.list_resources().await.unwrap();
|
||||||
|
assert_eq!(resources.len(), 1);
|
||||||
|
assert_eq!(resources[0].uri, "file:///test.txt");
|
||||||
|
|
||||||
|
// Read resource
|
||||||
|
let contents = client.read_resource("file:///test.txt").await.unwrap();
|
||||||
|
let contents_array = contents.as_array().unwrap();
|
||||||
|
assert_eq!(contents_array[0]["text"].as_str().unwrap(), "Hello from resource!");
|
||||||
|
|
||||||
|
client.close().await.unwrap();
|
||||||
|
}
|
||||||
crates/llm/ollama/.gitignore (new file, vendored, 22 lines)
@@ -0,0 +1,22 @@
/target
### Rust template
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

### rust-analyzer template
# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
rust-project.json
crates/llm/ollama/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "llm-ollama"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
reqwest = { version = "0.12", features = ["json", "stream"] }
tokio = { version = "1.39", features = ["rt-multi-thread"] }
futures = "0.3"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "1"
bytes = "1"
tokio-stream = "0.1.17"
crates/llm/ollama/src/client.rs (new file, 98 lines)
@@ -0,0 +1,98 @@
use crate::types::{ChatMessage, ChatResponseChunk};
use futures::{Stream, TryStreamExt};
use reqwest::Client;
use serde::Serialize;
use thiserror::Error;

#[derive(Debug, Clone)]
pub struct OllamaClient {
    http: Client,
    base_url: String,        // e.g. "http://localhost:11434"
    api_key: Option<String>, // For Ollama Cloud authentication
}

#[derive(Debug, Clone, Default)]
pub struct OllamaOptions {
    pub model: String,
    pub stream: bool,
}

#[derive(Error, Debug)]
pub enum OllamaError {
    #[error("http: {0}")]
    Http(#[from] reqwest::Error),
    #[error("json: {0}")]
    Json(#[from] serde_json::Error),
    #[error("protocol: {0}")]
    Protocol(String),
}

impl OllamaClient {
    pub fn new(base_url: impl Into<String>) -> Self {
        Self {
            http: Client::new(),
            base_url: base_url.into().trim_end_matches('/').to_string(),
            api_key: None,
        }
    }

    pub fn with_api_key(mut self, api_key: impl Into<String>) -> Self {
        self.api_key = Some(api_key.into());
        self
    }

    pub fn with_cloud() -> Self {
        // Same API, different base
        Self::new("https://ollama.com")
    }

    pub async fn chat_stream(
        &self,
        messages: &[ChatMessage],
        opts: &OllamaOptions,
    ) -> Result<impl Stream<Item = Result<ChatResponseChunk, OllamaError>>, OllamaError> {
        #[derive(Serialize)]
        struct Body<'a> {
            model: &'a str,
            messages: &'a [ChatMessage],
            stream: bool,
        }
        let url = format!("{}/api/chat", self.base_url);
        let body = Body { model: &opts.model, messages, stream: true };
        let mut req = self.http.post(url).json(&body);

        // Add Authorization header if API key is present
        if let Some(ref key) = self.api_key {
            req = req.header("Authorization", format!("Bearer {}", key));
        }

        let resp = req.send().await?;
        let bytes_stream = resp.bytes_stream();

        // NDJSON parser: split by '\n', parse each as JSON and stream the results
        let out = bytes_stream
            .map_err(OllamaError::Http)
            .map_ok(|bytes| {
                // Convert the chunk to a UTF-8 string and own it
                let txt = String::from_utf8_lossy(&bytes).into_owned();
                // Parse each non-empty line into a ChatResponseChunk
                let results: Vec<Result<ChatResponseChunk, OllamaError>> = txt
                    .lines()
                    .filter_map(|line| {
                        let trimmed = line.trim();
                        if trimmed.is_empty() {
                            None
                        } else {
                            Some(
                                serde_json::from_str::<ChatResponseChunk>(trimmed)
                                    .map_err(OllamaError::Json),
                            )
                        }
                    })
                    .collect();
                futures::stream::iter(results)
            })
            .try_flatten(); // Stream<Item = Result<ChatResponseChunk, OllamaError>>
        Ok(out)
    }
}
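For orientation, a minimal consumption sketch for `chat_stream` (not part of the diff); the base URL matches the crate's default and the `qwen2.5` model mirrors the smoke test below, both assumptions about the local setup:

// Sketch: stream a chat completion from a local Ollama instance and print it as it arrives.
use futures::StreamExt;
use llm_ollama::{ChatMessage, OllamaClient, OllamaOptions};

async fn demo() -> Result<(), Box<dyn std::error::Error>> {
    let client = OllamaClient::new("http://localhost:11434");
    let messages = vec![ChatMessage { role: "user".into(), content: "Hello".into() }];
    // "qwen2.5" is an assumed locally pulled model, as in the smoke test.
    let opts = OllamaOptions { model: "qwen2.5".into(), stream: true };

    let stream = client.chat_stream(&messages, &opts).await?;
    futures::pin_mut!(stream);
    while let Some(chunk) = stream.next().await {
        let chunk = chunk?;
        if let Some(msg) = chunk.message {
            if let Some(text) = msg.content {
                print!("{text}");
            }
        }
        if chunk.done.unwrap_or(false) {
            break;
        }
    }
    Ok(())
}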
crates/llm/ollama/src/lib.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
pub mod client;
pub mod types;

pub use client::{OllamaClient, OllamaOptions};
pub use types::{ChatMessage, ChatResponseChunk};
crates/llm/ollama/src/types.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatMessage {
    pub role: String, // "user" | "assistant" | "system"
    pub content: String,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ChatResponseChunk {
    pub model: Option<String>,
    pub created_at: Option<String>,
    pub message: Option<ChunkMessage>,
    pub done: Option<bool>,
    pub total_duration: Option<u64>,
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct ChunkMessage {
    pub role: Option<String>,
    pub content: Option<String>,
}
crates/llm/ollama/tests/ndjson.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
use llm_ollama::{OllamaClient, OllamaOptions};

// Spinning up a tiny local NDJSON server is overkill for M0, and mocking reqwest
// to exercise the line parser in isolation is complex. For now we smoke-test that
// the client types construct, and leave end-to-end coverage to the CLI tests.

#[tokio::test]
async fn client_compiles_smoke() {
    let _ = OllamaClient::new("http://localhost:11434");
    let _ = OllamaClient::with_cloud();
    let _ = OllamaOptions { model: "qwen2.5".into(), stream: true };
}
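Since the smoke test above deliberately skips parser coverage, one way to exercise the NDJSON chunk shape without a server (a sketch, not part of the diff; the sample lines are hand-written assumptions about Ollama's /api/chat output) is to deserialize lines straight into `ChatResponseChunk`:

// Sketch: feed two hand-written NDJSON lines through serde and check the chunk fields.
#[test]
fn chunk_lines_deserialize() {
    use llm_ollama::ChatResponseChunk;
    // Assumed shape: one content chunk followed by a final "done" chunk.
    let ndjson = r#"{"model":"qwen2.5","message":{"role":"assistant","content":"Hi"},"done":false}
{"model":"qwen2.5","done":true,"total_duration":12345}"#;
    let chunks: Vec<ChatResponseChunk> = ndjson
        .lines()
        .map(|l| serde_json::from_str(l).unwrap())
        .collect();
    assert_eq!(chunks[0].message.as_ref().unwrap().content.as_deref(), Some("Hi"));
    assert_eq!(chunks[1].done, Some(true));
}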
@@ -1,45 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "owlen-cli"
|
|
||||||
version.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
description = "Command-line interface for OWLEN LLM client"
|
|
||||||
|
|
||||||
[features]
|
|
||||||
default = ["chat-client"]
|
|
||||||
chat-client = []
|
|
||||||
code-client = []
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "owlen"
|
|
||||||
path = "src/main.rs"
|
|
||||||
required-features = ["chat-client"]
|
|
||||||
|
|
||||||
[[bin]]
|
|
||||||
name = "owlen-code"
|
|
||||||
path = "src/code_main.rs"
|
|
||||||
required-features = ["code-client"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
owlen-core = { path = "../owlen-core" }
|
|
||||||
owlen-tui = { path = "../owlen-tui" }
|
|
||||||
owlen-ollama = { path = "../owlen-ollama" }
|
|
||||||
|
|
||||||
# CLI framework
|
|
||||||
clap = { version = "4.0", features = ["derive"] }
|
|
||||||
|
|
||||||
# Async runtime
|
|
||||||
tokio = { workspace = true }
|
|
||||||
tokio-util = { workspace = true }
|
|
||||||
|
|
||||||
# TUI framework
|
|
||||||
ratatui = { workspace = true }
|
|
||||||
crossterm = { workspace = true }
|
|
||||||
|
|
||||||
# Utilities
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
serde = { workspace = true }
|
|
||||||
serde_json = { workspace = true }
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
//! OWLEN Code Mode - TUI client optimized for coding assistance
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use clap::{Arg, Command};
|
|
||||||
use owlen_core::session::SessionController;
|
|
||||||
use owlen_ollama::OllamaProvider;
|
|
||||||
use owlen_tui::{config, ui, AppState, CodeApp, Event, EventHandler, SessionEvent};
|
|
||||||
use std::io;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::mpsc;
|
|
||||||
use tokio_util::sync::CancellationToken;
|
|
||||||
|
|
||||||
use crossterm::{
|
|
||||||
event::{DisableMouseCapture, EnableMouseCapture},
|
|
||||||
execute,
|
|
||||||
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
|
|
||||||
};
|
|
||||||
use ratatui::{backend::CrosstermBackend, Terminal};
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<()> {
|
|
||||||
let matches = Command::new("owlen-code")
|
|
||||||
.about("OWLEN Code Mode - TUI optimized for programming assistance")
|
|
||||||
.version(env!("CARGO_PKG_VERSION"))
|
|
||||||
.arg(
|
|
||||||
Arg::new("model")
|
|
||||||
.short('m')
|
|
||||||
.long("model")
|
|
||||||
.value_name("MODEL")
|
|
||||||
.help("Preferred model to use for this session"),
|
|
||||||
)
|
|
||||||
.get_matches();
|
|
||||||
|
|
||||||
let mut config = config::try_load_config().unwrap_or_default();
|
|
||||||
|
|
||||||
if let Some(model) = matches.get_one::<String>("model") {
|
|
||||||
config.general.default_model = Some(model.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
let provider_cfg = config::ensure_ollama_config(&mut config).clone();
|
|
||||||
let provider = Arc::new(OllamaProvider::from_config(
|
|
||||||
&provider_cfg,
|
|
||||||
Some(&config.general),
|
|
||||||
)?);
|
|
||||||
|
|
||||||
let controller = SessionController::new(provider, config.clone());
|
|
||||||
let (mut app, mut session_rx) = CodeApp::new(controller);
|
|
||||||
app.inner_mut().initialize_models().await?;
|
|
||||||
|
|
||||||
let cancellation_token = CancellationToken::new();
|
|
||||||
let (event_tx, event_rx) = mpsc::unbounded_channel();
|
|
||||||
let event_handler = EventHandler::new(event_tx, cancellation_token.clone());
|
|
||||||
let event_handle = tokio::spawn(async move { event_handler.run().await });
|
|
||||||
|
|
||||||
enable_raw_mode()?;
|
|
||||||
let mut stdout = io::stdout();
|
|
||||||
execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
|
|
||||||
let backend = CrosstermBackend::new(stdout);
|
|
||||||
let mut terminal = Terminal::new(backend)?;
|
|
||||||
|
|
||||||
let result = run_app(&mut terminal, &mut app, event_rx, &mut session_rx).await;
|
|
||||||
|
|
||||||
cancellation_token.cancel();
|
|
||||||
event_handle.await?;
|
|
||||||
|
|
||||||
config::save_config(app.inner().config())?;
|
|
||||||
|
|
||||||
disable_raw_mode()?;
|
|
||||||
execute!(
|
|
||||||
terminal.backend_mut(),
|
|
||||||
LeaveAlternateScreen,
|
|
||||||
DisableMouseCapture
|
|
||||||
)?;
|
|
||||||
terminal.show_cursor()?;
|
|
||||||
|
|
||||||
if let Err(err) = result {
|
|
||||||
println!("{err:?}");
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn run_app(
|
|
||||||
terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
|
|
||||||
app: &mut CodeApp,
|
|
||||||
mut event_rx: mpsc::UnboundedReceiver<Event>,
|
|
||||||
session_rx: &mut mpsc::UnboundedReceiver<SessionEvent>,
|
|
||||||
) -> Result<()> {
|
|
||||||
loop {
|
|
||||||
terminal.draw(|f| ui::render_chat(f, app.inner_mut()))?;
|
|
||||||
|
|
||||||
tokio::select! {
|
|
||||||
Some(event) = event_rx.recv() => {
|
|
||||||
if let AppState::Quit = app.handle_event(event).await? {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(session_event) = session_rx.recv() => {
|
|
||||||
app.handle_session_event(session_event)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,124 +0,0 @@
|
|||||||
//! OWLEN CLI - Chat TUI client
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use clap::{Arg, Command};
|
|
||||||
use owlen_core::session::SessionController;
|
|
||||||
use owlen_ollama::OllamaProvider;
|
|
||||||
use owlen_tui::{config, ui, AppState, ChatApp, Event, EventHandler, SessionEvent};
|
|
||||||
use std::io;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use tokio::sync::mpsc;
|
|
||||||
use tokio_util::sync::CancellationToken;
|
|
||||||
|
|
||||||
use crossterm::{
|
|
||||||
event::{DisableBracketedPaste, DisableMouseCapture, EnableBracketedPaste, EnableMouseCapture},
|
|
||||||
execute,
|
|
||||||
terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
|
|
||||||
};
|
|
||||||
use ratatui::{backend::CrosstermBackend, Terminal};
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() -> Result<()> {
|
|
||||||
let matches = Command::new("owlen")
|
|
||||||
.about("OWLEN - A chat-focused TUI client for Ollama")
|
|
||||||
.version(env!("CARGO_PKG_VERSION"))
|
|
||||||
.arg(
|
|
||||||
Arg::new("model")
|
|
||||||
.short('m')
|
|
||||||
.long("model")
|
|
||||||
.value_name("MODEL")
|
|
||||||
.help("Preferred model to use for this session"),
|
|
||||||
)
|
|
||||||
.get_matches();
|
|
||||||
|
|
||||||
let mut config = config::try_load_config().unwrap_or_default();
|
|
||||||
|
|
||||||
if let Some(model) = matches.get_one::<String>("model") {
|
|
||||||
config.general.default_model = Some(model.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Prepare provider from configuration
|
|
||||||
let provider_cfg = config::ensure_ollama_config(&mut config).clone();
|
|
||||||
let provider = Arc::new(OllamaProvider::from_config(
|
|
||||||
&provider_cfg,
|
|
||||||
Some(&config.general),
|
|
||||||
)?);
|
|
||||||
|
|
||||||
let controller = SessionController::new(provider, config.clone());
|
|
||||||
let (mut app, mut session_rx) = ChatApp::new(controller);
|
|
||||||
app.initialize_models().await?;
|
|
||||||
|
|
||||||
// Event infrastructure
|
|
||||||
let cancellation_token = CancellationToken::new();
|
|
||||||
let (event_tx, event_rx) = mpsc::unbounded_channel();
|
|
||||||
let event_handler = EventHandler::new(event_tx, cancellation_token.clone());
|
|
||||||
let event_handle = tokio::spawn(async move { event_handler.run().await });
|
|
||||||
|
|
||||||
// Terminal setup
|
|
||||||
enable_raw_mode()?;
|
|
||||||
let mut stdout = io::stdout();
|
|
||||||
execute!(
|
|
||||||
stdout,
|
|
||||||
EnterAlternateScreen,
|
|
||||||
EnableMouseCapture,
|
|
||||||
EnableBracketedPaste
|
|
||||||
)?;
|
|
||||||
let backend = CrosstermBackend::new(stdout);
|
|
||||||
let mut terminal = Terminal::new(backend)?;
|
|
||||||
|
|
||||||
let result = run_app(&mut terminal, &mut app, event_rx, &mut session_rx).await;
|
|
||||||
|
|
||||||
// Shutdown
|
|
||||||
cancellation_token.cancel();
|
|
||||||
event_handle.await?;
|
|
||||||
|
|
||||||
// Persist configuration updates (e.g., selected model)
|
|
||||||
config::save_config(app.config())?;
|
|
||||||
|
|
||||||
disable_raw_mode()?;
|
|
||||||
execute!(
|
|
||||||
terminal.backend_mut(),
|
|
||||||
LeaveAlternateScreen,
|
|
||||||
DisableMouseCapture,
|
|
||||||
DisableBracketedPaste
|
|
||||||
)?;
|
|
||||||
terminal.show_cursor()?;
|
|
||||||
|
|
||||||
if let Err(err) = result {
|
|
||||||
println!("{err:?}");
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn run_app(
|
|
||||||
terminal: &mut Terminal<CrosstermBackend<io::Stdout>>,
|
|
||||||
app: &mut ChatApp,
|
|
||||||
mut event_rx: mpsc::UnboundedReceiver<Event>,
|
|
||||||
session_rx: &mut mpsc::UnboundedReceiver<SessionEvent>,
|
|
||||||
) -> Result<()> {
|
|
||||||
loop {
|
|
||||||
// Advance loading animation frame
|
|
||||||
app.advance_loading_animation();
|
|
||||||
|
|
||||||
terminal.draw(|f| ui::render_chat(f, app))?;
|
|
||||||
|
|
||||||
// Process any pending LLM requests AFTER UI has been drawn
|
|
||||||
app.process_pending_llm_request().await?;
|
|
||||||
|
|
||||||
tokio::select! {
|
|
||||||
Some(event) = event_rx.recv() => {
|
|
||||||
if let AppState::Quit = app.handle_event(event).await? {
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(session_event) = session_rx.recv() => {
|
|
||||||
app.handle_session_event(session_event)?;
|
|
||||||
}
|
|
||||||
// Add a timeout to keep the animation going even when there are no events
|
|
||||||
_ = tokio::time::sleep(tokio::time::Duration::from_millis(100)) => {
|
|
||||||
// This will cause the loop to continue and advance the animation
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "owlen-core"
|
|
||||||
version.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
repository.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
description = "Core traits and types for OWLEN LLM client"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
anyhow = "1.0.75"
|
|
||||||
log = "0.4.20"
|
|
||||||
serde = { version = "1.0.188", features = ["derive"] }
|
|
||||||
serde_json = "1.0.105"
|
|
||||||
thiserror = "1.0.48"
|
|
||||||
tokio = { version = "1.32.0", features = ["full"] }
|
|
||||||
unicode-segmentation = "1.11"
|
|
||||||
unicode-width = "0.1"
|
|
||||||
uuid = { version = "1.4.1", features = ["v4", "serde"] }
|
|
||||||
textwrap = "0.16.0"
|
|
||||||
futures = "0.3.28"
|
|
||||||
async-trait = "0.1.73"
|
|
||||||
toml = "0.8.0"
|
|
||||||
shellexpand = "3.1.0"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio-test = { workspace = true }
|
|
||||||
@@ -1,342 +0,0 @@
|
|||||||
use crate::provider::ProviderConfig;
|
|
||||||
use crate::Result;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
/// Default location for the OWLEN configuration file
|
|
||||||
pub const DEFAULT_CONFIG_PATH: &str = "~/.config/owlen/config.toml";
|
|
||||||
|
|
||||||
/// Core configuration shared by all OWLEN clients
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct Config {
|
|
||||||
/// General application settings
|
|
||||||
pub general: GeneralSettings,
|
|
||||||
/// Provider specific configuration keyed by provider name
|
|
||||||
#[serde(default)]
|
|
||||||
pub providers: HashMap<String, ProviderConfig>,
|
|
||||||
/// UI preferences that frontends can opt into
|
|
||||||
#[serde(default)]
|
|
||||||
pub ui: UiSettings,
|
|
||||||
/// Storage related options
|
|
||||||
#[serde(default)]
|
|
||||||
pub storage: StorageSettings,
|
|
||||||
/// Input handling preferences
|
|
||||||
#[serde(default)]
|
|
||||||
pub input: InputSettings,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Config {
|
|
||||||
fn default() -> Self {
|
|
||||||
let mut providers = HashMap::new();
|
|
||||||
providers.insert(
|
|
||||||
"ollama".to_string(),
|
|
||||||
ProviderConfig {
|
|
||||||
provider_type: "ollama".to_string(),
|
|
||||||
base_url: Some("http://localhost:11434".to_string()),
|
|
||||||
api_key: None,
|
|
||||||
extra: HashMap::new(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
Self {
|
|
||||||
general: GeneralSettings::default(),
|
|
||||||
providers,
|
|
||||||
ui: UiSettings::default(),
|
|
||||||
storage: StorageSettings::default(),
|
|
||||||
input: InputSettings::default(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Config {
|
|
||||||
/// Load configuration from disk, falling back to defaults when missing
|
|
||||||
pub fn load(path: Option<&Path>) -> Result<Self> {
|
|
||||||
let path = match path {
|
|
||||||
Some(path) => path.to_path_buf(),
|
|
||||||
None => default_config_path(),
|
|
||||||
};
|
|
||||||
|
|
||||||
if path.exists() {
|
|
||||||
let content = fs::read_to_string(&path)?;
|
|
||||||
let mut config: Config =
|
|
||||||
toml::from_str(&content).map_err(|e| crate::Error::Config(e.to_string()))?;
|
|
||||||
config.ensure_defaults();
|
|
||||||
Ok(config)
|
|
||||||
} else {
|
|
||||||
Ok(Config::default())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Persist configuration to disk
|
|
||||||
pub fn save(&self, path: Option<&Path>) -> Result<()> {
|
|
||||||
let path = match path {
|
|
||||||
Some(path) => path.to_path_buf(),
|
|
||||||
None => default_config_path(),
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(dir) = path.parent() {
|
|
||||||
fs::create_dir_all(dir)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let content =
|
|
||||||
toml::to_string_pretty(self).map_err(|e| crate::Error::Config(e.to_string()))?;
|
|
||||||
fs::write(path, content)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get provider configuration by provider name
|
|
||||||
pub fn provider(&self, name: &str) -> Option<&ProviderConfig> {
|
|
||||||
self.providers.get(name)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Update or insert a provider configuration
|
|
||||||
pub fn upsert_provider(&mut self, name: impl Into<String>, config: ProviderConfig) {
|
|
||||||
self.providers.insert(name.into(), config);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolve default model in order of priority: explicit default, first cached model, provider fallback
|
|
||||||
pub fn resolve_default_model<'a>(
|
|
||||||
&'a self,
|
|
||||||
models: &'a [crate::types::ModelInfo],
|
|
||||||
) -> Option<&'a str> {
|
|
||||||
if let Some(model) = self.general.default_model.as_deref() {
|
|
||||||
if models.iter().any(|m| m.id == model || m.name == model) {
|
|
||||||
return Some(model);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(first) = models.first() {
|
|
||||||
return Some(&first.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.general.default_model.as_deref()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ensure_defaults(&mut self) {
|
|
||||||
if self.general.default_provider.is_empty() {
|
|
||||||
self.general.default_provider = "ollama".to_string();
|
|
||||||
}
|
|
||||||
|
|
||||||
if !self.providers.contains_key("ollama") {
|
|
||||||
self.providers.insert(
|
|
||||||
"ollama".to_string(),
|
|
||||||
ProviderConfig {
|
|
||||||
provider_type: "ollama".to_string(),
|
|
||||||
base_url: Some("http://localhost:11434".to_string()),
|
|
||||||
api_key: None,
|
|
||||||
extra: HashMap::new(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Default configuration path with user home expansion
|
|
||||||
pub fn default_config_path() -> PathBuf {
|
|
||||||
PathBuf::from(shellexpand::tilde(DEFAULT_CONFIG_PATH).as_ref())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// General behaviour settings shared across clients
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct GeneralSettings {
|
|
||||||
/// Default provider name for routing
|
|
||||||
pub default_provider: String,
|
|
||||||
/// Optional default model id
|
|
||||||
#[serde(default)]
|
|
||||||
pub default_model: Option<String>,
|
|
||||||
/// Whether streaming responses are preferred
|
|
||||||
#[serde(default = "GeneralSettings::default_streaming")]
|
|
||||||
pub enable_streaming: bool,
|
|
||||||
/// Optional path to a project context file automatically injected as system prompt
|
|
||||||
#[serde(default)]
|
|
||||||
pub project_context_file: Option<String>,
|
|
||||||
/// TTL for cached model listings in seconds
|
|
||||||
#[serde(default = "GeneralSettings::default_model_cache_ttl")]
|
|
||||||
pub model_cache_ttl_secs: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl GeneralSettings {
|
|
||||||
fn default_streaming() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_model_cache_ttl() -> u64 {
|
|
||||||
60
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Duration representation of model cache TTL
|
|
||||||
pub fn model_cache_ttl(&self) -> Duration {
|
|
||||||
Duration::from_secs(self.model_cache_ttl_secs.max(5))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for GeneralSettings {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
default_provider: "ollama".to_string(),
|
|
||||||
default_model: Some("llama3.2:latest".to_string()),
|
|
||||||
enable_streaming: Self::default_streaming(),
|
|
||||||
project_context_file: Some("OWLEN.md".to_string()),
|
|
||||||
model_cache_ttl_secs: Self::default_model_cache_ttl(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// UI preferences that consumers can respect as needed
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct UiSettings {
|
|
||||||
#[serde(default = "UiSettings::default_theme")]
|
|
||||||
pub theme: String,
|
|
||||||
#[serde(default = "UiSettings::default_word_wrap")]
|
|
||||||
pub word_wrap: bool,
|
|
||||||
#[serde(default = "UiSettings::default_max_history_lines")]
|
|
||||||
pub max_history_lines: usize,
|
|
||||||
#[serde(default = "UiSettings::default_show_role_labels")]
|
|
||||||
pub show_role_labels: bool,
|
|
||||||
#[serde(default = "UiSettings::default_wrap_column")]
|
|
||||||
pub wrap_column: u16,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UiSettings {
|
|
||||||
fn default_theme() -> String {
|
|
||||||
"default".to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_word_wrap() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_max_history_lines() -> usize {
|
|
||||||
2000
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_show_role_labels() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_wrap_column() -> u16 {
|
|
||||||
100
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for UiSettings {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
theme: Self::default_theme(),
|
|
||||||
word_wrap: Self::default_word_wrap(),
|
|
||||||
max_history_lines: Self::default_max_history_lines(),
|
|
||||||
show_role_labels: Self::default_show_role_labels(),
|
|
||||||
wrap_column: Self::default_wrap_column(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Storage related preferences
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct StorageSettings {
|
|
||||||
#[serde(default = "StorageSettings::default_conversation_dir")]
|
|
||||||
pub conversation_dir: String,
|
|
||||||
#[serde(default = "StorageSettings::default_auto_save")]
|
|
||||||
pub auto_save_sessions: bool,
|
|
||||||
#[serde(default = "StorageSettings::default_max_sessions")]
|
|
||||||
pub max_saved_sessions: usize,
|
|
||||||
#[serde(default = "StorageSettings::default_session_timeout")]
|
|
||||||
pub session_timeout_minutes: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StorageSettings {
|
|
||||||
fn default_conversation_dir() -> String {
|
|
||||||
"~/.local/share/owlen/conversations".to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_auto_save() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_max_sessions() -> usize {
|
|
||||||
25
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_session_timeout() -> u64 {
|
|
||||||
120
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolve storage directory path
|
|
||||||
pub fn conversation_path(&self) -> PathBuf {
|
|
||||||
PathBuf::from(shellexpand::tilde(&self.conversation_dir).as_ref())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for StorageSettings {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
conversation_dir: Self::default_conversation_dir(),
|
|
||||||
auto_save_sessions: Self::default_auto_save(),
|
|
||||||
max_saved_sessions: Self::default_max_sessions(),
|
|
||||||
session_timeout_minutes: Self::default_session_timeout(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Input handling preferences shared across clients
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
||||||
pub struct InputSettings {
|
|
||||||
#[serde(default = "InputSettings::default_multiline")]
|
|
||||||
pub multiline: bool,
|
|
||||||
#[serde(default = "InputSettings::default_history_size")]
|
|
||||||
pub history_size: usize,
|
|
||||||
#[serde(default = "InputSettings::default_tab_width")]
|
|
||||||
pub tab_width: u8,
|
|
||||||
#[serde(default = "InputSettings::default_confirm_send")]
|
|
||||||
pub confirm_send: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl InputSettings {
|
|
||||||
fn default_multiline() -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_history_size() -> usize {
|
|
||||||
100
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_tab_width() -> u8 {
|
|
||||||
4
|
|
||||||
}
|
|
||||||
|
|
||||||
fn default_confirm_send() -> bool {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for InputSettings {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
multiline: Self::default_multiline(),
|
|
||||||
history_size: Self::default_history_size(),
|
|
||||||
tab_width: Self::default_tab_width(),
|
|
||||||
confirm_send: Self::default_confirm_send(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convenience accessor for an Ollama provider entry, creating a default if missing
|
|
||||||
pub fn ensure_ollama_config(config: &mut Config) -> &ProviderConfig {
|
|
||||||
config
|
|
||||||
.providers
|
|
||||||
.entry("ollama".to_string())
|
|
||||||
.or_insert_with(|| ProviderConfig {
|
|
||||||
provider_type: "ollama".to_string(),
|
|
||||||
base_url: Some("http://localhost:11434".to_string()),
|
|
||||||
api_key: None,
|
|
||||||
extra: HashMap::new(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Calculate absolute timeout for session data based on configuration
|
|
||||||
pub fn session_timeout(config: &Config) -> Duration {
|
|
||||||
Duration::from_secs(config.storage.session_timeout_minutes.max(1) * 60)
|
|
||||||
}
|
|
||||||
@@ -1,289 +0,0 @@
|
|||||||
use crate::types::{Conversation, Message};
|
|
||||||
use crate::Result;
|
|
||||||
use serde_json::{Number, Value};
|
|
||||||
use std::collections::{HashMap, VecDeque};
|
|
||||||
use std::time::{Duration, Instant};
|
|
||||||
use uuid::Uuid;
|
|
||||||
|
|
||||||
const STREAMING_FLAG: &str = "streaming";
|
|
||||||
const LAST_CHUNK_TS: &str = "last_chunk_ts";
|
|
||||||
const PLACEHOLDER_FLAG: &str = "placeholder";
|
|
||||||
|
|
||||||
/// Manage active and historical conversations, including streaming updates.
|
|
||||||
pub struct ConversationManager {
|
|
||||||
active: Conversation,
|
|
||||||
history: VecDeque<Conversation>,
|
|
||||||
message_index: HashMap<Uuid, usize>,
|
|
||||||
streaming: HashMap<Uuid, StreamingMetadata>,
|
|
||||||
max_history: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct StreamingMetadata {
|
|
||||||
started: Instant,
|
|
||||||
last_update: Instant,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ConversationManager {
|
|
||||||
/// Create a new conversation manager with a default model
|
|
||||||
pub fn new(model: impl Into<String>) -> Self {
|
|
||||||
Self::with_history_capacity(model, 32)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create with explicit history capacity
|
|
||||||
pub fn with_history_capacity(model: impl Into<String>, max_history: usize) -> Self {
|
|
||||||
let conversation = Conversation::new(model.into());
|
|
||||||
Self {
|
|
||||||
active: conversation,
|
|
||||||
history: VecDeque::new(),
|
|
||||||
message_index: HashMap::new(),
|
|
||||||
streaming: HashMap::new(),
|
|
||||||
max_history: max_history.max(1),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Access the active conversation
|
|
||||||
pub fn active(&self) -> &Conversation {
|
|
||||||
&self.active
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Mutable access to the active conversation (auto refreshing indexes afterwards)
|
|
||||||
fn active_mut(&mut self) -> &mut Conversation {
|
|
||||||
&mut self.active
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Replace the active conversation with a provided one, archiving the existing conversation if it contains data
|
|
||||||
pub fn load(&mut self, conversation: Conversation) {
|
|
||||||
if !self.active.messages.is_empty() {
|
|
||||||
self.archive_active();
|
|
||||||
}
|
|
||||||
|
|
||||||
self.message_index.clear();
|
|
||||||
for (idx, message) in conversation.messages.iter().enumerate() {
|
|
||||||
self.message_index.insert(message.id, idx);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.stream_reset();
|
|
||||||
self.active = conversation;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Start a brand new conversation, archiving the previous one
|
|
||||||
pub fn start_new(&mut self, model: Option<String>, name: Option<String>) {
|
|
||||||
self.archive_active();
|
|
||||||
let model = model.unwrap_or_else(|| self.active.model.clone());
|
|
||||||
self.active = Conversation::new(model);
|
|
||||||
self.active.name = name;
|
|
||||||
self.message_index.clear();
|
|
||||||
self.stream_reset();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Archive the active conversation into history
|
|
||||||
pub fn archive_active(&mut self) {
|
|
||||||
if self.active.messages.is_empty() {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut archived = self.active.clone();
|
|
||||||
archived.updated_at = std::time::SystemTime::now();
|
|
||||||
self.history.push_front(archived);
|
|
||||||
|
|
||||||
while self.history.len() > self.max_history {
|
|
||||||
self.history.pop_back();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get immutable history
|
|
||||||
pub fn history(&self) -> impl Iterator<Item = &Conversation> {
|
|
||||||
self.history.iter()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a user message and return its identifier
|
|
||||||
pub fn push_user_message(&mut self, content: impl Into<String>) -> Uuid {
|
|
||||||
let message = Message::user(content.into());
|
|
||||||
self.register_message(message)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a system message and return its identifier
|
|
||||||
pub fn push_system_message(&mut self, content: impl Into<String>) -> Uuid {
|
|
||||||
let message = Message::system(content.into());
|
|
||||||
self.register_message(message)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add an assistant message (non-streaming) and return its identifier
|
|
||||||
pub fn push_assistant_message(&mut self, content: impl Into<String>) -> Uuid {
|
|
||||||
let message = Message::assistant(content.into());
|
|
||||||
self.register_message(message)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Push an arbitrary message into the active conversation
|
|
||||||
pub fn push_message(&mut self, message: Message) -> Uuid {
|
|
||||||
self.register_message(message)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Start tracking a streaming assistant response, returning the message id to update
|
|
||||||
pub fn start_streaming_response(&mut self) -> Uuid {
|
|
||||||
let mut message = Message::assistant(String::new());
|
|
||||||
message
|
|
||||||
.metadata
|
|
||||||
.insert(STREAMING_FLAG.to_string(), Value::Bool(true));
|
|
||||||
let id = message.id;
|
|
||||||
self.register_message(message);
|
|
||||||
self.streaming.insert(
|
|
||||||
id,
|
|
||||||
StreamingMetadata {
|
|
||||||
started: Instant::now(),
|
|
||||||
last_update: Instant::now(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
id
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Append streaming content to an assistant message
|
|
||||||
pub fn append_stream_chunk(
|
|
||||||
&mut self,
|
|
||||||
message_id: Uuid,
|
|
||||||
chunk: &str,
|
|
||||||
is_final: bool,
|
|
||||||
) -> Result<()> {
|
|
||||||
let index = self
|
|
||||||
.message_index
|
|
||||||
.get(&message_id)
|
|
||||||
.copied()
|
|
||||||
.ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?;
|
|
||||||
|
|
||||||
let conversation = self.active_mut();
|
|
||||||
if let Some(message) = conversation.messages.get_mut(index) {
|
|
||||||
let was_placeholder = message
|
|
||||||
.metadata
|
|
||||||
.remove(PLACEHOLDER_FLAG)
|
|
||||||
.and_then(|v| v.as_bool())
|
|
||||||
.unwrap_or(false);
|
|
||||||
|
|
||||||
if was_placeholder {
|
|
||||||
message.content.clear();
|
|
||||||
}
|
|
||||||
|
|
||||||
if !chunk.is_empty() {
|
|
||||||
message.content.push_str(chunk);
|
|
||||||
}
|
|
||||||
message.timestamp = std::time::SystemTime::now();
|
|
||||||
let millis = std::time::SystemTime::now()
|
|
||||||
.duration_since(std::time::UNIX_EPOCH)
|
|
||||||
                .unwrap_or_default()
                .as_millis() as u64;
            message.metadata.insert(
                LAST_CHUNK_TS.to_string(),
                Value::Number(Number::from(millis)),
            );

            if is_final {
                message
                    .metadata
                    .insert(STREAMING_FLAG.to_string(), Value::Bool(false));
                self.streaming.remove(&message_id);
            } else if let Some(info) = self.streaming.get_mut(&message_id) {
                info.last_update = Instant::now();
            }
        }

        Ok(())
    }

    /// Set placeholder text for a streaming message
    pub fn set_stream_placeholder(
        &mut self,
        message_id: Uuid,
        text: impl Into<String>,
    ) -> Result<()> {
        let index = self
            .message_index
            .get(&message_id)
            .copied()
            .ok_or_else(|| crate::Error::Unknown(format!("Unknown message id: {message_id}")))?;

        if let Some(message) = self.active_mut().messages.get_mut(index) {
            message.content = text.into();
            message.timestamp = std::time::SystemTime::now();
            message
                .metadata
                .insert(PLACEHOLDER_FLAG.to_string(), Value::Bool(true));
        }

        Ok(())
    }

    /// Update the active model (used when user changes model mid session)
    pub fn set_model(&mut self, model: impl Into<String>) {
        self.active.model = model.into();
        self.active.updated_at = std::time::SystemTime::now();
    }

    /// Provide read access to the cached streaming metadata
    pub fn streaming_metadata(&self, message_id: &Uuid) -> Option<StreamingMetadata> {
        self.streaming.get(message_id).cloned()
    }

    /// Remove inactive streaming messages that have stalled beyond the provided timeout
    pub fn expire_stalled_streams(&mut self, idle_timeout: Duration) -> Vec<Uuid> {
        let cutoff = Instant::now() - idle_timeout;
        let mut expired = Vec::new();

        self.streaming.retain(|id, meta| {
            if meta.last_update < cutoff {
                expired.push(*id);
                false
            } else {
                true
            }
        });

        expired
    }

    /// Clear all state
    pub fn clear(&mut self) {
        self.active.clear();
        self.history.clear();
        self.message_index.clear();
        self.streaming.clear();
    }

    fn register_message(&mut self, message: Message) -> Uuid {
        let id = message.id;
        let idx;
        {
            let conversation = self.active_mut();
            idx = conversation.messages.len();
            conversation.messages.push(message);
            conversation.updated_at = std::time::SystemTime::now();
        }
        self.message_index.insert(id, idx);
        id
    }

    fn stream_reset(&mut self) {
        self.streaming.clear();
    }
}

impl StreamingMetadata {
    /// Duration since the stream started
    pub fn elapsed(&self) -> Duration {
        self.started.elapsed()
    }

    /// Duration since the last chunk was received
    pub fn idle_duration(&self) -> Duration {
        self.last_update.elapsed()
    }

    /// Timestamp when streaming started
    pub fn started_at(&self) -> Instant {
        self.started
    }

    /// Timestamp of most recent update
    pub fn last_update_at(&self) -> Instant {
        self.last_update
    }
}
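// Hedged usage sketch (not part of the diff above): how a caller might reap stalled
// streams with expire_stalled_streams(). `manager` and the 30-second timeout are
// assumptions used only for illustration.
fn reap_stalled(manager: &mut ConversationManager) {
    let expired = manager.expire_stalled_streams(std::time::Duration::from_secs(30));
    for id in expired {
        // Mark the abandoned response so the UI can show that the stream stalled.
        let _ = manager.set_stream_placeholder(id, "[stream timed out]");
    }
}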
@@ -1,96 +0,0 @@
use crate::types::Message;

/// Formats messages for display across different clients.
#[derive(Debug, Clone)]
pub struct MessageFormatter {
    wrap_width: usize,
    show_role_labels: bool,
    preserve_empty_lines: bool,
}

impl MessageFormatter {
    /// Create a new formatter
    pub fn new(wrap_width: usize, show_role_labels: bool) -> Self {
        Self {
            wrap_width: wrap_width.max(20),
            show_role_labels,
            preserve_empty_lines: false,
        }
    }

    /// Override whether empty lines should be preserved
    pub fn with_preserve_empty(mut self, preserve: bool) -> Self {
        self.preserve_empty_lines = preserve;
        self
    }

    /// Update the wrap width
    pub fn set_wrap_width(&mut self, width: usize) {
        self.wrap_width = width.max(20);
    }

    /// Whether role labels should be shown alongside messages
    pub fn show_role_labels(&self) -> bool {
        self.show_role_labels
    }

    pub fn format_message(&self, message: &Message) -> Vec<String> {
        message
            .content
            .trim()
            .lines()
            .map(|s| s.to_string())
            .collect()
    }

    /// Extract thinking content from <think> tags, returning (content_without_think, thinking_content)
    /// This handles both complete and incomplete (streaming) think tags.
    pub fn extract_thinking(&self, content: &str) -> (String, Option<String>) {
        let mut result = String::new();
        let mut thinking = String::new();
        let mut current_pos = 0;

        while let Some(start_pos) = content[current_pos..].find("<think>") {
            let abs_start = current_pos + start_pos;

            // Add content before <think> tag to result
            result.push_str(&content[current_pos..abs_start]);

            // Find closing tag
            if let Some(end_pos) = content[abs_start..].find("</think>") {
                let abs_end = abs_start + end_pos;
                let think_content = &content[abs_start + 7..abs_end]; // 7 = len("<think>")

                if !thinking.is_empty() {
                    thinking.push_str("\n\n");
                }
                thinking.push_str(think_content.trim());

                current_pos = abs_end + 8; // 8 = len("</think>")
            } else {
                // Unclosed tag - this is streaming content
                // Extract everything after <think> as thinking content
                let think_content = &content[abs_start + 7..]; // 7 = len("<think>")

                if !thinking.is_empty() {
                    thinking.push_str("\n\n");
                }
                thinking.push_str(think_content);

                current_pos = content.len();
                break;
            }
        }

        // Add remaining content
        result.push_str(&content[current_pos..]);

        let thinking_result = if thinking.is_empty() {
            None
        } else {
            Some(thinking)
        };

        (result, thinking_result)
    }
}
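// Illustrative sketch (an assumption for this document, not part of the file above):
// extract_thinking() splits "<think>...</think>" blocks out of model output, and an
// unterminated "<think>" is treated as thinking still in progress during streaming.
fn split_thinking_example(formatter: &MessageFormatter) {
    let (visible, thinking) =
        formatter.extract_thinking("Hello <think>weighing options</think> world");
    assert_eq!(visible, "Hello  world");
    assert_eq!(thinking.as_deref(), Some("weighing options"));

    // Streaming case: the closing tag has not arrived yet.
    let (visible, thinking) = formatter.extract_thinking("Partial <think>still reason");
    assert_eq!(visible, "Partial ");
    assert_eq!(thinking.as_deref(), Some("still reason"));
}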
@@ -1,217 +0,0 @@
use std::collections::VecDeque;

/// Text input buffer with history and cursor management.
#[derive(Debug, Clone)]
pub struct InputBuffer {
    buffer: String,
    cursor: usize,
    history: VecDeque<String>,
    history_index: Option<usize>,
    max_history: usize,
    pub multiline: bool,
    tab_width: u8,
}

impl InputBuffer {
    /// Create a new input buffer
    pub fn new(max_history: usize, multiline: bool, tab_width: u8) -> Self {
        Self {
            buffer: String::new(),
            cursor: 0,
            history: VecDeque::with_capacity(max_history.max(1)),
            history_index: None,
            max_history: max_history.max(1),
            multiline,
            tab_width: tab_width.max(1),
        }
    }

    /// Get current text
    pub fn text(&self) -> &str {
        &self.buffer
    }

    /// Current cursor position
    pub fn cursor(&self) -> usize {
        self.cursor
    }

    /// Replace buffer contents
    pub fn set_text(&mut self, text: impl Into<String>) {
        self.buffer = text.into();
        self.cursor = self.buffer.len();
        self.history_index = None;
    }

    /// Clear buffer and reset cursor
    pub fn clear(&mut self) {
        self.buffer.clear();
        self.cursor = 0;
        self.history_index = None;
    }

    /// Insert a character at the cursor position
    pub fn insert_char(&mut self, ch: char) {
        if ch == '\t' {
            self.insert_tab();
            return;
        }

        self.buffer.insert(self.cursor, ch);
        self.cursor += ch.len_utf8();
    }

    /// Insert text at cursor
    pub fn insert_text(&mut self, text: &str) {
        self.buffer.insert_str(self.cursor, text);
        self.cursor += text.len();
    }

    /// Insert spaces representing a tab
    pub fn insert_tab(&mut self) {
        let spaces = " ".repeat(self.tab_width as usize);
        self.insert_text(&spaces);
    }

    /// Remove character before cursor
    pub fn backspace(&mut self) {
        if self.cursor == 0 {
            return;
        }

        let prev_index = prev_char_boundary(&self.buffer, self.cursor);
        self.buffer.drain(prev_index..self.cursor);
        self.cursor = prev_index;
    }

    /// Remove character at cursor
    pub fn delete(&mut self) {
        if self.cursor >= self.buffer.len() {
            return;
        }

        let next_index = next_char_boundary(&self.buffer, self.cursor);
        self.buffer.drain(self.cursor..next_index);
    }

    /// Move cursor left by one grapheme
    pub fn move_left(&mut self) {
        if self.cursor == 0 {
            return;
        }
        self.cursor = prev_char_boundary(&self.buffer, self.cursor);
    }

    /// Move cursor right by one grapheme
    pub fn move_right(&mut self) {
        if self.cursor >= self.buffer.len() {
            return;
        }
        self.cursor = next_char_boundary(&self.buffer, self.cursor);
    }

    /// Move cursor to start of the buffer
    pub fn move_home(&mut self) {
        self.cursor = 0;
    }

    /// Move cursor to end of the buffer
    pub fn move_end(&mut self) {
        self.cursor = self.buffer.len();
    }

    /// Push current buffer into history, clearing the buffer afterwards
    pub fn commit_to_history(&mut self) -> String {
        let text = std::mem::take(&mut self.buffer);
        if !text.trim().is_empty() {
            self.push_history_entry(text.clone());
        }
        self.cursor = 0;
        self.history_index = None;
        text
    }

    /// Navigate to previous history entry
    pub fn history_previous(&mut self) {
        if self.history.is_empty() {
            return;
        }

        let new_index = match self.history_index {
            Some(idx) if idx + 1 < self.history.len() => idx + 1,
            None => 0,
            _ => return,
        };

        self.history_index = Some(new_index);
        if let Some(entry) = self.history.get(new_index) {
            self.buffer = entry.clone();
            self.cursor = self.buffer.len();
        }
    }

    /// Navigate to next history entry
    pub fn history_next(&mut self) {
        if self.history.is_empty() {
            return;
        }

        if let Some(idx) = self.history_index {
            if idx > 0 {
                let new_idx = idx - 1;
                self.history_index = Some(new_idx);
                if let Some(entry) = self.history.get(new_idx) {
                    self.buffer = entry.clone();
                    self.cursor = self.buffer.len();
                }
            } else {
                self.history_index = None;
                self.buffer.clear();
                self.cursor = 0;
            }
        } else {
            self.buffer.clear();
            self.cursor = 0;
        }
    }

    /// Push a new entry into the history buffer, enforcing capacity
    pub fn push_history_entry(&mut self, entry: String) {
        if self
            .history
            .front()
            .map(|existing| existing == &entry)
            .unwrap_or(false)
        {
            return;
        }

        self.history.push_front(entry);
        while self.history.len() > self.max_history {
            self.history.pop_back();
        }
    }
}

fn prev_char_boundary(buffer: &str, cursor: usize) -> usize {
    buffer[..cursor]
        .char_indices()
        .last()
        .map(|(idx, _)| idx)
        .unwrap_or(0)
}

fn next_char_boundary(buffer: &str, cursor: usize) -> usize {
    if cursor >= buffer.len() {
        return buffer.len();
    }

    let slice = &buffer[cursor..];
    let mut iter = slice.char_indices();
    iter.next();
    if let Some((idx, _)) = iter.next() {
        cursor + idx
    } else {
        buffer.len()
    }
}
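// Minimal sketch of the editing flow above (the call sequence is an assumption, not part
// of the file): characters are inserted at the cursor, a commit stores the line in
// history, and history_previous()/history_next() recall entries.
fn input_buffer_example() {
    let mut input = InputBuffer::new(100, false, 4);
    for ch in "hello".chars() {
        input.insert_char(ch);
    }
    assert_eq!(input.text(), "hello");

    let submitted = input.commit_to_history(); // buffer is cleared, entry stored
    assert_eq!(submitted, "hello");
    assert_eq!(input.text(), "");

    input.history_previous(); // recall the most recent entry
    assert_eq!(input.text(), "hello");
}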
@@ -1,59 +0,0 @@
//! Core traits and types for OWLEN LLM client
//!
//! This crate provides the foundational abstractions for building
//! LLM providers, routers, and MCP (Model Context Protocol) adapters.

pub mod config;
pub mod conversation;
pub mod formatting;
pub mod input;
pub mod model;
pub mod provider;
pub mod router;
pub mod session;
pub mod types;
pub mod ui;
pub mod wrap_cursor;

pub use config::*;
pub use conversation::*;
pub use formatting::*;
pub use input::*;
pub use model::*;
pub use provider::*;
pub use router::*;
pub use session::*;

/// Result type used throughout the OWLEN ecosystem
pub type Result<T> = std::result::Result<T, Error>;

/// Core error types for OWLEN
#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("Provider error: {0}")]
    Provider(#[from] anyhow::Error),

    #[error("Network error: {0}")]
    Network(String),

    #[error("Authentication error: {0}")]
    Auth(String),

    #[error("Configuration error: {0}")]
    Config(String),

    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),

    #[error("Invalid input: {0}")]
    InvalidInput(String),

    #[error("Operation timed out: {0}")]
    Timeout(String),

    #[error("Serialization error: {0}")]
    Serialization(#[from] serde_json::Error),

    #[error("Unknown error: {0}")]
    Unknown(String),
}
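// Small sketch (application-side code assumed for illustration): the #[from] conversions
// above let `?` lift std::io and serde_json errors into the crate-wide Error automatically.
fn read_config_text(path: &std::path::Path) -> Result<String> {
    let text = std::fs::read_to_string(path)?; // std::io::Error -> Error::Io
    Ok(text)
}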
@@ -1,84 +0,0 @@
use crate::types::ModelInfo;
use crate::Result;
use std::future::Future;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::RwLock;

#[derive(Default, Debug)]
struct ModelCache {
    models: Vec<ModelInfo>,
    last_refresh: Option<Instant>,
}

/// Caches model listings for improved selection performance
#[derive(Clone, Debug)]
pub struct ModelManager {
    cache: Arc<RwLock<ModelCache>>,
    ttl: Duration,
}

impl ModelManager {
    /// Create a new manager with the desired cache TTL
    pub fn new(ttl: Duration) -> Self {
        Self {
            cache: Arc::new(RwLock::new(ModelCache::default())),
            ttl,
        }
    }

    /// Get cached models, refreshing via the provided fetcher when stale. Returns the up-to-date model list.
    pub async fn get_or_refresh<F, Fut>(
        &self,
        force_refresh: bool,
        fetcher: F,
    ) -> Result<Vec<ModelInfo>>
    where
        F: FnOnce() -> Fut,
        Fut: Future<Output = Result<Vec<ModelInfo>>>,
    {
        if !force_refresh {
            if let Some(models) = self.cached_if_fresh().await {
                return Ok(models);
            }
        }

        let models = fetcher().await?;
        let mut cache = self.cache.write().await;
        cache.models = models.clone();
        cache.last_refresh = Some(Instant::now());
        Ok(models)
    }

    /// Return cached models without refreshing
    pub async fn cached(&self) -> Vec<ModelInfo> {
        self.cache.read().await.models.clone()
    }

    /// Drop cached models, forcing next call to refresh
    pub async fn invalidate(&self) {
        let mut cache = self.cache.write().await;
        cache.models.clear();
        cache.last_refresh = None;
    }

    /// Select a model by id or name from the cache
    pub async fn select(&self, identifier: &str) -> Option<ModelInfo> {
        let cache = self.cache.read().await;
        cache
            .models
            .iter()
            .find(|m| m.id == identifier || m.name == identifier)
            .cloned()
    }

    async fn cached_if_fresh(&self) -> Option<Vec<ModelInfo>> {
        let cache = self.cache.read().await;
        let fresh = matches!(cache.last_refresh, Some(ts) if ts.elapsed() < self.ttl);
        if fresh && !cache.models.is_empty() {
            Some(cache.models.clone())
        } else {
            None
        }
    }
}
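// Hedged usage sketch (not part of the original file): how a caller might drive the cache
// above. `fetch_stub` is a stand-in fetcher assumed only for illustration; any async fn or
// closure returning Result<Vec<ModelInfo>> satisfies the bound.
async fn fetch_stub() -> Result<Vec<ModelInfo>> {
    Ok(Vec::new())
}

async fn model_manager_example() -> Result<()> {
    let manager = ModelManager::new(Duration::from_secs(300));

    // Cache starts empty, so the fetcher runs and its result is stored with a timestamp.
    let models = manager.get_or_refresh(false, fetch_stub).await?;
    println!("fetched {} models", models.len());

    // cached() returns whatever is stored without refreshing; invalidate() forces the
    // next get_or_refresh() call to hit the fetcher again.
    let _still_cached = manager.cached().await;
    manager.invalidate().await;
    Ok(())
}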
@@ -1,105 +0,0 @@
//! Provider trait and related types

use crate::{types::*, Result};
use futures::Stream;
use std::pin::Pin;
use std::sync::Arc;

/// A stream of chat responses
pub type ChatStream = Pin<Box<dyn Stream<Item = Result<ChatResponse>> + Send>>;

/// Trait for LLM providers (Ollama, OpenAI, Anthropic, etc.)
#[async_trait::async_trait]
pub trait Provider: Send + Sync {
    /// Get the name of this provider
    fn name(&self) -> &str;

    /// List available models from this provider
    async fn list_models(&self) -> Result<Vec<ModelInfo>>;

    /// Send a chat completion request
    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse>;

    /// Send a streaming chat completion request
    async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream>;

    /// Check if the provider is available/healthy
    async fn health_check(&self) -> Result<()>;

    /// Get provider-specific configuration schema
    fn config_schema(&self) -> serde_json::Value {
        serde_json::json!({})
    }
}

/// Configuration for a provider
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ProviderConfig {
    /// Provider type identifier
    pub provider_type: String,
    /// Base URL for API calls
    pub base_url: Option<String>,
    /// API key or token
    pub api_key: Option<String>,
    /// Additional provider-specific configuration
    #[serde(flatten)]
    pub extra: std::collections::HashMap<String, serde_json::Value>,
}

/// A registry of providers
pub struct ProviderRegistry {
    providers: std::collections::HashMap<String, Arc<dyn Provider>>,
}

impl ProviderRegistry {
    /// Create a new provider registry
    pub fn new() -> Self {
        Self {
            providers: std::collections::HashMap::new(),
        }
    }

    /// Register a provider
    pub fn register<P: Provider + 'static>(&mut self, provider: P) {
        self.register_arc(Arc::new(provider));
    }

    /// Register an already wrapped provider
    pub fn register_arc(&mut self, provider: Arc<dyn Provider>) {
        let name = provider.name().to_string();
        self.providers.insert(name, provider);
    }

    /// Get a provider by name
    pub fn get(&self, name: &str) -> Option<Arc<dyn Provider>> {
        self.providers.get(name).cloned()
    }

    /// List all registered provider names
    pub fn list_providers(&self) -> Vec<String> {
        self.providers.keys().cloned().collect()
    }

    /// Get all models from all providers
    pub async fn list_all_models(&self) -> Result<Vec<ModelInfo>> {
        let mut all_models = Vec::new();

        for provider in self.providers.values() {
            match provider.list_models().await {
                Ok(mut models) => all_models.append(&mut models),
                Err(e) => {
                    // Log error but continue with other providers
                    eprintln!("Failed to get models from {}: {}", provider.name(), e);
                }
            }
        }

        Ok(all_models)
    }
}

impl Default for ProviderRegistry {
    fn default() -> Self {
        Self::new()
    }
}
@@ -1,153 +0,0 @@
//! Router for managing multiple providers and routing requests

use crate::{provider::*, types::*, Result};
use std::sync::Arc;

/// A router that can distribute requests across multiple providers
pub struct Router {
    registry: ProviderRegistry,
    routing_rules: Vec<RoutingRule>,
    default_provider: Option<String>,
}

/// A rule for routing requests to specific providers
#[derive(Debug, Clone)]
pub struct RoutingRule {
    /// Pattern to match against model names
    pub model_pattern: String,
    /// Provider to route to
    pub provider: String,
    /// Priority (higher numbers are checked first)
    pub priority: u32,
}

impl Router {
    /// Create a new router
    pub fn new() -> Self {
        Self {
            registry: ProviderRegistry::new(),
            routing_rules: Vec::new(),
            default_provider: None,
        }
    }

    /// Register a provider with the router
    pub fn register_provider<P: Provider + 'static>(&mut self, provider: P) {
        self.registry.register(provider);
    }

    /// Set the default provider
    pub fn set_default_provider(&mut self, provider_name: String) {
        self.default_provider = Some(provider_name);
    }

    /// Add a routing rule
    pub fn add_routing_rule(&mut self, rule: RoutingRule) {
        self.routing_rules.push(rule);
        // Sort by priority (descending)
        self.routing_rules
            .sort_by(|a, b| b.priority.cmp(&a.priority));
    }

    /// Route a request to the appropriate provider
    pub async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> {
        let provider = self.find_provider_for_model(&request.model)?;
        provider.chat(request).await
    }

    /// Route a streaming request to the appropriate provider
    pub async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> {
        let provider = self.find_provider_for_model(&request.model)?;
        provider.chat_stream(request).await
    }

    /// List all available models from all providers
    pub async fn list_models(&self) -> Result<Vec<ModelInfo>> {
        self.registry.list_all_models().await
    }

    /// Find the appropriate provider for a given model
    fn find_provider_for_model(&self, model: &str) -> Result<Arc<dyn Provider>> {
        // Check routing rules first
        for rule in &self.routing_rules {
            if self.matches_pattern(&rule.model_pattern, model) {
                if let Some(provider) = self.registry.get(&rule.provider) {
                    return Ok(provider);
                }
            }
        }

        // Fall back to default provider
        if let Some(default) = &self.default_provider {
            if let Some(provider) = self.registry.get(default) {
                return Ok(provider);
            }
        }

        // If no default, try to find any provider that has this model
        // This is a fallback for cases where routing isn't configured
        for provider_name in self.registry.list_providers() {
            if let Some(provider) = self.registry.get(&provider_name) {
                return Ok(provider);
            }
        }

        Err(crate::Error::Provider(anyhow::anyhow!(
            "No provider found for model: {}",
            model
        )))
    }

    /// Check if a model name matches a pattern
    fn matches_pattern(&self, pattern: &str, model: &str) -> bool {
        // Simple pattern matching for now
        // Could be extended to support more complex patterns
        if pattern == "*" {
            return true;
        }

        if let Some(prefix) = pattern.strip_suffix('*') {
            return model.starts_with(prefix);
        }

        if let Some(suffix) = pattern.strip_prefix('*') {
            return model.ends_with(suffix);
        }

        pattern == model
    }

    /// Get routing configuration
    pub fn get_routing_rules(&self) -> &[RoutingRule] {
        &self.routing_rules
    }

    /// Get the default provider name
    pub fn get_default_provider(&self) -> Option<&str> {
        self.default_provider.as_deref()
    }
}

impl Default for Router {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_pattern_matching() {
        let router = Router::new();

        assert!(router.matches_pattern("*", "any-model"));
        assert!(router.matches_pattern("gpt*", "gpt-4"));
        assert!(router.matches_pattern("gpt*", "gpt-3.5-turbo"));
        assert!(!router.matches_pattern("gpt*", "claude-3"));
        assert!(router.matches_pattern("*:latest", "llama2:latest"));
        assert!(router.matches_pattern("exact-match", "exact-match"));
        assert!(!router.matches_pattern("exact-match", "different-model"));
    }
}
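// Sketch of wiring rules into the router (the provider names "openai" and "ollama" are
// assumptions used only for illustration; any registered Provider implementation works).
fn configure_router_example(mut router: Router) -> Router {
    router.add_routing_rule(RoutingRule {
        model_pattern: "gpt*".to_string(),
        provider: "openai".to_string(),
        priority: 10,
    });
    router.add_routing_rule(RoutingRule {
        model_pattern: "*".to_string(),
        provider: "ollama".to_string(),
        priority: 1, // lower priority: the catch-all rule is checked last
    });
    router.set_default_provider("ollama".to_string());
    router
}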
@@ -1,221 +0,0 @@
use crate::config::Config;
use crate::conversation::ConversationManager;
use crate::formatting::MessageFormatter;
use crate::input::InputBuffer;
use crate::model::ModelManager;
use crate::provider::{ChatStream, Provider};
use crate::types::{ChatParameters, ChatRequest, ChatResponse, Conversation, ModelInfo};
use crate::Result;
use std::sync::Arc;
use uuid::Uuid;

/// Outcome of submitting a chat request
pub enum SessionOutcome {
    /// Immediate response received (non-streaming)
    Complete(ChatResponse),
    /// Streaming response where chunks will arrive asynchronously
    Streaming {
        response_id: Uuid,
        stream: ChatStream,
    },
}

/// High-level controller encapsulating session state and provider interactions
pub struct SessionController {
    provider: Arc<dyn Provider>,
    conversation: ConversationManager,
    model_manager: ModelManager,
    input_buffer: InputBuffer,
    formatter: MessageFormatter,
    config: Config,
}

impl SessionController {
    /// Create a new controller with the given provider and configuration
    pub fn new(provider: Arc<dyn Provider>, config: Config) -> Self {
        let model = config
            .general
            .default_model
            .clone()
            .unwrap_or_else(|| "ollama/default".to_string());

        let conversation =
            ConversationManager::with_history_capacity(model, config.storage.max_saved_sessions);
        let formatter =
            MessageFormatter::new(config.ui.wrap_column as usize, config.ui.show_role_labels)
                .with_preserve_empty(config.ui.word_wrap);
        let input_buffer = InputBuffer::new(
            config.input.history_size,
            config.input.multiline,
            config.input.tab_width,
        );

        let model_manager = ModelManager::new(config.general.model_cache_ttl());

        Self {
            provider,
            conversation,
            model_manager,
            input_buffer,
            formatter,
            config,
        }
    }

    /// Access the active conversation
    pub fn conversation(&self) -> &Conversation {
        self.conversation.active()
    }

    /// Mutable access to the conversation manager
    pub fn conversation_mut(&mut self) -> &mut ConversationManager {
        &mut self.conversation
    }

    /// Access input buffer
    pub fn input_buffer(&self) -> &InputBuffer {
        &self.input_buffer
    }

    /// Mutable input buffer access
    pub fn input_buffer_mut(&mut self) -> &mut InputBuffer {
        &mut self.input_buffer
    }

    /// Formatter for rendering messages
    pub fn formatter(&self) -> &MessageFormatter {
        &self.formatter
    }

    /// Update the wrap width of the message formatter
    pub fn set_formatter_wrap_width(&mut self, width: usize) {
        self.formatter.set_wrap_width(width);
    }

    /// Access configuration
    pub fn config(&self) -> &Config {
        &self.config
    }

    /// Mutable configuration access
    pub fn config_mut(&mut self) -> &mut Config {
        &mut self.config
    }

    /// Currently selected model identifier
    pub fn selected_model(&self) -> &str {
        &self.conversation.active().model
    }

    /// Change current model for upcoming requests
    pub fn set_model(&mut self, model: String) {
        self.conversation.set_model(model.clone());
        self.config.general.default_model = Some(model);
    }

    /// Retrieve cached models, refreshing from provider as needed
    pub async fn models(&self, force_refresh: bool) -> Result<Vec<ModelInfo>> {
        self.model_manager
            .get_or_refresh(force_refresh, || async {
                self.provider.list_models().await
            })
            .await
    }

    /// Attempt to select the configured default model from cached models
    pub fn ensure_default_model(&mut self, models: &[ModelInfo]) {
        if let Some(default) = self.config.general.default_model.clone() {
            if models.iter().any(|m| m.id == default || m.name == default) {
                self.set_model(default);
            }
        } else if let Some(model) = models.first() {
            self.set_model(model.id.clone());
        }
    }

    /// Submit a user message; optionally stream the response
    pub async fn send_message(
        &mut self,
        content: String,
        mut parameters: ChatParameters,
    ) -> Result<SessionOutcome> {
        let streaming = parameters.stream || self.config.general.enable_streaming;
        parameters.stream = streaming;

        self.conversation.push_user_message(content);

        self.send_request_with_current_conversation(parameters)
            .await
    }

    /// Send a request using the current conversation without adding a new user message
    pub async fn send_request_with_current_conversation(
        &mut self,
        mut parameters: ChatParameters,
    ) -> Result<SessionOutcome> {
        let streaming = parameters.stream || self.config.general.enable_streaming;
        parameters.stream = streaming;

        let request = ChatRequest {
            model: self.conversation.active().model.clone(),
            messages: self.conversation.active().messages.clone(),
            parameters,
        };

        if streaming {
            match self.provider.chat_stream(request).await {
                Ok(stream) => {
                    let response_id = self.conversation.start_streaming_response();
                    Ok(SessionOutcome::Streaming {
                        response_id,
                        stream,
                    })
                }
                Err(err) => {
                    self.conversation
                        .push_assistant_message(format!("Error starting stream: {}", err));
                    Err(err)
                }
            }
        } else {
            match self.provider.chat(request).await {
                Ok(response) => {
                    self.conversation.push_message(response.message.clone());
                    Ok(SessionOutcome::Complete(response))
                }
                Err(err) => {
                    self.conversation
                        .push_assistant_message(format!("Error: {}", err));
                    Err(err)
                }
            }
        }
    }

    /// Mark a streaming response message with placeholder content
    pub fn mark_stream_placeholder(&mut self, message_id: Uuid, text: &str) -> Result<()> {
        self.conversation
            .set_stream_placeholder(message_id, text.to_string())
    }

    /// Apply streaming chunk to the conversation
    pub fn apply_stream_chunk(&mut self, message_id: Uuid, chunk: &ChatResponse) -> Result<()> {
        self.conversation
            .append_stream_chunk(message_id, &chunk.message.content, chunk.is_final)
    }

    /// Access conversation history
    pub fn history(&self) -> Vec<Conversation> {
        self.conversation.history().cloned().collect()
    }

    /// Start a new conversation optionally targeting a specific model
    pub fn start_new_conversation(&mut self, model: Option<String>, name: Option<String>) {
        self.conversation.start_new(model, name);
    }

    /// Clear current conversation messages
    pub fn clear(&mut self) {
        self.conversation.clear();
    }
}
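// Hedged sketch of driving a streaming reply through the controller above (the call
// sequence is an assumption for illustration; it relies on futures::StreamExt, which the
// crate already pulls in through the provider module).
async fn drive_stream_example(controller: &mut SessionController) -> Result<()> {
    use futures::StreamExt;

    match controller
        .send_message("Hello".to_string(), ChatParameters::default())
        .await?
    {
        SessionOutcome::Complete(response) => {
            println!("{}", response.message.content);
        }
        SessionOutcome::Streaming { response_id, mut stream } => {
            // Each chunk is appended to the placeholder message created by
            // start_streaming_response(); the final chunk clears the streaming flag.
            while let Some(chunk) = stream.next().await {
                controller.apply_stream_chunk(response_id, &chunk?)?;
            }
        }
    }
    Ok(())
}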
@@ -1,193 +0,0 @@
//! Core types used across OWLEN

use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt;
use uuid::Uuid;

/// A message in a conversation
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Message {
    /// Unique identifier for this message
    pub id: Uuid,
    /// Role of the message sender (user, assistant, system)
    pub role: Role,
    /// Content of the message
    pub content: String,
    /// Optional metadata
    pub metadata: HashMap<String, serde_json::Value>,
    /// Timestamp when the message was created
    pub timestamp: std::time::SystemTime,
}

/// Role of a message sender
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    /// Message from the user
    User,
    /// Message from the AI assistant
    Assistant,
    /// System message (prompts, context, etc.)
    System,
}

impl fmt::Display for Role {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            Role::User => "user",
            Role::Assistant => "assistant",
            Role::System => "system",
        };
        f.write_str(label)
    }
}

/// A conversation containing multiple messages
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conversation {
    /// Unique identifier for this conversation
    pub id: Uuid,
    /// Optional name/title for the conversation
    pub name: Option<String>,
    /// Messages in chronological order
    pub messages: Vec<Message>,
    /// Model used for this conversation
    pub model: String,
    /// When the conversation was created
    pub created_at: std::time::SystemTime,
    /// When the conversation was last updated
    pub updated_at: std::time::SystemTime,
}

/// Configuration for a chat completion request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatRequest {
    /// The model to use for completion
    pub model: String,
    /// The conversation messages
    pub messages: Vec<Message>,
    /// Optional parameters for the request
    pub parameters: ChatParameters,
}

/// Parameters for chat completion
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ChatParameters {
    /// Temperature for randomness (0.0 to 2.0)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Maximum tokens to generate
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Whether to stream the response
    #[serde(default)]
    pub stream: bool,
    /// Additional provider-specific parameters
    #[serde(flatten)]
    #[serde(default)]
    pub extra: HashMap<String, serde_json::Value>,
}

/// Response from a chat completion request
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChatResponse {
    /// The generated message
    pub message: Message,
    /// Token usage information
    pub usage: Option<TokenUsage>,
    /// Whether this is a streaming chunk
    #[serde(default)]
    pub is_streaming: bool,
    /// Whether this is the final chunk in a stream
    #[serde(default)]
    pub is_final: bool,
}

/// Token usage information
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TokenUsage {
    /// Tokens in the prompt
    pub prompt_tokens: u32,
    /// Tokens in the completion
    pub completion_tokens: u32,
    /// Total tokens used
    pub total_tokens: u32,
}

/// Information about an available model
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelInfo {
    /// Model identifier
    pub id: String,
    /// Human-readable name
    pub name: String,
    /// Model description
    pub description: Option<String>,
    /// Provider that hosts this model
    pub provider: String,
    /// Context window size
    pub context_window: Option<u32>,
    /// Additional capabilities
    pub capabilities: Vec<String>,
}

impl Message {
    /// Create a new message
    pub fn new(role: Role, content: String) -> Self {
        Self {
            id: Uuid::new_v4(),
            role,
            content,
            metadata: HashMap::new(),
            timestamp: std::time::SystemTime::now(),
        }
    }

    /// Create a user message
    pub fn user(content: String) -> Self {
        Self::new(Role::User, content)
    }

    /// Create an assistant message
    pub fn assistant(content: String) -> Self {
        Self::new(Role::Assistant, content)
    }

    /// Create a system message
    pub fn system(content: String) -> Self {
        Self::new(Role::System, content)
    }
}

impl Conversation {
    /// Create a new conversation
    pub fn new(model: String) -> Self {
        let now = std::time::SystemTime::now();
        Self {
            id: Uuid::new_v4(),
            name: None,
            messages: Vec::new(),
            model,
            created_at: now,
            updated_at: now,
        }
    }

    /// Add a message to the conversation
    pub fn add_message(&mut self, message: Message) {
        self.messages.push(message);
        self.updated_at = std::time::SystemTime::now();
    }

    /// Get the last message in the conversation
    pub fn last_message(&self) -> Option<&Message> {
        self.messages.last()
    }

    /// Clear all messages
    pub fn clear(&mut self) {
        self.messages.clear();
        self.updated_at = std::time::SystemTime::now();
    }
}
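// Tiny sketch: seeding a conversation with the constructors above (the model name is an
// illustrative assumption, not something defined by this crate).
fn seed_conversation() -> Conversation {
    let mut convo = Conversation::new("llama3:latest".to_string());
    convo.add_message(Message::system("You are a helpful assistant.".to_string()));
    convo.add_message(Message::user("Hi there!".to_string()));
    convo
}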
@@ -1,419 +0,0 @@
//! Shared UI components and state management for TUI applications
//!
//! This module contains reusable UI components that can be shared between
//! different TUI applications (chat, code, etc.)

use std::fmt;

/// Application state
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AppState {
    Running,
    Quit,
}

/// Input modes for TUI applications
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InputMode {
    Normal,
    Editing,
    ProviderSelection,
    ModelSelection,
    Help,
    Visual,
    Command,
}

impl fmt::Display for InputMode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            InputMode::Normal => "Normal",
            InputMode::Editing => "Editing",
            InputMode::ModelSelection => "Model",
            InputMode::ProviderSelection => "Provider",
            InputMode::Help => "Help",
            InputMode::Visual => "Visual",
            InputMode::Command => "Command",
        };
        f.write_str(label)
    }
}

/// Represents which panel is currently focused
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FocusedPanel {
    Chat,
    Thinking,
    Input,
}

/// Auto-scroll state manager for scrollable panels
#[derive(Debug, Clone)]
pub struct AutoScroll {
    pub scroll: usize,
    pub content_len: usize,
    pub stick_to_bottom: bool,
}

impl Default for AutoScroll {
    fn default() -> Self {
        Self {
            scroll: 0,
            content_len: 0,
            stick_to_bottom: true,
        }
    }
}

impl AutoScroll {
    /// Update scroll position based on viewport height
    pub fn on_viewport(&mut self, viewport_h: usize) {
        let max = self.content_len.saturating_sub(viewport_h);
        if self.stick_to_bottom {
            self.scroll = max;
        } else {
            self.scroll = self.scroll.min(max);
        }
    }

    /// Handle user scroll input
    pub fn on_user_scroll(&mut self, delta: isize, viewport_h: usize) {
        let max = self.content_len.saturating_sub(viewport_h) as isize;
        let s = (self.scroll as isize + delta).clamp(0, max) as usize;
        self.scroll = s;
        self.stick_to_bottom = s as isize == max;
    }

    /// Scroll down half page
    pub fn scroll_half_page_down(&mut self, viewport_h: usize) {
        let delta = (viewport_h / 2) as isize;
        self.on_user_scroll(delta, viewport_h);
    }

    /// Scroll up half page
    pub fn scroll_half_page_up(&mut self, viewport_h: usize) {
        let delta = -((viewport_h / 2) as isize);
        self.on_user_scroll(delta, viewport_h);
    }

    /// Scroll down full page
    pub fn scroll_full_page_down(&mut self, viewport_h: usize) {
        let delta = viewport_h as isize;
        self.on_user_scroll(delta, viewport_h);
    }

    /// Scroll up full page
    pub fn scroll_full_page_up(&mut self, viewport_h: usize) {
        let delta = -(viewport_h as isize);
        self.on_user_scroll(delta, viewport_h);
    }

    /// Jump to top
    pub fn jump_to_top(&mut self) {
        self.scroll = 0;
        self.stick_to_bottom = false;
    }

    /// Jump to bottom
    pub fn jump_to_bottom(&mut self, viewport_h: usize) {
        self.stick_to_bottom = true;
        self.on_viewport(viewport_h);
    }
}

/// Visual selection state for text selection
#[derive(Debug, Clone, Default)]
pub struct VisualSelection {
    pub start: Option<(usize, usize)>, // (row, col)
    pub end: Option<(usize, usize)>,   // (row, col)
}

impl VisualSelection {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn start_at(&mut self, pos: (usize, usize)) {
        self.start = Some(pos);
        self.end = Some(pos);
    }

    pub fn extend_to(&mut self, pos: (usize, usize)) {
        self.end = Some(pos);
    }

    pub fn clear(&mut self) {
        self.start = None;
        self.end = None;
    }

    pub fn is_active(&self) -> bool {
        self.start.is_some() && self.end.is_some()
    }

    pub fn get_normalized(&self) -> Option<((usize, usize), (usize, usize))> {
        if let (Some(s), Some(e)) = (self.start, self.end) {
            // Normalize selection so start is always before end
            if s.0 < e.0 || (s.0 == e.0 && s.1 <= e.1) {
                Some((s, e))
            } else {
                Some((e, s))
            }
        } else {
            None
        }
    }
}

/// Extract text from a selection range in a list of lines
pub fn extract_text_from_selection(
    lines: &[String],
    start: (usize, usize),
    end: (usize, usize),
) -> Option<String> {
    if lines.is_empty() || start.0 >= lines.len() {
        return None;
    }

    let start_row = start.0;
    let start_col = start.1;
    let end_row = end.0.min(lines.len() - 1);
    let end_col = end.1;

    if start_row == end_row {
        // Single line selection
        let line = &lines[start_row];
        let chars: Vec<char> = line.chars().collect();
        let start_c = start_col.min(chars.len());
        let end_c = end_col.min(chars.len());

        if start_c >= end_c {
            return None;
        }

        let selected: String = chars[start_c..end_c].iter().collect();
        Some(selected)
    } else {
        // Multi-line selection
        let mut result = Vec::new();

        // First line: from start_col to end
        let first_line = &lines[start_row];
        let first_chars: Vec<char> = first_line.chars().collect();
        let start_c = start_col.min(first_chars.len());
        if start_c < first_chars.len() {
            result.push(first_chars[start_c..].iter().collect::<String>());
        }

        // Middle lines: entire lines
        for row in (start_row + 1)..end_row {
            if row < lines.len() {
                result.push(lines[row].clone());
            }
        }

        // Last line: from start to end_col
        if end_row < lines.len() && end_row > start_row {
            let last_line = &lines[end_row];
            let last_chars: Vec<char> = last_line.chars().collect();
            let end_c = end_col.min(last_chars.len());
            if end_c > 0 {
                result.push(last_chars[..end_c].iter().collect::<String>());
            }
        }

        if result.is_empty() {
            None
        } else {
            Some(result.join("\n"))
        }
    }
}

/// Cursor position for navigating scrollable content
#[derive(Debug, Clone, Copy, Default)]
pub struct CursorPosition {
    pub row: usize,
    pub col: usize,
}

impl CursorPosition {
    pub fn new(row: usize, col: usize) -> Self {
        Self { row, col }
    }

    pub fn move_up(&mut self, amount: usize) {
        self.row = self.row.saturating_sub(amount);
    }

    pub fn move_down(&mut self, amount: usize, max: usize) {
        self.row = (self.row + amount).min(max);
    }

    pub fn move_left(&mut self, amount: usize) {
        self.col = self.col.saturating_sub(amount);
    }

    pub fn move_right(&mut self, amount: usize, max: usize) {
        self.col = (self.col + amount).min(max);
    }

    pub fn as_tuple(&self) -> (usize, usize) {
        (self.row, self.col)
    }
}

/// Word boundary detection for navigation
pub fn find_next_word_boundary(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();

    if col >= chars.len() {
        return Some(chars.len());
    }

    let mut pos = col;
    let is_word_char = |c: char| c.is_alphanumeric() || c == '_';

    // Skip current word
    if is_word_char(chars[pos]) {
        while pos < chars.len() && is_word_char(chars[pos]) {
            pos += 1;
        }
    } else {
        // Skip non-word characters
        while pos < chars.len() && !is_word_char(chars[pos]) {
            pos += 1;
        }
    }

    Some(pos)
}

pub fn find_word_end(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();

    if col >= chars.len() {
        return Some(chars.len());
    }

    let mut pos = col;
    let is_word_char = |c: char| c.is_alphanumeric() || c == '_';

    // If on a word character, move to end of current word
    if is_word_char(chars[pos]) {
        while pos < chars.len() && is_word_char(chars[pos]) {
            pos += 1;
        }
        // Move back one to be ON the last character
        pos = pos.saturating_sub(1);
    } else {
        // Skip non-word characters
        while pos < chars.len() && !is_word_char(chars[pos]) {
            pos += 1;
        }
        // Now on first char of next word, move to its end
        while pos < chars.len() && is_word_char(chars[pos]) {
            pos += 1;
        }
        pos = pos.saturating_sub(1);
    }

    Some(pos)
}

pub fn find_prev_word_boundary(line: &str, col: usize) -> Option<usize> {
    let chars: Vec<char> = line.chars().collect();

    if col == 0 || chars.is_empty() {
        return Some(0);
    }

    let mut pos = col.min(chars.len());
    let is_word_char = |c: char| c.is_alphanumeric() || c == '_';

    // Move back one position first
    pos = pos.saturating_sub(1);

    // Skip non-word characters
    while pos > 0 && !is_word_char(chars[pos]) {
        pos -= 1;
    }

    // Skip word characters to find start of word
    while pos > 0 && is_word_char(chars[pos - 1]) {
        pos -= 1;
    }

    Some(pos)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_auto_scroll() {
        let mut scroll = AutoScroll::default();
        scroll.content_len = 100;

        // Test on_viewport with stick_to_bottom
        scroll.on_viewport(10);
        assert_eq!(scroll.scroll, 90);

        // Test user scroll up
        scroll.on_user_scroll(-10, 10);
        assert_eq!(scroll.scroll, 80);
        assert!(!scroll.stick_to_bottom);

        // Test jump to bottom
        scroll.jump_to_bottom(10);
        assert!(scroll.stick_to_bottom);
        assert_eq!(scroll.scroll, 90);
    }

    #[test]
    fn test_visual_selection() {
        let mut selection = VisualSelection::new();
        assert!(!selection.is_active());

        selection.start_at((0, 0));
        assert!(selection.is_active());

        selection.extend_to((2, 5));
        let normalized = selection.get_normalized();
        assert_eq!(normalized, Some(((0, 0), (2, 5))));

        selection.clear();
        assert!(!selection.is_active());
    }

    #[test]
    fn test_extract_text_single_line() {
        let lines = vec!["Hello World".to_string()];
        let result = extract_text_from_selection(&lines, (0, 0), (0, 5));
        assert_eq!(result, Some("Hello".to_string()));
    }

    #[test]
    fn test_extract_text_multi_line() {
        let lines = vec![
            "First line".to_string(),
            "Second line".to_string(),
            "Third line".to_string(),
        ];
        let result = extract_text_from_selection(&lines, (0, 6), (2, 5));
        assert_eq!(result, Some("line\nSecond line\nThird".to_string()));
    }

    #[test]
    fn test_word_boundaries() {
        let line = "hello world test";
        assert_eq!(find_next_word_boundary(line, 0), Some(5));
        assert_eq!(find_next_word_boundary(line, 5), Some(6));
        assert_eq!(find_next_word_boundary(line, 6), Some(11));

        assert_eq!(find_prev_word_boundary(line, 16), Some(12));
        assert_eq!(find_prev_word_boundary(line, 11), Some(6));
        assert_eq!(find_prev_word_boundary(line, 6), Some(0));
    }
}
@@ -1,90 +0,0 @@
#![allow(clippy::cast_possible_truncation)]

use unicode_segmentation::UnicodeSegmentation;
use unicode_width::UnicodeWidthStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ScreenPos {
    pub row: u16,
    pub col: u16,
}

pub fn build_cursor_map(text: &str, width: u16) -> Vec<ScreenPos> {
    assert!(width > 0);
    let width = width as usize;
    let mut pos_map = vec![ScreenPos { row: 0, col: 0 }; text.len() + 1];
    let mut row = 0;
    let mut col = 0;

    let mut word_start_idx = 0;
    let mut word_start_col = 0;

    for (byte_offset, grapheme) in text.grapheme_indices(true) {
        let grapheme_width = UnicodeWidthStr::width(grapheme);

        if grapheme == "\n" {
            row += 1;
            col = 0;
            word_start_col = 0;
            word_start_idx = byte_offset + grapheme.len();
            // Set position for the end of this grapheme and any intermediate bytes
            let end_pos = ScreenPos {
                row: row as u16,
                col: col as u16,
            };
            for i in 1..=grapheme.len() {
                if byte_offset + i < pos_map.len() {
                    pos_map[byte_offset + i] = end_pos;
                }
            }
            continue;
        }

        if grapheme.chars().all(char::is_whitespace) {
            if col + grapheme_width > width {
                // Whitespace causes wrap
                row += 1;
                col = 1; // Position after wrapping space
                word_start_col = 1;
                word_start_idx = byte_offset + grapheme.len();
            } else {
                col += grapheme_width;
                word_start_col = col;
                word_start_idx = byte_offset + grapheme.len();
            }
        } else if col + grapheme_width > width {
            if word_start_col > 0 && byte_offset == word_start_idx {
                // This is the first character of a new word that won't fit, wrap it
                row += 1;
                col = grapheme_width;
            } else if word_start_col == 0 {
                // No previous word boundary, hard break
                row += 1;
                col = grapheme_width;
            } else {
                // This is part of a word already on the line, let it extend beyond width
                col += grapheme_width;
            }
        } else {
            col += grapheme_width;
        }

        // Set position for the end of this grapheme and any intermediate bytes
        let end_pos = ScreenPos {
            row: row as u16,
            col: col as u16,
        };
        for i in 1..=grapheme.len() {
            if byte_offset + i < pos_map.len() {
                pos_map[byte_offset + i] = end_pos;
            }
        }
    }

    pos_map
}

pub fn byte_to_screen_pos(text: &str, byte_idx: usize, width: u16) -> ScreenPos {
    let pos_map = build_cursor_map(text, width);
    pos_map[byte_idx.min(text.len())]
}
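For orientation, a minimal sketch of how this removed module was used: translating a cursor's byte offset in the logical text into its wrapped on-screen cell. The caller name and the editor_width value are illustrative, not part of this diff.

use owlen_core::wrap_cursor::{byte_to_screen_pos, ScreenPos};

fn place_cursor(text: &str, cursor_byte: usize, editor_width: u16) -> (u16, u16) {
    // Map the byte offset onto the wrapped view; the map clamps to text.len().
    let ScreenPos { row, col } = byte_to_screen_pos(text, cursor_byte, editor_width);
    (row, col)
}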
@@ -1,115 +0,0 @@
use owlen_core::wrap_cursor::build_cursor_map;

#[test]
fn debug_long_word_wrapping() {
    // Test the exact scenario from the user's issue
    let text = "asdnklasdnaklsdnkalsdnaskldaskldnaskldnaskldnaskldnaskldnaskldnaskld asdnklska dnskadl dasnksdl asdn";
    let width = 50; // Approximate width from the user's example

    println!("Testing long word text with width {}", width);
    println!("Text: '{}'", text);

    // Check what the cursor map shows
    let cursor_map = build_cursor_map(text, width);

    println!("\nCursor map for key positions:");
    let long_word_end = text.find(' ').unwrap_or(text.len());
    for i in [
        0,
        10,
        20,
        30,
        40,
        50,
        60,
        70,
        long_word_end,
        long_word_end + 1,
        text.len(),
    ] {
        if i <= text.len() {
            let pos = cursor_map[i];
            let char_at = if i < text.len() {
                format!("'{}'", text.chars().nth(i).unwrap_or('?'))
            } else {
                "END".to_string()
            };
            println!(
                " Byte {}: {} -> row {}, col {}",
                i, char_at, pos.row, pos.col
            );
        }
    }

    // Test what my formatting function produces
    let lines = format_text_with_word_wrap_debug(text, width);

    println!("\nFormatted lines:");
    for (i, line) in lines.iter().enumerate() {
        println!(" Line {}: '{}' (length: {})", i, line, line.len());
    }

    // The long word should be broken up, not kept on one line
    assert!(
        lines[0].len() <= width as usize + 5,
        "First line is too long: {} chars",
        lines[0].len()
    );
}

fn format_text_with_word_wrap_debug(text: &str, width: u16) -> Vec<String> {
    if text.is_empty() {
        return vec!["".to_string()];
    }

    // Use the cursor map to determine where line breaks should occur
    let cursor_map = build_cursor_map(text, width);

    let mut lines = Vec::new();
    let mut current_line = String::new();
    let mut current_row = 0;

    for (byte_idx, ch) in text.char_indices() {
        let pos_before = if byte_idx > 0 {
            cursor_map[byte_idx]
        } else {
            cursor_map[0]
        };
        let pos_after = cursor_map[byte_idx + ch.len_utf8()];

        println!(
            "Processing '{}' at byte {}: before=({},{}) after=({},{})",
            ch, byte_idx, pos_before.row, pos_before.col, pos_after.row, pos_after.col
        );

        // If the row changed, we need to start a new line
        if pos_after.row > current_row {
            println!(
                " Row changed from {} to {}! Finishing line: '{}'",
                current_row, pos_after.row, current_line
            );
            if !current_line.is_empty() {
                lines.push(current_line.clone());
                current_line.clear();
            }
            current_row = pos_after.row;

            // If this character is a space that caused the wrap, don't include it
            if ch.is_whitespace() && pos_before.row < pos_after.row {
                println!(" Skipping wrapping space");
                continue; // Skip the wrapping space
            }
        }

        current_line.push(ch);
    }

    // Add the final line
    if !current_line.is_empty() {
        lines.push(current_line);
    } else if lines.is_empty() {
        lines.push("".to_string());
    }

    lines
}
@@ -1,96 +0,0 @@
#![allow(non_snake_case)]

use owlen_core::wrap_cursor::{build_cursor_map, ScreenPos};

fn assert_cursor_pos(map: &[ScreenPos], byte_idx: usize, expected: ScreenPos) {
    assert_eq!(map[byte_idx], expected, "Mismatch at byte {}", byte_idx);
}

#[test]
fn test_basic_wrap_at_spaces() {
    let text = "hello world";
    let width = 5;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 5, ScreenPos { row: 0, col: 5 }); // after "hello"
    assert_cursor_pos(&map, 6, ScreenPos { row: 1, col: 1 }); // after "hello "
    assert_cursor_pos(&map, 11, ScreenPos { row: 1, col: 6 }); // after "world"
}

#[test]
fn test_hard_line_break() {
    let text = "a\nb";
    let width = 10;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 1, ScreenPos { row: 0, col: 1 }); // after "a"
    assert_cursor_pos(&map, 2, ScreenPos { row: 1, col: 0 }); // after "\n"
    assert_cursor_pos(&map, 3, ScreenPos { row: 1, col: 1 }); // after "b"
}

#[test]
fn test_long_word_split() {
    let text = "abcdefgh";
    let width = 3;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 1, ScreenPos { row: 0, col: 1 });
    assert_cursor_pos(&map, 2, ScreenPos { row: 0, col: 2 });
    assert_cursor_pos(&map, 3, ScreenPos { row: 0, col: 3 });
    assert_cursor_pos(&map, 4, ScreenPos { row: 1, col: 1 });
    assert_cursor_pos(&map, 5, ScreenPos { row: 1, col: 2 });
    assert_cursor_pos(&map, 6, ScreenPos { row: 1, col: 3 });
    assert_cursor_pos(&map, 7, ScreenPos { row: 2, col: 1 });
    assert_cursor_pos(&map, 8, ScreenPos { row: 2, col: 2 });
}

#[test]
fn test_trailing_spaces_preserved() {
    let text = "x  y";
    let width = 2;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 1, ScreenPos { row: 0, col: 1 }); // after "x"
    assert_cursor_pos(&map, 2, ScreenPos { row: 0, col: 2 }); // after "x "
    assert_cursor_pos(&map, 3, ScreenPos { row: 1, col: 1 }); // after "x  "
    assert_cursor_pos(&map, 4, ScreenPos { row: 1, col: 2 }); // after "y"
}

#[test]
fn test_graphemes_emoji() {
    let text = "🙂🙂a";
    let width = 3;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 4, ScreenPos { row: 0, col: 2 }); // after first emoji
    assert_cursor_pos(&map, 8, ScreenPos { row: 1, col: 2 }); // after second emoji
    assert_cursor_pos(&map, 9, ScreenPos { row: 1, col: 3 }); // after "a"
}

#[test]
fn test_graphemes_combining() {
    let text = "e\u{0301}";
    let width = 10;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 1, ScreenPos { row: 0, col: 1 }); // after "e"
    assert_cursor_pos(&map, 3, ScreenPos { row: 0, col: 1 }); // after combining mark
}

#[test]
fn test_exact_edge() {
    let text = "abc def";
    let width = 3;
    let map = build_cursor_map(text, width);

    assert_cursor_pos(&map, 0, ScreenPos { row: 0, col: 0 });
    assert_cursor_pos(&map, 3, ScreenPos { row: 0, col: 3 }); // after "abc"
    assert_cursor_pos(&map, 4, ScreenPos { row: 1, col: 1 }); // after " "
    assert_cursor_pos(&map, 7, ScreenPos { row: 1, col: 4 }); // after "def"
}
@@ -1,34 +0,0 @@
[package]
name = "owlen-ollama"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
homepage.workspace = true
description = "Ollama provider for OWLEN LLM client"

[dependencies]
owlen-core = { path = "../owlen-core" }

# HTTP client
reqwest = { workspace = true }

# Async runtime
tokio = { workspace = true }
tokio-stream = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }

# Serialization
serde = { workspace = true }
serde_json = { workspace = true }

# Utilities
anyhow = { workspace = true }
thiserror = { workspace = true }
uuid = { workspace = true }
async-trait = { workspace = true }

[dev-dependencies]
tokio-test = { workspace = true }
@@ -1,530 +0,0 @@
//! Ollama provider for OWLEN LLM client

use futures_util::StreamExt;
use owlen_core::{
    config::GeneralSettings,
    model::ModelManager,
    provider::{ChatStream, Provider, ProviderConfig},
    types::{ChatParameters, ChatRequest, ChatResponse, Message, ModelInfo, Role, TokenUsage},
    Result,
};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use std::collections::HashMap;
use std::io;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;

const DEFAULT_TIMEOUT_SECS: u64 = 120;
const DEFAULT_MODEL_CACHE_TTL_SECS: u64 = 60;

/// Ollama provider implementation with enhanced configuration and caching
pub struct OllamaProvider {
    client: Client,
    base_url: String,
    model_manager: ModelManager,
}

/// Options for configuring the Ollama provider
pub struct OllamaOptions {
    pub base_url: String,
    pub request_timeout: Duration,
    pub model_cache_ttl: Duration,
}

impl OllamaOptions {
    pub fn new(base_url: impl Into<String>) -> Self {
        Self {
            base_url: base_url.into(),
            request_timeout: Duration::from_secs(DEFAULT_TIMEOUT_SECS),
            model_cache_ttl: Duration::from_secs(DEFAULT_MODEL_CACHE_TTL_SECS),
        }
    }

    pub fn with_general(mut self, general: &GeneralSettings) -> Self {
        self.model_cache_ttl = general.model_cache_ttl();
        self
    }
}

/// Ollama-specific message format
#[derive(Debug, Clone, Serialize, Deserialize)]
struct OllamaMessage {
    role: String,
    content: String,
}

/// Ollama chat request format
#[derive(Debug, Serialize)]
struct OllamaChatRequest {
    model: String,
    messages: Vec<OllamaMessage>,
    stream: bool,
    #[serde(flatten)]
    options: HashMap<String, Value>,
}

/// Ollama chat response format
#[derive(Debug, Deserialize)]
struct OllamaChatResponse {
    message: Option<OllamaMessage>,
    done: bool,
    #[serde(default)]
    prompt_eval_count: Option<u32>,
    #[serde(default)]
    eval_count: Option<u32>,
    #[serde(default)]
    error: Option<String>,
}

#[derive(Debug, Deserialize)]
struct OllamaErrorResponse {
    error: Option<String>,
}

/// Ollama models list response
#[derive(Debug, Deserialize)]
struct OllamaModelsResponse {
    models: Vec<OllamaModelInfo>,
}

/// Ollama model information
#[derive(Debug, Deserialize)]
struct OllamaModelInfo {
    name: String,
    #[serde(default)]
    details: Option<OllamaModelDetails>,
}

#[derive(Debug, Deserialize)]
struct OllamaModelDetails {
    #[serde(default)]
    family: Option<String>,
}

impl OllamaProvider {
    /// Create a new Ollama provider with sensible defaults
    pub fn new(base_url: impl Into<String>) -> Result<Self> {
        Self::with_options(OllamaOptions::new(base_url))
    }

    /// Create a provider from configuration settings
    pub fn from_config(config: &ProviderConfig, general: Option<&GeneralSettings>) -> Result<Self> {
        let mut options = OllamaOptions::new(
            config
                .base_url
                .clone()
                .unwrap_or_else(|| "http://localhost:11434".to_string()),
        );

        if let Some(timeout) = config
            .extra
            .get("timeout_secs")
            .and_then(|value| value.as_u64())
        {
            options.request_timeout = Duration::from_secs(timeout.max(5));
        }

        if let Some(cache_ttl) = config
            .extra
            .get("model_cache_ttl_secs")
            .and_then(|value| value.as_u64())
        {
            options.model_cache_ttl = Duration::from_secs(cache_ttl.max(5));
        }

        if let Some(general) = general {
            options = options.with_general(general);
        }

        Self::with_options(options)
    }

    /// Create a provider from explicit options
    pub fn with_options(options: OllamaOptions) -> Result<Self> {
        let client = Client::builder()
            .timeout(options.request_timeout)
            .build()
            .map_err(|e| owlen_core::Error::Config(format!("Failed to build HTTP client: {e}")))?;

        Ok(Self {
            client,
            base_url: options.base_url.trim_end_matches('/').to_string(),
            model_manager: ModelManager::new(options.model_cache_ttl),
        })
    }

    /// Accessor for the underlying model manager
    pub fn model_manager(&self) -> &ModelManager {
        &self.model_manager
    }

    fn convert_message(message: &Message) -> OllamaMessage {
        OllamaMessage {
            role: match message.role {
                Role::User => "user".to_string(),
                Role::Assistant => "assistant".to_string(),
                Role::System => "system".to_string(),
            },
            content: message.content.clone(),
        }
    }

    fn convert_ollama_message(message: &OllamaMessage) -> Message {
        let role = match message.role.as_str() {
            "user" => Role::User,
            "assistant" => Role::Assistant,
            "system" => Role::System,
            _ => Role::Assistant,
        };

        Message::new(role, message.content.clone())
    }

    fn build_options(parameters: ChatParameters) -> HashMap<String, Value> {
        let mut options = parameters.extra;

        if let Some(temperature) = parameters.temperature {
            options
                .entry("temperature".to_string())
                .or_insert(json!(temperature as f64));
        }

        if let Some(max_tokens) = parameters.max_tokens {
            options
                .entry("num_predict".to_string())
                .or_insert(json!(max_tokens));
        }

        options
    }

    async fn fetch_models(&self) -> Result<Vec<ModelInfo>> {
        let url = format!("{}/api/tags", self.base_url);

        let response = self
            .client
            .get(&url)
            .send()
            .await
            .map_err(|e| owlen_core::Error::Network(format!("Failed to fetch models: {e}")))?;

        if !response.status().is_success() {
            let code = response.status();
            let error = parse_error_body(response).await;
            return Err(owlen_core::Error::Network(format!(
                "Ollama model listing failed ({code}): {error}"
            )));
        }

        let body = response.text().await.map_err(|e| {
            owlen_core::Error::Network(format!("Failed to read models response: {e}"))
        })?;

        let ollama_response: OllamaModelsResponse =
            serde_json::from_str(&body).map_err(owlen_core::Error::Serialization)?;

        let models = ollama_response
            .models
            .into_iter()
            .map(|model| ModelInfo {
                id: model.name.clone(),
                name: model.name.clone(),
                description: model
                    .details
                    .as_ref()
                    .and_then(|d| d.family.as_ref().map(|f| format!("Ollama {f} model"))),
                provider: "ollama".to_string(),
                context_window: None,
                capabilities: vec!["chat".to_string()],
            })
            .collect();

        Ok(models)
    }
}

#[async_trait::async_trait]
impl Provider for OllamaProvider {
    fn name(&self) -> &str {
        "ollama"
    }

    async fn list_models(&self) -> Result<Vec<ModelInfo>> {
        self.model_manager
            .get_or_refresh(false, || async { self.fetch_models().await })
            .await
    }

    async fn chat(&self, request: ChatRequest) -> Result<ChatResponse> {
        let ChatRequest {
            model,
            messages,
            parameters,
        } = request;

        let messages: Vec<OllamaMessage> = messages.iter().map(Self::convert_message).collect();

        let options = Self::build_options(parameters);

        let ollama_request = OllamaChatRequest {
            model,
            messages,
            stream: false,
            options,
        };

        let url = format!("{}/api/chat", self.base_url);
        let response = self
            .client
            .post(&url)
            .json(&ollama_request)
            .send()
            .await
            .map_err(|e| owlen_core::Error::Network(format!("Chat request failed: {e}")))?;

        if !response.status().is_success() {
            let code = response.status();
            let error = parse_error_body(response).await;
            return Err(owlen_core::Error::Network(format!(
                "Ollama chat failed ({code}): {error}"
            )));
        }

        let body = response.text().await.map_err(|e| {
            owlen_core::Error::Network(format!("Failed to read chat response: {e}"))
        })?;

        let mut ollama_response: OllamaChatResponse =
            serde_json::from_str(&body).map_err(owlen_core::Error::Serialization)?;

        if let Some(error) = ollama_response.error.take() {
            return Err(owlen_core::Error::Provider(anyhow::anyhow!(error)));
        }

        let message = match ollama_response.message {
            Some(ref msg) => Self::convert_ollama_message(msg),
            None => {
                return Err(owlen_core::Error::Provider(anyhow::anyhow!(
                    "Ollama response missing message"
                )))
            }
        };

        let usage = if let (Some(prompt_tokens), Some(completion_tokens)) = (
            ollama_response.prompt_eval_count,
            ollama_response.eval_count,
        ) {
            Some(TokenUsage {
                prompt_tokens,
                completion_tokens,
                total_tokens: prompt_tokens + completion_tokens,
            })
        } else {
            None
        };

        Ok(ChatResponse {
            message,
            usage,
            is_streaming: false,
            is_final: true,
        })
    }

    async fn chat_stream(&self, request: ChatRequest) -> Result<ChatStream> {
        let ChatRequest {
            model,
            messages,
            parameters,
        } = request;

        let messages: Vec<OllamaMessage> = messages.iter().map(Self::convert_message).collect();

        let options = Self::build_options(parameters);

        let ollama_request = OllamaChatRequest {
            model,
            messages,
            stream: true,
            options,
        };

        let url = format!("{}/api/chat", self.base_url);

        let response = self
            .client
            .post(&url)
            .json(&ollama_request)
            .send()
            .await
            .map_err(|e| owlen_core::Error::Network(format!("Streaming request failed: {e}")))?;

        if !response.status().is_success() {
            let code = response.status();
            let error = parse_error_body(response).await;
            return Err(owlen_core::Error::Network(format!(
                "Ollama streaming chat failed ({code}): {error}"
            )));
        }

        let (tx, rx) = mpsc::unbounded_channel();
        let mut stream = response.bytes_stream();

        tokio::spawn(async move {
            let mut buffer = String::new();

            while let Some(chunk) = stream.next().await {
                match chunk {
                    Ok(bytes) => {
                        if let Ok(text) = String::from_utf8(bytes.to_vec()) {
                            buffer.push_str(&text);

                            while let Some(pos) = buffer.find('\n') {
                                let mut line = buffer[..pos].trim().to_string();
                                buffer.drain(..=pos);

                                if line.is_empty() {
                                    continue;
                                }

                                if line.ends_with('\r') {
                                    line.pop();
                                }

                                match serde_json::from_str::<OllamaChatResponse>(&line) {
                                    Ok(mut ollama_response) => {
                                        if let Some(error) = ollama_response.error.take() {
                                            let _ = tx.send(Err(owlen_core::Error::Provider(
                                                anyhow::anyhow!(error),
                                            )));
                                            break;
                                        }

                                        if let Some(message) = ollama_response.message {
                                            let mut chat_response = ChatResponse {
                                                message: Self::convert_ollama_message(&message),
                                                usage: None,
                                                is_streaming: true,
                                                is_final: ollama_response.done,
                                            };

                                            if let (Some(prompt_tokens), Some(completion_tokens)) = (
                                                ollama_response.prompt_eval_count,
                                                ollama_response.eval_count,
                                            ) {
                                                chat_response.usage = Some(TokenUsage {
                                                    prompt_tokens,
                                                    completion_tokens,
                                                    total_tokens: prompt_tokens + completion_tokens,
                                                });
                                            }

                                            if tx.send(Ok(chat_response)).is_err() {
                                                break;
                                            }

                                            if ollama_response.done {
                                                break;
                                            }
                                        }
                                    }
                                    Err(e) => {
                                        let _ = tx.send(Err(owlen_core::Error::Serialization(e)));
                                        break;
                                    }
                                }
                            }
                        } else {
                            let _ = tx.send(Err(owlen_core::Error::Serialization(
                                serde_json::Error::io(io::Error::new(
                                    io::ErrorKind::InvalidData,
                                    "Non UTF-8 chunk from Ollama",
                                )),
                            )));
                            break;
                        }
                    }
                    Err(e) => {
                        let _ = tx.send(Err(owlen_core::Error::Network(format!(
                            "Stream error: {e}"
                        ))));
                        break;
                    }
                }
            }
        });

        let stream = UnboundedReceiverStream::new(rx);
        Ok(Box::pin(stream))
    }

    async fn health_check(&self) -> Result<()> {
        let url = format!("{}/api/version", self.base_url);

        let response = self
            .client
            .get(&url)
            .send()
            .await
            .map_err(|e| owlen_core::Error::Network(format!("Health check failed: {e}")))?;

        if response.status().is_success() {
            Ok(())
        } else {
            Err(owlen_core::Error::Network(format!(
                "Ollama health check failed: HTTP {}",
                response.status()
            )))
        }
    }

    fn config_schema(&self) -> serde_json::Value {
        serde_json::json!({
            "type": "object",
            "properties": {
                "base_url": {
                    "type": "string",
                    "description": "Base URL for Ollama API",
                    "default": "http://localhost:11434"
                },
                "timeout_secs": {
                    "type": "integer",
                    "description": "HTTP request timeout in seconds",
                    "minimum": 5,
                    "default": DEFAULT_TIMEOUT_SECS
                },
                "model_cache_ttl_secs": {
                    "type": "integer",
                    "description": "Seconds to cache model listings",
                    "minimum": 5,
                    "default": DEFAULT_MODEL_CACHE_TTL_SECS
                }
            }
        })
    }
}

async fn parse_error_body(response: reqwest::Response) -> String {
    match response.bytes().await {
        Ok(bytes) => {
            if bytes.is_empty() {
                return "unknown error".to_string();
            }

            if let Ok(err) = serde_json::from_slice::<OllamaErrorResponse>(&bytes) {
                if let Some(error) = err.error {
                    return error;
                }
            }

            match String::from_utf8(bytes.to_vec()) {
                Ok(text) if !text.trim().is_empty() => text,
                _ => "unknown error".to_string(),
            }
        }
        Err(_) => "unknown error".to_string(),
    }
}
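For orientation, a minimal sketch of how this removed provider was driven for a single non-streaming request. The model name and prompt are placeholders, and ChatParameters::default() plus the method-call form are assumptions based on the types imported above, not confirmed by this diff.

use owlen_core::types::{ChatParameters, ChatRequest, Message, Role};

async fn ask_once() -> owlen_core::Result<()> {
    // Point the provider at a local Ollama daemon and send one chat turn.
    let provider = OllamaProvider::new("http://localhost:11434")?;
    let request = ChatRequest {
        model: "llama3".to_string(), // placeholder model name
        messages: vec![Message::new(Role::User, "Hello!".to_string())],
        parameters: ChatParameters::default(), // assumed Default impl
    };
    let response = provider.chat(request).await?;
    println!("{}", response.message.content);
    Ok(())
}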
@@ -1,32 +0,0 @@
[package]
name = "owlen-tui"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
homepage.workspace = true
description = "Terminal User Interface for OWLEN LLM client"

[dependencies]
owlen-core = { path = "../owlen-core" }

# TUI framework
ratatui = { workspace = true }
crossterm = { workspace = true }
tui-textarea = { workspace = true }
textwrap = { workspace = true }
unicode-width = "0.1"

# Async runtime
tokio = { workspace = true }
tokio-util = { workspace = true }
futures-util = { workspace = true }

# Utilities
anyhow = { workspace = true }
uuid = { workspace = true }

[dev-dependencies]
tokio-test = { workspace = true }
tempfile = { workspace = true }
File diff suppressed because it is too large
@@ -1,44 +0,0 @@
use anyhow::Result;
use owlen_core::session::SessionController;
use owlen_core::ui::{AppState, InputMode};
use tokio::sync::mpsc;

use crate::chat_app::{ChatApp, SessionEvent};
use crate::events::Event;

const DEFAULT_SYSTEM_PROMPT: &str =
    "You are OWLEN Code Assistant. Provide detailed, actionable programming help.";

pub struct CodeApp {
    inner: ChatApp,
}

impl CodeApp {
    pub fn new(mut controller: SessionController) -> (Self, mpsc::UnboundedReceiver<SessionEvent>) {
        controller
            .conversation_mut()
            .push_system_message(DEFAULT_SYSTEM_PROMPT.to_string());
        let (inner, rx) = ChatApp::new(controller);
        (Self { inner }, rx)
    }

    pub async fn handle_event(&mut self, event: Event) -> Result<AppState> {
        self.inner.handle_event(event).await
    }

    pub fn handle_session_event(&mut self, event: SessionEvent) -> Result<()> {
        self.inner.handle_session_event(event)
    }

    pub fn mode(&self) -> InputMode {
        self.inner.mode()
    }

    pub fn inner(&self) -> &ChatApp {
        &self.inner
    }

    pub fn inner_mut(&mut self) -> &mut ChatApp {
        &mut self.inner
    }
}
@@ -1,16 +0,0 @@
pub use owlen_core::config::{
    default_config_path, ensure_ollama_config, session_timeout, Config, GeneralSettings,
    InputSettings, StorageSettings, UiSettings, DEFAULT_CONFIG_PATH,
};

/// Attempt to load configuration from default location
pub fn try_load_config() -> Option<Config> {
    Config::load(None).ok()
}

/// Persist configuration to default path
pub fn save_config(config: &Config) -> anyhow::Result<()> {
    config
        .save(None)
        .map_err(|e| anyhow::anyhow!(e.to_string()))
}
@@ -1,210 +0,0 @@
use crossterm::event::{self, KeyCode, KeyEvent, KeyEventKind, KeyModifiers};
use std::time::Duration;
use tokio::sync::mpsc;
use tokio_util::sync::CancellationToken;

/// Application events
#[derive(Debug, Clone)]
pub enum Event {
    /// Terminal key press event
    Key(KeyEvent),
    /// Terminal resize event
    #[allow(dead_code)]
    Resize(u16, u16),
    /// Paste event
    Paste(String),
    /// Tick event for regular updates
    Tick,
}

/// Event handler that captures terminal events and sends them to the application
pub struct EventHandler {
    sender: mpsc::UnboundedSender<Event>,
    tick_rate: Duration,
    cancellation_token: CancellationToken,
}

impl EventHandler {
    pub fn new(
        sender: mpsc::UnboundedSender<Event>,
        cancellation_token: CancellationToken,
    ) -> Self {
        Self {
            sender,
            tick_rate: Duration::from_millis(250), // 4 times per second
            cancellation_token,
        }
    }

    pub async fn run(&self) {
        let mut last_tick = tokio::time::Instant::now();

        loop {
            if self.cancellation_token.is_cancelled() {
                break;
            }

            let timeout = self
                .tick_rate
                .checked_sub(last_tick.elapsed())
                .unwrap_or_else(|| Duration::from_secs(0));

            if event::poll(timeout).unwrap_or(false) {
                match event::read() {
                    Ok(event) => {
                        match event {
                            crossterm::event::Event::Key(key) => {
                                // Only handle KeyEventKind::Press to avoid duplicate events
                                if key.kind == KeyEventKind::Press {
                                    let _ = self.sender.send(Event::Key(key));
                                }
                            }
                            crossterm::event::Event::Resize(width, height) => {
                                let _ = self.sender.send(Event::Resize(width, height));
                            }
                            crossterm::event::Event::Paste(text) => {
                                let _ = self.sender.send(Event::Paste(text));
                            }
                            _ => {}
                        }
                    }
                    Err(_) => {
                        // Handle error by continuing the loop
                        continue;
                    }
                }
            }

            if last_tick.elapsed() >= self.tick_rate {
                let _ = self.sender.send(Event::Tick);
                last_tick = tokio::time::Instant::now();
            }
        }
    }
}

/// Helper functions for key event handling
impl Event {
    /// Check if this is a quit command (Ctrl+C or 'q')
    pub fn is_quit(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Char('q'),
                modifiers: KeyModifiers::NONE,
                ..
            }) | Event::Key(KeyEvent {
                code: KeyCode::Char('c'),
                modifiers: KeyModifiers::CONTROL,
                ..
            })
        )
    }

    /// Check if this is an enter key press
    pub fn is_enter(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Enter,
                ..
            })
        )
    }

    /// Check if this is a tab key press
    #[allow(dead_code)]
    pub fn is_tab(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Tab,
                modifiers: KeyModifiers::NONE,
                ..
            })
        )
    }

    /// Check if this is a backspace
    pub fn is_backspace(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Backspace,
                ..
            })
        )
    }

    /// Check if this is an escape key press
    pub fn is_escape(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Esc,
                ..
            })
        )
    }

    /// Get the character if this is a character key event
    pub fn as_char(&self) -> Option<char> {
        match self {
            Event::Key(KeyEvent {
                code: KeyCode::Char(c),
                modifiers: KeyModifiers::NONE,
                ..
            }) => Some(*c),
            Event::Key(KeyEvent {
                code: KeyCode::Char(c),
                modifiers: KeyModifiers::SHIFT,
                ..
            }) => Some(*c),
            _ => None,
        }
    }

    /// Check if this is an up arrow key press
    pub fn is_up(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Up,
                ..
            })
        )
    }

    /// Check if this is a down arrow key press
    pub fn is_down(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Down,
                ..
            })
        )
    }

    /// Check if this is a left arrow key press
    pub fn is_left(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Left,
                ..
            })
        )
    }

    /// Check if this is a right arrow key press
    pub fn is_right(&self) -> bool {
        matches!(
            self,
            Event::Key(KeyEvent {
                code: KeyCode::Right,
                ..
            })
        )
    }
}
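A minimal sketch of how this removed event loop was typically wired up, assuming a tokio runtime; the channel and token names are illustrative, not part of the diff.

use tokio::sync::mpsc;
use tokio_util::sync::CancellationToken;

async fn run_event_loop() {
    let (tx, mut rx) = mpsc::unbounded_channel();
    let cancel = CancellationToken::new();

    // Poll the terminal on a background task; the UI side consumes events.
    let handler = EventHandler::new(tx, cancel.clone());
    tokio::spawn(async move { handler.run().await });

    while let Some(event) = rx.recv().await {
        if event.is_quit() {
            cancel.cancel(); // stop the polling task
            break;
        }
        // ... forward other events to the application here ...
    }
}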
@@ -1,10 +0,0 @@
pub mod chat_app;
pub mod code_app;
pub mod config;
pub mod events;
pub mod ui;

pub use chat_app::{ChatApp, SessionEvent};
pub use code_app::CodeApp;
pub use events::{Event, EventHandler};
pub use owlen_core::ui::{AppState, FocusedPanel, InputMode};
File diff suppressed because it is too large
crates/platform/config/.gitignore (new file, vendored, 22 lines)
@@ -0,0 +1,22 @@
/target
### Rust template
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb

### rust-analyzer template
# Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
rust-project.json
crates/platform/config/Cargo.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
[package]
name = "config-agent"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
serde = { version = "1", features = ["derive"] }
directories = "5"
figment = { version = "0.10", features = ["toml", "env"] }
permissions = { path = "../permissions" }

[dev-dependencies]
tempfile = "3.23.0"
crates/platform/config/src/lib.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
use directories::ProjectDirs;
use figment::{
    Figment,
    providers::{Env, Format, Serialized, Toml},
};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use permissions::{Mode, PermissionManager};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
    #[serde(default = "default_ollama_url")]
    pub ollama_url: String,
    #[serde(default = "default_model")]
    pub model: String,
    #[serde(default = "default_mode")]
    pub mode: String, // "plan" (read-only) for now
    #[serde(default)]
    pub api_key: Option<String>, // For Ollama Cloud or other API authentication
}

fn default_ollama_url() -> String {
    "http://localhost:11434".into()
}
fn default_model() -> String {
    "qwen3:8b".into()
}
fn default_mode() -> String {
    "plan".into()
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            ollama_url: default_ollama_url(),
            model: default_model(),
            mode: default_mode(),
            api_key: None,
        }
    }
}

impl Settings {
    /// Create a PermissionManager based on the configured mode
    pub fn create_permission_manager(&self) -> PermissionManager {
        let mode = Mode::from_str(&self.mode).unwrap_or(Mode::Plan);
        PermissionManager::new(mode)
    }

    /// Get the Mode enum from the mode string
    pub fn get_mode(&self) -> Mode {
        Mode::from_str(&self.mode).unwrap_or(Mode::Plan)
    }
}

pub fn load_settings(project_root: Option<&str>) -> Result<Settings, figment::Error> {
    let mut fig = Figment::from(Serialized::defaults(Settings::default()));

    // User file: ~/.config/owlen/config.toml
    if let Some(pd) = ProjectDirs::from("dev", "owlibou", "owlen") {
        let user = pd.config_dir().join("config.toml");
        fig = fig.merge(Toml::file(user));
    }

    // Project file: <root>/.owlen.toml
    if let Some(root) = project_root {
        fig = fig.merge(Toml::file(PathBuf::from(root).join(".owlen.toml")));
    }

    // Environment variables have highest precedence
    fig = fig.merge(Env::prefixed("OWLEN_").split("__"));
    // Support OLLAMA_API_KEY, OLLAMA_MODEL, etc. (without nesting)
    fig = fig.merge(Env::prefixed("OLLAMA_"));

    fig.extract()
}
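As a rough sketch of how these layered settings are consumed at startup; the bootstrap function and its call site are illustrative, not part of this diff.

fn bootstrap() -> Result<(), figment::Error> {
    // Layered load: defaults, then user config, then project .owlen.toml,
    // then OWLEN_* / OLLAMA_* environment variables (highest precedence).
    let settings = load_settings(Some("."))?;

    // The permission manager starts in the configured mode ("plan" by default).
    let permissions = settings.create_permission_manager();
    println!("Ollama at {} using model {}", settings.ollama_url, settings.model);
    let _ = permissions;
    Ok(())
}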
crates/platform/config/tests/precedence.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
use config_agent::{load_settings, Settings};
use permissions::{Mode, PermissionDecision, Tool};
use std::{env, fs};

#[test]
fn precedence_env_overrides_files() {
    let tmp = tempfile::tempdir().unwrap();
    let project_file = tmp.path().join(".owlen.toml");
    fs::write(&project_file, r#"model="local-model""#).unwrap();

    unsafe { env::set_var("OWLEN_MODEL", "env-model"); }
    let s = load_settings(Some(tmp.path().to_str().unwrap())).unwrap();
    assert_eq!(s.model, "env-model");
}

#[test]
fn default_mode_is_plan() {
    let s = Settings::default();
    assert_eq!(s.mode, "plan");
}

#[test]
fn settings_create_permission_manager_with_plan_mode() {
    let s = Settings::default();
    let mgr = s.create_permission_manager();

    // Plan mode should allow read operations
    assert_eq!(mgr.check(Tool::Read, None), PermissionDecision::Allow);

    // Plan mode should ask for write operations
    assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Ask);
}

#[test]
fn settings_parse_mode_from_config() {
    let tmp = tempfile::tempdir().unwrap();
    let project_file = tmp.path().join(".owlen.toml");
    fs::write(&project_file, r#"mode="code""#).unwrap();

    let s = load_settings(Some(tmp.path().to_str().unwrap())).unwrap();
    assert_eq!(s.mode, "code");
    assert_eq!(s.get_mode(), Mode::Code);

    let mgr = s.create_permission_manager();
    // Code mode should allow everything
    assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Bash, None), PermissionDecision::Allow);
}
crates/platform/hooks/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "hooks"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.39", features = ["process", "time", "io-util"] }
color-eyre = "0.6"

[dev-dependencies]
tempfile = "3.23.0"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
crates/platform/hooks/src/lib.rs (new file, 171 lines)
@@ -0,0 +1,171 @@
use color_eyre::eyre::{Result, eyre};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::path::PathBuf;
use std::process::Stdio;
use tokio::io::AsyncWriteExt;
use tokio::process::Command;
use tokio::time::timeout;
use std::time::Duration;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "event", rename_all = "camelCase")]
pub enum HookEvent {
    #[serde(rename_all = "camelCase")]
    PreToolUse {
        tool: String,
        args: Value,
    },
    #[serde(rename_all = "camelCase")]
    PostToolUse {
        tool: String,
        result: Value,
    },
    #[serde(rename_all = "camelCase")]
    SessionStart {
        session_id: String,
    },
    #[serde(rename_all = "camelCase")]
    SessionEnd {
        session_id: String,
    },
    #[serde(rename_all = "camelCase")]
    UserPromptSubmit {
        prompt: String,
    },
    PreCompact,
}

impl HookEvent {
    /// Get the hook name for this event (used to find the hook script)
    pub fn hook_name(&self) -> &str {
        match self {
            HookEvent::PreToolUse { .. } => "PreToolUse",
            HookEvent::PostToolUse { .. } => "PostToolUse",
            HookEvent::SessionStart { .. } => "SessionStart",
            HookEvent::SessionEnd { .. } => "SessionEnd",
            HookEvent::UserPromptSubmit { .. } => "UserPromptSubmit",
            HookEvent::PreCompact => "PreCompact",
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum HookResult {
    Allow,
    Deny,
}

pub struct HookManager {
    project_root: PathBuf,
}

impl HookManager {
    pub fn new(project_root: &str) -> Self {
        Self {
            project_root: PathBuf::from(project_root),
        }
    }

    /// Execute a hook for the given event
    ///
    /// Returns:
    /// - Ok(HookResult::Allow) if hook succeeds or doesn't exist (exit code 0 or no hook)
    /// - Ok(HookResult::Deny) if hook denies (exit code 2)
    /// - Err if hook fails (other exit codes) or times out
    pub async fn execute(&self, event: &HookEvent, timeout_ms: Option<u64>) -> Result<HookResult> {
        let hook_path = self.get_hook_path(event);

        // If hook doesn't exist, allow by default
        if !hook_path.exists() {
            return Ok(HookResult::Allow);
        }

        // Serialize event to JSON
        let input_json = serde_json::to_string(event)?;

        // Spawn the hook process
        let mut child = Command::new(&hook_path)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .current_dir(&self.project_root)
            .spawn()?;

        // Write JSON input to stdin
        if let Some(mut stdin) = child.stdin.take() {
            stdin.write_all(input_json.as_bytes()).await?;
            stdin.flush().await?;
            drop(stdin); // Close stdin
        }

        // Wait for process with timeout
        let result = if let Some(ms) = timeout_ms {
            timeout(Duration::from_millis(ms), child.wait_with_output()).await
        } else {
            Ok(child.wait_with_output().await)
        };

        match result {
            Ok(Ok(output)) => {
                // Check exit code
                match output.status.code() {
                    Some(0) => Ok(HookResult::Allow),
                    Some(2) => Ok(HookResult::Deny),
                    Some(code) => Err(eyre!(
                        "Hook {} failed with exit code {}: {}",
                        event.hook_name(),
                        code,
                        String::from_utf8_lossy(&output.stderr)
                    )),
                    None => Err(eyre!("Hook {} terminated by signal", event.hook_name())),
                }
            }
            Ok(Err(e)) => Err(eyre!("Failed to execute hook {}: {}", event.hook_name(), e)),
            Err(_) => Err(eyre!("Hook {} timed out", event.hook_name())),
        }
    }

    fn get_hook_path(&self, event: &HookEvent) -> PathBuf {
        self.project_root
            .join(".owlen")
            .join("hooks")
            .join(event.hook_name())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn hook_event_serializes_correctly() {
        let event = HookEvent::PreToolUse {
            tool: "Read".to_string(),
            args: serde_json::json!({"path": "/tmp/test.txt"}),
        };

        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("\"event\":\"preToolUse\""));
        assert!(json.contains("\"tool\":\"Read\""));
    }

    #[test]
    fn hook_event_names() {
        assert_eq!(
            HookEvent::PreToolUse {
                tool: "Read".to_string(),
                args: serde_json::json!({}),
            }
            .hook_name(),
            "PreToolUse"
        );
        assert_eq!(
            HookEvent::SessionStart {
                session_id: "123".to_string(),
            }
            .hook_name(),
            "SessionStart"
        );
    }
}
crates/platform/hooks/tests/hooks.rs (new file, 160 lines)
@@ -0,0 +1,160 @@
use hooks::{HookEvent, HookManager, HookResult};
use std::fs;
use tempfile::tempdir;

#[tokio::test]
async fn pretooluse_can_deny_call() {
    let dir = tempdir().unwrap();
    let hooks_dir = dir.path().join(".owlen/hooks");
    fs::create_dir_all(&hooks_dir).unwrap();

    // Create a PreToolUse hook that denies Write operations
    let hook_script = r#"#!/bin/bash
INPUT=$(cat)
TOOL=$(echo "$INPUT" | grep -o '"tool":"[^"]*"' | cut -d'"' -f4)

if [ "$TOOL" = "Write" ]; then
exit 2 # Deny
fi
exit 0 # Allow
"#;
    let hook_path = hooks_dir.join("PreToolUse");
    fs::write(&hook_path, hook_script).unwrap();
    fs::set_permissions(&hook_path, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();

    let manager = HookManager::new(dir.path().to_str().unwrap());

    // Test Write tool (should be denied)
    let write_event = HookEvent::PreToolUse {
        tool: "Write".to_string(),
        args: serde_json::json!({"path": "/tmp/test.txt", "content": "hello"}),
    };
    let result = manager.execute(&write_event, Some(5000)).await.unwrap();
    assert_eq!(result, HookResult::Deny);

    // Test Read tool (should be allowed)
    let read_event = HookEvent::PreToolUse {
        tool: "Read".to_string(),
        args: serde_json::json!({"path": "/tmp/test.txt"}),
    };
    let result = manager.execute(&read_event, Some(5000)).await.unwrap();
    assert_eq!(result, HookResult::Allow);
}

#[tokio::test]
async fn posttooluse_runs_parallel() {
    let dir = tempdir().unwrap();
    let hooks_dir = dir.path().join(".owlen/hooks");
    fs::create_dir_all(&hooks_dir).unwrap();

    let output_file = dir.path().join("hook_output.txt");

    // Create a PostToolUse hook that writes to a file
    let hook_script = format!(
        r#"#!/bin/bash
INPUT=$(cat)
echo "Hook executed: $INPUT" >> {}
exit 0
"#,
        output_file.display()
    );
    let hook_path = hooks_dir.join("PostToolUse");
    fs::write(&hook_path, hook_script).unwrap();
    fs::set_permissions(&hook_path, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();

    let manager = HookManager::new(dir.path().to_str().unwrap());

    // Execute hook
    let event = HookEvent::PostToolUse {
        tool: "Read".to_string(),
        result: serde_json::json!({"success": true}),
    };
    let result = manager.execute(&event, Some(5000)).await.unwrap();
    assert_eq!(result, HookResult::Allow);

    // Verify hook ran
    let output = fs::read_to_string(&output_file).unwrap();
    assert!(output.contains("Hook executed"));
}

#[tokio::test]
async fn sessionstart_persists_env() {
    let dir = tempdir().unwrap();
    let hooks_dir = dir.path().join(".owlen/hooks");
    fs::create_dir_all(&hooks_dir).unwrap();

    let env_file = dir.path().join(".owlen/session.env");

    // Create a SessionStart hook that writes env vars to a file
    let hook_script = format!(
        r#"#!/bin/bash
cat > {} <<EOF
MY_VAR=hello
ANOTHER_VAR=world
EOF
exit 0
"#,
        env_file.display()
    );
    let hook_path = hooks_dir.join("SessionStart");
    fs::write(&hook_path, hook_script).unwrap();
    fs::set_permissions(&hook_path, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();

    let manager = HookManager::new(dir.path().to_str().unwrap());

    // Execute SessionStart hook
    let event = HookEvent::SessionStart {
        session_id: "test-123".to_string(),
    };
    let result = manager.execute(&event, Some(5000)).await.unwrap();
    assert_eq!(result, HookResult::Allow);

    // Verify env file was created
    assert!(env_file.exists());
    let content = fs::read_to_string(&env_file).unwrap();
    assert!(content.contains("MY_VAR=hello"));
    assert!(content.contains("ANOTHER_VAR=world"));
}

#[tokio::test]
async fn hook_timeout_works() {
    let dir = tempdir().unwrap();
    let hooks_dir = dir.path().join(".owlen/hooks");
    fs::create_dir_all(&hooks_dir).unwrap();

    // Create a hook that sleeps longer than the timeout
    let hook_script = r#"#!/bin/bash
sleep 10
exit 0
"#;
    let hook_path = hooks_dir.join("PreToolUse");
    fs::write(&hook_path, hook_script).unwrap();
    fs::set_permissions(&hook_path, std::os::unix::fs::PermissionsExt::from_mode(0o755)).unwrap();

    let manager = HookManager::new(dir.path().to_str().unwrap());

    let event = HookEvent::PreToolUse {
        tool: "Read".to_string(),
        args: serde_json::json!({"path": "/tmp/test.txt"}),
    };

    // Should timeout after 1000ms
    let result = manager.execute(&event, Some(1000)).await;
    assert!(result.is_err());
    let err_msg = result.unwrap_err().to_string();
    assert!(err_msg.contains("timeout") || err_msg.contains("timed out"));
}

#[tokio::test]
async fn hook_not_found_is_ok() {
    let dir = tempdir().unwrap();
    let manager = HookManager::new(dir.path().to_str().unwrap());

    // No hooks directory exists, should just return Allow
    let event = HookEvent::PreToolUse {
        tool: "Read".to_string(),
        args: serde_json::json!({"path": "/tmp/test.txt"}),
    };
    let result = manager.execute(&event, Some(5000)).await.unwrap();
    assert_eq!(result, HookResult::Allow);
}
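To show how the two platform crates are meant to compose, here is a rough, illustrative sketch (not part of this diff) of gating a Write tool call on both the permission mode and a PreToolUse hook; Ask decisions, which would require a user prompt, are left out for brevity.

use hooks::{HookEvent, HookManager, HookResult};
use permissions::{PermissionDecision, PermissionManager, Tool};

async fn may_write(perms: &PermissionManager, hooks: &HookManager, path: &str) -> bool {
    // The mode-based check runs first; a hook can still veto an allowed call.
    if perms.check(Tool::Write, Some(path)) == PermissionDecision::Deny {
        return false;
    }
    let event = HookEvent::PreToolUse {
        tool: "Write".to_string(),
        args: serde_json::json!({ "path": path }),
    };
    matches!(hooks.execute(&event, Some(5000)).await, Ok(HookResult::Allow))
}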
crates/platform/permissions/Cargo.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
[package]
name = "permissions"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
serde = { version = "1", features = ["derive"] }
thiserror = "1"
240  crates/platform/permissions/src/lib.rs  Normal file
@@ -0,0 +1,240 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum Tool {
    Read,
    Write,
    Edit,
    Bash,
    Grep,
    Glob,
    WebFetch,
    WebSearch,
    NotebookRead,
    NotebookEdit,
    SlashCommand,
    Task,
    TodoWrite,
    Mcp,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Action {
    Allow,
    Ask,
    Deny,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Mode {
    Plan,        // Read-only: Read/Grep/Glob allowed, others Ask
    AcceptEdits, // Auto-allow Edit/Write, Bash still Ask
    Code,        // Full access (all allowed)
}

impl Mode {
    pub fn from_str(s: &str) -> Option<Self> {
        match s.to_lowercase().as_str() {
            "plan" => Some(Mode::Plan),
            "acceptedits" | "accept_edits" => Some(Mode::AcceptEdits),
            "code" => Some(Mode::Code),
            _ => None,
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PermissionDecision {
    Allow,
    Ask,
    Deny,
}

#[derive(Debug, Clone)]
pub struct PermissionRule {
    pub tool: Tool,
    pub pattern: Option<String>,
    pub action: Action,
}

impl PermissionRule {
    fn matches(&self, tool: Tool, context: Option<&str>) -> bool {
        if self.tool != tool {
            return false;
        }

        match (&self.pattern, context) {
            (None, _) => true,        // No pattern means match all
            (Some(_), None) => false, // Pattern specified but no context
            (Some(pattern), Some(ctx)) => {
                // Support prefix matching with wildcard
                if pattern.ends_with('*') {
                    let prefix = pattern.trim_end_matches('*');
                    ctx.starts_with(prefix)
                } else {
                    // Exact match
                    pattern == ctx
                }
            }
        }
    }
}

#[derive(Debug)]
pub struct PermissionManager {
    mode: Mode,
    rules: Vec<PermissionRule>,
}

impl PermissionManager {
    pub fn new(mode: Mode) -> Self {
        Self {
            mode,
            rules: Vec::new(),
        }
    }

    pub fn add_rule(&mut self, tool: Tool, pattern: Option<String>, action: Action) {
        self.rules.push(PermissionRule {
            tool,
            pattern,
            action,
        });
    }

    pub fn check(&self, tool: Tool, context: Option<&str>) -> PermissionDecision {
        // Check explicit rules first (most specific to least specific)
        // Deny rules take precedence
        for rule in &self.rules {
            if rule.matches(tool, context) {
                return match rule.action {
                    Action::Allow => PermissionDecision::Allow,
                    Action::Ask => PermissionDecision::Ask,
                    Action::Deny => PermissionDecision::Deny,
                };
            }
        }

        // Fall back to mode-based defaults
        self.check_mode_default(tool)
    }

    fn check_mode_default(&self, tool: Tool) -> PermissionDecision {
        match self.mode {
            Mode::Plan => match tool {
                // Read-only tools are allowed in plan mode
                Tool::Read | Tool::Grep | Tool::Glob | Tool::NotebookRead => {
                    PermissionDecision::Allow
                }
                // Everything else requires asking
                _ => PermissionDecision::Ask,
            },
            Mode::AcceptEdits => match tool {
                // Read operations allowed
                Tool::Read | Tool::Grep | Tool::Glob | Tool::NotebookRead => {
                    PermissionDecision::Allow
                }
                // Edit/Write operations allowed
                Tool::Edit | Tool::Write | Tool::NotebookEdit => PermissionDecision::Allow,
                // Bash and other dangerous operations still require asking
                Tool::Bash | Tool::WebFetch | Tool::WebSearch | Tool::Mcp => PermissionDecision::Ask,
                // Utility tools allowed
                Tool::TodoWrite | Tool::SlashCommand | Tool::Task => PermissionDecision::Allow,
            },
            Mode::Code => {
                // Everything allowed in code mode
                PermissionDecision::Allow
            }
        }
    }

    pub fn set_mode(&mut self, mode: Mode) {
        self.mode = mode;
    }

    pub fn mode(&self) -> Mode {
        self.mode
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn pattern_exact_match() {
        let rule = PermissionRule {
            tool: Tool::Bash,
            pattern: Some("npm test".to_string()),
            action: Action::Allow,
        };

        assert!(rule.matches(Tool::Bash, Some("npm test")));
        assert!(!rule.matches(Tool::Bash, Some("npm install")));
        assert!(!rule.matches(Tool::Read, Some("npm test")));
    }

    #[test]
    fn pattern_prefix_match() {
        let rule = PermissionRule {
            tool: Tool::Bash,
            pattern: Some("npm test:*".to_string()),
            action: Action::Allow,
        };

        assert!(rule.matches(Tool::Bash, Some("npm test:unit")));
        assert!(rule.matches(Tool::Bash, Some("npm test:integration")));
        assert!(!rule.matches(Tool::Bash, Some("npm install")));
    }

    #[test]
    fn pattern_no_context() {
        let rule = PermissionRule {
            tool: Tool::Bash,
            pattern: Some("npm test".to_string()),
            action: Action::Allow,
        };

        // Pattern specified but no context provided
        assert!(!rule.matches(Tool::Bash, None));
    }

    #[test]
    fn no_pattern_matches_all() {
        let rule = PermissionRule {
            tool: Tool::Read,
            pattern: None,
            action: Action::Allow,
        };

        assert!(rule.matches(Tool::Read, Some("any context")));
        assert!(rule.matches(Tool::Read, None));
    }

    #[test]
    fn mcp_server_pattern_matching() {
        // Allow all tools from a specific server
        let rule = PermissionRule {
            tool: Tool::Mcp,
            pattern: Some("filesystem__*".to_string()),
            action: Action::Allow,
        };

        assert!(rule.matches(Tool::Mcp, Some("filesystem__read_file")));
        assert!(rule.matches(Tool::Mcp, Some("filesystem__write_file")));
        assert!(!rule.matches(Tool::Mcp, Some("database__query")));
    }

    #[test]
    fn mcp_exact_tool_matching() {
        // Allow only a specific tool from a server
        let rule = PermissionRule {
            tool: Tool::Mcp,
            pattern: Some("filesystem__read_file".to_string()),
            action: Action::Allow,
        };

        assert!(rule.matches(Tool::Mcp, Some("filesystem__read_file")));
        assert!(!rule.matches(Tool::Mcp, Some("filesystem__write_file")));
    }
}
85  crates/platform/permissions/tests/plan_mode.rs  Normal file
@@ -0,0 +1,85 @@
use permissions::{PermissionManager, Mode, Tool, PermissionDecision};

#[test]
fn plan_mode_blocks_write_bash_by_default() {
    let mgr = PermissionManager::new(Mode::Plan);

    // Plan mode should allow read operations
    assert_eq!(mgr.check(Tool::Read, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Grep, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Glob, None), PermissionDecision::Allow);

    // Plan mode should ask for write operations
    assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Ask);
    assert_eq!(mgr.check(Tool::Edit, None), PermissionDecision::Ask);

    // Plan mode should ask for Bash
    assert_eq!(mgr.check(Tool::Bash, None), PermissionDecision::Ask);
}

#[test]
fn accept_edits_mode_allows_edit_write() {
    let mgr = PermissionManager::new(Mode::AcceptEdits);

    // AcceptEdits mode should allow read operations
    assert_eq!(mgr.check(Tool::Read, None), PermissionDecision::Allow);

    // AcceptEdits mode should allow edit/write
    assert_eq!(mgr.check(Tool::Edit, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Allow);

    // But still ask for Bash
    assert_eq!(mgr.check(Tool::Bash, None), PermissionDecision::Ask);
}

#[test]
fn code_mode_allows_everything() {
    let mgr = PermissionManager::new(Mode::Code);

    assert_eq!(mgr.check(Tool::Read, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Write, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Edit, None), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Bash, None), PermissionDecision::Allow);
}

#[test]
fn bash_pattern_matching() {
    let mut mgr = PermissionManager::new(Mode::Plan);

    // Add a rule to allow "npm test"
    mgr.add_rule(Tool::Bash, Some("npm test".to_string()), permissions::Action::Allow);

    // Should allow the exact command
    assert_eq!(mgr.check(Tool::Bash, Some("npm test")), PermissionDecision::Allow);

    // Should still ask for other commands
    assert_eq!(mgr.check(Tool::Bash, Some("rm -rf /")), PermissionDecision::Ask);
}

#[test]
fn bash_prefix_matching() {
    let mut mgr = PermissionManager::new(Mode::Plan);

    // Add a rule to allow "npm test:*" (prefix match)
    mgr.add_rule(Tool::Bash, Some("npm test:*".to_string()), permissions::Action::Allow);

    // Should allow commands matching the prefix
    assert_eq!(mgr.check(Tool::Bash, Some("npm test:unit")), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Bash, Some("npm test:integration")), PermissionDecision::Allow);

    // Should not allow non-matching commands
    assert_eq!(mgr.check(Tool::Bash, Some("npm install")), PermissionDecision::Ask);
}

#[test]
fn deny_rules_take_precedence() {
    let mut mgr = PermissionManager::new(Mode::Code);

    // Even in Code mode, we can deny specific operations
    mgr.add_rule(Tool::Bash, Some("rm -rf*".to_string()), permissions::Action::Deny);

    assert_eq!(mgr.check(Tool::Bash, Some("rm -rf /")), PermissionDecision::Deny);

    // But other commands are still allowed
    assert_eq!(mgr.check(Tool::Bash, Some("ls")), PermissionDecision::Allow);
}
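Taken together, explicit rules and mode defaults compose as in the following sketch, which uses only the `permissions` API introduced above; the rule strings and config value are illustrative, not taken from the diff.

use permissions::{Action, Mode, PermissionDecision, PermissionManager, Tool};

fn permission_flow() {
    // Start in the mode named by a config string; fall back to Plan.
    let mode = Mode::from_str("accept_edits").unwrap_or(Mode::Plan);
    let mut mgr = PermissionManager::new(mode);

    // Project-level rules are checked before the mode defaults.
    mgr.add_rule(Tool::Bash, Some("cargo *".to_string()), Action::Allow);
    mgr.add_rule(Tool::Bash, Some("rm -rf*".to_string()), Action::Deny);

    assert_eq!(mgr.check(Tool::Bash, Some("cargo build")), PermissionDecision::Allow);
    assert_eq!(mgr.check(Tool::Bash, Some("rm -rf /")), PermissionDecision::Deny);
    // No rule matches, so the AcceptEdits default for Bash (Ask) applies.
    assert_eq!(mgr.check(Tool::Bash, Some("curl example.com")), PermissionDecision::Ask);

    // Switching modes at runtime changes only the fallback behaviour.
    mgr.set_mode(Mode::Code);
    assert_eq!(mgr.check(Tool::Bash, Some("curl example.com")), PermissionDecision::Allow);
}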
14  crates/tools/bash/Cargo.toml  Normal file
@@ -0,0 +1,14 @@
[package]
name = "tools-bash"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
tokio = { version = "1.39", features = ["process", "io-util", "time", "sync"] }
color-eyre = "0.6"
tempfile = "3.23.0"

[dev-dependencies]
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
170  crates/tools/bash/src/lib.rs  Normal file
@@ -0,0 +1,170 @@
use color_eyre::eyre::{Result, eyre};
use std::process::Stdio;
use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader};
use tokio::process::{Child, Command};
use tokio::sync::Mutex;
use tokio::time::{timeout, Duration};

const MAX_OUTPUT_LINES: usize = 2000;
const DEFAULT_TIMEOUT_MS: u64 = 120000; // 2 minutes
const COMMAND_DELIMITER: &str = "___OWLEN_CMD_END___";

#[derive(Debug, Clone)]
pub struct CommandOutput {
    pub stdout: String,
    pub stderr: String,
    pub exit_code: i32,
    pub success: bool,
}

pub struct BashSession {
    child: Mutex<Child>,
}

impl BashSession {
    /// Create a new persistent bash session
    pub async fn new() -> Result<Self> {
        let child = Command::new("bash")
            .arg("--norc")
            .arg("--noprofile")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .kill_on_drop(true)
            .spawn()?;

        // Verify the process started
        if child.stdin.is_none() || child.stdout.is_none() || child.stderr.is_none() {
            return Err(eyre!("Failed to capture bash process stdio"));
        }

        Ok(Self {
            child: Mutex::new(child),
        })
    }

    /// Execute a command in the persistent bash session
    ///
    /// # Arguments
    /// * `command` - The bash command to execute
    /// * `timeout_ms` - Optional timeout in milliseconds (default: 2 minutes)
    pub async fn execute(&mut self, command: &str, timeout_ms: Option<u64>) -> Result<CommandOutput> {
        let timeout_duration = Duration::from_millis(timeout_ms.unwrap_or(DEFAULT_TIMEOUT_MS));

        let result = timeout(timeout_duration, self.execute_internal(command)).await;

        match result {
            Ok(output) => output,
            Err(_) => Err(eyre!("Command timed out after {}ms", timeout_duration.as_millis())),
        }
    }

    async fn execute_internal(&mut self, command: &str) -> Result<CommandOutput> {
        let mut child = self.child.lock().await;

        // Take ownership of stdio handles
        let mut stdin = child.stdin.take().ok_or_else(|| eyre!("No stdin"))?;
        let stdout = child.stdout.take().ok_or_else(|| eyre!("No stdout"))?;
        let stderr = child.stderr.take().ok_or_else(|| eyre!("No stderr"))?;

        // Write command with delimiter and exit code capture
        let full_command = format!(
            "{}\necho $? > /tmp/owlen_exit_code_$$.tmp\necho '{}'\n",
            command, COMMAND_DELIMITER
        );
        stdin.write_all(full_command.as_bytes()).await?;
        stdin.flush().await?;

        // Read stdout until delimiter
        let mut stdout_reader = BufReader::new(stdout);
        let mut stdout_lines = Vec::new();
        let mut line = String::new();

        loop {
            line.clear();
            let n = stdout_reader.read_line(&mut line).await?;
            if n == 0 {
                return Err(eyre!("Bash process terminated unexpectedly"));
            }

            if line.trim() == COMMAND_DELIMITER {
                break;
            }

            stdout_lines.push(line.clone());

            // Truncate if too many lines
            if stdout_lines.len() > MAX_OUTPUT_LINES {
                stdout_lines.push("<<<...output truncated...>>>\n".to_string());
                break;
            }
        }

        // Read stderr (non-blocking, best effort)
        let mut stderr_reader = BufReader::new(stderr);
        let mut stderr_lines = Vec::new();
        let mut stderr_line = String::new();

        // Try to read stderr without blocking indefinitely
        while let Ok(result) = timeout(Duration::from_millis(100), stderr_reader.read_line(&mut stderr_line)).await {
            match result {
                Ok(n) if n > 0 => {
                    stderr_lines.push(stderr_line.clone());
                    stderr_line.clear();

                    if stderr_lines.len() > MAX_OUTPUT_LINES {
                        stderr_lines.push("<<<...stderr truncated...>>>\n".to_string());
                        break;
                    }
                }
                _ => break,
            }
        }

        // Read exit code
        let exit_code_cmd = "cat /tmp/owlen_exit_code_$$.tmp 2>/dev/null; rm -f /tmp/owlen_exit_code_$$.tmp\n";
        stdin.write_all(exit_code_cmd.as_bytes()).await?;
        stdin.flush().await?;

        let mut exit_line = String::new();
        stdout_reader.read_line(&mut exit_line).await?;

        let exit_code: i32 = exit_line.trim().parse().unwrap_or(0);

        // Restore stdio handles
        child.stdin = Some(stdin);
        child.stdout = Some(stdout_reader.into_inner());
        child.stderr = Some(stderr_reader.into_inner());

        Ok(CommandOutput {
            stdout: stdout_lines.join(""),
            stderr: stderr_lines.join(""),
            exit_code,
            success: exit_code == 0,
        })
    }

    /// Close the bash session
    pub async fn close(self) -> Result<()> {
        let mut child = self.child.into_inner();

        if let Some(mut stdin) = child.stdin.take() {
            let _ = stdin.write_all(b"exit\n").await;
            let _ = stdin.flush().await;
        }

        let _ = child.wait().await?;
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn can_create_session() {
        let session = BashSession::new().await;
        assert!(session.is_ok());
    }
}
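As a usage sketch (not part of the diff), a consumer of `tools-bash` might drive the session as below; it assumes the caller enables tokio's `macros` and `rt-multi-thread` features, which this crate only lists under dev-dependencies.

use tools_bash::BashSession;

#[tokio::main]
async fn main() -> color_eyre::eyre::Result<()> {
    let mut session = BashSession::new().await?;

    // State (cwd, exported variables) persists because the same bash
    // process is reused across calls.
    session.execute("cd /tmp && export NAME=owlen", None).await?;
    let out = session.execute("echo \"$NAME in $(pwd)\"", Some(2_000)).await?;

    if out.success {
        print!("{}", out.stdout);
    } else {
        eprintln!("exit code {}: {}", out.exit_code, out.stderr);
    }

    session.close().await
}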
107  crates/tools/bash/tests/bash_session.rs  Normal file
@@ -0,0 +1,107 @@
use tools_bash::BashSession;

#[tokio::test]
async fn bash_persists_env_between_calls() {
    let mut session = BashSession::new().await.unwrap();

    // Set an environment variable
    let output1 = session.execute("export TEST_VAR=hello", None).await.unwrap();
    assert!(output1.success);

    // Verify it persists in next command
    let output2 = session.execute("echo $TEST_VAR", None).await.unwrap();
    assert!(output2.success);
    assert!(output2.stdout.contains("hello"));

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_persists_cwd_between_calls() {
    let mut session = BashSession::new().await.unwrap();

    // Change to /tmp
    let output1 = session.execute("cd /tmp", None).await.unwrap();
    assert!(output1.success);

    // Verify cwd persists
    let output2 = session.execute("pwd", None).await.unwrap();
    assert!(output2.success);
    assert!(output2.stdout.trim().ends_with("/tmp"));

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_command_timeout() {
    let mut session = BashSession::new().await.unwrap();

    // Command that sleeps for 5 seconds, but with 1 second timeout
    let result = session.execute("sleep 5", Some(1000)).await;

    assert!(result.is_err());
    let err_msg = result.unwrap_err().to_string();
    assert!(err_msg.contains("timeout") || err_msg.contains("timed out"));

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_output_truncation() {
    let mut session = BashSession::new().await.unwrap();

    // Generate a lot of output
    let output = session
        .execute("for i in {1..100}; do echo 'Line '$i; done", None)
        .await
        .unwrap();

    assert!(output.success);
    // Should have output but might be truncated
    assert!(!output.stdout.is_empty());

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_command_failure_returns_error_code() {
    let mut session = BashSession::new().await.unwrap();

    let output = session.execute("false", None).await.unwrap();
    assert!(!output.success);
    assert_eq!(output.exit_code, 1);

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_stderr_captured() {
    let mut session = BashSession::new().await.unwrap();

    let output = session
        .execute("echo 'error message' >&2", None)
        .await
        .unwrap();

    assert!(output.success);
    assert!(output.stderr.contains("error message"));

    session.close().await.unwrap();
}

#[tokio::test]
async fn bash_multiple_commands_in_sequence() {
    let mut session = BashSession::new().await.unwrap();

    // Set a variable
    session.execute("X=1", None).await.unwrap();

    // Increment it
    session.execute("X=$((X + 1))", None).await.unwrap();

    // Verify final value
    let output = session.execute("echo $X", None).await.unwrap();
    assert!(output.stdout.contains("2"));

    session.close().await.unwrap();
}
18  crates/tools/fs/Cargo.toml  Normal file
@@ -0,0 +1,18 @@
[package]
name = "tools-fs"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
ignore = "0.4"
walkdir = "2.5"
globset = "0.4"
grep-regex = "0.1"
grep-searcher = "0.1"
color-eyre = "0.6"
similar = "2.7"

[dev-dependencies]
tempfile = "3.23.0"
130  crates/tools/fs/src/lib.rs  Normal file
@@ -0,0 +1,130 @@
use color_eyre::eyre::{Result, eyre};
use ignore::WalkBuilder;
use grep_regex::RegexMatcher;
use grep_searcher::{sinks::UTF8, SearcherBuilder};
use globset::Glob;
use std::path::Path;

pub fn read_file(path: &str) -> Result<String> {
    Ok(std::fs::read_to_string(path)?)
}

pub fn write_file(path: &str, content: &str) -> Result<()> {
    // Create parent directories if they don't exist
    if let Some(parent) = Path::new(path).parent() {
        std::fs::create_dir_all(parent)?;
    }
    std::fs::write(path, content)?;
    Ok(())
}

pub fn edit_file(path: &str, old_string: &str, new_string: &str) -> Result<()> {
    // Read the current file content
    let content = std::fs::read_to_string(path)?;

    // Find all occurrences of old_string
    let matches: Vec<_> = content.match_indices(old_string).collect();

    match matches.len() {
        0 => Err(eyre!("String to replace not found in file: '{}'", old_string)),
        1 => {
            // Exactly one match - safe to replace
            let new_content = content.replace(old_string, new_string);

            // Create a backup before modifying
            let backup_path = format!("{}.backup", path);
            std::fs::write(&backup_path, &content)?;

            // Write the new content
            match std::fs::write(path, new_content) {
                Ok(_) => {
                    // Success - remove backup
                    let _ = std::fs::remove_file(&backup_path);
                    Ok(())
                }
                Err(e) => {
                    // Failed to write - restore from backup
                    let _ = std::fs::rename(&backup_path, path);
                    Err(e.into())
                }
            }
        }
        n => Err(eyre!(
            "Ambiguous replacement: found {} occurrences of '{}' in file. Please make the old_string unique.",
            n,
            old_string
        )),
    }
}

pub fn glob_list(pattern: &str) -> Result<Vec<String>> {
    let glob = Glob::new(pattern)?.compile_matcher();

    // Extract the literal prefix to determine the root directory
    // Find the position of the first glob metacharacter
    let first_glob = pattern
        .find(|c| matches!(c, '*' | '?' | '[' | '{'))
        .unwrap_or(pattern.len());

    // Find the last directory separator before the first glob metacharacter
    let root = if first_glob > 0 {
        let prefix = &pattern[..first_glob];
        prefix.rfind('/').map(|pos| &prefix[..pos]).unwrap_or(".")
    } else {
        "."
    };

    let mut out = Vec::new();
    for result in WalkBuilder::new(root)
        .standard_filters(true)
        .git_ignore(true)
        .git_global(false)
        .git_exclude(false)
        .require_git(false)
        .build()
    {
        let entity = result?;
        if entity.file_type().map(|filetype| filetype.is_file()).unwrap_or(false) {
            if let Some(path) = entity.path().to_str() {
                // Match against the glob pattern
                if glob.is_match(path) {
                    out.push(path.to_string());
                }
            }
        }
    }
    Ok(out)
}

pub fn grep(root: &str, pattern: &str) -> Result<Vec<(String, usize, String)>> {
    let matcher = RegexMatcher::new_line_matcher(pattern)?;
    let mut searcher = SearcherBuilder::new().line_number(true).build();

    let mut results = Vec::new();
    for result in WalkBuilder::new(root)
        .standard_filters(true)
        .git_ignore(true)
        .git_global(false)
        .git_exclude(false)
        .require_git(false)
        .build()
    {
        let entity = result?;
        if !entity.file_type().map(|filetype| filetype.is_file()).unwrap_or(false) { continue; }
        let path = entity.path().to_path_buf();
        let mut line_hits: Vec<(usize, String)> = Vec::new();
        let sink = UTF8(|line_number, line| {
            line_hits.push((line_number as usize, line.to_string()));
            Ok(true)
        });
        let _ = searcher.search_path(&matcher, &path, sink);
        if !line_hits.is_empty() {
            let p = path.to_string_lossy().to_string();
            for (line_number, text) in line_hits {
                results.push((p.clone(), line_number, text));
            }
        }
    }

    Ok(results)
}
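A short sketch of how the `tools-fs` entry points combine; the paths, patterns, and strings below are illustrative only.

use tools_fs::{edit_file, glob_list, grep};

fn fs_flow() -> color_eyre::eyre::Result<()> {
    // List Rust sources; .gitignore is honoured by the `ignore` walker.
    let files = glob_list("src/**/*.rs")?;

    // Report every TODO with its file, line number, and line text.
    for (path, line, text) in grep(".", "TODO")? {
        println!("{path}:{line}: {}", text.trim_end());
    }

    // Replace a snippet that must be unique in the file; edit_file errors
    // if the match is missing or ambiguous.
    if let Some(first) = files.first() {
        let _ = edit_file(first, "old_name", "new_name");
    }
    Ok(())
}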
104  crates/tools/fs/tests/fs_tools.rs  Normal file
@@ -0,0 +1,104 @@
use tools_fs::{read_file, glob_list, grep, write_file, edit_file};
use std::fs;
use tempfile::tempdir;

#[test]
fn read_and_glob_respect_gitignore() {
    let dir = tempdir().unwrap();
    let root = dir.path();
    fs::write(root.join("a.txt"), "hello").unwrap();
    fs::create_dir(root.join("secret")).unwrap();
    fs::write(root.join("secret/secret.txt"), "token=123").unwrap();
    fs::write(root.join(".gitignore"), "secret/\n").unwrap();

    let pattern = format!("{}/**/*", root.display());
    let files = glob_list(&pattern).unwrap();
    assert!(files.iter().any(|p| p.ends_with("a.txt")));
    assert!(!files.iter().any(|p| p.contains("secret.txt")));
    assert_eq!(read_file(root.join("a.txt").to_str().unwrap()).unwrap(), "hello");
}

#[test]
fn grep_finds_lines() {
    let dir = tempdir().unwrap();
    let root = dir.path();
    fs::write(root.join("a.rs"), "fn main() { println!(\"hello\"); }").unwrap();

    let hits = grep(root.to_str().unwrap(), "hello").unwrap();
    assert!(hits.iter().any(|(_p, _ln, text)| text.contains("hello")));
}

#[test]
fn write_file_creates_new_file() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("new.txt");

    write_file(file_path.to_str().unwrap(), "new content").unwrap();

    assert_eq!(read_file(file_path.to_str().unwrap()).unwrap(), "new content");
}

#[test]
fn write_file_overwrites_existing() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("existing.txt");
    fs::write(&file_path, "old content").unwrap();

    write_file(file_path.to_str().unwrap(), "new content").unwrap();

    assert_eq!(read_file(file_path.to_str().unwrap()).unwrap(), "new content");
}

#[test]
fn edit_file_replaces_exact_match() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    let original = "line 1\nline 2\nline 3\n";
    fs::write(&file_path, original).unwrap();

    edit_file(file_path.to_str().unwrap(), "line 2", "modified line 2").unwrap();

    let result = read_file(file_path.to_str().unwrap()).unwrap();
    assert_eq!(result, "line 1\nmodified line 2\nline 3\n");
}

#[test]
fn edit_file_replaces_multiline() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    let original = "line 1\nline 2\nline 3\nline 4\n";
    fs::write(&file_path, original).unwrap();

    edit_file(file_path.to_str().unwrap(), "line 2\nline 3", "new content").unwrap();

    let result = read_file(file_path.to_str().unwrap()).unwrap();
    assert_eq!(result, "line 1\nnew content\nline 4\n");
}

#[test]
fn edit_file_fails_on_ambiguous_match() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    let original = "duplicate\nsome text\nduplicate\n";
    fs::write(&file_path, original).unwrap();

    let result = edit_file(file_path.to_str().unwrap(), "duplicate", "changed");

    assert!(result.is_err());
    let err_msg = result.unwrap_err().to_string();
    assert!(err_msg.contains("Ambiguous") || err_msg.contains("multiple") || err_msg.contains("occurrences"));
}

#[test]
fn edit_file_fails_on_no_match() {
    let dir = tempdir().unwrap();
    let file_path = dir.path().join("test.txt");
    let original = "line 1\nline 2\n";
    fs::write(&file_path, original).unwrap();

    let result = edit_file(file_path.to_str().unwrap(), "nonexistent", "changed");

    assert!(result.is_err());
    let err_msg = result.unwrap_err().to_string();
    assert!(err_msg.contains("not found") || err_msg.contains("String to replace"));
}
15  crates/tools/slash/Cargo.toml  Normal file
@@ -0,0 +1,15 @@
[package]
name = "tools-slash"
version = "0.1.0"
edition.workspace = true
license.workspace = true
rust-version.workspace = true

[dependencies]
serde = { version = "1", features = ["derive"] }
serde_yaml = "0.9"
color-eyre = "0.6"
regex = "1.12"

[dev-dependencies]
tempfile = "3.23.0"
169  crates/tools/slash/src/lib.rs  Normal file
@@ -0,0 +1,169 @@
use color_eyre::eyre::{Result, eyre};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SlashCommandMetadata {
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub author: Option<String>,
    #[serde(default)]
    pub tags: Option<Vec<String>>,
    #[serde(default)]
    pub version: Option<String>,
    #[serde(flatten)]
    pub extra: HashMap<String, serde_yaml::Value>,
}

#[derive(Debug, Clone)]
pub struct SlashCommand {
    pub description: Option<String>,
    pub author: Option<String>,
    pub tags: Option<Vec<String>>,
    pub version: Option<String>,
    pub body: String,
}

impl SlashCommand {
    /// Resolve file references (@path) in the command body
    pub fn resolve_file_refs(&self) -> Result<String> {
        let re = Regex::new(r"@([^\s]+)").unwrap();
        let mut result = self.body.clone();

        for cap in re.captures_iter(&self.body.clone()) {
            let full_match = &cap[0];
            let file_path = &cap[1];

            // Read the file
            match std::fs::read_to_string(file_path) {
                Ok(content) => {
                    result = result.replace(full_match, &content);
                }
                Err(e) => {
                    return Err(eyre!("Failed to read file '{}': {}", file_path, e));
                }
            }
        }

        Ok(result)
    }
}

/// Parse a slash command from its content
///
/// # Arguments
/// * `content` - The full content of the slash command file (with optional frontmatter)
/// * `args` - Arguments to substitute ($ARGUMENTS, $1, $2, etc.)
pub fn parse_slash_command(content: &str, args: &[&str]) -> Result<SlashCommand> {
    // Check if content starts with frontmatter (---)
    let (metadata, body) = if content.trim_start().starts_with("---") {
        parse_with_frontmatter(content)?
    } else {
        (None, content.to_string())
    };

    // Perform argument substitution
    let body_with_args = substitute_arguments(&body, args);

    Ok(SlashCommand {
        description: metadata.as_ref().and_then(|m| m.description.clone()),
        author: metadata.as_ref().and_then(|m| m.author.clone()),
        tags: metadata.as_ref().and_then(|m| m.tags.clone()),
        version: metadata.as_ref().and_then(|m| m.version.clone()),
        body: body_with_args,
    })
}

fn parse_with_frontmatter(content: &str) -> Result<(Option<SlashCommandMetadata>, String)> {
    let lines: Vec<&str> = content.lines().collect();

    // Find the end of frontmatter
    let mut end_idx = None;
    for (i, line) in lines.iter().enumerate().skip(1) {
        if line.trim() == "---" {
            end_idx = Some(i);
            break;
        }
    }

    match end_idx {
        Some(idx) => {
            // Extract frontmatter YAML
            let frontmatter_lines = &lines[1..idx];
            let frontmatter_str = frontmatter_lines.join("\n");

            // Parse YAML
            let metadata: SlashCommandMetadata = serde_yaml::from_str(&frontmatter_str)
                .map_err(|e| eyre!("Failed to parse frontmatter YAML: {}", e))?;

            // Extract body
            let body = lines[(idx + 1)..].join("\n");

            Ok((Some(metadata), body))
        }
        None => {
            // Malformed frontmatter, treat entire content as body
            Ok((None, content.to_string()))
        }
    }
}

fn substitute_arguments(body: &str, args: &[&str]) -> String {
    let mut result = body.to_string();

    // Replace $ARGUMENTS with all args joined by space
    let all_args = args.join(" ");
    result = result.replace("$ARGUMENTS", &all_args);

    // Replace positional arguments $1, $2, $3, etc.
    for (i, arg) in args.iter().enumerate() {
        let placeholder = format!("${}", i + 1);
        result = result.replace(&placeholder, arg);
    }

    result
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn substitute_arguments_works() {
        let body = "Args: $ARGUMENTS, First: $1, Second: $2";
        let result = substitute_arguments(body, &["hello", "world"]);

        assert!(result.contains("Args: hello world"));
        assert!(result.contains("First: hello"));
        assert!(result.contains("Second: world"));
    }

    #[test]
    fn substitute_arguments_empty() {
        let body = "Args: $ARGUMENTS, First: $1";
        let result = substitute_arguments(body, &[]);

        assert!(result.contains("Args: ,"));
        assert!(result.contains("First: $1")); // Unchanged
    }

    #[test]
    fn parse_frontmatter_extracts_metadata() {
        let content = r#"---
description: "Test"
author: "Me"
---
Body content
"#;

        let (metadata, body) = parse_with_frontmatter(content).unwrap();

        assert!(metadata.is_some());
        let m = metadata.unwrap();
        assert_eq!(m.description, Some("Test".to_string()));
        assert_eq!(m.author, Some("Me".to_string()));
        assert_eq!(body.trim(), "Body content");
    }
}
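For context, a sketch of the intended call sequence for `tools-slash`; the command-file name here is a hypothetical example, not something this diff defines.

use tools_slash::parse_slash_command;

fn run_slash_command() -> color_eyre::eyre::Result<()> {
    // Hypothetical command file with YAML frontmatter followed by a body.
    let content = std::fs::read_to_string("review.md")?;

    // Positional args fill $1/$2; the full list replaces $ARGUMENTS.
    let cmd = parse_slash_command(&content, &["src/lib.rs", "strict"])?;

    // @path references in the body are expanded to the files' contents.
    let prompt = cmd.resolve_file_refs()?;
    println!("{}", cmd.description.unwrap_or_default());
    println!("{prompt}");
    Ok(())
}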
109  crates/tools/slash/tests/slash_command.rs  Normal file
@@ -0,0 +1,109 @@
use tools_slash::parse_slash_command;
use std::fs;
use tempfile::tempdir;

#[test]
fn slash_parse_frontmatter_and_args() {
    let content = r#"---
description: "Test command"
author: "Test Author"
---
This is the command body with $ARGUMENTS
First arg: $1
Second arg: $2
"#;

    let cmd = parse_slash_command(content, &["arg1", "arg2"]).unwrap();

    assert_eq!(cmd.description, Some("Test command".to_string()));
    assert_eq!(cmd.author, Some("Test Author".to_string()));
    assert!(cmd.body.contains("arg1 arg2")); // $ARGUMENTS replaced
    assert!(cmd.body.contains("First arg: arg1")); // $1 replaced
    assert!(cmd.body.contains("Second arg: arg2")); // $2 replaced
}

#[test]
fn slash_parse_no_frontmatter() {
    let content = "Simple command without frontmatter";

    let cmd = parse_slash_command(content, &[]).unwrap();

    assert_eq!(cmd.description, None);
    assert_eq!(cmd.author, None);
    assert_eq!(cmd.body.trim(), "Simple command without frontmatter");
}

#[test]
fn slash_file_refs() {
    let dir = tempdir().unwrap();
    let test_file = dir.path().join("test.txt");
    fs::write(&test_file, "File content here").unwrap();

    let content = format!("Check this file: @{}", test_file.display());

    let cmd = parse_slash_command(&content, &[]).unwrap();
    let resolved = cmd.resolve_file_refs().unwrap();

    assert!(resolved.contains("File content here"));
    assert!(!resolved.contains(&format!("@{}", test_file.display())));
}

#[test]
fn slash_arguments_substitution() {
    let content = "All args: $ARGUMENTS\nFirst: $1\nSecond: $2\nThird: $3";

    let cmd = parse_slash_command(content, &["hello", "world"]).unwrap();

    assert!(cmd.body.contains("All args: hello world"));
    assert!(cmd.body.contains("First: hello"));
    assert!(cmd.body.contains("Second: world"));
    assert!(cmd.body.contains("Third: $3")); // No third arg, should remain
}

#[test]
fn slash_multiple_file_refs() {
    let dir = tempdir().unwrap();
    let file1 = dir.path().join("file1.txt");
    let file2 = dir.path().join("file2.txt");
    fs::write(&file1, "Content 1").unwrap();
    fs::write(&file2, "Content 2").unwrap();

    let content = format!("File 1: @{}\nFile 2: @{}", file1.display(), file2.display());

    let cmd = parse_slash_command(&content, &[]).unwrap();
    let resolved = cmd.resolve_file_refs().unwrap();

    assert!(resolved.contains("Content 1"));
    assert!(resolved.contains("Content 2"));
}

#[test]
fn slash_empty_args_leaves_placeholders() {
    let content = "Args: $ARGUMENTS, First: $1, Second: $2";

    let cmd = parse_slash_command(content, &[]).unwrap();

    // With no args, $ARGUMENTS becomes empty, but positional args remain
    assert!(cmd.body.contains("Args: ,"));
    assert!(cmd.body.contains("First: $1"));
    assert!(cmd.body.contains("Second: $2"));
}

#[test]
fn slash_complex_frontmatter() {
    let content = r#"---
description: "Multi-line
description"
tags:
- test
- example
version: 1.0
---
Command body
"#;

    let cmd = parse_slash_command(content, &[]).unwrap();

    assert!(cmd.description.is_some());
    assert!(cmd.description.as_ref().unwrap().contains("Multi-line"));
}
Binary files not shown in this comparison: images/help.png (103 KiB) and four other images (50 KiB, 26 KiB, 68 KiB, 51 KiB) removed in this range.