owlen/crates/owlen-core/src/storage.rs
vikingowl 235f84fa19 Integrate core functionality for tools, MCP, and enhanced session management
Adds consent management for tool execution, input validation, sandboxed process execution, and MCP server integration. Updates session management to support tool use, conversation persistence, and streaming responses.

Major additions:
- Database migrations for conversations and secure storage
- Encryption and credential management infrastructure
- Extensible tool system with code execution and web search
- Consent management and validation systems
- Sandboxed process execution
- MCP server integration

Infrastructure changes:
- Module registration and workspace dependencies
- ToolCall type and tool-related Message methods
- Privacy, security, and tool configuration structures
- Database-backed conversation persistence
- Tool call tracking in conversations

Provider and UI updates:
- Ollama provider updates for tool support and new Role types
- TUI chat and code app updates for async initialization
- CLI updates for new SessionController API
- Configuration documentation updates
- CHANGELOG updates

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-10-06 18:36:42 +02:00


//! Session persistence and storage management backed by SQLite.
//!
//! Conversations are stored as JSON payloads in a `conversations` table, and small
//! secrets are kept AES-256-GCM encrypted in a `secure_items` table.
use crate::types::Conversation;
use crate::{Error, Result};
use aes_gcm::aead::{Aead, KeyInit};
use aes_gcm::{Aes256Gcm, Nonce};
use ring::rand::{SecureRandom, SystemRandom};
use serde::{Deserialize, Serialize};
use sqlx::sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions, SqliteSynchronous};
use sqlx::{Pool, Row, Sqlite};
use std::fs;
use std::io::IsTerminal;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use uuid::Uuid;
/// Metadata about a saved session
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SessionMeta {
/// Conversation ID
pub id: Uuid,
/// Optional session name
pub name: Option<String>,
/// Optional AI-generated description
pub description: Option<String>,
/// Number of messages in the conversation
pub message_count: usize,
/// Model used for the conversation
pub model: String,
/// When the session was created
pub created_at: SystemTime,
/// When the session was last updated
pub updated_at: SystemTime,
}
/// Storage manager for persisting conversations in SQLite
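///
/// # Example
///
/// A rough usage sketch of the API in this module. The `owlen_core` paths are
/// assumed from the crate layout, and the block is marked `ignore` so it is not
/// built as a doc-test.
///
/// ```ignore
/// use owlen_core::storage::StorageManager;
/// use owlen_core::types::Conversation;
///
/// async fn demo(conversation: Conversation) -> owlen_core::Result<()> {
///     // Open (or create) the default database and persist one conversation.
///     let storage = StorageManager::new().await?;
///     storage
///         .save_conversation(&conversation, Some("My session".to_string()))
///         .await?;
///
///     // List saved sessions and reload the most recently updated one.
///     let sessions = storage.list_sessions().await?;
///     let restored = storage.load_conversation(sessions[0].id).await?;
///     assert_eq!(restored.id, conversation.id);
///     Ok(())
/// }
/// ```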
pub struct StorageManager {
pool: Pool<Sqlite>,
database_path: PathBuf,
}
impl StorageManager {
/// Create a new storage manager using the default database path
pub async fn new() -> Result<Self> {
let db_path = Self::default_database_path()?;
Self::with_database_path(db_path).await
}
/// Create a storage manager using the provided database path
pub async fn with_database_path(database_path: PathBuf) -> Result<Self> {
if let Some(parent) = database_path.parent() {
if !parent.exists() {
std::fs::create_dir_all(parent).map_err(|e| {
Error::Storage(format!(
"Failed to create database directory {parent:?}: {e}"
))
})?;
}
}
let options = SqliteConnectOptions::from_str(&format!(
"sqlite://{}",
database_path
.to_str()
.ok_or_else(|| Error::Storage("Invalid database path".to_string()))?
))
.map_err(|e| Error::Storage(format!("Invalid database URL: {e}")))?
.create_if_missing(true)
.journal_mode(SqliteJournalMode::Wal)
.synchronous(SqliteSynchronous::Normal);
let pool = SqlitePoolOptions::new()
.max_connections(5)
.connect_with(options)
.await
.map_err(|e| Error::Storage(format!("Failed to connect to database: {e}")))?;
sqlx::migrate!("./migrations")
.run(&pool)
.await
.map_err(|e| Error::Storage(format!("Failed to run database migrations: {e}")))?;
let storage = Self {
pool,
database_path,
};
storage.try_migrate_legacy_sessions().await?;
Ok(storage)
}
/// Save a conversation. Existing entries are updated in-place.
pub async fn save_conversation(
&self,
conversation: &Conversation,
name: Option<String>,
) -> Result<()> {
self.save_conversation_with_description(conversation, name, None)
.await
}
/// Save a conversation with an optional description override
pub async fn save_conversation_with_description(
&self,
conversation: &Conversation,
name: Option<String>,
description: Option<String>,
) -> Result<()> {
let mut serialized = conversation.clone();
if name.is_some() {
serialized.name = name.clone();
}
if description.is_some() {
serialized.description = description.clone();
}
let data = serde_json::to_string(&serialized)
.map_err(|e| Error::Storage(format!("Failed to serialize conversation: {e}")))?;
let created_at = to_epoch_seconds(serialized.created_at);
let updated_at = to_epoch_seconds(serialized.updated_at);
let message_count = serialized.messages.len() as i64;
sqlx::query(
r#"
INSERT INTO conversations (
id,
name,
description,
model,
message_count,
created_at,
updated_at,
data
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)
ON CONFLICT(id) DO UPDATE SET
name = excluded.name,
description = excluded.description,
model = excluded.model,
message_count = excluded.message_count,
created_at = excluded.created_at,
updated_at = excluded.updated_at,
data = excluded.data
"#,
)
.bind(serialized.id.to_string())
.bind(name.or(serialized.name.clone()))
.bind(description.or(serialized.description.clone()))
.bind(&serialized.model)
.bind(message_count)
.bind(created_at)
.bind(updated_at)
.bind(data)
.execute(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to save conversation: {e}")))?;
Ok(())
}
/// Load a conversation by ID
pub async fn load_conversation(&self, id: Uuid) -> Result<Conversation> {
let record = sqlx::query(r#"SELECT data FROM conversations WHERE id = ?1"#)
.bind(id.to_string())
.fetch_optional(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to load conversation: {e}")))?;
let row =
record.ok_or_else(|| Error::Storage(format!("No conversation found with id {id}")))?;
let data: String = row
.try_get("data")
.map_err(|e| Error::Storage(format!("Failed to read conversation payload: {e}")))?;
serde_json::from_str(&data)
.map_err(|e| Error::Storage(format!("Failed to deserialize conversation: {e}")))
}
/// List metadata for all saved conversations ordered by most recent update
pub async fn list_sessions(&self) -> Result<Vec<SessionMeta>> {
let rows = sqlx::query(
r#"
SELECT id, name, description, model, message_count, created_at, updated_at
FROM conversations
ORDER BY updated_at DESC
"#,
)
.fetch_all(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to list sessions: {e}")))?;
let mut sessions = Vec::with_capacity(rows.len());
for row in rows {
let id_text: String = row
.try_get("id")
.map_err(|e| Error::Storage(format!("Failed to read id column: {e}")))?;
let id = Uuid::parse_str(&id_text)
.map_err(|e| Error::Storage(format!("Invalid UUID in storage: {e}")))?;
let message_count: i64 = row
.try_get("message_count")
.map_err(|e| Error::Storage(format!("Failed to read message count: {e}")))?;
let created_at: i64 = row
.try_get("created_at")
.map_err(|e| Error::Storage(format!("Failed to read created_at: {e}")))?;
let updated_at: i64 = row
.try_get("updated_at")
.map_err(|e| Error::Storage(format!("Failed to read updated_at: {e}")))?;
sessions.push(SessionMeta {
id,
name: row
.try_get("name")
.map_err(|e| Error::Storage(format!("Failed to read name: {e}")))?,
description: row
.try_get("description")
.map_err(|e| Error::Storage(format!("Failed to read description: {e}")))?,
model: row
.try_get("model")
.map_err(|e| Error::Storage(format!("Failed to read model: {e}")))?,
message_count: message_count as usize,
created_at: from_epoch_seconds(created_at),
updated_at: from_epoch_seconds(updated_at),
});
}
Ok(sessions)
}
/// Delete a conversation by ID
pub async fn delete_session(&self, id: Uuid) -> Result<()> {
sqlx::query("DELETE FROM conversations WHERE id = ?1")
.bind(id.to_string())
.execute(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to delete conversation: {e}")))?;
Ok(())
}
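/// Encrypt `plaintext` with AES-256-GCM and store it under `key`, replacing any existing entry.
///
/// The `master_key` must be exactly 32 bytes. A minimal round-trip sketch, with an
/// illustrative key value and `owlen_core` paths assumed from the crate layout
/// (marked `ignore` so it is not built as a doc-test):
///
/// ```ignore
/// use owlen_core::storage::StorageManager;
///
/// async fn demo() -> owlen_core::Result<()> {
///     let storage = StorageManager::new().await?;
///     let master_key = [0x42u8; 32]; // use a securely generated key in practice
///     storage.store_secure_item("api-token", b"secret", &master_key).await?;
///     let secret = storage.load_secure_item("api-token", &master_key).await?;
///     assert_eq!(secret.as_deref(), Some(&b"secret"[..]));
///     Ok(())
/// }
/// ```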
pub async fn store_secure_item(
&self,
key: &str,
plaintext: &[u8],
master_key: &[u8],
) -> Result<()> {
let cipher = create_cipher(master_key)?;
let nonce_bytes = generate_nonce()?;
let nonce = Nonce::from_slice(&nonce_bytes);
let ciphertext = cipher
.encrypt(nonce, plaintext)
.map_err(|e| Error::Storage(format!("Failed to encrypt secure item: {e}")))?;
let now = to_epoch_seconds(SystemTime::now());
sqlx::query(
r#"
INSERT INTO secure_items (key, nonce, ciphertext, created_at, updated_at)
VALUES (?1, ?2, ?3, ?4, ?5)
ON CONFLICT(key) DO UPDATE SET
nonce = excluded.nonce,
ciphertext = excluded.ciphertext,
updated_at = excluded.updated_at
"#,
)
.bind(key)
.bind(&nonce_bytes[..])
.bind(&ciphertext[..])
.bind(now)
.bind(now)
.execute(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to store secure item: {e}")))?;
Ok(())
}
/// Load and decrypt a secure item, returning `Ok(None)` when no entry exists for `key`
pub async fn load_secure_item(&self, key: &str, master_key: &[u8]) -> Result<Option<Vec<u8>>> {
let record = sqlx::query("SELECT nonce, ciphertext FROM secure_items WHERE key = ?1")
.bind(key)
.fetch_optional(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to load secure item: {e}")))?;
let Some(row) = record else {
return Ok(None);
};
let nonce_bytes: Vec<u8> = row
.try_get("nonce")
.map_err(|e| Error::Storage(format!("Failed to read secure item nonce: {e}")))?;
let ciphertext: Vec<u8> = row
.try_get("ciphertext")
.map_err(|e| Error::Storage(format!("Failed to read secure item ciphertext: {e}")))?;
if nonce_bytes.len() != 12 {
return Err(Error::Storage(
"Invalid nonce length for secure item".to_string(),
));
}
let cipher = create_cipher(master_key)?;
let nonce = Nonce::from_slice(&nonce_bytes);
let plaintext = cipher
.decrypt(nonce, ciphertext.as_ref())
.map_err(|e| Error::Storage(format!("Failed to decrypt secure item: {e}")))?;
Ok(Some(plaintext))
}
/// Delete a secure item by key
pub async fn delete_secure_item(&self, key: &str) -> Result<()> {
sqlx::query("DELETE FROM secure_items WHERE key = ?1")
.bind(key)
.execute(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to delete secure item: {e}")))?;
Ok(())
}
/// Remove all stored secure items
pub async fn clear_secure_items(&self) -> Result<()> {
sqlx::query("DELETE FROM secure_items")
.execute(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to clear secure items: {e}")))?;
Ok(())
}
/// Database location used by this storage manager
pub fn database_path(&self) -> &Path {
&self.database_path
}
/// Determine the default database path (platform-specific)
pub fn default_database_path() -> Result<PathBuf> {
let data_dir = dirs::data_local_dir()
.ok_or_else(|| Error::Storage("Could not determine data directory".to_string()))?;
Ok(data_dir.join("owlen").join("owlen.db"))
}
fn legacy_sessions_dir() -> Result<PathBuf> {
let data_dir = dirs::data_local_dir()
.ok_or_else(|| Error::Storage("Could not determine data directory".to_string()))?;
Ok(data_dir.join("owlen").join("sessions"))
}
async fn database_has_records(&self) -> Result<bool> {
let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM conversations")
.fetch_one(&self.pool)
.await
.map_err(|e| Error::Storage(format!("Failed to inspect database: {e}")))?;
Ok(count > 0)
}
/// Offer a one-time, interactive migration of legacy JSON session files into SQLite.
/// Skipped when the database already contains records or stdin is not a terminal.
async fn try_migrate_legacy_sessions(&self) -> Result<()> {
if self.database_has_records().await? {
return Ok(());
}
let legacy_dir = match Self::legacy_sessions_dir() {
Ok(dir) => dir,
Err(_) => return Ok(()),
};
if !legacy_dir.exists() {
return Ok(());
}
let entries = fs::read_dir(&legacy_dir).map_err(|e| {
Error::Storage(format!("Failed to read legacy sessions directory: {e}"))
})?;
let mut json_files = Vec::new();
for entry in entries.flatten() {
let path = entry.path();
if path.extension().and_then(|s| s.to_str()) == Some("json") {
json_files.push(path);
}
}
if json_files.is_empty() {
return Ok(());
}
if !io::stdin().is_terminal() {
return Ok(());
}
println!(
"Legacy OWLEN session files were found in {}.",
legacy_dir.display()
);
if !prompt_yes_no("Migrate them to the new SQLite storage? (y/N) ")? {
println!("Skipping legacy session migration.");
return Ok(());
}
println!("Migrating legacy sessions...");
let mut migrated = 0usize;
for path in &json_files {
match fs::read_to_string(path) {
Ok(content) => match serde_json::from_str::<Conversation>(&content) {
Ok(conversation) => {
if let Err(err) = self
.save_conversation_with_description(
&conversation,
conversation.name.clone(),
conversation.description.clone(),
)
.await
{
println!(" • Failed to migrate {}: {}", path.display(), err);
} else {
migrated += 1;
}
}
Err(err) => {
println!(
" • Failed to parse conversation {}: {}",
path.display(),
err
);
}
},
Err(err) => {
println!(" • Failed to read {}: {}", path.display(), err);
}
}
}
if migrated > 0 {
if let Err(err) = archive_legacy_directory(&legacy_dir) {
println!(
"Warning: migrated sessions but failed to archive legacy directory: {}",
err
);
}
}
println!("Migrated {} legacy sessions.", migrated);
Ok(())
}
}
fn to_epoch_seconds(time: SystemTime) -> i64 {
match time.duration_since(UNIX_EPOCH) {
Ok(duration) => duration.as_secs() as i64,
Err(_) => 0,
}
}
fn from_epoch_seconds(seconds: i64) -> SystemTime {
UNIX_EPOCH + Duration::from_secs(seconds.max(0) as u64)
}
fn prompt_yes_no(prompt: &str) -> Result<bool> {
print!("{}", prompt);
io::stdout()
.flush()
.map_err(|e| Error::Storage(format!("Failed to flush stdout: {e}")))?;
let mut input = String::new();
io::stdin()
.read_line(&mut input)
.map_err(|e| Error::Storage(format!("Failed to read input: {e}")))?;
let trimmed = input.trim().to_lowercase();
Ok(matches!(trimmed.as_str(), "y" | "yes"))
}
fn archive_legacy_directory(legacy_dir: &Path) -> Result<()> {
let mut backup_dir = legacy_dir.with_file_name("sessions_legacy_backup");
let mut counter = 1;
while backup_dir.exists() {
backup_dir = legacy_dir.with_file_name(format!("sessions_legacy_backup_{}", counter));
counter += 1;
}
fs::rename(legacy_dir, &backup_dir).map_err(|e| {
Error::Storage(format!(
"Failed to archive legacy sessions directory {}: {}",
legacy_dir.display(),
e
))
})?;
println!("Legacy session files archived to {}", backup_dir.display());
Ok(())
}
fn create_cipher(master_key: &[u8]) -> Result<Aes256Gcm> {
if master_key.len() != 32 {
return Err(Error::Storage(
"Master key must be 32 bytes for AES-256-GCM".to_string(),
));
}
Aes256Gcm::new_from_slice(master_key).map_err(|_| {
Error::Storage("Failed to initialize cipher with provided master key".to_string())
})
}
fn generate_nonce() -> Result<[u8; 12]> {
let mut nonce = [0u8; 12];
SystemRandom::new()
.fill(&mut nonce)
.map_err(|_| Error::Storage("Failed to generate nonce".to_string()))?;
Ok(nonce)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::types::{Conversation, Message};
use tempfile::tempdir;
fn sample_conversation() -> Conversation {
Conversation {
id: Uuid::new_v4(),
name: Some("Test conversation".to_string()),
description: Some("A sample conversation".to_string()),
messages: vec![
Message::user("Hello".to_string()),
Message::assistant("Hi".to_string()),
],
model: "test-model".to_string(),
created_at: SystemTime::now(),
updated_at: SystemTime::now(),
}
}
#[tokio::test]
async fn test_storage_lifecycle() {
let temp_dir = tempdir().expect("failed to create temp dir");
let db_path = temp_dir.path().join("owlen.db");
let storage = StorageManager::with_database_path(db_path).await.unwrap();
let conversation = sample_conversation();
storage
.save_conversation(&conversation, None)
.await
.expect("failed to save conversation");
let sessions = storage.list_sessions().await.unwrap();
assert_eq!(sessions.len(), 1);
assert_eq!(sessions[0].id, conversation.id);
let loaded = storage.load_conversation(conversation.id).await.unwrap();
assert_eq!(loaded.messages.len(), 2);
storage
.delete_session(conversation.id)
.await
.expect("failed to delete conversation");
let sessions = storage.list_sessions().await.unwrap();
assert!(sessions.is_empty());
}
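// Round-trip sketch for the secure-item API defined above; the master key is an
// arbitrary 32-byte test value, not an example of real key management.
#[tokio::test]
async fn test_secure_item_roundtrip() {
let temp_dir = tempdir().expect("failed to create temp dir");
let db_path = temp_dir.path().join("owlen.db");
let storage = StorageManager::with_database_path(db_path).await.unwrap();
let master_key = [7u8; 32];
storage
.store_secure_item("token", b"super-secret", &master_key)
.await
.expect("failed to store secure item");
let loaded = storage
.load_secure_item("token", &master_key)
.await
.expect("failed to load secure item");
assert_eq!(loaded.as_deref(), Some(&b"super-secret"[..]));
storage
.delete_secure_item("token")
.await
.expect("failed to delete secure item");
assert!(storage
.load_secure_item("token", &master_key)
.await
.unwrap()
.is_none());
}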
}