From 2c2744fc277f5c2617e9959f122985daa744660a Mon Sep 17 00:00:00 2001
From: vikingowl
Date: Thu, 22 Jan 2026 09:21:49 +0100
Subject: [PATCH] feat: add .env.example and fix hardcoded Ollama URL

- Add .env.example with all documented environment variables
- Fix conversation-summary.ts to use proxy instead of hardcoded localhost

Closes #9
---
 .env.example                                  | 28 +++++++++++++++++++
 .../src/lib/services/conversation-summary.ts |  4 +--
 2 files changed, 30 insertions(+), 2 deletions(-)
 create mode 100644 .env.example

diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..bcbbe25
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,28 @@
+# ===========================================
+# Vessel Configuration
+# ===========================================
+# Copy this file to .env and adjust values as needed.
+# All variables have sensible defaults - only set what you need to change.
+
+# ----- Backend -----
+# Server port (default: 8080, but 9090 recommended for local dev)
+PORT=9090
+
+# SQLite database path (relative to backend working directory)
+DB_PATH=./data/vessel.db
+
+# Ollama API endpoint
+OLLAMA_URL=http://localhost:11434
+
+# GitHub repo for version checking (format: owner/repo)
+GITHUB_REPO=VikingOwl91/vessel
+
+# ----- Frontend -----
+# Ollama API endpoint (for frontend proxy)
+OLLAMA_API_URL=http://localhost:11434
+
+# Backend API endpoint
+BACKEND_URL=http://localhost:9090
+
+# Development server port
+DEV_PORT=7842
diff --git a/frontend/src/lib/services/conversation-summary.ts b/frontend/src/lib/services/conversation-summary.ts
index 8a02ade..7b59a85 100644
--- a/frontend/src/lib/services/conversation-summary.ts
+++ b/frontend/src/lib/services/conversation-summary.ts
@@ -15,7 +15,7 @@ import { indexConversationMessages } from './chat-indexer.js';
 export interface SummaryGenerationOptions {
   /** Model to use for summary generation */
   model: string;
-  /** Base URL for Ollama API */
+  /** Base URL for Ollama API (default: /api/v1/ollama, uses proxy) */
   baseUrl?: string;
   /** Maximum messages to include in summary context */
   maxMessages?: number;
@@ -37,7 +37,7 @@ export async function generateConversationSummary(
   messages: Message[],
   options: SummaryGenerationOptions
 ): Promise<string> {
-  const { model, baseUrl = 'http://localhost:11434', maxMessages = 20 } = options;
+  const { model, baseUrl = '/api/v1/ollama', maxMessages = 20 } = options;
 
   // Filter to user and assistant messages only
   const relevantMessages = messages
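
Note for reviewers: a minimal sketch of a call site under the new default. This is illustrative only and not part of the patch; the `$lib` import aliases, the `Message` type location, and the model name are assumptions.

// Hypothetical call site. With baseUrl omitted, the request now goes through
// the frontend proxy at /api/v1/ollama (which forwards to OLLAMA_API_URL,
// see .env.example) instead of a hardcoded http://localhost:11434.
import { generateConversationSummary } from '$lib/services/conversation-summary.js';
import type { Message } from '$lib/types.js'; // assumed location of Message

// Previously loaded conversation history (stubbed for this sketch).
declare const messages: Message[];

const summary = await generateConversationSummary(messages, {
  model: 'llama3.2', // illustrative model name, not mandated by the patch
  maxMessages: 20,   // matches the existing default
});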