Complete Ollama Web UI implementation featuring:

Frontend (SvelteKit + Svelte 5 + Tailwind CSS + Skeleton UI):
- Chat interface with streaming responses and markdown rendering
- Message tree with branching support (edit creates branches)
- Vision model support with image upload/paste
- Code syntax highlighting with Shiki
- Built-in tools: get_current_time, calculate, fetch_url
- Function model middleware (functiongemma) for tool routing
- IndexedDB storage with Dexie.js
- Context window tracking with token estimation
- Knowledge base with embeddings (RAG support)
- Keyboard shortcuts and responsive design
- Export conversations as Markdown/JSON

Backend (Go + Gin + SQLite):
- RESTful API for conversations and messages
- SQLite persistence with branching message tree
- Sync endpoints for IndexedDB ↔ SQLite synchronization
- URL proxy endpoint for CORS-bypassed web fetching
- Health check endpoint
- Docker support with host network mode

Infrastructure:
- Docker Compose for development and production
- Vite proxy configuration for Ollama and backend APIs (sketched below)
- Hot reload development setup

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
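The Vite proxy item under Infrastructure is sketched below: a minimal vite.config.ts that forwards browser calls to the Ollama and Go backend services during development. The /ollama and /backend path prefixes and the rewrite rules are illustrative assumptions; the commit does not spell out the actual routes.

// vite.config.ts — a minimal sketch of the proxy setup described above.
// The /ollama and /backend prefixes are assumed, not taken from the commit.
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';

export default defineConfig({
  plugins: [sveltekit()],
  server: {
    proxy: {
      // Browser calls to /ollama/* are forwarded to the Ollama server.
      '/ollama': {
        target: 'http://localhost:11434',
        changeOrigin: true,
        rewrite: (path) => path.replace(/^\/ollama/, ''),
      },
      // Browser calls to /backend/* are forwarded to the Go API.
      '/backend': {
        target: 'http://localhost:9090',
        changeOrigin: true,
        rewrite: (path) => path.replace(/^\/backend/, ''),
      },
    },
  },
});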
63 lines · 1.2 KiB · YAML
services:
  # Ollama WebUI Frontend
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - "7842:3000"
    environment:
      - OLLAMA_API_URL=http://ollama:11434
      - BACKEND_URL=http://backend:9090
    depends_on:
      - ollama
      - backend
    networks:
      - ollama-network
    restart: unless-stopped

  # Go Backend API
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    ports:
      - "9090:9090"
    environment:
      - OLLAMA_URL=http://ollama:11434
      - PORT=9090
    volumes:
      - backend-data:/app/data
    depends_on:
      - ollama
    networks:
      - ollama-network
    restart: unless-stopped

  # Ollama LLM Server
  ollama:
    image: ollama/ollama:latest
    ports:
      - "11434:11434"
    volumes:
      - ollama-data:/root/.ollama
    networks:
      - ollama-network
    restart: unless-stopped
    # Uncomment for GPU support (NVIDIA)
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: all
    #           capabilities: [gpu]
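    # Note: GPU access additionally requires the NVIDIA Container Toolkit
    # to be installed and configured on the Docker host.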

networks:
  ollama-network:
    driver: bridge

volumes:
  ollama-data:
  backend-data:
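With this file saved as docker-compose.yml (the default compose filename), `docker compose up -d --build` brings up all three services on the shared bridge network, where they reach each other by service name (hence http://ollama:11434 and http://backend:9090 in the environment blocks). The UI is published on host port 7842, the Go API on 9090, and Ollama on 11434; models persist in the ollama-data volume and can be pulled with, e.g., `docker compose exec ollama ollama pull <model>`.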