Docker compatibility:
- Discovery endpoints now read the backend base URLs from the OLLAMA_URL, LLAMACPP_URL, and LMSTUDIO_URL environment variables (see the sketch after this list)
- docker-compose.yml points the backends at host.docker.internal so the containers can reach services running on the host
- justfile updated to pass --host 0.0.0.0 so llama-server listens on all interfaces instead of loopback only, making it reachable from the containers

Vision support:
- The OpenAI adapter now converts images to the content-parts array format (sketched below)
- This enables vision models with llama.cpp and LM Studio

Bumps version to 0.7.1.
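Vessel's backend source isn't part of this diff, so the following is only a minimal sketch of the discovery-URL resolution the first bullet describes, written on the assumption of a Go backend. envOr, BackendURLs, and FromEnv are hypothetical names; the fallback ports mirror the defaults used in the compose file below.

// Sketch (not Vessel's actual code): resolve backend base URLs from the
// environment, falling back to the usual local ports when unset.
package discovery

import "os"

// envOr returns the value of key, or fallback if the variable is unset or empty.
func envOr(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

// BackendURLs holds the base URLs the discovery endpoints probe.
type BackendURLs struct {
	Ollama   string
	LlamaCpp string
	LMStudio string
}

// FromEnv reads OLLAMA_URL, LLAMACPP_URL, and LMSTUDIO_URL; the fallbacks
// only apply when running outside Docker, since the compose file sets all three.
func FromEnv() BackendURLs {
	return BackendURLs{
		Ollama:   envOr("OLLAMA_URL", "http://localhost:11434"),
		LlamaCpp: envOr("LLAMACPP_URL", "http://localhost:8081"),
		LMStudio: envOr("LMSTUDIO_URL", "http://localhost:1234"),
	}
}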
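Likewise, a sketch of the content-parts conversion for vision. The OpenAI chat-completions format represents a multimodal message's content as an array of typed parts ("text" and "image_url"), which OpenAI-compatible servers such as llama.cpp's llama-server and LM Studio accept; the part shapes below follow that format, but the package, function name, and the assumption that images arrive base64-encoded as PNG are illustrative, not taken from Vessel's adapter.

// Sketch: convert a prompt plus images into the OpenAI content-parts array,
// instead of sending the message content as a plain string.
package adapter

// ContentPart is one element of a multimodal message's content array.
type ContentPart struct {
	Type     string    `json:"type"`                // "text" or "image_url"
	Text     string    `json:"text,omitempty"`      // set when Type == "text"
	ImageURL *ImageURL `json:"image_url,omitempty"` // set when Type == "image_url"
}

// ImageURL wraps an image reference; a data: URL embeds the image inline.
type ImageURL struct {
	URL string `json:"url"`
}

// toContentParts builds the parts array; callers would marshal it as the
// message's "content" field. Assumes base64-encoded PNG images for simplicity.
func toContentParts(prompt string, imagesB64 []string) []ContentPart {
	parts := []ContentPart{{Type: "text", Text: prompt}}
	for _, img := range imagesB64 {
		parts = append(parts, ContentPart{
			Type:     "image_url",
			ImageURL: &ImageURL{URL: "data:image/png;base64," + img},
		})
	}
	return parts
}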
docker-compose.yml · 46 lines · 964 B · YAML
name: vessel

services:
  # Vessel Frontend
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - "7842:3000"
    environment:
      - OLLAMA_API_URL=http://host.docker.internal:11434
      - BACKEND_URL=http://backend:9090
    extra_hosts:
      - "host.docker.internal:host-gateway"
    networks:
      - vessel-network
    restart: unless-stopped

  # Vessel Backend API
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    ports:
      - "9090:9090"
    environment:
      - OLLAMA_URL=http://host.docker.internal:11434
      - LLAMACPP_URL=http://host.docker.internal:8081
      - LMSTUDIO_URL=http://host.docker.internal:1234
      - PORT=9090
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - backend-data:/app/data
    networks:
      - vessel-network
    restart: unless-stopped

networks:
  vessel-network:
    driver: bridge

volumes:
  backend-data:
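Note: host.docker.internal resolves automatically on Docker Desktop (macOS/Windows); the extra_hosts "host.docker.internal:host-gateway" entries above are what make the same hostname resolve to the host on Linux engines.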