feat(settings): Ollama connection status + runtime model selector

Add /admin/settings/ai endpoint (GET status + available models, POST
model switch). OllamaProvider gains SetModel/Model/ListModels with a
RWMutex so the active model can be swapped at runtime without restart.
New /admin/einstellungen page shows provider, connection badge, and a
model dropdown that calls the API on submit.
This commit is contained in:
2026-04-25 08:29:38 +02:00
parent f13cd55393
commit c4207865c8
8 changed files with 328 additions and 6 deletions

View File

@@ -0,0 +1,75 @@
package settings
import (
"net/http"
"github.com/gin-gonic/gin"
"marktvogt.de/backend/internal/pkg/ai"
)
// AIStatus is the response payload for GET /admin/settings/ai.
type AIStatus struct {
	Provider  string   `json:"provider"`           // active provider name, e.g. "ollama"
	Connected bool     `json:"connected"`          // whether the provider was reachable when queried
	BaseURL   string   `json:"base_url,omitempty"` // provider base URL; omitted for non-Ollama providers
	Model     string   `json:"model"`              // currently active model (empty when not applicable)
	Models    []string `json:"models"`             // models available for switching (empty when disconnected)
}
// Handler serves AI settings endpoints. ollama is nil when the active
// provider is not Ollama.
type Handler struct {
	ollama   *ai.OllamaProvider // non-nil only when the configured provider is Ollama
	provider string             // provider name as reported by Provider.Name()
}
// NewHandler builds a settings handler for the given AI provider. When the
// provider is not Ollama the ollama field stays nil, which the endpoints
// treat as "model switching unsupported".
func NewHandler(provider ai.Provider) *Handler {
	h := &Handler{provider: provider.Name()}
	if o, ok := provider.(*ai.OllamaProvider); ok {
		h.ollama = o
	}
	return h
}
// GetAI handles GET /admin/settings/ai. It reports the active provider, its
// connection state, and (for Ollama) the list of locally available models.
func (h *Handler) GetAI(c *gin.Context) {
	// Non-Ollama providers expose no runtime model listing; report them as
	// connected with an empty model set so the UI can render a static card.
	if h.ollama == nil {
		c.JSON(http.StatusOK, gin.H{"data": AIStatus{
			Provider:  h.provider,
			Connected: true,
			Model:     "",
			Models:    []string{},
		}})
		return
	}

	models, err := h.ollama.ListModels(c.Request.Context())
	status := AIStatus{
		// Use the stored provider name instead of a hardcoded "ollama" so
		// both branches of this handler stay consistent with Provider.Name().
		Provider: h.provider,
		BaseURL:  h.ollama.BaseURL(),
		Model:    h.ollama.Model(),
		Models:   []string{}, // non-nil so JSON encodes [] instead of null
	}
	if err != nil {
		// Any listing failure (daemon down, network error) is surfaced as
		// "not connected"; the UI shows a badge rather than the raw error.
		status.Connected = false
	} else {
		status.Connected = true
		for _, m := range models {
			status.Models = append(status.Models, m.Name)
		}
	}
	c.JSON(http.StatusOK, gin.H{"data": status})
}
// SetModel handles POST /admin/settings/ai/model and switches the active
// Ollama model at runtime. Requests are rejected for non-Ollama providers.
func (h *Handler) SetModel(c *gin.Context) {
	if h.ollama == nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "model switching only supported for Ollama provider"})
		return
	}

	type modelRequest struct {
		Model string `json:"model" binding:"required"`
	}
	var req modelRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		// Any binding failure (malformed JSON or missing field) maps to the
		// same client-facing message.
		c.JSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	h.ollama.SetModel(req.Model)
	c.JSON(http.StatusOK, gin.H{"data": gin.H{"model": req.Model}})
}

View File

@@ -0,0 +1,9 @@
package settings
import "github.com/gin-gonic/gin"
// RegisterRoutes mounts the AI settings endpoints under /admin, guarded by
// the supplied auth and admin middlewares.
func RegisterRoutes(rg *gin.RouterGroup, h *Handler, requireAuth, requireAdmin gin.HandlerFunc) {
	grp := rg.Group("/admin", requireAuth, requireAdmin)
	{
		grp.GET("/settings/ai", h.GetAI)
		grp.POST("/settings/ai/model", h.SetModel)
	}
}

View File

@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"net/http"
"sync"
"time"
)
@@ -17,8 +18,10 @@ type OllamaConfig struct {
}
// OllamaProvider talks to an Ollama daemon over HTTP. The active model is
// guarded by mu so it can be swapped at runtime without a restart.
// (The diff-rendering residue that duplicated the cfg/client fields is
// removed here; as written the struct would not compile.)
type OllamaProvider struct {
	cfg    OllamaConfig
	client *http.Client

	mu          sync.RWMutex // guards activeModel
	activeModel string       // model used when a chat request does not specify one
}
func NewOllamaProvider(cfg OllamaConfig) *OllamaProvider {
@@ -26,8 +29,9 @@ func NewOllamaProvider(cfg OllamaConfig) *OllamaProvider {
cfg.Timeout = 300 * time.Second
}
return &OllamaProvider{
cfg: cfg,
client: &http.Client{Timeout: cfg.Timeout},
cfg: cfg,
client: &http.Client{Timeout: cfg.Timeout},
activeModel: cfg.Model,
}
}
@@ -35,6 +39,59 @@ func (p *OllamaProvider) Name() string { return "ollama" }
// SupportsJSONMode reports that this provider can be asked for JSON output.
func (p *OllamaProvider) SupportsJSONMode() bool { return true }

// SupportsJSONSchema reports that this provider accepts a JSON schema constraint.
func (p *OllamaProvider) SupportsJSONSchema() bool { return true }
// BaseURL returns the configured Ollama base URL.
func (p *OllamaProvider) BaseURL() string {
	return p.cfg.BaseURL
}
// Model returns the currently active model name. Safe for concurrent use.
func (p *OllamaProvider) Model() string {
	p.mu.RLock()
	defer p.mu.RUnlock()
	return p.activeModel
}
// SetModel replaces the active model at runtime. Safe for concurrent use.
// No validation is performed here; callers are expected to pass a model
// name that exists on the Ollama instance.
func (p *OllamaProvider) SetModel(model string) {
	p.mu.Lock()
	defer p.mu.Unlock()
	p.activeModel = model
}
// OllamaModelInfo is a model entry from Ollama's /api/tags response.
type OllamaModelInfo struct {
	Name string `json:"name"`
	Size int64  `json:"size"` // raw size value as reported by Ollama (presumably bytes)
}
// ListModels calls Ollama's /api/tags endpoint and returns the available
// models. The request is bound to ctx, so callers control cancellation and
// deadlines on top of the client's configured timeout.
func (p *OllamaProvider) ListModels(ctx context.Context) ([]OllamaModelInfo, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, p.cfg.BaseURL+"/api/tags", nil)
	if err != nil {
		return nil, err
	}
	resp, err := p.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer func() { _ = resp.Body.Close() }()

	if resp.StatusCode != http.StatusOK {
		// Include a bounded snippet of the body for diagnostics; LimitReader
		// guards against reading an arbitrarily large error response.
		b, _ := io.ReadAll(io.LimitReader(resp.Body, 4096))
		return nil, fmt.Errorf("ollama /api/tags: status %d: %s", resp.StatusCode, b)
	}

	// The wire format matches OllamaModelInfo's json tags exactly, so decode
	// straight into the exported type instead of copying through an
	// anonymous struct.
	var body struct {
		Models []OllamaModelInfo `json:"models"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		return nil, err
	}
	if body.Models == nil {
		// Preserve the original contract of a non-nil (empty) slice.
		return []OllamaModelInfo{}, nil
	}
	return body.Models, nil
}
type ollamaChatReq struct {
Model string `json:"model"`
Messages []ollamaMessage `json:"messages"`
@@ -64,7 +121,7 @@ type ollamaChatResp struct {
func (p *OllamaProvider) Chat(ctx context.Context, req *ChatRequest) (*ChatResponse, error) {
model := req.Model
if model == "" {
model = p.cfg.Model
model = p.Model()
}
body := ollamaChatReq{
Model: model,

View File

@@ -11,6 +11,7 @@ import (
"marktvogt.de/backend/internal/domain/discovery/crawler"
"marktvogt.de/backend/internal/domain/discovery/enrich"
"marktvogt.de/backend/internal/domain/market"
"marktvogt.de/backend/internal/domain/settings"
"marktvogt.de/backend/internal/domain/user"
"marktvogt.de/backend/internal/middleware"
"marktvogt.de/backend/internal/pkg/ai"
@@ -89,6 +90,10 @@ func (s *Server) registerRoutes() {
discoveryHandler := discovery.NewHandler(discoveryService, s.cfg.Discovery.CrawlerManualRateLimitPerHour)
requireTickToken := middleware.RequireBearerToken(s.cfg.Discovery.Token)
discovery.RegisterRoutes(v1, discoveryHandler, requireAuth, requireAdmin, requireTickToken)
// AI settings routes
settingsHandler := settings.NewHandler(aiProvider)
settings.RegisterRoutes(v1, settingsHandler, requireAuth, requireAdmin)
}
func (s *Server) healthz(c *gin.Context) {

View File

@@ -183,6 +183,15 @@ export interface SubmitMarketRequest {
turnstile_token: string;
}
// AI settings

/** Response payload of GET /admin/settings/ai (mirrors the backend AIStatus). */
export interface AIStatus {
	/** Active provider name, e.g. "ollama". */
	provider: string;
	/** Whether the provider could be reached when status was queried. */
	connected: boolean;
	/** Provider base URL; omitted for non-Ollama providers. */
	base_url?: string;
	/** Currently active model (empty when not applicable). */
	model: string;
	/** Models available for switching (empty when disconnected). */
	models: string[];
}
// AI Research types
export interface ResearchResult {
suggestions: FieldSuggestion[];

View File

@@ -10,7 +10,8 @@
const navItems = [
{ href: '/admin/maerkte', label: 'Märkte' },
{ href: '/admin/discovery', label: 'Discovery' }
{ href: '/admin/discovery', label: 'Discovery' },
{ href: '/admin/einstellungen', label: 'Einstellungen' }
];
function isActive(href: string): boolean {

View File

@@ -0,0 +1,30 @@
import { fail } from '@sveltejs/kit';
import { serverFetch } from '$lib/api/client.server.js';
import type { AIStatus } from '$lib/api/types.js';
import type { Actions, PageServerLoad } from './$types.js';
/**
 * Loads the current AI provider status (connection, active model, available
 * models) for the settings page. A serverFetch failure is not caught here
 * and propagates to SvelteKit's error handling.
 */
export const load: PageServerLoad = async ({ cookies, fetch }) => {
	const res = await serverFetch<AIStatus>('/admin/settings/ai', cookies, { fetch });
	return { ai: res.data };
};
export const actions: Actions = {
	/**
	 * Switches the active Ollama model via POST /admin/settings/ai/model.
	 * Returns { success, model } on success or a fail() with a German
	 * user-facing error message.
	 */
	setModel: async ({ cookies, fetch, request }) => {
		const data = await request.formData();
		const raw = data.get('model');
		// Reject missing values, non-string entries (files), and
		// whitespace-only submissions; the original check let "   " through.
		const model = typeof raw === 'string' ? raw.trim() : '';
		if (!model) {
			return fail(400, { error: 'Kein Modell ausgewählt.' });
		}
		try {
			await serverFetch('/admin/settings/ai/model', cookies, {
				method: 'POST',
				body: JSON.stringify({ model }),
				fetch
			});
			return { success: true, model };
		} catch (err) {
			const message = err instanceof Error ? err.message : 'Modell konnte nicht gesetzt werden.';
			return fail(500, { error: message });
		}
	}
};

View File

@@ -0,0 +1,136 @@
<script lang="ts">
	import { untrack } from 'svelte';
	import { enhance } from '$app/forms';
	import type { PageData, ActionData } from './$types.js';

	interface Props {
		data: PageData;
		form: ActionData;
	}

	let { data, form }: Props = $props();

	// Dropdown selection, seeded once from the server-loaded model. untrack
	// keeps later changes to data.ai.model from resetting the user's choice.
	let selectedModel = $state(untrack(() => data.ai.model));
	// True while the setModel form action is in flight (disables the button).
	let saving = $state(false);
	// Model the backend currently uses: the last successfully submitted one,
	// falling back to the server-loaded value.
	let activeModel = $derived(form?.success && form.model ? form.model : data.ai.model);
</script>
<div class="space-y-6">
	<!-- Page heading -->
	<div>
		<h1 class="text-2xl font-bold text-stone-900 dark:text-stone-100">Einstellungen</h1>
		<p class="mt-1 text-sm text-stone-500 dark:text-stone-400">
			Systemkonfiguration und KI-Provider
		</p>
	</div>

	<!-- KI-Provider card -->
	<div class="rounded-lg border border-stone-200 bg-white dark:border-stone-700 dark:bg-stone-900">
		<div class="border-b border-stone-200 px-6 py-4 dark:border-stone-700">
			<h2 class="text-base font-semibold text-stone-900 dark:text-stone-100">KI-Provider</h2>
		</div>
		<div class="space-y-4 px-6 py-4">
			<!-- Provider + status row -->
			<div class="flex items-center gap-4">
				<div>
					<span class="text-xs font-medium tracking-wide text-stone-400 uppercase">Provider</span>
					<p class="mt-0.5 font-mono text-sm font-semibold text-stone-800 dark:text-stone-200">
						{data.ai.provider}
					</p>
				</div>
				{#if data.ai.base_url}
					<div>
						<span class="text-xs font-medium tracking-wide text-stone-400 uppercase">URL</span>
						<p class="mt-0.5 font-mono text-sm text-stone-600 dark:text-stone-400">
							{data.ai.base_url}
						</p>
					</div>
				{/if}
				<!-- Connection badge: green when the backend reached the provider -->
				<div class="ml-auto">
					{#if data.ai.connected}
						<span
							class="inline-flex items-center gap-1.5 rounded-full bg-green-100 px-3 py-1 text-xs font-medium text-green-800 dark:bg-green-900 dark:text-green-200"
						>
							<span class="h-1.5 w-1.5 rounded-full bg-green-500"></span>
							Verbunden
						</span>
					{:else}
						<span
							class="inline-flex items-center gap-1.5 rounded-full bg-red-100 px-3 py-1 text-xs font-medium text-red-800 dark:bg-red-900 dark:text-red-200"
						>
							<span class="h-1.5 w-1.5 rounded-full bg-red-500"></span>
							Nicht erreichbar
						</span>
					{/if}
				</div>
			</div>

			<!-- Model selector (Ollama only) -->
			{#if data.ai.provider === 'ollama'}
				<div class="border-t border-stone-100 pt-4 dark:border-stone-800">
					<span class="text-xs font-medium tracking-wide text-stone-400 uppercase"
						>Aktives Modell</span
					>
					{#if data.ai.connected && data.ai.models.length > 0}
						<!-- Progressive-enhancement form posting to the setModel action -->
						<form
							method="POST"
							action="?/setModel"
							use:enhance={() => {
								saving = true;
								return async ({ update }) => {
									await update();
									saving = false;
								};
							}}
							class="mt-2 flex items-center gap-3"
						>
							<select
								name="model"
								bind:value={selectedModel}
								class="focus:border-primary-500 focus:ring-primary-500 rounded-md border border-stone-300 bg-white px-3 py-2 text-sm text-stone-900 shadow-sm focus:ring-1 focus:outline-none dark:border-stone-600 dark:bg-stone-800 dark:text-stone-100"
							>
								{#each data.ai.models as model}
									<option value={model}>{model}</option>
								{/each}
							</select>
							<!-- Disabled while saving or when the selection is already active -->
							<button
								type="submit"
								disabled={saving || selectedModel === activeModel}
								class="bg-primary-600 hover:bg-primary-700 rounded-md px-4 py-2 text-sm font-medium text-white shadow-sm disabled:cursor-not-allowed disabled:opacity-50"
							>
								{saving ? 'Speichert…' : 'Übernehmen'}
							</button>
						</form>
						{#if form?.success}
							<p class="mt-2 text-xs text-green-600 dark:text-green-400">
								Modell auf <span class="font-mono">{form.model}</span> gesetzt.
							</p>
						{/if}
						{#if form?.error}
							<p class="mt-2 text-xs text-red-600 dark:text-red-400">{form.error}</p>
						{/if}
					{:else if data.ai.connected}
						<!-- Connected but no models installed -->
						<p class="mt-1 text-sm text-stone-500 dark:text-stone-400">
							Keine Modelle gefunden. Installiere ein Modell mit
							<code class="rounded bg-stone-100 px-1 py-0.5 font-mono text-xs dark:bg-stone-800">
								ollama pull &lt;model&gt;
							</code>
						</p>
					{:else}
						<!-- Disconnected: show the last-known model read-only -->
						<p class="mt-1 font-mono text-sm text-stone-500 dark:text-stone-400">
							{data.ai.model || '—'}
						</p>
						<p class="mt-1 text-xs text-red-500">
							Ollama nicht erreichbar — prüfe ob der Dienst läuft.
						</p>
					{/if}
				</div>
			{/if}
		</div>
	</div>
</div>