feat: add LLM provider settings to web UI with encrypted API key storage

Add full LLM configuration section to the setup page with provider
dropdown, model/API key fields for cloud providers (Anthropic, OpenAI,
Gemini), and endpoint/model fields for Ollama. API keys are encrypted
with AES-256-GCM and stored in the database.
This commit is contained in:
2026-02-09 11:41:16 +01:00
parent c63e70cab0
commit a89720fded
8 changed files with 291 additions and 15 deletions

View File

@@ -88,32 +88,58 @@ func getActiveProfile() (*store.Profile, error) {
return &profiles[0], nil
}
// getLLMProvider creates an LLM provider based on config.
// getLLMProvider creates an LLM provider from DB settings, with env var / config fallback.
func getLLMProvider() llm.Provider {
switch cfg.LLM.Provider {
// Determine provider: --llm flag > DB settings > config.yaml
provider := cfg.LLM.Provider
model := cfg.LLM.Model
endpoint := cfg.LLM.Endpoint
var dbKey string
if db != nil {
if ls, err := db.GetLLMSettings(); err == nil && ls.Provider != "" {
if llmFlag == "" { // only use DB provider when no CLI flag override
provider = ls.Provider
}
model = ls.Model
endpoint = ls.Endpoint
if key, err := config.Decrypt(ls.APIKeyEnc); err == nil {
dbKey = key
}
}
}
envKey := func(envVar string) string {
if dbKey != "" {
return dbKey
}
return os.Getenv(envVar)
}
switch provider {
case "anthropic":
key := os.Getenv("ANTHROPIC_API_KEY")
key := envKey("ANTHROPIC_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: ANTHROPIC_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no Anthropic API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewAnthropic(key, cfg.LLM.Model, nil)
return llm.NewAnthropic(key, model, nil)
case "openai":
key := os.Getenv("OPENAI_API_KEY")
key := envKey("OPENAI_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: OPENAI_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no OpenAI API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewOpenAI(key, cfg.LLM.Model, nil)
return llm.NewOpenAI(key, model, nil)
case "gemini":
key := os.Getenv("GEMINI_API_KEY")
key := envKey("GEMINI_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: GEMINI_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no Gemini API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewGemini(key, cfg.LLM.Model, nil)
return llm.NewGemini(key, model, nil)
case "ollama":
return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil)
return llm.NewOllama(model, endpoint, nil)
default:
return llm.NewNoop()
}

View File

@@ -37,6 +37,7 @@
<a href="#ac" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">AC Units</a>
<a href="#toggles" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">Toggles</a>
<a href="#forecast" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">Forecast</a>
<a href="#llm" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">LLM</a>
</div>
{{template "profiles" .}}
@@ -46,6 +47,7 @@
{{template "ac_units" .}}
{{template "toggles" .}}
{{template "forecast" .}}
{{template "llm" .}}
<footer class="mt-8 text-center text-xs text-gray-500 dark:text-gray-500 py-4">
<p>Heatwave Autopilot</p>
@@ -540,3 +542,47 @@
{{end}}
</section>
{{end}}
{{define "llm"}}
{{/* LLM provider settings section of the setup page. Reads .LLMProvider,
     .LLMModel and .LLMEndpoint from the page data and POSTs to
     /setup/llm/save. Template comments render to nothing in the output. */}}
<section id="llm" class="mb-8">
<h2 class="text-xl font-semibold mb-3">LLM Provider</h2>
<form method="POST" action="/setup/llm/save" class="p-4 bg-white dark:bg-gray-800 rounded-lg shadow dark:shadow-gray-700">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
<div class="sm:col-span-2">
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Provider</label>
{{/* The onchange handler toggles the cloud vs. local field groups below
     client-side; the server-rendered style attributes on those groups set
     the matching initial visibility for the saved provider. */}}
<select name="provider" id="llm-provider" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700"
onchange="document.getElementById('llm-cloud').style.display=(this.value==='anthropic'||this.value==='openai'||this.value==='gemini')?'':'none';document.getElementById('llm-local').style.display=(this.value==='ollama')?'':'none';">
<option value="none" {{if eq .LLMProvider "none"}}selected{{end}}>None</option>
<option value="anthropic" {{if eq .LLMProvider "anthropic"}}selected{{end}}>Anthropic</option>
<option value="openai" {{if eq .LLMProvider "openai"}}selected{{end}}>OpenAI</option>
<option value="gemini" {{if eq .LLMProvider "gemini"}}selected{{end}}>Gemini</option>
<option value="ollama" {{if eq .LLMProvider "ollama"}}selected{{end}}>Ollama (local)</option>
</select>
</div>
{{/* Cloud provider fields: model name plus API key. Hidden inputs in the
     inactive group still submit, but the save handler only reads the
     fields for the selected provider. */}}
<div id="llm-cloud" style="{{if or (eq .LLMProvider "anthropic") (eq .LLMProvider "openai") (eq .LLMProvider "gemini")}}{{else}}display:none{{end}}" class="sm:col-span-2 grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Model</label>
<input type="text" name="cloud_model" value="{{.LLMModel}}" placeholder="e.g. claude-sonnet-4-5-20250929" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">API Key</label>
{{/* The stored key is never echoed back; an empty submission keeps the
     previously saved encrypted key. */}}
<input type="password" name="api_key" placeholder="Leave blank to keep current key" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
</div>
{{/* Local (Ollama) fields: endpoint URL plus model, no API key. */}}
<div id="llm-local" style="{{if eq .LLMProvider "ollama"}}{{else}}display:none{{end}}" class="sm:col-span-2 grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Endpoint URL</label>
<input type="text" name="endpoint" value="{{.LLMEndpoint}}" placeholder="http://localhost:11434" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Model</label>
<input type="text" name="local_model" value="{{.LLMModel}}" placeholder="e.g. llama3" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
</div>
</div>
<button type="submit" class="mt-4 px-4 py-2 bg-blue-600 text-white rounded hover:bg-blue-700">Save LLM Settings</button>
</form>
</section>
{{end}}

View File

@@ -7,6 +7,7 @@ import (
"net/http"
"strconv"
"github.com/cnachtigall/heatwave-autopilot/internal/config"
"github.com/cnachtigall/heatwave-autopilot/internal/report"
"github.com/cnachtigall/heatwave-autopilot/internal/static"
"github.com/cnachtigall/heatwave-autopilot/internal/store"
@@ -20,6 +21,7 @@ var setupTmpl *template.Template
// init registers template helper functions and parses the setup page template.
func init() {
	funcs := template.FuncMap{
		"mul": func(a, b float64) float64 { return a * b },
		// NOTE: no custom "eq" here. The template packages ship a builtin
		// eq that compares any comparable operands (including the string
		// comparisons this page uses); registering a string-only version
		// would shadow it and break any eq use with non-string values.
	}
	setupTmpl = template.Must(template.New("setup").Funcs(funcs).Parse(setupTmplStr))
}
@@ -46,8 +48,11 @@ type setupData struct {
Devices []store.Device
Occupants []store.Occupant
ACUnits []acUnitView
Toggles map[string]bool
LastFetch string
Toggles map[string]bool
LastFetch string
LLMProvider string
LLMModel string
LLMEndpoint string
}
func loadSetupData(w http.ResponseWriter, r *http.Request) setupData {
@@ -96,6 +101,11 @@ func loadSetupData(w http.ResponseWriter, r *http.Request) setupData {
sd.LastFetch = lastFetch.Format("2006-01-02 15:04")
}
llmSettings, _ := db.GetLLMSettings()
sd.LLMProvider = llmSettings.Provider
sd.LLMModel = llmSettings.Model
sd.LLMEndpoint = llmSettings.Endpoint
return sd
}
@@ -399,6 +409,49 @@ func forecastFetchHandler(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, "/setup#forecast", http.StatusSeeOther)
}
func llmSaveHandler(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
provider := r.FormValue("provider")
var model, endpoint, apiKey string
switch provider {
case "anthropic", "openai", "gemini":
model = r.FormValue("cloud_model")
apiKey = r.FormValue("api_key")
case "ollama":
model = r.FormValue("local_model")
endpoint = r.FormValue("endpoint")
}
apiKeyEnc := ""
if apiKey != "" {
var err error
apiKeyEnc, err = config.Encrypt(apiKey)
if err != nil {
setFlash(w, "Error encrypting API key: "+err.Error())
http.Redirect(w, r, "/setup#llm", http.StatusSeeOther)
return
}
} else {
// Preserve existing encrypted key if no new one was submitted.
existing, _ := db.GetLLMSettings()
apiKeyEnc = existing.APIKeyEnc
}
ls := &store.LLMSettings{
Provider: provider,
Model: model,
Endpoint: endpoint,
APIKeyEnc: apiKeyEnc,
}
if err := db.SaveLLMSettings(ls); err != nil {
setFlash(w, "Error saving LLM settings: "+err.Error())
} else {
setFlash(w, "LLM settings saved.")
}
http.Redirect(w, r, "/setup#llm", http.StatusSeeOther)
}
// --- Flash helpers ---
const flashCookieName = "heatwave_flash"
@@ -470,4 +523,5 @@ func registerSetupRoutes(mux *http.ServeMux, dateStr string) {
mux.HandleFunc("POST /setup/toggles/set", toggleSetHandler)
mux.HandleFunc("POST /setup/forecast/fetch", forecastFetchHandler)
mux.HandleFunc("POST /setup/llm/save", llmSaveHandler)
}