feat: add LLM provider settings to web UI with encrypted API key storage

Add full LLM configuration section to the setup page with provider
dropdown, model/API key fields for cloud providers (Anthropic, OpenAI,
Gemini), and endpoint/model fields for Ollama. API keys are encrypted
with AES-256-GCM and stored in the database.
This commit is contained in:
2026-02-09 11:41:16 +01:00
parent c63e70cab0
commit a89720fded
8 changed files with 291 additions and 15 deletions

View File

@@ -88,32 +88,58 @@ func getActiveProfile() (*store.Profile, error) {
return &profiles[0], nil
}
// getLLMProvider creates an LLM provider based on config.
// getLLMProvider creates an LLM provider from DB settings, with env var / config fallback.
func getLLMProvider() llm.Provider {
switch cfg.LLM.Provider {
// Determine provider: --llm flag > DB settings > config.yaml
provider := cfg.LLM.Provider
model := cfg.LLM.Model
endpoint := cfg.LLM.Endpoint
var dbKey string
if db != nil {
if ls, err := db.GetLLMSettings(); err == nil && ls.Provider != "" {
if llmFlag == "" { // only use DB provider when no CLI flag override
provider = ls.Provider
}
model = ls.Model
endpoint = ls.Endpoint
if key, err := config.Decrypt(ls.APIKeyEnc); err == nil {
dbKey = key
}
}
}
envKey := func(envVar string) string {
if dbKey != "" {
return dbKey
}
return os.Getenv(envVar)
}
switch provider {
case "anthropic":
key := os.Getenv("ANTHROPIC_API_KEY")
key := envKey("ANTHROPIC_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: ANTHROPIC_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no Anthropic API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewAnthropic(key, cfg.LLM.Model, nil)
return llm.NewAnthropic(key, model, nil)
case "openai":
key := os.Getenv("OPENAI_API_KEY")
key := envKey("OPENAI_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: OPENAI_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no OpenAI API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewOpenAI(key, cfg.LLM.Model, nil)
return llm.NewOpenAI(key, model, nil)
case "gemini":
key := os.Getenv("GEMINI_API_KEY")
key := envKey("GEMINI_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: GEMINI_API_KEY not set, LLM features disabled")
fmt.Fprintln(os.Stderr, "Warning: no Gemini API key configured, LLM features disabled")
return llm.NewNoop()
}
return llm.NewGemini(key, cfg.LLM.Model, nil)
return llm.NewGemini(key, model, nil)
case "ollama":
return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil)
return llm.NewOllama(model, endpoint, nil)
default:
return llm.NewNoop()
}

View File

@@ -37,6 +37,7 @@
<a href="#ac" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">AC Units</a>
<a href="#toggles" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">Toggles</a>
<a href="#forecast" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">Forecast</a>
<a href="#llm" class="px-3 py-1 bg-gray-200 dark:bg-gray-700 rounded hover:bg-gray-300 dark:hover:bg-gray-600">LLM</a>
</div>
{{template "profiles" .}}
@@ -46,6 +47,7 @@
{{template "ac_units" .}}
{{template "toggles" .}}
{{template "forecast" .}}
{{template "llm" .}}
<footer class="mt-8 text-center text-xs text-gray-500 dark:text-gray-500 py-4">
<p>Heatwave Autopilot</p>
@@ -540,3 +542,47 @@
{{end}}
</section>
{{end}}
{{define "llm"}}
<section id="llm" class="mb-8">
<h2 class="text-xl font-semibold mb-3">LLM Provider</h2>
<form method="POST" action="/setup/llm/save" class="p-4 bg-white dark:bg-gray-800 rounded-lg shadow dark:shadow-gray-700">
<div class="grid grid-cols-1 sm:grid-cols-2 gap-4">
<div class="sm:col-span-2">
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Provider</label>
<select name="provider" id="llm-provider" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700"
onchange="document.getElementById('llm-cloud').style.display=(this.value==='anthropic'||this.value==='openai'||this.value==='gemini')?'':'none';document.getElementById('llm-local').style.display=(this.value==='ollama')?'':'none';">
<option value="none" {{if eq .LLMProvider "none"}}selected{{end}}>None</option>
<option value="anthropic" {{if eq .LLMProvider "anthropic"}}selected{{end}}>Anthropic</option>
<option value="openai" {{if eq .LLMProvider "openai"}}selected{{end}}>OpenAI</option>
<option value="gemini" {{if eq .LLMProvider "gemini"}}selected{{end}}>Gemini</option>
<option value="ollama" {{if eq .LLMProvider "ollama"}}selected{{end}}>Ollama (local)</option>
</select>
</div>
<div id="llm-cloud" style="{{if or (eq .LLMProvider "anthropic") (eq .LLMProvider "openai") (eq .LLMProvider "gemini")}}{{else}}display:none{{end}}" class="sm:col-span-2 grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Model</label>
<input type="text" name="cloud_model" value="{{.LLMModel}}" placeholder="e.g. claude-sonnet-4-5-20250929" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">API Key</label>
<input type="password" name="api_key" placeholder="Leave blank to keep current key" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
</div>
<div id="llm-local" style="{{if eq .LLMProvider "ollama"}}{{else}}display:none{{end}}" class="sm:col-span-2 grid grid-cols-1 sm:grid-cols-2 gap-4">
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Endpoint URL</label>
<input type="text" name="endpoint" value="{{.LLMEndpoint}}" placeholder="http://localhost:11434" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
<div>
<label class="block text-sm text-gray-600 dark:text-gray-400 mb-1">Model</label>
<input type="text" name="local_model" value="{{.LLMModel}}" placeholder="e.g. llama3" class="w-full px-3 py-2 border dark:border-gray-600 rounded bg-white dark:bg-gray-700">
</div>
</div>
</div>
<button type="submit" class="mt-4 px-4 py-2 bg-blue-600 text-white rounded hover:bg-blue-700">Save LLM Settings</button>
</form>
</section>
{{end}}

View File

@@ -7,6 +7,7 @@ import (
"net/http"
"strconv"
"github.com/cnachtigall/heatwave-autopilot/internal/config"
"github.com/cnachtigall/heatwave-autopilot/internal/report"
"github.com/cnachtigall/heatwave-autopilot/internal/static"
"github.com/cnachtigall/heatwave-autopilot/internal/store"
@@ -20,6 +21,7 @@ var setupTmpl *template.Template
// init compiles the setup page template with its helper functions.
func init() {
	// Template helpers. Note that "eq" is deliberately NOT registered here:
	// Go templates ship a builtin eq that already compares strings (and any
	// other comparable basic types), so a string-only override would only
	// shadow the more general builtin and break eq on non-string operands.
	funcs := template.FuncMap{
		"mul": func(a, b float64) float64 { return a * b },
	}
	setupTmpl = template.Must(template.New("setup").Funcs(funcs).Parse(setupTmplStr))
}
@@ -46,8 +48,11 @@ type setupData struct {
Devices []store.Device
Occupants []store.Occupant
ACUnits []acUnitView
Toggles map[string]bool
LastFetch string
Toggles map[string]bool
LastFetch string
LLMProvider string
LLMModel string
LLMEndpoint string
}
func loadSetupData(w http.ResponseWriter, r *http.Request) setupData {
@@ -96,6 +101,11 @@ func loadSetupData(w http.ResponseWriter, r *http.Request) setupData {
sd.LastFetch = lastFetch.Format("2006-01-02 15:04")
}
llmSettings, _ := db.GetLLMSettings()
sd.LLMProvider = llmSettings.Provider
sd.LLMModel = llmSettings.Model
sd.LLMEndpoint = llmSettings.Endpoint
return sd
}
@@ -399,6 +409,49 @@ func forecastFetchHandler(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, "/setup#forecast", http.StatusSeeOther)
}
func llmSaveHandler(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
provider := r.FormValue("provider")
var model, endpoint, apiKey string
switch provider {
case "anthropic", "openai", "gemini":
model = r.FormValue("cloud_model")
apiKey = r.FormValue("api_key")
case "ollama":
model = r.FormValue("local_model")
endpoint = r.FormValue("endpoint")
}
apiKeyEnc := ""
if apiKey != "" {
var err error
apiKeyEnc, err = config.Encrypt(apiKey)
if err != nil {
setFlash(w, "Error encrypting API key: "+err.Error())
http.Redirect(w, r, "/setup#llm", http.StatusSeeOther)
return
}
} else {
// Preserve existing encrypted key if no new one was submitted.
existing, _ := db.GetLLMSettings()
apiKeyEnc = existing.APIKeyEnc
}
ls := &store.LLMSettings{
Provider: provider,
Model: model,
Endpoint: endpoint,
APIKeyEnc: apiKeyEnc,
}
if err := db.SaveLLMSettings(ls); err != nil {
setFlash(w, "Error saving LLM settings: "+err.Error())
} else {
setFlash(w, "LLM settings saved.")
}
http.Redirect(w, r, "/setup#llm", http.StatusSeeOther)
}
// --- Flash helpers ---
const flashCookieName = "heatwave_flash"
@@ -470,4 +523,5 @@ func registerSetupRoutes(mux *http.ServeMux, dateStr string) {
mux.HandleFunc("POST /setup/toggles/set", toggleSetHandler)
mux.HandleFunc("POST /setup/forecast/fetch", forecastFetchHandler)
mux.HandleFunc("POST /setup/llm/save", llmSaveHandler)
}

View File

@@ -60,3 +60,4 @@ func Load() Config {
_ = yaml.Unmarshal(data, &cfg)
return cfg
}

99
internal/config/crypto.go Normal file
View File

@@ -0,0 +1,99 @@
package config
import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"fmt"
"io"
"os"
"path/filepath"
)
const keyFileName = "encryption.key"

// keyPath builds the full location of the AES key file, which lives
// alongside the rest of the application's configuration in ConfigDir().
func keyPath() string {
	dir := ConfigDir()
	return filepath.Join(dir, keyFileName)
}
// loadOrCreateKey reads the 32-byte AES-256 key from disk, generating and
// persisting a fresh one on first use.
//
// A key file that exists but has the wrong size is treated as corrupt and
// reported as an error rather than silently regenerated — overwriting the
// key would make every previously encrypted value permanently
// undecryptable. Read failures other than "file does not exist" are
// likewise surfaced instead of triggering regeneration.
func loadOrCreateKey() ([]byte, error) {
	path := keyPath()
	data, err := os.ReadFile(path)
	switch {
	case err == nil && len(data) == 32:
		return data, nil
	case err == nil:
		return nil, fmt.Errorf("key file %s is corrupt: %d bytes, want 32", path, len(data))
	case !os.IsNotExist(err):
		return nil, fmt.Errorf("read key file: %w", err)
	}
	key := make([]byte, 32)
	if _, err := io.ReadFull(rand.Reader, key); err != nil {
		return nil, fmt.Errorf("generate key: %w", err)
	}
	if err := os.MkdirAll(filepath.Dir(path), 0o700); err != nil {
		return nil, fmt.Errorf("create config dir: %w", err)
	}
	// 0o600: the key must only ever be readable by the service user.
	if err := os.WriteFile(path, key, 0o600); err != nil {
		return nil, fmt.Errorf("write key file: %w", err)
	}
	return key, nil
}
// Encrypt seals plaintext with AES-256-GCM under the on-disk key and
// returns the nonce-prefixed ciphertext as a base64 string. The empty
// string is passed through unchanged as "".
func Encrypt(plaintext string) (string, error) {
	if plaintext == "" {
		return "", nil
	}
	key, err := loadOrCreateKey()
	if err != nil {
		return "", err
	}
	c, err := aes.NewCipher(key)
	if err != nil {
		return "", fmt.Errorf("new cipher: %w", err)
	}
	aead, err := cipher.NewGCM(c)
	if err != nil {
		return "", fmt.Errorf("new gcm: %w", err)
	}
	// The random nonce goes first; Seal appends the ciphertext directly
	// after it, so the stored blob is nonce||ciphertext in one buffer.
	buf := make([]byte, aead.NonceSize())
	if _, err := io.ReadFull(rand.Reader, buf); err != nil {
		return "", fmt.Errorf("generate nonce: %w", err)
	}
	buf = aead.Seal(buf, buf[:aead.NonceSize()], []byte(plaintext), nil)
	return base64.StdEncoding.EncodeToString(buf), nil
}
// Decrypt reverses Encrypt: it base64-decodes the input, splits off the
// leading GCM nonce, and opens the remaining ciphertext with the on-disk
// key. The empty string is passed through unchanged as "".
func Decrypt(encoded string) (string, error) {
	if encoded == "" {
		return "", nil
	}
	key, err := loadOrCreateKey()
	if err != nil {
		return "", err
	}
	raw, err := base64.StdEncoding.DecodeString(encoded)
	if err != nil {
		return "", fmt.Errorf("decode base64: %w", err)
	}
	c, err := aes.NewCipher(key)
	if err != nil {
		return "", fmt.Errorf("new cipher: %w", err)
	}
	aead, err := cipher.NewGCM(c)
	if err != nil {
		return "", fmt.Errorf("new gcm: %w", err)
	}
	// The blob layout is nonce||ciphertext (see Encrypt); anything shorter
	// than a nonce cannot possibly be valid.
	ns := aead.NonceSize()
	if len(raw) < ns {
		return "", fmt.Errorf("ciphertext too short")
	}
	plain, err := aead.Open(nil, raw[:ns], raw[ns:], nil)
	if err != nil {
		return "", fmt.Errorf("decrypt: %w", err)
	}
	return string(plain), nil
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,41 @@
package store
import "database/sql"
// LLMSettings holds the persisted LLM configuration.
//
// It lives in the single-row llm_settings table (id = 1); see
// GetLLMSettings and SaveLLMSettings for access.
type LLMSettings struct {
	Provider  string // "none", "anthropic", "openai", "gemini", or "ollama"
	Model     string // provider-specific model identifier
	Endpoint  string // base URL for local providers (e.g. Ollama)
	APIKeyEnc string // base64-encoded AES-256-GCM ciphertext
}
// GetLLMSettings returns the stored LLM settings. When no row exists yet
// (first run) it returns the defaults — provider "none", all other fields
// empty — with a nil error, so callers never have to special-case an
// unpopulated table. Any other query error yields (nil, err).
func (s *Store) GetLLMSettings() (*LLMSettings, error) {
	ls := &LLMSettings{Provider: "none"}
	err := s.db.QueryRow(
		`SELECT provider, model, endpoint, api_key_enc FROM llm_settings WHERE id = 1`,
	).Scan(&ls.Provider, &ls.Model, &ls.Endpoint, &ls.APIKeyEnc)
	// errors.Is rather than == so the check still holds if a driver or
	// wrapper ever returns sql.ErrNoRows wrapped in another error.
	if errors.Is(err, sql.ErrNoRows) {
		return ls, nil
	}
	if err != nil {
		return nil, err
	}
	return ls, nil
}
// SaveLLMSettings upserts the LLM configuration (single-row table).
//
// The table is keyed on the constant id 1, so ON CONFLICT turns every save
// after the first into an update; updated_at is refreshed on both paths.
func (s *Store) SaveLLMSettings(ls *LLMSettings) error {
	_, err := s.db.Exec(`
INSERT INTO llm_settings (id, provider, model, endpoint, api_key_enc, updated_at)
VALUES (1, ?, ?, ?, ?, datetime('now'))
ON CONFLICT(id) DO UPDATE SET
provider = excluded.provider,
model = excluded.model,
endpoint = excluded.endpoint,
api_key_enc = excluded.api_key_enc,
updated_at = datetime('now')
`, ls.Provider, ls.Model, ls.Endpoint, ls.APIKeyEnc)
	return err
}

View File

@@ -104,3 +104,12 @@ CREATE TABLE IF NOT EXISTS toggles (
active INTEGER NOT NULL DEFAULT 0,
activated_at TEXT
);
-- Single-row table holding the web-configured LLM provider settings.
-- The CHECK constraint pins id to 1 so the table can never hold more than
-- one row; writers upsert via ON CONFLICT(id).
CREATE TABLE IF NOT EXISTS llm_settings (
  id INTEGER PRIMARY KEY CHECK (id = 1),
  provider TEXT NOT NULL DEFAULT 'none',   -- 'none', 'anthropic', 'openai', 'gemini', 'ollama'
  model TEXT NOT NULL DEFAULT '',          -- provider-specific model identifier
  endpoint TEXT NOT NULL DEFAULT '',       -- base URL for local providers (Ollama)
  api_key_enc TEXT NOT NULL DEFAULT '',    -- base64 AES-256-GCM ciphertext; never plaintext
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);