Remove unused crypto module, DataDir/DefaultDBPath (SQLite remnant), and ListenAndServe (replaced by direct http.Server in main). Strip 17 unreferenced i18n keys from en/de translations. Add --llm-provider, --llm-model, and --llm-endpoint CLI flags for runtime LLM override without a config file. Rewrite README with correct Go 1.25 version, shields, LLM providers table, Docker/Helm deployment docs. Fix .gitignore pattern to not match cmd/heatguard/ directory.
225 lines
5.6 KiB
Go
225 lines
5.6 KiB
Go
package server
|
|
|
|
import (
|
|
"encoding/json"
|
|
"fmt"
|
|
"html/template"
|
|
"io/fs"
|
|
"net/http"
|
|
"os"
|
|
"path/filepath"
|
|
"sync/atomic"
|
|
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/config"
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/llm"
|
|
)
|
|
|
|
// Server holds the HTTP server state.
type Server struct {
	mux         *http.ServeMux // routes for pages, health checks, and the JSON API
	trans       *translations  // loaded en/de translation tables
	cfg         config.Config  // application configuration (includes cfg.LLM)
	llmProvider llm.Provider   // LLM backend selected from cfg.LLM.Provider in New
	devMode     bool           // when true, templates/assets/i18n are read from disk per request
	ready       atomic.Bool    // readiness flag served by /readyz; set true at the end of New
}
|
|
|
|
// Options configures the server.
type Options struct {
	Port    int           // listen port; not consumed by New itself — presumably used by the caller when binding the listener (TODO confirm)
	DevMode bool          // serve templates/assets/i18n from the web/ directory instead of the embedded FS
	Config  config.Config // full application configuration passed through to handlers
}
|
|
|
|
// New creates a new Server and sets up routes.
|
|
func New(opts Options) (*Server, error) {
|
|
s := &Server{
|
|
mux: http.NewServeMux(),
|
|
cfg: opts.Config,
|
|
devMode: opts.DevMode,
|
|
}
|
|
|
|
// Load translations
|
|
var enJSON, deJSON []byte
|
|
var err error
|
|
if opts.DevMode {
|
|
enJSON, err = os.ReadFile(filepath.Join("web", "i18n", "en.json"))
|
|
if err != nil {
|
|
return nil, fmt.Errorf("read en.json: %w", err)
|
|
}
|
|
deJSON, err = os.ReadFile(filepath.Join("web", "i18n", "de.json"))
|
|
if err != nil {
|
|
return nil, fmt.Errorf("read de.json: %w", err)
|
|
}
|
|
} else {
|
|
if WebFS == nil {
|
|
return nil, fmt.Errorf("WebFS not set — call server.WebFS = ... before server.New()")
|
|
}
|
|
enJSON, err = fs.ReadFile(WebFS, "i18n/en.json")
|
|
if err != nil {
|
|
return nil, fmt.Errorf("read embedded en.json: %w", err)
|
|
}
|
|
deJSON, err = fs.ReadFile(WebFS, "i18n/de.json")
|
|
if err != nil {
|
|
return nil, fmt.Errorf("read embedded de.json: %w", err)
|
|
}
|
|
}
|
|
|
|
s.trans, err = loadTranslations(enJSON, deJSON)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("load translations: %w", err)
|
|
}
|
|
|
|
// Set up LLM provider
|
|
s.llmProvider = buildLLMProvider(s.cfg)
|
|
|
|
// Static assets
|
|
if opts.DevMode {
|
|
s.mux.Handle("/assets/", http.StripPrefix("/assets/", http.FileServer(http.Dir("web"))))
|
|
} else {
|
|
s.mux.Handle("/assets/", http.StripPrefix("/assets/", http.FileServer(http.FS(WebFS))))
|
|
}
|
|
|
|
// Page routes
|
|
s.mux.HandleFunc("/", s.handleDashboard)
|
|
s.mux.HandleFunc("/setup", s.handleSetup)
|
|
s.mux.HandleFunc("/guide", s.handleGuide)
|
|
|
|
// Health routes
|
|
s.mux.HandleFunc("/healthz", s.handleHealthz)
|
|
s.mux.HandleFunc("/readyz", s.handleReadyz)
|
|
|
|
// API routes
|
|
s.mux.HandleFunc("/api/compute/dashboard", s.handleComputeDashboard)
|
|
s.mux.HandleFunc("/api/weather/forecast", s.handleWeatherForecast)
|
|
s.mux.HandleFunc("/api/weather/warnings", s.handleWeatherWarnings)
|
|
s.mux.HandleFunc("/api/llm/summarize", s.handleLLMSummarize)
|
|
s.mux.HandleFunc("/api/llm/actions", s.handleLLMActions)
|
|
s.mux.HandleFunc("/api/llm/config", s.handleLLMConfig)
|
|
|
|
s.ready.Store(true)
|
|
return s, nil
|
|
}
|
|
|
|
// Handler returns the HTTP handler.
// It exposes the ServeMux configured in New so callers can wrap it with
// middleware or mount it on an http.Server.
func (s *Server) Handler() http.Handler {
	return s.mux
}
|
|
|
|
// pageData is the template context passed to layout.html by renderPage.
type pageData struct {
	Lang  string // resolved UI language code (e.g. "en" or "de")
	Page  string // logical page name, also used to look up the nav.<page> title key
	Title string // localized page title resolved from the translations
}
|
|
|
|
func (s *Server) renderPage(w http.ResponseWriter, r *http.Request, page, templateFile string) {
|
|
lang := detectLanguage(r)
|
|
|
|
// Set language cookie if query param was provided
|
|
if qLang := r.URL.Query().Get("lang"); isSupported(qLang) {
|
|
http.SetCookie(w, &http.Cookie{
|
|
Name: "heatguard_lang",
|
|
Value: qLang,
|
|
Path: "/",
|
|
MaxAge: 365 * 24 * 3600,
|
|
SameSite: http.SameSiteLaxMode,
|
|
})
|
|
}
|
|
|
|
funcMap := template.FuncMap{
|
|
"t": func(key string) string {
|
|
return s.trans.get(lang, key)
|
|
},
|
|
}
|
|
|
|
var tmpl *template.Template
|
|
var err error
|
|
|
|
if s.devMode {
|
|
tmpl, err = template.New("layout.html").Funcs(funcMap).ParseFiles(
|
|
filepath.Join("web", "templates", "layout.html"),
|
|
filepath.Join("web", "templates", templateFile),
|
|
)
|
|
} else {
|
|
tmpl, err = template.New("layout.html").Funcs(funcMap).ParseFS(WebFS,
|
|
"templates/layout.html",
|
|
"templates/"+templateFile,
|
|
)
|
|
}
|
|
|
|
if err != nil {
|
|
http.Error(w, fmt.Sprintf("template error: %v", err), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
title := s.trans.get(lang, "nav."+page)
|
|
data := pageData{
|
|
Lang: lang,
|
|
Page: page,
|
|
Title: title,
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
|
if err := tmpl.Execute(w, data); err != nil {
|
|
http.Error(w, fmt.Sprintf("render error: %v", err), http.StatusInternalServerError)
|
|
}
|
|
}
|
|
|
|
func (s *Server) handleDashboard(w http.ResponseWriter, r *http.Request) {
|
|
if r.URL.Path != "/" {
|
|
http.NotFound(w, r)
|
|
return
|
|
}
|
|
s.renderPage(w, r, "dashboard", "dashboard.html")
|
|
}
|
|
|
|
// handleSetup renders the setup page.
func (s *Server) handleSetup(w http.ResponseWriter, r *http.Request) {
	s.renderPage(w, r, "setup", "setup.html")
}
|
|
|
|
// handleGuide renders the guide page.
func (s *Server) handleGuide(w http.ResponseWriter, r *http.Request) {
	s.renderPage(w, r, "guide", "guide.html")
}
|
|
|
|
func (s *Server) handleHealthz(w http.ResponseWriter, r *http.Request) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
|
|
}
|
|
|
|
func (s *Server) handleReadyz(w http.ResponseWriter, r *http.Request) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
if !s.ready.Load() {
|
|
w.WriteHeader(http.StatusServiceUnavailable)
|
|
json.NewEncoder(w).Encode(map[string]string{"status": "not ready"})
|
|
return
|
|
}
|
|
json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
|
|
}
|
|
|
|
func buildLLMProvider(cfg config.Config) llm.Provider {
|
|
switch cfg.LLM.Provider {
|
|
case "anthropic":
|
|
key := os.Getenv("ANTHROPIC_API_KEY")
|
|
if key == "" {
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewAnthropic(key, cfg.LLM.Model, nil)
|
|
case "openai":
|
|
key := os.Getenv("OPENAI_API_KEY")
|
|
if key == "" {
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewOpenAI(key, cfg.LLM.Model, nil)
|
|
case "gemini":
|
|
key := os.Getenv("GEMINI_API_KEY")
|
|
if key == "" {
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewGemini(key, cfg.LLM.Model, nil)
|
|
case "ollama":
|
|
return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil)
|
|
default:
|
|
return llm.NewNoop()
|
|
}
|
|
}
|