Replace CLI + SQLite architecture with a Go web server + vanilla JS frontend using IndexedDB for all client-side data storage. - Remove: cli, store, report, static packages - Add: compute engine (BuildDashboard), server package, web UI - Add: setup page with CRUD for profiles, rooms, devices, occupants, AC - Add: dashboard with SVG temperature timeline, risk analysis, care checklist - Add: i18n support (English/German) with server-side Go templates - Add: LLM provider selection UI with client-side API key storage - Add: per-room indoor temperature, edit buttons, language-aware AI summary
169 lines
4.3 KiB
Go
package server
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"net/http"
|
|
"time"
|
|
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/compute"
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/llm"
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/weather"
|
|
)
|
|
|
|
func (s *Server) handleComputeDashboard(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
var req compute.ComputeRequest
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
jsonError(w, "invalid request body", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
data, err := compute.BuildDashboard(req)
|
|
if err != nil {
|
|
jsonError(w, err.Error(), http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
jsonResponse(w, data)
|
|
}
|
|
|
|
// forecastRequest is the JSON body for the weather-forecast endpoint:
// a geographic coordinate plus a timezone string passed through to the
// forecast provider (presumably an IANA name like "Europe/Berlin" —
// TODO confirm against the frontend).
type forecastRequest struct {
	Lat      float64 `json:"lat"`
	Lon      float64 `json:"lon"`
	Timezone string  `json:"timezone"`
}
|
|
|
|
func (s *Server) handleWeatherForecast(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
var req forecastRequest
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
jsonError(w, "invalid request body", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
|
|
defer cancel()
|
|
|
|
provider := weather.NewOpenMeteo(nil)
|
|
resp, err := provider.FetchForecast(ctx, req.Lat, req.Lon, req.Timezone)
|
|
if err != nil {
|
|
jsonError(w, err.Error(), http.StatusBadGateway)
|
|
return
|
|
}
|
|
|
|
jsonResponse(w, resp)
|
|
}
|
|
|
|
// warningsRequest is the JSON body for the weather-warnings endpoint:
// the coordinate for which severe-weather warnings are requested.
type warningsRequest struct {
	Lat float64 `json:"lat"`
	Lon float64 `json:"lon"`
}
|
|
|
|
func (s *Server) handleWeatherWarnings(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
var req warningsRequest
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
jsonError(w, "invalid request body", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
|
|
defer cancel()
|
|
|
|
provider := weather.NewDWDWFS(nil)
|
|
warnings, err := provider.FetchWarnings(ctx, req.Lat, req.Lon)
|
|
if err != nil {
|
|
jsonError(w, err.Error(), http.StatusBadGateway)
|
|
return
|
|
}
|
|
|
|
jsonResponse(w, map[string]any{"warnings": warnings})
|
|
}
|
|
|
|
// summarizeRequest embeds the LLM summary input and optionally carries
// client-side credentials (provider name, API key, model) that override
// the server-configured LLM provider for a single request.
type summarizeRequest struct {
	llm.SummaryInput
	Provider string `json:"provider,omitempty"`
	APIKey   string `json:"apiKey,omitempty"`
	Model    string `json:"model,omitempty"`
}
|
|
|
|
func (s *Server) handleLLMSummarize(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodPost {
|
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
var req summarizeRequest
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
jsonError(w, "invalid request body", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Use client-provided credentials if present, otherwise fall back to server config
|
|
provider := s.llmProvider
|
|
if req.Provider != "" && req.APIKey != "" {
|
|
switch req.Provider {
|
|
case "anthropic":
|
|
provider = llm.NewAnthropic(req.APIKey, req.Model, nil)
|
|
case "openai":
|
|
provider = llm.NewOpenAI(req.APIKey, req.Model, nil)
|
|
case "gemini":
|
|
provider = llm.NewGemini(req.APIKey, req.Model, nil)
|
|
}
|
|
}
|
|
|
|
if provider.Name() == "none" {
|
|
jsonResponse(w, map[string]string{"summary": ""})
|
|
return
|
|
}
|
|
|
|
ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second)
|
|
defer cancel()
|
|
|
|
summary, err := provider.Summarize(ctx, req.SummaryInput)
|
|
if err != nil {
|
|
jsonError(w, err.Error(), http.StatusBadGateway)
|
|
return
|
|
}
|
|
|
|
jsonResponse(w, map[string]string{"summary": summary})
|
|
}
|
|
|
|
func (s *Server) handleLLMConfig(w http.ResponseWriter, r *http.Request) {
|
|
if r.Method != http.MethodGet {
|
|
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
|
return
|
|
}
|
|
|
|
available := s.llmProvider.Name() != "none"
|
|
jsonResponse(w, map[string]any{
|
|
"provider": s.cfg.LLM.Provider,
|
|
"model": s.cfg.LLM.Model,
|
|
"available": available,
|
|
})
|
|
}
|
|
|
|
func jsonResponse(w http.ResponseWriter, data any) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(data)
|
|
}
|
|
|
|
func jsonError(w http.ResponseWriter, msg string, status int) {
|
|
w.Header().Set("Content-Type", "application/json")
|
|
w.WriteHeader(status)
|
|
json.NewEncoder(w).Encode(map[string]string{"error": msg})
|
|
}
|