Files
HeatGuard/internal/server/api.go
vikingowl b30c0b5f36 feat: add OpenWeatherMap provider, retry transport, and forecast config UI
Add OpenWeatherMap One Call API 3.0 as alternative weather provider with
configurable selection in the Forecast tab. Includes server-side retry
transport with exponential backoff for all weather providers, structured
error responses with type classification, auto-fetch on stale dashboard
data, and improved error UX with specific messages.
2026-02-11 01:02:48 +01:00

381 lines
9.8 KiB
Go

package server
import (
	"context"
	"encoding/json"
	"errors"
	"net"
	"net/http"
	"strings"
	"time"

	"github.com/cnachtigall/heatwave-autopilot/internal/compute"
	"github.com/cnachtigall/heatwave-autopilot/internal/llm"
	"github.com/cnachtigall/heatwave-autopilot/internal/weather"
)
// handleComputeDashboard decodes a POST body into a compute.ComputeRequest,
// builds the dashboard from it, and writes the result as JSON. Decode and
// build failures are reported as 400s with a JSON error body.
func (s *Server) handleComputeDashboard(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	var payload compute.ComputeRequest
	if decodeErr := json.NewDecoder(r.Body).Decode(&payload); decodeErr != nil {
		jsonError(w, "invalid request body", http.StatusBadRequest)
		return
	}
	dashboard, buildErr := compute.BuildDashboard(payload)
	if buildErr != nil {
		jsonError(w, buildErr.Error(), http.StatusBadRequest)
		return
	}
	jsonResponse(w, dashboard)
}
// forecastRequest is the JSON body accepted by the weather forecast
// endpoint. Provider and APIKey are optional; the handler requires an
// API key only when Provider is "openweathermap".
type forecastRequest struct {
	Lat      float64 `json:"lat"`                // location latitude (decimal degrees, presumably — confirm with client)
	Lon      float64 `json:"lon"`                // location longitude
	Timezone string  `json:"timezone"`           // timezone passed through to the provider — NOTE(review): likely IANA name, confirm
	Provider string  `json:"provider,omitempty"` // "openweathermap" selects OWM; anything else falls back to Open-Meteo
	APIKey   string  `json:"apiKey,omitempty"`   // OWM API key; required iff Provider == "openweathermap"
}
// handleWeatherForecast fetches a forecast for the requested coordinates
// from either OpenWeatherMap (when selected, API key required) or the
// keyless Open-Meteo default, with a 30s upstream timeout.
func (s *Server) handleWeatherForecast(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	var body forecastRequest
	if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
		jsonError(w, "invalid request body", http.StatusBadRequest)
		return
	}
	// Pick the upstream provider; Open-Meteo needs no credentials.
	var provider weather.Provider
	if body.Provider == "openweathermap" {
		if body.APIKey == "" {
			jsonError(w, "API key required for OpenWeatherMap", http.StatusBadRequest)
			return
		}
		provider = weather.NewOpenWeatherMap(body.APIKey, nil)
	} else {
		provider = weather.NewOpenMeteo(nil)
	}
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	forecast, fetchErr := provider.FetchForecast(ctx, body.Lat, body.Lon, body.Timezone)
	if fetchErr != nil {
		classifyWeatherError(w, fetchErr)
		return
	}
	jsonResponse(w, forecast)
}
// warningsRequest is the JSON body accepted by the weather warnings endpoint.
type warningsRequest struct {
	Lat float64 `json:"lat"` // location latitude
	Lon float64 `json:"lon"` // location longitude
}
// handleWeatherWarnings fetches active weather warnings for the requested
// coordinates from the DWD WFS provider, with a 30s upstream timeout, and
// returns them wrapped in a {"warnings": ...} JSON object.
func (s *Server) handleWeatherWarnings(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	var body warningsRequest
	if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
		jsonError(w, "invalid request body", http.StatusBadRequest)
		return
	}
	provider := weather.NewDWDWFS(nil)
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	warnings, fetchErr := provider.FetchWarnings(ctx, body.Lat, body.Lon)
	if fetchErr != nil {
		classifyWeatherError(w, fetchErr)
		return
	}
	jsonResponse(w, map[string]any{"warnings": warnings})
}
// summarizeRequest embeds the provider-facing summary input and adds
// optional client-supplied LLM credentials that override server config.
type summarizeRequest struct {
	llm.SummaryInput // payload forwarded to the provider's Summarize call

	Provider string `json:"provider,omitempty"` // "anthropic", "openai", or "gemini"
	APIKey   string `json:"apiKey,omitempty"`   // credential for the chosen provider
	Model    string `json:"model,omitempty"`    // optional model override
}
// handleLLMSummarize produces a text summary via an LLM provider. Request
// credentials (provider + API key) take precedence over the server-side
// configuration; when no provider is available anywhere ("none"), an empty
// summary is returned instead of an error so the UI degrades gracefully.
func (s *Server) handleLLMSummarize(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	var body summarizeRequest
	if decodeErr := json.NewDecoder(r.Body).Decode(&body); decodeErr != nil {
		jsonError(w, "invalid request body", http.StatusBadRequest)
		return
	}
	// Server-configured provider is the fallback; client credentials win.
	provider := s.llmProvider
	if body.Provider != "" && body.APIKey != "" {
		switch body.Provider {
		case "anthropic":
			provider = llm.NewAnthropic(body.APIKey, body.Model, nil)
		case "openai":
			provider = llm.NewOpenAI(body.APIKey, body.Model, nil)
		case "gemini":
			provider = llm.NewGemini(body.APIKey, body.Model, nil)
		}
	}
	if provider.Name() == "none" {
		jsonResponse(w, map[string]string{"summary": ""})
		return
	}
	ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second)
	defer cancel()
	summary, sumErr := provider.Summarize(ctx, body.SummaryInput)
	if sumErr != nil {
		jsonError(w, sumErr.Error(), http.StatusBadGateway)
		return
	}
	jsonResponse(w, map[string]string{"summary": summary})
}
// actionsRequest is the JSON body for the LLM actions endpoint: a snapshot
// of the thermal dashboard plus room metadata and optional LLM credentials.
type actionsRequest struct {
	// Thermal context
	Date          string  `json:"date"`
	IndoorTempC   float64 `json:"indoorTempC"`
	PeakTempC     float64 `json:"peakTempC"`
	MinNightTempC float64 `json:"minNightTempC"`
	PoorNightCool bool    `json:"poorNightCool"`
	RiskLevel     string  `json:"riskLevel"`
	RiskWindows   []struct {
		StartHour int     `json:"startHour"`
		EndHour   int     `json:"endHour"`
		PeakTempC float64 `json:"peakTempC"`
		Level     string  `json:"level"`
	} `json:"riskWindows"`
	Timeline []struct {
		Hour         int     `json:"hour"`
		TempC        float64 `json:"tempC"`
		HumidityPct  float64 `json:"humidityPct"`
		BudgetStatus string  `json:"budgetStatus"`
		CoolMode     string  `json:"coolMode"`
	} `json:"timeline"`
	// NOTE(review): the handler indexes RoomBudgets by Timeline position,
	// so this appears to be a per-hour series aligned with Timeline rather
	// than one entry per room — confirm against the client payload.
	RoomBudgets []struct {
		TotalGainW float64 `json:"totalGainW"`
	} `json:"roomBudgets"`
	// Room metadata
	Rooms []struct {
		Name        string `json:"name"`
		Orientation string `json:"orientation"`
		ShadingType string `json:"shadingType"`
		HasAC       bool   `json:"hasAC"`
	} `json:"rooms"`
	// LLM credentials
	Provider string `json:"provider,omitempty"`
	APIKey   string `json:"apiKey,omitempty"`
	Model    string `json:"model,omitempty"`
	Language string `json:"language,omitempty"`
}
// handleLLMActions asks the configured (or request-supplied) LLM provider
// for recommended actions based on a snapshot of the thermal dashboard and
// returns the parsed action list as JSON. Responds 405 on non-POST, 400 on
// a malformed body, and 502 when the provider fails or returns unparseable
// output.
func (s *Server) handleLLMActions(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	var req actionsRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, "invalid request body", http.StatusBadRequest)
		return
	}
	// Client-provided credentials take precedence over server configuration.
	provider := s.llmProvider
	if req.Provider != "" && req.APIKey != "" {
		switch req.Provider {
		case "anthropic":
			provider = llm.NewAnthropic(req.APIKey, req.Model, nil)
		case "openai":
			provider = llm.NewOpenAI(req.APIKey, req.Model, nil)
		case "gemini":
			provider = llm.NewGemini(req.APIKey, req.Model, nil)
		}
	}
	// "none" means no provider is configured anywhere; return an empty list
	// rather than an error so the UI can degrade gracefully.
	if provider.Name() == "none" {
		jsonResponse(w, map[string]any{"actions": []any{}})
		return
	}
	// Flatten the request into the provider-facing ActionsInput.
	input := llm.ActionsInput{
		Date:          req.Date,
		Language:      req.Language,
		IndoorTempC:   req.IndoorTempC,
		PeakTempC:     req.PeakTempC,
		MinNightTempC: req.MinNightTempC,
		PoorNightCool: req.PoorNightCool,
		RiskLevel:     req.RiskLevel,
	}
	for _, rw := range req.RiskWindows {
		input.RiskWindows = append(input.RiskWindows, llm.RiskWindowSummary{
			StartHour: rw.StartHour,
			EndHour:   rw.EndHour,
			PeakTempC: rw.PeakTempC,
			Level:     rw.Level,
		})
	}
	// Loop variable renamed from `s` to `slot`: it shadowed the *Server
	// receiver, which go vet/staticcheck flag and which invites bugs.
	for i, slot := range req.Timeline {
		// RoomBudgets appears to be aligned index-for-index with Timeline
		// (per-hour gain totals); missing entries default to 0.
		// NOTE(review): confirm this alignment against the client payload.
		gainsW := 0.0
		if i < len(req.RoomBudgets) {
			gainsW = req.RoomBudgets[i].TotalGainW
		}
		input.Timeline = append(input.Timeline, llm.ActionsTimelineSlot{
			Hour:         slot.Hour,
			TempC:        slot.TempC,
			HumidityPct:  slot.HumidityPct,
			BudgetStatus: slot.BudgetStatus,
			CoolMode:     slot.CoolMode,
			GainsW:       gainsW,
		})
	}
	for _, rm := range req.Rooms {
		input.Rooms = append(input.Rooms, llm.ActionsRoom{
			Name:        rm.Name,
			Orientation: rm.Orientation,
			ShadingType: rm.ShadingType,
			HasAC:       rm.HasAC,
		})
	}
	ctx, cancel := context.WithTimeout(r.Context(), 60*time.Second)
	defer cancel()
	raw, err := provider.GenerateActions(ctx, input)
	if err != nil {
		jsonError(w, err.Error(), http.StatusBadGateway)
		return
	}
	// Parse the model output defensively: accept a bare JSON array, or one
	// wrapped in prose / ```json fences by trimming to the outermost [...].
	var actions []json.RawMessage
	if err := json.Unmarshal([]byte(raw), &actions); err != nil {
		trimmed := raw
		if start := findJSONStart(trimmed); start >= 0 {
			trimmed = trimmed[start:]
		}
		if end := findJSONEnd(trimmed); end >= 0 {
			trimmed = trimmed[:end+1]
		}
		if err2 := json.Unmarshal([]byte(trimmed), &actions); err2 != nil {
			jsonError(w, "failed to parse AI actions response", http.StatusBadGateway)
			return
		}
	}
	jsonResponse(w, map[string]any{"actions": actions})
}
// findJSONStart returns the byte index of the first '[' in s, or -1 if
// none exists. Used to strip prose or code fences preceding a JSON array.
// strings.IndexByte replaces the hand-rolled rune loop; '[' is ASCII, so
// the returned byte index is identical.
func findJSONStart(s string) int {
	return strings.IndexByte(s, '[')
}
// findJSONEnd returns the byte index of the last ']' in s, or -1 if none
// exists. Used to strip prose or code fences trailing a JSON array.
// strings.LastIndexByte replaces the hand-rolled reverse loop with the
// stdlib equivalent.
func findJSONEnd(s string) int {
	return strings.LastIndexByte(s, ']')
}
// handleLLMConfig reports the server-side LLM configuration (provider and
// model names) plus whether a usable provider is configured at all — the
// provider name "none" marks the absent case.
func (s *Server) handleLLMConfig(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	jsonResponse(w, map[string]any{
		"provider":  s.cfg.LLM.Provider,
		"model":     s.cfg.LLM.Model,
		"available": s.llmProvider.Name() != "none",
	})
}
// handleBetterventSearch proxies a GET search (query parameter "q") to the
// configured Bettervent provider with a 30s timeout and relays the result
// as JSON. Provider failures surface as 502s.
func (s *Server) handleBetterventSearch(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	query := r.URL.Query().Get("q")
	ctx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	result, searchErr := s.betterventProvider.Search(ctx, query)
	if searchErr != nil {
		jsonError(w, searchErr.Error(), http.StatusBadGateway)
		return
	}
	jsonResponse(w, result)
}
func jsonResponse(w http.ResponseWriter, data any) {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(data)
}
func jsonError(w http.ResponseWriter, msg string, status int) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(status)
json.NewEncoder(w).Encode(map[string]string{"error": msg})
}
func jsonErrorTyped(w http.ResponseWriter, msg, errType string, status int) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(status)
json.NewEncoder(w).Encode(map[string]string{"error": msg, "type": errType})
}
// classifyWeatherError maps an upstream weather-provider error onto a typed
// JSON error response: context deadline and net.Error timeouts become
// "timeout" (504), other network errors "network" (502), and everything
// else "upstream" (502).
func classifyWeatherError(w http.ResponseWriter, err error) {
	var netErr net.Error
	switch {
	case errors.Is(err, context.DeadlineExceeded):
		jsonErrorTyped(w, err.Error(), "timeout", http.StatusGatewayTimeout)
	case errors.As(err, &netErr) && netErr.Timeout():
		jsonErrorTyped(w, err.Error(), "timeout", http.StatusGatewayTimeout)
	case errors.As(err, &netErr):
		jsonErrorTyped(w, err.Error(), "network", http.StatusBadGateway)
	default:
		jsonErrorTyped(w, err.Error(), "upstream", http.StatusBadGateway)
	}
}