Files
HeatGuard/internal/llm/anthropic.go
vikingowl 5e6696aa42 feat: add AI-powered actions endpoint and timeline annotations
Add LLM actions endpoint that generates hour-specific heat
management recommendations. Replace static action engine with
AI-driven approach. Add cool mode logic (ventilate/ac/overloaded),
indoor temperature tracking, and timeline legend with annotations.
2026-02-10 03:54:09 +01:00

111 lines
3.1 KiB
Go

package llm
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
)
// Anthropic implements Provider using the Anthropic Messages API.
type Anthropic struct {
	apiKey  string       // sent in the x-api-key request header
	model   string       // model identifier placed in the request body
	client  *http.Client // HTTP client used for all API calls
	baseURL string       // API origin, e.g. "https://api.anthropic.com"; overridable for tests
}
// NewAnthropic creates a new Anthropic provider. A nil client is replaced
// with a default client using a 60-second timeout, and an empty model falls
// back to Claude Sonnet 4.5.
func NewAnthropic(apiKey, model string, client *http.Client) *Anthropic {
	httpClient := client
	if httpClient == nil {
		httpClient = &http.Client{Timeout: 60 * time.Second}
	}
	chosenModel := model
	if chosenModel == "" {
		chosenModel = "claude-sonnet-4-5-20250929"
	}
	provider := &Anthropic{
		apiKey:  apiKey,
		model:   chosenModel,
		client:  httpClient,
		baseURL: "https://api.anthropic.com",
	}
	return provider
}
func (a *Anthropic) Name() string { return "anthropic" }
// anthropicRequest is the JSON body for POST /v1/messages.
type anthropicRequest struct {
	Model     string             `json:"model"`      // model identifier
	MaxTokens int                `json:"max_tokens"` // hard cap on generated output tokens
	System    string             `json:"system"`     // system prompt, sent as a top-level field
	Messages  []anthropicMessage `json:"messages"`   // conversation turns; a single user turn here
}
// anthropicMessage is one conversation turn in a Messages API request.
type anthropicMessage struct {
	Role    string `json:"role"`    // "user" or "assistant"
	Content string `json:"content"` // plain-text message content
}
// anthropicResponse mirrors the subset of the Messages API response this
// client reads: the content blocks and an optional structured error.
type anthropicResponse struct {
	// Content holds the generated blocks; only the first block's Text is used.
	Content []struct {
		Type string `json:"type"`
		Text string `json:"text"`
	} `json:"content"`
	// Error is non-nil when the API returned an error payload instead of content.
	Error *struct {
		Message string `json:"message"`
	} `json:"error"`
}
// call sends a single-turn request to the Messages API and returns the text
// of the first content block in the response.
//
// systemPrompt becomes the top-level system field, userMessage the sole user
// turn, and maxTokens caps the model's output length. The returned string is
// empty on any error.
func (a *Anthropic) call(ctx context.Context, systemPrompt, userMessage string, maxTokens int) (string, error) {
	reqBody := anthropicRequest{
		Model:     a.model,
		MaxTokens: maxTokens,
		System:    systemPrompt,
		Messages: []anthropicMessage{
			{Role: "user", Content: userMessage},
		},
	}
	body, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, a.baseURL+"/v1/messages", strings.NewReader(string(body)))
	if err != nil {
		return "", fmt.Errorf("build request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", a.apiKey)
	req.Header.Set("anthropic-version", "2023-06-01")
	resp, err := a.client.Do(req)
	if err != nil {
		return "", fmt.Errorf("anthropic call: %w", err)
	}
	defer resp.Body.Close()
	var result anthropicResponse
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		// Include the status so a non-JSON error page (e.g. from a proxy)
		// doesn't surface as a bare decode failure.
		return "", fmt.Errorf("decode response (status %d): %w", resp.StatusCode, err)
	}
	// Prefer the structured API error when present; it carries the most detail.
	if result.Error != nil {
		return "", fmt.Errorf("anthropic error: %s", result.Error.Message)
	}
	// Guard against non-2xx responses whose bodies decoded cleanly but carried
	// no error object — previously these fell through as "empty response".
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return "", fmt.Errorf("anthropic status %d", resp.StatusCode)
	}
	if len(result.Content) == 0 {
		return "", fmt.Errorf("empty response from anthropic")
	}
	return result.Content[0].Text, nil
}
// Summarize implements Provider by sending the package's summary prompts to
// the Messages API with a 300-token output cap.
func (a *Anthropic) Summarize(ctx context.Context, input SummaryInput) (string, error) {
	return a.call(ctx, SummarizeSystemPrompt(), BuildSummaryPrompt(input), 300)
}
// RewriteAction implements Provider by sending the action-rewrite prompts to
// the Messages API with a tight 100-token output cap (short outputs expected).
func (a *Anthropic) RewriteAction(ctx context.Context, input ActionInput) (string, error) {
	return a.call(ctx, RewriteActionSystemPrompt(), BuildRewriteActionPrompt(input), 100)
}
// GenerateHeatPlan implements Provider by sending the heat-plan prompts to
// the Messages API with a 2000-token output cap for the longer plan output.
func (a *Anthropic) GenerateHeatPlan(ctx context.Context, input HeatPlanInput) (string, error) {
	return a.call(ctx, HeatPlanSystemPrompt(), BuildHeatPlanPrompt(input), 2000)
}
// GenerateActions implements Provider by sending the actions prompts to the
// Messages API with a 1500-token output cap.
func (a *Anthropic) GenerateActions(ctx context.Context, input ActionsInput) (string, error) {
	return a.call(ctx, GenerateActionsSystemPrompt(), BuildActionsPrompt(input), 1500)
}