Files
HeatGuard/internal/llm/gemini.go
vikingowl c63e70cab0 feat: add Google Gemini LLM provider
Add Gemini provider using the generativelanguage.googleapis.com API
with systemInstruction support. Activated via --llm gemini with
GEMINI_API_KEY env var. Default model: gemini-2.0-flash.
2026-02-09 10:50:48 +01:00

110 lines
2.9 KiB
Go

package llm
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
"time"
)
// Gemini implements Provider using the Google Gemini API
// (generativelanguage.googleapis.com, v1beta generateContent endpoint).
type Gemini struct {
apiKey string // Gemini API key, typically from the GEMINI_API_KEY env var
model string // model ID, e.g. "gemini-2.0-flash"
client *http.Client // HTTP client used for all API calls
baseURL string // API origin; overridable in tests to point at a fake server
}
// NewGemini creates a new Gemini provider.
//
// A nil client is replaced with a default http.Client using a 60s timeout;
// an empty model falls back to "gemini-2.0-flash".
func NewGemini(apiKey, model string, client *http.Client) *Gemini {
httpClient := client
if httpClient == nil {
httpClient = &http.Client{Timeout: 60 * time.Second}
}
chosenModel := model
if chosenModel == "" {
chosenModel = "gemini-2.0-flash"
}
return &Gemini{
apiKey:  apiKey,
model:   chosenModel,
client:  httpClient,
baseURL: "https://generativelanguage.googleapis.com",
}
}
func (g *Gemini) Name() string { return "gemini" }
// geminiRequest is the JSON body for a generateContent call.
type geminiRequest struct {
// SystemInstruction carries the system prompt; omitted when nil.
SystemInstruction *geminiContent `json:"systemInstruction,omitempty"`
// Contents is the conversation history; this provider sends a single user turn.
Contents []geminiContent `json:"contents"`
}
// geminiContent is one content entry (a list of parts) in a Gemini request.
type geminiContent struct {
Parts []geminiPart `json:"parts"`
}
// geminiPart is a single text fragment within a content entry.
type geminiPart struct {
Text string `json:"text"`
}
// geminiResponse mirrors the subset of the generateContent response this
// provider reads: candidate text parts plus the API's structured error.
type geminiResponse struct {
Candidates []struct {
Content struct {
Parts []struct {
Text string `json:"text"`
} `json:"parts"`
} `json:"content"`
} `json:"candidates"`
// Error is non-nil when the API rejects the request (bad key, quota, etc.).
Error *struct {
Message string `json:"message"`
} `json:"error"`
}
// call sends a single-turn generateContent request to the Gemini API and
// returns the text of the first candidate's first part.
//
// systemPrompt is sent as the systemInstruction; userMessage becomes the
// sole user content. It returns an error when the request cannot be built
// or sent, the response cannot be decoded, the API reports an error, the
// HTTP status is not 200, or the response contains no text.
func (g *Gemini) call(ctx context.Context, systemPrompt, userMessage string) (string, error) {
reqBody := geminiRequest{
SystemInstruction: &geminiContent{
Parts: []geminiPart{{Text: systemPrompt}},
},
Contents: []geminiContent{
{Parts: []geminiPart{{Text: userMessage}}},
},
}
body, err := json.Marshal(reqBody)
if err != nil {
return "", fmt.Errorf("marshal request: %w", err)
}
// Authenticate via the x-goog-api-key header rather than a ?key= query
// parameter: transport errors wrap *url.Error, which embeds the full
// request URL, so a key in the URL would leak into error messages and logs.
url := fmt.Sprintf("%s/v1beta/models/%s:generateContent", g.baseURL, g.model)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, strings.NewReader(string(body)))
if err != nil {
return "", fmt.Errorf("build request: %w", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("x-goog-api-key", g.apiKey)
resp, err := g.client.Do(req)
if err != nil {
return "", fmt.Errorf("gemini call: %w", err)
}
defer resp.Body.Close()
var result geminiResponse
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
// A gateway/proxy failure may return a non-JSON body; the HTTP
// status is the only useful signal in that case, so include it.
return "", fmt.Errorf("decode response (HTTP %s): %w", resp.Status, err)
}
if result.Error != nil {
return "", fmt.Errorf("gemini error: %s", result.Error.Message)
}
// Prefer the API's structured error message above; fall back to the raw
// status for well-formed JSON errors that lack the error field.
if resp.StatusCode != http.StatusOK {
return "", fmt.Errorf("gemini returned HTTP %s", resp.Status)
}
if len(result.Candidates) == 0 || len(result.Candidates[0].Content.Parts) == 0 {
return "", fmt.Errorf("empty response from gemini")
}
return result.Candidates[0].Content.Parts[0].Text, nil
}
// Summarize generates a summary for the given input via the Gemini API.
func (g *Gemini) Summarize(ctx context.Context, input SummaryInput) (string, error) {
system := SummarizeSystemPrompt()
prompt := BuildSummaryPrompt(input)
return g.call(ctx, system, prompt)
}
// RewriteAction rewrites an action description via the Gemini API.
func (g *Gemini) RewriteAction(ctx context.Context, input ActionInput) (string, error) {
system := RewriteActionSystemPrompt()
prompt := BuildRewriteActionPrompt(input)
return g.call(ctx, system, prompt)
}
// GenerateHeatPlan produces a heat plan for the given input via the Gemini API.
func (g *Gemini) GenerateHeatPlan(ctx context.Context, input HeatPlanInput) (string, error) {
system := HeatPlanSystemPrompt()
prompt := BuildHeatPlanPrompt(input)
return g.call(ctx, system, prompt)
}