Add full LLM configuration section to the setup page with provider dropdown, model/API key fields for cloud providers (Anthropic, OpenAI, Gemini), and endpoint/model fields for Ollama. API keys are encrypted with AES-256-GCM and stored in the database.
147 lines · 3.8 KiB · Go
package cli
|
|
|
|
import (
|
|
"fmt"
|
|
"os"
|
|
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/config"
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/llm"
|
|
"github.com/cnachtigall/heatwave-autopilot/internal/store"
|
|
"github.com/spf13/cobra"
|
|
)
|
|
|
|
// Package-level state shared by all subcommands: flag values bound in
// init() and handles opened by the root command's PersistentPreRunE.
var (
	dbPath      string // --db: path to the SQLite database file (empty = config.DefaultDBPath())
	verbose     bool   // --verbose: enable verbose output
	profileName string // --profile: profile name resolved by getActiveProfile
	llmFlag     string // --llm: LLM provider override (anthropic, openai, gemini, ollama, none)

	db  *store.Store  // opened in PersistentPreRunE, closed in PersistentPostRun
	cfg config.Config // loaded via config.Load() in PersistentPreRunE
)
|
|
|
|
var rootCmd = &cobra.Command{
|
|
Use: "heatwave",
|
|
Short: "Heatwave Autopilot — personalized heat preparedness",
|
|
Long: "A CLI tool that ingests weather forecasts, computes personalized heat budgets, and generates actionable hour-by-hour plans.",
|
|
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
|
cfg = config.Load()
|
|
|
|
if llmFlag != "" {
|
|
cfg.LLM.Provider = llmFlag
|
|
}
|
|
|
|
if cmd.Name() == "version" {
|
|
return nil
|
|
}
|
|
|
|
path := dbPath
|
|
if path == "" {
|
|
path = config.DefaultDBPath()
|
|
}
|
|
if err := os.MkdirAll(config.DataDir(), 0o755); err != nil {
|
|
return fmt.Errorf("create data dir: %w", err)
|
|
}
|
|
|
|
var err error
|
|
db, err = store.New(path)
|
|
if err != nil {
|
|
return fmt.Errorf("open database: %w", err)
|
|
}
|
|
return nil
|
|
},
|
|
PersistentPostRun: func(cmd *cobra.Command, args []string) {
|
|
if db != nil {
|
|
db.Close()
|
|
}
|
|
},
|
|
RunE: func(cmd *cobra.Command, args []string) error {
|
|
fmt.Println("Heatwave Autopilot — use --help for available commands")
|
|
return nil
|
|
},
|
|
}
|
|
|
|
func init() {
|
|
rootCmd.PersistentFlags().StringVar(&dbPath, "db", "", "path to SQLite database")
|
|
rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "enable verbose output")
|
|
rootCmd.PersistentFlags().StringVar(&profileName, "profile", "", "profile name to use")
|
|
rootCmd.PersistentFlags().StringVar(&llmFlag, "llm", "", "LLM provider (anthropic, openai, gemini, ollama, none)")
|
|
}
|
|
|
|
// Execute runs the root command, dispatching to the invoked subcommand
// and returning any error it produced.
func Execute() error {
	return rootCmd.Execute()
}
|
|
|
|
// getActiveProfile resolves the current profile from --profile flag or first available.
|
|
func getActiveProfile() (*store.Profile, error) {
|
|
if profileName != "" {
|
|
return db.GetProfileByName(profileName)
|
|
}
|
|
profiles, err := db.ListProfiles()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
if len(profiles) == 0 {
|
|
return nil, fmt.Errorf("no profiles found — create one with: heatwave profile create <name> --lat <lat> --lon <lon>")
|
|
}
|
|
return &profiles[0], nil
|
|
}
|
|
|
|
// getLLMProvider creates an LLM provider from DB settings, with env var / config fallback.
|
|
func getLLMProvider() llm.Provider {
|
|
// Determine provider: --llm flag > DB settings > config.yaml
|
|
provider := cfg.LLM.Provider
|
|
model := cfg.LLM.Model
|
|
endpoint := cfg.LLM.Endpoint
|
|
var dbKey string
|
|
|
|
if db != nil {
|
|
if ls, err := db.GetLLMSettings(); err == nil && ls.Provider != "" {
|
|
if llmFlag == "" { // only use DB provider when no CLI flag override
|
|
provider = ls.Provider
|
|
}
|
|
model = ls.Model
|
|
endpoint = ls.Endpoint
|
|
if key, err := config.Decrypt(ls.APIKeyEnc); err == nil {
|
|
dbKey = key
|
|
}
|
|
}
|
|
}
|
|
|
|
envKey := func(envVar string) string {
|
|
if dbKey != "" {
|
|
return dbKey
|
|
}
|
|
return os.Getenv(envVar)
|
|
}
|
|
|
|
switch provider {
|
|
case "anthropic":
|
|
key := envKey("ANTHROPIC_API_KEY")
|
|
if key == "" {
|
|
fmt.Fprintln(os.Stderr, "Warning: no Anthropic API key configured, LLM features disabled")
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewAnthropic(key, model, nil)
|
|
case "openai":
|
|
key := envKey("OPENAI_API_KEY")
|
|
if key == "" {
|
|
fmt.Fprintln(os.Stderr, "Warning: no OpenAI API key configured, LLM features disabled")
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewOpenAI(key, model, nil)
|
|
case "gemini":
|
|
key := envKey("GEMINI_API_KEY")
|
|
if key == "" {
|
|
fmt.Fprintln(os.Stderr, "Warning: no Gemini API key configured, LLM features disabled")
|
|
return llm.NewNoop()
|
|
}
|
|
return llm.NewGemini(key, model, nil)
|
|
case "ollama":
|
|
return llm.NewOllama(model, endpoint, nil)
|
|
default:
|
|
return llm.NewNoop()
|
|
}
|
|
}
|