// Package cli wires up the heatwave command-line interface: persistent
// flags, database lifecycle, and shared helpers for subcommands.
package cli

import (
	"fmt"
	"os"

	"github.com/cnachtigall/heatwave-autopilot/internal/config"
	"github.com/cnachtigall/heatwave-autopilot/internal/llm"
	"github.com/cnachtigall/heatwave-autopilot/internal/store"

	"github.com/spf13/cobra"
)

var (
	// Values bound to persistent flags in init().
	dbPath      string // --db: overrides the default SQLite database path
	verbose     bool   // --verbose: enable verbose output
	profileName string // --profile: profile name to use
	llmFlag     string // --llm: overrides the configured LLM provider

	// Shared state established in PersistentPreRunE for all subcommands.
	db  *store.Store  // open database handle; closed in PersistentPostRun
	cfg config.Config // loaded configuration, with --llm applied on top
)

// rootCmd is the top-level heatwave command; subcommands attach to it.
var rootCmd = &cobra.Command{
	Use:   "heatwave",
	Short: "Heatwave Autopilot — personalized heat preparedness",
	Long:  "A CLI tool that ingests weather forecasts, computes personalized heat budgets, and generates actionable hour-by-hour plans.",
	// PersistentPreRunE loads config, applies the --llm override, and opens
	// the database. The version subcommand is exempted from db setup so it
	// works even when the data dir is unavailable.
	PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
		cfg = config.Load()
		// Flag takes precedence over the configured provider.
		if llmFlag != "" {
			cfg.LLM.Provider = llmFlag
		}
		if cmd.Name() == "version" {
			return nil
		}
		path := dbPath
		if path == "" {
			path = config.DefaultDBPath()
		}
		// Ensure the data directory exists before opening the database.
		if err := os.MkdirAll(config.DataDir(), 0o755); err != nil {
			return fmt.Errorf("create data dir: %w", err)
		}
		var err error
		db, err = store.New(path)
		if err != nil {
			return fmt.Errorf("open database: %w", err)
		}
		return nil
	},
	// PersistentPostRun closes the database opened in PersistentPreRunE.
	// db is nil for commands (e.g. version) that skipped the open.
	PersistentPostRun: func(cmd *cobra.Command, args []string) {
		if db != nil {
			db.Close()
		}
	},
	// Bare invocation just points the user at --help.
	RunE: func(cmd *cobra.Command, args []string) error {
		fmt.Println("Heatwave Autopilot — use --help for available commands")
		return nil
	},
}

// init registers the persistent flags shared by all subcommands.
func init() {
	rootCmd.PersistentFlags().StringVar(&dbPath, "db", "", "path to SQLite database")
	rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "enable verbose output")
	rootCmd.PersistentFlags().StringVar(&profileName, "profile", "", "profile name to use")
	rootCmd.PersistentFlags().StringVar(&llmFlag, "llm", "", "LLM provider (anthropic, openai, gemini, ollama, none)")
}

// Execute runs the root command.
func Execute() error {
	return rootCmd.Execute()
}

// getActiveProfile resolves the current profile from --profile flag or first available.
func getActiveProfile() (*store.Profile, error) { if profileName != "" { return db.GetProfileByName(profileName) } profiles, err := db.ListProfiles() if err != nil { return nil, err } if len(profiles) == 0 { return nil, fmt.Errorf("no profiles found — create one with: heatwave profile create --lat --lon ") } return &profiles[0], nil } // getLLMProvider creates an LLM provider based on config. func getLLMProvider() llm.Provider { switch cfg.LLM.Provider { case "anthropic": key := os.Getenv("ANTHROPIC_API_KEY") if key == "" { fmt.Fprintln(os.Stderr, "Warning: ANTHROPIC_API_KEY not set, LLM features disabled") return llm.NewNoop() } return llm.NewAnthropic(key, cfg.LLM.Model, nil) case "openai": key := os.Getenv("OPENAI_API_KEY") if key == "" { fmt.Fprintln(os.Stderr, "Warning: OPENAI_API_KEY not set, LLM features disabled") return llm.NewNoop() } return llm.NewOpenAI(key, cfg.LLM.Model, nil) case "gemini": key := os.Getenv("GEMINI_API_KEY") if key == "" { fmt.Fprintln(os.Stderr, "Warning: GEMINI_API_KEY not set, LLM features disabled") return llm.NewNoop() } return llm.NewGemini(key, cfg.LLM.Model, nil) case "ollama": return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil) default: return llm.NewNoop() } }