diff --git a/internal/cli/root.go b/internal/cli/root.go
index 9fed4fb..a644941 100644
--- a/internal/cli/root.go
+++ b/internal/cli/root.go
@@ -88,32 +88,58 @@ func getActiveProfile() (*store.Profile, error) {
 	return &profiles[0], nil
 }
 
-// getLLMProvider creates an LLM provider based on config.
+// getLLMProvider creates an LLM provider from DB settings, with env var / config fallback.
 func getLLMProvider() llm.Provider {
-	switch cfg.LLM.Provider {
+	// Determine provider: --llm flag > DB settings > config.yaml
+	provider := cfg.LLM.Provider
+	model := cfg.LLM.Model
+	endpoint := cfg.LLM.Endpoint
+	var dbKey string
+
+	if db != nil {
+		if ls, err := db.GetLLMSettings(); err == nil && ls.Provider != "" {
+			if llmFlag == "" { // only use DB provider when no CLI flag override
+				provider = ls.Provider
+			}
+			model = ls.Model
+			endpoint = ls.Endpoint
+			if key, err := config.Decrypt(ls.APIKeyEnc); err == nil {
+				dbKey = key
+			}
+		}
+	}
+
+	envKey := func(envVar string) string {
+		if dbKey != "" {
+			return dbKey
+		}
+		return os.Getenv(envVar)
+	}
+
+	switch provider {
 	case "anthropic":
-		key := os.Getenv("ANTHROPIC_API_KEY")
+		key := envKey("ANTHROPIC_API_KEY")
 		if key == "" {
-			fmt.Fprintln(os.Stderr, "Warning: ANTHROPIC_API_KEY not set, LLM features disabled")
+			fmt.Fprintln(os.Stderr, "Warning: no Anthropic API key configured, LLM features disabled")
 			return llm.NewNoop()
 		}
-		return llm.NewAnthropic(key, cfg.LLM.Model, nil)
+		return llm.NewAnthropic(key, model, nil)
 	case "openai":
-		key := os.Getenv("OPENAI_API_KEY")
+		key := envKey("OPENAI_API_KEY")
 		if key == "" {
-			fmt.Fprintln(os.Stderr, "Warning: OPENAI_API_KEY not set, LLM features disabled")
+			fmt.Fprintln(os.Stderr, "Warning: no OpenAI API key configured, LLM features disabled")
 			return llm.NewNoop()
 		}
-		return llm.NewOpenAI(key, cfg.LLM.Model, nil)
	case "gemini":
-		key := os.Getenv("GEMINI_API_KEY")
+		key := envKey("GEMINI_API_KEY")
 		if key == "" {
-			fmt.Fprintln(os.Stderr, "Warning: GEMINI_API_KEY not set, LLM features disabled")
+			fmt.Fprintln(os.Stderr, "Warning: no Gemini API key configured, LLM features disabled")
 			return llm.NewNoop()
 		}
-		return llm.NewGemini(key, cfg.LLM.Model, nil)
 	case "ollama":
-		return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil)
+		return llm.NewOllama(model, endpoint, nil)
 	default:
 		return llm.NewNoop()
 	}
diff --git a/internal/cli/templates/setup.html.tmpl b/internal/cli/templates/setup.html.tmpl
index 27faaa0..7668de2 100644
--- a/internal/cli/templates/setup.html.tmpl
+++ b/internal/cli/templates/setup.html.tmpl
@@ -37,6 +37,7 @@
 AC Units
 Toggles
 Forecast
+LLM
 {{template "profiles" .}}
@@ -46,6 +47,7 @@
 {{template "ac_units" .}}
 {{template "toggles" .}}
 {{template "forecast" .}}
+{{template "llm" .}}