feat: add Google Gemini LLM provider

Add Gemini provider using the generativelanguage.googleapis.com API
with systemInstruction support. Activated via --llm gemini with
GEMINI_API_KEY env var. Default model: gemini-2.0-flash.
This commit is contained in:
2026-02-09 10:50:48 +01:00
parent 1c9db02334
commit c63e70cab0
2 changed files with 117 additions and 1 deletion

View File

@@ -65,7 +65,7 @@ func init() {
rootCmd.PersistentFlags().StringVar(&dbPath, "db", "", "path to SQLite database")
rootCmd.PersistentFlags().BoolVar(&verbose, "verbose", false, "enable verbose output")
rootCmd.PersistentFlags().StringVar(&profileName, "profile", "", "profile name to use")
rootCmd.PersistentFlags().StringVar(&llmFlag, "llm", "", "LLM provider (anthropic, openai, ollama, none)")
rootCmd.PersistentFlags().StringVar(&llmFlag, "llm", "", "LLM provider (anthropic, openai, gemini, ollama, none)")
}
// Execute runs the root command.
@@ -105,6 +105,13 @@ func getLLMProvider() llm.Provider {
return llm.NewNoop()
}
return llm.NewOpenAI(key, cfg.LLM.Model, nil)
case "gemini":
key := os.Getenv("GEMINI_API_KEY")
if key == "" {
fmt.Fprintln(os.Stderr, "Warning: GEMINI_API_KEY not set, LLM features disabled")
return llm.NewNoop()
}
return llm.NewGemini(key, cfg.LLM.Model, nil)
case "ollama":
return llm.NewOllama(cfg.LLM.Model, cfg.LLM.Endpoint, nil)
default: