From e2da14a60431095f463667f987866ea1bf055e6c Mon Sep 17 00:00:00 2001
From: vikingowl
Date: Tue, 10 Feb 2026 05:12:46 +0100
Subject: [PATCH] fix: improve CLI help output with usage header and examples

---
 cmd/heatguard/main.go | 22 +++++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/cmd/heatguard/main.go b/cmd/heatguard/main.go
index b1f8b86..561cd6e 100644
--- a/cmd/heatguard/main.go
+++ b/cmd/heatguard/main.go
@@ -17,11 +17,23 @@ import (
 )
 
 func main() {
-	port := flag.Int("port", 8080, "HTTP port")
-	dev := flag.Bool("dev", false, "development mode (serve from filesystem)")
-	llmProvider := flag.String("llm-provider", "", "LLM provider (anthropic, openai, gemini, ollama, none)")
-	llmModel := flag.String("llm-model", "", "LLM model name override")
-	llmEndpoint := flag.String("llm-endpoint", "", "LLM API endpoint override (for ollama)")
+	port := flag.Int("port", 8080, "HTTP listen port")
+	dev := flag.Bool("dev", false, "serve from filesystem instead of embedded assets")
+	llmProvider := flag.String("llm-provider", "", "LLM provider: anthropic|openai|gemini|ollama|none")
+	llmModel := flag.String("llm-model", "", "model name (overrides config file)")
+	llmEndpoint := flag.String("llm-endpoint", "", "API endpoint (e.g. http://localhost:11434 for Ollama)")
+
+	flag.Usage = func() {
+		fmt.Fprintf(os.Stderr, "HeatGuard — personalized heat preparedness server\n\n")
+		fmt.Fprintf(os.Stderr, "Usage:\n  heatguard [flags]\n\n")
+		fmt.Fprintf(os.Stderr, "Flags:\n")
+		flag.PrintDefaults()
+		fmt.Fprintf(os.Stderr, "\nExamples:\n")
+		fmt.Fprintf(os.Stderr, "  heatguard                                          # default, port 8080, no LLM\n")
+		fmt.Fprintf(os.Stderr, "  heatguard -port 3000                               # custom port\n")
+		fmt.Fprintf(os.Stderr, "  heatguard -llm-provider ollama -llm-model llama3.2 # local Ollama\n")
+		fmt.Fprintf(os.Stderr, "  heatguard -dev                                     # development mode\n")
+	}
 	flag.Parse()
 
 	cfg := config.Load()
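
Note for reviewers: below is a rough sketch of what `heatguard -h` should print
with this patch applied, assuming Go's standard flag.PrintDefaults formatting
(flags listed alphabetically, zero-value defaults omitted, non-bool flags
annotated with their type). The exact spacing is illustrative, not captured
from a real run:

  HeatGuard — personalized heat preparedness server

  Usage:
    heatguard [flags]

  Flags:
    -dev
      	serve from filesystem instead of embedded assets
    -llm-endpoint string
      	API endpoint (e.g. http://localhost:11434 for Ollama)
    -llm-model string
      	model name (overrides config file)
    -llm-provider string
      	LLM provider: anthropic|openai|gemini|ollama|none
    -port int
      	HTTP listen port (default 8080)

  Examples:
    heatguard                                          # default, port 8080, no LLM
    heatguard -port 3000                               # custom port
    heatguard -llm-provider ollama -llm-model llama3.2 # local Ollama
    heatguard -dev                                     # development mode

Writing the banner and examples to os.Stderr matches where the flag package
sends PrintDefaults output by default, so the whole help message stays on one
stream.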