feat(config): separate Ollama into local/cloud providers, add OpenAI & Anthropic defaults, bump schema version to 1.6.0
@@ -127,7 +127,7 @@ Create or edit `~/.config/owlen/config.toml`:
 ```toml
 [general]
-default_provider = "ollama"
+default_provider = "ollama_local"
 default_model = "llama3.2:latest"
 
 [modes.chat]
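The hunk above only shows the `default_provider` switch; the new provider tables themselves fall outside this diff context. Purely as an illustrative sketch of what the split described in the commit message could look like, assuming hypothetical `[providers.*]` tables with `base_url` and `api_key_env` keys (none of these names are confirmed by the diff shown):

```toml
# Hypothetical sketch only; table and key names are assumptions, not taken from this commit.

[general]
default_provider = "ollama_local"     # confirmed by the hunk above
default_model = "llama3.2:latest"

# Assumed layout for the local/cloud Ollama split:
[providers.ollama_local]
base_url = "http://localhost:11434"   # typical local Ollama endpoint

[providers.ollama_cloud]
base_url = "https://example.invalid"  # placeholder; actual cloud endpoint not shown in the diff
api_key_env = "OLLAMA_API_KEY"        # placeholder environment variable

# Assumed entries for the OpenAI & Anthropic defaults mentioned in the commit message:
[providers.openai]
api_key_env = "OPENAI_API_KEY"

[providers.anthropic]
api_key_env = "ANTHROPIC_API_KEY"
```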