From 4cfb66bac263798721de814217e77e7018126d96 Mon Sep 17 00:00:00 2001
From: teknium1
Date: Sun, 8 Mar 2026 20:42:54 -0700
Subject: [PATCH] docs: list all supported fallback providers with env var
 names

The config comment now shows the complete list of built-in providers that
the fallback system supports, each with the env var it reads for the API
key. Also clarifies that custom OpenAI-compatible endpoints work via
base_url + api_key_env.
---
 hermes_cli/config.py | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/hermes_cli/config.py b/hermes_cli/config.py
index 119b6c1f..61facf2c 100644
--- a/hermes_cli/config.py
+++ b/hermes_cli/config.py
@@ -107,9 +107,22 @@ DEFAULT_CONFIG = {
     # When the primary hits rate limits (429), overload (529), or service errors (503),
     # Hermes will automatically switch to this model for the remainder of the session.
     # Set to None / omit to disable fallback.
+    #
+    # Built-in providers (auto-resolve base_url and API key from env):
+    #   openrouter (OPENROUTER_API_KEY) — best fallback, routes to any model
+    #   openai (OPENAI_API_KEY) — GPT-4.1, o3, etc.
+    #   nous (NOUS_API_KEY) — Nous inference API
+    #   deepseek (DEEPSEEK_API_KEY) — DeepSeek models
+    #   together (TOGETHER_API_KEY) — Together AI
+    #   groq (GROQ_API_KEY) — Groq (fast inference)
+    #   fireworks (FIREWORKS_API_KEY) — Fireworks AI
+    #   mistral (MISTRAL_API_KEY) — Mistral models
+    #   gemini (GEMINI_API_KEY) — Google Gemini
+    #
+    # For any other OpenAI-compatible endpoint, use base_url + api_key_env.
     "fallback_model": {
-        "provider": "",  # e.g. "openrouter", "openai", "nous", "deepseek", "together", "groq"
-        "model": "",  # e.g. "anthropic/claude-sonnet-4", "gpt-4.1", "deepseek-chat"
+        "provider": "",  # provider name from the list above
+        "model": "",  # model slug, e.g. "anthropic/claude-sonnet-4", "gpt-4.1"
         # Optional overrides (usually auto-resolved from provider):
         # "base_url": "",  # custom endpoint URL
         # "api_key_env": "",  # env var name for API key (e.g. "MY_CUSTOM_KEY")