fix(setup): prevent OpenRouter model list fallback for Nous provider
When `fetch_nous_models()` fails silently during setup, model selection falls through to the OpenRouter static list. Users then pick models in OpenRouter format (e.g. `anthropic/claude-opus-4.6`), which the Nous inference API rejects with a 400 "missing model" error. Add an explicit `elif selected_provider == "nous"` branch that prompts for manual model entry (in Nous format, e.g. `claude-opus-4-6`) instead of falling through to the generic OpenRouter fallback.
This commit is contained in:
parent
ce28f847ce
commit
8bf28e1441
1 changed files with 9 additions and 0 deletions
|
|
@ -1050,6 +1050,15 @@ def run_setup_wizard(args):
|
||||||
config['model'] = custom
|
config['model'] = custom
|
||||||
save_env_value("LLM_MODEL", custom)
|
save_env_value("LLM_MODEL", custom)
|
||||||
# else: keep current
|
# else: keep current
|
||||||
|
elif selected_provider == "nous":
|
||||||
|
# Nous login succeeded but model fetch failed — prompt manually
|
||||||
|
# instead of falling through to the OpenRouter static list.
|
||||||
|
print_warning("Could not fetch available models from Nous Portal.")
|
||||||
|
print_info("Enter a Nous model name manually (e.g., claude-opus-4-6).")
|
||||||
|
custom = prompt(f" Model name (Enter to keep '{current_model}')")
|
||||||
|
if custom:
|
||||||
|
config['model'] = custom
|
||||||
|
save_env_value("LLM_MODEL", custom)
|
||||||
elif selected_provider == "openai-codex":
|
elif selected_provider == "openai-codex":
|
||||||
from hermes_cli.codex_models import get_codex_model_ids
|
from hermes_cli.codex_models import get_codex_model_ids
|
||||||
# Try to get the access token for live model discovery
|
# Try to get the access token for live model discovery
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue