fix: persist default openai vision model in setup wizard
Add regression coverage for the new provider-aware vision setup flow and make the default OpenAI choice write AUXILIARY_VISION_MODEL so auxiliary vision requests don't fall back to the main model slug.
This commit is contained in:
parent
2054ffdaeb
commit
ee73b6bf27
2 changed files with 83 additions and 14 deletions
|
|
@ -1299,9 +1299,13 @@ def setup_model_provider(config: dict):
|
|||
_oai_vision_models = ["gpt-4o", "gpt-4o-mini", "gpt-4.1", "gpt-4.1-mini", "gpt-4.1-nano"]
|
||||
_vm_choices = _oai_vision_models + ["Keep default (gpt-4o-mini)"]
|
||||
_vm_idx = prompt_choice("Select vision model:", _vm_choices, len(_vm_choices) - 1)
|
||||
if _vm_idx < len(_oai_vision_models):
|
||||
save_env_value("AUXILIARY_VISION_MODEL", _oai_vision_models[_vm_idx])
|
||||
print_success(f"Vision model set to {_oai_vision_models[_vm_idx]}")
|
||||
_selected_vision_model = (
|
||||
_oai_vision_models[_vm_idx]
|
||||
if _vm_idx < len(_oai_vision_models)
|
||||
else "gpt-4o-mini"
|
||||
)
|
||||
save_env_value("AUXILIARY_VISION_MODEL", _selected_vision_model)
|
||||
print_success(f"Vision model set to {_selected_vision_model}")
|
||||
_vision_needs_setup = False
|
||||
|
||||
# Even for providers without native vision, check if existing credentials
|
||||
|
|
@ -1366,11 +1370,13 @@ def setup_model_provider(config: dict):
|
|||
_oai_vision_models = ["gpt-4o", "gpt-4o-mini", "gpt-4.1", "gpt-4.1-mini", "gpt-4.1-nano"]
|
||||
_vm_choices = _oai_vision_models + ["Use default (gpt-4o-mini)"]
|
||||
_vm_idx = prompt_choice("Select vision model:", _vm_choices, 0)
|
||||
if _vm_idx < len(_oai_vision_models):
|
||||
save_env_value("AUXILIARY_VISION_MODEL", _oai_vision_models[_vm_idx])
|
||||
print_success(f"Vision configured with OpenAI ({_oai_vision_models[_vm_idx]})")
|
||||
else:
|
||||
print_success("Vision configured with OpenAI (gpt-4o-mini)")
|
||||
_selected_vision_model = (
|
||||
_oai_vision_models[_vm_idx]
|
||||
if _vm_idx < len(_oai_vision_models)
|
||||
else "gpt-4o-mini"
|
||||
)
|
||||
save_env_value("AUXILIARY_VISION_MODEL", _selected_vision_model)
|
||||
print_success(f"Vision configured with OpenAI ({_selected_vision_model})")
|
||||
else:
|
||||
print_info("Skipped — vision won't be available")
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from hermes_cli.config import load_config, save_config, save_env_value
|
||||
from hermes_cli.setup import setup_model_provider
|
||||
from hermes_cli.setup import _print_setup_summary, setup_model_provider
|
||||
|
||||
|
||||
def _read_env(home):
|
||||
|
|
@ -50,11 +50,15 @@ def test_setup_keep_current_custom_from_config_does_not_fall_through(tmp_path, m
|
|||
|
||||
calls = {"count": 0}
|
||||
|
||||
def fake_prompt_choice(_question, choices, default=0):
|
||||
def fake_prompt_choice(question, choices, default=0):
|
||||
calls["count"] += 1
|
||||
if calls["count"] == 1:
|
||||
assert choices[-1] == "Keep current (Custom: https://example.invalid/v1)"
|
||||
return len(choices) - 1
|
||||
if calls["count"] == 2:
|
||||
assert question == "Configure vision:"
|
||||
assert choices[-1] == "Skip for now"
|
||||
return len(choices) - 1
|
||||
raise AssertionError("Model menu should not appear for keep-current custom")
|
||||
|
||||
monkeypatch.setattr("hermes_cli.setup.prompt_choice", fake_prompt_choice)
|
||||
|
|
@ -70,7 +74,7 @@ def test_setup_keep_current_custom_from_config_does_not_fall_through(tmp_path, m
|
|||
assert reloaded["model"]["provider"] == "custom"
|
||||
assert reloaded["model"]["default"] == "custom/model"
|
||||
assert reloaded["model"]["base_url"] == "https://example.invalid/v1"
|
||||
assert calls["count"] == 1
|
||||
assert calls["count"] == 2
|
||||
|
||||
|
||||
def test_setup_keep_current_config_provider_uses_provider_specific_model_menu(tmp_path, monkeypatch):
|
||||
|
|
@ -88,13 +92,17 @@ def test_setup_keep_current_config_provider_uses_provider_specific_model_menu(tm
|
|||
captured = {"provider_choices": None, "model_choices": None}
|
||||
calls = {"count": 0}
|
||||
|
||||
def fake_prompt_choice(_question, choices, default=0):
|
||||
def fake_prompt_choice(question, choices, default=0):
|
||||
calls["count"] += 1
|
||||
if calls["count"] == 1:
|
||||
captured["provider_choices"] = list(choices)
|
||||
assert choices[-1] == "Keep current (Anthropic)"
|
||||
return len(choices) - 1
|
||||
if calls["count"] == 2:
|
||||
assert question == "Configure vision:"
|
||||
assert choices[-1] == "Skip for now"
|
||||
return len(choices) - 1
|
||||
if calls["count"] == 3:
|
||||
captured["model_choices"] = list(choices)
|
||||
return len(choices) - 1 # keep current model
|
||||
raise AssertionError("Unexpected extra prompt_choice call")
|
||||
|
|
@ -113,7 +121,43 @@ def test_setup_keep_current_config_provider_uses_provider_specific_model_menu(tm
|
|||
assert captured["model_choices"] is not None
|
||||
assert captured["model_choices"][0] == "claude-opus-4-6"
|
||||
assert "anthropic/claude-opus-4.6 (recommended)" not in captured["model_choices"]
|
||||
assert calls["count"] == 2
|
||||
assert calls["count"] == 3
|
||||
|
||||
|
||||
def test_setup_keep_current_anthropic_can_configure_openai_vision_default(tmp_path, monkeypatch):
    """Regression test: keeping the current Anthropic provider and picking the
    default OpenAI vision model must still persist AUXILIARY_VISION_MODEL.

    Previously the "use default" branch only printed a success message without
    writing the env value, so auxiliary vision requests fell back to the main
    model slug. This drives the wizard via scripted prompt answers and checks
    the env file afterwards.
    """
    # Isolate the wizard's home directory and strip any provider env leakage.
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    _clear_provider_env(monkeypatch)

    # Start from a saved config whose active provider is Anthropic.
    config = load_config()
    config["model"] = {
        "default": "claude-opus-4-6",
        "provider": "anthropic",
    }
    save_config(config)

    # Scripted answers for each prompt_choice call, in order.
    picks = iter([
        9,  # keep current provider
        1,  # configure vision with OpenAI
        5,  # use default gpt-4o-mini vision model
        4,  # keep current Anthropic model
    ])

    monkeypatch.setattr("hermes_cli.setup.prompt_choice", lambda *args, **kwargs: next(picks))
    # Only the OpenAI API key prompt gets a real value; everything else is blank.
    monkeypatch.setattr(
        "hermes_cli.setup.prompt",
        lambda message, *args, **kwargs: "sk-openai" if "OpenAI API key" in message else "",
    )
    monkeypatch.setattr("hermes_cli.setup.prompt_yes_no", lambda *args, **kwargs: False)
    # No pre-existing auth state or external credentials to short-circuit setup.
    monkeypatch.setattr("hermes_cli.auth.get_active_provider", lambda: None)
    monkeypatch.setattr("hermes_cli.auth.detect_external_credentials", lambda: [])
    monkeypatch.setattr("hermes_cli.models.provider_model_ids", lambda provider: [])

    setup_model_provider(config)
    env = _read_env(tmp_path)

    assert env.get("OPENAI_API_KEY") == "sk-openai"
    assert env.get("OPENAI_BASE_URL") == "https://api.openai.com/v1"
    # The core regression check: the default choice is now written to the env.
    assert env.get("AUXILIARY_VISION_MODEL") == "gpt-4o-mini"
|
||||
|
||||
|
||||
def test_setup_switch_custom_to_codex_clears_custom_endpoint_and_updates_config(tmp_path, monkeypatch):
|
||||
|
|
@ -144,7 +188,7 @@ def test_setup_switch_custom_to_codex_clears_custom_endpoint_and_updates_config(
|
|||
"hermes_cli.auth.resolve_codex_runtime_credentials",
|
||||
lambda *args, **kwargs: {
|
||||
"base_url": "https://chatgpt.com/backend-api/codex",
|
||||
"api_key": "codex-access-token",
|
||||
"api_key": "codex-...oken",
|
||||
},
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
|
|
@ -163,3 +207,22 @@ def test_setup_switch_custom_to_codex_clears_custom_endpoint_and_updates_config(
|
|||
assert reloaded["model"]["provider"] == "openai-codex"
|
||||
assert reloaded["model"]["default"] == "openai/gpt-5.3-codex"
|
||||
assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex"
|
||||
|
||||
|
||||
def test_setup_summary_marks_codex_auth_as_vision_available(tmp_path, monkeypatch, capsys):
    """The setup summary treats stored openai-codex auth tokens as sufficient
    for vision, while still flagging the missing OpenRouter key for MoA.

    Writes a minimal auth.json with an openai-codex access token, renders the
    summary, and asserts on the captured output.
    """
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    _clear_provider_env(monkeypatch)

    # Minimal auth store: active openai-codex provider with an access token.
    (tmp_path / "auth.json").write_text(
        '{"active_provider":"openai-codex","providers":{"openai-codex":{"tokens":{"access_token":"tok"}}}}'
    )

    # Pretend no external CLI tools are installed so only auth.json matters.
    monkeypatch.setattr("shutil.which", lambda _name: None)

    _print_setup_summary(load_config(), tmp_path)
    output = capsys.readouterr().out

    # Vision is listed and NOT marked missing, because codex auth covers it.
    assert "Vision (image analysis)" in output
    assert "missing run 'hermes setup' to configure" not in output
    # MoA is listed but still flagged as missing its OpenRouter key.
    assert "Mixture of Agents" in output
    assert "missing OPENROUTER_API_KEY" in output
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue