Merge pull request #1227 from NousResearch/hermes/hermes-07d947aa

fix: surface gpt-5.4 in codex setup
This commit is contained in:
Teknium 2026-03-13 21:55:51 -07:00 committed by GitHub
commit 7c3cb9bb31
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 145 additions and 4 deletions

View file

@@ -95,3 +95,50 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch):
assert reloaded["model"]["provider"] == "custom"
assert reloaded["model"]["base_url"] == "https://custom.example/v1"
assert reloaded["model"]["default"] == "custom/model"
def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch):
    """Setup for the openai-codex provider should fetch the live model list
    using the runtime access token, and persist the selected provider, model,
    and base URL to the reloaded config.
    """
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    _clear_provider_env(monkeypatch)
    # Set the key only after clearing provider env vars. (The original test
    # also set it once *before* the clear, which was redundant: this final
    # setenv alone determines the value seen by setup.)
    monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key")
    config = load_config()

    # First prompted choice selects the provider entry, second selects the
    # first model in the listed ids.
    prompt_choices = iter([1, 0])
    monkeypatch.setattr(
        "hermes_cli.setup.prompt_choice",
        lambda *args, **kwargs: next(prompt_choices),
    )
    monkeypatch.setattr("hermes_cli.setup.prompt", lambda *args, **kwargs: "")
    monkeypatch.setattr("hermes_cli.auth.detect_external_credentials", lambda: [])
    # Skip the interactive login; pretend runtime credentials already resolve.
    monkeypatch.setattr("hermes_cli.auth._login_openai_codex", lambda *args, **kwargs: None)
    monkeypatch.setattr(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        lambda *args, **kwargs: {
            "base_url": "https://chatgpt.com/backend-api/codex",
            "api_key": "codex-access-token",
        },
    )

    captured = {}

    def _fake_get_codex_model_ids(access_token=None):
        # Record the token so we can assert the runtime credential was used
        # for the live model listing.
        captured["access_token"] = access_token
        return ["gpt-5.2-codex", "gpt-5.2"]

    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        _fake_get_codex_model_ids,
    )

    setup_model_provider(config)
    save_config(config)
    reloaded = load_config()

    assert captured["access_token"] == "codex-access-token"
    assert isinstance(reloaded["model"], dict)
    assert reloaded["model"]["provider"] == "openai-codex"
    assert reloaded["model"]["default"] == "gpt-5.2-codex"
    assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex"

View file

@@ -52,6 +52,58 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatch):
models = get_codex_model_ids()
assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS
assert "gpt-5.4" in models
assert "gpt-5.3-codex-spark" in models
def test_get_codex_model_ids_adds_forward_compat_models_from_templates(monkeypatch):
    """Ids returned by the API are extended with curated forward-compat models."""
    api_ids = ["gpt-5.2-codex"]
    monkeypatch.setattr(
        "hermes_cli.codex_models._fetch_models_from_api",
        lambda access_token: list(api_ids),
    )

    result = get_codex_model_ids(access_token="codex-access-token")

    expected = ["gpt-5.2-codex", "gpt-5.3-codex", "gpt-5.4", "gpt-5.3-codex-spark"]
    assert result == expected
def test_model_command_uses_runtime_access_token_for_codex_list(monkeypatch):
    """The codex model flow should list models with the runtime access token
    and forward the current model to the selection prompt.
    """
    from hermes_cli.main import _model_flow_openai_codex

    seen = {}

    monkeypatch.setattr(
        "hermes_cli.auth.get_codex_auth_status",
        lambda: {"logged_in": True},
    )
    monkeypatch.setattr(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        lambda *args, **kwargs: {"api_key": "codex-access-token"},
    )

    def _stub_model_ids(access_token=None):
        # Record the token that reaches the model listing.
        seen["access_token"] = access_token
        return ["gpt-5.2-codex", "gpt-5.2"]

    def _stub_selection(model_ids, current_model=""):
        # Record what the prompt was shown; returning None selects nothing.
        seen["model_ids"] = list(model_ids)
        seen["current_model"] = current_model
        return None

    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        _stub_model_ids,
    )
    monkeypatch.setattr(
        "hermes_cli.auth._prompt_model_selection",
        _stub_selection,
    )

    _model_flow_openai_codex({}, current_model="openai/gpt-5.4")

    assert seen["access_token"] == "codex-access-token"
    assert seen["model_ids"] == ["gpt-5.2-codex", "gpt-5.2"]
    assert seen["current_model"] == "openai/gpt-5.4"
# ── Tests for _normalize_model_for_provider ──────────────────────────