Merge pull request #1227 from NousResearch/hermes/hermes-07d947aa
fix: surface gpt-5.4 in codex setup
This commit is contained in:
commit
7c3cb9bb31
6 changed files with 145 additions and 4 deletions
|
|
@ -18,6 +18,36 @@ DEFAULT_CODEX_MODELS: List[str] = [
|
|||
"gpt-5.1-codex-mini",
|
||||
]
|
||||
|
||||
_FORWARD_COMPAT_TEMPLATE_MODELS: List[tuple[str, tuple[str, ...]]] = [
|
||||
("gpt-5.3-codex", ("gpt-5.2-codex",)),
|
||||
("gpt-5.4", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
||||
("gpt-5.3-codex-spark", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
||||
]
|
||||
|
||||
|
||||
def _add_forward_compat_models(model_ids: List[str]) -> List[str]:
|
||||
"""Add Clawdbot-style synthetic forward-compat Codex models.
|
||||
|
||||
If a newer Codex slug isn't returned by live discovery, surface it when an
|
||||
older compatible template model is present. This mirrors Clawdbot's
|
||||
synthetic catalog / forward-compat behavior for GPT-5 Codex variants.
|
||||
"""
|
||||
ordered: List[str] = []
|
||||
seen: set[str] = set()
|
||||
for model_id in model_ids:
|
||||
if model_id not in seen:
|
||||
ordered.append(model_id)
|
||||
seen.add(model_id)
|
||||
|
||||
for synthetic_model, template_models in _FORWARD_COMPAT_TEMPLATE_MODELS:
|
||||
if synthetic_model in seen:
|
||||
continue
|
||||
if any(template in seen for template in template_models):
|
||||
ordered.append(synthetic_model)
|
||||
seen.add(synthetic_model)
|
||||
|
||||
return ordered
|
||||
|
||||
|
||||
def _fetch_models_from_api(access_token: str) -> List[str]:
|
||||
"""Fetch available models from the Codex API. Returns visible models sorted by priority."""
|
||||
|
|
@ -54,7 +84,7 @@ def _fetch_models_from_api(access_token: str) -> List[str]:
|
|||
sortable.append((rank, slug))
|
||||
|
||||
sortable.sort(key=lambda x: (x[0], x[1]))
|
||||
return [slug for _, slug in sortable]
|
||||
return _add_forward_compat_models([slug for _, slug in sortable])
|
||||
|
||||
|
||||
def _read_default_model(codex_home: Path) -> Optional[str]:
|
||||
|
|
@ -125,7 +155,7 @@ def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
|
|||
if access_token:
|
||||
api_models = _fetch_models_from_api(access_token)
|
||||
if api_models:
|
||||
return api_models
|
||||
return _add_forward_compat_models(api_models)
|
||||
|
||||
# Fall back to local sources
|
||||
default_model = _read_default_model(codex_home)
|
||||
|
|
@ -140,4 +170,4 @@ def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
|
|||
if model_id not in ordered:
|
||||
ordered.append(model_id)
|
||||
|
||||
return ordered
|
||||
return _add_forward_compat_models(ordered)
|
||||
|
|
|
|||
|
|
@ -1057,6 +1057,7 @@ def _model_flow_openai_codex(config, current_model=""):
|
|||
_codex_token = _codex_creds.get("api_key")
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
codex_models = get_codex_model_ids(access_token=_codex_token)
|
||||
|
||||
selected = _prompt_model_selection(codex_models, current_model=current_model)
|
||||
|
|
@ -1072,6 +1073,7 @@ def _model_flow_openai_codex(config, current_model=""):
|
|||
print("No change.")
|
||||
|
||||
|
||||
|
||||
def _model_flow_custom(config):
|
||||
"""Custom endpoint: collect URL, API key, and model name.
|
||||
|
||||
|
|
|
|||
|
|
@ -40,6 +40,7 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
|
|||
"deepseek-v3.2",
|
||||
],
|
||||
"openai-codex": [
|
||||
"gpt-5.3-codex",
|
||||
"gpt-5.2-codex",
|
||||
"gpt-5.1-codex-mini",
|
||||
"gpt-5.1-codex-max",
|
||||
|
|
|
|||
|
|
@ -654,6 +654,7 @@ def setup_model_provider(config: dict):
|
|||
_update_config_for_provider,
|
||||
_login_openai_codex,
|
||||
get_codex_auth_status,
|
||||
resolve_codex_runtime_credentials,
|
||||
DEFAULT_CODEX_BASE_URL,
|
||||
detect_external_credentials,
|
||||
)
|
||||
|
|
@ -1266,7 +1267,15 @@ def setup_model_provider(config: dict):
|
|||
elif selected_provider == "openai-codex":
|
||||
from hermes_cli.codex_models import get_codex_model_ids
|
||||
|
||||
codex_models = get_codex_model_ids()
|
||||
codex_token = None
|
||||
try:
|
||||
codex_creds = resolve_codex_runtime_credentials()
|
||||
codex_token = codex_creds.get("api_key")
|
||||
except Exception as exc:
|
||||
logger.debug("Could not resolve Codex runtime credentials for model list: %s", exc)
|
||||
|
||||
codex_models = get_codex_model_ids(access_token=codex_token)
|
||||
|
||||
model_choices = codex_models + [f"Keep current ({current_model})"]
|
||||
default_codex = 0
|
||||
if current_model in codex_models:
|
||||
|
|
|
|||
|
|
@ -95,3 +95,50 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch):
|
|||
assert reloaded["model"]["provider"] == "custom"
|
||||
assert reloaded["model"]["base_url"] == "https://custom.example/v1"
|
||||
assert reloaded["model"]["default"] == "custom/model"
|
||||
|
||||
|
||||
def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch):
    """Codex provider setup forwards the runtime access token to the model list.

    Stubs auth/login and the model-id fetch, drives the interactive setup flow,
    and verifies that the token resolved from runtime credentials reaches
    get_codex_model_ids and that the selected model/base_url are persisted.
    """
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
    _clear_provider_env(monkeypatch)
    # Set AFTER clearing provider env so the key survives into load_config().
    # (Previously this was also set before the clear, which was redundant.)
    monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key")

    config = load_config()

    # First choice selects the openai-codex provider, second accepts a model.
    prompt_choices = iter([1, 0])
    monkeypatch.setattr(
        "hermes_cli.setup.prompt_choice",
        lambda *args, **kwargs: next(prompt_choices),
    )
    monkeypatch.setattr("hermes_cli.setup.prompt", lambda *args, **kwargs: "")
    monkeypatch.setattr("hermes_cli.auth.detect_external_credentials", lambda: [])
    monkeypatch.setattr("hermes_cli.auth._login_openai_codex", lambda *args, **kwargs: None)
    monkeypatch.setattr(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        lambda *args, **kwargs: {
            "base_url": "https://chatgpt.com/backend-api/codex",
            "api_key": "codex-access-token",
        },
    )

    captured = {}

    def _fake_get_codex_model_ids(access_token=None):
        # Record the token the setup flow passed through.
        captured["access_token"] = access_token
        return ["gpt-5.2-codex", "gpt-5.2"]

    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        _fake_get_codex_model_ids,
    )

    setup_model_provider(config)
    save_config(config)

    reloaded = load_config()

    assert captured["access_token"] == "codex-access-token"
    assert isinstance(reloaded["model"], dict)
    assert reloaded["model"]["provider"] == "openai-codex"
    assert reloaded["model"]["default"] == "gpt-5.2-codex"
    assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex"
|
||||
|
|
|
|||
|
|
@ -52,6 +52,58 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatc
|
|||
models = get_codex_model_ids()
|
||||
|
||||
assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS
|
||||
assert "gpt-5.4" in models
|
||||
assert "gpt-5.3-codex-spark" in models
|
||||
|
||||
|
||||
def test_get_codex_model_ids_adds_forward_compat_models_from_templates(monkeypatch):
    """An older template slug from live discovery surfaces newer synthetic models."""

    def _stub_fetch(access_token):
        # Live discovery returns only the older template slug.
        return ["gpt-5.2-codex"]

    monkeypatch.setattr(
        "hermes_cli.codex_models._fetch_models_from_api",
        _stub_fetch,
    )

    result = get_codex_model_ids(access_token="codex-access-token")

    expected = ["gpt-5.2-codex", "gpt-5.3-codex", "gpt-5.4", "gpt-5.3-codex-spark"]
    assert result == expected
|
||||
|
||||
|
||||
def test_model_command_uses_runtime_access_token_for_codex_list(monkeypatch):
    """The codex model flow resolves runtime credentials and forwards the token."""
    from hermes_cli.main import _model_flow_openai_codex

    captured = {}

    def _stub_get_codex_model_ids(access_token=None):
        captured["access_token"] = access_token
        return ["gpt-5.2-codex", "gpt-5.2"]

    def _stub_prompt_model_selection(model_ids, current_model=""):
        captured["model_ids"] = list(model_ids)
        captured["current_model"] = current_model
        # Returning None means the user made no selection.
        return None

    monkeypatch.setattr(
        "hermes_cli.auth.get_codex_auth_status",
        lambda: {"logged_in": True},
    )
    monkeypatch.setattr(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        lambda *args, **kwargs: {"api_key": "codex-access-token"},
    )
    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        _stub_get_codex_model_ids,
    )
    monkeypatch.setattr(
        "hermes_cli.auth._prompt_model_selection",
        _stub_prompt_model_selection,
    )

    _model_flow_openai_codex({}, current_model="openai/gpt-5.4")

    assert captured["access_token"] == "codex-access-token"
    assert captured["model_ids"] == ["gpt-5.2-codex", "gpt-5.2"]
    assert captured["current_model"] == "openai/gpt-5.4"
|
||||
|
||||
|
||||
# ── Tests for _normalize_model_for_provider ──────────────────────────
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue