fix: respect model.default from config.yaml for openai-codex provider (#1896)
When config.yaml had a non-default model (e.g. gpt-5.3-codex) and the provider was openai-codex, _normalize_model_for_provider() would replace it with the latest available codex model because _model_is_default only checked the CLI argument, not the config value. Now _model_is_default is False when config.yaml has a model that differs from the global fallback (anthropic/claude-opus-4.6), so the user's explicit config choice is preserved. Fixes #1887 Co-authored-by: Test <test@test.com>
This commit is contained in:
parent
e86bfd7667
commit
24ac577046
2 changed files with 51 additions and 2 deletions
10
cli.py
10
cli.py
|
|
@ -1044,11 +1044,17 @@ class HermesCLI:
|
||||||
# env vars would stomp each other.
|
# env vars would stomp each other.
|
||||||
_model_config = CLI_CONFIG.get("model", {})
|
_model_config = CLI_CONFIG.get("model", {})
|
||||||
_config_model = _model_config.get("default", "") if isinstance(_model_config, dict) else (_model_config or "")
|
_config_model = _model_config.get("default", "") if isinstance(_model_config, dict) else (_model_config or "")
|
||||||
self.model = model or _config_model or "anthropic/claude-opus-4.6"
|
_FALLBACK_MODEL = "anthropic/claude-opus-4.6"
|
||||||
|
self.model = model or _config_model or _FALLBACK_MODEL
|
||||||
# Track whether model was explicitly chosen by the user or fell back
|
# Track whether model was explicitly chosen by the user or fell back
|
||||||
# to the global default. Provider-specific normalisation may override
|
# to the global default. Provider-specific normalisation may override
|
||||||
# the default silently but should warn when overriding an explicit choice.
|
# the default silently but should warn when overriding an explicit choice.
|
||||||
self._model_is_default = not model
|
# A config model that matches the global fallback is NOT considered an
|
||||||
|
# explicit choice — the user just never changed it. But a config model
|
||||||
|
# like "gpt-5.3-codex" IS explicit and must be preserved.
|
||||||
|
self._model_is_default = not model and (
|
||||||
|
not _config_model or _config_model == _FALLBACK_MODEL
|
||||||
|
)
|
||||||
|
|
||||||
self._explicit_api_key = api_key
|
self._explicit_api_key = api_key
|
||||||
self._explicit_base_url = base_url
|
self._explicit_base_url = base_url
|
||||||
|
|
|
||||||
|
|
@ -312,6 +312,49 @@ def test_codex_provider_uses_config_model(monkeypatch):
|
||||||
assert shell.model != "should-be-ignored"
|
assert shell.model != "should-be-ignored"
|
||||||
|
|
||||||
|
|
||||||
|
def test_codex_config_model_not_replaced_by_normalization(monkeypatch):
    """When the user sets model.default in config.yaml to a specific codex
    model, _normalize_model_for_provider must NOT replace it with the latest
    available model from the API. Regression test for #1887."""
    cli = _import_cli()

    # Clear env overrides so only config.yaml influences model selection.
    monkeypatch.delenv("LLM_MODEL", raising=False)
    monkeypatch.delenv("OPENAI_MODEL", raising=False)

    # User explicitly configured gpt-5.3-codex in config.yaml
    monkeypatch.setitem(cli.CLI_CONFIG, "model", {
        "default": "gpt-5.3-codex",
        "provider": "openai-codex",
        "base_url": "https://chatgpt.com/backend-api/codex",
    })

    def _runtime_resolve(**kwargs):
        # Stubbed provider resolution: always resolves to openai-codex.
        return {
            "provider": "openai-codex",
            "api_mode": "codex_responses",
            "base_url": "https://chatgpt.com/backend-api/codex",
            "api_key": "fake-key",
            "source": "env/config",
        }

    monkeypatch.setattr("hermes_cli.runtime_provider.resolve_runtime_provider", _runtime_resolve)
    monkeypatch.setattr("hermes_cli.runtime_provider.format_runtime_provider_error", lambda exc: str(exc))
    # API returns a DIFFERENT model than what the user configured
    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        lambda access_token=None: ["gpt-5.4", "gpt-5.3-codex"],
    )

    shell = cli.HermesCLI(compact=True, max_turns=1)

    # Config model is NOT the global default — user made a deliberate choice
    assert shell._model_is_default is False
    assert shell._ensure_runtime_credentials() is True
    assert shell.provider == "openai-codex"
    # Model must stay as user configured, not replaced by gpt-5.4
    assert shell.model == "gpt-5.3-codex"
|
||||||
|
|
||||||
|
|
||||||
def test_codex_provider_preserves_explicit_codex_model(monkeypatch):
|
def test_codex_provider_preserves_explicit_codex_model(monkeypatch):
|
||||||
"""If the user explicitly passes a Codex-compatible model, it must be
|
"""If the user explicitly passes a Codex-compatible model, it must be
|
||||||
preserved even when the provider resolves to openai-codex."""
|
preserved even when the provider resolves to openai-codex."""
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue