feat: add openai-codex as fallback provider
Codex OAuth uses a different auth flow (OAuth tokens, not env vars) and a different API mode (codex_responses, not chat_completions). The fallback now handles this specially:

- Resolves credentials via resolve_codex_runtime_credentials()
- Sets api_mode to codex_responses
- Fails gracefully if no Codex OAuth session exists

Also added to the commented-out config.yaml example. Two new tests (codex activation + graceful failure).
This commit is contained in:
parent
cf9482984e
commit
5785bd3272
3 changed files with 76 additions and 25 deletions
|
|
@ -766,6 +766,7 @@ _FALLBACK_MODEL_COMMENT = """
|
|||
#
|
||||
# Supported providers:
|
||||
# openrouter (OPENROUTER_API_KEY) — routes to any model
|
||||
# openai-codex (OAuth — hermes login) — OpenAI Codex
|
||||
# zai (ZAI_API_KEY) — Z.AI / GLM
|
||||
# kimi-coding (KIMI_API_KEY) — Kimi / Moonshot
|
||||
# minimax (MINIMAX_API_KEY) — MiniMax
|
||||
|
|
|
|||
24
run_agent.py
24
run_agent.py
|
|
@ -2163,6 +2163,7 @@ class AIAgent:
|
|||
# Maps provider id → (default_base_url, [env_var_names])
|
||||
# Only includes providers that Hermes actually supports.
|
||||
# For anything else, use base_url + api_key_env in the config.
|
||||
# Note: openai-codex is handled specially (OAuth, not env var).
|
||||
_FALLBACK_PROVIDERS = {
|
||||
"openrouter": (OPENROUTER_BASE_URL, ["OPENROUTER_API_KEY"]),
|
||||
"zai": ("https://api.z.ai/api/paas/v4", ["ZAI_API_KEY", "Z_AI_API_KEY"]),
|
||||
|
|
@ -2188,7 +2189,23 @@ class AIAgent:
|
|||
if not fb_provider or not fb_model:
|
||||
return False
|
||||
|
||||
# Resolve API key
|
||||
# ── Resolve credentials ──────────────────────────────────────────
|
||||
# OpenAI Codex uses OAuth (not a simple env var), so handle it
|
||||
# separately from the standard env-var-based providers.
|
||||
fb_api_mode = "chat_completions"
|
||||
|
||||
if fb_provider == "openai-codex":
|
||||
try:
|
||||
from hermes_cli.auth import resolve_codex_runtime_credentials
|
||||
creds = resolve_codex_runtime_credentials()
|
||||
fb_key = creds["api_key"]
|
||||
fb_base_url = creds["base_url"]
|
||||
fb_api_mode = "codex_responses"
|
||||
except Exception as e:
|
||||
logging.warning("Fallback to openai-codex failed (no credentials): %s", e)
|
||||
return False
|
||||
else:
|
||||
# Standard env-var resolution
|
||||
fb_key = (fb.get("api_key") or "").strip()
|
||||
if not fb_key:
|
||||
key_env = (fb.get("api_key_env") or "").strip()
|
||||
|
|
@ -2206,14 +2223,13 @@ class AIAgent:
|
|||
)
|
||||
return False
|
||||
|
||||
# Resolve base URL
|
||||
fb_base_url = (fb.get("base_url") or "").strip()
|
||||
if not fb_base_url and fb_provider in self._FALLBACK_PROVIDERS:
|
||||
fb_base_url = self._FALLBACK_PROVIDERS[fb_provider][0]
|
||||
if not fb_base_url:
|
||||
fb_base_url = OPENROUTER_BASE_URL
|
||||
|
||||
# Build new client
|
||||
# ── Build new client ──────────────────────────────────────────
|
||||
try:
|
||||
client_kwargs = {"api_key": fb_key, "base_url": fb_base_url}
|
||||
if "openrouter" in fb_base_url.lower():
|
||||
|
|
@ -2231,7 +2247,7 @@ class AIAgent:
|
|||
self.model = fb_model
|
||||
self.provider = fb_provider
|
||||
self.base_url = fb_base_url
|
||||
self.api_mode = "chat_completions"
|
||||
self.api_mode = fb_api_mode
|
||||
self._fallback_activated = True
|
||||
|
||||
# Re-evaluate prompt caching for the new provider/model
|
||||
|
|
|
|||
|
|
@ -218,6 +218,40 @@ class TestTryActivateFallback:
|
|||
call_kwargs = mock_openai.call_args[1]
|
||||
assert call_kwargs["api_key"] == "sk-alt-key"
|
||||
|
||||
def test_activates_codex_fallback(self):
    """openai-codex fallback uses OAuth credentials and switches to codex_responses mode."""
    agent = _make_agent(
        fallback_model={"provider": "openai-codex", "model": "gpt-5.3-codex"},
    )
    fake_creds = {
        "api_key": "codex-oauth-token",
        "base_url": "https://chatgpt.com/backend-api/codex",
    }
    creds_patch = patch(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        return_value=fake_creds,
    )
    with creds_patch, patch("run_agent.OpenAI") as mock_openai:
        activated = agent._try_activate_fallback()

    # Activation must succeed and flip the agent onto the Codex identity.
    assert activated is True
    assert agent.model == "gpt-5.3-codex"
    assert agent.provider == "openai-codex"
    assert agent.api_mode == "codex_responses"
    # The replacement OpenAI client must be built from the OAuth credentials.
    client_kwargs = mock_openai.call_args[1]
    assert client_kwargs["api_key"] == "codex-oauth-token"
    assert "chatgpt.com" in client_kwargs["base_url"]
|
||||
|
||||
def test_codex_fallback_fails_gracefully_without_credentials(self):
    """If no Codex OAuth session exists, fallback declines instead of raising."""
    agent = _make_agent(
        fallback_model={"provider": "openai-codex", "model": "gpt-5.3-codex"},
    )
    broken_resolver = patch(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        side_effect=Exception("No Codex credentials"),
    )
    with broken_resolver:
        activated = agent._try_activate_fallback()
    # Activation reports failure and the agent never marks the fallback active.
    assert activated is False
    assert agent._fallback_activated is False
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Fallback config init
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue