Add OpenAI Codex provider runtime and responses integration (without .agent/PLANS.md)

This commit is contained in:
George Pickett 2026-02-25 18:20:38 -08:00
parent e3cb957a10
commit 609b19b630
19 changed files with 1713 additions and 145 deletions

View file

@ -18,7 +18,9 @@ from __future__ import annotations
import json
import logging
import os
import shutil
import stat
import subprocess
import time
import webbrowser
from contextlib import contextmanager
@ -55,6 +57,7 @@ DEFAULT_NOUS_SCOPE = "inference:mint_agent_key"
DEFAULT_AGENT_KEY_MIN_TTL_SECONDS = 30 * 60 # 30 minutes
ACCESS_TOKEN_REFRESH_SKEW_SECONDS = 120 # refresh 2 min before expiry
DEVICE_AUTH_POLL_INTERVAL_CAP_SECONDS = 1 # poll at most every 1s
DEFAULT_CODEX_BASE_URL = "https://chatgpt.com/backend-api/codex"
# =============================================================================
@ -84,7 +87,12 @@ PROVIDER_REGISTRY: Dict[str, ProviderConfig] = {
client_id=DEFAULT_NOUS_CLIENT_ID,
scope=DEFAULT_NOUS_SCOPE,
),
# Future: "openai_codex", "anthropic", etc.
"openai-codex": ProviderConfig(
id="openai-codex",
name="OpenAI Codex",
auth_type="oauth_external",
inference_base_url=DEFAULT_CODEX_BASE_URL,
),
}
@ -298,12 +306,15 @@ def resolve_provider(
"""
normalized = (requested or "auto").strip().lower()
if normalized in {"openrouter", "custom"}:
return "openrouter"
if normalized in PROVIDER_REGISTRY:
return normalized
if normalized == "openrouter":
return "openrouter"
if normalized != "auto":
return "openrouter"
raise AuthError(
f"Unknown provider '{normalized}'.",
code="invalid_provider",
)
# Explicit one-off CLI creds always mean openrouter/custom
if explicit_api_key or explicit_base_url:
@ -314,8 +325,8 @@ def resolve_provider(
auth_store = _load_auth_store()
active = auth_store.get("active_provider")
if active and active in PROVIDER_REGISTRY:
state = _load_provider_state(auth_store, active)
if state and (state.get("access_token") or state.get("refresh_token")):
status = get_auth_status(active)
if status.get("logged_in"):
return active
except Exception as e:
logger.debug("Could not detect active auth provider: %s", e)
@ -378,6 +389,108 @@ def _is_remote_session() -> bool:
return bool(os.getenv("SSH_CLIENT") or os.getenv("SSH_TTY"))
# =============================================================================
# OpenAI Codex auth file helpers
# =============================================================================
def resolve_codex_home_path() -> Path:
"""Resolve CODEX_HOME, defaulting to ~/.codex."""
codex_home = os.getenv("CODEX_HOME", "").strip()
if not codex_home:
codex_home = str(Path.home() / ".codex")
return Path(codex_home).expanduser()
def _codex_auth_file_path() -> Path:
return resolve_codex_home_path() / "auth.json"
def read_codex_auth_file() -> Dict[str, Any]:
    """Read and validate the Codex CLI ``auth.json`` file.

    Returns:
        Dict with the parsed ``payload``, its ``tokens`` object, and the
        resolved ``auth_path`` / ``codex_home`` paths.

    Raises:
        AuthError: if the Codex home or auth file is missing, the file is
            not valid JSON, or ``tokens.access_token`` /
            ``tokens.refresh_token`` are absent or empty. Every failure is
            marked ``relogin_required`` so callers can trigger a fresh login.
    """

    def _fail(message: str, code: str, cause: Optional[Exception] = None) -> None:
        # All failures share the same provider / relogin-required semantics.
        err = AuthError(
            message,
            provider="openai-codex",
            code=code,
            relogin_required=True,
        )
        if cause is not None:
            raise err from cause
        raise err

    codex_home = resolve_codex_home_path()
    if not codex_home.exists():
        _fail(
            f"Codex home directory not found at {codex_home}.",
            "codex_home_missing",
        )

    auth_path = codex_home / "auth.json"
    if not auth_path.exists():
        _fail(
            f"Codex auth file not found at {auth_path}.",
            "codex_auth_missing",
        )

    try:
        # auth.json is written by the Codex CLI as UTF-8; read it explicitly
        # instead of relying on the platform's locale encoding (which can
        # break parsing on Windows).
        payload = json.loads(auth_path.read_text(encoding="utf-8"))
    except Exception as exc:
        _fail(
            f"Failed to parse Codex auth file at {auth_path}.",
            "codex_auth_invalid_json",
            exc,
        )

    tokens = payload.get("tokens")
    if not isinstance(tokens, dict):
        _fail(
            "Codex auth file is missing a valid 'tokens' object.",
            "codex_auth_invalid_shape",
        )

    access_token = tokens.get("access_token")
    if not isinstance(access_token, str) or not access_token.strip():
        _fail(
            "Codex auth file is missing tokens.access_token.",
            "codex_auth_missing_access_token",
        )

    refresh_token = tokens.get("refresh_token")
    if not isinstance(refresh_token, str) or not refresh_token.strip():
        _fail(
            "Codex auth file is missing tokens.refresh_token.",
            "codex_auth_missing_refresh_token",
        )

    return {
        "payload": payload,
        "tokens": tokens,
        "auth_path": auth_path,
        "codex_home": codex_home,
    }
def resolve_codex_runtime_credentials() -> Dict[str, Any]:
    """Resolve runtime credentials from Codex CLI auth state."""
    data = read_codex_auth_file()
    payload = data["payload"]
    tokens = data["tokens"]

    # Allow a base-URL override via env; otherwise use the Codex default.
    override = os.getenv("HERMES_CODEX_BASE_URL", "").strip().rstrip("/")
    base_url = override if override else DEFAULT_CODEX_BASE_URL

    return {
        "provider": "openai-codex",
        "base_url": base_url,
        "api_key": tokens["access_token"],
        "source": "codex-auth-json",
        "last_refresh": payload.get("last_refresh"),
        "auth_mode": payload.get("auth_mode"),
        "auth_file": str(data["auth_path"]),
        "codex_home": str(data["codex_home"]),
    }
# =============================================================================
# TLS verification helper
# =============================================================================
@ -806,11 +919,37 @@ def get_nous_auth_status() -> Dict[str, Any]:
}
def get_codex_auth_status() -> Dict[str, Any]:
    """Status snapshot for Codex auth."""
    # Saved provider state supplies fallback paths for the error snapshot.
    state = get_provider_auth_state("openai-codex") or {}
    fallback_auth_file = state.get("auth_file") or str(_codex_auth_file_path())
    fallback_codex_home = state.get("codex_home") or str(resolve_codex_home_path())

    try:
        creds = resolve_codex_runtime_credentials()
    except AuthError as exc:
        return {
            "logged_in": False,
            "auth_file": fallback_auth_file,
            "codex_home": fallback_codex_home,
            "error": str(exc),
        }

    return {
        "logged_in": True,
        "auth_file": creds.get("auth_file"),
        "codex_home": creds.get("codex_home"),
        "last_refresh": creds.get("last_refresh"),
        "auth_mode": creds.get("auth_mode"),
        "source": creds.get("source"),
    }
def get_auth_status(provider_id: Optional[str] = None) -> Dict[str, Any]:
    """Generic auth status dispatcher."""
    target = provider_id or get_active_provider()
    # Per-provider status handlers; unknown providers report logged-out.
    handlers = {
        "nous": get_nous_auth_status,
        "openai-codex": get_codex_auth_status,
    }
    handler = handlers.get(target)
    if handler is not None:
        return handler()
    return {"logged_in": False}
@ -982,11 +1121,64 @@ def login_command(args) -> None:
if provider_id == "nous":
_login_nous(args, pconfig)
elif provider_id == "openai-codex":
_login_openai_codex(args, pconfig)
else:
print(f"Login for provider '{provider_id}' is not yet implemented.")
raise SystemExit(1)
def _login_openai_codex(args, pconfig: ProviderConfig) -> None:
    """OpenAI Codex login flow using Codex CLI auth state."""
    codex_path = shutil.which("codex")
    if codex_path is None:
        print("Codex CLI was not found in PATH.")
        print("Install Codex CLI, then retry `hermes login --provider openai-codex`.")
        raise SystemExit(1)

    print(f"Starting Hermes login via {pconfig.name}...")
    print(f"Using Codex CLI: {codex_path}")
    print(f"Codex home: {resolve_codex_home_path()}")

    try:
        creds: Dict[str, Any] = resolve_codex_runtime_credentials()
    except AuthError:
        # No valid local auth: delegate to the Codex CLI's own login flow,
        # then re-read the auth file it produced.
        print("No usable Codex auth found. Running `codex login`...")
        try:
            subprocess.run(["codex", "login"], check=True)
        except subprocess.CalledProcessError as exc:
            print(f"Codex login failed with exit code {exc.returncode}.")
            raise SystemExit(1)
        except KeyboardInterrupt:
            print("\nLogin cancelled.")
            raise SystemExit(130)
        try:
            creds = resolve_codex_runtime_credentials()
        except AuthError as exc:
            print(format_auth_error(exc))
            raise SystemExit(1)

    # Persist a small snapshot of the Codex auth metadata (not the tokens).
    auth_state = {
        key: creds.get(key)
        for key in ("auth_file", "codex_home", "last_refresh", "auth_mode", "source")
    }
    with _auth_store_lock():
        auth_store = _load_auth_store()
        _save_provider_state(auth_store, "openai-codex", auth_state)
        saved_to = _save_auth_store(auth_store)
    config_path = _update_config_for_provider("openai-codex", creds["base_url"])

    print()
    print("Login successful!")
    print(f" Auth state: {saved_to}")
    print(f" Config updated: {config_path} (model.provider=openai-codex)")
def _login_nous(args, pconfig: ProviderConfig) -> None:
"""Nous Portal device authorization flow."""
portal_base_url = (

View file

@ -171,6 +171,36 @@ def run_doctor(args):
else:
check_warn("config.yaml not found", "(using defaults)")
# =========================================================================
# Check: Auth providers
# =========================================================================
print()
print(color("◆ Auth Providers", Colors.CYAN, Colors.BOLD))
try:
from hermes_cli.auth import get_nous_auth_status, get_codex_auth_status
nous_status = get_nous_auth_status()
if nous_status.get("logged_in"):
check_ok("Nous Portal auth", "(logged in)")
else:
check_warn("Nous Portal auth", "(not logged in)")
codex_status = get_codex_auth_status()
if codex_status.get("logged_in"):
check_ok("OpenAI Codex auth", "(logged in)")
else:
check_warn("OpenAI Codex auth", "(not logged in)")
if codex_status.get("error"):
check_info(codex_status["error"])
except Exception as e:
check_warn("Auth provider status", f"(could not check: {e})")
if shutil.which("codex"):
check_ok("codex CLI")
else:
check_warn("codex CLI not found", "(required for openai-codex login)")
# =========================================================================
# Check: Directory structure
# =========================================================================

View file

@ -53,6 +53,7 @@ logger = logging.getLogger(__name__)
def _has_any_provider_configured() -> bool:
"""Check if at least one inference provider is usable."""
from hermes_cli.config import get_env_path, get_hermes_home
from hermes_cli.auth import get_auth_status
# Check env vars (may be set by .env or shell)
if os.getenv("OPENROUTER_API_KEY") or os.getenv("OPENAI_API_KEY") or os.getenv("ANTHROPIC_API_KEY"):
@ -81,8 +82,8 @@ def _has_any_provider_configured() -> bool:
auth = json.loads(auth_file.read_text())
active = auth.get("active_provider")
if active:
state = auth.get("providers", {}).get(active, {})
if state.get("access_token") or state.get("refresh_token"):
status = get_auth_status(active)
if status.get("logged_in"):
return True
except Exception:
pass
@ -145,7 +146,7 @@ def cmd_model(args):
resolve_provider, get_provider_auth_state, PROVIDER_REGISTRY,
_prompt_model_selection, _save_model_choice, _update_config_for_provider,
resolve_nous_runtime_credentials, fetch_nous_models, AuthError, format_auth_error,
_login_nous, ProviderConfig,
_login_nous,
)
from hermes_cli.config import load_config, save_config, get_env_value, save_env_value
@ -168,7 +169,12 @@ def cmd_model(args):
or config_provider
or "auto"
)
active = resolve_provider(effective_provider)
try:
active = resolve_provider(effective_provider)
except AuthError as exc:
warning = format_auth_error(exc)
print(f"Warning: {warning} Falling back to auto provider detection.")
active = resolve_provider("auto")
# Detect custom endpoint
if active == "openrouter" and get_env_value("OPENAI_BASE_URL"):
@ -177,6 +183,7 @@ def cmd_model(args):
provider_labels = {
"openrouter": "OpenRouter",
"nous": "Nous Portal",
"openai-codex": "OpenAI Codex",
"custom": "Custom endpoint",
}
active_label = provider_labels.get(active, active)
@ -190,11 +197,12 @@ def cmd_model(args):
providers = [
("openrouter", "OpenRouter (100+ models, pay-per-use)"),
("nous", "Nous Portal (Nous Research subscription)"),
("openai-codex", "OpenAI Codex (ChatGPT/Codex CLI login)"),
("custom", "Custom endpoint (self-hosted / VLLM / etc.)"),
]
# Reorder so the active provider is at the top
active_key = active if active in ("openrouter", "nous") else "custom"
active_key = active if active in ("openrouter", "nous", "openai-codex") else "custom"
ordered = []
for key, label in providers:
if key == active_key:
@ -215,6 +223,8 @@ def cmd_model(args):
_model_flow_openrouter(config, current_model)
elif selected_provider == "nous":
_model_flow_nous(config, current_model)
elif selected_provider == "openai-codex":
_model_flow_openai_codex(config, current_model)
elif selected_provider == "custom":
_model_flow_custom(config)
@ -368,6 +378,52 @@ def _model_flow_nous(config, current_model=""):
print("No change.")
def _model_flow_openai_codex(config, current_model=""):
    """OpenAI Codex provider: ensure logged in, then pick model."""
    from hermes_cli.auth import (
        get_codex_auth_status, _prompt_model_selection, _save_model_choice,
        _update_config_for_provider, _login_openai_codex,
        PROVIDER_REGISTRY, DEFAULT_CODEX_BASE_URL,
    )
    from hermes_cli.config import get_env_value, save_env_value
    import argparse

    if not get_codex_auth_status().get("logged_in"):
        print("Not logged into OpenAI Codex. Starting login...")
        print()
        try:
            # The login helper only expects an args namespace; an empty one
            # is enough for this flow.
            _login_openai_codex(argparse.Namespace(), PROVIDER_REGISTRY["openai-codex"])
        except SystemExit:
            print("Login cancelled or failed.")
            return
        except Exception as exc:
            print(f"Login failed: {exc}")
            return

    # Codex models are not discoverable through /models with this auth path,
    # so provide curated IDs with custom fallback.
    codex_models = [
        "gpt-5-codex",
        "gpt-5.3-codex",
        "gpt-5.2-codex",
        "gpt-5.1-codex",
    ]
    selected = _prompt_model_selection(codex_models, current_model=current_model)
    if not selected:
        print("No change.")
        return

    _save_model_choice(selected)
    _update_config_for_provider("openai-codex", DEFAULT_CODEX_BASE_URL)
    # Clear custom endpoint env vars that would otherwise override Codex.
    if get_env_value("OPENAI_BASE_URL"):
        save_env_value("OPENAI_BASE_URL", "")
        save_env_value("OPENAI_API_KEY", "")
    print(f"Default model set to: {selected} (via OpenAI Codex)")
def _model_flow_custom(config):
"""Custom endpoint: collect URL, API key, and model name."""
from hermes_cli.auth import _save_model_choice, deactivate_provider
@ -678,7 +734,7 @@ For more help on a command:
)
chat_parser.add_argument(
"--provider",
choices=["auto", "openrouter", "nous"],
choices=["auto", "openrouter", "nous", "openai-codex"],
default=None,
help="Inference provider (default: auto)"
)
@ -765,9 +821,9 @@ For more help on a command:
)
login_parser.add_argument(
"--provider",
choices=["nous"],
choices=["nous", "openai-codex"],
default=None,
help="Provider to authenticate with (default: interactive selection)"
help="Provider to authenticate with (default: nous)"
)
login_parser.add_argument(
"--portal-url",
@ -819,7 +875,7 @@ For more help on a command:
)
logout_parser.add_argument(
"--provider",
choices=["nous"],
choices=["nous", "openai-codex"],
default=None,
help="Provider to log out from (default: active provider)"
)

View file

@ -0,0 +1,149 @@
"""Shared runtime provider resolution for CLI, gateway, cron, and helpers."""
from __future__ import annotations
import os
from typing import Any, Dict, Optional
from hermes_cli.auth import (
AuthError,
format_auth_error,
resolve_provider,
resolve_nous_runtime_credentials,
resolve_codex_runtime_credentials,
)
from hermes_cli.config import load_config
from hermes_constants import OPENROUTER_BASE_URL
def _get_model_config() -> Dict[str, Any]:
    """Normalize the config's ``model`` section into a plain dict."""
    raw = load_config().get("model")
    if isinstance(raw, dict):
        # Copy so callers can mutate freely without touching the loaded config.
        return dict(raw)
    if isinstance(raw, str):
        stripped = raw.strip()
        if stripped:
            # Legacy form: `model: "<id>"` maps onto the default-model key.
            return {"default": stripped}
    return {}
def resolve_requested_provider(requested: Optional[str] = None) -> str:
    """Resolve provider request from explicit arg, env, then config."""
    cfg_provider = _get_model_config().get("provider")
    # Precedence: explicit argument > HERMES_INFERENCE_PROVIDER > config.
    for candidate in (requested, os.getenv("HERMES_INFERENCE_PROVIDER"), cfg_provider):
        if isinstance(candidate, str) and candidate.strip():
            return candidate.strip().lower()
    return "auto"
def _resolve_openrouter_runtime(
    *,
    requested_provider: str,
    explicit_api_key: Optional[str] = None,
    explicit_base_url: Optional[str] = None,
) -> Dict[str, Any]:
    """Build OpenRouter / custom-endpoint runtime credentials.

    Base-URL precedence: explicit CLI value, then $OPENAI_BASE_URL, then the
    config file's model.base_url (only in 'auto' mode when no provider is
    pinned), then $OPENROUTER_BASE_URL, then the built-in default.
    """
    model_cfg = _get_model_config()

    raw_base = model_cfg.get("base_url")
    cfg_base_url = raw_base if isinstance(raw_base, str) else ""
    raw_provider = model_cfg.get("provider")
    cfg_provider = (raw_provider if isinstance(raw_provider, str) else "").strip().lower()

    requested_norm = (requested_provider or "").strip().lower()
    env_openai_base_url = os.getenv("OPENAI_BASE_URL", "").strip()
    env_openrouter_base_url = os.getenv("OPENROUTER_BASE_URL", "").strip()

    # The config base_url only participates in 'auto' mode, when nothing more
    # specific (explicit arg or $OPENAI_BASE_URL) overrides it and the config
    # does not pin a concrete provider.
    use_config_base_url = (
        requested_norm == "auto"
        and bool(cfg_base_url.strip())
        and not explicit_base_url
        and not env_openai_base_url
        and cfg_provider in ("", "auto")
    )

    base_url = (
        (explicit_base_url or "").strip()
        or env_openai_base_url
        or (cfg_base_url.strip() if use_config_base_url else "")
        or env_openrouter_base_url
        or OPENROUTER_BASE_URL
    ).rstrip("/")

    api_key = (
        explicit_api_key
        or os.getenv("OPENAI_API_KEY")
        or os.getenv("OPENROUTER_API_KEY")
        or ""
    )

    return {
        "provider": "openrouter",
        "api_mode": "chat_completions",
        "base_url": base_url,
        "api_key": api_key,
        # 'explicit' marks one-off CLI credentials vs ambient env/config.
        "source": "explicit" if (explicit_api_key or explicit_base_url) else "env/config",
    }
def resolve_runtime_provider(
    *,
    requested: Optional[str] = None,
    explicit_api_key: Optional[str] = None,
    explicit_base_url: Optional[str] = None,
) -> Dict[str, Any]:
    """Resolve runtime provider credentials for agent execution.

    Args:
        requested: Explicit provider id; when omitted, falls back to the
            env/config chain via resolve_requested_provider().
        explicit_api_key: One-off CLI API key (forces the openrouter path).
        explicit_base_url: One-off CLI base URL (forces the openrouter path).

    Returns:
        A dict containing at least ``provider``, ``api_mode``, ``base_url``,
        ``api_key``, ``source``, and ``requested_provider``.

    Raises:
        AuthError: propagated from provider resolution or credential lookup.
    """

    def _env_int(name: str, default: int) -> int:
        # A malformed env var should not crash agent startup; fall back to
        # the documented default instead of raising ValueError.
        try:
            return int(os.getenv(name, str(default)))
        except ValueError:
            return default

    def _env_float(name: str, default: float) -> float:
        # Same fallback rule as _env_int, for float-valued knobs.
        try:
            return float(os.getenv(name, str(default)))
        except ValueError:
            return default

    requested_provider = resolve_requested_provider(requested)
    provider = resolve_provider(
        requested_provider,
        explicit_api_key=explicit_api_key,
        explicit_base_url=explicit_base_url,
    )

    if provider == "nous":
        creds = resolve_nous_runtime_credentials(
            # Never accept an agent key with less than 60s of life left.
            min_key_ttl_seconds=max(60, _env_int("HERMES_NOUS_MIN_KEY_TTL_SECONDS", 1800)),
            timeout_seconds=_env_float("HERMES_NOUS_TIMEOUT_SECONDS", 15.0),
        )
        return {
            "provider": "nous",
            "api_mode": "chat_completions",
            "base_url": creds.get("base_url", "").rstrip("/"),
            "api_key": creds.get("api_key", ""),
            "source": creds.get("source", "portal"),
            "expires_at": creds.get("expires_at"),
            "requested_provider": requested_provider,
        }

    if provider == "openai-codex":
        creds = resolve_codex_runtime_credentials()
        return {
            "provider": "openai-codex",
            # Codex uses the Responses-style API rather than chat completions.
            "api_mode": "codex_responses",
            "base_url": creds.get("base_url", "").rstrip("/"),
            "api_key": creds.get("api_key", ""),
            "source": creds.get("source", "codex-auth-json"),
            "auth_file": creds.get("auth_file"),
            "codex_home": creds.get("codex_home"),
            "last_refresh": creds.get("last_refresh"),
            "requested_provider": requested_provider,
        }

    # Default path: OpenRouter or a custom OpenAI-compatible endpoint.
    runtime = _resolve_openrouter_runtime(
        requested_provider=requested_provider,
        explicit_api_key=explicit_api_key,
        explicit_base_url=explicit_base_url,
    )
    runtime["requested_provider"] = requested_provider
    return runtime
def format_runtime_provider_error(error: Exception) -> str:
    """Render a provider-resolution failure as a user-facing message."""
    return format_auth_error(error) if isinstance(error, AuthError) else str(error)

View file

@ -613,6 +613,7 @@ def run_setup_wizard(args):
get_active_provider, get_provider_auth_state, PROVIDER_REGISTRY,
format_auth_error, AuthError, fetch_nous_models,
resolve_nous_runtime_credentials, _update_config_for_provider,
_login_openai_codex, get_codex_auth_status, DEFAULT_CODEX_BASE_URL,
)
existing_custom = get_env_value("OPENAI_BASE_URL")
existing_or = get_env_value("OPENROUTER_API_KEY")
@ -633,6 +634,7 @@ def run_setup_wizard(args):
provider_choices = [
"Login with Nous Portal (Nous Research subscription)",
"Login with OpenAI Codex (ChatGPT/Codex CLI auth)",
"OpenRouter API key (100+ models, pay-per-use)",
"Custom OpenAI-compatible endpoint (self-hosted / VLLM / etc.)",
]
@ -640,7 +642,7 @@ def run_setup_wizard(args):
provider_choices.append(keep_label)
# Default to "Keep current" if a provider exists, otherwise OpenRouter (most common)
default_provider = len(provider_choices) - 1 if has_any_provider else 1
default_provider = len(provider_choices) - 1 if has_any_provider else 2
if not has_any_provider:
print_warning("An inference provider is required for Hermes to work.")
@ -649,7 +651,7 @@ def run_setup_wizard(args):
provider_idx = prompt_choice("Select your inference provider:", provider_choices, default_provider)
# Track which provider was selected for model step
selected_provider = None # "nous", "openrouter", "custom", or None (keep)
selected_provider = None # "nous", "openai-codex", "openrouter", "custom", or None (keep)
nous_models = [] # populated if Nous login succeeds
if provider_idx == 0: # Nous Portal
@ -692,7 +694,33 @@ def run_setup_wizard(args):
print_info("You can try again later with: hermes login")
selected_provider = None
elif provider_idx == 1: # OpenRouter
elif provider_idx == 1: # OpenAI Codex
selected_provider = "openai-codex"
print()
print_header("OpenAI Codex Login")
print_info("This uses your Codex CLI auth state from CODEX_HOME/auth.json.")
print_info("If you're not logged in, Hermes will run `codex login`.")
print()
try:
import argparse
mock_args = argparse.Namespace()
_login_openai_codex(mock_args, PROVIDER_REGISTRY["openai-codex"])
# Clear custom endpoint vars that would override provider routing.
if existing_custom:
save_env_value("OPENAI_BASE_URL", "")
save_env_value("OPENAI_API_KEY", "")
_update_config_for_provider("openai-codex", DEFAULT_CODEX_BASE_URL)
except SystemExit:
print_warning("OpenAI Codex login was cancelled or failed.")
print_info("You can try again later with: hermes login --provider openai-codex")
selected_provider = None
except Exception as e:
print_error(f"Login failed: {e}")
print_info("You can try again later with: hermes login --provider openai-codex")
selected_provider = None
elif provider_idx == 2: # OpenRouter
selected_provider = "openrouter"
print()
print_header("OpenRouter API Key")
@ -719,7 +747,7 @@ def run_setup_wizard(args):
save_env_value("OPENAI_BASE_URL", "")
save_env_value("OPENAI_API_KEY", "")
elif provider_idx == 2: # Custom endpoint
elif provider_idx == 3: # Custom endpoint
selected_provider = "custom"
print()
print_header("Custom OpenAI-Compatible Endpoint")
@ -746,14 +774,14 @@ def run_setup_wizard(args):
config['model'] = model_name
save_env_value("LLM_MODEL", model_name)
print_success("Custom endpoint configured")
# else: provider_idx == 3 (Keep current) — only shown when a provider already exists
# else: provider_idx == 4 (Keep current) — only shown when a provider already exists
# =========================================================================
# Step 1b: OpenRouter API Key for tools (if not already set)
# =========================================================================
# Tools (vision, web, MoA) use OpenRouter independently of the main provider.
# Prompt for OpenRouter key if not set and a non-OpenRouter provider was chosen.
if selected_provider in ("nous", "custom") and not get_env_value("OPENROUTER_API_KEY"):
if selected_provider in ("nous", "openai-codex", "custom") and not get_env_value("OPENROUTER_API_KEY"):
print()
print_header("OpenRouter API Key (for tools)")
print_info("Tools like vision analysis, web search, and MoA use OpenRouter")
@ -799,6 +827,29 @@ def run_setup_wizard(args):
config['model'] = custom
save_env_value("LLM_MODEL", custom)
# else: keep current
elif selected_provider == "openai-codex":
codex_models = [
"gpt-5-codex",
"gpt-5.3-codex",
"gpt-5.2-codex",
"gpt-5.1-codex",
]
model_choices = [f"{m}" for m in codex_models]
model_choices.append("Custom model")
model_choices.append(f"Keep current ({current_model})")
keep_idx = len(model_choices) - 1
model_idx = prompt_choice("Select default model:", model_choices, keep_idx)
if model_idx < len(codex_models):
config['model'] = codex_models[model_idx]
save_env_value("LLM_MODEL", codex_models[model_idx])
elif model_idx == len(codex_models):
custom = prompt("Enter model name")
if custom:
config['model'] = custom
save_env_value("LLM_MODEL", custom)
_update_config_for_provider("openai-codex", DEFAULT_CODEX_BASE_URL)
else:
# Static list for OpenRouter / fallback (from canonical list)
from hermes_cli.models import model_ids, menu_labels

View file

@ -100,10 +100,12 @@ def show_status(args):
print(color("◆ Auth Providers", Colors.CYAN, Colors.BOLD))
try:
from hermes_cli.auth import get_nous_auth_status
from hermes_cli.auth import get_nous_auth_status, get_codex_auth_status
nous_status = get_nous_auth_status()
codex_status = get_codex_auth_status()
except Exception:
nous_status = {}
codex_status = {}
nous_logged_in = bool(nous_status.get("logged_in"))
print(
@ -120,6 +122,20 @@ def show_status(args):
print(f" Key exp: {key_exp}")
print(f" Refresh: {refresh_label}")
codex_logged_in = bool(codex_status.get("logged_in"))
print(
f" {'OpenAI Codex':<12} {check_mark(codex_logged_in)} "
f"{'logged in' if codex_logged_in else 'not logged in (run: hermes login --provider openai-codex)'}"
)
codex_auth_file = codex_status.get("auth_file")
if codex_auth_file:
print(f" Auth file: {codex_auth_file}")
codex_last_refresh = _format_iso_timestamp(codex_status.get("last_refresh"))
if codex_status.get("last_refresh"):
print(f" Refreshed: {codex_last_refresh}")
if codex_status.get("error") and not codex_logged_in:
print(f" Error: {codex_status.get('error')}")
# =========================================================================
# Terminal Configuration
# =========================================================================