refactor(cli): Finalize OpenAI Codex Integration with OAuth
- Enhanced Codex model discovery by fetching available models from the API, with fallback to local cache and defaults.
- Updated the context compressor's summary target tokens to 2500 for improved performance.
- Added external credential detection for Codex CLI to streamline authentication.
- Refactored various components to ensure consistent handling of authentication and model selection across the application.
This commit is contained in:
parent
86b1db0598
commit
500f0eab4a
22 changed files with 1784 additions and 207 deletions
|
|
@ -10,7 +10,7 @@ Architecture:
|
|||
- Auth store (auth.json) holds per-provider credential state
|
||||
- resolve_provider() picks the active provider via priority chain
|
||||
- resolve_*_runtime_credentials() handles token refresh and key minting
|
||||
- login_command() / logout_command() are the CLI entry points
|
||||
- logout_command() is the CLI entry point for clearing auth
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
@ -127,7 +127,7 @@ def format_auth_error(error: Exception) -> str:
|
|||
return str(error)
|
||||
|
||||
if error.relogin_required:
|
||||
return f"{error} Run `hermes login` to re-authenticate."
|
||||
return f"{error} Run `hermes model` to re-authenticate."
|
||||
|
||||
if error.code == "subscription_required":
|
||||
return (
|
||||
|
|
@ -1172,6 +1172,39 @@ def get_auth_status(provider_id: Optional[str] = None) -> Dict[str, Any]:
|
|||
return {"logged_in": False}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# External credential detection
|
||||
# =============================================================================
|
||||
|
||||
def detect_external_credentials() -> List[Dict[str, Any]]:
    """Scan for credentials from other CLI tools that Hermes can reuse.

    Returns a list of dicts, each with:
      - provider: str -- Hermes provider id (e.g. "openai-codex")
      - path: str -- filesystem path where creds were found
      - label: str -- human-friendly description for the setup UI
    """
    found: List[Dict[str, Any]] = []

    # Codex CLI: ~/.codex/auth.json (or $CODEX_HOME/auth.json)
    try:
        codex_home = resolve_codex_home_path()
        codex_auth = codex_home / "auth.json"
        if codex_auth.is_file():
            # Explicit encoding: auth.json is JSON, which is UTF-8 by spec.
            data = json.loads(codex_auth.read_text(encoding="utf-8"))
            tokens = data.get("tokens", {})
            # Only report credentials that actually carry an access token;
            # an empty/partial auth.json is not reusable.
            if isinstance(tokens, dict) and tokens.get("access_token"):
                found.append({
                    "provider": "openai-codex",
                    "path": str(codex_auth),
                    "label": f"Codex CLI credentials found ({codex_auth})",
                })
    except Exception:
        # Best-effort detection: unreadable file, malformed JSON, or a
        # missing home dir simply means "nothing detected" -- never let
        # detection crash the setup wizard.
        pass

    return found
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# CLI Commands — login / logout
|
||||
# =============================================================================
|
||||
|
|
@ -1328,56 +1361,43 @@ def _save_model_choice(model_id: str) -> None:
|
|||
|
||||
|
||||
def login_command(args) -> None:
    """Deprecated: use 'hermes model' or 'hermes setup' instead.

    Kept as a stub so that `hermes login` prints migration guidance
    rather than failing with an unknown-command error. The `args`
    parameter is retained for CLI-dispatch compatibility but unused.
    Always exits with status 0 (informational, not an error).
    """
    print("The 'hermes login' command has been removed.")
    print("Use 'hermes model' to select a provider and model,")
    print("or 'hermes setup' for full interactive setup.")
    raise SystemExit(0)
|
||||
|
||||
|
||||
def _login_openai_codex(args, pconfig: ProviderConfig) -> None:
    """OpenAI Codex login via device code flow (no Codex CLI required).

    If valid Codex credentials already exist (e.g. written by the Codex
    CLI), offers to reuse them; otherwise runs the device code flow and
    persists the resulting credentials.
    """
    codex_home = resolve_codex_home_path()

    print(f"Starting Hermes login via {pconfig.name}...")

    creds: Dict[str, Any]
    # Check for existing valid credentials first
    try:
        existing = resolve_codex_runtime_credentials()
        print(f"Existing Codex credentials found at {codex_home / 'auth.json'}")
        try:
            reuse = input("Use existing credentials? [Y/n]: ").strip().lower()
        except (EOFError, KeyboardInterrupt):
            # Non-interactive or interrupted prompt: default to reuse,
            # the safest non-destructive choice.
            reuse = "y"
        if reuse in ("", "y", "yes"):
            creds = existing
            _save_codex_provider_state(creds)
            return
    except AuthError:
        # No usable existing credentials -- fall through to fresh login.
        pass

    # No existing creds (or user declined) -- run device code flow
    print()
    print("Signing in to OpenAI Codex...")
    print()

    creds = _codex_device_code_login()
    _save_codex_provider_state(creds)
|
||||
|
||||
|
||||
def _save_codex_provider_state(creds: Dict[str, Any]) -> None:
|
||||
"""Persist Codex provider state to auth store and config."""
|
||||
auth_state = {
|
||||
"auth_file": creds.get("auth_file"),
|
||||
"codex_home": creds.get("codex_home"),
|
||||
|
|
@ -1391,13 +1411,170 @@ def _login_openai_codex(args, pconfig: ProviderConfig) -> None:
|
|||
_save_provider_state(auth_store, "openai-codex", auth_state)
|
||||
saved_to = _save_auth_store(auth_store)
|
||||
|
||||
config_path = _update_config_for_provider("openai-codex", creds["base_url"])
|
||||
config_path = _update_config_for_provider("openai-codex", creds.get("base_url", DEFAULT_CODEX_BASE_URL))
|
||||
print()
|
||||
print("Login successful!")
|
||||
print(f" Auth state: {saved_to}")
|
||||
print(f" Config updated: {config_path} (model.provider=openai-codex)")
|
||||
|
||||
|
||||
def _codex_device_code_login() -> Dict[str, Any]:
    """Run the OpenAI device code login flow and return credentials dict.

    Orchestrates the flow in five steps:
      1. Request a device/user code from the auth service.
      2. Display the verification URL and user code.
      3. Poll until the user completes sign-in (15 minute limit).
      4. Exchange the authorization code for OAuth tokens (PKCE).
      5. Persist tokens to $CODEX_HOME/auth.json and build the creds dict.

    Returns:
        Credentials dict with api_key, base_url, auth_file, codex_home,
        last_refresh, auth_mode, and source keys.

    Raises:
        AuthError: on network, protocol, or timeout failures.
        SystemExit: with code 130 if the user cancels while polling.
    """
    issuer = "https://auth.openai.com"
    client_id = CODEX_OAUTH_CLIENT_ID

    user_code, device_auth_id, poll_interval = _codex_request_device_code(
        issuer, client_id
    )

    # Step 2: Show user the code
    print("To continue, follow these steps:\n")
    print(" 1. Open this URL in your browser:")
    print(f" \033[94m{issuer}/codex/device\033[0m\n")
    print(" 2. Enter this code:")
    print(f" \033[94m{user_code}\033[0m\n")
    print("Waiting for sign-in... (press Ctrl+C to cancel)")

    code_resp = _codex_poll_device_auth(
        issuer, device_auth_id, user_code, poll_interval
    )
    access_token, refresh_token = _codex_exchange_auth_code(
        issuer, client_id, code_resp
    )
    return _codex_build_credentials(access_token, refresh_token)


def _codex_request_device_code(issuer: str, client_id: str):
    """Step 1: request a device code; return (user_code, device_auth_id, poll_interval)."""
    try:
        with httpx.Client(timeout=httpx.Timeout(15.0)) as client:
            resp = client.post(
                f"{issuer}/api/accounts/deviceauth/usercode",
                json={"client_id": client_id},
                headers={"Content-Type": "application/json"},
            )
    except Exception as exc:
        raise AuthError(
            f"Failed to request device code: {exc}",
            provider="openai-codex", code="device_code_request_failed",
        ) from exc

    if resp.status_code != 200:
        raise AuthError(
            f"Device code request returned status {resp.status_code}.",
            provider="openai-codex", code="device_code_request_error",
        )

    device_data = resp.json()
    user_code = device_data.get("user_code", "")
    device_auth_id = device_data.get("device_auth_id", "")
    # Never poll faster than every 3 seconds, whatever interval the
    # server suggests. int() also tolerates a numeric (non-str) interval.
    poll_interval = max(3, int(device_data.get("interval", "5")))

    if not user_code or not device_auth_id:
        raise AuthError(
            "Device code response missing required fields.",
            provider="openai-codex", code="device_code_incomplete",
        )
    return user_code, device_auth_id, poll_interval


def _codex_poll_device_auth(
    issuer: str, device_auth_id: str, user_code: str, poll_interval: int
) -> Dict[str, Any]:
    """Step 3: poll the device auth endpoint until sign-in completes or times out."""
    import time as _time

    max_wait = 15 * 60  # 15 minutes
    start = _time.monotonic()
    code_resp = None

    try:
        with httpx.Client(timeout=httpx.Timeout(15.0)) as client:
            while _time.monotonic() - start < max_wait:
                _time.sleep(poll_interval)
                poll_resp = client.post(
                    f"{issuer}/api/accounts/deviceauth/token",
                    json={"device_auth_id": device_auth_id, "user_code": user_code},
                    headers={"Content-Type": "application/json"},
                )

                if poll_resp.status_code == 200:
                    code_resp = poll_resp.json()
                    break
                elif poll_resp.status_code in (403, 404):
                    continue  # User hasn't completed login yet
                else:
                    raise AuthError(
                        f"Device auth polling returned status {poll_resp.status_code}.",
                        provider="openai-codex", code="device_code_poll_error",
                    )
    except KeyboardInterrupt:
        print("\nLogin cancelled.")
        raise SystemExit(130)

    if code_resp is None:
        raise AuthError(
            "Login timed out after 15 minutes.",
            provider="openai-codex", code="device_code_timeout",
        )
    return code_resp


def _codex_exchange_auth_code(
    issuer: str, client_id: str, code_resp: Dict[str, Any]
):
    """Step 4: exchange the authorization code (PKCE) for (access_token, refresh_token)."""
    authorization_code = code_resp.get("authorization_code", "")
    code_verifier = code_resp.get("code_verifier", "")
    redirect_uri = f"{issuer}/deviceauth/callback"

    if not authorization_code or not code_verifier:
        raise AuthError(
            "Device auth response missing authorization_code or code_verifier.",
            provider="openai-codex", code="device_code_incomplete_exchange",
        )

    try:
        with httpx.Client(timeout=httpx.Timeout(15.0)) as client:
            token_resp = client.post(
                CODEX_OAUTH_TOKEN_URL,
                data={
                    "grant_type": "authorization_code",
                    "code": authorization_code,
                    "redirect_uri": redirect_uri,
                    "client_id": client_id,
                    "code_verifier": code_verifier,
                },
                headers={"Content-Type": "application/x-www-form-urlencoded"},
            )
    except Exception as exc:
        raise AuthError(
            f"Token exchange failed: {exc}",
            provider="openai-codex", code="token_exchange_failed",
        ) from exc

    if token_resp.status_code != 200:
        raise AuthError(
            f"Token exchange returned status {token_resp.status_code}.",
            provider="openai-codex", code="token_exchange_error",
        )

    tokens = token_resp.json()
    access_token = tokens.get("access_token", "")
    refresh_token = tokens.get("refresh_token", "")

    if not access_token:
        raise AuthError(
            "Token exchange did not return an access_token.",
            provider="openai-codex", code="token_exchange_no_access_token",
        )
    return access_token, refresh_token


def _codex_build_credentials(access_token: str, refresh_token: str) -> Dict[str, Any]:
    """Step 5: persist tokens to $CODEX_HOME/auth.json and return the creds dict."""
    codex_home = resolve_codex_home_path()
    codex_home.mkdir(parents=True, exist_ok=True)
    auth_path = codex_home / "auth.json"

    payload = {
        "tokens": {
            "access_token": access_token,
            "refresh_token": refresh_token,
        },
        # Codex CLI's auth.json uses a trailing-Z UTC timestamp format.
        "last_refresh": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    }
    _persist_codex_auth_payload(auth_path, payload, lock_held=False)

    base_url = (
        os.getenv("HERMES_CODEX_BASE_URL", "").strip().rstrip("/")
        or DEFAULT_CODEX_BASE_URL
    )

    return {
        "api_key": access_token,
        "base_url": base_url,
        "auth_file": str(auth_path),
        "codex_home": str(codex_home),
        "last_refresh": payload["last_refresh"],
        "auth_mode": "chatgpt",
        "source": "device-code",
    }
|
||||
|
||||
|
||||
def _login_nous(args, pconfig: ProviderConfig) -> None:
|
||||
"""Nous Portal device authorization flow."""
|
||||
portal_base_url = (
|
||||
|
|
@ -1579,6 +1756,6 @@ def logout_command(args) -> None:
|
|||
if os.getenv("OPENROUTER_API_KEY"):
|
||||
print("Hermes will use OpenRouter for inference.")
|
||||
else:
|
||||
print("Run `hermes login` or configure an API key to use Hermes.")
|
||||
print("Run `hermes model` or configure an API key to use Hermes.")
|
||||
else:
|
||||
print(f"No auth state found for {provider_name}.")
|
||||
|
|
|
|||
|
|
@ -1,21 +1,62 @@
|
|||
"""Codex model discovery from local Codex CLI cache/config."""
|
||||
"""Codex model discovery from API, local cache, and config."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from hermes_cli.auth import resolve_codex_home_path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Hardcoded fallback list of Codex model slugs, used only when neither the
# live API nor the local Codex CLI cache/config yields any models.
# NOTE(review): the ordering looks intentional (not version-sorted) --
# confirm the intended presentation order before reordering.
DEFAULT_CODEX_MODELS: List[str] = [
    "gpt-5-codex",
    "gpt-5.3-codex",
    "gpt-5.2-codex",
    "gpt-5.1-codex",
    "gpt-5.1-codex-max",
    "gpt-5.1-codex-mini",
]
|
||||
|
||||
|
||||
def _fetch_models_from_api(access_token: str) -> List[str]:
    """Fetch available models from the Codex API. Returns visible models sorted by priority."""
    try:
        import httpx
        response = httpx.get(
            "https://chatgpt.com/backend-api/codex/models?client_version=1.0.0",
            headers={"Authorization": f"Bearer {access_token}"},
            timeout=10,
        )
        if response.status_code != 200:
            return []
        payload = response.json()
        raw_entries = payload.get("models", []) if isinstance(payload, dict) else []
    except Exception as exc:
        logger.debug("Failed to fetch Codex models from API: %s", exc)
        return []

    def _usable(entry: object) -> bool:
        # An entry counts only if it is a dict with a non-blank slug,
        # is not explicitly unsupported in the API, and is not hidden.
        if not isinstance(entry, dict):
            return False
        slug = entry.get("slug")
        if not isinstance(slug, str) or not slug.strip():
            return False
        if entry.get("supported_in_api") is False:
            return False
        visibility = entry.get("visibility", "")
        return not (isinstance(visibility, str) and visibility.strip().lower() == "hide")

    def _rank(entry: dict) -> int:
        # Entries without a numeric priority sort after everything else.
        priority = entry.get("priority")
        return int(priority) if isinstance(priority, (int, float)) else 10_000

    # Sort by (priority rank, slug); tuple ordering gives the tiebreak for free.
    ranked = sorted(
        (_rank(entry), entry["slug"].strip())
        for entry in raw_entries
        if _usable(entry)
    )
    return [slug for _, slug in ranked]
|
||||
|
||||
|
||||
def _read_default_model(codex_home: Path) -> Optional[str]:
|
||||
config_path = codex_home / "config.toml"
|
||||
if not config_path.exists():
|
||||
|
|
@ -72,10 +113,22 @@ def _read_cache_models(codex_home: Path) -> List[str]:
|
|||
return deduped
|
||||
|
||||
|
||||
def get_codex_model_ids() -> List[str]:
|
||||
def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
|
||||
"""Return available Codex model IDs, trying API first, then local sources.
|
||||
|
||||
Resolution order: API (live, if token provided) > config.toml default >
|
||||
local cache > hardcoded defaults.
|
||||
"""
|
||||
codex_home = resolve_codex_home_path()
|
||||
ordered: List[str] = []
|
||||
|
||||
# Try live API if we have a token
|
||||
if access_token:
|
||||
api_models = _fetch_models_from_api(access_token)
|
||||
if api_models:
|
||||
return api_models
|
||||
|
||||
# Fall back to local sources
|
||||
default_model = _read_default_model(codex_home)
|
||||
if default_model:
|
||||
ordered.append(default_model)
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ Usage:
|
|||
hermes gateway install # Install gateway service
|
||||
hermes gateway uninstall # Uninstall gateway service
|
||||
hermes setup # Interactive setup wizard
|
||||
hermes login # Authenticate with Nous Portal (or other providers)
|
||||
hermes logout # Clear stored authentication
|
||||
hermes status # Show status of all components
|
||||
hermes cron # Manage cron jobs
|
||||
|
|
@ -547,7 +546,14 @@ def _model_flow_openai_codex(config, current_model=""):
|
|||
print(f"Login failed: {exc}")
|
||||
return
|
||||
|
||||
codex_models = get_codex_model_ids()
|
||||
_codex_token = None
|
||||
try:
|
||||
from hermes_cli.auth import resolve_codex_runtime_credentials
|
||||
_codex_creds = resolve_codex_runtime_credentials()
|
||||
_codex_token = _codex_creds.get("api_key")
|
||||
except Exception:
|
||||
pass
|
||||
codex_models = get_codex_model_ids(access_token=_codex_token)
|
||||
|
||||
selected = _prompt_model_selection(codex_models, current_model=current_model)
|
||||
if selected:
|
||||
|
|
@ -827,8 +833,8 @@ def cmd_update(args):
|
|||
pass # No systemd (macOS, WSL1, etc.) — skip silently
|
||||
|
||||
print()
|
||||
print("Tip: You can now log in with Nous Portal for inference:")
|
||||
print(" hermes login # Authenticate with Nous Portal")
|
||||
print("Tip: You can now select a provider and model:")
|
||||
print(" hermes model # Select provider and model")
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"✗ Update failed: {e}")
|
||||
|
|
@ -848,7 +854,6 @@ Examples:
|
|||
hermes --continue Resume the most recent session
|
||||
hermes --resume <session_id> Resume a specific session
|
||||
hermes setup Run setup wizard
|
||||
hermes login Authenticate with an inference provider
|
||||
hermes logout Clear stored authentication
|
||||
hermes model Select default model
|
||||
hermes config View configuration
|
||||
|
|
|
|||
|
|
@ -621,11 +621,23 @@ def run_setup_wizard(args):
|
|||
format_auth_error, AuthError, fetch_nous_models,
|
||||
resolve_nous_runtime_credentials, _update_config_for_provider,
|
||||
_login_openai_codex, get_codex_auth_status, DEFAULT_CODEX_BASE_URL,
|
||||
detect_external_credentials,
|
||||
)
|
||||
existing_custom = get_env_value("OPENAI_BASE_URL")
|
||||
existing_or = get_env_value("OPENROUTER_API_KEY")
|
||||
active_oauth = get_active_provider()
|
||||
|
||||
# Detect credentials from other CLI tools
|
||||
detected_creds = detect_external_credentials()
|
||||
if detected_creds:
|
||||
print_info("Detected existing credentials:")
|
||||
for cred in detected_creds:
|
||||
if cred["provider"] == "openai-codex":
|
||||
print_success(f" * {cred['label']} -- select \"OpenAI Codex\" to use it")
|
||||
else:
|
||||
print_info(f" * {cred['label']}")
|
||||
print()
|
||||
|
||||
# Detect if any provider is already configured
|
||||
has_any_provider = bool(active_oauth or existing_custom or existing_or)
|
||||
|
||||
|
|
@ -694,11 +706,11 @@ def run_setup_wizard(args):
|
|||
|
||||
except SystemExit:
|
||||
print_warning("Nous Portal login was cancelled or failed.")
|
||||
print_info("You can try again later with: hermes login")
|
||||
print_info("You can try again later with: hermes model")
|
||||
selected_provider = None
|
||||
except Exception as e:
|
||||
print_error(f"Login failed: {e}")
|
||||
print_info("You can try again later with: hermes login")
|
||||
print_info("You can try again later with: hermes model")
|
||||
selected_provider = None
|
||||
|
||||
elif provider_idx == 1: # OpenAI Codex
|
||||
|
|
@ -718,11 +730,11 @@ def run_setup_wizard(args):
|
|||
_update_config_for_provider("openai-codex", DEFAULT_CODEX_BASE_URL)
|
||||
except SystemExit:
|
||||
print_warning("OpenAI Codex login was cancelled or failed.")
|
||||
print_info("You can try again later with: hermes login --provider openai-codex")
|
||||
print_info("You can try again later with: hermes model")
|
||||
selected_provider = None
|
||||
except Exception as e:
|
||||
print_error(f"Login failed: {e}")
|
||||
print_info("You can try again later with: hermes login --provider openai-codex")
|
||||
print_info("You can try again later with: hermes model")
|
||||
selected_provider = None
|
||||
|
||||
elif provider_idx == 2: # OpenRouter
|
||||
|
|
@ -834,7 +846,15 @@ def run_setup_wizard(args):
|
|||
# else: keep current
|
||||
elif selected_provider == "openai-codex":
|
||||
from hermes_cli.codex_models import get_codex_model_ids
|
||||
codex_models = get_codex_model_ids()
|
||||
# Try to get the access token for live model discovery
|
||||
_codex_token = None
|
||||
try:
|
||||
from hermes_cli.auth import resolve_codex_runtime_credentials
|
||||
_codex_creds = resolve_codex_runtime_credentials()
|
||||
_codex_token = _codex_creds.get("api_key")
|
||||
except Exception:
|
||||
pass
|
||||
codex_models = get_codex_model_ids(access_token=_codex_token)
|
||||
model_choices = [f"{m}" for m in codex_models]
|
||||
model_choices.append("Custom model")
|
||||
model_choices.append(f"Keep current ({current_model})")
|
||||
|
|
|
|||
|
|
@ -111,7 +111,7 @@ def show_status(args):
|
|||
nous_logged_in = bool(nous_status.get("logged_in"))
|
||||
print(
|
||||
f" {'Nous Portal':<12} {check_mark(nous_logged_in)} "
|
||||
f"{'logged in' if nous_logged_in else 'not logged in (run: hermes login)'}"
|
||||
f"{'logged in' if nous_logged_in else 'not logged in (run: hermes model)'}"
|
||||
)
|
||||
if nous_logged_in:
|
||||
portal_url = nous_status.get("portal_base_url") or "(unknown)"
|
||||
|
|
@ -126,7 +126,7 @@ def show_status(args):
|
|||
codex_logged_in = bool(codex_status.get("logged_in"))
|
||||
print(
|
||||
f" {'OpenAI Codex':<12} {check_mark(codex_logged_in)} "
|
||||
f"{'logged in' if codex_logged_in else 'not logged in (run: hermes login --provider openai-codex)'}"
|
||||
f"{'logged in' if codex_logged_in else 'not logged in (run: hermes model)'}"
|
||||
)
|
||||
codex_auth_file = codex_status.get("auth_file")
|
||||
if codex_auth_file:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue