feat: integrate GitHub Copilot providers across Hermes
Add first-class GitHub Copilot and Copilot ACP provider support across model selection, runtime provider resolution, CLI sessions, delegated subagents, cron jobs, and the Telegram gateway. This also normalizes Copilot model catalogs and API modes, introduces a Copilot ACP OpenAI-compatible shim, and fixes service-mode auth by resolving Homebrew-installed gh binaries under launchd. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
parent
f656dfcb32
commit
0c392e7a87
26 changed files with 2472 additions and 122 deletions
|
|
@ -11,5 +11,5 @@ Provides subcommands for:
|
|||
- hermes cron - Manage cron jobs
|
||||
"""
|
||||
|
||||
__version__ = "0.3.0"
|
||||
__release_date__ = "2026.3.17"
|
||||
__version__ = "0.4.0"
|
||||
__release_date__ = "2026.3.18"
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@ import json
|
|||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import shlex
|
||||
import stat
|
||||
import base64
|
||||
import hashlib
|
||||
|
|
@ -66,6 +67,8 @@ DEFAULT_AGENT_KEY_MIN_TTL_SECONDS = 30 * 60 # 30 minutes
|
|||
ACCESS_TOKEN_REFRESH_SKEW_SECONDS = 120 # refresh 2 min before expiry
|
||||
DEVICE_AUTH_POLL_INTERVAL_CAP_SECONDS = 1 # poll at most every 1s
|
||||
DEFAULT_CODEX_BASE_URL = "https://chatgpt.com/backend-api/codex"
|
||||
DEFAULT_GITHUB_MODELS_BASE_URL = "https://api.githubcopilot.com"
|
||||
DEFAULT_COPILOT_ACP_BASE_URL = "acp://copilot"
|
||||
CODEX_OAUTH_CLIENT_ID = "app_EMoamEEZ73f0CkXaXp7hrann"
|
||||
CODEX_OAUTH_TOKEN_URL = "https://auth.openai.com/oauth/token"
|
||||
CODEX_ACCESS_TOKEN_REFRESH_SKEW_SECONDS = 120
|
||||
|
|
@ -108,6 +111,20 @@ PROVIDER_REGISTRY: Dict[str, ProviderConfig] = {
|
|||
auth_type="oauth_external",
|
||||
inference_base_url=DEFAULT_CODEX_BASE_URL,
|
||||
),
|
||||
"copilot": ProviderConfig(
|
||||
id="copilot",
|
||||
name="GitHub Copilot",
|
||||
auth_type="api_key",
|
||||
inference_base_url=DEFAULT_GITHUB_MODELS_BASE_URL,
|
||||
api_key_env_vars=("GITHUB_TOKEN", "GH_TOKEN"),
|
||||
),
|
||||
"copilot-acp": ProviderConfig(
|
||||
id="copilot-acp",
|
||||
name="GitHub Copilot ACP",
|
||||
auth_type="external_process",
|
||||
inference_base_url=DEFAULT_COPILOT_ACP_BASE_URL,
|
||||
base_url_env_var="COPILOT_ACP_BASE_URL",
|
||||
),
|
||||
"zai": ProviderConfig(
|
||||
id="zai",
|
||||
name="Z.AI / GLM",
|
||||
|
|
@ -222,6 +239,62 @@ def _resolve_kimi_base_url(api_key: str, default_url: str, env_override: str) ->
|
|||
return default_url
|
||||
|
||||
|
||||
def _gh_cli_candidates() -> list[str]:
|
||||
"""Return candidate ``gh`` binary paths, including common Homebrew installs."""
|
||||
candidates: list[str] = []
|
||||
|
||||
resolved = shutil.which("gh")
|
||||
if resolved:
|
||||
candidates.append(resolved)
|
||||
|
||||
for candidate in (
|
||||
"/opt/homebrew/bin/gh",
|
||||
"/usr/local/bin/gh",
|
||||
str(Path.home() / ".local" / "bin" / "gh"),
|
||||
):
|
||||
if candidate in candidates:
|
||||
continue
|
||||
if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
|
||||
candidates.append(candidate)
|
||||
|
||||
return candidates
|
||||
|
||||
|
||||
def _try_gh_cli_token() -> Optional[str]:
    """Return a token from ``gh auth token`` when the GitHub CLI is available.

    Tries each candidate ``gh`` binary in order and returns the first
    non-empty token printed by ``gh auth token``; returns ``None`` when no
    candidate yields one.
    """
    for gh_path in _gh_cli_candidates():
        try:
            result = subprocess.run(
                [gh_path, "auth", "token"],
                capture_output=True,
                text=True,
                timeout=5,
            )
        # OSError covers FileNotFoundError plus PermissionError and similar
        # launch failures from stale/non-executable candidates; previously
        # only FileNotFoundError was caught, so anything else crashed the
        # whole lookup instead of falling through to the next candidate.
        except (OSError, subprocess.TimeoutExpired) as exc:
            logger.debug("gh CLI token lookup failed (%s): %s", gh_path, exc)
            continue
        token = result.stdout.strip()
        if result.returncode == 0 and token:
            return token
    return None
|
||||
|
||||
|
||||
def _resolve_api_key_provider_secret(
|
||||
provider_id: str, pconfig: ProviderConfig
|
||||
) -> tuple[str, str]:
|
||||
"""Resolve an API-key provider's token and indicate where it came from."""
|
||||
for env_var in pconfig.api_key_env_vars:
|
||||
val = os.getenv(env_var, "").strip()
|
||||
if val:
|
||||
return val, env_var
|
||||
|
||||
if provider_id == "copilot":
|
||||
token = _try_gh_cli_token()
|
||||
if token:
|
||||
return token, "gh auth token"
|
||||
|
||||
return "", ""
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Z.AI Endpoint Detection
|
||||
# =============================================================================
|
||||
|
|
@ -572,6 +645,9 @@ def resolve_provider(
|
|||
"kimi": "kimi-coding", "moonshot": "kimi-coding",
|
||||
"minimax-china": "minimax-cn", "minimax_cn": "minimax-cn",
|
||||
"claude": "anthropic", "claude-code": "anthropic",
|
||||
"github": "copilot", "github-copilot": "copilot",
|
||||
"github-models": "copilot", "github-model": "copilot",
|
||||
"github-copilot-acp": "copilot-acp", "copilot-acp-agent": "copilot-acp",
|
||||
"aigateway": "ai-gateway", "vercel": "ai-gateway", "vercel-ai-gateway": "ai-gateway",
|
||||
"opencode": "opencode-zen", "zen": "opencode-zen",
|
||||
"go": "opencode-go", "opencode-go-sub": "opencode-go",
|
||||
|
|
@ -611,6 +687,11 @@ def resolve_provider(
|
|||
for pid, pconfig in PROVIDER_REGISTRY.items():
|
||||
if pconfig.auth_type != "api_key":
|
||||
continue
|
||||
# GitHub tokens are commonly present for repo/tool access but should not
|
||||
# hijack inference auto-selection unless the user explicitly chooses
|
||||
# Copilot/GitHub Models as the provider.
|
||||
if pid == "copilot":
|
||||
continue
|
||||
for env_var in pconfig.api_key_env_vars:
|
||||
if os.getenv(env_var, "").strip():
|
||||
return pid
|
||||
|
|
@ -1479,12 +1560,7 @@ def get_api_key_provider_status(provider_id: str) -> Dict[str, Any]:
|
|||
|
||||
api_key = ""
|
||||
key_source = ""
|
||||
for env_var in pconfig.api_key_env_vars:
|
||||
val = os.getenv(env_var, "").strip()
|
||||
if val:
|
||||
api_key = val
|
||||
key_source = env_var
|
||||
break
|
||||
api_key, key_source = _resolve_api_key_provider_secret(provider_id, pconfig)
|
||||
|
||||
env_url = ""
|
||||
if pconfig.base_url_env_var:
|
||||
|
|
@ -1507,6 +1583,36 @@ def get_api_key_provider_status(provider_id: str) -> Dict[str, Any]:
|
|||
}
|
||||
|
||||
|
||||
def get_external_process_provider_status(provider_id: str) -> Dict[str, Any]:
    """Status snapshot for providers that run a local subprocess."""
    pconfig = PROVIDER_REGISTRY.get(provider_id)
    if not pconfig or pconfig.auth_type != "external_process":
        return {"configured": False}

    # Command override order: explicit ACP command, generic CLI path,
    # then the plain "copilot" binary name.
    command = "copilot"
    for override_var in ("HERMES_COPILOT_ACP_COMMAND", "COPILOT_CLI_PATH"):
        override = os.getenv(override_var, "").strip()
        if override:
            command = override
            break

    raw_args = os.getenv("HERMES_COPILOT_ACP_ARGS", "").strip()
    if raw_args:
        args = shlex.split(raw_args)
    else:
        args = ["--acp", "--stdio"]

    base_url = ""
    if pconfig.base_url_env_var:
        base_url = os.getenv(pconfig.base_url_env_var, "").strip()
    base_url = base_url or pconfig.inference_base_url

    resolved_command = shutil.which(command) if command else None
    # A remote "acp+tcp://" backend counts as configured even without a
    # locally resolvable binary.
    usable = bool(resolved_command) or base_url.startswith("acp+tcp://")
    return {
        "configured": usable,
        "provider": provider_id,
        "name": pconfig.name,
        "command": command,
        "args": args,
        "resolved_command": resolved_command,
        "base_url": base_url,
        "logged_in": usable,
    }
|
||||
|
||||
|
||||
def get_auth_status(provider_id: Optional[str] = None) -> Dict[str, Any]:
|
||||
"""Generic auth status dispatcher."""
|
||||
target = provider_id or get_active_provider()
|
||||
|
|
@ -1514,6 +1620,8 @@ def get_auth_status(provider_id: Optional[str] = None) -> Dict[str, Any]:
|
|||
return get_nous_auth_status()
|
||||
if target == "openai-codex":
|
||||
return get_codex_auth_status()
|
||||
if target == "copilot-acp":
|
||||
return get_external_process_provider_status(target)
|
||||
# API-key providers
|
||||
pconfig = PROVIDER_REGISTRY.get(target)
|
||||
if pconfig and pconfig.auth_type == "api_key":
|
||||
|
|
@ -1536,12 +1644,7 @@ def resolve_api_key_provider_credentials(provider_id: str) -> Dict[str, Any]:
|
|||
|
||||
api_key = ""
|
||||
key_source = ""
|
||||
for env_var in pconfig.api_key_env_vars:
|
||||
val = os.getenv(env_var, "").strip()
|
||||
if val:
|
||||
api_key = val
|
||||
key_source = env_var
|
||||
break
|
||||
api_key, key_source = _resolve_api_key_provider_secret(provider_id, pconfig)
|
||||
|
||||
env_url = ""
|
||||
if pconfig.base_url_env_var:
|
||||
|
|
@ -1562,6 +1665,46 @@ def resolve_api_key_provider_credentials(provider_id: str) -> Dict[str, Any]:
|
|||
}
|
||||
|
||||
|
||||
def resolve_external_process_provider_credentials(provider_id: str) -> Dict[str, Any]:
    """Resolve runtime details for local subprocess-backed providers.

    Returns a credentials dict describing how to spawn the Copilot ACP
    subprocess (``command``/``args``), the backend marker URL, and a
    placeholder ``api_key`` — the subprocess performs its own auth.

    Raises:
        AuthError: when *provider_id* is not registered as an
            ``external_process`` provider, or when no Copilot CLI binary can
            be resolved and no TCP backend is configured.
    """
    pconfig = PROVIDER_REGISTRY.get(provider_id)
    if not pconfig or pconfig.auth_type != "external_process":
        raise AuthError(
            f"Provider '{provider_id}' is not an external-process provider.",
            provider=provider_id,
            code="invalid_provider",
        )

    # Explicit env override wins; otherwise fall back to the registry default
    # (an "acp://..." marker rather than a real HTTP endpoint).
    base_url = os.getenv(pconfig.base_url_env_var, "").strip() if pconfig.base_url_env_var else ""
    if not base_url:
        base_url = pconfig.inference_base_url

    # Command resolution order: HERMES_COPILOT_ACP_COMMAND, COPILOT_CLI_PATH,
    # then the plain "copilot" binary on PATH.
    command = (
        os.getenv("HERMES_COPILOT_ACP_COMMAND", "").strip()
        or os.getenv("COPILOT_CLI_PATH", "").strip()
        or "copilot"
    )
    raw_args = os.getenv("HERMES_COPILOT_ACP_ARGS", "").strip()
    args = shlex.split(raw_args) if raw_args else ["--acp", "--stdio"]
    resolved_command = shutil.which(command) if command else None
    # A remote "acp+tcp://" backend makes a local binary unnecessary.
    if not resolved_command and not base_url.startswith("acp+tcp://"):
        raise AuthError(
            f"Could not find the Copilot CLI command '{command}'. "
            "Install GitHub Copilot CLI or set HERMES_COPILOT_ACP_COMMAND/COPILOT_CLI_PATH.",
            provider=provider_id,
            code="missing_copilot_cli",
        )

    return {
        "provider": provider_id,
        # Placeholder token: downstream code expects a non-empty api_key, but
        # the ACP subprocess handles real authentication itself.
        "api_key": "copilot-acp",
        "base_url": base_url.rstrip("/"),
        "command": resolved_command or command,
        "args": args,
        "source": "process",
    }
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# External credential detection
|
||||
# =============================================================================
|
||||
|
|
|
|||
|
|
@ -125,6 +125,17 @@ def _has_any_provider_configured() -> bool:
|
|||
except Exception:
|
||||
pass
|
||||
|
||||
# Check provider-specific auth fallbacks (for example, Copilot via gh auth).
|
||||
try:
|
||||
for provider_id, pconfig in PROVIDER_REGISTRY.items():
|
||||
if pconfig.auth_type != "api_key":
|
||||
continue
|
||||
status = get_auth_status(provider_id)
|
||||
if status.get("logged_in"):
|
||||
return True
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Check for Nous Portal OAuth credentials
|
||||
auth_file = get_hermes_home() / "auth.json"
|
||||
if auth_file.exists():
|
||||
|
|
@ -775,6 +786,8 @@ def cmd_model(args):
|
|||
"openrouter": "OpenRouter",
|
||||
"nous": "Nous Portal",
|
||||
"openai-codex": "OpenAI Codex",
|
||||
"copilot-acp": "GitHub Copilot ACP",
|
||||
"copilot": "GitHub Copilot",
|
||||
"anthropic": "Anthropic",
|
||||
"zai": "Z.AI / GLM",
|
||||
"kimi-coding": "Kimi / Moonshot",
|
||||
|
|
@ -799,6 +812,8 @@ def cmd_model(args):
|
|||
("openrouter", "OpenRouter (100+ models, pay-per-use)"),
|
||||
("nous", "Nous Portal (Nous Research subscription)"),
|
||||
("openai-codex", "OpenAI Codex"),
|
||||
("copilot-acp", "GitHub Copilot ACP (spawns `copilot --acp --stdio`)"),
|
||||
("copilot", "GitHub Copilot (uses GITHUB_TOKEN or gh auth token)"),
|
||||
("anthropic", "Anthropic (Claude models — API key or Claude Code)"),
|
||||
("zai", "Z.AI / GLM (Zhipu AI direct API)"),
|
||||
("kimi-coding", "Kimi / Moonshot (Moonshot AI direct API)"),
|
||||
|
|
@ -867,6 +882,10 @@ def cmd_model(args):
|
|||
_model_flow_nous(config, current_model)
|
||||
elif selected_provider == "openai-codex":
|
||||
_model_flow_openai_codex(config, current_model)
|
||||
elif selected_provider == "copilot-acp":
|
||||
_model_flow_copilot_acp(config, current_model)
|
||||
elif selected_provider == "copilot":
|
||||
_model_flow_copilot(config, current_model)
|
||||
elif selected_provider == "custom":
|
||||
_model_flow_custom(config)
|
||||
elif selected_provider.startswith("custom:") and selected_provider in _custom_provider_map:
|
||||
|
|
@ -1407,6 +1426,25 @@ def _model_flow_named_custom(config, provider_info):
|
|||
|
||||
# Curated model lists for direct API-key providers
|
||||
_PROVIDER_MODELS = {
|
||||
"copilot-acp": [
|
||||
"copilot-acp",
|
||||
],
|
||||
"copilot": [
|
||||
"gpt-5.4",
|
||||
"gpt-5.4-mini",
|
||||
"gpt-5-mini",
|
||||
"gpt-5.3-codex",
|
||||
"gpt-5.2-codex",
|
||||
"gpt-4.1",
|
||||
"gpt-4o",
|
||||
"gpt-4o-mini",
|
||||
"claude-opus-4.6",
|
||||
"claude-sonnet-4.6",
|
||||
"claude-sonnet-4.5",
|
||||
"claude-haiku-4.5",
|
||||
"gemini-2.5-pro",
|
||||
"grok-code-fast-1",
|
||||
],
|
||||
"zai": [
|
||||
"glm-5",
|
||||
"glm-4.7",
|
||||
|
|
@ -1447,6 +1485,331 @@ _PROVIDER_MODELS = {
|
|||
}
|
||||
|
||||
|
||||
def _current_reasoning_effort(config) -> str:
|
||||
agent_cfg = config.get("agent")
|
||||
if isinstance(agent_cfg, dict):
|
||||
return str(agent_cfg.get("reasoning_effort") or "").strip().lower()
|
||||
return ""
|
||||
|
||||
|
||||
def _set_reasoning_effort(config, effort: str) -> None:
|
||||
agent_cfg = config.get("agent")
|
||||
if not isinstance(agent_cfg, dict):
|
||||
agent_cfg = {}
|
||||
config["agent"] = agent_cfg
|
||||
agent_cfg["reasoning_effort"] = effort
|
||||
|
||||
|
||||
def _prompt_reasoning_effort_selection(efforts, current_effort=""):
    """Prompt for a reasoning effort. Returns effort, 'none', or None to keep current."""
    # Normalize: lowercase, strip, drop empties, de-dupe while preserving order.
    ordered = list(dict.fromkeys(str(effort).strip().lower() for effort in efforts if str(effort).strip()))
    if not ordered:
        return None

    def _label(effort):
        # Mark the effort the user already has configured.
        if effort == current_effort:
            return f"{effort} ← currently in use"
        return effort

    disable_label = "Disable reasoning"
    skip_label = "Skip (keep current)"

    # Default cursor: the "Disable" entry when reasoning is off, else the
    # current effort, else "medium", else the first entry.
    if current_effort == "none":
        default_idx = len(ordered)
    elif current_effort in ordered:
        default_idx = ordered.index(current_effort)
    elif "medium" in ordered:
        default_idx = ordered.index("medium")
    else:
        default_idx = 0

    # Preferred path: interactive arrow-key menu (optional dependency);
    # NotImplementedError covers non-TTY terminals.
    try:
        from simple_term_menu import TerminalMenu

        choices = [f" {_label(effort)}" for effort in ordered]
        choices.append(f" {disable_label}")
        choices.append(f" {skip_label}")
        menu = TerminalMenu(
            choices,
            cursor_index=default_idx,
            menu_cursor="-> ",
            menu_cursor_style=("fg_green", "bold"),
            menu_highlight_style=("fg_green",),
            cycle_cursor=True,
            clear_screen=False,
            title="Select reasoning effort:",
        )
        idx = menu.show()
        if idx is None:
            # User dismissed the menu (Esc/q): keep current setting.
            return None
        print()
        if idx < len(ordered):
            return ordered[idx]
        if idx == len(ordered):
            # The "Disable reasoning" entry sits right after the efforts.
            return "none"
        return None
    except (ImportError, NotImplementedError):
        pass

    # Fallback path: plain numbered prompt.
    print("Select reasoning effort:")
    for i, effort in enumerate(ordered, 1):
        print(f" {i}. {_label(effort)}")
    n = len(ordered)
    print(f" {n + 1}. {disable_label}")
    print(f" {n + 2}. {skip_label}")
    print()

    while True:
        try:
            choice = input(f"Choice [1-{n + 2}] (default: keep current): ").strip()
            if not choice:
                return None
            idx = int(choice)
            if 1 <= idx <= n:
                return ordered[idx - 1]
            if idx == n + 1:
                return "none"
            if idx == n + 2:
                return None
            print(f"Please enter 1-{n + 2}")
        except ValueError:
            print("Please enter a number")
        except (KeyboardInterrupt, EOFError):
            # Ctrl-C / Ctrl-D: keep current setting.
            return None
|
||||
|
||||
|
||||
def _model_flow_copilot(config, current_model=""):
    """GitHub Copilot flow using env vars or ``gh auth token``.

    Resolves a GitHub token (prompting for one when none is found), lists the
    account's Copilot model catalog (falling back to a curated default list),
    lets the user pick a model and — where supported — a reasoning effort,
    then persists the choice and deactivates any previously active provider.
    """
    # Imports are local to avoid import cycles between the CLI modules.
    from hermes_cli.auth import (
        PROVIDER_REGISTRY,
        _prompt_model_selection,
        _save_model_choice,
        deactivate_provider,
        resolve_api_key_provider_credentials,
    )
    from hermes_cli.config import get_env_value, save_env_value, load_config, save_config
    from hermes_cli.models import (
        fetch_api_models,
        fetch_github_model_catalog,
        github_model_reasoning_efforts,
        copilot_model_api_mode,
        normalize_copilot_model_id,
    )

    provider_id = "copilot"
    pconfig = PROVIDER_REGISTRY[provider_id]

    creds = resolve_api_key_provider_credentials(provider_id)
    api_key = creds.get("api_key", "")
    source = creds.get("source", "")

    if not api_key:
        # No token from env vars or gh CLI: offer to save one interactively.
        print("No GitHub token configured for GitHub Copilot.")
        print(" Hermes can use GITHUB_TOKEN, GH_TOKEN, or your gh CLI login.")
        try:
            new_key = input("GITHUB_TOKEN (or Enter to cancel): ").strip()
        except (KeyboardInterrupt, EOFError):
            print()
            return
        if not new_key:
            print("Cancelled.")
            return
        save_env_value("GITHUB_TOKEN", new_key)
        print("GitHub token saved.")
        print()
        # Re-resolve so the freshly saved token is picked up.
        creds = resolve_api_key_provider_credentials(provider_id)
        api_key = creds.get("api_key", "")
        source = creds.get("source", "")
    else:
        # Show where the token came from (env var vs. gh CLI session).
        if source in ("GITHUB_TOKEN", "GH_TOKEN"):
            print(f" GitHub token: {api_key[:8]}... ✓ ({source})")
        elif source == "gh auth token":
            print(" GitHub token: ✓ (from `gh auth token`)")
        else:
            print(" GitHub token: ✓")
        print()

    effective_base = pconfig.inference_base_url

    # Prefer the account-specific Copilot catalog; fall back to the generic
    # OpenAI-compatible /models listing when the catalog is unavailable.
    catalog = fetch_github_model_catalog(api_key)
    live_models = [item.get("id", "") for item in catalog if item.get("id")] if catalog else fetch_api_models(api_key, effective_base)
    # Map a previously saved (possibly legacy-format) model id onto the
    # current catalog so the picker can highlight it.
    normalized_current_model = normalize_copilot_model_id(
        current_model,
        catalog=catalog,
        api_key=api_key,
    ) or current_model
    if live_models:
        model_list = [model_id for model_id in live_models if model_id]
        print(f" Found {len(model_list)} model(s) from GitHub Copilot")
    else:
        # Neither source worked: fall back to the curated static list.
        model_list = _PROVIDER_MODELS.get(provider_id, [])
        if model_list:
            print(" ⚠ Could not auto-detect models from GitHub Copilot — showing defaults.")
            print(' Use "Enter custom model name" if you do not see your model.')

    if model_list:
        selected = _prompt_model_selection(model_list, current_model=normalized_current_model)
    else:
        try:
            selected = input("Model name: ").strip()
        except (KeyboardInterrupt, EOFError):
            selected = None

    if selected:
        selected = normalize_copilot_model_id(
            selected,
            catalog=catalog,
            api_key=api_key,
        ) or selected
        # Clear stale custom-endpoint overrides so the Copilot provider wins cleanly.
        if get_env_value("OPENAI_BASE_URL"):
            save_env_value("OPENAI_BASE_URL", "")
            save_env_value("OPENAI_API_KEY", "")

        # Offer reasoning-effort selection only for models that support it.
        initial_cfg = load_config()
        current_effort = _current_reasoning_effort(initial_cfg)
        reasoning_efforts = github_model_reasoning_efforts(
            selected,
            catalog=catalog,
            api_key=api_key,
        )
        selected_effort = None
        if reasoning_efforts:
            print(f" {selected} supports reasoning controls.")
            selected_effort = _prompt_reasoning_effort_selection(
                reasoning_efforts, current_effort=current_effort
            )

        _save_model_choice(selected)

        # Persist provider, base URL, and the model's API mode (chat vs.
        # responses) so the runtime talks to the right endpoint.
        cfg = load_config()
        model = cfg.get("model")
        if not isinstance(model, dict):
            model = {"default": model} if model else {}
        cfg["model"] = model
        model["provider"] = provider_id
        model["base_url"] = effective_base
        model["api_mode"] = copilot_model_api_mode(
            selected,
            catalog=catalog,
            api_key=api_key,
        )
        if selected_effort is not None:
            _set_reasoning_effort(cfg, selected_effort)
        save_config(cfg)
        deactivate_provider()

        print(f"Default model set to: {selected} (via {pconfig.name})")
        if reasoning_efforts:
            if selected_effort == "none":
                print("Reasoning disabled for this model.")
            elif selected_effort:
                print(f"Reasoning effort set to: {selected_effort}")
    else:
        print("No change.")
|
||||
|
||||
|
||||
def _model_flow_copilot_acp(config, current_model=""):
    """GitHub Copilot ACP flow using the local Copilot CLI.

    Verifies the Copilot CLI (or a TCP ACP backend) is reachable, offers the
    Copilot model catalog as a hint for the ACP session, then persists the
    selection with provider ``copilot-acp``.
    """
    # Imports are local to avoid import cycles between the CLI modules.
    from hermes_cli.auth import (
        PROVIDER_REGISTRY,
        _prompt_model_selection,
        _save_model_choice,
        deactivate_provider,
        get_external_process_provider_status,
        resolve_api_key_provider_credentials,
        resolve_external_process_provider_credentials,
    )
    from hermes_cli.models import (
        fetch_github_model_catalog,
        normalize_copilot_model_id,
    )
    from hermes_cli.config import load_config, save_config

    # The passed-in config is unused; this flow re-loads config itself.
    del config

    provider_id = "copilot-acp"
    pconfig = PROVIDER_REGISTRY[provider_id]

    status = get_external_process_provider_status(provider_id)
    resolved_command = status.get("resolved_command") or status.get("command") or "copilot"
    effective_base = status.get("base_url") or pconfig.inference_base_url

    print(" GitHub Copilot ACP delegates Hermes turns to `copilot --acp`.")
    print(" Hermes currently starts its own ACP subprocess for each request.")
    print(" Hermes uses your selected model as a hint for the Copilot ACP session.")
    print(f" Command: {resolved_command}")
    print(f" Backend marker: {effective_base}")
    print()

    # Fail early with guidance when neither a CLI binary nor a TCP backend
    # can be resolved.
    try:
        creds = resolve_external_process_provider_credentials(provider_id)
    except Exception as exc:
        print(f" ⚠ {exc}")
        print(" Set HERMES_COPILOT_ACP_COMMAND or COPILOT_CLI_PATH if Copilot CLI is installed elsewhere.")
        return

    effective_base = creds.get("base_url") or effective_base

    # Best-effort token (via the plain Copilot provider) purely for fetching
    # the model catalog; ACP itself does not need it.
    catalog_api_key = ""
    try:
        catalog_creds = resolve_api_key_provider_credentials("copilot")
        catalog_api_key = catalog_creds.get("api_key", "")
    except Exception:
        pass

    catalog = fetch_github_model_catalog(catalog_api_key)
    normalized_current_model = normalize_copilot_model_id(
        current_model,
        catalog=catalog,
        api_key=catalog_api_key,
    ) or current_model

    if catalog:
        model_list = [item.get("id", "") for item in catalog if item.get("id")]
        print(f" Found {len(model_list)} model(s) from GitHub Copilot")
    else:
        # Fall back to the curated Copilot defaults when the catalog fails.
        model_list = _PROVIDER_MODELS.get("copilot", [])
        if model_list:
            print(" ⚠ Could not auto-detect models from GitHub Copilot — showing defaults.")
            print(' Use "Enter custom model name" if you do not see your model.')

    if model_list:
        selected = _prompt_model_selection(
            model_list,
            current_model=normalized_current_model,
        )
    else:
        try:
            selected = input("Model name: ").strip()
        except (KeyboardInterrupt, EOFError):
            selected = None

    if not selected:
        print("No change.")
        return

    selected = normalize_copilot_model_id(
        selected,
        catalog=catalog,
        api_key=catalog_api_key,
    ) or selected
    _save_model_choice(selected)

    # Persist the ACP provider; api_mode is always chat_completions because
    # the ACP shim exposes an OpenAI-compatible surface.
    cfg = load_config()
    model = cfg.get("model")
    if not isinstance(model, dict):
        model = {"default": model} if model else {}
    cfg["model"] = model
    model["provider"] = provider_id
    model["base_url"] = effective_base
    model["api_mode"] = "chat_completions"
    save_config(cfg)
    deactivate_provider()

    print(f"Default model set to: {selected} (via {pconfig.name})")
|
||||
|
||||
|
||||
def _model_flow_kimi(config, current_model=""):
|
||||
"""Kimi / Moonshot model selection with automatic endpoint routing.
|
||||
|
||||
|
|
@ -2642,7 +3005,7 @@ For more help on a command:
|
|||
)
|
||||
chat_parser.add_argument(
|
||||
"--provider",
|
||||
choices=["auto", "openrouter", "nous", "openai-codex", "anthropic", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode"],
|
||||
choices=["auto", "openrouter", "nous", "openai-codex", "copilot-acp", "copilot", "anthropic", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode"],
|
||||
default=None,
|
||||
help="Inference provider (default: auto)"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -14,6 +14,16 @@ import urllib.error
|
|||
from difflib import get_close_matches
|
||||
from typing import Any, Optional
|
||||
|
||||
COPILOT_BASE_URL = "https://api.githubcopilot.com"
|
||||
COPILOT_MODELS_URL = f"{COPILOT_BASE_URL}/models"
|
||||
COPILOT_EDITOR_VERSION = "vscode/1.104.1"
|
||||
COPILOT_REASONING_EFFORTS_GPT5 = ["minimal", "low", "medium", "high"]
|
||||
COPILOT_REASONING_EFFORTS_O_SERIES = ["low", "medium", "high"]
|
||||
|
||||
# Backward-compatible aliases for the earlier GitHub Models-backed Copilot work.
|
||||
GITHUB_MODELS_BASE_URL = COPILOT_BASE_URL
|
||||
GITHUB_MODELS_CATALOG_URL = COPILOT_MODELS_URL
|
||||
|
||||
# (model_id, display description shown in menus)
|
||||
OPENROUTER_MODELS: list[tuple[str, str]] = [
|
||||
("anthropic/claude-opus-4.6", "recommended"),
|
||||
|
|
@ -46,6 +56,25 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
|
|||
"gpt-5.1-codex-mini",
|
||||
"gpt-5.1-codex-max",
|
||||
],
|
||||
"copilot-acp": [
|
||||
"copilot-acp",
|
||||
],
|
||||
"copilot": [
|
||||
"gpt-5.4",
|
||||
"gpt-5.4-mini",
|
||||
"gpt-5-mini",
|
||||
"gpt-5.3-codex",
|
||||
"gpt-5.2-codex",
|
||||
"gpt-4.1",
|
||||
"gpt-4o",
|
||||
"gpt-4o-mini",
|
||||
"claude-opus-4.6",
|
||||
"claude-sonnet-4.6",
|
||||
"claude-sonnet-4.5",
|
||||
"claude-haiku-4.5",
|
||||
"gemini-2.5-pro",
|
||||
"grok-code-fast-1",
|
||||
],
|
||||
"zai": [
|
||||
"glm-5",
|
||||
"glm-4.7",
|
||||
|
|
@ -160,7 +189,9 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
|
|||
_PROVIDER_LABELS = {
|
||||
"openrouter": "OpenRouter",
|
||||
"openai-codex": "OpenAI Codex",
|
||||
"copilot-acp": "GitHub Copilot ACP",
|
||||
"nous": "Nous Portal",
|
||||
"copilot": "GitHub Copilot",
|
||||
"zai": "Z.AI / GLM",
|
||||
"kimi-coding": "Kimi / Moonshot",
|
||||
"minimax": "MiniMax",
|
||||
|
|
@ -180,6 +211,12 @@ _PROVIDER_ALIASES = {
|
|||
"z-ai": "zai",
|
||||
"z.ai": "zai",
|
||||
"zhipu": "zai",
|
||||
"github": "copilot",
|
||||
"github-copilot": "copilot",
|
||||
"github-models": "copilot",
|
||||
"github-model": "copilot",
|
||||
"github-copilot-acp": "copilot-acp",
|
||||
"copilot-acp-agent": "copilot-acp",
|
||||
"kimi": "kimi-coding",
|
||||
"moonshot": "kimi-coding",
|
||||
"minimax-china": "minimax-cn",
|
||||
|
|
@ -233,7 +270,7 @@ def list_available_providers() -> list[dict[str, str]]:
|
|||
"""
|
||||
# Canonical providers in display order
|
||||
_PROVIDER_ORDER = [
|
||||
"openrouter", "nous", "openai-codex",
|
||||
"openrouter", "nous", "openai-codex", "copilot", "copilot-acp",
|
||||
"zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "anthropic", "alibaba",
|
||||
"opencode-zen", "opencode-go",
|
||||
"ai-gateway", "deepseek", "custom",
|
||||
|
|
@ -454,6 +491,17 @@ def provider_label(provider: Optional[str]) -> str:
|
|||
return _PROVIDER_LABELS.get(normalized, original or "OpenRouter")
|
||||
|
||||
|
||||
def _resolve_copilot_catalog_api_key() -> str:
|
||||
"""Best-effort GitHub token for fetching the Copilot model catalog."""
|
||||
try:
|
||||
from hermes_cli.auth import resolve_api_key_provider_credentials
|
||||
|
||||
creds = resolve_api_key_provider_credentials("copilot")
|
||||
return str(creds.get("api_key") or "").strip()
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def provider_model_ids(provider: Optional[str]) -> list[str]:
|
||||
"""Return the best known model catalog for a provider.
|
||||
|
||||
|
|
@ -467,6 +515,15 @@ def provider_model_ids(provider: Optional[str]) -> list[str]:
|
|||
from hermes_cli.codex_models import get_codex_model_ids
|
||||
|
||||
return get_codex_model_ids()
|
||||
if normalized in {"copilot", "copilot-acp"}:
|
||||
try:
|
||||
live = _fetch_github_models(_resolve_copilot_catalog_api_key())
|
||||
if live:
|
||||
return live
|
||||
except Exception:
|
||||
pass
|
||||
if normalized == "copilot-acp":
|
||||
return list(_PROVIDER_MODELS.get("copilot", []))
|
||||
if normalized == "nous":
|
||||
# Try live Nous Portal /models endpoint
|
||||
try:
|
||||
|
|
@ -545,6 +602,274 @@ def _fetch_anthropic_models(timeout: float = 5.0) -> Optional[list[str]]:
|
|||
return None
|
||||
|
||||
|
||||
def _payload_items(payload: Any) -> list[dict[str, Any]]:
|
||||
if isinstance(payload, list):
|
||||
return [item for item in payload if isinstance(item, dict)]
|
||||
if isinstance(payload, dict):
|
||||
data = payload.get("data", [])
|
||||
if isinstance(data, list):
|
||||
return [item for item in data if isinstance(item, dict)]
|
||||
return []
|
||||
|
||||
|
||||
def _extract_model_ids(payload: Any) -> list[str]:
    """Return the non-empty ``id`` fields from a models payload."""
    ids: list[str] = []
    for entry in _payload_items(payload):
        model_id = entry.get("id", "")
        if model_id:
            ids.append(model_id)
    return ids
|
||||
|
||||
|
||||
def copilot_default_headers() -> dict[str, str]:
    """Baseline request headers the Copilot API expects from editor-like clients."""
    headers = {"Editor-Version": COPILOT_EDITOR_VERSION}
    headers["User-Agent"] = "HermesAgent/1.0"
    return headers
|
||||
|
||||
|
||||
def _copilot_catalog_item_is_text_model(item: dict[str, Any]) -> bool:
|
||||
model_id = str(item.get("id") or "").strip()
|
||||
if not model_id:
|
||||
return False
|
||||
|
||||
if item.get("model_picker_enabled") is False:
|
||||
return False
|
||||
|
||||
capabilities = item.get("capabilities")
|
||||
if isinstance(capabilities, dict):
|
||||
model_type = str(capabilities.get("type") or "").strip().lower()
|
||||
if model_type and model_type != "chat":
|
||||
return False
|
||||
|
||||
supported_endpoints = item.get("supported_endpoints")
|
||||
if isinstance(supported_endpoints, list):
|
||||
normalized_endpoints = {
|
||||
str(endpoint).strip()
|
||||
for endpoint in supported_endpoints
|
||||
if str(endpoint).strip()
|
||||
}
|
||||
if normalized_endpoints and not normalized_endpoints.intersection(
|
||||
{"/chat/completions", "/responses", "/v1/messages"}
|
||||
):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def fetch_github_model_catalog(
    api_key: Optional[str] = None, timeout: float = 5.0
) -> Optional[list[dict[str, Any]]]:
    """Fetch the live GitHub Copilot model catalog for this account.

    Args:
        api_key: Optional GitHub token. When given, an authenticated request
            is attempted first; an anonymous attempt follows either way.
        timeout: Per-request timeout in seconds.

    Returns:
        De-duplicated list of chat-capable catalog entries, or ``None`` when
        every attempt fails or yields no usable models.
    """
    # Header sets to try in order: authenticated first (if a token was
    # supplied), then unauthenticated as a fallback.
    attempts: list[dict[str, str]] = []
    if api_key:
        attempts.append({
            **copilot_default_headers(),
            "Authorization": f"Bearer {api_key}",
        })
    attempts.append(copilot_default_headers())

    for headers in attempts:
        req = urllib.request.Request(COPILOT_MODELS_URL, headers=headers)
        try:
            with urllib.request.urlopen(req, timeout=timeout) as resp:
                data = json.loads(resp.read().decode())
                items = _payload_items(data)
                models: list[dict[str, Any]] = []
                seen_ids: set[str] = set()
                for item in items:
                    # Keep only selectable chat/text models, once per id.
                    if not _copilot_catalog_item_is_text_model(item):
                        continue
                    model_id = str(item.get("id") or "").strip()
                    if not model_id or model_id in seen_ids:
                        continue
                    seen_ids.add(model_id)
                    models.append(item)
                if models:
                    return models
        except Exception:
            # Network/auth/parse failures fall through to the next attempt.
            continue
    return None
|
||||
|
||||
|
||||
def _is_github_models_base_url(base_url: Optional[str]) -> bool:
    """Return True when *base_url* points at a GitHub/Copilot models endpoint."""
    candidate = (base_url or "").strip().rstrip("/").lower()
    github_prefixes = (COPILOT_BASE_URL, "https://models.github.ai/inference")
    return candidate.startswith(github_prefixes)
|
||||
|
||||
|
||||
def _fetch_github_models(api_key: Optional[str] = None, timeout: float = 5.0) -> Optional[list[str]]:
    """Return the catalog's model ids, or None when the catalog is unavailable."""
    entries = fetch_github_model_catalog(api_key=api_key, timeout=timeout)
    if entries:
        return [entry.get("id", "") for entry in entries if entry.get("id")]
    return None
|
||||
|
||||
|
||||
# Map OpenRouter-style "vendor/model" ids onto the bare ids the Copilot
# catalog exposes.  Several source ids deliberately collapse onto a single
# target (every gpt-5 chat/mini/nano variant -> "gpt-5-mini"); the o-series
# entries reroute OpenAI reasoning models onto Copilot-visible replacements —
# presumably matched by capability tier, TODO confirm against the live catalog.
_COPILOT_MODEL_ALIASES = {
    "openai/gpt-5": "gpt-5-mini",
    "openai/gpt-5-chat": "gpt-5-mini",
    "openai/gpt-5-mini": "gpt-5-mini",
    "openai/gpt-5-nano": "gpt-5-mini",
    "openai/gpt-4.1": "gpt-4.1",
    "openai/gpt-4.1-mini": "gpt-4.1",
    "openai/gpt-4.1-nano": "gpt-4.1",
    "openai/gpt-4o": "gpt-4o",
    "openai/gpt-4o-mini": "gpt-4o-mini",
    "openai/o1": "gpt-5.2",
    "openai/o1-mini": "gpt-5-mini",
    "openai/o1-preview": "gpt-5.2",
    "openai/o3": "gpt-5.3-codex",
    "openai/o3-mini": "gpt-5-mini",
    "openai/o4-mini": "gpt-5-mini",
    "anthropic/claude-opus-4.6": "claude-opus-4.6",
    "anthropic/claude-sonnet-4.6": "claude-sonnet-4.6",
    "anthropic/claude-sonnet-4.5": "claude-sonnet-4.5",
    "anthropic/claude-haiku-4.5": "claude-haiku-4.5",
}
|
||||
|
||||
|
||||
def _copilot_catalog_ids(
|
||||
catalog: Optional[list[dict[str, Any]]] = None,
|
||||
api_key: Optional[str] = None,
|
||||
) -> set[str]:
|
||||
if catalog is None and api_key:
|
||||
catalog = fetch_github_model_catalog(api_key=api_key)
|
||||
if not catalog:
|
||||
return set()
|
||||
return {
|
||||
str(item.get("id") or "").strip()
|
||||
for item in catalog
|
||||
if str(item.get("id") or "").strip()
|
||||
}
|
||||
|
||||
|
||||
def normalize_copilot_model_id(
    model_id: Optional[str],
    *,
    catalog: Optional[list[dict[str, Any]]] = None,
    api_key: Optional[str] = None,
) -> str:
    """Normalize a requested model id to a Copilot catalog id.

    Resolution order: the explicit alias table, then this account's catalog
    ids, trying the raw id, the vendor-stripped id ("vendor/model" ->
    "model"), and each of those with a trailing "-mini"/"-nano"/"-chat"
    variant suffix removed.  Falls back to the vendor-stripped id (or the
    raw id) when nothing matches; returns "" for empty input.
    """
    raw = str(model_id or "").strip()
    if not raw:
        return ""

    catalog_ids = _copilot_catalog_ids(catalog=catalog, api_key=api_key)
    alias = _COPILOT_MODEL_ALIASES.get(raw)
    if alias:
        return alias

    variant_suffixes = ("-mini", "-nano", "-chat")

    # Candidate order matters: raw first, then vendor-stripped, then raw with
    # a variant suffix removed (the original lookup priority).
    candidates = [raw]
    stripped = raw.split("/", 1)[1].strip() if "/" in raw else ""
    if stripped:
        candidates.append(stripped)
    for suffix in variant_suffixes:
        if raw.endswith(suffix):
            candidates.append(raw[: -len(suffix)])
    # Fix: ids like "vendor/model-mini" previously never tried plain "model".
    # Append the vendor-stripped, suffix-stripped form last so it can only
    # match where the old candidates all missed (backward compatible).
    if stripped:
        for suffix in variant_suffixes:
            if stripped.endswith(suffix):
                candidates.append(stripped[: -len(suffix)])

    seen: set[str] = set()
    for candidate in candidates:
        if not candidate or candidate in seen:
            continue
        seen.add(candidate)
        if candidate in _COPILOT_MODEL_ALIASES:
            return _COPILOT_MODEL_ALIASES[candidate]
        if candidate in catalog_ids:
            return candidate

    if "/" in raw:
        return raw.split("/", 1)[1].strip()
    return raw
|
||||
|
||||
|
||||
def _github_reasoning_efforts_for_model_id(model_id: str) -> list[str]:
    """Pick the reasoning-effort ladder for a model id by family heuristics."""
    lowered = (model_id or "").strip().lower()
    o_series_prefixes = ("openai/o1", "openai/o3", "openai/o4", "o1", "o3", "o4")
    if lowered.startswith(o_series_prefixes):
        return list(COPILOT_REASONING_EFFORTS_O_SERIES)
    if normalize_copilot_model_id(model_id).lower().startswith("gpt-5"):
        return list(COPILOT_REASONING_EFFORTS_GPT5)
    return []
|
||||
|
||||
|
||||
def copilot_model_api_mode(
    model_id: Optional[str],
    *,
    catalog: Optional[list[dict[str, Any]]] = None,
    api_key: Optional[str] = None,
) -> str:
    """Decide which wire API a Copilot model should be driven through.

    Prefers the catalog entry's advertised ``supported_endpoints`` (chat
    completions, then responses, then messages); falls back to a name-based
    heuristic for codex-style models, and finally to chat completions.
    """
    normalized = normalize_copilot_model_id(model_id, catalog=catalog, api_key=api_key)
    if not normalized:
        return "chat_completions"

    if catalog is None and api_key:
        catalog = fetch_github_model_catalog(api_key=api_key)

    entry = None
    for item in catalog or []:
        if item.get("id") == normalized:
            entry = item
            break

    if isinstance(entry, dict):
        endpoints = {
            str(endpoint).strip()
            for endpoint in (entry.get("supported_endpoints") or [])
            if str(endpoint).strip()
        }
        # Endpoint preference order is fixed; the first advertised match wins.
        for endpoint, mode in (
            ("/chat/completions", "chat_completions"),
            ("/responses", "codex_responses"),
            ("/v1/messages", "anthropic_messages"),
        ):
            if endpoint in endpoints:
                return mode

    codex_prefixes = ("gpt-5.4", "gpt-5.3-codex", "gpt-5.2-codex", "gpt-5.1-codex")
    if normalized.startswith(codex_prefixes):
        return "codex_responses"
    return "chat_completions"
|
||||
|
||||
|
||||
def github_model_reasoning_efforts(
    model_id: Optional[str],
    *,
    catalog: Optional[list[dict[str, Any]]] = None,
    api_key: Optional[str] = None,
) -> list[str]:
    """Return supported reasoning-effort levels for a Copilot-visible model.

    Resolution order:
      1. New-style catalog schema: ``capabilities.supports.reasoning_effort``
         (a list) — returned lowercased and de-duplicated, order preserved.
      2. Legacy schema: ``capabilities`` as a flat list of strings — a
         "reasoning" entry gates the name-based heuristic.
      3. No catalog entry at all — the name-based heuristic alone.

    Returns an empty list when the model takes no reasoning-effort knob.
    """
    normalized = normalize_copilot_model_id(model_id, catalog=catalog, api_key=api_key)
    if not normalized:
        return []

    # Locate this model's entry: prefer a caller-supplied catalog, otherwise
    # fetch one live when we have credentials.
    catalog_entry = None
    if catalog is not None:
        catalog_entry = next((item for item in catalog if item.get("id") == normalized), None)
    elif api_key:
        fetched_catalog = fetch_github_model_catalog(api_key=api_key)
        if fetched_catalog:
            catalog_entry = next(
                (item for item in fetched_catalog if item.get("id") == normalized), None
            )

    if catalog_entry is not None:
        capabilities = catalog_entry.get("capabilities")
        if isinstance(capabilities, dict):
            supports = capabilities.get("supports")
            if isinstance(supports, dict):
                efforts = supports.get("reasoning_effort")
                if isinstance(efforts, list):
                    normalized_efforts = [
                        str(effort).strip().lower()
                        for effort in efforts
                        if str(effort).strip()
                    ]
                    # dict.fromkeys de-dupes while keeping catalog order.
                    return list(dict.fromkeys(normalized_efforts))
            # A dict-shaped capabilities block without an explicit effort list
            # means the model advertises no reasoning knob.
            return []
        # Legacy flat-list schema.  Guard against an explicit
        # ``"capabilities": null`` entry: the old ``.get("capabilities", [])``
        # lookup only used its default when the key was absent, so a null
        # value crashed the set comprehension with a TypeError.
        legacy_capabilities = {
            str(capability).strip().lower()
            for capability in (capabilities or [])
            if str(capability).strip()
        }
        if "reasoning" not in legacy_capabilities:
            return []

    return _github_reasoning_efforts_for_model_id(str(model_id or normalized))
|
||||
|
||||
|
||||
def probe_api_models(
|
||||
api_key: Optional[str],
|
||||
base_url: Optional[str],
|
||||
|
|
@ -561,6 +886,16 @@ def probe_api_models(
|
|||
"used_fallback": False,
|
||||
}
|
||||
|
||||
if _is_github_models_base_url(normalized):
|
||||
models = _fetch_github_models(api_key=api_key, timeout=timeout)
|
||||
return {
|
||||
"models": models,
|
||||
"probed_url": COPILOT_MODELS_URL,
|
||||
"resolved_base_url": COPILOT_BASE_URL,
|
||||
"suggested_base_url": None,
|
||||
"used_fallback": False,
|
||||
}
|
||||
|
||||
if normalized.endswith("/v1"):
|
||||
alternate_base = normalized[:-3].rstrip("/")
|
||||
else:
|
||||
|
|
@ -574,6 +909,8 @@ def probe_api_models(
|
|||
headers: dict[str, str] = {}
|
||||
if api_key:
|
||||
headers["Authorization"] = f"Bearer {api_key}"
|
||||
if normalized.startswith(COPILOT_BASE_URL):
|
||||
headers.update(copilot_default_headers())
|
||||
|
||||
for candidate_base, is_fallback in candidates:
|
||||
url = candidate_base.rstrip("/") + "/models"
|
||||
|
|
@ -664,6 +1001,12 @@ def validate_requested_model(
|
|||
normalized = normalize_provider(provider)
|
||||
if normalized == "openrouter" and base_url and "openrouter.ai" not in base_url:
|
||||
normalized = "custom"
|
||||
requested_for_lookup = requested
|
||||
if normalized == "copilot":
|
||||
requested_for_lookup = normalize_copilot_model_id(
|
||||
requested,
|
||||
api_key=api_key,
|
||||
) or requested
|
||||
|
||||
if not requested:
|
||||
return {
|
||||
|
|
@ -685,7 +1028,7 @@ def validate_requested_model(
|
|||
probe = probe_api_models(api_key, base_url)
|
||||
api_models = probe.get("models")
|
||||
if api_models is not None:
|
||||
if requested in set(api_models):
|
||||
if requested_for_lookup in set(api_models):
|
||||
return {
|
||||
"accepted": True,
|
||||
"persist": True,
|
||||
|
|
@ -734,7 +1077,7 @@ def validate_requested_model(
|
|||
api_models = fetch_api_models(api_key, base_url)
|
||||
|
||||
if api_models is not None:
|
||||
if requested in set(api_models):
|
||||
if requested_for_lookup in set(api_models):
|
||||
# API confirmed the model exists
|
||||
return {
|
||||
"accepted": True,
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ from hermes_cli.auth import (
|
|||
resolve_nous_runtime_credentials,
|
||||
resolve_codex_runtime_credentials,
|
||||
resolve_api_key_provider_credentials,
|
||||
resolve_external_process_provider_credentials,
|
||||
)
|
||||
from hermes_cli.config import load_config
|
||||
from hermes_constants import OPENROUTER_BASE_URL
|
||||
|
|
@ -33,7 +34,24 @@ def _get_model_config() -> Dict[str, Any]:
|
|||
return {}
|
||||
|
||||
|
||||
_VALID_API_MODES = {"chat_completions", "codex_responses"}
|
||||
def _copilot_runtime_api_mode(model_cfg: Dict[str, Any], api_key: str) -> str:
    """Resolve the API mode for the Copilot provider.

    An explicitly configured ``api_mode`` wins; otherwise the configured
    default model's catalog-advertised mode is used, falling back to chat
    completions when no model is set or the lookup fails.
    """
    explicit_mode = _parse_api_mode(model_cfg.get("api_mode"))
    if explicit_mode:
        return explicit_mode

    default_model = str(model_cfg.get("default") or "").strip()
    if default_model:
        try:
            from hermes_cli.models import copilot_model_api_mode

            return copilot_model_api_mode(default_model, api_key=api_key)
        except Exception:
            # Best-effort: an unreachable catalog must not break startup.
            pass
    return "chat_completions"
|
||||
|
||||
|
||||
_VALID_API_MODES = {"chat_completions", "codex_responses", "anthropic_messages"}
|
||||
|
||||
|
||||
def _parse_api_mode(raw: Any) -> Optional[str]:
|
||||
|
|
@ -267,6 +285,19 @@ def resolve_runtime_provider(
|
|||
"requested_provider": requested_provider,
|
||||
}
|
||||
|
||||
if provider == "copilot-acp":
|
||||
creds = resolve_external_process_provider_credentials(provider)
|
||||
return {
|
||||
"provider": "copilot-acp",
|
||||
"api_mode": "chat_completions",
|
||||
"base_url": creds.get("base_url", "").rstrip("/"),
|
||||
"api_key": creds.get("api_key", ""),
|
||||
"command": creds.get("command", ""),
|
||||
"args": list(creds.get("args") or []),
|
||||
"source": creds.get("source", "process"),
|
||||
"requested_provider": requested_provider,
|
||||
}
|
||||
|
||||
# Anthropic (native Messages API)
|
||||
if provider == "anthropic":
|
||||
from agent.anthropic_adapter import resolve_anthropic_token
|
||||
|
|
@ -302,9 +333,13 @@ def resolve_runtime_provider(
|
|||
pconfig = PROVIDER_REGISTRY.get(provider)
|
||||
if pconfig and pconfig.auth_type == "api_key":
|
||||
creds = resolve_api_key_provider_credentials(provider)
|
||||
model_cfg = _get_model_config()
|
||||
api_mode = "chat_completions"
|
||||
if provider == "copilot":
|
||||
api_mode = _copilot_runtime_api_mode(model_cfg, creds.get("api_key", ""))
|
||||
return {
|
||||
"provider": provider,
|
||||
"api_mode": "chat_completions",
|
||||
"api_mode": api_mode,
|
||||
"base_url": creds.get("base_url", "").rstrip("/"),
|
||||
"api_key": creds.get("api_key", ""),
|
||||
"source": creds.get("source", "env"),
|
||||
|
|
|
|||
|
|
@ -55,6 +55,25 @@ def _set_default_model(config: Dict[str, Any], model_name: str) -> None:
|
|||
# Default model lists per provider — used as fallback when the live
|
||||
# /models endpoint can't be reached.
|
||||
_DEFAULT_PROVIDER_MODELS = {
|
||||
"copilot-acp": [
|
||||
"copilot-acp",
|
||||
],
|
||||
"copilot": [
|
||||
"gpt-5.4",
|
||||
"gpt-5.4-mini",
|
||||
"gpt-5-mini",
|
||||
"gpt-5.3-codex",
|
||||
"gpt-5.2-codex",
|
||||
"gpt-4.1",
|
||||
"gpt-4o",
|
||||
"gpt-4o-mini",
|
||||
"claude-opus-4.6",
|
||||
"claude-sonnet-4.6",
|
||||
"claude-sonnet-4.5",
|
||||
"claude-haiku-4.5",
|
||||
"gemini-2.5-pro",
|
||||
"grok-code-fast-1",
|
||||
],
|
||||
"zai": ["glm-5", "glm-4.7", "glm-4.5", "glm-4.5-flash"],
|
||||
"kimi-coding": ["kimi-k2.5", "kimi-k2-thinking", "kimi-k2-turbo-preview"],
|
||||
"minimax": ["MiniMax-M2.5", "MiniMax-M2.5-highspeed", "MiniMax-M2.1"],
|
||||
|
|
@ -64,6 +83,59 @@ _DEFAULT_PROVIDER_MODELS = {
|
|||
}
|
||||
|
||||
|
||||
def _current_reasoning_effort(config: Dict[str, Any]) -> str:
|
||||
agent_cfg = config.get("agent")
|
||||
if isinstance(agent_cfg, dict):
|
||||
return str(agent_cfg.get("reasoning_effort") or "").strip().lower()
|
||||
return ""
|
||||
|
||||
|
||||
def _set_reasoning_effort(config: Dict[str, Any], effort: str) -> None:
|
||||
agent_cfg = config.get("agent")
|
||||
if not isinstance(agent_cfg, dict):
|
||||
agent_cfg = {}
|
||||
config["agent"] = agent_cfg
|
||||
agent_cfg["reasoning_effort"] = effort
|
||||
|
||||
|
||||
def _setup_copilot_reasoning_selection(
    config: Dict[str, Any],
    model_id: str,
    prompt_choice,
    *,
    catalog: Optional[list[dict[str, Any]]] = None,
    api_key: str = "",
) -> None:
    """Interactively pick a reasoning effort for a Copilot model.

    No-op when the model advertises no effort levels.  Offers the model's
    efforts plus "disable" and "keep current" entries, pre-selecting the
    current setting when possible.
    """
    from hermes_cli.models import github_model_reasoning_efforts, normalize_copilot_model_id

    resolved_model = normalize_copilot_model_id(
        model_id,
        catalog=catalog,
        api_key=api_key,
    ) or model_id
    efforts = github_model_reasoning_efforts(resolved_model, catalog=catalog, api_key=api_key)
    if not efforts:
        return

    current_effort = _current_reasoning_effort(config)
    disable_idx = len(efforts)
    choices = [*efforts, "Disable reasoning", f"Keep current ({current_effort or 'default'})"]

    # Pre-select: disabled > current value > "medium" > "keep current".
    if current_effort == "none":
        default_idx = disable_idx
    elif current_effort in efforts:
        default_idx = efforts.index(current_effort)
    elif "medium" in efforts:
        default_idx = efforts.index("medium")
    else:
        default_idx = len(choices) - 1

    chosen_idx = prompt_choice("Select reasoning effort:", choices, default_idx)
    if chosen_idx < disable_idx:
        _set_reasoning_effort(config, efforts[chosen_idx])
    elif chosen_idx == disable_idx:
        _set_reasoning_effort(config, "none")
    # A later index means "keep current" — leave the config untouched.
|
||||
|
||||
|
||||
def _setup_provider_model_selection(config, provider_id, current_model, prompt_choice, prompt_fn):
|
||||
"""Model selection for API-key providers with live /models detection.
|
||||
|
||||
|
|
@ -71,29 +143,60 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
|
|||
hardcoded default list with a warning if the endpoint is unreachable.
|
||||
Always offers a 'Custom model' escape hatch.
|
||||
"""
|
||||
from hermes_cli.auth import PROVIDER_REGISTRY
|
||||
from hermes_cli.auth import PROVIDER_REGISTRY, resolve_api_key_provider_credentials
|
||||
from hermes_cli.config import get_env_value
|
||||
from hermes_cli.models import fetch_api_models
|
||||
from hermes_cli.models import (
|
||||
copilot_model_api_mode,
|
||||
fetch_api_models,
|
||||
fetch_github_model_catalog,
|
||||
normalize_copilot_model_id,
|
||||
)
|
||||
|
||||
pconfig = PROVIDER_REGISTRY[provider_id]
|
||||
is_copilot_catalog_provider = provider_id in {"copilot", "copilot-acp"}
|
||||
|
||||
# Resolve API key and base URL for the probe
|
||||
api_key = ""
|
||||
for ev in pconfig.api_key_env_vars:
|
||||
api_key = get_env_value(ev) or os.getenv(ev, "")
|
||||
if api_key:
|
||||
break
|
||||
base_url_env = pconfig.base_url_env_var or ""
|
||||
base_url = (get_env_value(base_url_env) if base_url_env else "") or pconfig.inference_base_url
|
||||
if is_copilot_catalog_provider:
|
||||
api_key = ""
|
||||
if provider_id == "copilot":
|
||||
creds = resolve_api_key_provider_credentials(provider_id)
|
||||
api_key = creds.get("api_key", "")
|
||||
base_url = creds.get("base_url", "") or pconfig.inference_base_url
|
||||
else:
|
||||
try:
|
||||
creds = resolve_api_key_provider_credentials("copilot")
|
||||
api_key = creds.get("api_key", "")
|
||||
except Exception:
|
||||
pass
|
||||
base_url = pconfig.inference_base_url
|
||||
catalog = fetch_github_model_catalog(api_key)
|
||||
current_model = normalize_copilot_model_id(
|
||||
current_model,
|
||||
catalog=catalog,
|
||||
api_key=api_key,
|
||||
) or current_model
|
||||
else:
|
||||
api_key = ""
|
||||
for ev in pconfig.api_key_env_vars:
|
||||
api_key = get_env_value(ev) or os.getenv(ev, "")
|
||||
if api_key:
|
||||
break
|
||||
base_url_env = pconfig.base_url_env_var or ""
|
||||
base_url = (get_env_value(base_url_env) if base_url_env else "") or pconfig.inference_base_url
|
||||
catalog = None
|
||||
|
||||
# Try live /models endpoint
|
||||
live_models = fetch_api_models(api_key, base_url)
|
||||
if is_copilot_catalog_provider and catalog:
|
||||
live_models = [item.get("id", "") for item in catalog if item.get("id")]
|
||||
else:
|
||||
live_models = fetch_api_models(api_key, base_url)
|
||||
|
||||
if live_models:
|
||||
provider_models = live_models
|
||||
print_info(f"Found {len(live_models)} model(s) from {pconfig.name} API")
|
||||
else:
|
||||
provider_models = _DEFAULT_PROVIDER_MODELS.get(provider_id, [])
|
||||
fallback_provider_id = "copilot" if provider_id == "copilot-acp" else provider_id
|
||||
provider_models = _DEFAULT_PROVIDER_MODELS.get(fallback_provider_id, [])
|
||||
if provider_models:
|
||||
print_warning(
|
||||
f"Could not auto-detect models from {pconfig.name} API — showing defaults.\n"
|
||||
|
|
@ -107,12 +210,29 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
|
|||
keep_idx = len(model_choices) - 1
|
||||
model_idx = prompt_choice("Select default model:", model_choices, keep_idx)
|
||||
|
||||
selected_model = current_model
|
||||
|
||||
if model_idx < len(provider_models):
|
||||
_set_default_model(config, provider_models[model_idx])
|
||||
selected_model = provider_models[model_idx]
|
||||
if is_copilot_catalog_provider:
|
||||
selected_model = normalize_copilot_model_id(
|
||||
selected_model,
|
||||
catalog=catalog,
|
||||
api_key=api_key,
|
||||
) or selected_model
|
||||
_set_default_model(config, selected_model)
|
||||
elif model_idx == len(provider_models):
|
||||
custom = prompt_fn("Enter model name")
|
||||
if custom:
|
||||
_set_default_model(config, custom)
|
||||
if is_copilot_catalog_provider:
|
||||
selected_model = normalize_copilot_model_id(
|
||||
custom,
|
||||
catalog=catalog,
|
||||
api_key=api_key,
|
||||
) or custom
|
||||
else:
|
||||
selected_model = custom
|
||||
_set_default_model(config, selected_model)
|
||||
else:
|
||||
# "Keep current" selected — validate it's compatible with the new
|
||||
# provider. OpenRouter-formatted names (containing "/") won't work
|
||||
|
|
@ -123,8 +243,25 @@ def _setup_provider_model_selection(config, provider_id, current_model, prompt_c
|
|||
f"and won't work with {pconfig.name}. "
|
||||
f"Switching to {provider_models[0]}."
|
||||
)
|
||||
selected_model = provider_models[0]
|
||||
_set_default_model(config, provider_models[0])
|
||||
|
||||
if provider_id == "copilot" and selected_model:
|
||||
model_cfg = _model_config_dict(config)
|
||||
model_cfg["api_mode"] = copilot_model_api_mode(
|
||||
selected_model,
|
||||
catalog=catalog,
|
||||
api_key=api_key,
|
||||
)
|
||||
config["model"] = model_cfg
|
||||
_setup_copilot_reasoning_selection(
|
||||
config,
|
||||
selected_model,
|
||||
prompt_choice,
|
||||
catalog=catalog,
|
||||
api_key=api_key,
|
||||
)
|
||||
|
||||
|
||||
def _sync_model_from_disk(config: Dict[str, Any]) -> None:
|
||||
disk_model = load_config().get("model")
|
||||
|
|
@ -673,6 +810,8 @@ def setup_model_provider(config: dict):
|
|||
resolve_codex_runtime_credentials,
|
||||
DEFAULT_CODEX_BASE_URL,
|
||||
detect_external_credentials,
|
||||
get_auth_status,
|
||||
resolve_api_key_provider_credentials,
|
||||
)
|
||||
|
||||
print_header("Inference Provider")
|
||||
|
|
@ -682,6 +821,8 @@ def setup_model_provider(config: dict):
|
|||
existing_or = get_env_value("OPENROUTER_API_KEY")
|
||||
active_oauth = get_active_provider()
|
||||
existing_custom = get_env_value("OPENAI_BASE_URL")
|
||||
copilot_status = get_auth_status("copilot")
|
||||
copilot_acp_status = get_auth_status("copilot-acp")
|
||||
|
||||
model_cfg = config.get("model") if isinstance(config.get("model"), dict) else {}
|
||||
current_config_provider = str(model_cfg.get("provider") or "").strip().lower() or None
|
||||
|
|
@ -702,7 +843,12 @@ def setup_model_provider(config: dict):
|
|||
|
||||
# Detect if any provider is already configured
|
||||
has_any_provider = bool(
|
||||
current_config_provider or active_oauth or existing_custom or existing_or
|
||||
current_config_provider
|
||||
or active_oauth
|
||||
or existing_custom
|
||||
or existing_or
|
||||
or copilot_status.get("logged_in")
|
||||
or copilot_acp_status.get("logged_in")
|
||||
)
|
||||
|
||||
# Build "keep current" label
|
||||
|
|
@ -741,6 +887,8 @@ def setup_model_provider(config: dict):
|
|||
"Alibaba Cloud / DashScope (Qwen models via Anthropic-compatible API)",
|
||||
"OpenCode Zen (35+ curated models, pay-as-you-go)",
|
||||
"OpenCode Go (open models, $10/month subscription)",
|
||||
"GitHub Copilot (uses GITHUB_TOKEN or gh auth token)",
|
||||
"GitHub Copilot ACP (spawns `copilot --acp --stdio`)",
|
||||
]
|
||||
if keep_label:
|
||||
provider_choices.append(keep_label)
|
||||
|
|
@ -1412,7 +1560,56 @@ def setup_model_provider(config: dict):
|
|||
_set_model_provider(config, "opencode-go", pconfig.inference_base_url)
|
||||
selected_base_url = pconfig.inference_base_url
|
||||
|
||||
# else: provider_idx == 14 (Keep current) — only shown when a provider already exists
|
||||
elif provider_idx == 14: # GitHub Copilot
|
||||
selected_provider = "copilot"
|
||||
print()
|
||||
print_header("GitHub Copilot")
|
||||
pconfig = PROVIDER_REGISTRY["copilot"]
|
||||
print_info("Hermes can use GITHUB_TOKEN, GH_TOKEN, or your gh CLI login.")
|
||||
print_info(f"Base URL: {pconfig.inference_base_url}")
|
||||
print()
|
||||
|
||||
copilot_creds = resolve_api_key_provider_credentials("copilot")
|
||||
source = copilot_creds.get("source", "")
|
||||
token = copilot_creds.get("api_key", "")
|
||||
if token:
|
||||
if source in ("GITHUB_TOKEN", "GH_TOKEN"):
|
||||
print_info(f"Current: {token[:8]}... ({source})")
|
||||
elif source == "gh auth token":
|
||||
print_info("Current: authenticated via `gh auth token`")
|
||||
else:
|
||||
print_info("Current: GitHub token configured")
|
||||
else:
|
||||
api_key = prompt(" GitHub token", password=True)
|
||||
if api_key:
|
||||
save_env_value("GITHUB_TOKEN", api_key)
|
||||
print_success("GitHub token saved")
|
||||
else:
|
||||
print_warning("Skipped - agent won't work without a GitHub token or gh auth login")
|
||||
|
||||
if existing_custom:
|
||||
save_env_value("OPENAI_BASE_URL", "")
|
||||
save_env_value("OPENAI_API_KEY", "")
|
||||
_set_model_provider(config, "copilot", pconfig.inference_base_url)
|
||||
selected_base_url = pconfig.inference_base_url
|
||||
|
||||
elif provider_idx == 15: # GitHub Copilot ACP
|
||||
selected_provider = "copilot-acp"
|
||||
print()
|
||||
print_header("GitHub Copilot ACP")
|
||||
pconfig = PROVIDER_REGISTRY["copilot-acp"]
|
||||
print_info("Hermes will start `copilot --acp --stdio` for each request.")
|
||||
print_info("Use HERMES_COPILOT_ACP_COMMAND or COPILOT_CLI_PATH to override the command.")
|
||||
print_info(f"Base marker: {pconfig.inference_base_url}")
|
||||
print()
|
||||
|
||||
if existing_custom:
|
||||
save_env_value("OPENAI_BASE_URL", "")
|
||||
save_env_value("OPENAI_API_KEY", "")
|
||||
_set_model_provider(config, "copilot-acp", pconfig.inference_base_url)
|
||||
selected_base_url = pconfig.inference_base_url
|
||||
|
||||
# else: provider_idx == 16 (Keep current) — only shown when a provider already exists
|
||||
# Normalize "keep current" to an explicit provider so downstream logic
|
||||
# doesn't fall back to the generic OpenRouter/static-model path.
|
||||
if selected_provider is None:
|
||||
|
|
@ -1444,6 +1641,8 @@ def setup_model_provider(config: dict):
|
|||
if _vision_needs_setup:
|
||||
_prov_names = {
|
||||
"nous-api": "Nous Portal API key",
|
||||
"copilot": "GitHub Copilot",
|
||||
"copilot-acp": "GitHub Copilot ACP",
|
||||
"zai": "Z.AI / GLM",
|
||||
"kimi-coding": "Kimi / Moonshot",
|
||||
"minimax": "MiniMax",
|
||||
|
|
@ -1583,7 +1782,15 @@ def setup_model_provider(config: dict):
|
|||
_set_default_model(config, custom)
|
||||
_update_config_for_provider("openai-codex", DEFAULT_CODEX_BASE_URL)
|
||||
_set_model_provider(config, "openai-codex", DEFAULT_CODEX_BASE_URL)
|
||||
elif selected_provider in ("zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "ai-gateway"):
|
||||
elif selected_provider == "copilot-acp":
|
||||
_setup_provider_model_selection(
|
||||
config, selected_provider, current_model,
|
||||
prompt_choice, prompt,
|
||||
)
|
||||
model_cfg = _model_config_dict(config)
|
||||
model_cfg["api_mode"] = "chat_completions"
|
||||
config["model"] = model_cfg
|
||||
elif selected_provider in ("copilot", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "ai-gateway"):
|
||||
_setup_provider_model_selection(
|
||||
config, selected_provider, current_model,
|
||||
prompt_choice, prompt,
|
||||
|
|
@ -1644,7 +1851,7 @@ def setup_model_provider(config: dict):
|
|||
# Write provider+base_url to config.yaml only after model selection is complete.
|
||||
# This prevents a race condition where the gateway picks up a new provider
|
||||
# before the model name has been updated to match.
|
||||
if selected_provider in ("zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "anthropic") and selected_base_url is not None:
|
||||
if selected_provider in ("copilot-acp", "copilot", "zai", "kimi-coding", "minimax", "minimax-cn", "kilocode", "anthropic") and selected_base_url is not None:
|
||||
_update_config_for_provider(selected_provider, selected_base_url)
|
||||
|
||||
save_config(config)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue