Merge origin/main into hermes/hermes-5d160594

This commit is contained in:
teknium1 2026-03-14 19:34:05 -07:00
commit 3229e434b8
78 changed files with 3762 additions and 395 deletions

View file

@ -821,7 +821,7 @@ def migrate_config(interactive: bool = True, quiet: bool = False) -> Dict[str, A
print(f" ✓ Saved {name}")
print()
else:
print(" Set later with: hermes config set KEY VALUE")
print(" Set later with: hermes config set <key> <value>")
# Check for missing config fields
missing_config = get_missing_config_fields()
@ -1265,7 +1265,7 @@ def show_config():
print()
print(color("" * 60, Colors.DIM))
print(color(" hermes config edit # Edit config file", Colors.DIM))
print(color(" hermes config set KEY VALUE", Colors.DIM))
print(color(" hermes config set <key> <value>", Colors.DIM))
print(color(" hermes setup # Run setup wizard", Colors.DIM))
print()
@ -1391,7 +1391,7 @@ def config_command(args):
key = getattr(args, 'key', None)
value = getattr(args, 'value', None)
if not key or not value:
print("Usage: hermes config set KEY VALUE")
print("Usage: hermes config set <key> <value>")
print()
print("Examples:")
print(" hermes config set model anthropic/claude-sonnet-4")
@ -1506,7 +1506,7 @@ def config_command(args):
print("Available commands:")
print(" hermes config Show current configuration")
print(" hermes config edit Open config in editor")
print(" hermes config set K V Set a config value")
print(" hermes config set <key> <value> Set a config value")
print(" hermes config check Check for missing/outdated config")
print(" hermes config migrate Update config with new options")
print(" hermes config path Show config file path")

View file

@ -251,7 +251,6 @@ StandardError=journal
WantedBy=default.target
"""
def _normalize_service_definition(text: str) -> str:
return "\n".join(line.rstrip() for line in text.strip().splitlines())
@ -279,6 +278,65 @@ def refresh_systemd_unit_if_needed() -> bool:
return True
def _print_linger_enable_warning(username: str, detail: str | None = None) -> None:
    """Print guidance for manually enabling systemd linger.

    Shown when linger is off (or auto-enable failed), since without linger the
    user-level gateway service stops when the login session ends.

    Args:
        username: Account name to substitute into the ``loginctl`` command.
        detail: Optional reason the automatic enable attempt failed.
    """
    messages = [
        "",
        "⚠ Linger not enabled — gateway may stop when you close this terminal.",
    ]
    if detail:
        messages.append(f" Auto-enable failed: {detail}")
    messages += [
        "",
        " On headless servers (VPS, cloud instances) run:",
        f" sudo loginctl enable-linger {username}",
        "",
        " Then restart the gateway:",
        f" systemctl --user restart {SERVICE_NAME}.service",
        "",
    ]
    for message in messages:
        print(message)
def _ensure_linger_enabled() -> None:
    """Enable linger when possible so the user gateway survives logout.

    Linux-only; a no-op elsewhere. Never raises — failures fall through to a
    printed warning with manual instructions instead.
    """
    if not is_linux():
        return

    import getpass
    import shutil

    user = getpass.getuser()

    # Fast path: the per-user linger marker file already exists.
    if Path(f"/var/lib/systemd/linger/{user}").exists():
        print("✓ Systemd linger is enabled (service survives logout)")
        return

    # Slower path: ask systemd directly.
    enabled, status_detail = get_systemd_linger_status()
    if enabled is True:
        print("✓ Systemd linger is enabled (service survives logout)")
        return

    if shutil.which("loginctl") is None:
        _print_linger_enable_warning(user, status_detail or "loginctl not found")
        return

    print("Enabling linger so the gateway survives SSH logout...")
    try:
        proc = subprocess.run(
            ["loginctl", "enable-linger", user],
            capture_output=True,
            text=True,
            check=False,
        )
    except Exception as exc:
        # e.g. OSError spawning the process — warn and bail.
        _print_linger_enable_warning(user, str(exc))
        return

    if proc.returncode == 0:
        print("✓ Linger enabled — gateway will persist after logout")
        return

    # Non-zero exit: surface whatever diagnostic output is available.
    failure = (proc.stderr or proc.stdout or f"exit {proc.returncode}").strip()
    _print_linger_enable_warning(user, failure or status_detail)
def systemd_install(force: bool = False):
unit_path = get_systemd_unit_path()
@ -302,7 +360,7 @@ def systemd_install(force: bool = False):
print(f" hermes gateway status # Check status")
print(f" journalctl --user -u {SERVICE_NAME} -f # View logs")
print()
print_systemd_linger_guidance()
_ensure_linger_enabled()
def systemd_uninstall():
subprocess.run(["systemctl", "--user", "stop", SERVICE_NAME], check=False)
@ -367,6 +425,13 @@ def systemd_status(deep: bool = False):
print("✗ Gateway service is stopped")
print(" Run: hermes gateway start")
runtime_lines = _runtime_health_lines()
if runtime_lines:
print()
print("Recent gateway health:")
for line in runtime_lines:
print(f" {line}")
if deep:
print_systemd_linger_guidance()
else:
@ -693,6 +758,35 @@ def _platform_status(platform: dict) -> str:
return "not configured"
def _runtime_health_lines() -> list[str]:
"""Summarize the latest persisted gateway runtime health state."""
try:
from gateway.status import read_runtime_status
except Exception:
return []
state = read_runtime_status()
if not state:
return []
lines: list[str] = []
gateway_state = state.get("gateway_state")
exit_reason = state.get("exit_reason")
platforms = state.get("platforms", {}) or {}
for platform, pdata in platforms.items():
if pdata.get("state") == "fatal":
message = pdata.get("error_message") or "unknown error"
lines.append(f"{platform}: {message}")
if gateway_state == "startup_failed" and exit_reason:
lines.append(f"⚠ Last startup issue: {exit_reason}")
elif gateway_state == "stopped" and exit_reason:
lines.append(f"⚠ Last shutdown reason: {exit_reason}")
return lines
def _setup_standard_platform(platform: dict):
"""Interactive setup for Telegram, Discord, or Slack."""
emoji = platform["emoji"]
@ -1186,11 +1280,23 @@ def gateway_command(args):
if pids:
print(f"✓ Gateway is running (PID: {', '.join(map(str, pids))})")
print(" (Running manually, not as a system service)")
runtime_lines = _runtime_health_lines()
if runtime_lines:
print()
print("Recent gateway health:")
for line in runtime_lines:
print(f" {line}")
print()
print("To install as a service:")
print(" hermes gateway install")
else:
print("✗ Gateway is not running")
runtime_lines = _runtime_health_lines()
if runtime_lines:
print()
print("Recent gateway health:")
for line in runtime_lines:
print(f" {line}")
print()
print("To start:")
print(" hermes gateway # Run in foreground")

View file

@ -2056,7 +2056,15 @@ def cmd_update(args):
check=True
)
branch = result.stdout.strip()
# Fall back to main if the current branch doesn't exist on the remote
verify = subprocess.run(
git_cmd + ["rev-parse", "--verify", f"origin/{branch}"],
cwd=PROJECT_ROOT, capture_output=True, text=True,
)
if verify.returncode != 0:
branch = "main"
# Check if there are updates
result = subprocess.run(
git_cmd + ["rev-list", f"HEAD..origin/{branch}", "--count"],
@ -2736,7 +2744,7 @@ For more help on a command:
skills_install = skills_subparsers.add_parser("install", help="Install a skill")
skills_install.add_argument("identifier", help="Skill identifier (e.g. openai/skills/skill-creator)")
skills_install.add_argument("--category", default="", help="Category folder to install into")
skills_install.add_argument("--force", action="store_true", help="Install despite caution verdict")
skills_install.add_argument("--force", "--yes", "-y", dest="force", action="store_true", help="Install despite blocked scan verdict")
skills_inspect = skills_subparsers.add_parser("inspect", help="Preview a skill without installing")
skills_inspect.add_argument("identifier", help="Skill identifier")

View file

@ -5,6 +5,7 @@ from __future__ import annotations
import os
from typing import Any, Dict, Optional
from hermes_cli import auth as auth_mod
from hermes_cli.auth import (
AuthError,
PROVIDER_REGISTRY,
@ -18,6 +19,10 @@ from hermes_cli.config import load_config
from hermes_constants import OPENROUTER_BASE_URL
def _normalize_custom_provider_name(value: str) -> str:
return value.strip().lower().replace(" ", "-")
def _get_model_config() -> Dict[str, Any]:
config = load_config()
model_cfg = config.get("model")
@ -47,6 +52,82 @@ def resolve_requested_provider(requested: Optional[str] = None) -> str:
return "auto"
def _get_named_custom_provider(requested_provider: str) -> Optional[Dict[str, Any]]:
    """Find a saved custom provider entry matching *requested_provider*.

    Returns a dict with stripped ``name``/``base_url``/``api_key`` strings, or
    ``None`` when the request does not resolve to a saved custom provider.
    """
    requested_norm = _normalize_custom_provider_name(requested_provider or "")
    # "custom"/"auto"/empty are sentinel values, never a saved provider name.
    if not requested_norm or requested_norm in ("custom", "auto"):
        return None

    # Raw names should only map to custom providers when they are not already
    # valid built-in providers or aliases. Explicit menu keys like
    # ``custom:local`` always target the saved custom provider.
    if not requested_norm.startswith("custom:"):
        try:
            auth_mod.resolve_provider(requested_norm)
        except AuthError:
            pass  # Not a built-in provider — fall through to custom lookup.
        else:
            return None

    saved_providers = load_config().get("custom_providers")
    if not isinstance(saved_providers, list):
        return None

    for entry in saved_providers:
        if not isinstance(entry, dict):
            continue
        name = entry.get("name")
        base_url = entry.get("base_url")
        if not (isinstance(name, str) and isinstance(base_url, str)):
            continue
        name_norm = _normalize_custom_provider_name(name)
        # Accept either the bare normalized name or its explicit menu key.
        if requested_norm in (name_norm, f"custom:{name_norm}"):
            return {
                "name": name.strip(),
                "base_url": base_url.strip(),
                "api_key": str(entry.get("api_key", "") or "").strip(),
            }
    return None
def _resolve_named_custom_runtime(
    *,
    requested_provider: str,
    explicit_api_key: Optional[str] = None,
    explicit_base_url: Optional[str] = None,
) -> Optional[Dict[str, Any]]:
    """Build runtime credentials for a saved custom provider, if one matches.

    Explicit arguments take precedence over the saved entry; environment
    variables are the final API-key fallback. Returns ``None`` when no saved
    custom provider matches or no base URL can be determined.
    """
    saved = _get_named_custom_provider(requested_provider)
    if saved is None:
        return None

    # Base URL precedence: explicit argument, then the saved value.
    base_url = ((explicit_base_url or "").strip() or saved.get("base_url", "")).rstrip("/")
    if not base_url:
        return None

    # API-key precedence: explicit > saved > OPENAI_API_KEY > OPENROUTER_API_KEY.
    api_key = (
        (explicit_api_key or "").strip()
        or saved.get("api_key", "")
        or os.getenv("OPENAI_API_KEY", "").strip()
        or os.getenv("OPENROUTER_API_KEY", "").strip()
    )

    return {
        "provider": "openrouter",
        "api_mode": "chat_completions",
        "base_url": base_url,
        "api_key": api_key,
        "source": f"custom_provider:{saved.get('name', requested_provider)}",
    }
def _resolve_openrouter_runtime(
*,
requested_provider: str,
@ -122,6 +203,15 @@ def resolve_runtime_provider(
"""Resolve runtime provider credentials for agent execution."""
requested_provider = resolve_requested_provider(requested)
custom_runtime = _resolve_named_custom_runtime(
requested_provider=requested_provider,
explicit_api_key=explicit_api_key,
explicit_base_url=explicit_base_url,
)
if custom_runtime:
custom_runtime["requested_provider"] = requested_provider
return custom_runtime
provider = resolve_provider(
requested_provider,
explicit_api_key=explicit_api_key,

View file

@ -460,12 +460,41 @@ def _print_setup_summary(config: dict, hermes_home):
tool_status = []
# OpenRouter (required for vision, moa)
# Vision — works with OpenRouter, Nous OAuth, Codex OAuth, or OpenAI endpoint
_has_vision = False
if get_env_value("OPENROUTER_API_KEY"):
_has_vision = True
else:
try:
_vauth_path = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes")) / "auth.json"
if _vauth_path.is_file():
import json as _vjson
_vauth = _vjson.loads(_vauth_path.read_text())
if _vauth.get("active_provider") == "nous":
_np = _vauth.get("providers", {}).get("nous", {})
if _np.get("agent_key") or _np.get("access_token"):
_has_vision = True
elif _vauth.get("active_provider") == "openai-codex":
_cp = _vauth.get("providers", {}).get("openai-codex", {})
if _cp.get("tokens", {}).get("access_token"):
_has_vision = True
except Exception:
pass
if not _has_vision:
_oai_base = get_env_value("OPENAI_BASE_URL") or ""
if get_env_value("OPENAI_API_KEY") and "api.openai.com" in _oai_base.lower():
_has_vision = True
if _has_vision:
tool_status.append(("Vision (image analysis)", True, None))
else:
tool_status.append(("Vision (image analysis)", False, "run 'hermes setup' to configure"))
# Mixture of Agents — requires OpenRouter specifically (calls multiple models)
if get_env_value("OPENROUTER_API_KEY"):
tool_status.append(("Mixture of Agents", True, None))
else:
tool_status.append(("Vision (image analysis)", False, "OPENROUTER_API_KEY"))
tool_status.append(("Mixture of Agents", False, "OPENROUTER_API_KEY"))
# Firecrawl (web tools)
@ -602,7 +631,7 @@ def _print_setup_summary(config: dict, hermes_home):
print(
f" {color('hermes config edit', Colors.GREEN)} Open config in your editor"
)
print(f" {color('hermes config set KEY VALUE', Colors.GREEN)}")
print(f" {color('hermes config set <key> <value>', Colors.GREEN)}")
print(f" Set a specific value")
print()
print(f" Or edit the files directly:")
@ -1246,35 +1275,112 @@ def setup_model_provider(config: dict):
elif existing_or:
selected_provider = "openrouter"
# ── OpenRouter API Key for tools (if not already set) ──
# Tools (vision, web, MoA) use OpenRouter independently of the main provider.
# Prompt for OpenRouter key if not set and a non-OpenRouter provider was chosen.
if selected_provider in (
"nous",
"openai-codex",
"custom",
"zai",
"kimi-coding",
"minimax",
"minimax-cn",
"anthropic",
) and not get_env_value("OPENROUTER_API_KEY"):
print()
print_header("OpenRouter API Key (for tools)")
print_info("Tools like vision analysis, web search, and MoA use OpenRouter")
print_info("independently of your main inference provider.")
print_info("Get your API key at: https://openrouter.ai/keys")
# ── Vision & Image Analysis Setup ──
# Vision requires a multimodal-capable provider. Check whether the user's
# chosen provider already covers it — if so, skip the prompt entirely.
_vision_needs_setup = True
api_key = prompt(
" OpenRouter API key (optional, press Enter to skip)", password=True
)
if api_key:
save_env_value("OPENROUTER_API_KEY", api_key)
print_success("OpenRouter API key saved (for tools)")
else:
print_info(
"Skipped - some tools (vision, web scraping) won't work without this"
if selected_provider == "openrouter":
# OpenRouter → Gemini for vision, already configured
_vision_needs_setup = False
elif selected_provider == "nous":
# Nous Portal OAuth → Gemini via Nous, already configured
_vision_needs_setup = False
elif selected_provider == "openai-codex":
# Codex OAuth → gpt-5.3-codex supports vision
_vision_needs_setup = False
elif selected_provider == "custom":
_custom_base = (get_env_value("OPENAI_BASE_URL") or "").lower()
if "api.openai.com" in _custom_base:
# Direct OpenAI endpoint — show vision model picker
print()
print_header("Vision Model")
print_info("Your OpenAI endpoint supports vision. Pick a model for image analysis:")
_oai_vision_models = ["gpt-4o", "gpt-4o-mini", "gpt-4.1", "gpt-4.1-mini", "gpt-4.1-nano"]
_vm_choices = _oai_vision_models + ["Keep default (gpt-4o-mini)"]
_vm_idx = prompt_choice("Select vision model:", _vm_choices, len(_vm_choices) - 1)
_selected_vision_model = (
_oai_vision_models[_vm_idx]
if _vm_idx < len(_oai_vision_models)
else "gpt-4o-mini"
)
save_env_value("AUXILIARY_VISION_MODEL", _selected_vision_model)
print_success(f"Vision model set to {_selected_vision_model}")
_vision_needs_setup = False
# Even for providers without native vision, check if existing credentials
# from a previous setup already cover it (e.g. user had OpenRouter before
# switching to z.ai)
if _vision_needs_setup:
if get_env_value("OPENROUTER_API_KEY"):
_vision_needs_setup = False
else:
# Check for Nous Portal OAuth in auth.json
try:
_auth_path = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes")) / "auth.json"
if _auth_path.is_file():
import json as _json
_auth_data = _json.loads(_auth_path.read_text())
if _auth_data.get("active_provider") == "nous":
_nous_p = _auth_data.get("providers", {}).get("nous", {})
if _nous_p.get("agent_key") or _nous_p.get("access_token"):
_vision_needs_setup = False
except Exception:
pass
if _vision_needs_setup:
_prov_names = {
"nous-api": "Nous Portal API key",
"zai": "Z.AI / GLM",
"kimi-coding": "Kimi / Moonshot",
"minimax": "MiniMax",
"minimax-cn": "MiniMax CN",
"anthropic": "Anthropic",
"custom": "your custom endpoint",
}
_prov_display = _prov_names.get(selected_provider, selected_provider or "your provider")
print()
print_header("Vision & Image Analysis (optional)")
print_info(f"Vision requires a multimodal-capable provider. {_prov_display}")
print_info("doesn't natively support it. Choose how to enable vision,")
print_info("or skip to configure later.")
print()
_vision_choices = [
"OpenRouter — uses Gemini (free tier at openrouter.ai/keys)",
"OpenAI — enter API key & choose a vision model",
"Skip for now",
]
_vision_idx = prompt_choice("Configure vision:", _vision_choices, 2)
if _vision_idx == 0: # OpenRouter
_or_key = prompt(" OpenRouter API key", password=True)
if _or_key:
save_env_value("OPENROUTER_API_KEY", _or_key)
print_success("OpenRouter key saved — vision will use Gemini")
else:
print_info("Skipped — vision won't be available")
elif _vision_idx == 1: # OpenAI
_oai_key = prompt(" OpenAI API key", password=True)
if _oai_key:
save_env_value("OPENAI_API_KEY", _oai_key)
save_env_value("OPENAI_BASE_URL", "https://api.openai.com/v1")
_oai_vision_models = ["gpt-4o", "gpt-4o-mini", "gpt-4.1", "gpt-4.1-mini", "gpt-4.1-nano"]
_vm_choices = _oai_vision_models + ["Use default (gpt-4o-mini)"]
_vm_idx = prompt_choice("Select vision model:", _vm_choices, 0)
_selected_vision_model = (
_oai_vision_models[_vm_idx]
if _vm_idx < len(_oai_vision_models)
else "gpt-4o-mini"
)
save_env_value("AUXILIARY_VISION_MODEL", _selected_vision_model)
print_success(f"Vision configured with OpenAI ({_selected_vision_model})")
else:
print_info("Skipped — vision won't be available")
else:
print_info("Skipped — add later with 'hermes config set OPENROUTER_API_KEY ...'")
# ── Model Selection (adapts based on provider) ──
if selected_provider != "custom": # Custom already prompted for model name

View file

@ -1050,11 +1050,11 @@ def handle_skills_slash(cmd: str, console: Optional[Console] = None) -> None:
elif action == "install":
if not args:
c.print("[bold red]Usage:[/] /skills install <identifier> [--category <cat>] [--force]\n")
c.print("[bold red]Usage:[/] /skills install <identifier> [--category <cat>] [--force|--yes]\n")
return
identifier = args[0]
category = ""
force = "--force" in args
force = any(flag in args for flag in ("--force", "--yes", "-y"))
for i, a in enumerate(args):
if a == "--category" and i + 1 < len(args):
category = args[i + 1]