feat: add multi-provider authentication and inference provider selection
- Implemented a multi-provider authentication system for the Hermes Agent, supporting OAuth for Nous Portal and traditional API key methods for OpenRouter and custom endpoints.
- Enhanced CLI with commands for logging in and out of providers, allowing users to authenticate and manage their credentials easily.
- Updated configuration options to select inference providers, with detailed documentation on usage and setup.
- Improved status reporting to include authentication status and provider details, enhancing user awareness of their current configuration.
- Added new files for authentication handling and updated existing components to integrate the new provider system.
This commit is contained in:
parent
c007b9e5bd
commit
cfef34f7a6
9 changed files with 1639 additions and 113 deletions
1054
hermes_cli/auth.py
Normal file
1054
hermes_cli/auth.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@@ -12,6 +12,8 @@ Usage:
|
|||
hermes gateway install # Install gateway service
|
||||
hermes gateway uninstall # Uninstall gateway service
|
||||
hermes setup # Interactive setup wizard
|
||||
hermes login # Authenticate with Nous Portal (or other providers)
|
||||
hermes logout # Clear stored authentication
|
||||
hermes status # Show status of all components
|
||||
hermes cron # Manage cron jobs
|
||||
hermes cron list # List cron jobs
|
||||
|
|
@@ -48,6 +50,7 @@ def cmd_chat(args):
|
|||
# Build kwargs from args
|
||||
kwargs = {
|
||||
"model": args.model,
|
||||
"provider": getattr(args, "provider", None),
|
||||
"toolsets": args.toolsets,
|
||||
"verbose": args.verbose,
|
||||
"query": args.query,
|
||||
|
|
@@ -70,6 +73,18 @@ def cmd_setup(args):
|
|||
run_setup_wizard(args)
|
||||
|
||||
|
||||
def cmd_login(args):
|
||||
"""Authenticate Hermes CLI with a provider."""
|
||||
from hermes_cli.auth import login_command
|
||||
login_command(args)
|
||||
|
||||
|
||||
def cmd_logout(args):
|
||||
"""Clear provider authentication."""
|
||||
from hermes_cli.auth import logout_command
|
||||
logout_command(args)
|
||||
|
||||
|
||||
def cmd_status(args):
|
||||
"""Show status of all components."""
|
||||
from hermes_cli.status import show_status
|
||||
|
|
@@ -244,6 +259,9 @@ def cmd_update(args):
|
|||
print()
|
||||
print("✓ Update complete!")
|
||||
print()
|
||||
print("Tip: You can now log in with Nous Portal for inference:")
|
||||
print(" hermes login # Authenticate with Nous Portal")
|
||||
print()
|
||||
print("Note: If you have the gateway service running, restart it:")
|
||||
print(" hermes gateway restart")
|
||||
|
||||
|
|
@@ -263,6 +281,8 @@ Examples:
|
|||
hermes Start interactive chat
|
||||
hermes chat -q "Hello" Single query mode
|
||||
hermes setup Run setup wizard
|
||||
hermes login Authenticate with an inference provider
|
||||
hermes logout Clear stored authentication
|
||||
hermes config View configuration
|
||||
hermes config edit Edit config in $EDITOR
|
||||
hermes config set model gpt-4 Set a config value
|
||||
|
|
@@ -303,6 +323,12 @@ For more help on a command:
|
|||
"-t", "--toolsets",
|
||||
help="Comma-separated toolsets to enable"
|
||||
)
|
||||
chat_parser.add_argument(
|
||||
"--provider",
|
||||
choices=["auto", "openrouter", "nous"],
|
||||
default=None,
|
||||
help="Inference provider (default: auto)"
|
||||
)
|
||||
chat_parser.add_argument(
|
||||
"-v", "--verbose",
|
||||
action="store_true",
|
||||
|
|
@@ -365,7 +391,77 @@ For more help on a command:
|
|||
help="Reset configuration to defaults"
|
||||
)
|
||||
setup_parser.set_defaults(func=cmd_setup)
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# login command
|
||||
# =========================================================================
|
||||
login_parser = subparsers.add_parser(
|
||||
"login",
|
||||
help="Authenticate with an inference provider",
|
||||
description="Run OAuth device authorization flow for Hermes CLI"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--provider",
|
||||
choices=["nous"],
|
||||
default=None,
|
||||
help="Provider to authenticate with (default: interactive selection)"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--portal-url",
|
||||
help="Portal base URL (default: production portal)"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--inference-url",
|
||||
help="Inference API base URL (default: production inference API)"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--client-id",
|
||||
default=None,
|
||||
help="OAuth client id to use (default: hermes-cli)"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--scope",
|
||||
default=None,
|
||||
help="OAuth scope to request"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--no-browser",
|
||||
action="store_true",
|
||||
help="Do not attempt to open the browser automatically"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--timeout",
|
||||
type=float,
|
||||
default=15.0,
|
||||
help="HTTP request timeout in seconds (default: 15)"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--ca-bundle",
|
||||
help="Path to CA bundle PEM file for TLS verification"
|
||||
)
|
||||
login_parser.add_argument(
|
||||
"--insecure",
|
||||
action="store_true",
|
||||
help="Disable TLS verification (testing only)"
|
||||
)
|
||||
login_parser.set_defaults(func=cmd_login)
|
||||
|
||||
# =========================================================================
|
||||
# logout command
|
||||
# =========================================================================
|
||||
logout_parser = subparsers.add_parser(
|
||||
"logout",
|
||||
help="Clear authentication for an inference provider",
|
||||
description="Remove stored credentials and reset provider config"
|
||||
)
|
||||
logout_parser.add_argument(
|
||||
"--provider",
|
||||
choices=["nous"],
|
||||
default=None,
|
||||
help="Provider to log out from (default: active provider)"
|
||||
)
|
||||
logout_parser.set_defaults(func=cmd_logout)
|
||||
|
||||
# =========================================================================
|
||||
# status command
|
||||
# =========================================================================
|
||||
|
|
@@ -712,9 +808,9 @@ For more help on a command:
|
|||
|
||||
# Default to chat if no command specified
|
||||
if args.command is None:
|
||||
# No command = run chat
|
||||
args.query = None
|
||||
args.model = None
|
||||
args.provider = None
|
||||
args.toolsets = None
|
||||
args.verbose = False
|
||||
cmd_chat(args)
|
||||
|
|
|
|||
|
|
@@ -437,127 +437,233 @@ def run_setup_wizard(args):
|
|||
print_info("You can edit these files directly or use 'hermes config edit'")
|
||||
|
||||
# =========================================================================
|
||||
# Step 1: OpenRouter API Key (Required for tools)
|
||||
# Step 1: Inference Provider Selection
|
||||
# =========================================================================
|
||||
print_header("OpenRouter API Key (Required)")
|
||||
print_info("OpenRouter is used for vision, web scraping, and tool operations")
|
||||
print_info("even if you use a custom endpoint for your main agent.")
|
||||
print_info("Get your API key at: https://openrouter.ai/keys")
|
||||
|
||||
print_header("Inference Provider")
|
||||
print_info("Choose how to connect to your main chat model.")
|
||||
print()
|
||||
|
||||
# Detect current provider state
|
||||
from hermes_cli.auth import (
|
||||
get_active_provider, get_provider_auth_state, PROVIDER_REGISTRY,
|
||||
format_auth_error, AuthError, fetch_nous_models,
|
||||
resolve_nous_runtime_credentials, _update_config_for_provider,
|
||||
)
|
||||
existing_custom = get_env_value("OPENAI_BASE_URL")
|
||||
existing_or = get_env_value("OPENROUTER_API_KEY")
|
||||
if existing_or:
|
||||
print_info(f"Current: {existing_or[:8]}... (configured)")
|
||||
if prompt_yes_no("Update OpenRouter API key?", False):
|
||||
active_oauth = get_active_provider()
|
||||
|
||||
# Build "keep current" label
|
||||
if active_oauth and active_oauth in PROVIDER_REGISTRY:
|
||||
keep_label = f"Keep current ({PROVIDER_REGISTRY[active_oauth].name})"
|
||||
elif existing_custom:
|
||||
keep_label = f"Keep current (Custom: {existing_custom})"
|
||||
elif existing_or:
|
||||
keep_label = "Keep current (OpenRouter)"
|
||||
else:
|
||||
keep_label = "Keep current"
|
||||
|
||||
provider_choices = [
|
||||
"Login with Nous Portal (Nous Research subscription)",
|
||||
"OpenRouter API key (100+ models, pay-per-use)",
|
||||
"Custom OpenAI-compatible endpoint (self-hosted / VLLM / etc.)",
|
||||
keep_label,
|
||||
]
|
||||
|
||||
provider_idx = prompt_choice("Select your inference provider:", provider_choices, 3)
|
||||
|
||||
# Track which provider was selected for model step
|
||||
selected_provider = None # "nous", "openrouter", "custom", or None (keep)
|
||||
nous_models = [] # populated if Nous login succeeds
|
||||
|
||||
if provider_idx == 0: # Nous Portal
|
||||
selected_provider = "nous"
|
||||
print()
|
||||
print_header("Nous Portal Login")
|
||||
print_info("This will open your browser to authenticate with Nous Portal.")
|
||||
print_info("You'll need a Nous Research account with an active subscription.")
|
||||
print()
|
||||
|
||||
try:
|
||||
from hermes_cli.auth import _login_nous, ProviderConfig
|
||||
import argparse
|
||||
mock_args = argparse.Namespace(
|
||||
portal_url=None, inference_url=None, client_id=None,
|
||||
scope=None, no_browser=False, timeout=15.0,
|
||||
ca_bundle=None, insecure=False,
|
||||
)
|
||||
pconfig = PROVIDER_REGISTRY["nous"]
|
||||
_login_nous(mock_args, pconfig)
|
||||
|
||||
# Fetch models for the selection step
|
||||
try:
|
||||
creds = resolve_nous_runtime_credentials(
|
||||
min_key_ttl_seconds=5 * 60, timeout_seconds=15.0,
|
||||
)
|
||||
nous_models = fetch_nous_models(
|
||||
inference_base_url=creds.get("base_url", ""),
|
||||
api_key=creds.get("api_key", ""),
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
except SystemExit:
|
||||
print_warning("Nous Portal login was cancelled or failed.")
|
||||
print_info("You can try again later with: hermes login")
|
||||
selected_provider = None
|
||||
except Exception as e:
|
||||
print_error(f"Login failed: {e}")
|
||||
print_info("You can try again later with: hermes login")
|
||||
selected_provider = None
|
||||
|
||||
elif provider_idx == 1: # OpenRouter
|
||||
selected_provider = "openrouter"
|
||||
print()
|
||||
print_header("OpenRouter API Key")
|
||||
print_info("OpenRouter provides access to 100+ models from multiple providers.")
|
||||
print_info("Get your API key at: https://openrouter.ai/keys")
|
||||
|
||||
if existing_or:
|
||||
print_info(f"Current: {existing_or[:8]}... (configured)")
|
||||
if prompt_yes_no("Update OpenRouter API key?", False):
|
||||
api_key = prompt(" OpenRouter API key", password=True)
|
||||
if api_key:
|
||||
save_env_value("OPENROUTER_API_KEY", api_key)
|
||||
print_success("OpenRouter API key updated")
|
||||
else:
|
||||
api_key = prompt(" OpenRouter API key", password=True)
|
||||
if api_key:
|
||||
save_env_value("OPENROUTER_API_KEY", api_key)
|
||||
print_success("OpenRouter API key updated")
|
||||
else:
|
||||
api_key = prompt(" OpenRouter API key", password=True)
|
||||
if api_key:
|
||||
save_env_value("OPENROUTER_API_KEY", api_key)
|
||||
print_success("OpenRouter API key saved")
|
||||
else:
|
||||
print_warning("Skipped - some tools (vision, web scraping) won't work without this")
|
||||
|
||||
# =========================================================================
|
||||
# Step 2: Main Agent Provider
|
||||
# =========================================================================
|
||||
print_header("Main Agent Provider")
|
||||
print_info("Choose how to connect to your main chat model.")
|
||||
|
||||
existing_custom = get_env_value("OPENAI_BASE_URL")
|
||||
|
||||
provider_choices = [
|
||||
"OpenRouter (use same key for agent - recommended)",
|
||||
"Custom OpenAI-compatible endpoint (separate from OpenRouter)",
|
||||
f"Keep current" + (f" ({existing_custom})" if existing_custom else " (OpenRouter)")
|
||||
]
|
||||
|
||||
provider_idx = prompt_choice("Select your main agent provider:", provider_choices, 2)
|
||||
|
||||
if provider_idx == 0: # OpenRouter for agent too
|
||||
# Clear any custom endpoint - will use OpenRouter
|
||||
print_success("OpenRouter API key saved")
|
||||
else:
|
||||
print_warning("Skipped - agent won't work without an API key")
|
||||
|
||||
# Clear any custom endpoint if switching to OpenRouter
|
||||
if existing_custom:
|
||||
save_env_value("OPENAI_BASE_URL", "")
|
||||
save_env_value("OPENAI_API_KEY", "")
|
||||
print_success("Agent will use OpenRouter")
|
||||
|
||||
elif provider_idx == 1: # Custom endpoint
|
||||
print_info("Custom OpenAI-Compatible Endpoint Configuration:")
|
||||
|
||||
elif provider_idx == 2: # Custom endpoint
|
||||
selected_provider = "custom"
|
||||
print()
|
||||
print_header("Custom OpenAI-Compatible Endpoint")
|
||||
print_info("Works with any API that follows OpenAI's chat completions spec")
|
||||
|
||||
# Show current values if set
|
||||
|
||||
current_url = get_env_value("OPENAI_BASE_URL") or ""
|
||||
current_key = get_env_value("OPENAI_API_KEY")
|
||||
current_model = config.get('model', '')
|
||||
|
||||
|
||||
if current_url:
|
||||
print_info(f" Current URL: {current_url}")
|
||||
if current_key:
|
||||
print_info(f" Current key: {current_key[:8]}... (configured)")
|
||||
|
||||
|
||||
base_url = prompt(" API base URL (e.g., https://api.example.com/v1)", current_url)
|
||||
api_key = prompt(" API key", password=True)
|
||||
model_name = prompt(" Model name (e.g., gpt-4, claude-3-opus)", current_model)
|
||||
|
||||
|
||||
if base_url:
|
||||
save_env_value("OPENAI_BASE_URL", base_url)
|
||||
if api_key:
|
||||
save_env_value("OPENAI_API_KEY", api_key)
|
||||
if model_name:
|
||||
config['model'] = model_name
|
||||
save_env_value("LLM_MODEL", model_name)
|
||||
print_success("Custom endpoint configured")
|
||||
# else: Keep current (provider_idx == 2)
|
||||
|
||||
# else: provider_idx == 3, keep current
|
||||
|
||||
# =========================================================================
|
||||
# Step 3: Model Selection
|
||||
# Step 1b: OpenRouter API Key for tools (if not already set)
|
||||
# =========================================================================
|
||||
print_header("Default Model")
|
||||
|
||||
current_model = config.get('model', 'anthropic/claude-opus-4.6')
|
||||
print_info(f"Current: {current_model}")
|
||||
|
||||
model_choices = [
|
||||
"anthropic/claude-opus-4.6 (recommended)",
|
||||
"anthropic/claude-sonnet-4.5",
|
||||
"anthropic/claude-opus-4.5",
|
||||
"openai/gpt-5.2",
|
||||
"openai/gpt-5.2-codex",
|
||||
"google/gemini-3-pro-preview",
|
||||
"google/gemini-3-flash-preview",
|
||||
"z-ai/glm-4.7",
|
||||
"moonshotai/kimi-k2.5",
|
||||
"minimax/minimax-m2.1",
|
||||
"Custom model",
|
||||
f"Keep current ({current_model})"
|
||||
]
|
||||
|
||||
model_idx = prompt_choice("Select default model:", model_choices, 11) # Default: keep current
|
||||
|
||||
model_map = {
|
||||
0: "anthropic/claude-opus-4.6",
|
||||
1: "anthropic/claude-sonnet-4.5",
|
||||
2: "anthropic/claude-opus-4.5",
|
||||
3: "openai/gpt-5.2",
|
||||
4: "openai/gpt-5.2-codex",
|
||||
5: "google/gemini-3-pro-preview",
|
||||
6: "google/gemini-3-flash-preview",
|
||||
7: "z-ai/glm-4.7",
|
||||
8: "moonshotai/kimi-k2.5",
|
||||
9: "minimax/minimax-m2.1",
|
||||
}
|
||||
|
||||
if model_idx in model_map:
|
||||
config['model'] = model_map[model_idx]
|
||||
# Also update LLM_MODEL in .env so it stays in sync (cli.py reads .env first)
|
||||
save_env_value("LLM_MODEL", model_map[model_idx])
|
||||
elif model_idx == 10: # Custom
|
||||
custom = prompt("Enter model name (e.g., anthropic/claude-opus-4.6)")
|
||||
if custom:
|
||||
config['model'] = custom
|
||||
save_env_value("LLM_MODEL", custom)
|
||||
# else: Keep current (model_idx == 11)
|
||||
# Tools (vision, web, MoA) use OpenRouter independently of the main provider.
|
||||
# Prompt for OpenRouter key if not set and a non-OpenRouter provider was chosen.
|
||||
if selected_provider in ("nous", "custom") and not get_env_value("OPENROUTER_API_KEY"):
|
||||
print()
|
||||
print_header("OpenRouter API Key (for tools)")
|
||||
print_info("Tools like vision analysis, web search, and MoA use OpenRouter")
|
||||
print_info("independently of your main inference provider.")
|
||||
print_info("Get your API key at: https://openrouter.ai/keys")
|
||||
|
||||
api_key = prompt(" OpenRouter API key (optional, press Enter to skip)", password=True)
|
||||
if api_key:
|
||||
save_env_value("OPENROUTER_API_KEY", api_key)
|
||||
print_success("OpenRouter API key saved (for tools)")
|
||||
else:
|
||||
print_info("Skipped - some tools (vision, web scraping) won't work without this")
|
||||
|
||||
# =========================================================================
|
||||
# Step 2: Model Selection (adapts based on provider)
|
||||
# =========================================================================
|
||||
if selected_provider != "custom": # Custom already prompted for model name
|
||||
print_header("Default Model")
|
||||
|
||||
current_model = config.get('model', 'anthropic/claude-opus-4.6')
|
||||
print_info(f"Current: {current_model}")
|
||||
|
||||
if selected_provider == "nous" and nous_models:
|
||||
# Dynamic model list from Nous Portal
|
||||
model_choices = [f"{m}" for m in nous_models]
|
||||
model_choices.append("Custom model")
|
||||
model_choices.append(f"Keep current ({current_model})")
|
||||
|
||||
# Post-login validation: warn if current model might not be available
|
||||
if current_model and current_model not in nous_models:
|
||||
print_warning(f"Your current model ({current_model}) may not be available via Nous Portal.")
|
||||
print_info("Select a model from the list, or keep current to use it anyway.")
|
||||
print()
|
||||
|
||||
model_idx = prompt_choice("Select default model:", model_choices, len(model_choices) - 1)
|
||||
|
||||
if model_idx < len(nous_models):
|
||||
config['model'] = nous_models[model_idx]
|
||||
save_env_value("LLM_MODEL", nous_models[model_idx])
|
||||
elif model_idx == len(nous_models): # Custom
|
||||
custom = prompt("Enter model name")
|
||||
if custom:
|
||||
config['model'] = custom
|
||||
save_env_value("LLM_MODEL", custom)
|
||||
# else: keep current
|
||||
else:
|
||||
# Static list for OpenRouter / fallback
|
||||
model_choices = [
|
||||
"anthropic/claude-opus-4.6 (recommended)",
|
||||
"anthropic/claude-sonnet-4.5",
|
||||
"anthropic/claude-opus-4.5",
|
||||
"openai/gpt-5.2",
|
||||
"openai/gpt-5.2-codex",
|
||||
"google/gemini-3-pro-preview",
|
||||
"google/gemini-3-flash-preview",
|
||||
"z-ai/glm-4.7",
|
||||
"moonshotai/kimi-k2.5",
|
||||
"minimax/minimax-m2.1",
|
||||
"Custom model",
|
||||
f"Keep current ({current_model})"
|
||||
]
|
||||
|
||||
model_idx = prompt_choice("Select default model:", model_choices, 11)
|
||||
|
||||
model_map = {
|
||||
0: "anthropic/claude-opus-4.6",
|
||||
1: "anthropic/claude-sonnet-4.5",
|
||||
2: "anthropic/claude-opus-4.5",
|
||||
3: "openai/gpt-5.2",
|
||||
4: "openai/gpt-5.2-codex",
|
||||
5: "google/gemini-3-pro-preview",
|
||||
6: "google/gemini-3-flash-preview",
|
||||
7: "z-ai/glm-4.7",
|
||||
8: "moonshotai/kimi-k2.5",
|
||||
9: "minimax/minimax-m2.1",
|
||||
}
|
||||
|
||||
if model_idx in model_map:
|
||||
config['model'] = model_map[model_idx]
|
||||
save_env_value("LLM_MODEL", model_map[model_idx])
|
||||
elif model_idx == 10: # Custom
|
||||
custom = prompt("Enter model name (e.g., anthropic/claude-opus-4.6)")
|
||||
if custom:
|
||||
config['model'] = custom
|
||||
save_env_value("LLM_MODEL", custom)
|
||||
# else: Keep current (model_idx == 11)
|
||||
|
||||
# =========================================================================
|
||||
# Step 4: Terminal Backend
|
||||
|
|
|
|||
|
|
@@ -40,6 +40,25 @@ def redact_key(key: str) -> str:
|
|||
return key[:4] + "..." + key[-4:]
|
||||
|
||||
|
||||
def _format_iso_timestamp(value) -> str:
|
||||
"""Format ISO timestamps for status output, converting to local timezone."""
|
||||
if not value or not isinstance(value, str):
|
||||
return "(unknown)"
|
||||
from datetime import datetime, timezone
|
||||
text = value.strip()
|
||||
if not text:
|
||||
return "(unknown)"
|
||||
if text.endswith("Z"):
|
||||
text = text[:-1] + "+00:00"
|
||||
try:
|
||||
parsed = datetime.fromisoformat(text)
|
||||
if parsed.tzinfo is None:
|
||||
parsed = parsed.replace(tzinfo=timezone.utc)
|
||||
except Exception:
|
||||
return value
|
||||
return parsed.astimezone().strftime("%Y-%m-%d %H:%M:%S %Z")
|
||||
|
||||
|
||||
def show_status(args):
|
||||
"""Show status of all Hermes Agent components."""
|
||||
show_all = getattr(args, 'all', False)
|
||||
|
|
@@ -85,7 +104,34 @@ def show_status(args):
|
|||
has_key = bool(value)
|
||||
display = redact_key(value) if not show_all else value
|
||||
print(f" {name:<12} {check_mark(has_key)} {display}")
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# Auth Providers (OAuth)
|
||||
# =========================================================================
|
||||
print()
|
||||
print(color("◆ Auth Providers", Colors.CYAN, Colors.BOLD))
|
||||
|
||||
try:
|
||||
from hermes_cli.auth import get_nous_auth_status
|
||||
nous_status = get_nous_auth_status()
|
||||
except Exception:
|
||||
nous_status = {}
|
||||
|
||||
nous_logged_in = bool(nous_status.get("logged_in"))
|
||||
print(
|
||||
f" {'Nous Portal':<12} {check_mark(nous_logged_in)} "
|
||||
f"{'logged in' if nous_logged_in else 'not logged in (run: hermes login)'}"
|
||||
)
|
||||
if nous_logged_in:
|
||||
portal_url = nous_status.get("portal_base_url") or "(unknown)"
|
||||
access_exp = _format_iso_timestamp(nous_status.get("access_expires_at"))
|
||||
key_exp = _format_iso_timestamp(nous_status.get("agent_key_expires_at"))
|
||||
refresh_label = "yes" if nous_status.get("has_refresh_token") else "no"
|
||||
print(f" Portal URL: {portal_url}")
|
||||
print(f" Access exp: {access_exp}")
|
||||
print(f" Key exp: {key_exp}")
|
||||
print(f" Refresh: {refresh_label}")
|
||||
|
||||
# =========================================================================
|
||||
# Terminal Configuration
|
||||
# =========================================================================
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue