Merge pull request #1303 from NousResearch/hermes/hermes-aa653753
feat(skills): integrate skills.sh as a hub source
This commit is contained in:
commit
681f1068ea
9 changed files with 1413 additions and 47 deletions
|
|
@ -2682,7 +2682,7 @@ For more help on a command:
|
||||||
skills_parser = subparsers.add_parser(
|
skills_parser = subparsers.add_parser(
|
||||||
"skills",
|
"skills",
|
||||||
help="Search, install, configure, and manage skills",
|
help="Search, install, configure, and manage skills",
|
||||||
description="Search, install, inspect, audit, configure, and manage skills from GitHub, ClawHub, and other registries."
|
description="Search, install, inspect, audit, configure, and manage skills from skills.sh, well-known agent skill endpoints, GitHub, ClawHub, and other registries."
|
||||||
)
|
)
|
||||||
skills_subparsers = skills_parser.add_subparsers(dest="skills_action")
|
skills_subparsers = skills_parser.add_subparsers(dest="skills_action")
|
||||||
|
|
||||||
|
|
@ -2690,12 +2690,12 @@ For more help on a command:
|
||||||
skills_browse.add_argument("--page", type=int, default=1, help="Page number (default: 1)")
|
skills_browse.add_argument("--page", type=int, default=1, help="Page number (default: 1)")
|
||||||
skills_browse.add_argument("--size", type=int, default=20, help="Results per page (default: 20)")
|
skills_browse.add_argument("--size", type=int, default=20, help="Results per page (default: 20)")
|
||||||
skills_browse.add_argument("--source", default="all",
|
skills_browse.add_argument("--source", default="all",
|
||||||
choices=["all", "official", "github", "clawhub", "lobehub"],
|
choices=["all", "official", "skills-sh", "well-known", "github", "clawhub", "lobehub"],
|
||||||
help="Filter by source (default: all)")
|
help="Filter by source (default: all)")
|
||||||
|
|
||||||
skills_search = skills_subparsers.add_parser("search", help="Search skill registries")
|
skills_search = skills_subparsers.add_parser("search", help="Search skill registries")
|
||||||
skills_search.add_argument("query", help="Search query")
|
skills_search.add_argument("query", help="Search query")
|
||||||
skills_search.add_argument("--source", default="all", choices=["all", "official", "github", "clawhub", "lobehub"])
|
skills_search.add_argument("--source", default="all", choices=["all", "official", "skills-sh", "well-known", "github", "clawhub", "lobehub"])
|
||||||
skills_search.add_argument("--limit", type=int, default=10, help="Max results")
|
skills_search.add_argument("--limit", type=int, default=10, help="Max results")
|
||||||
|
|
||||||
skills_install = skills_subparsers.add_parser("install", help="Install a skill")
|
skills_install = skills_subparsers.add_parser("install", help="Install a skill")
|
||||||
|
|
@ -2709,6 +2709,12 @@ For more help on a command:
|
||||||
skills_list = skills_subparsers.add_parser("list", help="List installed skills")
|
skills_list = skills_subparsers.add_parser("list", help="List installed skills")
|
||||||
skills_list.add_argument("--source", default="all", choices=["all", "hub", "builtin", "local"])
|
skills_list.add_argument("--source", default="all", choices=["all", "hub", "builtin", "local"])
|
||||||
|
|
||||||
|
skills_check = skills_subparsers.add_parser("check", help="Check installed hub skills for updates")
|
||||||
|
skills_check.add_argument("name", nargs="?", help="Specific skill to check (default: all)")
|
||||||
|
|
||||||
|
skills_update = skills_subparsers.add_parser("update", help="Update installed hub skills")
|
||||||
|
skills_update.add_argument("name", nargs="?", help="Specific skill to update (default: all outdated skills)")
|
||||||
|
|
||||||
skills_audit = skills_subparsers.add_parser("audit", help="Re-scan installed hub skills")
|
skills_audit = skills_subparsers.add_parser("audit", help="Re-scan installed hub skills")
|
||||||
skills_audit.add_argument("name", nargs="?", help="Specific skill to audit (default: all)")
|
skills_audit.add_argument("name", nargs="?", help="Specific skill to audit (default: all)")
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ handler are thin wrappers that parse args and delegate.
|
||||||
import json
|
import json
|
||||||
import shutil
|
import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Optional
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
from rich.panel import Panel
|
from rich.panel import Panel
|
||||||
|
|
@ -76,6 +76,70 @@ def _resolve_short_name(name: str, sources, console: Console) -> str:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def _format_extra_metadata_lines(extra: Dict[str, Any]) -> list[str]:
|
||||||
|
lines: list[str] = []
|
||||||
|
if not extra:
|
||||||
|
return lines
|
||||||
|
|
||||||
|
if extra.get("repo_url"):
|
||||||
|
lines.append(f"[bold]Repo:[/] {extra['repo_url']}")
|
||||||
|
if extra.get("detail_url"):
|
||||||
|
lines.append(f"[bold]Detail Page:[/] {extra['detail_url']}")
|
||||||
|
if extra.get("index_url"):
|
||||||
|
lines.append(f"[bold]Index:[/] {extra['index_url']}")
|
||||||
|
if extra.get("endpoint"):
|
||||||
|
lines.append(f"[bold]Endpoint:[/] {extra['endpoint']}")
|
||||||
|
if extra.get("install_command"):
|
||||||
|
lines.append(f"[bold]Install Command:[/] {extra['install_command']}")
|
||||||
|
if extra.get("installs") is not None:
|
||||||
|
lines.append(f"[bold]Installs:[/] {extra['installs']}")
|
||||||
|
if extra.get("weekly_installs"):
|
||||||
|
lines.append(f"[bold]Weekly Installs:[/] {extra['weekly_installs']}")
|
||||||
|
|
||||||
|
security = extra.get("security_audits")
|
||||||
|
if isinstance(security, dict) and security:
|
||||||
|
ordered = ", ".join(f"{name}={status}" for name, status in sorted(security.items()))
|
||||||
|
lines.append(f"[bold]Security:[/] {ordered}")
|
||||||
|
|
||||||
|
return lines
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_source_meta_and_bundle(identifier: str, sources):
|
||||||
|
"""Resolve metadata and bundle for a specific identifier."""
|
||||||
|
meta = None
|
||||||
|
bundle = None
|
||||||
|
matched_source = None
|
||||||
|
|
||||||
|
for src in sources:
|
||||||
|
if meta is None:
|
||||||
|
try:
|
||||||
|
meta = src.inspect(identifier)
|
||||||
|
if meta:
|
||||||
|
matched_source = src
|
||||||
|
except Exception:
|
||||||
|
meta = None
|
||||||
|
try:
|
||||||
|
bundle = src.fetch(identifier)
|
||||||
|
except Exception:
|
||||||
|
bundle = None
|
||||||
|
if bundle:
|
||||||
|
matched_source = src
|
||||||
|
if meta is None:
|
||||||
|
try:
|
||||||
|
meta = src.inspect(identifier)
|
||||||
|
except Exception:
|
||||||
|
meta = None
|
||||||
|
break
|
||||||
|
|
||||||
|
return meta, bundle, matched_source
|
||||||
|
|
||||||
|
|
||||||
|
def _derive_category_from_install_path(install_path: str) -> str:
|
||||||
|
path = Path(install_path)
|
||||||
|
parent = str(path.parent)
|
||||||
|
return "" if parent == "." else parent
|
||||||
|
|
||||||
|
|
||||||
def do_search(query: str, source: str = "all", limit: int = 10,
|
def do_search(query: str, source: str = "all", limit: int = 10,
|
||||||
console: Optional[Console] = None) -> None:
|
console: Optional[Console] = None) -> None:
|
||||||
"""Search registries and display results as a Rich table."""
|
"""Search registries and display results as a Rich table."""
|
||||||
|
|
@ -136,7 +200,7 @@ def do_browse(page: int = 1, page_size: int = 20, source: str = "all",
|
||||||
# Collect results from all (or filtered) sources
|
# Collect results from all (or filtered) sources
|
||||||
# Use empty query to get everything; per-source limits prevent overload
|
# Use empty query to get everything; per-source limits prevent overload
|
||||||
_TRUST_RANK = {"builtin": 3, "trusted": 2, "community": 1}
|
_TRUST_RANK = {"builtin": 3, "trusted": 2, "community": 1}
|
||||||
_PER_SOURCE_LIMIT = {"official": 100, "github": 100, "clawhub": 50,
|
_PER_SOURCE_LIMIT = {"official": 100, "skills-sh": 100, "well-known": 25, "github": 100, "clawhub": 50,
|
||||||
"claude-marketplace": 50, "lobehub": 50}
|
"claude-marketplace": 50, "lobehub": 50}
|
||||||
|
|
||||||
all_results: list = []
|
all_results: list = []
|
||||||
|
|
@ -263,11 +327,7 @@ def do_install(identifier: str, category: str = "", force: bool = False,
|
||||||
|
|
||||||
c.print(f"\n[bold]Fetching:[/] {identifier}")
|
c.print(f"\n[bold]Fetching:[/] {identifier}")
|
||||||
|
|
||||||
bundle = None
|
meta, bundle, _matched_source = _resolve_source_meta_and_bundle(identifier, sources)
|
||||||
for src in sources:
|
|
||||||
bundle = src.fetch(identifier)
|
|
||||||
if bundle:
|
|
||||||
break
|
|
||||||
|
|
||||||
if not bundle:
|
if not bundle:
|
||||||
c.print(f"[bold red]Error:[/] Could not fetch '{identifier}' from any source.\n")
|
c.print(f"[bold red]Error:[/] Could not fetch '{identifier}' from any source.\n")
|
||||||
|
|
@ -288,6 +348,9 @@ def do_install(identifier: str, category: str = "", force: bool = False,
|
||||||
c.print("Use --force to reinstall.\n")
|
c.print("Use --force to reinstall.\n")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
extra_metadata = dict(getattr(meta, "extra", {}) or {})
|
||||||
|
extra_metadata.update(getattr(bundle, "metadata", {}) or {})
|
||||||
|
|
||||||
# Quarantine the bundle
|
# Quarantine the bundle
|
||||||
q_path = quarantine_bundle(bundle)
|
q_path = quarantine_bundle(bundle)
|
||||||
c.print(f"[dim]Quarantined to {q_path.relative_to(q_path.parent.parent.parent)}[/]")
|
c.print(f"[dim]Quarantined to {q_path.relative_to(q_path.parent.parent.parent)}[/]")
|
||||||
|
|
@ -309,6 +372,11 @@ def do_install(identifier: str, category: str = "", force: bool = False,
|
||||||
f"{len(result.findings)}_findings")
|
f"{len(result.findings)}_findings")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if extra_metadata:
|
||||||
|
metadata_lines = _format_extra_metadata_lines(extra_metadata)
|
||||||
|
if metadata_lines:
|
||||||
|
c.print(Panel("\n".join(metadata_lines), title="Upstream Metadata", border_style="blue"))
|
||||||
|
|
||||||
# Confirm with user — show appropriate warning based on source
|
# Confirm with user — show appropriate warning based on source
|
||||||
if not force:
|
if not force:
|
||||||
c.print()
|
c.print()
|
||||||
|
|
@ -361,23 +429,12 @@ def do_inspect(identifier: str, console: Optional[Console] = None) -> None:
|
||||||
if not identifier:
|
if not identifier:
|
||||||
return
|
return
|
||||||
|
|
||||||
meta = None
|
meta, bundle, _matched_source = _resolve_source_meta_and_bundle(identifier, sources)
|
||||||
for src in sources:
|
|
||||||
meta = src.inspect(identifier)
|
|
||||||
if meta:
|
|
||||||
break
|
|
||||||
|
|
||||||
if not meta:
|
if not meta:
|
||||||
c.print(f"[bold red]Error:[/] Could not find '{identifier}' in any source.\n")
|
c.print(f"[bold red]Error:[/] Could not find '{identifier}' in any source.\n")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Also fetch full content for preview
|
|
||||||
bundle = None
|
|
||||||
for src in sources:
|
|
||||||
bundle = src.fetch(identifier)
|
|
||||||
if bundle:
|
|
||||||
break
|
|
||||||
|
|
||||||
c.print()
|
c.print()
|
||||||
trust_style = {"builtin": "bright_cyan", "trusted": "green", "community": "yellow"}.get(meta.trust_level, "dim")
|
trust_style = {"builtin": "bright_cyan", "trusted": "green", "community": "yellow"}.get(meta.trust_level, "dim")
|
||||||
trust_label = "official" if meta.source == "official" else meta.trust_level
|
trust_label = "official" if meta.source == "official" else meta.trust_level
|
||||||
|
|
@ -391,6 +448,7 @@ def do_inspect(identifier: str, console: Optional[Console] = None) -> None:
|
||||||
]
|
]
|
||||||
if meta.tags:
|
if meta.tags:
|
||||||
info_lines.append(f"[bold]Tags:[/] {', '.join(meta.tags)}")
|
info_lines.append(f"[bold]Tags:[/] {', '.join(meta.tags)}")
|
||||||
|
info_lines.extend(_format_extra_metadata_lines(meta.extra))
|
||||||
|
|
||||||
c.print(Panel("\n".join(info_lines), title=f"Skill: {meta.name}"))
|
c.print(Panel("\n".join(info_lines), title=f"Skill: {meta.name}"))
|
||||||
|
|
||||||
|
|
@ -464,6 +522,49 @@ def do_list(source_filter: str = "all", console: Optional[Console] = None) -> No
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def do_check(name: Optional[str] = None, console: Optional[Console] = None) -> None:
|
||||||
|
"""Check hub-installed skills for upstream updates."""
|
||||||
|
from tools.skills_hub import check_for_skill_updates
|
||||||
|
|
||||||
|
c = console or _console
|
||||||
|
results = check_for_skill_updates(name=name)
|
||||||
|
if not results:
|
||||||
|
c.print("[dim]No hub-installed skills to check.[/]\n")
|
||||||
|
return
|
||||||
|
|
||||||
|
table = Table(title="Skill Updates")
|
||||||
|
table.add_column("Name", style="bold cyan")
|
||||||
|
table.add_column("Source", style="dim")
|
||||||
|
table.add_column("Status", style="dim")
|
||||||
|
|
||||||
|
for entry in results:
|
||||||
|
table.add_row(entry.get("name", ""), entry.get("source", ""), entry.get("status", ""))
|
||||||
|
|
||||||
|
c.print(table)
|
||||||
|
update_count = sum(1 for entry in results if entry.get("status") == "update_available")
|
||||||
|
c.print(f"[dim]{update_count} update(s) available across {len(results)} checked skill(s)[/]\n")
|
||||||
|
|
||||||
|
|
||||||
|
def do_update(name: Optional[str] = None, console: Optional[Console] = None) -> None:
|
||||||
|
"""Update hub-installed skills with upstream changes."""
|
||||||
|
from tools.skills_hub import HubLockFile, check_for_skill_updates
|
||||||
|
|
||||||
|
c = console or _console
|
||||||
|
lock = HubLockFile()
|
||||||
|
updates = [entry for entry in check_for_skill_updates(name=name) if entry.get("status") == "update_available"]
|
||||||
|
if not updates:
|
||||||
|
c.print("[dim]No updates available.[/]\n")
|
||||||
|
return
|
||||||
|
|
||||||
|
for entry in updates:
|
||||||
|
installed = lock.get_installed(entry["name"])
|
||||||
|
category = _derive_category_from_install_path(installed.get("install_path", "")) if installed else ""
|
||||||
|
c.print(f"[bold]Updating:[/] {entry['name']}")
|
||||||
|
do_install(entry["identifier"], category=category, force=True, console=c)
|
||||||
|
|
||||||
|
c.print(f"[bold green]Updated {len(updates)} skill(s).[/]\n")
|
||||||
|
|
||||||
|
|
||||||
def do_audit(name: Optional[str] = None, console: Optional[Console] = None) -> None:
|
def do_audit(name: Optional[str] = None, console: Optional[Console] = None) -> None:
|
||||||
"""Re-run security scan on installed hub skills."""
|
"""Re-run security scan on installed hub skills."""
|
||||||
from tools.skills_hub import HubLockFile, SKILLS_DIR
|
from tools.skills_hub import HubLockFile, SKILLS_DIR
|
||||||
|
|
@ -827,6 +928,10 @@ def skills_command(args) -> None:
|
||||||
do_inspect(args.identifier)
|
do_inspect(args.identifier)
|
||||||
elif action == "list":
|
elif action == "list":
|
||||||
do_list(source_filter=args.source)
|
do_list(source_filter=args.source)
|
||||||
|
elif action == "check":
|
||||||
|
do_check(name=getattr(args, "name", None))
|
||||||
|
elif action == "update":
|
||||||
|
do_update(name=getattr(args, "name", None))
|
||||||
elif action == "audit":
|
elif action == "audit":
|
||||||
do_audit(name=getattr(args, "name", None))
|
do_audit(name=getattr(args, "name", None))
|
||||||
elif action == "uninstall":
|
elif action == "uninstall":
|
||||||
|
|
@ -853,7 +958,7 @@ def skills_command(args) -> None:
|
||||||
return
|
return
|
||||||
do_tap(tap_action, repo=repo)
|
do_tap(tap_action, repo=repo)
|
||||||
else:
|
else:
|
||||||
_console.print("Usage: hermes skills [browse|search|install|inspect|list|audit|uninstall|publish|snapshot|tap]\n")
|
_console.print("Usage: hermes skills [browse|search|install|inspect|list|check|update|audit|uninstall|publish|snapshot|tap]\n")
|
||||||
_console.print("Run 'hermes skills <command> --help' for details.\n")
|
_console.print("Run 'hermes skills <command> --help' for details.\n")
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -872,6 +977,8 @@ def handle_skills_slash(cmd: str, console: Optional[Console] = None) -> None:
|
||||||
/skills inspect openai/skills/skill-creator
|
/skills inspect openai/skills/skill-creator
|
||||||
/skills list
|
/skills list
|
||||||
/skills list --source hub
|
/skills list --source hub
|
||||||
|
/skills check
|
||||||
|
/skills update
|
||||||
/skills audit
|
/skills audit
|
||||||
/skills audit my-skill
|
/skills audit my-skill
|
||||||
/skills uninstall my-skill
|
/skills uninstall my-skill
|
||||||
|
|
@ -920,7 +1027,7 @@ def handle_skills_slash(cmd: str, console: Optional[Console] = None) -> None:
|
||||||
|
|
||||||
elif action == "search":
|
elif action == "search":
|
||||||
if not args:
|
if not args:
|
||||||
c.print("[bold red]Usage:[/] /skills search <query> [--source github] [--limit N]\n")
|
c.print("[bold red]Usage:[/] /skills search <query> [--source skills-sh|well-known|github|official] [--limit N]\n")
|
||||||
return
|
return
|
||||||
source = "all"
|
source = "all"
|
||||||
limit = 10
|
limit = 10
|
||||||
|
|
@ -967,6 +1074,14 @@ def handle_skills_slash(cmd: str, console: Optional[Console] = None) -> None:
|
||||||
source_filter = args[idx + 1]
|
source_filter = args[idx + 1]
|
||||||
do_list(source_filter=source_filter, console=c)
|
do_list(source_filter=source_filter, console=c)
|
||||||
|
|
||||||
|
elif action == "check":
|
||||||
|
name = args[0] if args else None
|
||||||
|
do_check(name=name, console=c)
|
||||||
|
|
||||||
|
elif action == "update":
|
||||||
|
name = args[0] if args else None
|
||||||
|
do_update(name=name, console=c)
|
||||||
|
|
||||||
elif action == "audit":
|
elif action == "audit":
|
||||||
name = args[0] if args else None
|
name = args[0] if args else None
|
||||||
do_audit(name=name, console=c)
|
do_audit(name=name, console=c)
|
||||||
|
|
@ -1029,6 +1144,8 @@ def _print_skills_help(console: Console) -> None:
|
||||||
" [cyan]install[/] <identifier> Install a skill (with security scan)\n"
|
" [cyan]install[/] <identifier> Install a skill (with security scan)\n"
|
||||||
" [cyan]inspect[/] <identifier> Preview a skill without installing\n"
|
" [cyan]inspect[/] <identifier> Preview a skill without installing\n"
|
||||||
" [cyan]list[/] [--source hub|builtin|local] List installed skills\n"
|
" [cyan]list[/] [--source hub|builtin|local] List installed skills\n"
|
||||||
|
" [cyan]check[/] [name] Check hub skills for upstream updates\n"
|
||||||
|
" [cyan]update[/] [name] Update hub skills with upstream changes\n"
|
||||||
" [cyan]audit[/] [name] Re-scan hub skills for security\n"
|
" [cyan]audit[/] [name] Re-scan hub skills for security\n"
|
||||||
" [cyan]uninstall[/] <name> Remove a hub-installed skill\n"
|
" [cyan]uninstall[/] <name> Remove a hub-installed skill\n"
|
||||||
" [cyan]publish[/] <path> --repo <r> Publish a skill to GitHub via PR\n"
|
" [cyan]publish[/] <path> --repo <r> Publish a skill to GitHub via PR\n"
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ from io import StringIO
|
||||||
import pytest
|
import pytest
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
|
|
||||||
from hermes_cli.skills_hub import do_list
|
from hermes_cli.skills_hub import do_check, do_list, do_update
|
||||||
|
|
||||||
|
|
||||||
class _DummyLockFile:
|
class _DummyLockFile:
|
||||||
|
|
@ -68,6 +68,34 @@ def _capture(source_filter: str = "all") -> str:
|
||||||
return sink.getvalue()
|
return sink.getvalue()
|
||||||
|
|
||||||
|
|
||||||
|
def _capture_check(monkeypatch, results, name=None) -> str:
|
||||||
|
import tools.skills_hub as hub
|
||||||
|
|
||||||
|
sink = StringIO()
|
||||||
|
console = Console(file=sink, force_terminal=False, color_system=None)
|
||||||
|
monkeypatch.setattr(hub, "check_for_skill_updates", lambda **_kwargs: results)
|
||||||
|
do_check(name=name, console=console)
|
||||||
|
return sink.getvalue()
|
||||||
|
|
||||||
|
|
||||||
|
def _capture_update(monkeypatch, results) -> tuple[str, list[tuple[str, str, bool]]]:
|
||||||
|
import tools.skills_hub as hub
|
||||||
|
import hermes_cli.skills_hub as cli_hub
|
||||||
|
|
||||||
|
sink = StringIO()
|
||||||
|
console = Console(file=sink, force_terminal=False, color_system=None)
|
||||||
|
installs = []
|
||||||
|
|
||||||
|
monkeypatch.setattr(hub, "check_for_skill_updates", lambda **_kwargs: results)
|
||||||
|
monkeypatch.setattr(hub, "HubLockFile", lambda: type("L", (), {
|
||||||
|
"get_installed": lambda self, name: {"install_path": "category/" + name}
|
||||||
|
})())
|
||||||
|
monkeypatch.setattr(cli_hub, "do_install", lambda identifier, category="", force=False, console=None: installs.append((identifier, category, force)))
|
||||||
|
|
||||||
|
do_update(console=console)
|
||||||
|
return sink.getvalue(), installs
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Tests
|
# Tests
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -122,3 +150,30 @@ def test_do_list_filter_builtin(three_source_env):
|
||||||
assert "builtin-skill" in output
|
assert "builtin-skill" in output
|
||||||
assert "hub-skill" not in output
|
assert "hub-skill" not in output
|
||||||
assert "local-skill" not in output
|
assert "local-skill" not in output
|
||||||
|
|
||||||
|
|
||||||
|
def test_do_check_reports_available_updates(monkeypatch):
|
||||||
|
output = _capture_check(monkeypatch, [
|
||||||
|
{"name": "hub-skill", "source": "skills.sh", "status": "update_available"},
|
||||||
|
{"name": "other-skill", "source": "github", "status": "up_to_date"},
|
||||||
|
])
|
||||||
|
|
||||||
|
assert "hub-skill" in output
|
||||||
|
assert "update_available" in output
|
||||||
|
assert "up_to_date" in output
|
||||||
|
|
||||||
|
|
||||||
|
def test_do_check_handles_no_installed_updates(monkeypatch):
|
||||||
|
output = _capture_check(monkeypatch, [])
|
||||||
|
|
||||||
|
assert "No hub-installed skills to check" in output
|
||||||
|
|
||||||
|
|
||||||
|
def test_do_update_reinstalls_outdated_skills(monkeypatch):
|
||||||
|
output, installs = _capture_update(monkeypatch, [
|
||||||
|
{"name": "hub-skill", "identifier": "skills-sh/example/repo/hub-skill", "status": "update_available"},
|
||||||
|
{"name": "other-skill", "identifier": "github/example/other-skill", "status": "up_to_date"},
|
||||||
|
])
|
||||||
|
|
||||||
|
assert installs == [("skills-sh/example/repo/hub-skill", "category", True)]
|
||||||
|
assert "Updated 1 skill" in output
|
||||||
|
|
|
||||||
|
|
@ -8,10 +8,15 @@ from tools.skills_hub import (
|
||||||
GitHubAuth,
|
GitHubAuth,
|
||||||
GitHubSource,
|
GitHubSource,
|
||||||
LobeHubSource,
|
LobeHubSource,
|
||||||
|
SkillsShSource,
|
||||||
|
WellKnownSkillSource,
|
||||||
SkillMeta,
|
SkillMeta,
|
||||||
SkillBundle,
|
SkillBundle,
|
||||||
HubLockFile,
|
HubLockFile,
|
||||||
TapsManager,
|
TapsManager,
|
||||||
|
bundle_content_hash,
|
||||||
|
check_for_skill_updates,
|
||||||
|
create_source_router,
|
||||||
unified_search,
|
unified_search,
|
||||||
append_audit_log,
|
append_audit_log,
|
||||||
_skill_meta_to_dict,
|
_skill_meta_to_dict,
|
||||||
|
|
@ -93,6 +98,387 @@ class TestTrustLevelFor:
|
||||||
assert result in ("trusted", "community")
|
assert result in ("trusted", "community")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# SkillsShSource
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class TestSkillsShSource:
|
||||||
|
def _source(self):
|
||||||
|
auth = MagicMock(spec=GitHubAuth)
|
||||||
|
return SkillsShSource(auth=auth)
|
||||||
|
|
||||||
|
@patch("tools.skills_hub._write_index_cache")
|
||||||
|
@patch("tools.skills_hub._read_index_cache", return_value=None)
|
||||||
|
@patch("tools.skills_hub.httpx.get")
|
||||||
|
def test_search_maps_skills_sh_results_to_prefixed_identifiers(self, mock_get, _mock_read_cache, _mock_write_cache):
|
||||||
|
mock_get.return_value = MagicMock(
|
||||||
|
status_code=200,
|
||||||
|
json=lambda: {
|
||||||
|
"skills": [
|
||||||
|
{
|
||||||
|
"id": "vercel-labs/agent-skills/vercel-react-best-practices",
|
||||||
|
"skillId": "vercel-react-best-practices",
|
||||||
|
"name": "vercel-react-best-practices",
|
||||||
|
"installs": 207679,
|
||||||
|
"source": "vercel-labs/agent-skills",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
results = self._source().search("react", limit=5)
|
||||||
|
|
||||||
|
assert len(results) == 1
|
||||||
|
assert results[0].source == "skills.sh"
|
||||||
|
assert results[0].identifier == "skills-sh/vercel-labs/agent-skills/vercel-react-best-practices"
|
||||||
|
assert "skills.sh" in results[0].description
|
||||||
|
assert results[0].repo == "vercel-labs/agent-skills"
|
||||||
|
assert results[0].path == "vercel-react-best-practices"
|
||||||
|
assert results[0].extra["installs"] == 207679
|
||||||
|
|
||||||
|
@patch("tools.skills_hub._write_index_cache")
|
||||||
|
@patch("tools.skills_hub._read_index_cache", return_value=None)
|
||||||
|
@patch("tools.skills_hub.httpx.get")
|
||||||
|
def test_empty_search_uses_featured_homepage_links(self, mock_get, _mock_read_cache, _mock_write_cache):
|
||||||
|
mock_get.return_value = MagicMock(
|
||||||
|
status_code=200,
|
||||||
|
text='''
|
||||||
|
<a href="/vercel-labs/agent-skills/vercel-react-best-practices">React</a>
|
||||||
|
<a href="/anthropics/skills/pdf">PDF</a>
|
||||||
|
<a href="/vercel-labs/agent-skills/vercel-react-best-practices">React again</a>
|
||||||
|
''',
|
||||||
|
)
|
||||||
|
|
||||||
|
results = self._source().search("", limit=10)
|
||||||
|
|
||||||
|
assert [r.identifier for r in results] == [
|
||||||
|
"skills-sh/vercel-labs/agent-skills/vercel-react-best-practices",
|
||||||
|
"skills-sh/anthropics/skills/pdf",
|
||||||
|
]
|
||||||
|
assert all(r.source == "skills.sh" for r in results)
|
||||||
|
|
||||||
|
@patch.object(GitHubSource, "fetch")
|
||||||
|
def test_fetch_delegates_to_github_source_and_relabels_bundle(self, mock_fetch):
|
||||||
|
mock_fetch.return_value = SkillBundle(
|
||||||
|
name="vercel-react-best-practices",
|
||||||
|
files={"SKILL.md": "# Test"},
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/agent-skills/vercel-react-best-practices",
|
||||||
|
trust_level="community",
|
||||||
|
)
|
||||||
|
|
||||||
|
bundle = self._source().fetch("skills-sh/vercel-labs/agent-skills/vercel-react-best-practices")
|
||||||
|
|
||||||
|
assert bundle is not None
|
||||||
|
assert bundle.source == "skills.sh"
|
||||||
|
assert bundle.identifier == "skills-sh/vercel-labs/agent-skills/vercel-react-best-practices"
|
||||||
|
mock_fetch.assert_called_once_with("vercel-labs/agent-skills/vercel-react-best-practices")
|
||||||
|
|
||||||
|
@patch("tools.skills_hub._write_index_cache")
|
||||||
|
@patch("tools.skills_hub._read_index_cache", return_value=None)
|
||||||
|
@patch("tools.skills_hub.httpx.get")
|
||||||
|
@patch.object(GitHubSource, "inspect")
|
||||||
|
def test_inspect_delegates_to_github_source_and_relabels_meta(self, mock_inspect, mock_get, _mock_read_cache, _mock_write_cache):
|
||||||
|
mock_inspect.return_value = SkillMeta(
|
||||||
|
name="vercel-react-best-practices",
|
||||||
|
description="React rules",
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/agent-skills/vercel-react-best-practices",
|
||||||
|
trust_level="community",
|
||||||
|
repo="vercel-labs/agent-skills",
|
||||||
|
path="vercel-react-best-practices",
|
||||||
|
)
|
||||||
|
mock_get.return_value = MagicMock(
|
||||||
|
status_code=200,
|
||||||
|
text='''
|
||||||
|
<h1>vercel-react-best-practices</h1>
|
||||||
|
<code>$ npx skills add https://github.com/vercel-labs/agent-skills --skill vercel-react-best-practices</code>
|
||||||
|
<div class="prose"><h1>Vercel React Best Practices</h1><p>React rules.</p></div>
|
||||||
|
<a href="/vercel-labs/agent-skills/vercel-react-best-practices/security/socket">Socket</a> Pass
|
||||||
|
<a href="/vercel-labs/agent-skills/vercel-react-best-practices/security/snyk">Snyk</a> Pass
|
||||||
|
''',
|
||||||
|
)
|
||||||
|
|
||||||
|
meta = self._source().inspect("skills-sh/vercel-labs/agent-skills/vercel-react-best-practices")
|
||||||
|
|
||||||
|
assert meta is not None
|
||||||
|
assert meta.source == "skills.sh"
|
||||||
|
assert meta.identifier == "skills-sh/vercel-labs/agent-skills/vercel-react-best-practices"
|
||||||
|
assert meta.extra["install_command"].endswith("--skill vercel-react-best-practices")
|
||||||
|
assert meta.extra["security_audits"]["socket"] == "Pass"
|
||||||
|
mock_inspect.assert_called_once_with("vercel-labs/agent-skills/vercel-react-best-practices")
|
||||||
|
|
||||||
|
@patch.object(GitHubSource, "_list_skills_in_repo")
|
||||||
|
@patch.object(GitHubSource, "inspect")
|
||||||
|
def test_inspect_falls_back_to_repo_skill_catalog_when_slug_differs(self, mock_inspect, mock_list_skills):
|
||||||
|
resolved = SkillMeta(
|
||||||
|
name="vercel-react-best-practices",
|
||||||
|
description="React rules",
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/agent-skills/skills/react-best-practices",
|
||||||
|
trust_level="community",
|
||||||
|
repo="vercel-labs/agent-skills",
|
||||||
|
path="skills/react-best-practices",
|
||||||
|
)
|
||||||
|
mock_inspect.side_effect = lambda identifier: resolved if identifier == resolved.identifier else None
|
||||||
|
mock_list_skills.return_value = [resolved]
|
||||||
|
|
||||||
|
meta = self._source().inspect("skills-sh/vercel-labs/agent-skills/vercel-react-best-practices")
|
||||||
|
|
||||||
|
assert meta is not None
|
||||||
|
assert meta.identifier == "skills-sh/vercel-labs/agent-skills/vercel-react-best-practices"
|
||||||
|
assert mock_list_skills.called
|
||||||
|
|
||||||
|
@patch("tools.skills_hub._write_index_cache")
|
||||||
|
@patch("tools.skills_hub._read_index_cache", return_value=None)
|
||||||
|
@patch("tools.skills_hub.httpx.get")
|
||||||
|
@patch.object(GitHubSource, "_list_skills_in_repo")
|
||||||
|
@patch.object(GitHubSource, "inspect")
|
||||||
|
def test_inspect_uses_detail_page_to_resolve_alias_skill(self, mock_inspect, mock_list_skills, mock_get, _mock_read_cache, _mock_write_cache):
|
||||||
|
resolved = SkillMeta(
|
||||||
|
name="react",
|
||||||
|
description="React renderer",
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/json-render/skills/react",
|
||||||
|
trust_level="community",
|
||||||
|
repo="vercel-labs/json-render",
|
||||||
|
path="skills/react",
|
||||||
|
)
|
||||||
|
mock_inspect.side_effect = lambda identifier: resolved if identifier == resolved.identifier else None
|
||||||
|
mock_list_skills.return_value = [resolved]
|
||||||
|
mock_get.return_value = MagicMock(
|
||||||
|
status_code=200,
|
||||||
|
text='''
|
||||||
|
<h1>json-render-react</h1>
|
||||||
|
<code>$ npx skills add https://github.com/vercel-labs/json-render --skill json-render-react</code>
|
||||||
|
<div class="prose"><h1>@json-render/react</h1><p>React renderer.</p></div>
|
||||||
|
''',
|
||||||
|
)
|
||||||
|
|
||||||
|
meta = self._source().inspect("skills-sh/vercel-labs/json-render/json-render-react")
|
||||||
|
|
||||||
|
assert meta is not None
|
||||||
|
assert meta.identifier == "skills-sh/vercel-labs/json-render/json-render-react"
|
||||||
|
assert meta.path == "skills/react"
|
||||||
|
assert mock_get.called
|
||||||
|
|
||||||
|
@patch("tools.skills_hub._write_index_cache")
|
||||||
|
@patch("tools.skills_hub._read_index_cache", return_value=None)
|
||||||
|
@patch("tools.skills_hub.httpx.get")
|
||||||
|
@patch.object(GitHubSource, "_list_skills_in_repo")
|
||||||
|
@patch.object(GitHubSource, "fetch")
|
||||||
|
def test_fetch_uses_detail_page_to_resolve_alias_skill(self, mock_fetch, mock_list_skills, mock_get, _mock_read_cache, _mock_write_cache):
|
||||||
|
resolved_meta = SkillMeta(
|
||||||
|
name="react",
|
||||||
|
description="React renderer",
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/json-render/skills/react",
|
||||||
|
trust_level="community",
|
||||||
|
repo="vercel-labs/json-render",
|
||||||
|
path="skills/react",
|
||||||
|
)
|
||||||
|
resolved_bundle = SkillBundle(
|
||||||
|
name="react",
|
||||||
|
files={"SKILL.md": "# react"},
|
||||||
|
source="github",
|
||||||
|
identifier="vercel-labs/json-render/skills/react",
|
||||||
|
trust_level="community",
|
||||||
|
)
|
||||||
|
mock_fetch.side_effect = lambda identifier: resolved_bundle if identifier == resolved_bundle.identifier else None
|
||||||
|
mock_list_skills.return_value = [resolved_meta]
|
||||||
|
mock_get.return_value = MagicMock(
|
||||||
|
status_code=200,
|
||||||
|
text='''
|
||||||
|
<h1>json-render-react</h1>
|
||||||
|
<code>$ npx skills add https://github.com/vercel-labs/json-render --skill json-render-react</code>
|
||||||
|
<div class="prose"><h1>@json-render/react</h1><p>React renderer.</p></div>
|
||||||
|
''',
|
||||||
|
)
|
||||||
|
|
||||||
|
bundle = self._source().fetch("skills-sh/vercel-labs/json-render/json-render-react")
|
||||||
|
|
||||||
|
assert bundle is not None
|
||||||
|
assert bundle.identifier == "skills-sh/vercel-labs/json-render/json-render-react"
|
||||||
|
assert bundle.files["SKILL.md"] == "# react"
|
||||||
|
assert mock_get.called
|
||||||
|
|
||||||
|
|
||||||
|
class TestWellKnownSkillSource:
    """Behavior of the /.well-known/skills endpoint source."""

    def _source(self):
        return WellKnownSkillSource()

    @patch("tools.skills_hub._write_index_cache")
    @patch("tools.skills_hub._read_index_cache", return_value=None)
    @patch("tools.skills_hub.httpx.get")
    def test_search_reads_index_from_well_known_url(self, mock_get, _mock_read_cache, _mock_write_cache):
        index_payload = {
            "skills": [
                {"name": "git-workflow", "description": "Git rules", "files": ["SKILL.md"]},
                {"name": "code-review", "description": "Review code", "files": ["SKILL.md", "references/checklist.md"]},
            ]
        }
        mock_get.return_value = MagicMock(status_code=200, json=lambda: index_payload)

        results = self._source().search("https://example.com/.well-known/skills/index.json", limit=10)

        expected_identifiers = [
            "well-known:https://example.com/.well-known/skills/git-workflow",
            "well-known:https://example.com/.well-known/skills/code-review",
        ]
        assert [r.identifier for r in results] == expected_identifiers
        assert all(r.source == "well-known" for r in results)

    @patch("tools.skills_hub._write_index_cache")
    @patch("tools.skills_hub._read_index_cache", return_value=None)
    @patch("tools.skills_hub.httpx.get")
    def test_search_accepts_domain_root_and_resolves_index(self, mock_get, _mock_read_cache, _mock_write_cache):
        index_payload = {"skills": [{"name": "git-workflow", "description": "Git rules", "files": ["SKILL.md"]}]}
        mock_get.return_value = MagicMock(status_code=200, json=lambda: index_payload)

        results = self._source().search("https://example.com", limit=10)

        assert len(results) == 1
        # The bare domain must be expanded to the conventional index URL.
        assert mock_get.call_args.args[0] == "https://example.com/.well-known/skills/index.json"

    @patch("tools.skills_hub._write_index_cache")
    @patch("tools.skills_hub._read_index_cache", return_value=None)
    @patch("tools.skills_hub.httpx.get")
    def test_inspect_fetches_skill_md_from_well_known_endpoint(self, mock_get, _mock_read_cache, _mock_write_cache):
        index_payload = {"skills": [{"name": "git-workflow", "description": "Git rules", "files": ["SKILL.md"]}]}
        skill_md = "---\nname: git-workflow\ndescription: Git rules\n---\n\n# Git Workflow\n"

        def routed_get(url, *args, **kwargs):
            # Route by URL suffix; any unexpected request fails the test.
            if url.endswith("/index.json"):
                return MagicMock(status_code=200, json=lambda: index_payload)
            if url.endswith("/git-workflow/SKILL.md"):
                return MagicMock(status_code=200, text=skill_md)
            raise AssertionError(url)

        mock_get.side_effect = routed_get

        meta = self._source().inspect("well-known:https://example.com/.well-known/skills/git-workflow")

        assert meta is not None
        assert meta.name == "git-workflow"
        assert meta.source == "well-known"
        assert meta.extra["base_url"] == "https://example.com/.well-known/skills"

    @patch("tools.skills_hub._write_index_cache")
    @patch("tools.skills_hub._read_index_cache", return_value=None)
    @patch("tools.skills_hub.httpx.get")
    def test_fetch_downloads_skill_files_from_well_known_endpoint(self, mock_get, _mock_read_cache, _mock_write_cache):
        index_payload = {
            "skills": [{
                "name": "code-review",
                "description": "Review code",
                "files": ["SKILL.md", "references/checklist.md"],
            }]
        }

        def routed_get(url, *args, **kwargs):
            if url.endswith("/index.json"):
                return MagicMock(status_code=200, json=lambda: index_payload)
            if url.endswith("/code-review/SKILL.md"):
                return MagicMock(status_code=200, text="# Code Review\n")
            if url.endswith("/code-review/references/checklist.md"):
                return MagicMock(status_code=200, text="- [ ] security\n")
            raise AssertionError(url)

        mock_get.side_effect = routed_get

        bundle = self._source().fetch("well-known:https://example.com/.well-known/skills/code-review")

        assert bundle is not None
        assert bundle.source == "well-known"
        assert bundle.files["SKILL.md"] == "# Code Review\n"
        assert bundle.files["references/checklist.md"] == "- [ ] security\n"
||||||
|
|
||||||
|
class TestCheckForSkillUpdates:
    """Update detection against the lockfile's recorded content hashes."""

    @staticmethod
    def _demo_bundle(files):
        # A minimal GitHub-sourced bundle used by every test in this class.
        return SkillBundle(
            name="demo-skill",
            files=files,
            source="github",
            identifier="owner/repo/demo-skill",
            trust_level="community",
        )

    @staticmethod
    def _lock_with_hash(recorded_hash):
        lock = MagicMock()
        lock.list_installed.return_value = [{
            "name": "demo-skill",
            "source": "github",
            "identifier": "owner/repo/demo-skill",
            "content_hash": recorded_hash,
            "install_path": "demo-skill",
        }]
        return lock

    @staticmethod
    def _github_source(bundle):
        source = MagicMock()
        source.source_id.return_value = "github"
        source.fetch.return_value = bundle
        return source

    def test_bundle_content_hash_matches_installed_content_hash(self, tmp_path):
        from tools.skills_guard import content_hash

        layout = {
            "SKILL.md": "same content",
            "references/checklist.md": "- [ ] security\n",
        }
        bundle = self._demo_bundle(dict(layout))

        skill_dir = tmp_path / "demo-skill"
        skill_dir.mkdir()
        for rel_path, body in layout.items():
            target = skill_dir / rel_path
            target.parent.mkdir(parents=True, exist_ok=True)
            target.write_text(body)

        assert bundle_content_hash(bundle) == content_hash(skill_dir)

    def test_reports_update_when_remote_hash_differs(self):
        lock = self._lock_with_hash("oldhash")
        source = self._github_source(self._demo_bundle({"SKILL.md": "new content"}))

        results = check_for_skill_updates(lock=lock, sources=[source])

        assert len(results) == 1
        assert results[0]["name"] == "demo-skill"
        assert results[0]["status"] == "update_available"

    def test_reports_up_to_date_when_hash_matches(self):
        bundle = self._demo_bundle({"SKILL.md": "same content"})
        lock = self._lock_with_hash(bundle_content_hash(bundle))
        source = self._github_source(bundle)

        results = check_for_skill_updates(lock=lock, sources=[source])

        assert results[0]["status"] == "up_to_date"
||||||
|
|
||||||
|
class TestCreateSourceRouter:
    """The default source router wires in the new hub sources."""

    def test_includes_skills_sh_source(self):
        router = create_source_router(auth=MagicMock(spec=GitHubAuth))
        assert any(isinstance(entry, SkillsShSource) for entry in router)

    def test_includes_well_known_source(self):
        router = create_source_router(auth=MagicMock(spec=GitHubAuth))
        assert any(isinstance(entry, WellKnownSkillSource) for entry in router)
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# HubLockFile
|
# HubLockFile
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
|
||||||
|
|
@ -26,6 +26,7 @@ from dataclasses import dataclass, field
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
import yaml
|
import yaml
|
||||||
|
|
@ -69,6 +70,7 @@ class SkillMeta:
|
||||||
repo: Optional[str] = None
|
repo: Optional[str] = None
|
||||||
path: Optional[str] = None
|
path: Optional[str] = None
|
||||||
tags: List[str] = field(default_factory=list)
|
tags: List[str] = field(default_factory=list)
|
||||||
|
extra: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
|
|
@ -79,6 +81,7 @@ class SkillBundle:
|
||||||
source: str
|
source: str
|
||||||
identifier: str
|
identifier: str
|
||||||
trust_level: str
|
trust_level: str
|
||||||
|
metadata: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -497,6 +500,643 @@ class GitHubSource(SkillSource):
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Well-known Agent Skills endpoint source adapter
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class WellKnownSkillSource(SkillSource):
    """Read skills from a domain exposing /.well-known/skills/index.json."""

    # Conventional path under which a domain publishes its skill index.
    BASE_PATH = "/.well-known/skills"

    def source_id(self) -> str:
        """Stable identifier for this source, used for routing and filtering."""
        return "well-known"

    def trust_level_for(self, identifier: str) -> str:
        """Every well-known endpoint skill is classified as community-trusted."""
        return "community"

    def search(self, query: str, limit: int = 10) -> List[SkillMeta]:
        """List skills from the index reachable from *query* (URL or domain root).

        Returns at most *limit* SkillMeta entries; index entries without a
        non-empty string name are skipped, so fewer than *limit* may come back.
        """
        index_url = self._query_to_index_url(query)
        if not index_url:
            return []

        parsed = self._parse_index(index_url)
        if not parsed:
            return []

        results: List[SkillMeta] = []
        for entry in parsed["skills"][:limit]:
            name = entry.get("name")
            if not isinstance(name, str) or not name:
                continue
            description = entry.get("description", "")
            files = entry.get("files", ["SKILL.md"])
            results.append(SkillMeta(
                name=name,
                description=str(description),
                source="well-known",
                identifier=self._wrap_identifier(parsed["base_url"], name),
                trust_level="community",
                path=name,
                extra={
                    "index_url": parsed["index_url"],
                    "base_url": parsed["base_url"],
                    # Guard against a malformed, non-list "files" value.
                    "files": files if isinstance(files, list) else ["SKILL.md"],
                },
            ))
        return results

    def inspect(self, identifier: str) -> Optional[SkillMeta]:
        """Resolve *identifier* against its index and read SKILL.md metadata.

        Returns None when the identifier cannot be parsed, the skill is not
        listed in the index, or SKILL.md cannot be downloaded.
        """
        parsed = self._parse_identifier(identifier)
        if not parsed:
            return None

        entry = self._index_entry(parsed["index_url"], parsed["skill_name"])
        if not entry:
            return None

        skill_md = self._fetch_text(f"{parsed['skill_url']}/SKILL.md")
        if skill_md is None:
            return None

        # SKILL.md frontmatter values win over the index entry when present.
        fm = GitHubSource._parse_frontmatter_quick(skill_md)
        description = str(fm.get("description") or entry.get("description") or "")
        name = str(fm.get("name") or parsed["skill_name"])
        return SkillMeta(
            name=name,
            description=description,
            source="well-known",
            identifier=self._wrap_identifier(parsed["base_url"], parsed["skill_name"]),
            trust_level="community",
            path=parsed["skill_name"],
            extra={
                "index_url": parsed["index_url"],
                "base_url": parsed["base_url"],
                "files": entry.get("files", ["SKILL.md"]),
                "endpoint": parsed["skill_url"],
            },
        )

    def fetch(self, identifier: str) -> Optional[SkillBundle]:
        """Download every file the index lists for the skill, all-or-nothing.

        Returns None when any listed file fails to download, or when SKILL.md
        is absent from the downloaded set.
        """
        parsed = self._parse_identifier(identifier)
        if not parsed:
            return None

        entry = self._index_entry(parsed["index_url"], parsed["skill_name"])
        if not entry:
            return None

        files = entry.get("files", ["SKILL.md"])
        if not isinstance(files, list) or not files:
            files = ["SKILL.md"]

        downloaded: Dict[str, str] = {}
        for rel_path in files:
            if not isinstance(rel_path, str) or not rel_path:
                continue
            text = self._fetch_text(f"{parsed['skill_url']}/{rel_path}")
            if text is None:
                # A single missing file invalidates the whole bundle.
                return None
            downloaded[rel_path] = text

        if "SKILL.md" not in downloaded:
            return None

        return SkillBundle(
            name=parsed["skill_name"],
            files=downloaded,
            source="well-known",
            identifier=self._wrap_identifier(parsed["base_url"], parsed["skill_name"]),
            trust_level="community",
            metadata={
                "index_url": parsed["index_url"],
                "base_url": parsed["base_url"],
                "endpoint": parsed["skill_url"],
                "files": files,
            },
        )

    def _query_to_index_url(self, query: str) -> Optional[str]:
        """Normalize a query (domain root, in-path URL, or index URL) to an index URL."""
        query = query.strip()
        if not query.startswith(("http://", "https://")):
            return None
        if query.endswith("/index.json"):
            return query
        if f"{self.BASE_PATH}/" in query:
            # A URL below the well-known path: trim back to the base path.
            base_url = query.split(f"{self.BASE_PATH}/", 1)[0] + self.BASE_PATH
            return f"{base_url}/index.json"
        return query.rstrip("/") + f"{self.BASE_PATH}/index.json"

    def _parse_identifier(self, identifier: str) -> Optional[dict]:
        """Split a (possibly "well-known:"-prefixed) identifier into URL parts.

        Accepted shapes:
          - .../index.json#<skill-name>   (fragment carries the skill name)
          - .../<skill-name>/SKILL.md
          - .../<skill-name>
        Returns {"index_url", "base_url", "skill_name", "skill_url"} or None.
        """
        raw = identifier[len("well-known:"):] if identifier.startswith("well-known:") else identifier
        if not raw.startswith(("http://", "https://")):
            return None

        parsed_url = urlparse(raw)
        clean_url = urlunparse(parsed_url._replace(fragment=""))
        fragment = parsed_url.fragment

        if clean_url.endswith("/index.json"):
            if not fragment:
                # An index URL with no fragment names no specific skill.
                return None
            base_url = clean_url[:-len("/index.json")]
            skill_name = fragment
            skill_url = f"{base_url}/{skill_name}"
            return {
                "index_url": clean_url,
                "base_url": base_url,
                "skill_name": skill_name,
                "skill_url": skill_url,
            }

        if clean_url.endswith("/SKILL.md"):
            skill_url = clean_url[:-len("/SKILL.md")]
        else:
            skill_url = clean_url.rstrip("/")

        # Only URLs under the well-known base path are valid identifiers.
        if f"{self.BASE_PATH}/" not in skill_url:
            return None

        base_url, skill_name = skill_url.rsplit("/", 1)
        return {
            "index_url": f"{base_url}/index.json",
            "base_url": base_url,
            "skill_name": skill_name,
            "skill_url": skill_url,
        }

    def _parse_index(self, index_url: str) -> Optional[dict]:
        """Fetch (and cache) the index document; None on any HTTP/shape failure."""
        cache_key = f"well_known_index_{hashlib.md5(index_url.encode()).hexdigest()}"
        cached = _read_index_cache(cache_key)
        if isinstance(cached, dict) and isinstance(cached.get("skills"), list):
            return cached

        try:
            resp = httpx.get(index_url, timeout=20, follow_redirects=True)
            if resp.status_code != 200:
                return None
            data = resp.json()
        except (httpx.HTTPError, json.JSONDecodeError):
            return None

        skills = data.get("skills", []) if isinstance(data, dict) else []
        if not isinstance(skills, list):
            return None

        parsed = {
            "index_url": index_url,
            # index_url always ends with "/index.json" for callers in this
            # class (see _query_to_index_url / _parse_identifier).
            "base_url": index_url[:-len("/index.json")],
            "skills": skills,
        }
        _write_index_cache(cache_key, parsed)
        return parsed

    def _index_entry(self, index_url: str, skill_name: str) -> Optional[dict]:
        """Look up one skill's entry in the (cached) index by exact name match."""
        parsed = self._parse_index(index_url)
        if not parsed:
            return None
        for entry in parsed["skills"]:
            if isinstance(entry, dict) and entry.get("name") == skill_name:
                return entry
        return None

    @staticmethod
    def _fetch_text(url: str) -> Optional[str]:
        """GET *url* and return the body text; None on non-200 or transport error."""
        try:
            resp = httpx.get(url, timeout=20, follow_redirects=True)
            if resp.status_code == 200:
                return resp.text
        except httpx.HTTPError:
            return None
        return None

    @staticmethod
    def _wrap_identifier(base_url: str, skill_name: str) -> str:
        """Build the canonical "well-known:<base>/<name>" identifier."""
        return f"well-known:{base_url.rstrip('/')}/{skill_name}"
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# skills.sh source adapter
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class SkillsShSource(SkillSource):
    """Discover skills via skills.sh and fetch content from the underlying GitHub repo."""

    BASE_URL = "https://skills.sh"
    SEARCH_URL = f"{BASE_URL}/api/search"

    # Internal skill links on skills.sh pages look like /<owner>/<repo>/<skill>;
    # the negative lookahead skips site chrome routes (agents/, _next/, api/).
    _SKILL_LINK_RE = re.compile(r'href=["\']/(?P<id>(?!agents/|_next/|api/)[^"\'/]+/[^"\'/]+/[^"\'/]+)["\']')
    # Install command shown on a detail page: "npx skills add <repo> [--skill <name>]".
    _INSTALL_CMD_RE = re.compile(
        r'npx\s+skills\s+add\s+(?P<repo>https?://github\.com/[^\s<]+|[^\s<]+)'
        r'(?:\s+--skill\s+(?P<skill>[^\s<]+))?',
        re.IGNORECASE,
    )
    # First <h1> anywhere on the page (the page title).
    _PAGE_H1_RE = re.compile(r'<h1[^>]*>(?P<title>.*?)</h1>', re.IGNORECASE | re.DOTALL)
    # First <h1> inside the rendered-markdown "prose" container.
    _PROSE_H1_RE = re.compile(
        r'<div[^>]*class=["\'][^"\']*prose[^"\']*["\'][^>]*>.*?<h1[^>]*>(?P<title>.*?)</h1>',
        re.IGNORECASE | re.DOTALL,
    )
    # First paragraph inside the "prose" container (used as a description).
    _PROSE_P_RE = re.compile(
        r'<div[^>]*class=["\'][^"\']*prose[^"\']*["\'][^>]*>.*?<p[^>]*>(?P<body>.*?)</p>',
        re.IGNORECASE | re.DOTALL,
    )
    # Weekly-install count embedded in the page's serialized JS payload,
    # hence the escaped \" quoting around the value.
    _WEEKLY_INSTALLS_RE = re.compile(r'Weekly Installs.*?children\\":\\"(?P<count>[0-9.,Kk]+)\\"', re.DOTALL)
||||||
|
def __init__(self, auth: GitHubAuth):
    """Store the auth handle and delegate repo access to a GitHubSource."""
    self.auth = auth
    # skills.sh indexes GitHub repos; actual content comes through GitHub.
    self.github = GitHubSource(auth=auth)
||||||
|
def source_id(self) -> str:
    """Stable identifier for this source, used for routing and filtering."""
    return "skills-sh"
||||||
|
def trust_level_for(self, identifier: str) -> str:
    """Delegate trust classification to GitHub via the canonical identifier."""
    canonical = self._normalize_identifier(identifier)
    return self.github.trust_level_for(canonical)
||||||
|
def search(self, query: str, limit: int = 10) -> List[SkillMeta]:
    """Query the skills.sh search API; an empty query returns featured skills.

    Results are cached per (query, limit) pair; any HTTP or JSON failure
    yields an empty list.
    """
    if not query.strip():
        return self._featured_skills(limit)

    digest = hashlib.md5(f'{query}|{limit}'.encode()).hexdigest()
    cache_key = f"skills_sh_search_{digest}"
    cached = _read_index_cache(cache_key)
    if cached is not None:
        return [SkillMeta(**entry) for entry in cached][:limit]

    try:
        response = httpx.get(self.SEARCH_URL, params={"q": query, "limit": limit}, timeout=20)
        if response.status_code != 200:
            return []
        payload = response.json()
    except (httpx.HTTPError, json.JSONDecodeError):
        return []

    hits = payload.get("skills", []) if isinstance(payload, dict) else []
    if not isinstance(hits, list):
        return []

    # Keep only items that parse into a SkillMeta.
    metas = [
        meta
        for meta in (self._meta_from_search_item(hit) for hit in hits[:limit])
        if meta
    ]

    _write_index_cache(cache_key, [_skill_meta_to_dict(meta) for meta in metas])
    return metas
||||||
|
def fetch(self, identifier: str) -> Optional[SkillBundle]:
    """Fetch a skill bundle for a skills.sh identifier.

    First tries the conventional GitHub layouts directly; if none match,
    falls back to discovery via the skills.sh detail page. Returns None
    when the bundle cannot be resolved either way.
    """
    canonical = self._normalize_identifier(identifier)
    detail = self._fetch_detail_page(canonical)

    def _rebrand(bundle: SkillBundle) -> SkillBundle:
        # Present the GitHub-fetched bundle as a skills.sh result; this
        # finalization was previously duplicated in both resolution paths.
        bundle.source = "skills.sh"
        bundle.identifier = self._wrap_identifier(canonical)
        bundle.metadata.update(self._detail_to_metadata(canonical, detail))
        return bundle

    # Fast path: one of the conventional repo layouts matches directly.
    for candidate in self._candidate_identifiers(canonical):
        bundle = self.github.fetch(candidate)
        if bundle:
            return _rebrand(bundle)

    # Slow path: use the scraped detail page to discover the real identifier.
    resolved = self._discover_identifier(canonical, detail=detail)
    if resolved:
        bundle = self.github.fetch(resolved)
        if bundle:
            return _rebrand(bundle)
    return None
||||||
|
def inspect(self, identifier: str) -> Optional[SkillMeta]:
    """Inspect a skills.sh skill: direct GitHub candidates first, then detail-page discovery."""
    canonical = self._normalize_identifier(identifier)

    # Fast path: a conventional repo layout matches directly; the detail
    # page is only fetched once a match is found.
    for candidate in self._candidate_identifiers(canonical):
        found = self.github.inspect(candidate)
        if found:
            page = self._fetch_detail_page(canonical)
            return self._finalize_inspect_meta(found, canonical, page)

    # Slow path: scrape the detail page to discover the real identifier.
    page = self._fetch_detail_page(canonical)
    discovered = self._discover_identifier(canonical, detail=page)
    if not discovered:
        return None
    found = self.github.inspect(discovered)
    if not found:
        return None
    return self._finalize_inspect_meta(found, canonical, page)
||||||
|
def _featured_skills(self, limit: int) -> List[SkillMeta]:
    """Scrape the skills.sh landing page for featured skill links.

    Returns up to *limit* SkillMeta entries built from the page's internal
    /<owner>/<repo>/<skill> links, de-duplicated in page order.
    """
    cache_key = "skills_sh_featured"
    cached = _read_index_cache(cache_key)
    if cached is not None:
        return [SkillMeta(**item) for item in cached][:limit]

    try:
        resp = httpx.get(self.BASE_URL, timeout=20)
        if resp.status_code != 200:
            return []
    except httpx.HTTPError:
        return []

    seen: set[str] = set()
    results: List[SkillMeta] = []
    for match in self._SKILL_LINK_RE.finditer(resp.text):
        canonical = match.group("id")
        if canonical in seen:
            continue
        seen.add(canonical)
        # canonical is "<owner>/<repo>/<skill-path>".
        parts = canonical.split("/", 2)
        if len(parts) < 3:
            continue
        repo = f"{parts[0]}/{parts[1]}"
        skill_path = parts[2]
        results.append(SkillMeta(
            name=skill_path.split("/")[-1],
            description=f"Featured on skills.sh from {repo}",
            source="skills.sh",
            identifier=self._wrap_identifier(canonical),
            trust_level=self.github.trust_level_for(canonical),
            repo=repo,
            path=skill_path,
        ))
        if len(results) >= limit:
            break

    # NOTE(review): the cached list is truncated at this call's limit, so a
    # later call with a larger limit is still capped by this cache entry —
    # confirm that is intended.
    _write_index_cache(cache_key, [_skill_meta_to_dict(item) for item in results])
    return results
||||||
|
def _meta_from_search_item(self, item: dict) -> Optional[SkillMeta]:
    """Convert one skills.sh search API item into a SkillMeta, or None if malformed."""
    if not isinstance(item, dict):
        return None

    canonical = item.get("id")
    repo = item.get("source")
    skill_path = item.get("skillId")
    # Prefer the API's composite "id"; otherwise rebuild it from
    # "source" (the repo slug) plus "skillId".
    if not isinstance(canonical, str) or canonical.count("/") < 2:
        if not (isinstance(repo, str) and isinstance(skill_path, str)):
            return None
        canonical = f"{repo}/{skill_path}"

    # canonical is "<owner>/<repo>/<skill-path>".
    parts = canonical.split("/", 2)
    if len(parts) < 3:
        return None

    repo = f"{parts[0]}/{parts[1]}"
    skill_path = parts[2]
    installs = item.get("installs")
    # Only a true int install count is surfaced in the description.
    installs_label = f" · {int(installs):,} installs" if isinstance(installs, int) else ""

    return SkillMeta(
        name=str(item.get("name") or skill_path.split("/")[-1]),
        description=f"Indexed by skills.sh from {repo}{installs_label}",
        source="skills.sh",
        identifier=self._wrap_identifier(canonical),
        trust_level=self.github.trust_level_for(canonical),
        repo=repo,
        path=skill_path,
        extra={
            "installs": installs,
            "detail_url": f"{self.BASE_URL}/{canonical}",
            "repo_url": f"https://github.com/{repo}",
        },
    )
||||||
|
def _fetch_detail_page(self, identifier: str) -> Optional[dict]:
    """Download and parse the skills.sh detail page for *identifier*, with caching."""
    cache_key = f"skills_sh_detail_{hashlib.md5(identifier.encode()).hexdigest()}"
    cached = _read_index_cache(cache_key)
    if isinstance(cached, dict):
        return cached

    try:
        page = httpx.get(f"{self.BASE_URL}/{identifier}", timeout=20)
    except httpx.HTTPError:
        return None
    if page.status_code != 200:
        return None

    parsed = self._parse_detail_page(identifier, page.text)
    if parsed:
        _write_index_cache(cache_key, parsed)
    return parsed
||||||
|
def _parse_detail_page(self, identifier: str, html: str) -> Optional[dict]:
    """Extract repo/skill/install metadata from a skills.sh detail page's HTML."""
    parts = identifier.split("/", 2)
    if len(parts) < 3:
        return None

    default_repo = f"{parts[0]}/{parts[1]}"
    skill_token = parts[2]
    repo = default_repo
    install_skill = skill_token

    # The "npx skills add ..." command on the page is the most direct
    # pointer to the real repo and skill name; fall back to the
    # identifier's own parts when it is absent.
    install_command = None
    install_match = self._INSTALL_CMD_RE.search(html)
    if install_match:
        install_command = install_match.group(0).strip()
        repo_value = (install_match.group("repo") or "").strip()
        install_skill = (install_match.group("skill") or install_skill).strip()
        repo = self._extract_repo_slug(repo_value) or repo

    page_title = self._extract_first_match(self._PAGE_H1_RE, html)
    body_title = self._extract_first_match(self._PROSE_H1_RE, html)
    body_summary = self._extract_first_match(self._PROSE_P_RE, html)
    weekly_installs = self._extract_weekly_installs(html)
    security_audits = self._extract_security_audits(html, identifier)

    return {
        "repo": repo,
        "install_skill": install_skill,
        "page_title": page_title,
        "body_title": body_title,
        "body_summary": body_summary,
        "weekly_installs": weekly_installs,
        "install_command": install_command,
        "repo_url": f"https://github.com/{repo}",
        "detail_url": f"{self.BASE_URL}/{identifier}",
        "security_audits": security_audits,
    }
||||||
|
def _discover_identifier(self, identifier: str, detail: Optional[dict] = None) -> Optional[str]:
    """Find the GitHub identifier behind a skills.sh alias by listing repo skill dirs.

    Tries common skill directory layouts in the target repo and matches each
    listed skill against name tokens taken from the identifier and, when
    available, the parsed detail page.
    """
    parts = identifier.split("/", 2)
    if len(parts) < 3:
        return None

    default_repo = f"{parts[0]}/{parts[1]}"
    # The detail page may point at a different repo than the alias implies.
    repo = detail.get("repo", default_repo) if isinstance(detail, dict) else default_repo
    skill_token = parts[2]
    tokens = [skill_token]
    if isinstance(detail, dict):
        tokens.extend([
            detail.get("install_skill", ""),
            detail.get("page_title", ""),
            detail.get("body_title", ""),
        ])

    # Conventional locations for skill folders inside a repo.
    for base_path in ("skills/", ".agents/skills/", ".claude/skills/"):
        try:
            skills = self.github._list_skills_in_repo(repo, base_path)
        except Exception:
            # Best-effort discovery: a failing layout just moves on.
            continue
        for meta in skills:
            if self._matches_skill_tokens(meta, tokens):
                return meta.identifier
    return None
||||||
|
def _finalize_inspect_meta(self, meta: SkillMeta, canonical: str, detail: Optional[dict]) -> SkillMeta:
    """Rebrand a GitHub-sourced SkillMeta as a skills.sh result, folding in detail-page data."""
    meta.source = "skills.sh"
    meta.identifier = self._wrap_identifier(canonical)
    meta.trust_level = self.trust_level_for(canonical)
    meta.extra = {**meta.extra, **self._detail_to_metadata(canonical, detail)}

    if isinstance(detail, dict):
        body_summary = detail.get("body_summary")
        weekly_installs = detail.get("weekly_installs")
        # Prefer the page's own summary; otherwise decorate the existing
        # description with the install count when both are available.
        if body_summary:
            meta.description = body_summary
        elif meta.description and weekly_installs:
            meta.description = f"{meta.description} · {weekly_installs} weekly installs on skills.sh"
    return meta
||||||
|
@classmethod
def _matches_skill_tokens(cls, meta: SkillMeta, skill_tokens: List[str]) -> bool:
    """True when any token's variants intersect the meta's name/path/identifier variants."""
    identifier_tail = meta.identifier.split("/", 2)[-1] if meta.identifier else None
    known = (
        cls._token_variants(meta.name)
        | cls._token_variants(meta.path)
        | cls._token_variants(identifier_tail)
    )
    return any(cls._token_variants(token) & known for token in skill_tokens)
||||||
|
@staticmethod
def _token_variants(value: Optional[str]) -> set[str]:
    """Return normalized name variants of *value* for fuzzy skill matching.

    Variants cover the full lowercased string, its last path segment,
    underscore→dash and slash→dash forms, a sanitized (non-alphanumeric
    collapsed to "-") form, and an "@scope"-stripped last segment.
    Empty strings are dropped from the result.

    Fixes redundant computation in the previous version: ``slash_tail``
    duplicated ``base``, its second ``split('/')`` was a no-op (a split
    tail contains no "/"), and ``base.replace("/", "-")`` was likewise a
    no-op — none of which changed the returned set.
    """
    if not value:
        return set()

    plain = SkillsShSource._strip_html(str(value)).strip().strip("/").lower()
    if not plain:
        return set()

    base = plain.split("/")[-1]  # last path segment; contains no "/"
    sanitized = re.sub(r'[^a-z0-9/_-]+', '-', plain).strip('-')
    sanitized_base = sanitized.split("/")[-1] if sanitized else ""
    base_clean = base.lstrip('@')  # drop "@scope"-style package prefixes

    variants = {
        plain,
        plain.replace("_", "-"),
        plain.replace("/", "-"),
        base,
        base.replace("_", "-"),
        sanitized,
        sanitized.replace("/", "-") if sanitized else "",
        sanitized_base,
        base_clean,
        base_clean.replace("_", "-"),
    }
    return {v for v in variants if v}
||||||
|
@staticmethod
|
||||||
|
def _extract_repo_slug(repo_value: str) -> Optional[str]:
|
||||||
|
repo_value = repo_value.strip()
|
||||||
|
if repo_value.startswith("https://github.com/"):
|
||||||
|
repo_value = repo_value[len("https://github.com/"):]
|
||||||
|
repo_value = repo_value.strip("/")
|
||||||
|
parts = repo_value.split("/")
|
||||||
|
if len(parts) >= 2:
|
||||||
|
return f"{parts[0]}/{parts[1]}"
|
||||||
|
return None
|
||||||
|
|
||||||
|
@staticmethod
def _extract_first_match(pattern: re.Pattern, text: str) -> Optional[str]:
    """Return the first non-empty capture group of *pattern* in *text*,
    HTML-stripped and trimmed, or None when nothing usable matches."""
    found = pattern.search(text)
    if found is None:
        return None
    for group in found.groups():
        if group:
            return SkillsShSource._strip_html(group).strip() or None
    return None
def _detail_to_metadata(self, canonical: str, detail: Optional[dict]) -> Dict[str, Any]:
|
||||||
|
parts = canonical.split("/", 2)
|
||||||
|
repo = f"{parts[0]}/{parts[1]}" if len(parts) >= 2 else ""
|
||||||
|
metadata = {
|
||||||
|
"detail_url": f"{self.BASE_URL}/{canonical}",
|
||||||
|
}
|
||||||
|
if repo:
|
||||||
|
metadata["repo_url"] = f"https://github.com/{repo}"
|
||||||
|
if isinstance(detail, dict):
|
||||||
|
for key in ("weekly_installs", "install_command", "repo_url", "detail_url", "security_audits"):
|
||||||
|
value = detail.get(key)
|
||||||
|
if value:
|
||||||
|
metadata[key] = value
|
||||||
|
return metadata
|
||||||
|
|
||||||
|
@staticmethod
def _extract_weekly_installs(html: str) -> Optional[str]:
    """Pull the weekly-install count out of a skills.sh detail page,
    or None when the page has no such figure."""
    found = SkillsShSource._WEEKLY_INSTALLS_RE.search(html)
    return found.group("count") if found else None
@staticmethod
|
||||||
|
def _extract_security_audits(html: str, identifier: str) -> Dict[str, str]:
|
||||||
|
audits: Dict[str, str] = {}
|
||||||
|
for audit in ("agent-trust-hub", "socket", "snyk"):
|
||||||
|
idx = html.find(f"/security/{audit}")
|
||||||
|
if idx == -1:
|
||||||
|
continue
|
||||||
|
window = html[idx:idx + 500]
|
||||||
|
match = re.search(r'(Pass|Warn|Fail)', window, re.IGNORECASE)
|
||||||
|
if match:
|
||||||
|
audits[audit] = match.group(1).title()
|
||||||
|
return audits
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _strip_html(value: str) -> str:
|
||||||
|
return re.sub(r'<[^>]+>', '', value)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_identifier(identifier: str) -> str:
|
||||||
|
if identifier.startswith("skills-sh/"):
|
||||||
|
return identifier[len("skills-sh/"):]
|
||||||
|
if identifier.startswith("skills.sh/"):
|
||||||
|
return identifier[len("skills.sh/"):]
|
||||||
|
return identifier
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _candidate_identifiers(identifier: str) -> List[str]:
|
||||||
|
parts = identifier.split("/", 2)
|
||||||
|
if len(parts) < 3:
|
||||||
|
return [identifier]
|
||||||
|
|
||||||
|
repo = f"{parts[0]}/{parts[1]}"
|
||||||
|
skill_path = parts[2].lstrip("/")
|
||||||
|
candidates = [
|
||||||
|
f"{repo}/{skill_path}",
|
||||||
|
f"{repo}/skills/{skill_path}",
|
||||||
|
f"{repo}/.agents/skills/{skill_path}",
|
||||||
|
f"{repo}/.claude/skills/{skill_path}",
|
||||||
|
]
|
||||||
|
|
||||||
|
seen = set()
|
||||||
|
deduped: List[str] = []
|
||||||
|
for candidate in candidates:
|
||||||
|
if candidate not in seen:
|
||||||
|
seen.add(candidate)
|
||||||
|
deduped.append(candidate)
|
||||||
|
return deduped
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _wrap_identifier(identifier: str) -> str:
|
||||||
|
return f"skills-sh/{identifier}"
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# ClawHub source adapter
|
# ClawHub source adapter
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
|
|
@ -1213,6 +1853,7 @@ def _skill_meta_to_dict(meta: SkillMeta) -> dict:
|
||||||
"repo": meta.repo,
|
"repo": meta.repo,
|
||||||
"path": meta.path,
|
"path": meta.path,
|
||||||
"tags": meta.tags,
|
"tags": meta.tags,
|
||||||
|
"extra": meta.extra,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1248,6 +1889,7 @@ class HubLockFile:
|
||||||
skill_hash: str,
|
skill_hash: str,
|
||||||
install_path: str,
|
install_path: str,
|
||||||
files: List[str],
|
files: List[str],
|
||||||
|
metadata: Optional[Dict[str, Any]] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
data = self.load()
|
data = self.load()
|
||||||
data["installed"][name] = {
|
data["installed"][name] = {
|
||||||
|
|
@ -1258,6 +1900,7 @@ class HubLockFile:
|
||||||
"content_hash": skill_hash,
|
"content_hash": skill_hash,
|
||||||
"install_path": install_path,
|
"install_path": install_path,
|
||||||
"files": files,
|
"files": files,
|
||||||
|
"metadata": metadata or {},
|
||||||
"installed_at": datetime.now(timezone.utc).isoformat(),
|
"installed_at": datetime.now(timezone.utc).isoformat(),
|
||||||
"updated_at": datetime.now(timezone.utc).isoformat(),
|
"updated_at": datetime.now(timezone.utc).isoformat(),
|
||||||
}
|
}
|
||||||
|
|
@ -1412,6 +2055,7 @@ def install_from_quarantine(
|
||||||
skill_hash=content_hash(install_dir),
|
skill_hash=content_hash(install_dir),
|
||||||
install_path=str(install_dir.relative_to(SKILLS_DIR)),
|
install_path=str(install_dir.relative_to(SKILLS_DIR)),
|
||||||
files=list(bundle.files.keys()),
|
files=list(bundle.files.keys()),
|
||||||
|
metadata=bundle.metadata,
|
||||||
)
|
)
|
||||||
|
|
||||||
append_audit_log(
|
append_audit_log(
|
||||||
|
|
@ -1440,6 +2084,78 @@ def uninstall_skill(skill_name: str) -> Tuple[bool, str]:
|
||||||
return True, f"Uninstalled '{skill_name}' from {entry['install_path']}"
|
return True, f"Uninstalled '{skill_name}' from {entry['install_path']}"
|
||||||
|
|
||||||
|
|
||||||
|
def bundle_content_hash(bundle: SkillBundle) -> str:
|
||||||
|
"""Compute a deterministic hash for an in-memory skill bundle."""
|
||||||
|
h = hashlib.sha256()
|
||||||
|
for rel_path in sorted(bundle.files):
|
||||||
|
h.update(bundle.files[rel_path].encode("utf-8"))
|
||||||
|
return f"sha256:{h.hexdigest()[:16]}"
|
||||||
|
|
||||||
|
|
||||||
|
def _source_matches(source: SkillSource, source_name: str) -> bool:
|
||||||
|
aliases = {
|
||||||
|
"skills.sh": "skills-sh",
|
||||||
|
}
|
||||||
|
normalized = aliases.get(source_name, source_name)
|
||||||
|
return source.source_id() == normalized
|
||||||
|
|
||||||
|
|
||||||
|
def check_for_skill_updates(
    name: Optional[str] = None,
    *,
    lock: Optional[HubLockFile] = None,
    sources: Optional[List[SkillSource]] = None,
    auth: Optional[GitHubAuth] = None,
) -> List[dict]:
    """Check installed hub skills for upstream changes.

    Args:
        name: Restrict the check to one installed skill; None checks all.
        lock: Lockfile to read installed entries from; defaults to HubLockFile().
        sources: Source adapters to fetch from; defaults to the full router.
        auth: GitHub auth, used only when the default router is created.

    Returns:
        One dict per checked skill. Status is "unavailable" when no source
        could fetch the identifier; otherwise "up_to_date" or
        "update_available" based on comparing the stored content hash with
        the freshly fetched bundle's hash (hashes and bundle included).
    """
    lock = lock or HubLockFile()
    entries = lock.list_installed()
    if name:
        entries = [entry for entry in entries if entry.get("name") == name]

    if sources is None:
        sources = create_source_router(auth=auth)

    reports: List[dict] = []
    for entry in entries:
        identifier = entry.get("identifier", "")
        source_name = entry.get("source", "")
        # Prefer the adapter the skill was installed from; fall back to all.
        preferred = [src for src in sources if _source_matches(src, source_name)] or sources

        bundle = None
        for adapter in preferred:
            try:
                bundle = adapter.fetch(identifier)
            except Exception:
                bundle = None
            if bundle:
                break

        base = {
            "name": entry.get("name", ""),
            "identifier": identifier,
            "source": source_name,
        }
        if not bundle:
            reports.append({**base, "status": "unavailable"})
            continue

        current_hash = entry.get("content_hash", "")
        latest_hash = bundle_content_hash(bundle)
        reports.append({
            **base,
            "status": "up_to_date" if current_hash == latest_hash else "update_available",
            "current_hash": current_hash,
            "latest_hash": latest_hash,
            "bundle": bundle,
        })

    return reports
||||||
def create_source_router(auth: Optional[GitHubAuth] = None) -> List[SkillSource]:
|
def create_source_router(auth: Optional[GitHubAuth] = None) -> List[SkillSource]:
|
||||||
"""
|
"""
|
||||||
Create all configured source adapters.
|
Create all configured source adapters.
|
||||||
|
|
@ -1453,6 +2169,8 @@ def create_source_router(auth: Optional[GitHubAuth] = None) -> List[SkillSource]
|
||||||
|
|
||||||
sources: List[SkillSource] = [
|
sources: List[SkillSource] = [
|
||||||
OptionalSkillSource(), # Official optional skills (highest priority)
|
OptionalSkillSource(), # Official optional skills (highest priority)
|
||||||
|
SkillsShSource(auth=auth),
|
||||||
|
WellKnownSkillSource(),
|
||||||
GitHubSource(auth=auth, extra_taps=extra_taps),
|
GitHubSource(auth=auth, extra_taps=extra_taps),
|
||||||
ClawHubSource(),
|
ClawHubSource(),
|
||||||
ClaudeMarketplaceSource(auth=auth),
|
ClaudeMarketplaceSource(auth=auth),
|
||||||
|
|
|
||||||
|
|
@ -173,4 +173,11 @@ Trust levels:
|
||||||
- `builtin` — ships with Hermes (always trusted)
|
- `builtin` — ships with Hermes (always trusted)
|
||||||
- `official` — from `optional-skills/` in the repo (builtin trust, no third-party warning)
|
- `official` — from `optional-skills/` in the repo (builtin trust, no third-party warning)
|
||||||
- `trusted` — from openai/skills, anthropics/skills
|
- `trusted` — from openai/skills, anthropics/skills
|
||||||
- `community` — any findings = blocked unless `--force`
|
- `community` — non-dangerous findings can be overridden with `--force`; `dangerous` verdicts remain blocked
|
||||||
|
|
||||||
|
Hermes can now consume third-party skills from multiple external discovery models:
|
||||||
|
- direct GitHub identifiers (for example `openai/skills/k8s`)
|
||||||
|
- `skills.sh` identifiers (for example `skills-sh/vercel-labs/json-render/json-render-react`)
|
||||||
|
- well-known endpoints served from `/.well-known/skills/index.json`
|
||||||
|
|
||||||
|
If you want your skills to be discoverable without a GitHub-specific installer, consider serving them from a well-known endpoint in addition to publishing them in a repo or marketplace.
|
||||||
|
|
|
||||||
|
|
@ -141,10 +141,18 @@ The agent will set up a cron job that runs automatically via the gateway.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
hermes skills search kubernetes
|
hermes skills search kubernetes
|
||||||
|
hermes skills search react --source skills-sh
|
||||||
|
hermes skills search https://mintlify.com/docs --source well-known
|
||||||
hermes skills install openai/skills/k8s
|
hermes skills install openai/skills/k8s
|
||||||
hermes skills install official/security/1password
|
hermes skills install official/security/1password
|
||||||
|
hermes skills install skills-sh/vercel-labs/json-render/json-render-react --force
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Tips:
|
||||||
|
- Use `--source skills-sh` to search the public `skills.sh` directory.
|
||||||
|
- Use `--source well-known` with a docs/site URL to discover skills from `/.well-known/skills/index.json`.
|
||||||
|
- Use `--force` only after reviewing a third-party skill. It can override non-dangerous policy blocks, but not a `dangerous` scan verdict.
|
||||||
|
|
||||||
Or use the `/skills` slash command inside chat.
|
Or use the `/skills` slash command inside chat.
|
||||||
|
|
||||||
### Use Hermes inside an editor via ACP
|
### Use Hermes inside an editor via ACP
|
||||||
|
|
|
||||||
|
|
@ -246,6 +246,8 @@ Subcommands:
|
||||||
| `install` | Install a skill. |
|
| `install` | Install a skill. |
|
||||||
| `inspect` | Preview a skill without installing it. |
|
| `inspect` | Preview a skill without installing it. |
|
||||||
| `list` | List installed skills. |
|
| `list` | List installed skills. |
|
||||||
|
| `check` | Check installed hub skills for upstream updates. |
|
||||||
|
| `update` | Reinstall hub skills with upstream changes when available. |
|
||||||
| `audit` | Re-scan installed hub skills. |
|
| `audit` | Re-scan installed hub skills. |
|
||||||
| `uninstall` | Remove a hub-installed skill. |
|
| `uninstall` | Remove a hub-installed skill. |
|
||||||
| `publish` | Publish a skill to a registry. |
|
| `publish` | Publish a skill to a registry. |
|
||||||
|
|
@ -258,12 +260,23 @@ Common examples:
|
||||||
```bash
|
```bash
|
||||||
hermes skills browse
|
hermes skills browse
|
||||||
hermes skills browse --source official
|
hermes skills browse --source official
|
||||||
hermes skills search kubernetes
|
hermes skills search react --source skills-sh
|
||||||
|
hermes skills search https://mintlify.com/docs --source well-known
|
||||||
hermes skills inspect official/security/1password
|
hermes skills inspect official/security/1password
|
||||||
|
hermes skills inspect skills-sh/vercel-labs/json-render/json-render-react
|
||||||
hermes skills install official/migration/openclaw-migration
|
hermes skills install official/migration/openclaw-migration
|
||||||
|
hermes skills install skills-sh/anthropics/skills/pdf --force
|
||||||
|
hermes skills check
|
||||||
|
hermes skills update
|
||||||
hermes skills config
|
hermes skills config
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- `--force` can override non-dangerous policy blocks for third-party/community skills.
|
||||||
|
- `--force` does not override a `dangerous` scan verdict.
|
||||||
|
- `--source skills-sh` searches the public `skills.sh` directory.
|
||||||
|
- `--source well-known` lets you point Hermes at a site exposing `/.well-known/skills/index.json`.
|
||||||
|
|
||||||
## `hermes honcho`
|
## `hermes honcho`
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|
|
||||||
|
|
@ -187,42 +187,98 @@ The `patch` action is preferred for updates — it's more token-efficient than `
|
||||||
|
|
||||||
## Skills Hub
|
## Skills Hub
|
||||||
|
|
||||||
Browse, search, install, and manage skills from online registries and official optional skills:
|
Browse, search, install, and manage skills from online registries, `skills.sh`, direct well-known skill endpoints, and official optional skills.
|
||||||
|
|
||||||
|
### Common commands
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
hermes skills browse # Browse all hub skills (official first)
|
hermes skills browse # Browse all hub skills (official first)
|
||||||
hermes skills browse --source official # Browse only official optional skills
|
hermes skills browse --source official # Browse only official optional skills
|
||||||
hermes skills search kubernetes # Search all sources
|
hermes skills search kubernetes # Search all sources
|
||||||
hermes skills install openai/skills/k8s # Install with security scan
|
hermes skills search react --source skills-sh # Search the skills.sh directory
|
||||||
hermes skills inspect openai/skills/k8s # Preview before installing
|
hermes skills search https://mintlify.com/docs --source well-known
|
||||||
hermes skills list --source hub # List hub-installed skills
|
hermes skills inspect openai/skills/k8s # Preview before installing
|
||||||
hermes skills audit # Re-scan all hub skills
|
hermes skills install openai/skills/k8s # Install with security scan
|
||||||
hermes skills uninstall k8s # Remove a hub skill
|
hermes skills install official/security/1password
|
||||||
|
hermes skills install skills-sh/vercel-labs/json-render/json-render-react --force
|
||||||
|
hermes skills install well-known:https://mintlify.com/docs/.well-known/skills/mintlify
|
||||||
|
hermes skills list --source hub # List hub-installed skills
|
||||||
|
hermes skills check # Check installed hub skills for upstream updates
|
||||||
|
hermes skills update # Reinstall hub skills with upstream changes when needed
|
||||||
|
hermes skills audit # Re-scan all hub skills for security
|
||||||
|
hermes skills uninstall k8s # Remove a hub skill
|
||||||
hermes skills publish skills/my-skill --to github --repo owner/repo
|
hermes skills publish skills/my-skill --to github --repo owner/repo
|
||||||
hermes skills snapshot export setup.json # Export skill config
|
hermes skills snapshot export setup.json # Export skill config
|
||||||
hermes skills tap add myorg/skills-repo # Add a custom source
|
hermes skills tap add myorg/skills-repo # Add a custom GitHub source
|
||||||
```
|
```
|
||||||
|
|
||||||
All hub-installed skills go through a **security scanner** that checks for data exfiltration, prompt injection, destructive commands, and other threats.
|
### Supported hub sources
|
||||||
|
|
||||||
Official optional skills use identifiers like `official/security/1password` and `official/migration/openclaw-migration`.
|
| Source | Example | Notes |
|
||||||
|
|--------|---------|-------|
|
||||||
|
| `official` | `official/security/1password` | Optional skills shipped with Hermes. |
|
||||||
|
| `skills-sh` | `skills-sh/vercel-labs/agent-skills/vercel-react-best-practices` | Searchable via `hermes skills search <query> --source skills-sh`. Hermes resolves alias-style skills when the skills.sh slug differs from the repo folder. |
|
||||||
|
| `well-known` | `well-known:https://mintlify.com/docs/.well-known/skills/mintlify` | Skills served directly from `/.well-known/skills/index.json` on a website. Search using the site or docs URL. |
|
||||||
|
| `github` | `openai/skills/k8s` | Direct GitHub repo/path installs and custom taps. |
|
||||||
|
| `clawhub`, `lobehub`, `claude-marketplace` | Source-specific identifiers | Community or marketplace integrations. |
|
||||||
|
|
||||||
### Trust Levels
|
### Security scanning and `--force`
|
||||||
|
|
||||||
|
All hub-installed skills go through a **security scanner** that checks for data exfiltration, prompt injection, destructive commands, supply-chain signals, and other threats.
|
||||||
|
|
||||||
|
`hermes skills inspect ...` now also surfaces upstream metadata when available:
|
||||||
|
- repo URL
|
||||||
|
- skills.sh detail page URL
|
||||||
|
- install command
|
||||||
|
- weekly installs
|
||||||
|
- upstream security audit statuses
|
||||||
|
- well-known index/endpoint URLs
|
||||||
|
|
||||||
|
Use `--force` when you have reviewed a third-party skill and want to override a non-dangerous policy block:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
hermes skills install skills-sh/anthropics/skills/pdf --force
|
||||||
|
```
|
||||||
|
|
||||||
|
Important behavior:
|
||||||
|
- `--force` can override policy blocks for caution/warn-style findings.
|
||||||
|
- `--force` does **not** override a `dangerous` scan verdict.
|
||||||
|
- Official optional skills (`official/...`) are treated as builtin trust and do not show the third-party warning panel.
|
||||||
|
|
||||||
|
### Trust levels
|
||||||
|
|
||||||
| Level | Source | Policy |
|
| Level | Source | Policy |
|
||||||
|-------|--------|--------|
|
|-------|--------|--------|
|
||||||
| `builtin` | Ships with Hermes | Always trusted |
|
| `builtin` | Ships with Hermes | Always trusted |
|
||||||
| `official` | `optional-skills/` in the repo | Builtin trust, no third-party warning |
|
| `official` | `optional-skills/` in the repo | Builtin trust, no third-party warning |
|
||||||
| `trusted` | openai/skills, anthropics/skills | Trusted sources |
|
| `trusted` | Trusted registries/repos such as `openai/skills`, `anthropics/skills` | More permissive policy than community sources |
|
||||||
| `community` | Everything else | Any findings = blocked unless `--force` |
|
| `community` | Everything else (`skills.sh`, well-known endpoints, custom GitHub repos, most marketplaces) | Non-dangerous findings can be overridden with `--force`; `dangerous` verdicts stay blocked |
|
||||||
|
|
||||||
### Slash Commands (Inside Chat)
|
### Update lifecycle
|
||||||
|
|
||||||
All the same commands work with `/skills` prefix:
|
The hub now tracks enough provenance to re-check upstream copies of installed skills:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
hermes skills check # Report which installed hub skills changed upstream
|
||||||
|
hermes skills update # Reinstall only the skills with updates available
|
||||||
|
hermes skills update react # Update one specific installed hub skill
|
||||||
```
|
```
|
||||||
|
|
||||||
|
This uses the stored source identifier plus the current upstream bundle content hash to detect drift.
|
||||||
|
|
||||||
|
### Slash commands (inside chat)
|
||||||
|
|
||||||
|
All the same commands work with `/skills`:
|
||||||
|
|
||||||
|
```text
|
||||||
/skills browse
|
/skills browse
|
||||||
/skills search kubernetes
|
/skills search react --source skills-sh
|
||||||
/skills install openai/skills/skill-creator
|
/skills search https://mintlify.com/docs --source well-known
|
||||||
|
/skills inspect skills-sh/vercel-labs/json-render/json-render-react
|
||||||
|
/skills install openai/skills/skill-creator --force
|
||||||
|
/skills check
|
||||||
|
/skills update
|
||||||
/skills list
|
/skills list
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Official optional skills still use identifiers like `official/security/1password` and `official/migration/openclaw-migration`.
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue