feat(honcho): honcho_context can query any peer (user or ai)

Optional 'peer' parameter: "user" (default) or "ai". Allows asking
about the AI assistant's history/identity, not just the user's.
This commit is contained in:
Erosika 2026-03-09 18:02:09 -04:00
parent 0cb639d472
commit c047c03e82
2 changed files with 26 additions and 15 deletions

View file

@@ -441,19 +441,24 @@ class HonchoSessionManager:
idx = min(default_idx + bump, 3)
return levels[idx]
def dialectic_query(self, session_key: str, query: str, reasoning_level: str | None = None) -> str:
def dialectic_query(
self, session_key: str, query: str,
reasoning_level: str | None = None,
peer: str = "user",
) -> str:
"""
Query Honcho's dialectic endpoint about the user.
Query Honcho's dialectic endpoint about a peer.
Runs an LLM on Honcho's backend against the user peer's full
Runs an LLM on Honcho's backend against the target peer's full
representation. Higher latency than context(); call async via
prefetch_dialectic() to avoid blocking the response.
Args:
session_key: The session key to query against.
query: Natural language question about the user.
query: Natural language question.
reasoning_level: Override the config default. If None, uses
_dynamic_reasoning_level(query).
peer: Which peer to query: "user" (default) or "ai".
Returns:
Honcho's synthesized answer, or empty string on failure.
@@ -462,11 +467,12 @@ class HonchoSessionManager:
if not session:
return ""
user_peer = self._get_or_create_peer(session.user_peer_id)
peer_id = session.assistant_peer_id if peer == "ai" else session.user_peer_id
target_peer = self._get_or_create_peer(peer_id)
level = reasoning_level or self._dynamic_reasoning_level(query)
try:
result = user_peer.chat(query, reasoning_level=level) or ""
result = target_peer.chat(query, reasoning_level=level) or ""
# Apply Hermes-side char cap before caching
if result and self._dialectic_max_chars and len(result) > self._dialectic_max_chars:
result = result[:self._dialectic_max_chars].rsplit(" ", 1)[0] + "…"

View file

@@ -131,19 +131,23 @@ def _handle_honcho_search(args: dict, **kw) -> str:
_QUERY_SCHEMA = {
"name": "honcho_context",
"description": (
"Ask Honcho a natural language question about the user and get a synthesized answer. "
"Ask Honcho a natural language question and get a synthesized answer. "
"Uses Honcho's LLM (dialectic reasoning) — higher cost than honcho_profile or honcho_search. "
"Use this when you need a direct answer synthesized from the user's full history. "
"Examples: 'What are this user's main goals?', 'How does this user prefer to communicate?', "
"'What is this user's technical expertise level?'"
"Can query about any peer: the user (default), the AI assistant, or any named peer. "
"Examples: 'What are the user's main goals?', 'What has hermes been working on?', "
"'What is the user's technical expertise level?'"
),
"parameters": {
"type": "object",
"properties": {
"query": {
"type": "string",
"description": "A natural language question about the user.",
}
"description": "A natural language question.",
},
"peer": {
"type": "string",
"description": "Which peer to query about: 'user' (default) or 'ai'. Omit for user.",
},
},
"required": ["query"],
},
@@ -156,12 +160,13 @@ def _handle_honcho_context(args: dict, **kw) -> str:
return json.dumps({"error": "Missing required parameter: query"})
if not _session_manager or not _session_key:
return json.dumps({"error": "Honcho is not active for this session."})
peer_target = args.get("peer", "user")
try:
result = _session_manager.dialectic_query(_session_key, query)
result = _session_manager.dialectic_query(_session_key, query, peer=peer_target)
return json.dumps({"result": result or "No result from Honcho."})
except Exception as e:
logger.error("Error querying Honcho user context: %s", e)
return json.dumps({"error": f"Failed to query user context: {e}"})
logger.error("Error querying Honcho context: %s", e)
return json.dumps({"error": f"Failed to query context: {e}"})
# ── honcho_conclude ──