Merge origin/main into codex/align-codex-provider-conventions-mainrepo

This commit is contained in:
George Pickett 2026-02-25 19:28:44 -08:00
commit e71d9a89d2
5 changed files with 40 additions and 1 deletion

View file

@@ -25,7 +25,15 @@ def get_async_client() -> AsyncOpenAI:
api_key = os.getenv("OPENROUTER_API_KEY")
if not api_key:
raise ValueError("OPENROUTER_API_KEY environment variable not set")
_client = AsyncOpenAI(api_key=api_key, base_url=OPENROUTER_BASE_URL)
_client = AsyncOpenAI(
api_key=api_key,
base_url=OPENROUTER_BASE_URL,
default_headers={
"HTTP-Referer": "https://github.com/NousResearch/hermes-agent",
"X-OpenRouter-Title": "Hermes Agent",
"X-OpenRouter-Categories": "cli-agent",
},
)
return _client

View file

@@ -170,12 +170,15 @@ async def _summarize_session(
max_retries = 3
for attempt in range(max_retries):
try:
from agent.auxiliary_client import get_auxiliary_extra_body
_extra = get_auxiliary_extra_body()
response = await _async_aux_client.chat.completions.create(
model=_SUMMARIZER_MODEL,
messages=[
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_prompt},
],
**({} if not _extra else {"extra_body": _extra}),
temperature=0.1,
max_tokens=MAX_SUMMARY_TOKENS,
)

View file

@@ -314,11 +314,14 @@ async def vision_analyze_tool(
logger.info("Processing image with %s...", model)
# Call the vision API
from agent.auxiliary_client import get_auxiliary_extra_body
_extra = get_auxiliary_extra_body()
response = await _aux_async_client.chat.completions.create(
model=model,
messages=messages,
temperature=0.1,
max_tokens=2000,
**({} if not _extra else {"extra_body": _extra}),
)
# Extract the analysis

View file

@@ -242,6 +242,8 @@ Create a markdown summary that captures all key information in a well-organized,
if _aux_async_client is None:
logger.warning("No auxiliary model available for web content processing")
return None
from agent.auxiliary_client import get_auxiliary_extra_body
_extra = get_auxiliary_extra_body()
response = await _aux_async_client.chat.completions.create(
model=model,
messages=[
@@ -250,6 +252,7 @@ Create a markdown summary that captures all key information in a well-organized,
],
temperature=0.1,
max_tokens=max_tokens,
**({} if not _extra else {"extra_body": _extra}),
)
return response.choices[0].message.content.strip()
except Exception as api_error:
@@ -362,6 +365,8 @@ Create a single, unified markdown summary."""
fallback = fallback[:max_output_size] + "\n\n[... truncated ...]"
return fallback
from agent.auxiliary_client import get_auxiliary_extra_body
_extra = get_auxiliary_extra_body()
response = await _aux_async_client.chat.completions.create(
model=model,
messages=[
@@ -370,6 +375,7 @@ Create a single, unified markdown summary."""
],
temperature=0.1,
max_tokens=4000,
**({} if not _extra else {"extra_body": _extra}),
)
final_summary = response.choices[0].message.content.strip()