fix(anthropic): final polish — tool ID sanitization, crash guards, temp=1
Remaining issues from deep scan:

Adapter (agent/anthropic_adapter.py):
- Add _sanitize_tool_id() — Anthropic requires IDs matching [a-zA-Z0-9_-]; now strips invalid chars and ensures non-empty (both tool_use and tool_result)
- Empty tool result content → '(no output)' placeholder (Anthropic rejects empty)
- Set temperature=1 when thinking type='enabled' on older models (required)
- normalize_model_name now case-insensitive for 'Anthropic/' prefix
- Fix stale docstrings referencing only ~/.claude/.credentials.json

Agent loop (run_agent.py):
- Guard memory flush path (line ~2684) — was calling self.client.chat.completions, which is None in anthropic_messages mode. Now routes through the Anthropic client.
- Guard summary generation path (line ~3171) — same crash when reaching the iteration limit. Now builds proper Anthropic kwargs and normalizes the response.
- Guard retry summary path (line ~3200) — same fix for the summary retry loop.

All three self.client.chat.completions.create() calls outside the main loop now have anthropic_messages branches to prevent NoneType crashes.
This commit is contained in:
parent
4068f20ce9
commit
aaaba78126
2 changed files with 57 additions and 13 deletions
34
run_agent.py
34
run_agent.py
|
|
@@ -2673,6 +2673,15 @@ class AIAgent:
|
|||
if "max_output_tokens" in codex_kwargs:
|
||||
codex_kwargs["max_output_tokens"] = 5120
|
||||
response = self._run_codex_stream(codex_kwargs)
|
||||
elif not _aux_available and self.api_mode == "anthropic_messages":
|
||||
# Native Anthropic — use the Anthropic client directly
|
||||
from agent.anthropic_adapter import build_anthropic_kwargs as _build_ant_kwargs
|
||||
ant_kwargs = _build_ant_kwargs(
|
||||
model=self.model, messages=api_messages,
|
||||
tools=[memory_tool_def], max_tokens=5120,
|
||||
reasoning_config=None,
|
||||
)
|
||||
response = self._anthropic_client.messages.create(**ant_kwargs)
|
||||
elif not _aux_available:
|
||||
api_kwargs = {
|
||||
"model": self.model,
|
||||
|
|
@@ -3158,12 +3167,20 @@ class AIAgent:
|
|||
if summary_extra_body:
|
||||
summary_kwargs["extra_body"] = summary_extra_body
|
||||
|
||||
summary_response = self.client.chat.completions.create(**summary_kwargs)
|
||||
|
||||
if summary_response.choices and summary_response.choices[0].message.content:
|
||||
final_response = summary_response.choices[0].message.content
|
||||
if self.api_mode == "anthropic_messages":
|
||||
from agent.anthropic_adapter import build_anthropic_kwargs as _bak, normalize_anthropic_response as _nar
|
||||
_ant_kw = _bak(model=self.model, messages=api_messages, tools=None,
|
||||
max_tokens=self.max_tokens, reasoning_config=self.reasoning_config)
|
||||
summary_response = self._anthropic_client.messages.create(**_ant_kw)
|
||||
_msg, _ = _nar(summary_response)
|
||||
final_response = (_msg.content or "").strip()
|
||||
else:
|
||||
final_response = ""
|
||||
summary_response = self.client.chat.completions.create(**summary_kwargs)
|
||||
|
||||
if summary_response.choices and summary_response.choices[0].message.content:
|
||||
final_response = summary_response.choices[0].message.content
|
||||
else:
|
||||
final_response = ""
|
||||
|
||||
if final_response:
|
||||
if "<think>" in final_response:
|
||||
|
|
@@ -3180,6 +3197,13 @@ class AIAgent:
|
|||
retry_response = self._run_codex_stream(codex_kwargs)
|
||||
retry_msg, _ = self._normalize_codex_response(retry_response)
|
||||
final_response = (retry_msg.content or "").strip() if retry_msg else ""
|
||||
elif self.api_mode == "anthropic_messages":
|
||||
from agent.anthropic_adapter import build_anthropic_kwargs as _bak2, normalize_anthropic_response as _nar2
|
||||
_ant_kw2 = _bak2(model=self.model, messages=api_messages, tools=None,
|
||||
max_tokens=self.max_tokens, reasoning_config=self.reasoning_config)
|
||||
retry_response = self._anthropic_client.messages.create(**_ant_kw2)
|
||||
_retry_msg, _ = _nar2(retry_response)
|
||||
final_response = (_retry_msg.content or "").strip()
|
||||
else:
|
||||
summary_kwargs = {
|
||||
"model": self.model,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue