fix(file_tools): include pagination args in repeated search key

This commit is contained in:
darya 2026-03-18 01:19:05 +03:00
parent 588962d24e
commit a654bc04f7
2 changed files with 19 additions and 1 deletion

View file

@@ -441,6 +441,14 @@ class TestSearchLoopDetection(unittest.TestCase):
self.assertNotIn("_warning", result)
self.assertNotIn("error", result)
@patch("tools.file_tools._get_file_ops", return_value=_make_fake_file_ops())
def test_pagination_offset_does_not_count_as_repeat(self, _mock_ops):
    """Paging through truncated results must not trip the repeat-search guard.

    The same pattern is issued repeatedly with increasing offsets, which is
    how a caller pages through truncated output; none of those calls should
    be flagged as a repeated search.
    """
    page_offsets = (0, 50, 100, 150)
    for page_offset in page_offsets:
        raw = search_tool("def main", task_id="t1", offset=page_offset, limit=50)
        decoded = json.loads(raw)
        # Neither a loop warning nor an error should appear on any page.
        self.assertNotIn("_warning", decoded)
        self.assertNotIn("error", decoded)
@patch("tools.file_tools._get_file_ops", return_value=_make_fake_file_ops())
def test_read_between_searches_resets_consecutive(self, _mock_ops):
"""A read_file call between searches resets search consecutive counter."""

View file

@@ -337,7 +337,17 @@ def search_tool(pattern: str, target: str = "content", path: str = ".",
"""Search for content or files."""
try:
# Track searches to detect *consecutive* repeated search loops.
search_key = ("search", pattern, target, str(path), file_glob or "")
# Include pagination args so users can page through truncated
# results without tripping the repeated-search guard.
search_key = (
"search",
pattern,
target,
str(path),
file_glob or "",
limit,
offset,
)
with _read_tracker_lock:
task_data = _read_tracker.setdefault(task_id, {
"last_key": None, "consecutive": 0, "read_history": set(),