Merge pull request #1824 from cutepawss/fix/search-files-pagination

Clean fix — adds pagination args to search_key for parity with read_file. Thanks @cutepawss!
This commit is contained in:
Teknium 2026-03-17 16:16:47 -07:00 committed by GitHub
commit 7f85b2914d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 19 additions and 1 deletion

View file

@@ -441,6 +441,14 @@ class TestSearchLoopDetection(unittest.TestCase):
self.assertNotIn("_warning", result)
self.assertNotIn("error", result)
@patch("tools.file_tools._get_file_ops", return_value=_make_fake_file_ops())
def test_pagination_offset_does_not_count_as_repeat(self, _mock_ops):
    """Paginating truncated results should not be blocked as a repeat search."""
    # Walk through successive pages of the same query; only the offset
    # changes, which must NOT trip the repeated-search guard.
    page_starts = (0, 50, 100, 150)
    for page_start in page_starts:
        payload = json.loads(
            search_tool("def main", task_id="t1", offset=page_start, limit=50)
        )
        self.assertNotIn("_warning", payload)
        self.assertNotIn("error", payload)
@patch("tools.file_tools._get_file_ops", return_value=_make_fake_file_ops())
def test_read_between_searches_resets_consecutive(self, _mock_ops):
"""A read_file call between searches resets search consecutive counter."""

View file

@@ -337,7 +337,17 @@ def search_tool(pattern: str, target: str = "content", path: str = ".",
"""Search for content or files."""
try:
# Track searches to detect *consecutive* repeated search loops.
search_key = ("search", pattern, target, str(path), file_glob or "")
# Include pagination args so users can page through truncated
# results without tripping the repeated-search guard.
search_key = (
"search",
pattern,
target,
str(path),
file_glob or "",
limit,
offset,
)
with _read_tracker_lock:
task_data = _read_tracker.setdefault(task_id, {
"last_key": None, "consecutive": 0, "read_history": set(),