Compare commits

...

6 commits

17 changed files with 382 additions and 295 deletions

View file

@ -7,4 +7,4 @@
*.pyc
__pycache__/
.env
workspace/
/data/

2
.gitignore vendored
View file

@ -1,4 +1,4 @@
workspace/
/data/
.idea/
workspace/

View file

@ -8,9 +8,13 @@ RUN apt update && apt install make sudo -y
ENV AGENT_USER="agent"
ENV WORKSPACE_DIR="/workspace/"
ENV INTERNAL_DATA_DIR="/internal_data/"
RUN useradd --shell /bin/bash $AGENT_USER \
&& mkdir -p $WORKSPACE_DIR /home/$AGENT_USER \
&& chown -R agent:agent $WORKSPACE_DIR /home/$AGENT_USER
RUN mkdir -p $INTERNAL_DATA_DIR \
&& chown -R root:root $INTERNAL_DATA_DIR \
&& chmod o-rwx $INTERNAL_DATA_DIR
FROM base AS builder

View file

@ -20,7 +20,8 @@ services:
volumes:
- ./src:/app/src
- ${AGENT_API_PATH}:/agent_api/
- ./workspace:/workspace/
- ./data/workspace:/workspace/
- ./data/internal:/internal_data/
ports:
- "8000:8000"
env_file:

View file

@ -7,8 +7,10 @@ requires-python = ">=3.14"
dependencies = [
"fastapi>=0.135.3",
"uvicorn[standard]>=0.34.0",
"deepagents>=0.1.0",
"deepagents>=0.5.0",
"langchain-openai>=1.1.12",
"composio>=0.11.5",
"composio-langchain>=0.11.5"
"composio-langchain>=0.11.5",
"langgraph-checkpoint-sqlite>=3.0.3",
"aiosqlite>=0.22.1",
]

View file

@ -1,3 +0,0 @@
from src.agent.backends.isolated_shell import IsolatedShellBackend
__all__ = ["IsolatedShellBackend"]

View file

@ -1,114 +0,0 @@
import os
import pwd
import subprocess
from typing import Any
from deepagents.backends.local_shell import LocalShellBackend
from src.core.logger import get_logger
logger = get_logger(__name__)
class IsolatedShellBackend(LocalShellBackend):
    """LocalShellBackend that isolates shell commands by running them as a separate OS user.

    The target user's uid/gid are resolved once at construction time; every
    command is spawned with a ``preexec_fn`` that drops group and then user
    privileges in the child before ``exec``, so commands cannot act with the
    service process's privileges.
    """

    def __init__(
        self,
        user: str,
        **kwargs: Any,
    ):
        """Resolve *user* to uid/gid and initialize the underlying shell backend.

        Args:
            user: name of the OS account commands will run as.
            **kwargs: forwarded unchanged to LocalShellBackend.

        Raises:
            KeyError: if *user* does not exist on the host (from pwd.getpwnam).
        """
        logger.debug(f"Инициализация IsolatedShellBackend для пользователя {user}")
        super().__init__(**kwargs)
        self._user = user
        self._uid = pwd.getpwnam(user).pw_uid  # type: ignore[attr-defined]
        self._gid = pwd.getpwnam(user).pw_gid  # type: ignore[attr-defined]

    def execute(
        self,
        command: str,
        *,
        timeout: int | None = None,
    ) -> Any:
        """Execute *command* in a shell as the isolated user.

        Args:
            command: non-empty shell command line.
            timeout: seconds to wait; falls back to the backend default when None.

        Returns:
            An ExecuteResponse carrying combined stdout/stderr output (stderr
            lines prefixed with "[stderr]"), the exit code, and a truncation
            flag. Timeouts are reported with exit code 124.
        """
        logger.info(f"Вход в execute, команда: {command}")
        if not command or not isinstance(command, str):
            logger.warning("Команда должна быть непустой строкой")
            return type(self)._error_response("Command must be a non-empty string.")
        effective_timeout = timeout if timeout is not None else self._default_timeout
        if effective_timeout <= 0:
            logger.warning(f"Некорректный timeout: {effective_timeout}")
            return type(self)._error_response(
                f"timeout must be positive, got {effective_timeout}"
            )
        proc: subprocess.Popen | None = None
        try:
            logger.debug(f"Запуск subprocess для команды: {command}")
            proc = subprocess.Popen(
                command,
                shell=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                cwd=str(self.cwd),
                env=self._env,
                # Drop privileges in the child: setgid must run before setuid,
                # otherwise the process would already lack the right to change groups.
                preexec_fn=lambda: (
                    os.setgid(self._gid) or os.setuid(self._uid)  # type: ignore[attr-defined]
                ),
            )
            stdout, stderr = proc.communicate(timeout=effective_timeout)
            logger.debug(f"Команда выполнена, exit_code={proc.returncode}")
            # Merge the two streams into one transcript; stderr lines are
            # prefixed so the caller can tell them apart.
            output_parts = []
            if stdout:
                output_parts.append(stdout)
            if stderr:
                stderr_lines = stderr.strip().split("\n")
                output_parts.extend(f"[stderr] {line}" for line in stderr_lines)
            output = "\n".join(output_parts) if output_parts else "<no output>"
            logger.trace(
                f"Детали вывода: {output[:200]}{'...' if len(output) > 200 else ''}"
            )
            truncated = False
            if len(output) > self._max_output_bytes:
                output = output[: self._max_output_bytes]
                output += f"\n\n... Output truncated at {self._max_output_bytes} bytes."
                truncated = True
                logger.warning(f"Вывод усечен до {self._max_output_bytes} байт")
            if proc.returncode != 0:
                output = f"{output.rstrip()}\n\nExit code: {proc.returncode}"
                logger.debug(f"Команда завершилась с кодом {proc.returncode}")
            result = self._make_response(output, proc.returncode, truncated)
            logger.info("Выход из execute")
            return result
        except subprocess.TimeoutExpired:
            logger.warning(f"Команда timed out после {effective_timeout} секунд")
            # Kill and reap the child so it does not linger as a zombie.
            proc.kill()
            proc.communicate()
            msg = f"Error: Command timed out after {effective_timeout} seconds."
            return self._make_response(msg, 124, False)
        except Exception as e:
            logger.error(f"Ошибка выполнения команды: {e}", exc_info=True)
            return self._make_response(
                f"Error executing command ({type(e).__name__}): {e}", 1, False
            )

    @staticmethod
    def _error_response(message: str):
        """Build an ExecuteResponse describing a validation error (exit code 1)."""
        logger.debug(f"Создание error response: {message}")
        # Imported lazily; keeps module import light and avoids cycles at load time.
        from deepagents.backends.protocol import ExecuteResponse
        return ExecuteResponse(output=message, exit_code=1, truncated=False)

    @staticmethod
    def _make_response(output: str, exit_code: int, truncated: bool):
        """Build an ExecuteResponse from collected output and process status."""
        logger.debug(f"Создание response: exit_code={exit_code}, truncated={truncated}")
        from deepagents.backends.protocol import ExecuteResponse
        return ExecuteResponse(output=output, exit_code=exit_code, truncated=truncated)

View file

@ -1,73 +1,85 @@
import os
from deepagents import create_deep_agent
from deepagents import create_deep_agent, FilesystemPermission
from deepagents.backends import CompositeBackend, FilesystemBackend, StateBackend
from langchain_openai import ChatOpenAI
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph.state import CompiledStateGraph
from composio import Composio
from composio_langchain import LangchainProvider
from langgraph.checkpoint.sqlite import SqliteSaver
from src.agent.backends import IsolatedShellBackend
from src.agent.tools import send_file
from src.agent.tools import send_file, execute_shell
from src.agent.checkpointer import get_active_checkpointer
from src.core.logger import get_logger
logger = get_logger(__name__)
class Agent(CompiledStateGraph):
"""
Временный (надеюсь) костыль, чтобы дать доступ сервису к файловой системе агента.
"""
backend: IsolatedShellBackend
backend: CompositeBackend
def create_agent() -> Agent:
logger.info("Вход в функцию create_agent, начало инициализации агента")
try:
model = ChatOpenAI(
model=os.environ["PROVIDER_MODEL"],
base_url=os.environ["PROVIDER_URL"],
api_key=os.environ["PROVIDER_API_KEY"],
api_key=os.environ["PROVIDER_API_KEY"], # type: ignore
)
logger.debug(f"Экземпляр ChatOpenAI успешно создан: {model.__class__.__name__}")
logger.info(f"Init model with name: {os.environ['PROVIDER_MODEL']} at url: {os.environ['PROVIDER_URL']}")
composio_user_id = os.environ["AGENT_ID"]
logger.debug(f"Инциализация Composio с user_id={composio_user_id}")
composio = Composio(provider=LangchainProvider())
logger.info(f"Init composio with user_id: {composio_user_id}")
session = composio.create(user_id=composio_user_id)
tools = session.tools()
logger.info(f"Загружено инструментов Composio: {len(tools)}")
logger.trace(f"Список инструментов: {[t.name for t in tools]}")
logger.debug(f"Composio tools: {tools} for user_id: {composio_user_id}")
workspace_dir = os.environ["WORKSPACE_DIR"]
agent_user = os.environ.get("AGENT_USER", "agent")
logger.debug(f"Настройка бэкенда: user={agent_user}, root={workspace_dir}")
if not os.path.exists(workspace_dir):
logger.warning(f"Рабочая директория {workspace_dir} не существует на момент инициализации")
backend = IsolatedShellBackend(
user=agent_user,
root_dir=workspace_dir,
virtual_mode=True,
backend = CompositeBackend(
default=StateBackend(),
routes={
workspace_dir: FilesystemBackend(workspace_dir, virtual_mode=True),
}
)
logger.debug(f"Configured CompositeBackend with workspace: {workspace_dir}")
logger.info("Сборка графа LangGraph через create_deep_agent")
checkpointer = get_active_checkpointer()
logger.debug(f"Retrieved checkpointer: {type(checkpointer).__name__}")
# noinspection PyTypeChecker
# create_deep_agent возвращает CompiledStateGraph, но ниже мы его дополняем так, чтобы он соответствовал сигнатуре Agent
agent: Agent = create_deep_agent(
model=model,
system_prompt="You are a helpful assistant. Use Composio tools to take action when needed.",
checkpointer=MemorySaver(),
tools=tools + [send_file],
tools=tools + [send_file, execute_shell],
backend=backend,
checkpointer=checkpointer,
permissions=[
FilesystemPermission(
operations=["read", "write"],
paths=["/workspace/**"],
mode="allow",
),
FilesystemPermission(
operations=["read", "write"],
paths=["/**"],
mode="deny"
)
]
) # type: ignore
agent.backend = backend
logger.info("Агент успешно создан и готов к работе. Выход из функции create_agent, конец инициализации агента")
return agent
logger.info(f"Agent {composio_user_id} created successfully")
except KeyError as e:
# отсутствие переменной окружения
logger.critical(f"Ошибка конфигурации: отсутствует переменная окружения {e}")
logger.exception(f"Environment variable {e} is not set")
raise
except Exception as e:
# другие исключения
logger.error(f"Ошибка при создании агента: {str(e)}", exc_info=True)
logger.exception(f"Error creating agent: {e}")
raise
return agent

42
src/agent/checkpointer.py Normal file
View file

@ -0,0 +1,42 @@
from contextlib import asynccontextmanager
import os
from typing import AsyncIterable
from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
from pathlib import Path
from src.core.logger import get_logger
logger = get_logger(__name__)
_instance: AsyncSqliteSaver | None = None
def get_active_checkpointer() -> AsyncSqliteSaver:
    """Return the process-wide checkpointer registered by create_checkpointer().

    Raises:
        RuntimeError: if no checkpointer is currently active, i.e. this is
            called before create_checkpointer() has been entered or after it
            has exited.
    """
    # Explicit identity check: do not depend on the saver object's truthiness.
    if _instance is None:
        logger.error("Checkpointer not initialized when requested")
        raise RuntimeError("Checkpointer not initialized")
    logger.trace("Checkpointer instance retrieved successfully")
    return _instance
@asynccontextmanager
async def create_checkpointer() -> AsyncIterable[AsyncSqliteSaver]:
    """Create, register, and tear down the process-wide SQLite checkpointer.

    Opens an AsyncSqliteSaver backed by ``$INTERNAL_DATA_DIR/checkpoint.sqlite``,
    publishes it through the module-level ``_instance`` (read by
    get_active_checkpointer), and clears that global again on exit.

    Yields:
        AsyncSqliteSaver: the active checkpointer.

    Raises:
        KeyError: if the INTERNAL_DATA_DIR environment variable is not set.
    """
    global _instance
    try:
        internal_data_dir = os.environ["INTERNAL_DATA_DIR"]
        filepath = Path(internal_data_dir) / "checkpoint.sqlite"
        logger.debug(f"Attempting to initialize SQLite checkpointer at: {filepath}")
        # from_conn_string expects a connection string, not a Path object.
        async with AsyncSqliteSaver.from_conn_string(str(filepath)) as saver:
            _instance = saver
            logger.info("Checkpointer initialized successfully")
            try:
                yield saver
            finally:
                # Reset the global even when the caller's body raised, so a
                # stale checkpointer is never handed out afterwards.
                logger.trace("Tearing down checkpointer context...")
                _instance = None
        logger.info("Checkpointer closed and cleaned up successfully")
    except KeyError as e:
        logger.exception(f"Environment variable {e} is not set")
        raise
    except Exception as e:
        logger.exception(f"Error initializing checkpointer: {e}")
        raise

View file

@ -1,19 +1,20 @@
from typing import AsyncIterator, AsyncContextManager, Self
from abc import abstractmethod
import os
from pathlib import Path
from src.agent.base import create_agent
from src.core.logger import get_logger
logger = get_logger(__name__)
from lambda_agent_api.server import (
AgentEventUnion,
MsgEventTextChunk,
MsgEventToolCallChunk,
MsgEventToolResult,
MsgEventSendFile,
MsgEventEnd,
)
logger = get_logger(__name__)
class ChatBusyError(Exception):
"""
@ -49,22 +50,12 @@ class AgentService:
_instance = None # синглтон
def __new__(cls):
logger.info("Вход в __new__ AgentService, начало создания экземпляра")
try:
if cls._instance is None:
logger.debug("Создание нового экземпляра AgentService (синглтон)")
logger.debug("Creating new AgentService singleton instance")
cls._instance = super().__new__(cls)
cls._instance._agent = create_agent()
logger.info("Экземпляр AgentService успешно создан")
else:
logger.debug("Возвращение существующего экземпляра AgentService")
logger.info("Выход из __new__ AgentService")
logger.info("AgentService singleton instance created successfully")
return cls._instance
except Exception as e:
logger.error(
f"Ошибка при создании экземпляра AgentService: {str(e)}", exc_info=True
)
raise
class __AgentChat(AgentChat):
"""
@ -83,131 +74,116 @@ class AgentService:
return self.__chat_id
async def __aenter__(self):
logger.debug(f"Вход в чат {self.__chat_id}")
if self.__chat_id in self.__locks:
logger.warning(f"Чат {self.__chat_id} уже занят")
logger.warning(f"Chat {self.__chat_id} is already locked")
raise ChatBusyError()
self.__locks.add(self.__chat_id)
logger.debug(f"Chat {self.__chat_id} acquired lock")
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
logger.debug(f"Выход из чата {self.__chat_id}")
self.__locks.remove(self.__chat_id)
logger.debug(f"Chat {self.__chat_id} released lock")
if exc_type:
logger.exception(f"Exception in chat {self.__chat_id} context")
def astream(self, text: str, attachments: list[str] = None) -> AsyncIterator[AgentEventUnion]:
logger.debug(f"Вызов astream для чата {self.__chat_id}")
if not self.__chat_id in self.__locks:
logger.error(
f"Попытка вызвать astream без блокировки чата {self.__chat_id}"
)
logger.error(f"Chat {self.__chat_id} accessed outside of 'with' statement")
raise RuntimeError("Chat must be used in `with` statement")
return self.__service._AgentService__astream(
self.__chat_id, text, attachments
)
logger.debug(f"Starting stream for chat {self.__chat_id} with text length: {len(text)}, attachments: {len(attachments) if attachments else 0}")
return self.__service._AgentService__astream(self.__chat_id, text, attachments)
def chat(self, chat_id: int) -> AgentChat:
"""
Возвращает объект чата с заданным ID. Не проверяет Mutex.
"""
logger.debug(f"Создание объекта чата для chat_id={chat_id}")
logger.trace(f"Creating chat object for chat_id: {chat_id}")
return self.__AgentChat(self, chat_id)
async def __astream(
self, chat_id: int, text: str, attachments: list[str] = None
) -> AsyncIterator[AgentEventUnion]:
logger.info(
f"Вход в __astream для chat_id={chat_id}, начало обработки сообщения"
)
config = {"configurable": {"thread_id": chat_id}}
new_message = text
if attachments:
logger.debug(f"Обработка вложений для chat_id={chat_id}: {attachments}")
logger.debug(f"Processing {len(attachments)} attachments for chat {chat_id}")
attachments_description = await self.__describe_attachments(attachments)
new_message += "\n" + attachments_description
logger.debug("Вложения добавлены к сообщению")
logger.info("Начало стрима событий от агента")
try:
# Используем astream_events для перехвата детальных событий (инструменты, чанки и т.д.)
logger.debug(f"Starting agent stream for chat {chat_id}")
# astream_events, чтобы выходили промежуточные ивенты по мере их генерации
async for event in self._agent.astream_events(
{"messages": [{"role": "user", "content": new_message}]},
config=config,
version="v2", # Обязательно v2 для современных версий LangChain
version="v2",
):
kind = event["event"]
# 1. Агент генерирует токены (текст или аргументы для инструмента)
# пришли чанки от LLM
if kind == "on_chat_model_stream":
chunk = event["data"]["chunk"]
# Если генерируется обычный текст
# обычный текст
if chunk.content:
logger.trace(f"Генерация текста: {chunk.content}")
logger.trace(f"Yielding text chunk for chat {chat_id}")
yield MsgEventTextChunk(text=chunk.content)
# Если агент решил использовать инструмент (Langchain выдает tool_call_chunks)
# если вернулся tool_call
if hasattr(chunk, "tool_call_chunks") and chunk.tool_call_chunks:
for tool_chunk in chunk.tool_call_chunks:
logger.debug(
f"Инструмент {tool_chunk.get('name')} вызывается с args: {tool_chunk.get('args')}"
)
tool_name = tool_chunk.get("name")
logger.debug(f"Tool call initiated for chat {chat_id}: {tool_name}")
yield MsgEventToolCallChunk(
tool_name=tool_chunk.get("name"),
args_chunk=tool_chunk.get("args"),
)
# 2. Инструмент завершил работу и вернул результат
# завершилось выполнение тула
elif kind == "on_tool_end":
result = event["data"].get("output")
logger.debug(
f"Инструмент {event['name']} завершил работу с результатом: {str(result)}"
)
tool_name = event["name"]
logger.debug(f"Tool {tool_name} completed for chat {chat_id}")
"""# Перехватываем ссылку на авторизацию Composio v3
if result and "connect.composio.dev" in str(result):
yield MsgEventTextChunk(
text=f"\n⚠️ Для выполнения действия требуется авторизация. Перейдите по ссылке: {result}\n")
else:"""
yield MsgEventToolResult(
tool_name=event["name"],
result=str(result), # Страховка от ошибки сериализации JSON
tool_name=tool_name,
result=str(result) # переводим в строку, потому что иногда приходит кривой json
)
# 3. Кастомные события (send_file и др.)
# события, которые мы вызвали (например внутри тулов) через adispatch_custom_event
elif kind == "on_custom_event":
if event["name"] == "send_file":
logger.info(f"Кастомное событие send_file: {event['data']['path']}")
yield MsgEventSendFile(path=event["data"]["path"])
except Exception as e:
logger.error(f"Ошибка в стриме событий для chat_id={chat_id}: {str(e)}", exc_info=True)
raise
file_path = event["data"]["path"]
logger.debug(f"Send file event for chat {chat_id}: {file_path}")
yield MsgEventSendFile(path=file_path)
logger.info("Стрим событий завершен")
# 3. В конце генерации отправляем событие завершения
yield MsgEventEnd(tokens_used=0) # потом заменить на метадату
logger.info("Выход из __astream")
logger.debug(f"Agent stream completed for chat {chat_id}")
async def __describe_attachments(self, raw_paths: list[str]) -> str:
logger.debug(f"Обработка вложений: {raw_paths}")
lines = []
logger.debug(f"Describing {len(raw_paths)} attachments")
for raw_path in raw_paths:
logger.trace(f"Обработка файла: {raw_path}")
try:
p = self._agent.backend._resolve_path(raw_path)
rel = p.relative_to(self._agent.backend.cwd)
workspace_dir = Path(os.environ["WORKSPACE_DIR"])
p = workspace_dir / Path(raw_path)
rel = p.relative_to(workspace_dir)
if not p.exists():
logger.warning(f"Attachment file not found: {rel.as_posix()}")
raise FileNotFoundError(f"File {rel.as_posix()} not found")
lines.append(f"- {rel.as_posix()}")
logger.trace(f"Attachment validated: {rel.as_posix()}")
except Exception as e:
logger.warning(f"Ошибка при обработке вложения {raw_path}: {str(e)}")
raise AttachmentError(
f"Failed to validate attachment {raw_path}: {str(e)}"
) from e
raise AttachmentError(f"Failed to validate attachment {raw_path}: {str(e)}") from e
result = (f"К сообщению приложены {len(lines)} файлов. "
f"Ниже даны пути до этих файлов относительно рабочей директории:\n") + "\n".join(lines)
logger.debug(f"Attachments description generated successfully for {len(lines)} files")
return result
return (
f"К сообщению приложены {len(lines)} файлов. "
f"Ниже даны пути до этих файлов относительно рабочей директории:\n"
) + "\n".join(lines)

View file

@ -0,0 +1,2 @@
from src.agent.tools.send_file import send_file
from src.agent.tools.execute_shell import execute_shell

View file

@ -0,0 +1,105 @@
import os
import subprocess
from dataclasses import dataclass
from typing import Annotated
from langchain_core.tools import tool
from src.core.logger import get_logger
logger = get_logger(__name__)
# Wall-clock limit for a single shell command, in seconds, when the caller
# passes no timeout (or 0).
DEFAULT_TIMEOUT = 30
# Hard cap on the tool's returned output length.
DEFAULT_MAX_OUTPUT = 100_000
# Working directory advertised to the model; falls back to "/" when
# WORKSPACE_DIR is unset or empty.
CWD = os.environ.get("WORKSPACE_DIR", None) or "/"
# Tool description shown to the LLM; interpolates CWD at import time.
TOOL_DESCRIPTION = \
f"""
Execute shell command and return formatted output.
Your default working dir is: {CWD}
Args:
command: shell command to execute
timeout: timeout in seconds (None = use default)
Returns:
Formatted output with exit code
"""
@tool(description=TOOL_DESCRIPTION)
def execute_shell(
    command: Annotated[str, "Shell command to execute"],
    timeout: Annotated[
        int | None, "Timeout in seconds, default is not specified"
    ] = None,
) -> Annotated[str, "Command output with exit code"]:
    """Run a shell command in the workspace and return its combined output.

    Args:
        command: non-empty shell command line.
        timeout: wall-clock limit in seconds; None or 0 selects DEFAULT_TIMEOUT.

    Returns:
        stdout plus "[stderr]"-prefixed stderr lines, truncated to
        DEFAULT_MAX_OUTPUT characters, followed by an exit-code status line;
        or an "Error: ..." string on invalid input or execution failure.
    """
    # Validate timeout type and range before touching the shell.
    if timeout is not None:
        if not isinstance(timeout, int):
            logger.warning(f"Bad type timeout: {type(timeout).__name__}, expected int")
            return "Error: timeout must be an integer"
        if timeout < 0:
            logger.warning(f"Negative value timeout: {timeout}")
            return f"Error: timeout must be non-negative, got {timeout}"
    # Validate command.
    if not command or not isinstance(command, str):
        logger.warning("Command is empty or of wrong type")
        return "Error: command must be a non-empty string"
    # None and 0 both mean "use the default"; anything else is a positive int,
    # so the subprocess timeout is always positive — no dead > 0 branch needed.
    effective_timeout = DEFAULT_TIMEOUT if not timeout else timeout
    # Re-read the env var per call so changes take effect without a restart.
    cwd = os.environ.get("WORKSPACE_DIR", None) or "/"
    logger.debug(f"Execution parameters: timeout={effective_timeout}s, cwd='{cwd}'")
    try:
        logger.info(f"Executing command in workspace '{cwd}'")
        result = subprocess.run(
            command,
            shell=True,
            capture_output=True,
            text=True,
            timeout=effective_timeout,
            cwd=cwd,
        )
        # Merge the streams; stderr lines are prefixed so the model can tell
        # them apart. Joining parts avoids a stray leading newline when stdout
        # is empty but stderr is not.
        parts = []
        if result.stdout:
            logger.trace(f"Command output: {len(result.stdout)} characters on stdout")
            parts.append(result.stdout)
        if result.stderr:
            logger.debug("Command output errors on stderr")
            stderr_lines = result.stderr.strip().split("\n")
            parts.append("\n".join(f"[stderr] {line}" for line in stderr_lines))
        output = "\n".join(parts)
        # Truncate if needed.
        if len(output) > DEFAULT_MAX_OUTPUT:
            logger.warning(
                f"Command output exceeds limit ({len(output)} > {DEFAULT_MAX_OUTPUT}), truncating"
            )
            output = output[:DEFAULT_MAX_OUTPUT]
            output += f"\n\n... Output truncated at {DEFAULT_MAX_OUTPUT} bytes"
        # Add exit code status.
        status = "succeeded" if result.returncode == 0 else "failed"
        output += f"\n\n[Command {status} with exit code {result.returncode}]"
        if result.returncode == 0:
            logger.info("Command succeeded. Exiting execute_shell function")
        else:
            logger.warning(f"Command failed with exit code {result.returncode}")
        return output
    except subprocess.TimeoutExpired:
        logger.warning(f"Command timed out after {effective_timeout} seconds")
        return f"Error: Command timed out after {effective_timeout} seconds"
    except FileNotFoundError:
        # With shell=True the shell reports missing binaries itself; this path
        # fires mainly when cwd does not exist.
        logger.warning(f"Command not found: '{command}'")
        return "Error: Command not found"
    except PermissionError:
        logger.warning(f"No permission to execute command: '{command}'")
        return "Error: Permission denied"
    except Exception as e:
        # logger.exception already records the traceback; exc_info was redundant.
        logger.exception(f"Error executing command '{command}': {str(e)}")
        return f"Error executing command ({type(e).__name__}): {e}"

View file

@ -33,7 +33,7 @@ async def send_file(path: str) -> str:
Returns:
Подтверждение отправки или сообщение об ошибке
"""
logger.info("Вход в функцию send_file, начало отправки файла")
try:
workspace = os.environ.get("WORKSPACE_DIR", "/workspace")
@ -42,21 +42,21 @@ async def send_file(path: str) -> str:
input_path = input_path[len("workspace/") :]
full_path = Path(workspace) / input_path
logger.debug(f"Обработка пути: исходный '{path}', полный '{full_path}'")
logger.debug(f"File path: current '{path}', full '{full_path}'")
if not full_path.exists():
logger.warning(f"Файл '{path}' не найден")
return f"Ошибка: файл '{path}' не найден"
logger.warning(f"File '{path}' not found")
return f"Error: file '{path}' not found"
if not full_path.is_file():
logger.warning(f"'{path}' не является файлом")
return f"Ошибка: '{path}' не является файлом"
logger.warning(f"'{path}' is not a file")
return f"Error: '{path}' is not a file"
logger.info("Отправка события send_file")
logger.info(f"Sending file to user: '{input_path}'")
await adispatch_custom_event(name="send_file", data={"path": path})
logger.info("Файл успешно отправлен пользователю. Выход из функции send_file")
return f"Файл '{path}' отправлен пользователю"
return f"File '{path}' sent to user"
except Exception as e:
logger.error(f"Ошибка при отправке файла '{path}': {str(e)}", exc_info=True)
logger.exception(f"Error sending file '{path}': {str(e)}")
raise

View file

@ -6,13 +6,13 @@ from src.core.logger import get_logger
logger = get_logger(__name__)
def get_agent_service() -> AgentService:
logger.debug("Получение экземпляра AgentService")
logger.trace("Get new AgentService")
return AgentService()
async def get_chat(service: Annotated[AgentService, Depends(get_agent_service)],
chat_id: int) -> AsyncGenerator[AgentChat]:
logger.debug(f"Получение экземпляра AgentChat для chat_id={chat_id}")
logger.trace(f"Get instance of AgentChat for chat_id={chat_id}")
async with service.chat(chat_id) as chat:
yield chat
@ -26,13 +26,13 @@ async def get_chat_ws(service: Annotated[AgentService, Depends(get_agent_service
- ``ChatBusyError`` -> ``WebSocketException(status.WS_1008_POLICY_VIOLATION, reason=str(e))``
"""
try:
logger.debug(f"Получение экземпляра AgentChat для WS для chat_id={chat_id}")
logger.trace(f"Get instance of AgentChat for WS for chat_id={chat_id}")
gen = get_chat(service, chat_id)
yield await gen.__anext__()
except StopAsyncIteration:
logger.error(f"Chat {chat_id} закрыт")
logger.trace(f"Chat {chat_id} closed")
pass
except ChatBusyError as e:
logger.error(f"Chat {chat_id} занят")
logger.warning(f"Chat {chat_id} busy")
raise WebSocketException(status.WS_1008_POLICY_VIOLATION,
reason=str(e))

View file

@ -1,10 +1,10 @@
from typing import Annotated
from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Depends
from pydantic_core import ValidationError
from lambda_agent_api.server import (
MsgStatus,
MsgEventTextChunk,
MsgEventEnd,
MsgError,
)
@ -27,22 +27,27 @@ async def websocket_endpoint(
# важно использовать именно _ws вариант, чтобы корректно обрабатывались исключения
chat: Annotated[AgentChat, Depends(get_chat_ws)],
):
logger.trace(f"WebSocket connection accepted for chat_id: {chat_id}")
logger.info(f"WebSocket connection accepted for chat_id: {chat_id}")
await ws.accept()
await ws.send_text(MsgStatus().model_dump_json())
try:
while True:
raw = await ws.receive_text()
logger.trace(f"Received raw message: {raw}")
logger.trace(f"Received raw message: {len(raw)} characters for chat_id: {chat_id}")
try:
msg = ClientMessage.validate_json(raw)
except ValidationError as e:
logger.warning(f"Invalid JSON received from chat {chat_id}: {e}")
await ws.send_text(MsgError(code="BAD_REQUEST", details="Invalid message format").model_dump_json())
continue
await process_message(ws, chat, msg)
except WebSocketDisconnect:
logger.trace(f"WebSocket disconnected for chat_id: {chat_id}")
logger.info(f"WebSocket disconnected for chat_id: {chat_id}")
pass
except Exception as exc:
logger.trace(f"Error occurred for chat_id {chat_id}: {exc}")
logger.exception("Unexpected error in websocket")
await ws.send_text(
MsgError(code="INTERNAL_ERROR", details=str(exc)).model_dump_json()
)
@ -51,7 +56,9 @@ async def websocket_endpoint(
async def process_message(ws: WebSocket, chat: AgentChat, msg):
match msg:
case MsgUserMessage():
logger.trace(f"Processing user message: {msg.text}")
logger.debug(f"Processing user message for chat {chat.chat_id} (text length: {len(msg.text)}, attachments: {len(msg.attachments) if msg.attachments else 0})")
async for chunk in chat.astream(msg.text, msg.attachments):
logger.trace(f"Sending chunk: {chunk}")
logger.trace(f"Sending stream chunk to chat {chat.chat_id}: {chunk.__class__.__name__}")
await ws.send_text(chunk.model_dump_json())
logger.debug(f"Finished processing user message for chat {chat.chat_id}")
await ws.send_text(MsgEventEnd(tokens_used=0).model_dump_json()) # TODO: подставить реальное потребление токенов

View file

@ -4,6 +4,7 @@ from fastapi import FastAPI
from src.api.external import router as ws_router
from src.agent import AgentService
from src.agent.checkpointer import create_checkpointer
from src.core.logger import get_logger
@ -11,9 +12,8 @@ logger = get_logger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
logger.info("Инициализация AgentService...")
async with create_checkpointer():
AgentService() # инициализируем синглтон
logger.info("AgentService инициализирован")
yield

85
uv.lock generated
View file

@ -7,24 +7,37 @@ name = "agent"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "aiosqlite" },
{ name = "composio" },
{ name = "composio-langchain" },
{ name = "deepagents" },
{ name = "fastapi" },
{ name = "langchain-openai" },
{ name = "langgraph-checkpoint-sqlite" },
{ name = "uvicorn", extra = ["standard"] },
]
[package.metadata]
requires-dist = [
{ name = "aiosqlite", specifier = ">=0.22.1" },
{ name = "composio", specifier = ">=0.11.5" },
{ name = "composio-langchain", specifier = ">=0.11.5" },
{ name = "deepagents", specifier = ">=0.1.0" },
{ name = "deepagents", specifier = ">=0.5.0" },
{ name = "fastapi", specifier = ">=0.135.3" },
{ name = "langchain-openai", specifier = ">=1.1.12" },
{ name = "langgraph-checkpoint-sqlite", specifier = ">=3.0.3" },
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.34.0" },
]
[[package]]
name = "aiosqlite"
version = "0.22.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" },
]
[[package]]
name = "annotated-doc"
version = "0.0.4"
@ -289,18 +302,19 @@ wheels = [
[[package]]
name = "deepagents"
version = "0.4.12"
version = "0.5.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain" },
{ name = "langchain-anthropic" },
{ name = "langchain-core" },
{ name = "langchain-google-genai" },
{ name = "langsmith" },
{ name = "wcmatch" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5a/ef/0b2ccd5e4f40c1554145de17a7f3ee41994de1dda3ea36abe28600f1a3cf/deepagents-0.4.12.tar.gz", hash = "sha256:fc24a691e5cba00920ac4fa1d94f8147d6081fe513ed22bdba7da469288681c3", size = 91870, upload-time = "2026-03-20T14:54:29.904Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bd/c5/fbf36ff707f7ad4ff2d1590ba8c3b44622370955c74f0c84e4bb1b101d7d/deepagents-0.5.3.tar.gz", hash = "sha256:cbe63bd482c37d3aef883326f5dde70effcd489e67fc91834ffe7a8769796a5f", size = 122654, upload-time = "2026-04-15T13:06:34.875Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5e/77/63a5cb4e3a8871c4a52d600661a232c26b35f52931ee551c3adc38eeacf6/deepagents-0.4.12-py3-none-any.whl", hash = "sha256:76a272bac25607c5ef8c5adc876e391da945f1107b504686964dfdb6afdc1ebb", size = 104455, upload-time = "2026-03-20T14:54:28.786Z" },
{ url = "https://files.pythonhosted.org/packages/30/8f/40c91a29e4f094e5a1375e33309a3d0ca2e5204816d1dcdcd41ac38410d8/deepagents-0.5.3-py3-none-any.whl", hash = "sha256:f1f1c968f17a5bfb0a6d588d00c2e83264cdac0faa91f7b522892d5a2bd303cb", size = 138475, upload-time = "2026-04-15T13:06:33.593Z" },
]
[[package]]
@ -514,16 +528,16 @@ wheels = [
[[package]]
name = "langchain"
version = "1.2.14"
version = "1.2.15"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph" },
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/2b/0ca77ee988a9f1c1f1d923115d7c91221ab434067bc36f2f637201aeee81/langchain-1.2.14.tar.gz", hash = "sha256:fc5511e8f8af7efee9e5a144da4392d700d627b301d240470db97272940ad317", size = 574190, upload-time = "2026-03-31T13:50:37.398Z" }
sdist = { url = "https://files.pythonhosted.org/packages/98/3f/888a7099d2bd2917f8b0c3ffc7e347f1e664cf64267820b0b923c4f339fc/langchain-1.2.15.tar.gz", hash = "sha256:1717b6719daefae90b2728314a5e2a117ff916291e2862595b6c3d6fba33d652", size = 574732, upload-time = "2026-04-03T14:26:03.994Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4c/87/324ae5fd9993f024339a452fc89e3fd808bccde87ef95c8dafab3de023c0/langchain-1.2.14-py3-none-any.whl", hash = "sha256:96da6d7338d5a6fc41eb4ec0db83f7ef5d03bb5efd17bb269f34ba4378ebdb4d", size = 112715, upload-time = "2026-03-31T13:50:35.997Z" },
{ url = "https://files.pythonhosted.org/packages/3f/e8/a3b8cb0005553f6a876865073c81ef93bd7c5b18381bcb9ba4013af96ebc/langchain-1.2.15-py3-none-any.whl", hash = "sha256:e349db349cb3e9550c4044077cf90a1717691756cc236438404b23500e615874", size = 112714, upload-time = "2026-04-03T14:26:02.557Z" },
]
[[package]]
@ -542,10 +556,11 @@ wheels = [
[[package]]
name = "langchain-core"
version = "1.2.25"
version = "1.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonpatch" },
{ name = "langchain-protocol" },
{ name = "langsmith" },
{ name = "packaging" },
{ name = "pydantic" },
@ -554,9 +569,9 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "uuid-utils" },
]
sdist = { url = "https://files.pythonhosted.org/packages/86/2a/d65de24fc9b7989137253da8973f850f3e39b4ce3e0377bc8200d6b3c189/langchain_core-1.2.25.tar.gz", hash = "sha256:77e032b96509d0eb1f6875042fdf97b7e2334a815314700c6894d9d078909b9c", size = 842347, upload-time = "2026-04-02T22:39:11.528Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a8/03/7219502e8ca728d65eb44d7a3eb60239230742a70dbfc9241b9bfd61c4ab/langchain_core-1.3.2.tar.gz", hash = "sha256:fd7a50b2f28ba561fd9d7f5d2760bc9e06cf00cdf820a3ccafe88a94ffa8d5b7", size = 911813, upload-time = "2026-04-24T15:49:23.699Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/0e/7b31b0249f9b9b0fc7829d5b0ee484b8f8d43c78e376e9951e2ef3eac70c/langchain_core-1.2.25-py3-none-any.whl", hash = "sha256:0c05bf395aec6d2dfa14488fd006f7bcd0540e7e89287e04f92203532a82c828", size = 506866, upload-time = "2026-04-02T22:39:10.137Z" },
{ url = "https://files.pythonhosted.org/packages/7d/d5/8fa4431007cbb7cfed7590f4d6a5dea3ad724f4174d248f6642ef5ce7d05/langchain_core-1.3.2-py3-none-any.whl", hash = "sha256:d44a66127f9f8db735bdfd0ab9661bccb47a97113cfd3f2d89c74864422b7274", size = 542390, upload-time = "2026-04-24T15:49:21.991Z" },
]
[[package]]
@ -588,9 +603,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6e/a6/68fb22e3604015e6f546fa1d3677d24378b482855ae74710cbf4aec44132/langchain_openai-1.1.12-py3-none-any.whl", hash = "sha256:da71ca3f2d18c16f7a2443cc306aa195ad2a07054335ac9b0626dcae02b6a0c5", size = 88487, upload-time = "2026-03-23T18:59:17.978Z" },
]
[[package]]
name = "langchain-protocol"
version = "0.0.12"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5c/51/1157009b6f94e6e58be58fa8b620187d657909a8b36a6bf5b0c52a2711f6/langchain_protocol-0.0.12.tar.gz", hash = "sha256:5e14c434290a705c9510fdb1a83ecf7561a5e6e0dfd053930ade80dba069269f", size = 6408, upload-time = "2026-04-25T01:05:01.489Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/95/82/3431e3061c917439589fa88a6b23c9bc0e154cba0f05d2e895a68c76ff74/langchain_protocol-0.0.12-py3-none-any.whl", hash = "sha256:402b61f42d4139692528cf37226c367bb6efc8ff8165b29380accb0abfece7b2", size = 6639, upload-time = "2026-04-25T01:05:00.487Z" },
]
[[package]]
name = "langgraph"
version = "1.1.4"
version = "1.1.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
@ -600,9 +627,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "xxhash" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/ba/8a8f48ca1248ecff4844cb27247d10a85f05b4ac6b903298d36b2ca090fd/langgraph-1.1.4.tar.gz", hash = "sha256:c951a859f68a021c69a27500db4eafc1900fc7ac32a54f7fc31d277165d04bed", size = 545440, upload-time = "2026-03-31T12:56:45.344Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/9d9c65d5500a1ca7ea63d6d65aecfb248037018a74d7d4ef52e276bb4e4b/langgraph-1.1.9.tar.gz", hash = "sha256:bc5a49d5a5e71fda1f9c53c06c62f4caec9a95545b739d130a58b6ab3269e274", size = 560717, upload-time = "2026-04-21T13:43:06.809Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d0/74/22ea4734247b59e7c98e575e31a1f463366b084e0dc83cf63715b079ff28/langgraph-1.1.4-py3-none-any.whl", hash = "sha256:77ebe7ed44a2699f13696bf41f1dabe7b5fa8e6ad51e3597f2f175492e8f3656", size = 168190, upload-time = "2026-03-31T12:56:44.221Z" },
{ url = "https://files.pythonhosted.org/packages/16/58/0380420e66619d12c992c1f8cfda0c7a04e8f0fe8a84752245b9e7b1cba7/langgraph-1.1.9-py3-none-any.whl", hash = "sha256:7db13ceecde4ea643df6c097dcc9e534895dcd9fcc6500eeff2f2cde0fab16b2", size = 173744, upload-time = "2026-04-21T13:43:05.513Z" },
]
[[package]]
@ -618,17 +645,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/65/4c/09a4a0c42f5d2fc38d6c4d67884788eff7fd2cfdf367fdf7033de908b4c0/langgraph_checkpoint-4.0.1-py3-none-any.whl", hash = "sha256:e3adcd7a0e0166f3b48b8cf508ce0ea366e7420b5a73aa81289888727769b034", size = 50453, upload-time = "2026-02-27T21:06:14.293Z" },
]
[[package]]
name = "langgraph-checkpoint-sqlite"
version = "3.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiosqlite" },
{ name = "langgraph-checkpoint" },
{ name = "sqlite-vec" },
]
sdist = { url = "https://files.pythonhosted.org/packages/04/61/40b7f8f29d6de92406e668c35265f409f57064907e31eae84ab3f2a3e3e1/langgraph_checkpoint_sqlite-3.0.3.tar.gz", hash = "sha256:438c234d37dabda979218954c9c6eb1db73bee6492c2f1d3a00552fe23fa34ed", size = 123876, upload-time = "2026-01-19T00:38:44.473Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/d8/84ef22ee1cc485c4910df450108fd5e246497379522b3c6cfba896f71bf6/langgraph_checkpoint_sqlite-3.0.3-py3-none-any.whl", hash = "sha256:02eb683a79aa6fcda7cd4de43861062a5d160dbbb990ef8a9fd76c979998a952", size = 33593, upload-time = "2026-01-19T00:38:43.288Z" },
]
[[package]]
name = "langgraph-prebuilt"
version = "1.0.8"
version = "1.0.11"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph-checkpoint" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0d/06/dd61a5c2dce009d1b03b1d56f2a85b3127659fdddf5b3be5d8f1d60820fb/langgraph_prebuilt-1.0.8.tar.gz", hash = "sha256:0cd3cf5473ced8a6cd687cc5294e08d3de57529d8dd14fdc6ae4899549efcf69", size = 164442, upload-time = "2026-02-19T18:14:39.083Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8d/bb/0e0b3eb33b1f2f32f8810a49aa24b7d11a5b0ed45f679386095946a59557/langgraph_prebuilt-1.0.11.tar.gz", hash = "sha256:0e71545f706a134b6a80a2a56916562797b499e3e4ab6eed5ce89396ac03d322", size = 171759, upload-time = "2026-04-24T18:18:34.528Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/41/ec966424ad3f2ed3996d24079d3342c8cd6c0bd0653c12b2a917a685ec6c/langgraph_prebuilt-1.0.8-py3-none-any.whl", hash = "sha256:d16a731e591ba4470f3e313a319c7eee7dbc40895bcf15c821f985a3522a7ce0", size = 35648, upload-time = "2026-02-19T18:14:37.611Z" },
{ url = "https://files.pythonhosted.org/packages/f6/8c/f4c574cb75ae9b8a474215d03a029ea723c919f65771ca1c82fe532d0297/langgraph_prebuilt-1.0.11-py3-none-any.whl", hash = "sha256:7afbaf5d64959e452976664c75bb8ec24098d3510cf9c205919baf443e7342ec", size = 36832, upload-time = "2026-04-24T18:18:33.586Z" },
]
[[package]]
@ -941,6 +982,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "sqlite-vec"
version = "0.1.9"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/85/9fad0045d8e7c8df3e0fa5a56c630e8e15ad6e5ca2e6106fceb666aa6638/sqlite_vec-0.1.9-py3-none-macosx_10_6_x86_64.whl", hash = "sha256:1b62a7f0a060d9475575d4e599bbf94a13d85af896bc1ce86ee80d1b5b48e5fb", size = 131171, upload-time = "2026-03-31T08:02:31.717Z" },
{ url = "https://files.pythonhosted.org/packages/a4/3d/3677e0cd2f92e5ebc43cd29fbf565b75582bff1ccfa0b8327c7508e1084f/sqlite_vec-0.1.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d52e30513bae4cc9778ddbf6145610434081be4c3afe57cd877893bad9f6b6c", size = 165434, upload-time = "2026-03-31T08:02:32.712Z" },
{ url = "https://files.pythonhosted.org/packages/00/d4/f2b936d3bdc38eadcbd2a87875815db36430fab0363182ba5d12cd8e0b51/sqlite_vec-0.1.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e921e592f24a5f9a18f590b6ddd530eb637e2d474e3b1972f9bbeb773aa3cb9", size = 160076, upload-time = "2026-03-31T08:02:33.796Z" },
{ url = "https://files.pythonhosted.org/packages/6f/ad/6afd073b0f817b3e03f9e37ad626ae341805891f23c74b5292818f49ac63/sqlite_vec-0.1.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux1_x86_64.whl", hash = "sha256:1515727990b49e79bcaf75fdee2ffc7d461f8b66905013231251f1c8938e7786", size = 163388, upload-time = "2026-03-31T08:02:34.888Z" },
{ url = "https://files.pythonhosted.org/packages/42/89/81b2907cda14e566b9bf215e2ad82fc9b349edf07d2010756ffdb902f328/sqlite_vec-0.1.9-py3-none-win_amd64.whl", hash = "sha256:4a28dc12fa4b53d7b1dced22da2488fade444e96b5d16fd2d698cd670675cf32", size = 292804, upload-time = "2026-03-31T08:02:36.035Z" },
]
[[package]]
name = "starlette"
version = "1.0.0"