подключение api к модели через deepagent
This commit is contained in:
parent
04bb17190e
commit
635986ba70
5 changed files with 864 additions and 13 deletions
0
src/agent/__init__.py
Normal file
0
src/agent/__init__.py
Normal file
26
src/agent/base.py
Normal file
26
src/agent/base.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
import os
|
||||
from deepagents import create_deep_agent
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
|
||||
def create_agent():
    """Create a deep agent backed by an OpenAI-compatible chat model.

    Reads the provider endpoint, API key and model name from the
    ``PROVIDER_URL``, ``PROVIDER_API_KEY`` and ``PROVIDER_MODEL``
    environment variables.

    Returns:
        The agent produced by ``create_deep_agent``.

    Raises:
        RuntimeError: if any of the three environment variables is unset.
    """
    api_url = os.environ.get("PROVIDER_URL")
    api_key = os.environ.get("PROVIDER_API_KEY")
    model_name = os.environ.get("PROVIDER_MODEL")

    if None in (api_url, api_key, model_name):
        # Name the variables that are actually read above: the original
        # message said PROVIDER_MODEL_NAME, which does not match the
        # PROVIDER_MODEL lookup and would misdirect whoever configures it.
        raise RuntimeError(
            "PROVIDER_URL, PROVIDER_API_KEY or PROVIDER_MODEL is not configured"
        )

    model = ChatOpenAI(
        model=model_name,
        base_url=api_url,
        api_key=api_key,
    )

    return create_deep_agent(
        model=model,
        system_prompt="You are a helpful assistant.",
    )
|
||||
|
||||
|
||||
# Module-level singleton agent, created at import time: missing provider
# configuration fails fast when the application starts rather than on the
# first websocket message.
agent = create_agent()
|
||||
|
|
@ -1,7 +1,14 @@
|
|||
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
|
||||
|
||||
from lambda_agent_api.server import *
|
||||
from lambda_agent_api.client import *
|
||||
from lambda_agent_api.server import (
|
||||
MsgStatus,
|
||||
MsgEventTextChunk,
|
||||
MsgEventEnd,
|
||||
MsgError,
|
||||
)
|
||||
from lambda_agent_api.client import ClientMessage, MsgUserMessage
|
||||
|
||||
from src.agent.base import agent
|
||||
|
||||
|
||||
# Router holding the websocket endpoint; mounted by the application elsewhere.
router = APIRouter()
|
||||
|
|
@ -26,20 +33,23 @@ async def websocket_endpoint(ws: WebSocket):
|
|||
)
|
||||
|
||||
|
||||
async def process_message(ws: WebSocket, msg): # msg должно быть ClientMessage (аннотация не работает из-за TypeAdapter)
|
||||
async def process_message(
    ws: WebSocket, msg
):  # msg should be a ClientMessage (annotation omitted: TypeAdapter breaks it)
    """Dispatch one decoded client message to its type-specific handler."""
    if isinstance(msg, MsgUserMessage):
        await handle_user_message(ws, msg.text)
|
||||
|
||||
|
||||
async def handle_user_message(ws: WebSocket, text: str):
    """Stream the agent's reply to a user message over the websocket.

    Forwards each non-empty assistant text as a ``MsgEventTextChunk`` and
    finishes with a ``MsgEventEnd`` carrying the token count reported by
    the model (0 when the provider attaches no usage metadata).
    """
    tokens_used = 0

    async for event in agent.astream({"messages": [{"role": "user", "content": text}]}):
        # Events may carry the messages either at the top level or nested
        # under a "model" node, so check both places.
        messages = event.get("messages") or event.get("model", {}).get("messages", [])
        if not messages:
            continue

        last_msg = messages[-1]

        # Accumulate real token usage when the message exposes it
        # (LangChain AIMessage.usage_metadata); the original code left
        # tokens_used at 0 and always reported zero tokens to the client.
        usage = getattr(last_msg, "usage_metadata", None)
        if usage:
            tokens_used = usage.get("total_tokens", tokens_used)

        # NOTE(review): with the default stream mode each event can carry
        # the full message so far, so consecutive chunks may repeat text —
        # confirm the client expects cumulative rather than delta chunks.
        content = getattr(last_msg, "content", None)
        if isinstance(content, str) and content.strip():
            await ws.send_text(MsgEventTextChunk(text=content.strip()).model_dump_json())

    await ws.send_text(MsgEventEnd(tokens_used=tokens_used).model_dump_json())
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue