---
# Development overrides: extends the production matrix-bot service and adds a
# locally-built platform-agent backend with live-reload volume mounts.
services:
  matrix-bot:
    extends:
      file: docker-compose.prod.yml
      service: matrix-bot
    build:
      context: .
      dockerfile: Dockerfile
      target: development
      args:
        LAMBDA_AGENT_API_REF: ${LAMBDA_AGENT_API_REF:-master}
      additional_contexts:
        agent_api: ./external/platform-agent_api
      tags:
        - ${SURFACES_BOT_DEV_IMAGE:-surfaces-bot:dev}
    environment:
      # Quoted: plain scalars with multiple colons are legal but fragile.
      AGENT_BASE_URL: "http://platform-agent:8000"
    depends_on:
      platform-agent:
        condition: service_healthy

  platform-agent:
    build:
      context: ./external/platform-agent
      target: development
      additional_contexts:
        agent_api: ./external/platform-agent_api
    environment:
      PYTHONUNBUFFERED: "1"
      AGENT_ID: ${AGENT_ID:-matrix-dev}
      PROVIDER_MODEL: ${PROVIDER_MODEL:-openai/gpt-4o-mini}
      PROVIDER_URL: ${PROVIDER_URL:-}
      PROVIDER_API_KEY: ${PROVIDER_API_KEY:-}
      COMPOSIO_API_KEY: ${COMPOSIO_API_KEY:-}
    volumes:
      # Source mounts for live reload; `agents` is the shared named volume.
      - ./external/platform-agent/src:/app/src
      - ./external/platform-agent_api:/agent_api
      - agents:/workspace
    # Folded scalar: the shell string is joined into one line; `&&` chains
    # keep ordering, `exec` hands PID 1 to uvicorn for clean signal handling.
    command: >
      sh -lc "
      mkdir -p /workspace &&
      chown -R agent:agent /workspace &&
      exec /app/.venv/bin/uvicorn src.main:app --host 0.0.0.0 --port 8000 --no-access-log
      "
    ports:
      # Quoted port mapping — avoids the YAML 1.1 sexagesimal trap.
      - "8000:8000"
    healthcheck:
      # Uses the container's own Python so no curl/wget is required.
      test:
        - CMD-SHELL
        - python -c "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8000/openapi.json', timeout=2).read()"
      interval: 60s
      timeout: 5s
      retries: 5
      start_period: 15s
    restart: unless-stopped

volumes:
  agents:
    name: ${SURFACES_SHARED_VOLUME:-surfaces-agents}
  bot-state:
    name: ${SURFACES_BOT_STATE_VOLUME:-surfaces-bot-state}