Upload 182 files
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +6 -0
- Dockerfile +32 -0
- backend/app/__init__.py +4 -0
- backend/app/__pycache__/__init__.cpython-310.pyc +0 -0
- backend/app/__pycache__/__init__.cpython-311.pyc +0 -0
- backend/app/__pycache__/__init__.cpython-314.pyc +0 -0
- backend/app/__pycache__/config.cpython-310.pyc +0 -0
- backend/app/__pycache__/config.cpython-311.pyc +0 -0
- backend/app/__pycache__/config.cpython-314.pyc +0 -0
- backend/app/__pycache__/database.cpython-310.pyc +0 -0
- backend/app/__pycache__/database.cpython-311.pyc +0 -0
- backend/app/__pycache__/database.cpython-314.pyc +0 -0
- backend/app/__pycache__/dependencies.cpython-310.pyc +0 -0
- backend/app/__pycache__/dependencies.cpython-311.pyc +0 -0
- backend/app/__pycache__/dependencies.cpython-314.pyc +0 -0
- backend/app/__pycache__/main.cpython-310.pyc +0 -0
- backend/app/__pycache__/main.cpython-311.pyc +0 -0
- backend/app/__pycache__/main.cpython-314.pyc +0 -0
- backend/app/__pycache__/redis_client.cpython-310.pyc +0 -0
- backend/app/__pycache__/redis_client.cpython-311.pyc +0 -0
- backend/app/__pycache__/results_listener.cpython-310.pyc +0 -0
- backend/app/__pycache__/results_listener.cpython-311.pyc +0 -0
- backend/app/__pycache__/schemas.cpython-310.pyc +0 -0
- backend/app/__pycache__/schemas.cpython-311.pyc +0 -0
- backend/app/__pycache__/schemas.cpython-314.pyc +0 -0
- backend/app/__pycache__/tasks.cpython-310.pyc +0 -0
- backend/app/__pycache__/tasks.cpython-311.pyc +0 -0
- backend/app/config.py +89 -0
- backend/app/database.py +34 -0
- backend/app/dependencies.py +54 -0
- backend/app/main.py +117 -0
- backend/app/models/__init__.py +123 -0
- backend/app/models/__pycache__/__init__.cpython-310.pyc +0 -0
- backend/app/models/__pycache__/__init__.cpython-311.pyc +0 -0
- backend/app/models/__pycache__/__init__.cpython-314.pyc +0 -0
- backend/app/redis_client.py +12 -0
- backend/app/results_listener.py +239 -0
- backend/app/routers/__init__.py +9 -0
- backend/app/routers/__pycache__/__init__.cpython-310.pyc +0 -0
- backend/app/routers/__pycache__/__init__.cpython-311.pyc +0 -0
- backend/app/routers/__pycache__/__init__.cpython-314.pyc +0 -0
- backend/app/routers/__pycache__/agents.cpython-310.pyc +0 -0
- backend/app/routers/__pycache__/agents.cpython-311.pyc +0 -0
- backend/app/routers/__pycache__/auth.cpython-310.pyc +0 -0
- backend/app/routers/__pycache__/auth.cpython-311.pyc +0 -0
- backend/app/routers/__pycache__/auth.cpython-314.pyc +0 -0
- backend/app/routers/__pycache__/projects.cpython-310.pyc +0 -0
- backend/app/routers/__pycache__/projects.cpython-311.pyc +0 -0
- backend/app/routers/__pycache__/projects.cpython-314.pyc +0 -0
- backend/app/routers/__pycache__/simulations.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -38,3 +38,9 @@ backend/reports/simulation_report_0c8fa771-b63c-4986-ba2c-02eb82baeb5b.pdf filte
|
|
| 38 |
backend/reports/simulation_report_89e70e1b-10ed-4fc0-bcaf-b2bbdb309584.pdf filter=lfs diff=lfs merge=lfs -text
|
| 39 |
backend/reports/simulation_report_ab587025-827b-489b-9a3a-c1504d2d1770.pdf filter=lfs diff=lfs merge=lfs -text
|
| 40 |
backend/reports/simulation_report_aed696fc-a253-42fd-a8c1-186494a1ee2d.pdf filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
backend/reports/simulation_report_89e70e1b-10ed-4fc0-bcaf-b2bbdb309584.pdf filter=lfs diff=lfs merge=lfs -text
|
| 39 |
backend/reports/simulation_report_ab587025-827b-489b-9a3a-c1504d2d1770.pdf filter=lfs diff=lfs merge=lfs -text
|
| 40 |
backend/reports/simulation_report_aed696fc-a253-42fd-a8c1-186494a1ee2d.pdf filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
backend/reports/simulation_report_2a0bafc1-f5c0-4f61-b68c-c8cce12152b7.pdf filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
backend/reports/simulation_report_46f895f7-f35a-4f35-8197-c65c22c7d119.pdf filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
backend/reports/simulation_report_63b56b63-cca1-4f61-b51e-03a2aa5b81d3.pdf filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
backend/reports/simulation_report_ca702f0f-e863-4cf1-886f-5e09c2ee9539.pdf filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
backend/reports/simulation_report_df9ef9a7-fd76-42ec-8c33-240a2a5c95a5.pdf filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
backend/reports/simulation_report_fb43988b-c0a1-4272-87c0-a126a09ea8b0.pdf filter=lfs diff=lfs merge=lfs -text
|
Dockerfile
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim
|
| 2 |
+
|
| 3 |
+
ENV PYTHONDONTWRITEBYTECODE=1 \
|
| 4 |
+
PYTHONUNBUFFERED=1 \
|
| 5 |
+
PIP_NO_CACHE_DIR=1 \
|
| 6 |
+
PORT=7860 \
|
| 7 |
+
PYTHONPATH=/app
|
| 8 |
+
|
| 9 |
+
WORKDIR /app
|
| 10 |
+
|
| 11 |
+
# Common system packages needed by scientific/ML dependencies.
|
| 12 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 13 |
+
build-essential \
|
| 14 |
+
curl \
|
| 15 |
+
libgl1 \
|
| 16 |
+
libglib2.0-0 \
|
| 17 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 18 |
+
|
| 19 |
+
COPY requirements.txt ./requirements.txt
|
| 20 |
+
RUN pip install --upgrade pip && pip install -r requirements.txt
|
| 21 |
+
|
| 22 |
+
# Copy backend and simulation modules used by API/background logic.
|
| 23 |
+
COPY backend ./backend
|
| 24 |
+
COPY simulation ./simulation
|
| 25 |
+
COPY start_hf_services.sh ./start_hf_services.sh
|
| 26 |
+
|
| 27 |
+
RUN mkdir -p /app/backend/uploads /app/backend/reports
|
| 28 |
+
RUN chmod +x /app/start_hf_services.sh
|
| 29 |
+
|
| 30 |
+
EXPOSE 7860
|
| 31 |
+
|
| 32 |
+
CMD ["/app/start_hf_services.sh"]
|
backend/app/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backend application module
|
| 3 |
+
"""
|
| 4 |
+
from app.main import app
|
backend/app/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (260 Bytes). View file
|
|
|
backend/app/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (311 Bytes). View file
|
|
|
backend/app/__pycache__/__init__.cpython-314.pyc
ADDED
|
Binary file (272 Bytes). View file
|
|
|
backend/app/__pycache__/config.cpython-310.pyc
ADDED
|
Binary file (2.61 kB). View file
|
|
|
backend/app/__pycache__/config.cpython-311.pyc
ADDED
|
Binary file (4.33 kB). View file
|
|
|
backend/app/__pycache__/config.cpython-314.pyc
ADDED
|
Binary file (4.36 kB). View file
|
|
|
backend/app/__pycache__/database.cpython-310.pyc
ADDED
|
Binary file (860 Bytes). View file
|
|
|
backend/app/__pycache__/database.cpython-311.pyc
ADDED
|
Binary file (1.27 kB). View file
|
|
|
backend/app/__pycache__/database.cpython-314.pyc
ADDED
|
Binary file (1.08 kB). View file
|
|
|
backend/app/__pycache__/dependencies.cpython-310.pyc
ADDED
|
Binary file (1.41 kB). View file
|
|
|
backend/app/__pycache__/dependencies.cpython-311.pyc
ADDED
|
Binary file (2.17 kB). View file
|
|
|
backend/app/__pycache__/dependencies.cpython-314.pyc
ADDED
|
Binary file (2.1 kB). View file
|
|
|
backend/app/__pycache__/main.cpython-310.pyc
ADDED
|
Binary file (2.92 kB). View file
|
|
|
backend/app/__pycache__/main.cpython-311.pyc
ADDED
|
Binary file (4.92 kB). View file
|
|
|
backend/app/__pycache__/main.cpython-314.pyc
ADDED
|
Binary file (4.61 kB). View file
|
|
|
backend/app/__pycache__/redis_client.cpython-310.pyc
ADDED
|
Binary file (590 Bytes). View file
|
|
|
backend/app/__pycache__/redis_client.cpython-311.pyc
ADDED
|
Binary file (907 Bytes). View file
|
|
|
backend/app/__pycache__/results_listener.cpython-310.pyc
ADDED
|
Binary file (7.29 kB). View file
|
|
|
backend/app/__pycache__/results_listener.cpython-311.pyc
ADDED
|
Binary file (14.6 kB). View file
|
|
|
backend/app/__pycache__/schemas.cpython-310.pyc
ADDED
|
Binary file (8.25 kB). View file
|
|
|
backend/app/__pycache__/schemas.cpython-311.pyc
ADDED
|
Binary file (13 kB). View file
|
|
|
backend/app/__pycache__/schemas.cpython-314.pyc
ADDED
|
Binary file (15.8 kB). View file
|
|
|
backend/app/__pycache__/tasks.cpython-310.pyc
ADDED
|
Binary file (5.76 kB). View file
|
|
|
backend/app/__pycache__/tasks.cpython-311.pyc
ADDED
|
Binary file (12.2 kB). View file
|
|
|
backend/app/config.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Configuration settings loaded from environment variables
|
| 3 |
+
"""
|
| 4 |
+
import os
|
| 5 |
+
from pydantic_settings import BaseSettings
|
| 6 |
+
from functools import lru_cache
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# Determine .env path - check both current dir and parent dir
|
| 10 |
+
def find_env_file():
|
| 11 |
+
"""Find .env file in app, backend, or project root directory"""
|
| 12 |
+
app_dir = os.path.dirname(os.path.abspath(__file__))
|
| 13 |
+
backend_dir = os.path.dirname(app_dir)
|
| 14 |
+
project_root_dir = os.path.dirname(backend_dir)
|
| 15 |
+
|
| 16 |
+
# Check app directory (this folder)
|
| 17 |
+
if os.path.exists(os.path.join(app_dir, ".env")):
|
| 18 |
+
return os.path.join(app_dir, ".env")
|
| 19 |
+
|
| 20 |
+
# Check backend directory
|
| 21 |
+
if os.path.exists(os.path.join(backend_dir, ".env")):
|
| 22 |
+
return os.path.join(backend_dir, ".env")
|
| 23 |
+
|
| 24 |
+
# Check project root directory
|
| 25 |
+
if os.path.exists(os.path.join(project_root_dir, ".env")):
|
| 26 |
+
return os.path.join(project_root_dir, ".env")
|
| 27 |
+
|
| 28 |
+
return ".env"
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class Settings(BaseSettings):
|
| 32 |
+
# Database
|
| 33 |
+
database_url: str = "postgresql://agentsociety:dev_password@localhost:5433/agentsociety_db"
|
| 34 |
+
|
| 35 |
+
# Redis
|
| 36 |
+
redis_url: str = "redis://localhost:6379/0"
|
| 37 |
+
|
| 38 |
+
# MQTT
|
| 39 |
+
mqtt_broker_host: str = "localhost"
|
| 40 |
+
mqtt_broker_port: int = 1883
|
| 41 |
+
mqtt_transport: str = "tcp"
|
| 42 |
+
mqtt_path: str = ""
|
| 43 |
+
|
| 44 |
+
# ChromaDB
|
| 45 |
+
chroma_host: str = "localhost"
|
| 46 |
+
chroma_port: int = 8000
|
| 47 |
+
chroma_ssl: bool = False
|
| 48 |
+
|
| 49 |
+
# AWS S3 (optional)
|
| 50 |
+
aws_access_key_id: str = ""
|
| 51 |
+
aws_secret_access_key: str = ""
|
| 52 |
+
aws_s3_bucket: str = "agentsociety-videos-dev"
|
| 53 |
+
aws_region: str = "ap-south-1"
|
| 54 |
+
|
| 55 |
+
# Google Gemini API (used by VLM video analysis)
|
| 56 |
+
gemini_api_key: str = ""
|
| 57 |
+
gemini_api_keys: str = "" # comma-separated keys for rotation
|
| 58 |
+
|
| 59 |
+
# Qwen LLM (HuggingFace Space - Ollama API, used by simulation agents)
|
| 60 |
+
qwen_api_url: str = "https://vish85521-qwen.hf.space/api/generate"
|
| 61 |
+
qwen_model_name: str = "qwen3.5:397b-cloud"
|
| 62 |
+
|
| 63 |
+
# Hugging Face video storage
|
| 64 |
+
hf_access_token: str = ""
|
| 65 |
+
hf_video_repo_id: str = "vish85521/videos"
|
| 66 |
+
hf_video_repo_type: str = "dataset"
|
| 67 |
+
hf_video_path_prefix: str = "videos"
|
| 68 |
+
|
| 69 |
+
# Security
|
| 70 |
+
jwt_secret: str = "change_this_to_a_random_32_character_string"
|
| 71 |
+
jwt_algorithm: str = "HS256"
|
| 72 |
+
jwt_expiry_hours: int = 24
|
| 73 |
+
|
| 74 |
+
# Simulation
|
| 75 |
+
default_num_agents: int = 10
|
| 76 |
+
default_simulation_days: int = 5
|
| 77 |
+
|
| 78 |
+
# File Storage
|
| 79 |
+
upload_dir: str = "uploads"
|
| 80 |
+
|
| 81 |
+
class Config:
|
| 82 |
+
env_file = find_env_file()
|
| 83 |
+
env_file_encoding = "utf-8"
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@lru_cache()
|
| 87 |
+
def get_settings() -> Settings:
|
| 88 |
+
return Settings()
|
| 89 |
+
|
backend/app/database.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database connection and session management
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy import create_engine
|
| 5 |
+
from sqlalchemy.ext.declarative import declarative_base
|
| 6 |
+
from sqlalchemy.orm import sessionmaker
|
| 7 |
+
from app.config import get_settings
|
| 8 |
+
|
| 9 |
+
settings = get_settings()
|
| 10 |
+
|
| 11 |
+
# Create engine
|
| 12 |
+
engine = create_engine(
|
| 13 |
+
settings.database_url,
|
| 14 |
+
pool_pre_ping=True,
|
| 15 |
+
pool_size=10,
|
| 16 |
+
max_overflow=20
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# Create session factory
|
| 20 |
+
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
|
| 21 |
+
|
| 22 |
+
# Base class for models
|
| 23 |
+
Base = declarative_base()
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def get_db():
|
| 27 |
+
"""
|
| 28 |
+
Dependency that provides database session
|
| 29 |
+
"""
|
| 30 |
+
db = SessionLocal()
|
| 31 |
+
try:
|
| 32 |
+
yield db
|
| 33 |
+
finally:
|
| 34 |
+
db.close()
|
backend/app/dependencies.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FastAPI dependencies for authentication and database access
|
| 3 |
+
"""
|
| 4 |
+
from fastapi import Depends, HTTPException, status
|
| 5 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 6 |
+
from sqlalchemy.orm import Session
|
| 7 |
+
from app.database import get_db
|
| 8 |
+
from app.models import User
|
| 9 |
+
from app.services.auth_service import decode_access_token
|
| 10 |
+
|
| 11 |
+
# Security scheme
|
| 12 |
+
security = HTTPBearer()
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
async def get_current_user(
|
| 16 |
+
credentials: HTTPAuthorizationCredentials = Depends(security),
|
| 17 |
+
db: Session = Depends(get_db)
|
| 18 |
+
) -> User:
|
| 19 |
+
"""
|
| 20 |
+
Dependency to get the current authenticated user from JWT token
|
| 21 |
+
|
| 22 |
+
Raises:
|
| 23 |
+
HTTPException 401: If token is invalid or user not found
|
| 24 |
+
"""
|
| 25 |
+
token = credentials.credentials
|
| 26 |
+
|
| 27 |
+
# Decode token
|
| 28 |
+
payload = decode_access_token(token)
|
| 29 |
+
if payload is None:
|
| 30 |
+
raise HTTPException(
|
| 31 |
+
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 32 |
+
detail="Invalid or expired token",
|
| 33 |
+
headers={"WWW-Authenticate": "Bearer"}
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
# Get user ID from token
|
| 37 |
+
user_id = payload.get("sub")
|
| 38 |
+
if user_id is None:
|
| 39 |
+
raise HTTPException(
|
| 40 |
+
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 41 |
+
detail="Invalid token payload",
|
| 42 |
+
headers={"WWW-Authenticate": "Bearer"}
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
# Fetch user from database
|
| 46 |
+
user = db.query(User).filter(User.id == user_id).first()
|
| 47 |
+
if user is None:
|
| 48 |
+
raise HTTPException(
|
| 49 |
+
status_code=status.HTTP_401_UNAUTHORIZED,
|
| 50 |
+
detail="User not found",
|
| 51 |
+
headers={"WWW-Authenticate": "Bearer"}
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
return user
|
backend/app/main.py
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
AgentSociety Marketing Platform - FastAPI Backend
|
| 3 |
+
"""
|
| 4 |
+
import os
|
| 5 |
+
import logging
|
| 6 |
+
from fastapi import FastAPI
|
| 7 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 8 |
+
from contextlib import asynccontextmanager
|
| 9 |
+
from sqlalchemy import text
|
| 10 |
+
|
| 11 |
+
# Configure logging
|
| 12 |
+
logging.basicConfig(
|
| 13 |
+
level=logging.INFO,
|
| 14 |
+
format="%(asctime)s [%(levelname)s] %(name)s: %(message)s"
|
| 15 |
+
)
|
| 16 |
+
logger = logging.getLogger(__name__)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@asynccontextmanager
|
| 20 |
+
async def lifespan(app: FastAPI):
|
| 21 |
+
"""Application lifespan events"""
|
| 22 |
+
from app.config import get_settings
|
| 23 |
+
settings = get_settings()
|
| 24 |
+
|
| 25 |
+
# Startup
|
| 26 |
+
logger.info("Starting AgentSociety API...")
|
| 27 |
+
|
| 28 |
+
# Create upload directory
|
| 29 |
+
os.makedirs(settings.upload_dir, exist_ok=True)
|
| 30 |
+
|
| 31 |
+
# Start results listener (receives simulation results from Ray worker)
|
| 32 |
+
from app.results_listener import start_results_listener
|
| 33 |
+
start_results_listener(redis_url=settings.redis_url)
|
| 34 |
+
logger.info("Results listener started - listening for Ray worker results")
|
| 35 |
+
|
| 36 |
+
yield
|
| 37 |
+
|
| 38 |
+
# Shutdown
|
| 39 |
+
from app.results_listener import stop_results_listener
|
| 40 |
+
stop_results_listener()
|
| 41 |
+
logger.info("Shutting down AgentSociety API...")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Create FastAPI app - disable redirect_slashes to prevent 307 that strips auth headers
|
| 45 |
+
app = FastAPI(
|
| 46 |
+
title="AgentSociety Marketing Platform",
|
| 47 |
+
description="AI-powered marketing simulation platform that simulates 1,000+ AI agents reacting to video advertisements",
|
| 48 |
+
version="1.0.0",
|
| 49 |
+
lifespan=lifespan,
|
| 50 |
+
redirect_slashes=False
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
# CORS configuration - allow any frontend origin.
|
| 54 |
+
origins = ["*"]
|
| 55 |
+
|
| 56 |
+
app.add_middleware(
|
| 57 |
+
CORSMiddleware,
|
| 58 |
+
allow_origins=origins,
|
| 59 |
+
allow_credentials=False,
|
| 60 |
+
allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
|
| 61 |
+
allow_headers=["*"],
|
| 62 |
+
expose_headers=["*"],
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
# Register routers
|
| 66 |
+
from app.routers import auth_router, projects_router, simulations_router, agents_router
|
| 67 |
+
app.include_router(auth_router)
|
| 68 |
+
app.include_router(projects_router)
|
| 69 |
+
app.include_router(simulations_router)
|
| 70 |
+
app.include_router(agents_router)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@app.get("/")
|
| 74 |
+
async def root():
|
| 75 |
+
"""Health check endpoint"""
|
| 76 |
+
return {
|
| 77 |
+
"status": "healthy",
|
| 78 |
+
"service": "AgentSociety API",
|
| 79 |
+
"version": "1.0.0"
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@app.get("/health")
|
| 84 |
+
async def health_check():
|
| 85 |
+
"""Detailed health check"""
|
| 86 |
+
from app.config import get_settings
|
| 87 |
+
settings = get_settings()
|
| 88 |
+
|
| 89 |
+
health = {
|
| 90 |
+
"api": "healthy",
|
| 91 |
+
"database": "unknown",
|
| 92 |
+
"redis": "unknown"
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
# Check database
|
| 96 |
+
try:
|
| 97 |
+
from app.database import engine
|
| 98 |
+
with engine.connect() as conn:
|
| 99 |
+
conn.execute(text("SELECT 1"))
|
| 100 |
+
health["database"] = "healthy"
|
| 101 |
+
except Exception as e:
|
| 102 |
+
health["database"] = f"unhealthy: {str(e)}"
|
| 103 |
+
|
| 104 |
+
# Check Redis
|
| 105 |
+
try:
|
| 106 |
+
import redis
|
| 107 |
+
import ssl as ssl_module
|
| 108 |
+
redis_kwargs = {}
|
| 109 |
+
if settings.redis_url.startswith("rediss://"):
|
| 110 |
+
redis_kwargs["ssl_cert_reqs"] = ssl_module.CERT_REQUIRED
|
| 111 |
+
r = redis.from_url(settings.redis_url, **redis_kwargs)
|
| 112 |
+
r.ping()
|
| 113 |
+
health["redis"] = "healthy"
|
| 114 |
+
except Exception as e:
|
| 115 |
+
health["redis"] = f"unhealthy: {str(e)}"
|
| 116 |
+
|
| 117 |
+
return health
|
backend/app/models/__init__.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SQLAlchemy models for the AgentSociety platform
|
| 3 |
+
"""
|
| 4 |
+
import uuid
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
from sqlalchemy import Column, String, Integer, Float, Text, DateTime, ForeignKey, JSON, BigInteger, Boolean
|
| 7 |
+
from sqlalchemy.dialects.postgresql import UUID
|
| 8 |
+
from sqlalchemy.orm import relationship
|
| 9 |
+
from app.database import Base
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class User(Base):
|
| 13 |
+
__tablename__ = "users"
|
| 14 |
+
|
| 15 |
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
| 16 |
+
email = Column(String(255), unique=True, nullable=False, index=True)
|
| 17 |
+
password_hash = Column(String(255), nullable=False)
|
| 18 |
+
created_at = Column(DateTime, default=datetime.utcnow)
|
| 19 |
+
subscription_tier = Column(String(20), default="FREE")
|
| 20 |
+
|
| 21 |
+
# Relationships
|
| 22 |
+
projects = relationship("Project", back_populates="user", cascade="all, delete-orphan")
|
| 23 |
+
custom_agents = relationship("CustomAgent", back_populates="user", cascade="all, delete-orphan")
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Project(Base):
|
| 27 |
+
__tablename__ = "projects"
|
| 28 |
+
|
| 29 |
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
| 30 |
+
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
|
| 31 |
+
title = Column(String(255), nullable=False)
|
| 32 |
+
video_path = Column(String(500), nullable=False)
|
| 33 |
+
video_duration_seconds = Column(Integer, nullable=True)
|
| 34 |
+
vlm_generated_context = Column(Text, nullable=True)
|
| 35 |
+
demographic_filter = Column(JSON, nullable=True)
|
| 36 |
+
status = Column(String(20), default="PENDING")
|
| 37 |
+
created_at = Column(DateTime, default=datetime.utcnow)
|
| 38 |
+
|
| 39 |
+
# Relationships
|
| 40 |
+
user = relationship("User", back_populates="projects")
|
| 41 |
+
simulation_runs = relationship("SimulationRun", back_populates="project", cascade="all, delete-orphan")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class SimulationRun(Base):
|
| 45 |
+
__tablename__ = "simulation_runs"
|
| 46 |
+
|
| 47 |
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
| 48 |
+
project_id = Column(UUID(as_uuid=True), ForeignKey("projects.id", ondelete="CASCADE"), nullable=False)
|
| 49 |
+
status = Column(String(20), default="PENDING")
|
| 50 |
+
num_agents = Column(Integer, default=1000)
|
| 51 |
+
simulation_days = Column(Integer, default=5)
|
| 52 |
+
engagement_score = Column(Float, nullable=True)
|
| 53 |
+
sentiment_breakdown = Column(JSON, nullable=True)
|
| 54 |
+
map_data = Column(JSON, nullable=True) # lightweight per-agent coords/opinion/friends
|
| 55 |
+
agent_states = Column(JSON, nullable=True) # full agent profile/emotion/reasoning
|
| 56 |
+
opinion_trajectory = Column(JSON, nullable=True)
|
| 57 |
+
started_at = Column(DateTime, nullable=True)
|
| 58 |
+
completed_at = Column(DateTime, nullable=True)
|
| 59 |
+
error_message = Column(Text, nullable=True)
|
| 60 |
+
created_at = Column(DateTime, default=datetime.utcnow)
|
| 61 |
+
|
| 62 |
+
# Custom Agents
|
| 63 |
+
use_custom_agents_only = Column(Boolean, default=False)
|
| 64 |
+
agent_ids = Column(JSON, nullable=True)
|
| 65 |
+
|
| 66 |
+
# Relationships
|
| 67 |
+
project = relationship("Project", back_populates="simulation_runs")
|
| 68 |
+
agent_logs = relationship("AgentLog", back_populates="simulation_run", cascade="all, delete-orphan")
|
| 69 |
+
risk_flags = relationship("RiskFlag", back_populates="simulation_run", cascade="all, delete-orphan")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class AgentLog(Base):
|
| 73 |
+
__tablename__ = "agent_logs"
|
| 74 |
+
|
| 75 |
+
id = Column(BigInteger, primary_key=True, autoincrement=True)
|
| 76 |
+
simulation_run_id = Column(UUID(as_uuid=True), ForeignKey("simulation_runs.id", ondelete="CASCADE"), nullable=False)
|
| 77 |
+
agent_id = Column(String(50), nullable=False)
|
| 78 |
+
timestamp = Column(DateTime, default=datetime.utcnow)
|
| 79 |
+
event_type = Column(String(30), nullable=False)
|
| 80 |
+
event_data = Column(JSON, nullable=False)
|
| 81 |
+
|
| 82 |
+
# Relationships
|
| 83 |
+
simulation_run = relationship("SimulationRun", back_populates="agent_logs")
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class RiskFlag(Base):
|
| 87 |
+
__tablename__ = "risk_flags"
|
| 88 |
+
|
| 89 |
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
| 90 |
+
simulation_run_id = Column(UUID(as_uuid=True), ForeignKey("simulation_runs.id", ondelete="CASCADE"), nullable=False)
|
| 91 |
+
flag_type = Column(String(50), nullable=False)
|
| 92 |
+
severity = Column(String(10), nullable=False)
|
| 93 |
+
description = Column(Text, nullable=False)
|
| 94 |
+
affected_demographics = Column(JSON, nullable=True)
|
| 95 |
+
sample_agent_reactions = Column(JSON, nullable=True)
|
| 96 |
+
detected_at = Column(DateTime, default=datetime.utcnow)
|
| 97 |
+
|
| 98 |
+
# Relationships
|
| 99 |
+
simulation_run = relationship("SimulationRun", back_populates="risk_flags")
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class CustomAgent(Base):
|
| 103 |
+
__tablename__ = "custom_agents"
|
| 104 |
+
|
| 105 |
+
id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
| 106 |
+
user_id = Column(UUID(as_uuid=True), ForeignKey("users.id", ondelete="CASCADE"), nullable=False)
|
| 107 |
+
name = Column(String(100), nullable=False)
|
| 108 |
+
age = Column(Integer, nullable=False)
|
| 109 |
+
gender = Column(String(20), nullable=False)
|
| 110 |
+
location = Column(String(100), nullable=False)
|
| 111 |
+
occupation = Column(String(100), nullable=False)
|
| 112 |
+
education = Column(String(100), nullable=False)
|
| 113 |
+
income_level = Column(String(50), nullable=False)
|
| 114 |
+
religion = Column(String(50), nullable=True)
|
| 115 |
+
ethnicity = Column(String(50), nullable=True)
|
| 116 |
+
social_media_usage = Column(String(50), nullable=False)
|
| 117 |
+
political_leaning = Column(String(50), nullable=True)
|
| 118 |
+
values = Column(JSON, nullable=False)
|
| 119 |
+
personality_traits = Column(JSON, nullable=False)
|
| 120 |
+
bio = Column(Text, nullable=True)
|
| 121 |
+
created_at = Column(DateTime, default=datetime.utcnow)
|
| 122 |
+
|
| 123 |
+
user = relationship("User", back_populates="custom_agents")
|
backend/app/models/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (4.61 kB). View file
|
|
|
backend/app/models/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (8.6 kB). View file
|
|
|
backend/app/models/__pycache__/__init__.cpython-314.pyc
ADDED
|
Binary file (6.94 kB). View file
|
|
|
backend/app/redis_client.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import redis
|
| 2 |
+
import functools
|
| 3 |
+
import ssl as ssl_module
|
| 4 |
+
from app.config import get_settings
|
| 5 |
+
|
| 6 |
+
@functools.lru_cache()
|
| 7 |
+
def get_redis_client():
|
| 8 |
+
settings = get_settings()
|
| 9 |
+
redis_kwargs = {}
|
| 10 |
+
if settings.redis_url.startswith("rediss://"):
|
| 11 |
+
redis_kwargs["ssl_cert_reqs"] = ssl_module.CERT_REQUIRED
|
| 12 |
+
return redis.from_url(settings.redis_url, **redis_kwargs)
|
backend/app/results_listener.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Results Listener - Background thread that listens for simulation results from Ray worker
|
| 3 |
+
|
| 4 |
+
This runs as a background thread in the FastAPI application, listening to Redis
|
| 5 |
+
for simulation results and updating the database accordingly.
|
| 6 |
+
"""
|
| 7 |
+
import os
|
| 8 |
+
import json
|
| 9 |
+
import logging
|
| 10 |
+
import threading
|
| 11 |
+
from datetime import datetime
|
| 12 |
+
from typing import Optional
|
| 13 |
+
|
| 14 |
+
import redis
|
| 15 |
+
from sqlalchemy.orm import Session
|
| 16 |
+
|
| 17 |
+
from app.database import SessionLocal
|
| 18 |
+
from app.models import SimulationRun, AgentLog, RiskFlag
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ResultsListener:
    """Background listener for simulation results published by the Ray worker.

    Subscribes to the Redis ``simulation_results`` pub/sub channel on a
    daemon thread and persists each incoming result payload to the
    database. Redis connection failures trigger reconnection with
    exponential backoff, capped at ``max_reconnect_delay`` seconds.
    """

    def __init__(
        self,
        redis_url: Optional[str] = None,
        reconnect_delay: int = 2,
        max_reconnect_delay: int = 30,
        max_agent_logs: int = 50,
    ):
        """
        Args:
            redis_url: Redis connection URL; defaults to the ``REDIS_URL``
                environment variable, then to a local Redis instance.
            reconnect_delay: Initial backoff delay (seconds) after a failure.
            max_reconnect_delay: Upper bound for the exponential backoff.
            max_agent_logs: Cap on agent logs persisted per simulation,
                protecting the database from oversized payloads.
        """
        self.redis_url = redis_url or os.getenv("REDIS_URL", "redis://localhost:6379/0")
        self.running = False
        self.thread: Optional[threading.Thread] = None
        self.reconnect_delay = reconnect_delay
        self.max_reconnect_delay = max_reconnect_delay
        self.max_agent_logs = max_agent_logs

    def start(self):
        """Start the listener in a background daemon thread (idempotent)."""
        if self.running:
            logger.warning("ResultsListener already running")
            return

        self.running = True
        # Daemon thread so a hung listener never blocks interpreter exit.
        self.thread = threading.Thread(target=self._listen_loop, daemon=True)
        self.thread.start()
        logger.info("ResultsListener started")

    def stop(self):
        """Signal the listener loop to stop and wait briefly for the thread."""
        self.running = False
        if self.thread:
            # get_message() polls with a 1s timeout, so the loop notices
            # self.running within ~1s; 5s is a generous upper bound.
            self.thread.join(timeout=5)
        logger.info("ResultsListener stopped")

    def _listen_loop(self):
        """Main loop: (re)connect to Redis and dispatch incoming messages."""
        import time
        import ssl as ssl_module

        current_delay = self.reconnect_delay

        while self.running:
            redis_client = None
            pubsub = None
            try:
                redis_kwargs = {}
                # Enforce certificate validation for TLS (rediss://) URLs.
                if self.redis_url.startswith("rediss://"):
                    redis_kwargs["ssl_cert_reqs"] = ssl_module.CERT_REQUIRED
                redis_client = redis.from_url(self.redis_url, **redis_kwargs)
                pubsub = redis_client.pubsub()
                pubsub.subscribe("simulation_results")

                logger.info("Subscribed to 'simulation_results' channel")

                # Reset backoff after a successful (re)connection.
                current_delay = self.reconnect_delay

                while self.running:
                    # Short timeout keeps the loop responsive to stop().
                    message = pubsub.get_message(timeout=1.0)
                    if message and message['type'] == 'message':
                        try:
                            data = json.loads(message['data'])
                            self._handle_result(data)
                        except json.JSONDecodeError as e:
                            logger.error("Invalid JSON in result: %s", e)
                        except Exception as e:
                            # Keep listening: one bad payload must not
                            # kill the listener thread.
                            logger.error("Error handling result: %s", e)

            except (redis.exceptions.ConnectionError, OSError) as e:
                logger.error("Redis connection error: %s. Reconnecting in %ss...", e, current_delay)
                time.sleep(current_delay)
                current_delay = min(current_delay * 2, self.max_reconnect_delay)
            except Exception as e:
                logger.error("ResultsListener unexpected error: %s. Retrying in %ss...", e, current_delay)
                time.sleep(current_delay)
                current_delay = min(current_delay * 2, self.max_reconnect_delay)
            finally:
                # Best-effort cleanup; close() failures are irrelevant here.
                try:
                    if pubsub:
                        pubsub.close()
                except Exception:
                    pass
                try:
                    if redis_client:
                        redis_client.close()
                except Exception:
                    pass

    def _handle_result(self, data: dict):
        """Persist one simulation result payload from the Ray worker.

        Expects ``data`` to carry ``simulation_id`` and ``status``
        ('COMPLETED', or anything else which is recorded as a failure),
        plus a ``results`` dict on success or an ``error`` string on
        failure.
        """
        simulation_id = data.get('simulation_id')
        status = data.get('status', 'FAILED')

        if not simulation_id:
            logger.error("Result missing simulation_id")
            return

        logger.info("Received result for simulation %s: %s", simulation_id, status)

        db = SessionLocal()
        try:
            simulation = db.query(SimulationRun).filter(
                SimulationRun.id == simulation_id
            ).first()

            if not simulation:
                logger.error("Simulation %s not found in database", simulation_id)
                return

            if status == 'COMPLETED':
                results = data.get('results', {})

                # Update simulation with results.
                # NOTE(review): datetime.utcnow() is deprecated on 3.12+;
                # kept for naive-datetime compatibility with the existing
                # columns — TODO migrate alongside a schema review.
                simulation.status = "COMPLETED"
                simulation.completed_at = datetime.utcnow()
                simulation.engagement_score = results.get('engagement_score', 0)
                simulation.sentiment_breakdown = results.get('sentiment_breakdown', {})

                # Map data and per-agent states feed the map visualization.
                simulation.map_data = results.get('map_data', [])
                simulation.agent_states = results.get('agent_states', [])
                simulation.opinion_trajectory = results.get('opinion_trajectory', {})

                db.commit()
                logger.info("Simulation %s marked as COMPLETED", simulation_id)

                # Logs and flags are committed separately so a failure in
                # either cannot roll back the COMPLETED status above.
                try:
                    self._save_agent_logs(db, simulation.id, results.get('agent_logs', []))
                except Exception as e:
                    logger.warning("Failed to save agent logs: %s", e)

                try:
                    self._save_risk_flags(db, simulation.id, results.get('risk_flags', []))
                except Exception as e:
                    logger.warning("Failed to save risk flags: %s", e)

            else:
                # Any non-COMPLETED status is recorded as a failure.
                simulation.status = "FAILED"
                simulation.completed_at = datetime.utcnow()
                simulation.error_message = data.get('error', 'Unknown error')
                db.commit()
                logger.info("Simulation %s marked as FAILED: %s", simulation_id, data.get('error'))

        except Exception as e:
            logger.error("Database error handling result: %s", e)
            db.rollback()
        finally:
            db.close()

    def _save_agent_logs(self, db: "Session", simulation_id: str, logs: list):
        """Persist up to ``self.max_agent_logs`` agent logs for a simulation."""
        # Limit to prevent overwhelming the database.
        limit = self.max_agent_logs
        total = len(logs)
        if total > limit:
            logger.warning("Agent log truncation: %s received, saving only %s", total, limit)

        for log_data in logs[:limit]:
            try:
                event_data = log_data.get('event_data', {})
                # Round-trip through JSON so non-serializable values are
                # stringified before reaching the JSON column.
                if isinstance(event_data, dict):
                    event_data = json.loads(json.dumps(event_data, default=str))

                agent_log = AgentLog(
                    simulation_run_id=simulation_id,
                    agent_id=str(log_data.get('agent_id', 'unknown')),
                    event_type=str(log_data.get('event_type', 'UNKNOWN')),
                    event_data=event_data
                )
                db.add(agent_log)
            except Exception as e:
                logger.warning("Failed to add agent log: %s", e)

        db.commit()
        logger.info("Saved %s agent logs for simulation %s", min(limit, total), simulation_id)

    def _save_risk_flags(self, db: "Session", simulation_id: str, flags: list):
        """Persist risk flags for a simulation; bad entries are skipped."""
        for flag_data in flags:
            try:
                risk_flag = RiskFlag(
                    simulation_run_id=simulation_id,
                    flag_type=str(flag_data.get('flag_type', 'UNKNOWN')),
                    severity=str(flag_data.get('severity', 'LOW')),
                    description=str(flag_data.get('description', '')),
                    affected_demographics=flag_data.get('affected_demographics'),
                    sample_agent_reactions=flag_data.get('sample_agent_reactions')
                )
                db.add(risk_flag)
            except Exception as e:
                logger.warning("Failed to add risk flag: %s", e)

        db.commit()
        logger.info("Saved %s risk flags for simulation %s", len(flags), simulation_id)
| 216 |
+
# Process-wide singleton ResultsListener, created lazily by
# get_results_listener() / start_results_listener().
_results_listener: Optional[ResultsListener] = None
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def get_results_listener() -> ResultsListener:
    """Return the process-wide results listener, creating it on first use."""
    global _results_listener
    if _results_listener is not None:
        return _results_listener
    _results_listener = ResultsListener()
    return _results_listener
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def start_results_listener(redis_url: str = None):
    """Start the module-level listener, instantiating it on first call."""
    global _results_listener
    listener = _results_listener
    if listener is None:
        listener = ResultsListener(redis_url=redis_url)
        _results_listener = listener
    listener.start()
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def stop_results_listener():
    """Shut down the module-level listener, if one was ever created."""
    listener = _results_listener
    if not listener:
        return
    listener.stop()
|
backend/app/routers/__init__.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Router module exports
|
| 3 |
+
"""
|
| 4 |
+
from app.routers.auth import router as auth_router
|
| 5 |
+
from app.routers.projects import router as projects_router
|
| 6 |
+
from app.routers.simulations import router as simulations_router
|
| 7 |
+
from app.routers.agents import router as agents_router
|
| 8 |
+
|
| 9 |
+
__all__ = ["auth_router", "projects_router", "simulations_router", "agents_router"]
|
backend/app/routers/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (489 Bytes). View file
|
|
|
backend/app/routers/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (595 Bytes). View file
|
|
|
backend/app/routers/__pycache__/__init__.cpython-314.pyc
ADDED
|
Binary file (507 Bytes). View file
|
|
|
backend/app/routers/__pycache__/agents.cpython-310.pyc
ADDED
|
Binary file (2.82 kB). View file
|
|
|
backend/app/routers/__pycache__/agents.cpython-311.pyc
ADDED
|
Binary file (5.66 kB). View file
|
|
|
backend/app/routers/__pycache__/auth.cpython-310.pyc
ADDED
|
Binary file (2.18 kB). View file
|
|
|
backend/app/routers/__pycache__/auth.cpython-311.pyc
ADDED
|
Binary file (3.76 kB). View file
|
|
|
backend/app/routers/__pycache__/auth.cpython-314.pyc
ADDED
|
Binary file (3.83 kB). View file
|
|
|
backend/app/routers/__pycache__/projects.cpython-310.pyc
ADDED
|
Binary file (4.88 kB). View file
|
|
|
backend/app/routers/__pycache__/projects.cpython-311.pyc
ADDED
|
Binary file (10.3 kB). View file
|
|
|
backend/app/routers/__pycache__/projects.cpython-314.pyc
ADDED
|
Binary file (9.41 kB). View file
|
|
|
backend/app/routers/__pycache__/simulations.cpython-310.pyc
ADDED
|
Binary file (8.74 kB). View file
|
|
|