Commit ·
42d88ae
0
Parent(s):
Initial Hugging Face Space - Backend deployment
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .dockerignore +37 -0
- .gitattributes +4 -0
- .gitignore +100 -0
- Backend/.env.example +7 -0
- Backend/agents/__init__.py +6 -0
- Backend/agents/escalation/__init__.py +1 -0
- Backend/agents/escalation/agent.py +178 -0
- Backend/agents/geoDeduplicate/__init__.py +1 -0
- Backend/agents/geoDeduplicate/agent.py +225 -0
- Backend/agents/notification/__init__.py +1 -0
- Backend/agents/notification/agent.py +333 -0
- Backend/agents/priority/__init__.py +1 -0
- Backend/agents/priority/agent.py +144 -0
- Backend/agents/routing/__init__.py +1 -0
- Backend/agents/routing/agent.py +222 -0
- Backend/agents/sla/agent.py +157 -0
- Backend/agents/vision/__init__.py +1 -0
- Backend/agents/vision/agent.py +296 -0
- Backend/agents/vision/model.pt +3 -0
- Backend/api/__init__.py +3 -0
- Backend/api/app.py +126 -0
- Backend/api/routes/__init__.py +16 -0
- Backend/api/routes/admin.py +1160 -0
- Backend/api/routes/flow.py +163 -0
- Backend/api/routes/health.py +21 -0
- Backend/api/routes/issues.py +519 -0
- Backend/api/routes/worker.py +204 -0
- Backend/core/__init__.py +4 -0
- Backend/core/auth.py +109 -0
- Backend/core/config.py +83 -0
- Backend/core/events.py +106 -0
- Backend/core/flow_tracker.py +188 -0
- Backend/core/logging.py +77 -0
- Backend/core/schemas.py +169 -0
- Backend/core/security.py +80 -0
- Backend/database/__init__.py +2 -0
- Backend/database/connection.py +57 -0
- Backend/database/init_db.py +46 -0
- Backend/database/models.py +174 -0
- Backend/database/seed.py +83 -0
- Backend/main.py +14 -0
- Backend/orchestration/__init__.py +1 -0
- Backend/orchestration/base.py +25 -0
- Backend/requirements.txt +19 -0
- Backend/services/__init__.py +2 -0
- Backend/services/email.py +273 -0
- Backend/services/geocoding.py +100 -0
- Backend/services/ingestion.py +85 -0
- Backend/services/supabase_auth.py +119 -0
- Backend/services/vision.py +3 -0
.dockerignore
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.git
|
| 2 |
+
.github
|
| 3 |
+
.venv
|
| 4 |
+
.env
|
| 5 |
+
__pycache__
|
| 6 |
+
*.pyc
|
| 7 |
+
*.pyo
|
| 8 |
+
*.pyd
|
| 9 |
+
.Python
|
| 10 |
+
*.so
|
| 11 |
+
*.egg
|
| 12 |
+
*.egg-info
|
| 13 |
+
dist
|
| 14 |
+
build
|
| 15 |
+
.vscode
|
| 16 |
+
.idea
|
| 17 |
+
*.log
|
| 18 |
+
.DS_Store
|
| 19 |
+
Thumbs.db
|
| 20 |
+
node_modules
|
| 21 |
+
Frontend
|
| 22 |
+
User
|
| 23 |
+
Dataset
|
| 24 |
+
Dataset_Merged
|
| 25 |
+
Model/runs
|
| 26 |
+
runs
|
| 27 |
+
noupload
|
| 28 |
+
archive.zip
|
| 29 |
+
Backendbackup
|
| 30 |
+
design-system
|
| 31 |
+
docs
|
| 32 |
+
infra
|
| 33 |
+
migrations
|
| 34 |
+
createadmin.py
|
| 35 |
+
generate_password_hash.py
|
| 36 |
+
start.js
|
| 37 |
+
start_system.bat
|
.gitattributes
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
.env
|
| 3 |
+
.env.*
|
| 4 |
+
!.env.example
|
| 5 |
+
!.env.sample
|
| 6 |
+
|
| 7 |
+
*.pem
|
| 8 |
+
*.key
|
| 9 |
+
*.p12
|
| 10 |
+
*.pfx
|
| 11 |
+
*.crt
|
| 12 |
+
*.cer
|
| 13 |
+
*.der
|
| 14 |
+
|
| 15 |
+
*.log
|
| 16 |
+
|
| 17 |
+
$**/__pycache__/
|
| 18 |
+
*.py[cod]
|
| 19 |
+
*$py.class
|
| 20 |
+
.pytest_cache/
|
| 21 |
+
.mypy_cache/
|
| 22 |
+
.ruff_cache/
|
| 23 |
+
.coverage
|
| 24 |
+
htmlcov/
|
| 25 |
+
.tox/
|
| 26 |
+
|
| 27 |
+
.venv/
|
| 28 |
+
venv/
|
| 29 |
+
env/
|
| 30 |
+
ENV/
|
| 31 |
+
|
| 32 |
+
# Backend cache directories are covered by the global pattern
|
| 33 |
+
Backendbackup/
|
| 34 |
+
|
| 35 |
+
node_modules/
|
| 36 |
+
.next/
|
| 37 |
+
out/
|
| 38 |
+
dist/
|
| 39 |
+
build/
|
| 40 |
+
|
| 41 |
+
npm-debug.log*
|
| 42 |
+
yarn-debug.log*
|
| 43 |
+
yarn-error.log*
|
| 44 |
+
pnpm-debug.log*
|
| 45 |
+
|
| 46 |
+
.DS_Store
|
| 47 |
+
Thumbs.db
|
| 48 |
+
|
| 49 |
+
.vscode/
|
| 50 |
+
!.vscode/settings.json
|
| 51 |
+
!.vscode/tasks.json
|
| 52 |
+
!.vscode/launch.json
|
| 53 |
+
!.vscode/extensions.json
|
| 54 |
+
|
| 55 |
+
Frontend/.env
|
| 56 |
+
Frontend/.env.*
|
| 57 |
+
|
| 58 |
+
User/.env
|
| 59 |
+
User/.env.*
|
| 60 |
+
|
| 61 |
+
User/.expo/
|
| 62 |
+
User/.metro-cache/
|
| 63 |
+
User/.cache/
|
| 64 |
+
|
| 65 |
+
User/android/.gradle/
|
| 66 |
+
User/android/build/
|
| 67 |
+
User/android/app/build/
|
| 68 |
+
User/android/local.properties
|
| 69 |
+
User/android/app/release/
|
| 70 |
+
User/android/app/debug/
|
| 71 |
+
|
| 72 |
+
static/temp/
|
| 73 |
+
Backend/static/temp/
|
| 74 |
+
|
| 75 |
+
runs/
|
| 76 |
+
Model/runs/
|
| 77 |
+
Model/test_predictions/
|
| 78 |
+
Dataset/
|
| 79 |
+
Dataset_Merged/
|
| 80 |
+
|
| 81 |
+
*.pt
|
| 82 |
+
*.onnx
|
| 83 |
+
*.torchscript
|
| 84 |
+
*.engine
|
| 85 |
+
*.tflite
|
| 86 |
+
*.weights
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
noupload/
|
| 90 |
+
infra/env/
|
| 91 |
+
infra/**/dev.env
|
| 92 |
+
infra/**/prod.env
|
| 93 |
+
infra/cloudflared/*.json
|
| 94 |
+
|
| 95 |
+
archive.zip
|
| 96 |
+
|
| 97 |
+
*.sql
|
| 98 |
+
!model.pt
|
| 99 |
+
.agent
|
| 100 |
+
design-system
|
Backend/.env.example
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
DATABASE_URL=
|
| 2 |
+
SUPABASE_URL=
|
| 3 |
+
SUPABASE_KEY=
|
| 4 |
+
SUPABASE_JWT_SECRET=
|
| 5 |
+
SUPABASE_BUCKET=city-issues
|
| 6 |
+
GEMINI_API_KEY=
|
| 7 |
+
FRONTEND_URL=
|
Backend/agents/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .vision import VisionAgent
|
| 2 |
+
from .geoDeduplicate import GeoDeduplicateAgent
|
| 3 |
+
from .priority import PriorityAgent
|
| 4 |
+
from .routing import RoutingAgent
|
| 5 |
+
from .escalation import EscalationAgent
|
| 6 |
+
from .notification import NotificationAgent
|
Backend/agents/escalation/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import EscalationAgent, IssueEscalated
|
Backend/agents/escalation/agent.py
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
from sqlalchemy import select
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
|
| 9 |
+
from Backend.core.events import event_bus, Event
|
| 10 |
+
from Backend.core.logging import get_logger
|
| 11 |
+
from Backend.core.config import settings
|
| 12 |
+
from Backend.database.models import Issue, IssueEvent, Escalation, Department, Member
|
| 13 |
+
from Backend.orchestration.base import BaseAgent
|
| 14 |
+
|
| 15 |
+
logger = get_logger(__name__, agent_name="EscalationAgent")
|
| 16 |
+
|
| 17 |
+
if settings.gemini_api_key:
|
| 18 |
+
genai.configure(api_key=settings.gemini_api_key)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class IssueEscalated(Event):
    """Event published on the event bus when an issue's escalation level rises.

    Carries, in addition to the base Event fields (which include issue_id):
      from_level: escalation level before the change.
      to_level: escalation level after the change.
      reason: short justification string (produced by the LLM analysis).
      hours_overdue: hours past the SLA deadline; the publisher in this file
          currently always sends 0 for this field.
    """
    from_level: int
    to_level: int
    reason: str
    hours_overdue: float
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class EscalationAgent(BaseAgent):
    """Agent that decides whether overdue issues should be escalated.

    Uses a Gemini-hosted model ('gemma-3-27b-it') to judge escalation need
    from SLA timing and priority, then persists an Escalation row, an
    IssueEvent audit record, and publishes an IssueEscalated event.
    """

    def __init__(self, db: AsyncSession):
        """Bind the agent to an async DB session; model is None when no API key."""
        super().__init__("EscalationAgent")
        self.db = db
        if settings.gemini_api_key:
            self.model = genai.GenerativeModel('gemma-3-27b-it')
        else:
            self.model = None

    async def should_escalate(self, issue: Issue) -> tuple[bool, int, str]:
        """Ask the LLM whether *issue* needs escalation.

        Returns (should_escalate, new_level, reason). Returns a negative
        answer without calling the model when the issue has no SLA deadline
        or the model is not configured.
        """
        if not issue.sla_deadline:
            return False, 0, "No SLA deadline set"

        if not self.model:
            return False, 0, "Gemini API not configured"

        # NOTE(review): utcnow() is naive — assumes created_at/sla_deadline
        # are also stored as naive UTC datetimes; confirm against the models.
        now = datetime.utcnow()
        hours_since_creation = (now - issue.created_at).total_seconds() / 3600
        hours_until_deadline = (issue.sla_deadline - now).total_seconds() / 3600

        prompt = f"""Analyze civic issue escalation:

Issue State: {issue.state}
Priority: {issue.priority} (1=Critical, 2=High, 3=Medium, 4=Low)
Current Escalation Level: {issue.escalation_level}
Hours Since Creation: {hours_since_creation:.1f}
Hours Until Deadline: {hours_until_deadline:.1f}
Category: {issue.description[:100] if issue.description else 'N/A'}

Determine if escalation is needed. Consider:
- SLA breach (negative deadline hours)
- Priority urgency
- Time criticality

Return ONLY valid JSON:
{{"should_escalate": true/false, "new_level": 0-3, "reason": "max 80 chars"}}"""

        try:
            response = self.model.generate_content(prompt)
            # Strip markdown code fences the model sometimes wraps around JSON.
            result = json.loads(response.text.replace("```json", "").replace("```", "").strip())
            return result.get("should_escalate", False), result.get("new_level", issue.escalation_level), result.get("reason", "Analysis completed")
        except Exception as e:
            # Any model/parsing failure degrades to "no escalation".
            logger.error(f"Gemini escalation analysis failed: {e}")
            return False, issue.escalation_level, "Analysis error"

    async def get_escalation_targets(self, issue: Issue) -> list[str]:
        """Collect e-mail addresses to notify: the department's escalation
        address (if set) and the assigned member's address (if any)."""
        targets = []

        if issue.department_id:
            query = select(Department).where(Department.id == issue.department_id)
            result = await self.db.execute(query)
            dept = result.scalar_one_or_none()
            if dept and dept.escalation_email:
                targets.append(dept.escalation_email)

        if issue.assigned_member_id:
            query = select(Member).where(Member.id == issue.assigned_member_id)
            result = await self.db.execute(query)
            member = result.scalar_one_or_none()
            if member:
                targets.append(member.email)

        return targets

    async def process_issue(self, issue_id: UUID) -> dict:
        """Evaluate one issue and, if warranted, escalate it.

        Side effects on escalation: mutates the Issue row (level, timestamp,
        state), adds Escalation and IssueEvent rows, flushes the session,
        and publishes an IssueEscalated event. Returns a result dict.
        """
        issue = await self.db.get(Issue, issue_id)
        if not issue:
            return {"error": "Issue not found"}

        # Terminal states never escalate.
        if issue.state in ["resolved", "verified", "closed"]:
            return {"skipped": True, "reason": "Issue already resolved"}

        should_esc, new_level, reason = await self.should_escalate(issue)

        if not should_esc:
            return {"escalated": False, "reason": reason}

        old_level = issue.escalation_level
        issue.escalation_level = new_level
        issue.escalated_at = datetime.utcnow()
        issue.state = "escalated"

        targets = await self.get_escalation_targets(issue)

        escalation = Escalation(
            issue_id=issue_id,
            from_level=old_level,
            to_level=new_level,
            reason=reason,
            escalated_by="EscalationAgent",
            notified_emails=",".join(targets) if targets else None,
        )
        self.db.add(escalation)

        self.log_decision(
            issue_id=issue_id,
            decision=f"Escalated from level {old_level} to {new_level}",
            reasoning=reason
        )

        # Audit trail record for the issue's event log.
        event_record = IssueEvent(
            issue_id=issue_id,
            event_type="escalated",
            agent_name=self.name,
            event_data=json.dumps({
                "from_level": old_level,
                "to_level": new_level,
                "reason": reason,
                "notified": targets,
            })
        )
        self.db.add(event_record)
        await self.db.flush()

        esc_event = IssueEscalated(
            issue_id=issue_id,
            from_level=old_level,
            to_level=new_level,
            reason=reason,
            hours_overdue=0,  # not computed here; see IssueEscalated docstring
        )
        await event_bus.publish(esc_event)

        return {
            "escalated": True,
            "from_level": old_level,
            "to_level": new_level,
            "reason": reason,
            "notified": targets,
        }

    async def check_all_pending(self) -> list[dict]:
        """Sweep all active, non-duplicate issues with an SLA deadline and
        escalate each as needed; returns only the escalated results."""
        query = (
            select(Issue)
            .where(Issue.state.in_(["assigned", "in_progress", "escalated"]))
            .where(Issue.is_duplicate == False)
            .where(Issue.sla_deadline.isnot(None))
        )
        result = await self.db.execute(query)
        issues = result.scalars().all()

        results = []
        for issue in issues:
            result = await self.process_issue(issue.id)
            if result.get("escalated"):
                results.append(result)

        return results

    async def handle(self, event) -> None:
        """Event-bus entry point: process the event's issue."""
        await self.process_issue(event.issue_id)
|
Backend/agents/geoDeduplicate/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import GeoDeduplicateAgent, IssueDeduplicated
|
Backend/agents/geoDeduplicate/agent.py
ADDED
|
@@ -0,0 +1,225 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from uuid import UUID
|
| 4 |
+
from sqlalchemy import select
|
| 5 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 6 |
+
from sqlalchemy.orm import selectinload
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
|
| 9 |
+
from Backend.core.config import settings
|
| 10 |
+
from Backend.core.events import event_bus, IssueClassified, Event
|
| 11 |
+
from Backend.core.logging import get_logger
|
| 12 |
+
from Backend.database.models import Issue, IssueEvent, Classification
|
| 13 |
+
from Backend.utils.geo import haversine_distance, get_bounding_box
|
| 14 |
+
from Backend.orchestration.base import BaseAgent
|
| 15 |
+
|
| 16 |
+
logger = get_logger(__name__, agent_name="GeoDeduplicateAgent")
|
| 17 |
+
|
| 18 |
+
if settings.gemini_api_key:
|
| 19 |
+
genai.configure(api_key=settings.gemini_api_key)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class IssueDeduplicated(Event):
    """Event published after geo-deduplication of an issue completes.

    Fields beyond the base Event:
      is_duplicate: whether the issue was linked to an existing one.
      parent_issue_id: id of the canonical (parent) issue when duplicate.
      cluster_id: stringified parent id used as a simple cluster key.
      nearby_count: number of candidate issues found within the radius.
    """
    is_duplicate: bool
    parent_issue_id: Optional[UUID] = None
    cluster_id: Optional[str] = None
    nearby_count: int = 0
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class GeoDeduplicateAgent(BaseAgent):
    """Agent that detects duplicate civic-issue reports.

    Combines a geographic radius filter (haversine within a bounding box)
    with an LLM semantic-similarity score; a candidate scoring > 0.75 marks
    the new issue as a duplicate of that candidate.
    """

    def __init__(self, db: AsyncSession):
        """Bind to an async DB session; radius comes from settings."""
        super().__init__("GeoDeduplicateAgent")
        self.db = db
        self.radius_meters = settings.duplicate_radius_meters
        if settings.gemini_api_key:
            self.model = genai.GenerativeModel('gemma-3-27b-it')
        else:
            self.model = None

    async def semantic_similarity(self, desc1: str, desc2: str, cat1: str, cat2: str) -> float:
        """Score semantic similarity of two reports in [0.0, 1.0] via the LLM.

        Returns the neutral value 0.5 when the model is unconfigured or the
        call/parse fails — i.e. similarity alone then cannot cross the 0.75
        duplicate threshold used by check_duplicate.
        """
        if not self.model:
            return 0.5

        prompt = f"""Rate semantic similarity (0.0-1.0) between civic issue reports:

Issue A:
Category: {cat1}
Description: {desc1[:200] if desc1 else 'N/A'}

Issue B:
Category: {cat2}
Description: {desc2[:200] if desc2 else 'N/A'}

Consider:
- Same problem type?
- Same physical location context?
- Same infrastructure element?

Return ONLY a decimal number between 0.0 and 1.0."""

        try:
            response = self.model.generate_content(prompt)
            score = float(response.text.strip())
            return max(0.0, min(1.0, score))  # clamp defensive of model output
        except Exception as e:
            logger.error(f"Gemini similarity failed: {e}")
            return 0.5

    async def find_nearby_issues(
        self,
        latitude: float,
        longitude: float,
        exclude_id: UUID,
        category: Optional[str] = None
    ) -> list[tuple[Issue, float]]:
        """Return active, non-duplicate issues within radius_meters of the
        given point, as (issue, distance_m) pairs sorted by distance.

        A coarse bounding-box SQL filter narrows candidates; exact haversine
        distance then prunes to the true circular radius. When *category* is
        given, classified candidates must match it; unclassified candidates
        are kept regardless.
        """
        min_lat, max_lat, min_lon, max_lon = get_bounding_box(
            latitude, longitude, self.radius_meters
        )

        query = (
            select(Issue)
            .options(selectinload(Issue.classification))
            .where(Issue.latitude >= min_lat)
            .where(Issue.latitude <= max_lat)
            .where(Issue.longitude >= min_lon)
            .where(Issue.longitude <= max_lon)
            .where(Issue.id != exclude_id)
            .where(Issue.state.in_(["reported", "validated", "assigned", "in_progress"]))
            .where(Issue.is_duplicate == False)
        )

        result = await self.db.execute(query)
        candidates = result.scalars().all()

        nearby = []
        for issue in candidates:
            distance = haversine_distance(
                latitude, longitude,
                issue.latitude, issue.longitude
            )
            if distance <= self.radius_meters:
                if category and issue.classification:
                    if issue.classification.primary_category == category:
                        nearby.append((issue, distance))
                else:
                    nearby.append((issue, distance))

        return sorted(nearby, key=lambda x: x[1])

    async def check_duplicate(
        self,
        issue_id: UUID,
        latitude: float,
        longitude: float,
        category: Optional[str] = None,
        description: Optional[str] = None
    ) -> tuple[bool, Optional[UUID], list[tuple[Issue, float]]]:
        """Decide whether the issue duplicates a nearby one.

        Returns (is_duplicate, parent_issue_id, nearby_pairs). An issue is a
        duplicate when the best LLM similarity among classified nearby
        candidates exceeds 0.75.
        """
        nearby = await self.find_nearby_issues(
            latitude, longitude, issue_id, category
        )

        if not nearby:
            return False, None, []

        best_match = None
        highest_score = 0.0

        for issue, distance in nearby:
            # Only candidates with a classification are scored semantically.
            if issue.classification and category:
                cat1 = category
                cat2 = issue.classification.primary_category
                desc1 = description or ""
                desc2 = issue.description or ""

                similarity = await self.semantic_similarity(desc1, desc2, cat1, cat2)

                if similarity > highest_score:
                    highest_score = similarity
                    best_match = issue

        if highest_score > 0.75 and best_match:
            return True, best_match.id, nearby

        return False, None, nearby

    async def process_issue(self, issue_id: UUID) -> dict:
        """Run deduplication for one issue and persist the outcome.

        Side effects: mutates the issue's duplicate/geo fields, may raise the
        parent's priority (lower number = higher priority), records an
        IssueEvent, flushes, and publishes IssueDeduplicated.
        """
        query = (
            select(Issue)
            .options(selectinload(Issue.classification))
            .where(Issue.id == issue_id)
        )
        result = await self.db.execute(query)
        issue = result.scalar_one_or_none()
        if not issue:
            return {"error": "Issue not found"}

        category = None
        if issue.classification:
            category = issue.classification.primary_category

        is_duplicate, parent_id, nearby = await self.check_duplicate(
            issue.id,
            issue.latitude,
            issue.longitude,
            category,
            issue.description
        )

        if is_duplicate and parent_id:
            issue.is_duplicate = True
            issue.parent_issue_id = parent_id
            issue.geo_status = "duplicate"
            issue.geo_cluster_id = str(parent_id)

            # Duplicate with higher urgency bumps the parent's priority.
            parent = await self.db.get(Issue, parent_id)
            if parent and issue.priority and parent.priority:
                if issue.priority < parent.priority:
                    parent.priority = issue.priority

            self.log_decision(
                issue_id=issue_id,
                decision="Marked as duplicate",
                reasoning=f"Found {len(nearby)} nearby issues within {self.radius_meters}m, linked to parent {parent_id}"
            )
        else:
            issue.is_duplicate = False
            issue.geo_status = "unique"

            self.log_decision(
                issue_id=issue_id,
                decision="Marked as unique",
                reasoning=f"No similar issues found within {self.radius_meters}m radius"
            )

        event_record = IssueEvent(
            issue_id=issue_id,
            event_type="geo_deduplicated",
            agent_name=self.name,
            event_data=json.dumps({
                "is_duplicate": is_duplicate,
                "parent_issue_id": str(parent_id) if parent_id else None,
                "nearby_count": len(nearby),
                "radius_meters": self.radius_meters,
            })
        )
        self.db.add(event_record)
        await self.db.flush()

        dedup_event = IssueDeduplicated(
            issue_id=issue_id,
            is_duplicate=is_duplicate,
            parent_issue_id=parent_id,
            cluster_id=str(parent_id) if parent_id else None,
            nearby_count=len(nearby),
        )
        await event_bus.publish(dedup_event)

        return {
            "is_duplicate": is_duplicate,
            "parent_issue_id": str(parent_id) if parent_id else None,
            "nearby_count": len(nearby),
            "geo_status": issue.geo_status,
        }

    async def handle(self, event: IssueClassified) -> None:
        """Event-bus entry point: deduplicate the newly classified issue."""
        await self.process_issue(event.issue_id)
|
Backend/agents/notification/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import NotificationAgent, NotificationSent
|
Backend/agents/notification/agent.py
ADDED
|
@@ -0,0 +1,333 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
|
| 6 |
+
from sqlalchemy import select
|
| 7 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 8 |
+
from sqlalchemy.orm import selectinload
|
| 9 |
+
|
| 10 |
+
from Backend.core.events import Event, event_bus
|
| 11 |
+
from Backend.core.logging import get_logger
|
| 12 |
+
from Backend.core.config import settings
|
| 13 |
+
from Backend.database.models import Classification, Issue, IssueEvent, Member
|
| 14 |
+
from Backend.orchestration.base import BaseAgent
|
| 15 |
+
from Backend.services.email import email_service
|
| 16 |
+
|
| 17 |
+
logger = get_logger(__name__, agent_name="NotificationAgent")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class NotificationSent(Event):
    """Event describing an outbound notification.

    Fields beyond the base Event:
      notification_type: kind of notification (e.g. "assignment", "escalation").
      recipients: destination e-mail addresses.
      message: plain-text body that was (or will be) sent.
    """
    notification_type: str
    recipients: list[str]
    message: str
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class NotificationAgent(BaseAgent):
|
| 27 |
+
    def __init__(self, db: AsyncSession):
        """Bind the agent to an async DB session."""
        super().__init__("NotificationAgent")
        self.db = db
        # In-memory list of notifications queued during this agent's lifetime;
        # per-issue copies are also persisted as IssueEvent rows.
        self.pending_notifications: list[dict] = []
| 31 |
+
|
| 32 |
+
async def get_issue_with_classification(self, issue_id: UUID) -> Optional[Issue]:
|
| 33 |
+
query = (
|
| 34 |
+
select(Issue)
|
| 35 |
+
.options(selectinload(Issue.classification))
|
| 36 |
+
.where(Issue.id == issue_id)
|
| 37 |
+
)
|
| 38 |
+
result = await self.db.execute(query)
|
| 39 |
+
return result.scalar_one_or_none()
|
| 40 |
+
|
| 41 |
+
def format_issue_summary(self, issue: Issue) -> str:
|
| 42 |
+
category = (
|
| 43 |
+
issue.classification.primary_category if issue.classification else "Unknown"
|
| 44 |
+
)
|
| 45 |
+
priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"}
|
| 46 |
+
priority_str = priority_map.get(issue.priority, "UNKNOWN")
|
| 47 |
+
|
| 48 |
+
return (
|
| 49 |
+
f"Issue #{str(issue.id)[:8]}\n"
|
| 50 |
+
f"Category: {category}\n"
|
| 51 |
+
f"Priority: {priority_str}\n"
|
| 52 |
+
f"Location: ({issue.latitude:.4f}, {issue.longitude:.4f})\n"
|
| 53 |
+
f"Description: {issue.description or 'No description'}\n"
|
| 54 |
+
f"State: {issue.state}"
|
| 55 |
+
)
|
| 56 |
+
|
| 57 |
+
    async def queue_notification(
        self,
        notification_type: str,
        recipients: list[str],
        subject: str,
        message: str,
        issue_id: Optional[UUID] = None,
    ):
        """Record a notification in the in-memory queue and, when tied to an
        issue, persist a 'notification_queued' IssueEvent audit row.

        Returns the queued notification dict.
        """
        notification = {
            "type": notification_type,
            "recipients": recipients,
            "subject": subject,
            "message": message,
            "issue_id": str(issue_id) if issue_id else None,
            "queued_at": datetime.utcnow().isoformat(),
        }
        self.pending_notifications.append(notification)

        logger.info(f"Notification queued: {notification_type} to {recipients}")

        if issue_id:
            event_record = IssueEvent(
                issue_id=issue_id,
                event_type="notification_queued",
                agent_name=self.name,
                event_data=json.dumps(notification),
            )
            self.db.add(event_record)
            await self.db.flush()

        return notification
| 88 |
+
|
| 89 |
+
async def notify_assignment(self, issue_id: UUID):
|
| 90 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 91 |
+
if not issue:
|
| 92 |
+
return
|
| 93 |
+
|
| 94 |
+
recipients = []
|
| 95 |
+
worker_name = "Worker"
|
| 96 |
+
|
| 97 |
+
if issue.assigned_member_id:
|
| 98 |
+
query = select(Member).where(Member.id == issue.assigned_member_id)
|
| 99 |
+
result = await self.db.execute(query)
|
| 100 |
+
member = result.scalar_one_or_none()
|
| 101 |
+
if member:
|
| 102 |
+
recipients.append(member.email)
|
| 103 |
+
worker_name = member.name
|
| 104 |
+
|
| 105 |
+
category = (
|
| 106 |
+
issue.classification.primary_category
|
| 107 |
+
if issue.classification
|
| 108 |
+
else "Unknown"
|
| 109 |
+
)
|
| 110 |
+
priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"}
|
| 111 |
+
priority_str = priority_map.get(issue.priority, "UNKNOWN")
|
| 112 |
+
location = f"({issue.latitude:.4f}, {issue.longitude:.4f})"
|
| 113 |
+
|
| 114 |
+
try:
|
| 115 |
+
await email_service.send_assignment_email(
|
| 116 |
+
worker_email=member.email,
|
| 117 |
+
worker_name=worker_name,
|
| 118 |
+
issue_id=str(issue.id)[:8],
|
| 119 |
+
category=category,
|
| 120 |
+
priority=priority_str,
|
| 121 |
+
location=location,
|
| 122 |
+
description=issue.description or "No description"
|
| 123 |
+
)
|
| 124 |
+
logger.info(f"Assignment email sent to {member.email}")
|
| 125 |
+
except Exception as e:
|
| 126 |
+
logger.error(f"Failed to send assignment email: {e}")
|
| 127 |
+
|
| 128 |
+
if recipients:
|
| 129 |
+
summary = self.format_issue_summary(issue)
|
| 130 |
+
await self.queue_notification(
|
| 131 |
+
notification_type="assignment",
|
| 132 |
+
recipients=recipients,
|
| 133 |
+
subject=f"New Issue Assigned: #{str(issue.id)[:8]}",
|
| 134 |
+
message=f"You have been assigned a new issue:\n\n{summary}",
|
| 135 |
+
issue_id=issue_id,
|
| 136 |
+
)
|
| 137 |
+
|
| 138 |
+
async def notify_escalation(self, issue_id: UUID, reason: str, targets: list[str]):
|
| 139 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 140 |
+
if not issue:
|
| 141 |
+
return
|
| 142 |
+
|
| 143 |
+
category = (
|
| 144 |
+
issue.classification.primary_category
|
| 145 |
+
if issue.classification
|
| 146 |
+
else "Unknown"
|
| 147 |
+
)
|
| 148 |
+
priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"}
|
| 149 |
+
priority_str = priority_map.get(issue.priority, "UNKNOWN")
|
| 150 |
+
|
| 151 |
+
for target in targets:
|
| 152 |
+
try:
|
| 153 |
+
await email_service.send_escalation_email(
|
| 154 |
+
admin_email=target,
|
| 155 |
+
issue_id=str(issue.id)[:8],
|
| 156 |
+
category=category,
|
| 157 |
+
priority=priority_str,
|
| 158 |
+
reason=reason,
|
| 159 |
+
escalation_level=issue.escalation_level
|
| 160 |
+
)
|
| 161 |
+
logger.info(f"Escalation email sent to {target}")
|
| 162 |
+
except Exception as e:
|
| 163 |
+
logger.error(f"Failed to send escalation email: {e}")
|
| 164 |
+
|
| 165 |
+
summary = self.format_issue_summary(issue)
|
| 166 |
+
await self.queue_notification(
|
| 167 |
+
notification_type="escalation",
|
| 168 |
+
recipients=targets,
|
| 169 |
+
subject=f"ESCALATION: Issue #{str(issue.id)[:8]} - Level {issue.escalation_level}",
|
| 170 |
+
message=f"Issue has been escalated:\n\nReason: {reason}\n\n{summary}",
|
| 171 |
+
issue_id=issue_id,
|
| 172 |
+
)
|
| 173 |
+
|
| 174 |
+
async def notify_resolution(self, issue_id: UUID):
|
| 175 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 176 |
+
if not issue:
|
| 177 |
+
return
|
| 178 |
+
|
| 179 |
+
category = (
|
| 180 |
+
issue.classification.primary_category
|
| 181 |
+
if issue.classification
|
| 182 |
+
else "Unknown"
|
| 183 |
+
)
|
| 184 |
+
location = f"({issue.latitude:.4f}, {issue.longitude:.4f})"
|
| 185 |
+
|
| 186 |
+
if issue.user_id:
|
| 187 |
+
try:
|
| 188 |
+
await email_service.send_completion_email(
|
| 189 |
+
user_email=issue.user_id,
|
| 190 |
+
issue_id=str(issue.id)[:8],
|
| 191 |
+
category=category,
|
| 192 |
+
location=location,
|
| 193 |
+
resolution_notes=issue.resolution_notes or "Issue resolved successfully"
|
| 194 |
+
)
|
| 195 |
+
logger.info(f"Resolution email sent to user {issue.user_id}")
|
| 196 |
+
except Exception as e:
|
| 197 |
+
logger.error(f"Failed to send resolution email: {e}")
|
| 198 |
+
|
| 199 |
+
await self.queue_notification(
|
| 200 |
+
notification_type="resolution",
|
| 201 |
+
recipients=[settings.admin_email],
|
| 202 |
+
subject=f"Issue Resolved: #{str(issue.id)[:8]}",
|
| 203 |
+
message=f"Issue has been marked as resolved.\n\n{self.format_issue_summary(issue)}",
|
| 204 |
+
issue_id=issue_id,
|
| 205 |
+
)
|
| 206 |
+
|
| 207 |
+
async def notify_manual_review(self, issue_id: UUID, reason: str):
|
| 208 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 209 |
+
if not issue:
|
| 210 |
+
return
|
| 211 |
+
|
| 212 |
+
category = (
|
| 213 |
+
issue.classification.primary_category
|
| 214 |
+
if issue.classification
|
| 215 |
+
else "Unknown"
|
| 216 |
+
)
|
| 217 |
+
location = f"({issue.latitude:.4f}, {issue.longitude:.4f})"
|
| 218 |
+
image_url = f"{settings.supabase_url}/storage/v1/object/public/{settings.supabase_bucket}/{issue.id}/original.jpg"
|
| 219 |
+
|
| 220 |
+
try:
|
| 221 |
+
await email_service.send_manual_review_email(
|
| 222 |
+
issue_id=str(issue.id)[:8],
|
| 223 |
+
reason=reason,
|
| 224 |
+
category=category,
|
| 225 |
+
location=location,
|
| 226 |
+
image_url=image_url
|
| 227 |
+
)
|
| 228 |
+
logger.info(f"Manual review email sent to admin")
|
| 229 |
+
except Exception as e:
|
| 230 |
+
logger.error(f"Failed to send manual review email: {e}")
|
| 231 |
+
|
| 232 |
+
await self.queue_notification(
|
| 233 |
+
notification_type="manual_review",
|
| 234 |
+
recipients=[settings.admin_email],
|
| 235 |
+
subject=f"Manual Review Required: #{str(issue.id)[:8]}",
|
| 236 |
+
message=f"Issue requires manual review.\n\nReason: {reason}\n\n{self.format_issue_summary(issue)}",
|
| 237 |
+
issue_id=issue_id,
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
async def notify_user_confirmation(self, issue_id: UUID):
|
| 241 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 242 |
+
if not issue:
|
| 243 |
+
return
|
| 244 |
+
|
| 245 |
+
category = (
|
| 246 |
+
issue.classification.primary_category
|
| 247 |
+
if issue.classification
|
| 248 |
+
else "Unknown"
|
| 249 |
+
)
|
| 250 |
+
confirmation_link = f"https://app.urbanlens.city/confirm/{issue.id}"
|
| 251 |
+
|
| 252 |
+
if issue.user_id:
|
| 253 |
+
try:
|
| 254 |
+
await email_service.send_confirmation_request_email(
|
| 255 |
+
user_email=issue.user_id,
|
| 256 |
+
issue_id=str(issue.id)[:8],
|
| 257 |
+
category=category,
|
| 258 |
+
confirmation_link=confirmation_link
|
| 259 |
+
)
|
| 260 |
+
logger.info(f"Confirmation request email sent to user {issue.user_id}")
|
| 261 |
+
except Exception as e:
|
| 262 |
+
logger.error(f"Failed to send confirmation email: {e}")
|
| 263 |
+
|
| 264 |
+
await self.queue_notification(
|
| 265 |
+
notification_type="user_confirmation",
|
| 266 |
+
recipients=[issue.user_id] if issue.user_id else [],
|
| 267 |
+
subject=f"Please Confirm Resolution: #{str(issue.id)[:8]}",
|
| 268 |
+
message=f"Please confirm if this issue has been resolved.\n\n{self.format_issue_summary(issue)}",
|
| 269 |
+
issue_id=issue_id,
|
| 270 |
+
)
|
| 271 |
+
|
| 272 |
+
async def notify_issue_accepted(self, issue_id: UUID, accepted_by: str = "automatic"):
|
| 273 |
+
issue = await self.get_issue_with_classification(issue_id)
|
| 274 |
+
if not issue:
|
| 275 |
+
return
|
| 276 |
+
|
| 277 |
+
category = (
|
| 278 |
+
issue.classification.primary_category
|
| 279 |
+
if issue.classification
|
| 280 |
+
else "Unknown"
|
| 281 |
+
)
|
| 282 |
+
priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"}
|
| 283 |
+
priority_str = priority_map.get(issue.priority, "UNKNOWN")
|
| 284 |
+
location = f"({issue.latitude:.4f}, {issue.longitude:.4f})"
|
| 285 |
+
tracking_url = f"https://app.urbanlens.city/track/{issue.id}"
|
| 286 |
+
|
| 287 |
+
if issue.user_id:
|
| 288 |
+
try:
|
| 289 |
+
await email_service.send_issue_accepted_email(
|
| 290 |
+
user_email=issue.user_id,
|
| 291 |
+
issue_id=str(issue.id)[:8],
|
| 292 |
+
category=category,
|
| 293 |
+
priority=priority_str,
|
| 294 |
+
location=location,
|
| 295 |
+
accepted_by=accepted_by,
|
| 296 |
+
tracking_url=tracking_url
|
| 297 |
+
)
|
| 298 |
+
logger.info(f"Issue accepted email sent to user {issue.user_id} ({accepted_by})")
|
| 299 |
+
except Exception as e:
|
| 300 |
+
logger.error(f"Failed to send issue accepted email: {e}")
|
| 301 |
+
|
| 302 |
+
await self.queue_notification(
|
| 303 |
+
notification_type="issue_accepted",
|
| 304 |
+
recipients=[issue.user_id] if issue.user_id else [],
|
| 305 |
+
subject=f"Issue Accepted: #{str(issue.id)[:8]}",
|
| 306 |
+
message=f"Your issue has been accepted {accepted_by}.\n\n{self.format_issue_summary(issue)}",
|
| 307 |
+
issue_id=issue_id,
|
| 308 |
+
)
|
| 309 |
+
|
| 310 |
+
async def process_issue(
|
| 311 |
+
self, issue_id: UUID, event_type: str = "assignment"
|
| 312 |
+
) -> dict:
|
| 313 |
+
if event_type == "assignment":
|
| 314 |
+
await self.notify_assignment(issue_id)
|
| 315 |
+
elif event_type == "resolution":
|
| 316 |
+
await self.notify_resolution(issue_id)
|
| 317 |
+
elif event_type == "escalation":
|
| 318 |
+
await self.notify_escalation(issue_id, "SLA breach or priority escalation", [settings.admin_email])
|
| 319 |
+
elif event_type == "manual_review":
|
| 320 |
+
await self.notify_manual_review(issue_id, "Requires admin attention")
|
| 321 |
+
elif event_type == "user_confirmation":
|
| 322 |
+
await self.notify_user_confirmation(issue_id)
|
| 323 |
+
elif event_type == "issue_accepted":
|
| 324 |
+
accepted_by = "automatic"
|
| 325 |
+
await self.notify_issue_accepted(issue_id, accepted_by)
|
| 326 |
+
elif event_type == "issue_accepted_manual":
|
| 327 |
+
await self.notify_issue_accepted(issue_id, "manual")
|
| 328 |
+
|
| 329 |
+
return {"queued": len(self.pending_notifications)}
|
| 330 |
+
|
| 331 |
+
async def handle(self, event) -> None:
|
| 332 |
+
event_type = getattr(event, "notification_type", "assignment")
|
| 333 |
+
await self.process_issue(event.issue_id, event_type)
|
Backend/agents/priority/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import PriorityAgent, IssuePrioritized
|
Backend/agents/priority/agent.py
ADDED
|
@@ -0,0 +1,144 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from uuid import UUID
|
| 4 |
+
from sqlalchemy import select, func
|
| 5 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 6 |
+
from sqlalchemy.orm import selectinload
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
|
| 9 |
+
from Backend.core.config import settings
|
| 10 |
+
from Backend.core.events import event_bus, Event
|
| 11 |
+
from Backend.core.logging import get_logger
|
| 12 |
+
from Backend.database.models import Issue, IssueEvent, Classification
|
| 13 |
+
from Backend.orchestration.base import BaseAgent
|
| 14 |
+
|
| 15 |
+
logger = get_logger(__name__, agent_name="PriorityAgent")
|
| 16 |
+
|
| 17 |
+
if settings.gemini_api_key:
|
| 18 |
+
genai.configure(api_key=settings.gemini_api_key)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class IssuePrioritized(Event):
    """Event published after PriorityAgent assigns a priority to an issue."""

    priority: int  # 1=CRITICAL .. 4=LOW (scale used in the agent's prompt)
    reasoning: str  # short justification produced by the model
|
| 26 |
+
class PriorityAgent(BaseAgent):
    """Assigns a 1-4 priority to a classified, non-duplicate issue.

    Delegates the decision to a Gemini-hosted model when an API key is
    configured; otherwise falls back to a neutral MEDIUM (3) priority.
    Persists the result on the issue, records an IssueEvent, and publishes
    an IssuePrioritized event.
    """

    def __init__(self, db: AsyncSession):
        super().__init__("PriorityAgent")
        self.db = db
        # No API key -> self.model is None and calculate_priority short-circuits.
        if settings.gemini_api_key:
            self.model = genai.GenerativeModel('gemma-3-27b-it')
        else:
            self.model = None

    @staticmethod
    def _sanitize_priority(value) -> int:
        """Coerce an LLM-supplied priority into a valid int in [1, 4].

        The model may return a string, float, out-of-range number, or
        garbage; anything unusable falls back to 3 (MEDIUM).
        """
        try:
            priority = int(value)
        except (TypeError, ValueError):
            return 3
        return min(max(priority, 1), 4)

    async def calculate_priority(
        self,
        category: Optional[str],
        confidence: float,
        is_duplicate: bool,
        duplicate_count: int = 0,
        description: Optional[str] = None,
        city: Optional[str] = None
    ) -> tuple[int, str]:
        """Return (priority, reasoning) for the given issue attributes.

        Returns (3, ...) when the model is unconfigured or any step of the
        model call / JSON parse fails.
        """
        if not self.model:
            return 3, "Gemini API not configured"

        prompt = f"""Assign priority for civic infrastructure issue:

Category: {category or 'Unknown'}
AI Confidence: {confidence:.1%}
Duplicate Reports: {duplicate_count}
Location: {city or 'Unknown'}
Description: {description[:200] if description else 'N/A'}

Priority Scale:
1 = CRITICAL (Public safety, electrical hazards, major hazards)
2 = HIGH (Potholes, road damage, fallen trees)
3 = MEDIUM (Garbage, broken signs, minor structures)
4 = LOW (Parking violations, minor vandalism)

Consider safety impact, infrastructure criticality, and community accessibility.

Return ONLY valid JSON:
{{"priority": 1-4, "reasoning": "max 80 chars"}}"""

        try:
            response = self.model.generate_content(prompt)
            # Strip markdown code fences the model sometimes wraps JSON in.
            result = json.loads(response.text.replace("```json", "").replace("```", "").strip())
            # BUG FIX: the raw model value was previously stored unchecked, so a
            # string or out-of-range priority could end up in the database and
            # break downstream priority_map lookups. Clamp to a valid int.
            priority = self._sanitize_priority(result.get("priority", 3))
            return priority, str(result.get("reasoning", "Priority assigned"))
        except Exception as e:
            logger.error(f"Gemini priority calculation failed: {e}")
            return 3, "Analysis error"

    async def process_issue(self, issue_id: UUID) -> dict:
        """Prioritize one issue: compute, persist, log, and publish the result.

        Duplicates are skipped (they inherit their parent's priority).
        """
        query = (
            select(Issue)
            .options(selectinload(Issue.classification))
            .where(Issue.id == issue_id)
        )
        result = await self.db.execute(query)
        issue = result.scalar_one_or_none()
        if not issue:
            return {"error": "Issue not found"}

        if issue.is_duplicate:
            self.log_decision(
                issue_id=issue_id,
                decision="Skipped prioritization",
                reasoning="Issue is a duplicate, priority inherited from parent"
            )
            return {"skipped": True, "reason": "duplicate"}

        category = None
        confidence = 0.0
        if issue.classification:
            category = issue.classification.primary_category
            confidence = issue.classification.primary_confidence

        # How many other issues point at this one as their parent.
        dup_count_result = await self.db.execute(
            select(func.count(Issue.id)).where(Issue.parent_issue_id == issue_id)
        )
        duplicate_count = dup_count_result.scalar() or 0

        priority, reasoning = await self.calculate_priority(
            category, confidence, issue.is_duplicate, duplicate_count, issue.description, issue.city
        )

        issue.priority = priority
        issue.priority_reason = reasoning

        self.log_decision(
            issue_id=issue_id,
            decision=f"Priority set to {priority}",
            reasoning=reasoning
        )

        event_record = IssueEvent(
            issue_id=issue_id,
            event_type="prioritized",
            agent_name=self.name,
            event_data=json.dumps({
                "priority": priority,
                "reasoning": reasoning,
                "category": category,
                "confidence": confidence,
            })
        )
        self.db.add(event_record)
        await self.db.flush()

        priority_event = IssuePrioritized(
            issue_id=issue_id,
            priority=priority,
            reasoning=reasoning,
        )
        await event_bus.publish(priority_event)

        return {
            "priority": priority,
            "reasoning": reasoning,
        }

    async def handle(self, event) -> None:
        """Event-bus entry point: prioritize the issue named by the event."""
        await self.process_issue(event.issue_id)
|
Backend/agents/routing/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import RoutingAgent, IssueAssigned
|
Backend/agents/routing/agent.py
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from datetime import datetime, timedelta
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
from sqlalchemy import select
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
from sqlalchemy.orm import selectinload
|
| 8 |
+
import google.generativeai as genai
|
| 9 |
+
|
| 10 |
+
from Backend.core.config import settings
|
| 11 |
+
from Backend.core.events import event_bus, Event
|
| 12 |
+
from Backend.core.logging import get_logger
|
| 13 |
+
from Backend.database.models import Issue, IssueEvent, Department, Member, Classification
|
| 14 |
+
from Backend.orchestration.base import BaseAgent
|
| 15 |
+
|
| 16 |
+
logger = get_logger(__name__, agent_name="RoutingAgent")
|
| 17 |
+
|
| 18 |
+
if settings.gemini_api_key:
|
| 19 |
+
genai.configure(api_key=settings.gemini_api_key)
|
| 20 |
+
|
| 21 |
+
# Default SLA window (hours) per priority level; calculate_sla may tighten
# this further with the department's default_sla_hours.
PRIORITY_SLA_HOURS = {
    1: 4,    # CRITICAL: 4 hours
    2: 12,   # HIGH: 12 hours
    3: 48,   # MEDIUM: 2 days
    4: 168,  # LOW: 1 week
}
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class IssueAssigned(Event):
    """Event published after RoutingAgent routes an issue to a department/member."""

    department_code: str  # e.g. "PWD"; "UNASSIGNED" when no active department exists
    member_id: Optional[UUID] = None  # None when no member had spare capacity
    member_name: Optional[str] = None  # "Unassigned" when no member was found
    sla_deadline: datetime  # naive UTC deadline (datetime.utcnow-based, see calculate_sla)
    sla_hours: int  # length of the SLA window in hours
| 36 |
+
|
| 37 |
+
class RoutingAgent(BaseAgent):
    """Routes a prioritized issue to a department and an available member.

    Department selection is delegated to a Gemini-hosted model when an API
    key is configured; otherwise the first active department is used.  An
    SLA window is derived from the issue priority (PRIORITY_SLA_HOURS) and
    may be tightened by the department's own default.
    """

    def __init__(self, db: AsyncSession):
        super().__init__("RoutingAgent")
        self.db = db
        # No API key -> model stays None and find_department falls back.
        if settings.gemini_api_key:
            self.model = genai.GenerativeModel('gemma-3-27b-it')
        else:
            self.model = None

    async def find_department(self, category: Optional[str], description: Optional[str] = None) -> Optional[Department]:
        """Pick the best active department for the issue.

        Returns None only when no active departments exist.  Falls back to
        the first active department when the model is unavailable, the
        category is unknown, the model errors, or it returns an unknown code.
        """
        query = select(Department).where(Department.is_active == True)
        result = await self.db.execute(query)
        departments = result.scalars().all()

        if not departments:
            return None

        if not self.model or not category:
            return departments[0]

        dept_info = "\n".join([f"- {d.code}: {d.name} ({d.categories})" for d in departments])

        prompt = f"""Route civic issue to correct department:

Issue Category: {category}
Description: {description[:150] if description else 'N/A'}

Available Departments:
{dept_info}

Return ONLY the department CODE (e.g., PWD, TRAFFIC, SANITATION)"""

        try:
            response = self.model.generate_content(prompt)
            dept_code = response.text.strip().upper()

            # Accept the model answer only if it names a real department.
            for dept in departments:
                if dept.code == dept_code:
                    return dept
        except Exception as e:
            logger.error(f"Gemini routing failed: {e}")

        return departments[0]

    async def find_available_member(
        self,
        department_id: UUID,
        city: Optional[str] = None,
        locality: Optional[str] = None
    ) -> Optional[Member]:
        """Find the least-loaded active member with spare capacity.

        Preference order: same city, then same locality, then anyone in the
        department.  Returns None when nobody has spare capacity.
        """
        base_query = (
            select(Member)
            .where(Member.department_id == department_id)
            .where(Member.is_active == True)
            .where(Member.current_workload < Member.max_workload)
        )

        if city:
            city_query = base_query.where(Member.city.ilike(f"%{city}%"))
            result = await self.db.execute(city_query.order_by(Member.current_workload.asc()))
            member = result.scalars().first()
            if member:
                logger.info(f"Found member in city: {city}")
                return member

        if locality:
            locality_query = base_query.where(Member.locality.ilike(f"%{locality}%"))
            result = await self.db.execute(locality_query.order_by(Member.current_workload.asc()))
            member = result.scalars().first()
            if member:
                logger.info(f"Found member in locality: {locality}")
                return member

        # Last resort: any member in the department with spare capacity.
        result = await self.db.execute(base_query.order_by(Member.current_workload.asc()))
        member = result.scalars().first()
        if member:
            logger.info(f"Assigned to available member (no location match)")
        return member

    def calculate_sla(self, priority: int, department: Optional[Department]) -> tuple[int, datetime]:
        """Return (sla_hours, deadline) for the priority; deadline is naive UTC."""
        base_hours = PRIORITY_SLA_HOURS.get(priority, 48)

        # A department default can only tighten the window, never extend it.
        if department and department.default_sla_hours:
            base_hours = min(base_hours, department.default_sla_hours)

        deadline = datetime.utcnow() + timedelta(hours=base_hours)
        return base_hours, deadline

    async def process_issue(self, issue_id: UUID) -> dict:
        """Route one issue: pick department/member, set SLA, move state to 'assigned'.

        Duplicates are skipped.  The assigned member's workload counter is
        incremented in the same session (flushed with the IssueEvent record).
        """
        query = (
            select(Issue)
            .options(selectinload(Issue.classification))
            .where(Issue.id == issue_id)
        )
        result = await self.db.execute(query)
        issue = result.scalar_one_or_none()
        if not issue:
            return {"error": "Issue not found"}

        if issue.is_duplicate:
            self.log_decision(
                issue_id=issue_id,
                decision="Skipped routing",
                reasoning="Issue is a duplicate"
            )
            return {"skipped": True, "reason": "duplicate"}

        category = issue.classification.primary_category if issue.classification else None
        priority = issue.priority or 3

        department = await self.find_department(category, issue.description)

        member = None
        if department:
            member = await self.find_available_member(
                department.id,
                city=issue.city,
                locality=issue.locality
            )
            if member:
                member.current_workload += 1

        sla_hours, sla_deadline = self.calculate_sla(priority, department)

        issue.department_id = department.id if department else None
        issue.assigned_member_id = member.id if member else None
        issue.sla_hours = sla_hours
        issue.sla_deadline = sla_deadline
        issue.state = "assigned"

        dept_code = department.code if department else "UNASSIGNED"
        member_name = member.name if member else "Unassigned"
        member_city = member.city if member else "N/A"

        reasoning = f"Category '{category}' → {dept_code}"
        if issue.city:
            reasoning += f", Issue location: {issue.city}"
        if member:
            reasoning += f", Member location: {member_city}"
        reasoning += f", SLA: {sla_hours}h"

        self.log_decision(
            issue_id=issue_id,
            decision=f"Routed to {dept_code} → {member_name}",
            reasoning=reasoning
        )

        event_record = IssueEvent(
            issue_id=issue_id,
            event_type="assigned",
            agent_name=self.name,
            event_data=json.dumps({
                "department_code": dept_code,
                "member_id": str(member.id) if member else None,
                "member_name": member_name,
                "issue_city": issue.city,
                "issue_locality": issue.locality,
                "member_city": member.city if member else None,
                "sla_hours": sla_hours,
                "sla_deadline": sla_deadline.isoformat(),
            })
        )
        self.db.add(event_record)
        await self.db.flush()

        assign_event = IssueAssigned(
            issue_id=issue_id,
            department_code=dept_code,
            member_id=member.id if member else None,
            member_name=member_name,
            sla_deadline=sla_deadline,
            sla_hours=sla_hours,
        )
        await event_bus.publish(assign_event)

        return {
            "department": dept_code,
            "member": member_name,
            "issue_city": issue.city,
            "issue_locality": issue.locality,
            "sla_hours": sla_hours,
            "sla_deadline": sla_deadline.isoformat(),
        }

    async def handle(self, event) -> None:
        """Event-bus entry point: route the issue named by the event."""
        await self.process_issue(event.issue_id)
|
Backend/agents/sla/agent.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from datetime import datetime
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID
|
| 5 |
+
from sqlalchemy import select
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
import google.generativeai as genai
|
| 8 |
+
|
| 9 |
+
from Backend.core.events import event_bus, Event
|
| 10 |
+
from Backend.core.logging import get_logger
|
| 11 |
+
from Backend.core.config import settings
|
| 12 |
+
from Backend.database.models import Issue, IssueEvent, Member, Department
|
| 13 |
+
from Backend.orchestration.base import BaseAgent
|
| 14 |
+
|
| 15 |
+
logger = get_logger(__name__, agent_name="SLAAgent")
|
| 16 |
+
|
| 17 |
+
if settings.gemini_api_key:
|
| 18 |
+
genai.configure(api_key=settings.gemini_api_key)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class SLAWarning(Event):
    """Event published when an active issue approaches its SLA deadline."""

    hours_remaining: float  # may be computed from a naive-UTC deadline
    threshold_hours: float  # always 0 as published by SLAAgent; rely on warning_level
    warning_level: str  # "warning" or "critical"
    assigned_email: Optional[str] = None  # email of the assigned member, if any
|
| 27 |
+
|
| 28 |
+
class SLAAgent(BaseAgent):
|
| 29 |
+
def __init__(self, db: AsyncSession):
|
| 30 |
+
super().__init__("SLAAgent")
|
| 31 |
+
self.db = db
|
| 32 |
+
if settings.gemini_api_key:
|
| 33 |
+
self.model = genai.GenerativeModel('gemma-3-27b-it')
|
| 34 |
+
else:
|
| 35 |
+
self.model = None
|
| 36 |
+
|
| 37 |
+
async def check_sla_status(self, issue: Issue) -> tuple[bool, str, Optional[str]]:
|
| 38 |
+
"""
|
| 39 |
+
Checks if an issue needs an SLA warning.
|
| 40 |
+
Returns: (needs_warning, warning_type, reason)
|
| 41 |
+
"""
|
| 42 |
+
if not issue.sla_deadline or issue.state in ["resolved", "verified", "closed", "escalated"]:
|
| 43 |
+
return False, "", None
|
| 44 |
+
|
| 45 |
+
if not self.model:
|
| 46 |
+
now = datetime.utcnow()
|
| 47 |
+
hours_remaining = (issue.sla_deadline - now).total_seconds() / 3600
|
| 48 |
+
total_sla_hours = issue.sla_hours or 48
|
| 49 |
+
|
| 50 |
+
if 0 < hours_remaining <= (total_sla_hours * 0.5) and hours_remaining > (total_sla_hours * 0.2):
|
| 51 |
+
return True, "warning", f"50% SLA time remaining ({hours_remaining:.1f}h)"
|
| 52 |
+
elif 0 < hours_remaining <= (total_sla_hours * 0.2):
|
| 53 |
+
return True, "critical", f"Critical: Less than 20% SLA time remaining ({hours_remaining:.1f}h)"
|
| 54 |
+
return False, "", None
|
| 55 |
+
|
| 56 |
+
now = datetime.utcnow()
|
| 57 |
+
hours_remaining = (issue.sla_deadline - now).total_seconds() / 3600
|
| 58 |
+
total_sla_hours = issue.sla_hours or 48
|
| 59 |
+
hours_elapsed = total_sla_hours - hours_remaining
|
| 60 |
+
|
| 61 |
+
prompt = f"""Assess SLA status for civic issue:
|
| 62 |
+
|
| 63 |
+
Priority: {issue.priority} (1=Critical, 2=High, 3=Medium, 4=Low)
|
| 64 |
+
State: {issue.state}
|
| 65 |
+
Total SLA Hours: {total_sla_hours}
|
| 66 |
+
Hours Elapsed: {hours_elapsed:.1f}
|
| 67 |
+
Hours Remaining: {hours_remaining:.1f}
|
| 68 |
+
Time Used: {(hours_elapsed/total_sla_hours*100):.1f}%
|
| 69 |
+
|
| 70 |
+
Determine if warning is needed:
|
| 71 |
+
- "none": No warning needed (>50% time remaining)
|
| 72 |
+
- "warning": Warning level (20-50% time remaining)
|
| 73 |
+
- "critical": Critical warning (<20% time remaining)
|
| 74 |
+
|
| 75 |
+
Return ONLY valid JSON:
|
| 76 |
+
{{"warning_level": "none/warning/critical", "reason": "max 60 chars"}}"""
|
| 77 |
+
|
| 78 |
+
try:
|
| 79 |
+
response = self.model.generate_content(prompt)
|
| 80 |
+
result = json.loads(response.text.replace("```json", "").replace("```", "").strip())
|
| 81 |
+
level = result.get("warning_level", "none")
|
| 82 |
+
reason = result.get("reason", "SLA assessment completed")
|
| 83 |
+
|
| 84 |
+
if level == "none":
|
| 85 |
+
return False, "", None
|
| 86 |
+
return True, level, reason
|
| 87 |
+
except Exception as e:
|
| 88 |
+
logger.error(f"Gemini SLA check failed: {e}")
|
| 89 |
+
if 0 < hours_remaining <= (total_sla_hours * 0.2):
|
| 90 |
+
return True, "critical", f"Less than 20% SLA time remaining"
|
| 91 |
+
elif 0 < hours_remaining <= (total_sla_hours * 0.5):
|
| 92 |
+
return True, "warning", f"50% SLA time remaining"
|
| 93 |
+
return False, "", None
|
| 94 |
+
|
| 95 |
+
async def process_issue(self, issue_id: UUID) -> dict:
|
| 96 |
+
issue = await self.db.get(Issue, issue_id)
|
| 97 |
+
if not issue:
|
| 98 |
+
return {"error": "Issue not found"}
|
| 99 |
+
|
| 100 |
+
needs_warning, level, reason = await self.check_sla_status(issue)
|
| 101 |
+
|
| 102 |
+
if not needs_warning:
|
| 103 |
+
return {"status": "ok"}
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
assigned_email = None
|
| 107 |
+
if issue.assigned_member_id:
|
| 108 |
+
member = await self.db.get(Member, issue.assigned_member_id)
|
| 109 |
+
if member:
|
| 110 |
+
assigned_email = member.email
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
warning_event = SLAWarning(
|
| 114 |
+
issue_id=issue_id,
|
| 115 |
+
hours_remaining=(issue.sla_deadline - datetime.utcnow()).total_seconds() / 3600,
|
| 116 |
+
threshold_hours=0,
|
| 117 |
+
warning_level=level,
|
| 118 |
+
assigned_email=assigned_email
|
| 119 |
+
)
|
| 120 |
+
await event_bus.publish(warning_event)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
event_record = IssueEvent(
|
| 124 |
+
issue_id=issue_id,
|
| 125 |
+
event_type=f"sla_{level}",
|
| 126 |
+
agent_name=self.name,
|
| 127 |
+
event_data=json.dumps({
|
| 128 |
+
"hours_remaining": warning_event.hours_remaining,
|
| 129 |
+
"level": level,
|
| 130 |
+
"reason": reason
|
| 131 |
+
})
|
| 132 |
+
)
|
| 133 |
+
self.db.add(event_record)
|
| 134 |
+
await self.db.flush()
|
| 135 |
+
|
| 136 |
+
return {"warning_sent": True, "level": level, "recipient": assigned_email}
|
| 137 |
+
|
| 138 |
+
async def check_all_active(self) -> list[dict]:
|
| 139 |
+
"""Scans all active issues for SLA breaches."""
|
| 140 |
+
query = select(Issue).where(
|
| 141 |
+
Issue.state.in_(["assigned", "in_progress"]),
|
| 142 |
+
Issue.sla_deadline.isnot(None)
|
| 143 |
+
)
|
| 144 |
+
result = await self.db.execute(query)
|
| 145 |
+
issues = result.scalars().all()
|
| 146 |
+
|
| 147 |
+
results = []
|
| 148 |
+
for issue in issues:
|
| 149 |
+
res = await self.process_issue(issue.id)
|
| 150 |
+
if res.get("warning_sent"):
|
| 151 |
+
results.append(res)
|
| 152 |
+
return results
|
| 153 |
+
|
| 154 |
+
async def handle(self, event) -> None:
    """Event-bus hook.

    SLA checks are driven by the periodic background sweep rather than by
    individual events, so incoming events are deliberately ignored.
    """
    return None
|
Backend/agents/vision/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .agent import VisionAgent
|
Backend/agents/vision/agent.py
ADDED
|
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import time
|
| 3 |
+
import cv2
|
| 4 |
+
import numpy as np
|
| 5 |
+
import google.generativeai as genai
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import Optional
|
| 8 |
+
from uuid import UUID
|
| 9 |
+
|
| 10 |
+
from sqlalchemy import select
|
| 11 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 12 |
+
|
| 13 |
+
from Backend.core.config import settings
|
| 14 |
+
from Backend.core.events import event_bus, IssueClassified, IssueCreated
|
| 15 |
+
from Backend.core.logging import get_logger
|
| 16 |
+
from Backend.core.schemas import ClassificationResult, DetectionBox, CLASS_ID_TO_CATEGORY, IssueCategory
|
| 17 |
+
from Backend.database.models import Classification, Issue, IssueImage, IssueEvent
|
| 18 |
+
from Backend.orchestration.base import BaseAgent
|
| 19 |
+
from Backend.utils.fuzzy_match import auto_validate_issue
|
| 20 |
+
from Backend.utils.storage import save_bytes, download_from_supabase, get_upload_url
|
| 21 |
+
|
| 22 |
+
# Module logger tagged with the agent name for structured log filtering.
logger = get_logger(__name__, agent_name="VisionAgent")

# Configure the Gemini SDK once at import time when an API key is present;
# without a key the agent runs on the local YOLO model only.
if settings.gemini_api_key:
    genai.configure(api_key=settings.gemini_api_key)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class VisionAgent(BaseAgent):
    """Agent that classifies issue photos.

    The primary path is a locally hosted YOLO detector (lazily loaded once
    per process and shared by all instances).  When YOLO finds nothing or is
    not confident and a Gemini API key is configured, a Gemini model is
    consulted as a fallback single-label classifier.  Results are persisted
    (Classification / IssueEvent rows), the issue is auto-validated against
    its text description, and an IssueClassified event is published for
    downstream agents.
    """

    # Process-wide YOLO model, loaded lazily on first use.
    _model = None

    def __init__(self, db: Optional[AsyncSession] = None):
        """Create the agent.

        Args:
            db: Optional async session; when omitted the agent can still run
                inference but persists nothing.
        """
        super().__init__("VisionAgent")
        self.db = db
        # The Gemini fallback is only wired up when an API key is configured.
        if settings.gemini_api_key:
            self.gemini_model = genai.GenerativeModel('gemma-3-27b-it')
        else:
            self.gemini_model = None

    @classmethod
    def load_model(cls):
        """Load the YOLO weights from ``settings.model_path`` (idempotent).

        Raises:
            FileNotFoundError: if the weights file does not exist.
        """
        if cls._model is None:
            # Imported lazily so the heavy ultralytics dependency is only
            # paid for when the model is actually needed.
            from ultralytics import YOLO
            model_path = settings.model_path
            if not model_path.exists():
                raise FileNotFoundError(f"Model not found: {model_path}")
            cls._model = YOLO(str(model_path))
            logger.info(f"YOLO model loaded from {model_path}")
        return cls._model

    @classmethod
    def get_model(cls):
        """Return the shared YOLO model, loading it on first access."""
        if cls._model is None:
            cls.load_model()
        return cls._model

    async def download_image(self, remote_path: str) -> bytes:
        """Fetch raw image bytes from Supabase storage."""
        return await download_from_supabase(remote_path)

    async def save_annotated(self, results, original_path: str, subfolder: str) -> str:
        """Render YOLO detections onto the image and upload the result.

        Args:
            results: The ultralytics result list from ``run_inference``.
            original_path: Remote path of the source image (used for naming).
            subfolder: Storage subfolder (the issue id in practice).

        Returns:
            The remote storage path of the annotated JPEG.
        """
        # results[0].plot() returns an image array with boxes drawn on it.
        im_array = results[0].plot()

        original_name = Path(original_path).stem
        annotated_filename = f"annotated_{original_name}.jpg"

        _, buffer = cv2.imencode('.jpg', im_array, [cv2.IMWRITE_JPEG_QUALITY, 90])
        image_bytes = buffer.tobytes()

        remote_path = await save_bytes(image_bytes, annotated_filename, subfolder=subfolder)
        return remote_path

    async def run_inference(self, image_data: bytes) -> tuple[list, float]:
        """Run YOLO prediction on raw image bytes.

        Returns:
            ``(results, inference_time_ms)`` where ``results`` is the
            ultralytics result list.

        Raises:
            ValueError: if the bytes cannot be decoded as an image.
        """
        model = self.get_model()

        nparr = np.frombuffer(image_data, np.uint8)
        img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        if img is None:
            raise ValueError("Invalid image data")

        start_time = time.perf_counter()
        results = model.predict(
            source=img,
            conf=settings.model_confidence_threshold,
            imgsz=settings.model_input_size,
            verbose=False,
        )
        inference_time = (time.perf_counter() - start_time) * 1000

        return results, inference_time

    async def gemini_classify_image(
        self,
        image_data: bytes,
        description: Optional[str] = None
    ) -> tuple[Optional[IssueCategory], float, Optional[str]]:
        """Fallback single-label classification via Gemini.

        Returns:
            ``(category, confidence, reasoning)``; ``(None, 0.0, None)`` when
            the Gemini model is unavailable or the response cannot be parsed
            into a known category.
        """
        if not self.gemini_model:
            return None, 0.0, None

        # Constrain the model to the same label set YOLO uses.
        allowed = [
            {"class_id": k, "class_name": v.value}
            for k, v in CLASS_ID_TO_CATEGORY.items()
        ]
        prompt = (
            "Classify the photo into exactly one of the allowed categories. "
            "Return ONLY valid JSON with keys: class_id (int), confidence (0.0-1.0), reasoning (max 80 chars).\n\n"
            f"Allowed categories: {json.dumps(allowed)}\n"
            f"User description: {(description or '')[:200]}"
        )

        try:
            # NOTE(review): generate_content is a blocking SDK call inside an
            # async method — presumably acceptable at current load; confirm.
            response = self.gemini_model.generate_content(
                [
                    {"text": prompt},
                    {
                        "inline_data": {
                            "mime_type": "image/jpeg",
                            "data": image_data,
                        }
                    },
                ]
            )
            # Strip the markdown code fences the model sometimes wraps
            # around its JSON answer.
            text = (response.text or "").replace("```json", "").replace("```", "").strip()
            data = json.loads(text)
            class_id = data.get("class_id")
            confidence = float(data.get("confidence", 0.0))
            reasoning = data.get("reasoning")
            if not isinstance(class_id, int):
                return None, 0.0, None
            category = CLASS_ID_TO_CATEGORY.get(class_id)
            if not category:
                return None, 0.0, None
            # Clamp to the documented [0, 1] range.
            confidence = max(0.0, min(1.0, confidence))
            return category, confidence, reasoning
        except Exception as e:
            # Best-effort fallback: any Gemini/parsing failure degrades to
            # "no opinion" rather than failing the whole pipeline.
            logger.error(f"Gemini vision classification failed: {e}")
            return None, 0.0, None

    def extract_detections(self, results) -> list[DetectionBox]:
        """Convert ultralytics result boxes into ``DetectionBox`` records.

        Detections whose class id is not in CLASS_ID_TO_CATEGORY are dropped.
        """
        detections = []
        for result in results:
            boxes = result.boxes
            if boxes is not None:
                for i in range(len(boxes)):
                    class_id = int(boxes.cls[i].item())
                    confidence = float(boxes.conf[i].item())
                    bbox = tuple(boxes.xyxy[i].tolist())

                    category = CLASS_ID_TO_CATEGORY.get(class_id)
                    if category:
                        detections.append(DetectionBox(
                            class_id=class_id,
                            class_name=category.value,
                            confidence=confidence,
                            bbox=bbox,
                        ))
        return detections

    async def classify_image(
        self,
        image_path: str,
        subfolder: str = "",
        description: Optional[str] = None
    ) -> tuple[list[DetectionBox], str, Optional[IssueCategory], float, Optional[str]]:
        """Classify one stored image: YOLO first, Gemini as fallback.

        Returns:
            ``(detections, annotated_path, gemini_category,
            gemini_confidence, gemini_reasoning)``.  The Gemini fields are
            ``(None, 0.0, None)`` unless the fallback actually ran.
        """
        image_data = await self.download_image(image_path)
        results, inference_time = await self.run_inference(image_data)
        annotated_path = await self.save_annotated(results, image_path, subfolder)
        detections = self.extract_detections(results)

        gemini_category = None
        gemini_confidence = 0.0
        gemini_reasoning = None
        # Only consult Gemini when YOLO found nothing or its best detection
        # is weak (< 0.5 confidence).
        if self.gemini_model and (not detections or max(d.confidence for d in detections) < 0.5):
            gemini_category, gemini_confidence, gemini_reasoning = await self.gemini_classify_image(
                image_data=image_data,
                description=description
            )

        logger.info(f"Inference completed in {inference_time:.2f}ms, {len(detections)} detections")
        return detections, annotated_path, gemini_category, gemini_confidence, gemini_reasoning

    async def process_issue(
        self,
        issue_id: UUID,
        image_paths: list[str],
        description: Optional[str] = None
    ) -> ClassificationResult:
        """Classify every image of an issue, persist results, publish events.

        Args:
            issue_id: The issue being classified.
            image_paths: Remote storage paths of the issue's photos.
            description: Reporter-supplied text, used as Gemini context and
                for fuzzy auto-validation against detected categories.

        Returns:
            The aggregated ClassificationResult across all images.
        """
        all_detections = []
        annotated_paths = []
        total_time = 0.0
        subfolder = str(issue_id)

        # Track the single strongest Gemini opinion across all images.
        gemini_best_category = None
        gemini_best_confidence = 0.0
        gemini_best_reasoning = None

        for path in image_paths:
            start = time.perf_counter()
            detections, annotated_path, gemini_category, gemini_confidence, gemini_reasoning = await self.classify_image(
                path,
                subfolder=subfolder,
                description=description
            )
            total_time += (time.perf_counter() - start) * 1000
            all_detections.extend(detections)
            annotated_paths.append(annotated_path)

            if gemini_category and gemini_confidence > gemini_best_confidence:
                gemini_best_category = gemini_category
                gemini_best_confidence = gemini_confidence
                gemini_best_reasoning = gemini_reasoning

            if self.db:
                # Attach the annotated rendering to the stored image record.
                query = select(IssueImage).where(IssueImage.file_path == path)
                result = await self.db.execute(query)
                image_record = result.scalar_one_or_none()
                if image_record:
                    image_record.annotated_path = annotated_path

        result = ClassificationResult(
            issue_id=issue_id,
            detections=all_detections,
            annotated_urls=[get_upload_url(p) for p in annotated_paths],
            inference_time_ms=total_time,
        )

        # NOTE(review): primary_category / primary_confidence appear to be
        # derived by ClassificationResult from `detections` — confirm in
        # Backend.core.schemas.  Gemini overrides YOLO only when YOLO had no
        # answer or a weak one (< 0.5).
        if gemini_best_category and (not result.primary_category or result.primary_confidence < 0.5):
            result.primary_category = gemini_best_category
            result.primary_confidence = gemini_best_confidence

        detected_categories = list(set(d.class_name for d in all_detections))
        auto_validated, validation_reason = auto_validate_issue(description, detected_categories)

        validation_source = "auto" if auto_validated else "pending_manual"
        new_state = "validated" if auto_validated else "reported"

        self.log_decision(
            issue_id=issue_id,
            decision=f"Validation: {validation_source}",
            reasoning=validation_reason
        )

        if self.db:
            # Persist the classification, update issue state, and append an
            # audit event; flushed before the bus event is published so
            # downstream handlers can read consistent rows.
            classification = Classification(
                issue_id=issue_id,
                primary_category=result.primary_category.value if result.primary_category else None,
                primary_confidence=result.primary_confidence,
                detections_json=json.dumps([d.model_dump() for d in all_detections]),
                inference_time_ms=total_time,
            )
            self.db.add(classification)

            issue = await self.db.get(Issue, issue_id)
            if issue:
                issue.state = new_state
                issue.validation_source = validation_source
                issue.validation_reason = validation_reason

            event_record = IssueEvent(
                issue_id=issue_id,
                event_type="classified",
                agent_name=self.name,
                event_data=json.dumps({
                    "category": result.primary_category.value if result.primary_category else None,
                    "confidence": result.primary_confidence,
                    "detections_count": len(all_detections),
                    "validation_source": validation_source,
                    "annotated_images": annotated_paths,
                    "gemini_category": gemini_best_category.value if gemini_best_category else None,
                    "gemini_confidence": gemini_best_confidence,
                    "gemini_reasoning": gemini_best_reasoning,
                })
            )
            self.db.add(event_record)
            await self.db.flush()

        if result.primary_category:
            # Downstream agents (priority, routing, ...) react to this event.
            event = IssueClassified(
                issue_id=issue_id,
                category=result.primary_category.value,
                confidence=result.primary_confidence,
                detections_count=len(all_detections),
                metadata={
                    "validation_source": validation_source,
                    "validation_reason": validation_reason,
                    "annotated_images": [get_upload_url(p) for p in annotated_paths],
                }
            )
            await event_bus.publish(event)

        return result

    async def handle(self, event: IssueCreated) -> None:
        """Event-bus entry point: classify a newly created issue."""
        await self.process_issue(
            event.issue_id,
            event.image_paths,
            event.description
        )
|
Backend/agents/vision/model.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:55bb189306a9882c84fb471b9cc81e2ba48363d1a4c49ccf914e9a08cde01c24
|
| 3 |
+
size 22512426
|
Backend/api/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .app import create_app

# Module-level ASGI application instance, e.g. for `uvicorn Backend.api:app`.
app = create_app()
|
Backend/api/app.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import asynccontextmanager
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from fastapi import FastAPI, Request
|
| 4 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 5 |
+
from fastapi.responses import JSONResponse, FileResponse
|
| 6 |
+
from fastapi.staticfiles import StaticFiles
|
| 7 |
+
|
| 8 |
+
from Backend.core.config import settings
|
| 9 |
+
from Backend.core.events import event_bus
|
| 10 |
+
from Backend.core.logging import setup_logging, get_logger
|
| 11 |
+
from Backend.core.security import SecurityHeadersMiddleware, RateLimitMiddleware, RequestValidationMiddleware
|
| 12 |
+
from Backend.database.connection import init_db, close_db
|
| 13 |
+
from Backend.api.routes import api_router
|
| 14 |
+
|
| 15 |
+
logger = get_logger(__name__)
|
| 16 |
+
|
| 17 |
+
STATIC_DIR = Path("static")
|
| 18 |
+
|
| 19 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan context: startup wiring before ``yield``, teardown after.

    Startup: configure logging, initialize the database, start the event bus,
    warm the YOLO model (non-fatal on failure), and launch a background task
    that runs SLA/escalation sweeps every 15 minutes.

    Shutdown: cancel the background task, stop the event bus, close the DB.
    """
    setup_logging(debug=settings.debug)
    logger.info("Starting City Issue Resolution Agent")

    await init_db()
    logger.info("Database initialized")

    await event_bus.start()
    logger.info("Event bus started")

    # Warm the YOLO model at startup so the first request does not pay the
    # load cost; failure is non-fatal (the agent degrades to mock mode).
    from Backend.agents.vision import VisionAgent
    try:
        VisionAgent.load_model()
        logger.info("Vision model loaded")
    except Exception as e:
        logger.warning(f"Vision model failed to load: {e}. Running in mock mode.")

    import asyncio
    from Backend.database.connection import get_db_context
    from Backend.agents.escalation.agent import EscalationAgent
    from Backend.agents.sla.agent import SLAAgent

    async def run_periodic_checks():
        """Background loop: run escalation + SLA sweeps every 15 minutes."""
        while True:
            try:
                logger.info("Running periodic SLA and Escalation checks...")
                async with get_db_context() as db:
                    esc_agent = EscalationAgent(db)
                    await esc_agent.check_all_pending()

                    sla_agent = SLAAgent(db)
                    await sla_agent.check_all_active()
            except Exception as e:
                # Never let one failed sweep kill the loop; log and retry.
                logger.error(f"Error in background task: {e}")

            await asyncio.sleep(900)

    task = asyncio.create_task(run_periodic_checks())

    yield

    # Fix: cancel AND await the background task.  The original fired
    # task.cancel() without awaiting it, so cancellation was never actually
    # delivered before shutdown and asyncio could log a "task exception was
    # never retrieved" warning.
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass
    await event_bus.stop()
    await close_db()
    logger.info("Shutdown complete")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def create_app() -> FastAPI:
    """Application factory: build and fully configure the FastAPI app.

    Registers middleware (CORS, security headers, rate limiting, request
    validation), mounts static files, attaches the API router, serves the
    dashboard page at "/" and "/dashboard", and installs exception handlers.
    """
    app = FastAPI(
        title="City Issue Resolution Agent",
        description="Autonomous urban issue detection and resolution platform",
        version="1.0.0",
        lifespan=lifespan,
        root_path="",
    )

    # CORS must be added first
    # NOTE(review): wildcard origins with credentials disabled — presumably
    # acceptable for a public demo API; tighten for production.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=False,
        allow_methods=["*"],
        allow_headers=["*"],
        expose_headers=["*"],
    )

    # Registration order matters for middleware stacking; preserve it.
    app.add_middleware(SecurityHeadersMiddleware)
    app.add_middleware(RateLimitMiddleware, requests_per_minute=120, burst_limit=20)
    app.add_middleware(RequestValidationMiddleware)

    # Ensure scratch/static directories exist before mounting.
    settings.local_temp_dir.mkdir(parents=True, exist_ok=True)
    STATIC_DIR.mkdir(parents=True, exist_ok=True)

    app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")

    app.include_router(api_router)

    @app.get("/")
    async def root():
        # Both "/" and "/dashboard" serve the same single-page dashboard.
        return FileResponse(STATIC_DIR / "flow.html")

    @app.get("/dashboard")
    async def dashboard():
        return FileResponse(STATIC_DIR / "flow.html")

    @app.exception_handler(ValueError)
    async def value_error_handler(request: Request, exc: ValueError):
        # ValueErrors raised by handlers surface as 400s with their message.
        return JSONResponse(
            status_code=400,
            content={"detail": str(exc)}
        )

    @app.exception_handler(Exception)
    async def general_exception_handler(request: Request, exc: Exception):
        # Catch-all: log with traceback but hide internals from the client.
        logger.error(f"Unhandled exception: {exc}", exc_info=True)
        return JSONResponse(
            status_code=500,
            content={"detail": "Internal server error"}
        )

    return app
|
Backend/api/routes/__init__.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter
|
| 2 |
+
|
| 3 |
+
from .health import router as health_router
|
| 4 |
+
from .issues import router as issues_router
|
| 5 |
+
from .admin import router as admin_router
|
| 6 |
+
from .flow import router as flow_router
|
| 7 |
+
from .worker import router as worker_router
|
| 8 |
+
|
| 9 |
+
api_router = APIRouter()
|
| 10 |
+
|
| 11 |
+
api_router.include_router(health_router, prefix="/health", tags=["Health"])
|
| 12 |
+
api_router.include_router(issues_router, prefix="/issues", tags=["Issues"])
|
| 13 |
+
api_router.include_router(admin_router, prefix="/admin", tags=["Admin"])
|
| 14 |
+
api_router.include_router(flow_router, prefix="/flow", tags=["Agent Flow"])
|
| 15 |
+
api_router.include_router(worker_router, prefix="/worker", tags=["Worker"])
|
| 16 |
+
|
Backend/api/routes/admin.py
ADDED
|
@@ -0,0 +1,1160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime, timedelta, timezone
from typing import Optional, List
from uuid import UUID

import bcrypt
import jwt
from fastapi import APIRouter, Depends, HTTPException, status, Query
from pydantic import BaseModel, EmailStr
from sqlalchemy import select, func, or_, desc, asc
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload, aliased

from Backend.core.config import settings
from Backend.core.logging import get_logger
from Backend.core.schemas import IssueResponse, IssueState
from Backend.database.connection import get_db
from Backend.database.models import Department, Member, Issue, Escalation, Classification, IssueEvent, IssueImage
from Backend.utils.storage import get_upload_url
| 18 |
+
|
| 19 |
+
logger = get_logger(__name__)
router = APIRouter()

# JWT signing configuration for staff sessions.  The Supabase JWT secret is
# reused as the HS256 signing key — presumably so tokens verify consistently
# across services; confirm this is intentional.
SECRET_KEY = settings.supabase_jwt_secret
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_HOURS = 24
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt, using a fresh random salt."""
    salt = bcrypt.gensalt()
    digest = bcrypt.hashpw(password.encode(), salt)
    return digest.decode()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def verify_password(password: str, password_hash: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash."""
    candidate = password.encode()
    stored = password_hash.encode()
    return bcrypt.checkpw(candidate, stored)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def create_access_token(member_id: UUID, role: str) -> str:
    """Create a signed HS256 JWT for a staff member.

    Args:
        member_id: Member UUID, stored in the "sub" claim as a string.
        role: The member's role, stored in the "role" claim.

    Returns:
        The encoded JWT, valid for ACCESS_TOKEN_EXPIRE_HOURS.
    """
    # Fix: use timezone-aware UTC (datetime.utcnow() is deprecated in 3.12)
    # and capture "now" once so exp and iat are derived from the same instant.
    now = datetime.now(timezone.utc)
    expire = now + timedelta(hours=ACCESS_TOKEN_EXPIRE_HOURS)
    payload = {
        "sub": str(member_id),
        "role": role,
        "exp": expire,
        "iat": now,
    }
    return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class LoginRequest(BaseModel):
    """Credentials payload for the staff login endpoint."""

    email: str
    password: str
    # Optional portal gate ("admin" or "worker"): when set, login is rejected
    # if the account's role does not match the portal being used.
    expected_role: Optional[str] = None
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class LoginResponse(BaseModel):
    """Successful-login payload: bearer token plus a summary of the user."""

    access_token: str
    token_type: str = "bearer"
    # Plain dict of member fields (id, name, email, role, department_id).
    user: dict
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
from fastapi.security import OAuth2PasswordBearer
|
| 60 |
+
from jwt import PyJWTError
|
| 61 |
+
|
| 62 |
+
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/admin/login")
|
| 63 |
+
|
| 64 |
+
async def get_current_user(token: str = Depends(oauth2_scheme), db: AsyncSession = Depends(get_db)):
    """FastAPI dependency: resolve the Bearer token into a Member row.

    Raises:
        HTTPException: 401 if the token is invalid, expired, malformed, or
            references no existing member.
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        member_id: str = payload.get("sub")
        if member_id is None:
            raise HTTPException(status_code=401, detail="Invalid authentication credentials")
        member_uuid = UUID(member_id)
    except PyJWTError:
        raise HTTPException(status_code=401, detail="Invalid authentication credentials")
    except ValueError:
        # Fix: a syntactically invalid UUID in "sub" previously escaped as an
        # unhandled ValueError (surfacing as a 400/500); treat it as bad
        # credentials like any other token defect.
        raise HTTPException(status_code=401, detail="Invalid authentication credentials")

    member = await db.get(Member, member_uuid)
    if member is None:
        raise HTTPException(status_code=401, detail="User not found")
    return member
|
| 77 |
+
|
| 78 |
+
async def get_current_active_user(current_user: Member = Depends(get_current_user)):
    """Pass active members through; reject deactivated accounts with 400."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
|
| 82 |
+
|
| 83 |
+
async def get_current_admin(current_user: Member = Depends(get_current_active_user)):
    """Restrict an endpoint to members whose role is exactly "admin"."""
    if current_user.role == "admin":
        return current_user
    raise HTTPException(status_code=403, detail="Not authorized")
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
@router.post("/login", response_model=LoginResponse)
async def staff_login(
    data: LoginRequest,
    db: AsyncSession = Depends(get_db),
):
    """Authenticate a staff member (admin or worker) and issue a JWT.

    Looks up an active member by email, verifies the bcrypt password hash,
    optionally enforces that the account's role matches the portal the user
    logged in from, and returns a bearer token plus a user summary.
    """
    member = await db.execute(
        select(Member).where(Member.email == data.email, Member.is_active == True)
    )
    member = member.scalar_one_or_none()

    # Same 401 message for "unknown email" and "no password set" so the
    # response does not reveal which accounts exist.
    if not member or not member.password_hash:
        raise HTTPException(status_code=401, detail="Invalid email or password")

    if not verify_password(data.password, member.password_hash):
        raise HTTPException(status_code=401, detail="Invalid email or password")

    # Optional portal gate: non-admins can't use the admin portal, and
    # admins are redirected away from the worker portal.
    if data.expected_role:
        if data.expected_role == "admin" and member.role != "admin":
            raise HTTPException(status_code=403, detail="Access denied. You are not an admin.")
        if data.expected_role == "worker" and member.role == "admin":
            raise HTTPException(status_code=403, detail="Admins should login as Admin, not Worker.")

    access_token = create_access_token(member.id, member.role)

    return LoginResponse(
        access_token=access_token,
        user={
            "id": str(member.id),
            "name": member.name,
            "email": member.email,
            "role": member.role,
            "department_id": str(member.department_id) if member.department_id else None,
        },
    )
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class DepartmentCreate(BaseModel):
    """Payload for creating a department; create_department uppercases the code."""

    name: str
    code: str
    description: Optional[str] = None
    # Presumably a delimited list of category names — confirm against model.
    categories: Optional[str] = None
    default_sla_hours: int = 48
    escalation_email: Optional[str] = None
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
class DepartmentUpdate(BaseModel):
    """Partial-update payload for a department; unset fields are left untouched.

    Note: ``code`` is intentionally absent — department codes are immutable
    via this endpoint.
    """

    name: Optional[str] = None
    description: Optional[str] = None
    categories: Optional[str] = None
    default_sla_hours: Optional[int] = None
    escalation_email: Optional[str] = None
    is_active: Optional[bool] = None
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class DepartmentResponse(BaseModel):
    """Department as returned by the admin API, with a computed member count."""

    id: UUID
    name: str
    code: str
    description: Optional[str]
    categories: Optional[str]
    default_sla_hours: int
    escalation_email: Optional[str]
    is_active: bool
    # Computed per-request via COUNT over Member; not a column on the model.
    member_count: int = 0

    class Config:
        # Allow building the model from ORM attribute access.
        from_attributes = True
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class MemberInvite(BaseModel):
    """Payload for creating a member and optionally emailing a Supabase invite."""

    department_id: UUID
    name: str
    email: str
    phone: Optional[str] = None
    # NOTE(review): default differs from MemberCreate ("officer" vs "worker")
    # — confirm this asymmetry is intentional.
    role: str = "officer"
    city: Optional[str] = None
    locality: Optional[str] = None
    max_workload: int = 10
    # When True, invite_member sends a Supabase invite email after creation.
    send_invite: bool = True
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class MemberCreate(BaseModel):
    """Payload for directly creating a member with a password (no invite flow)."""

    department_id: UUID
    name: str
    email: str
    phone: Optional[str] = None
    role: str = "worker"
    city: Optional[str] = None
    locality: Optional[str] = None
    max_workload: int = 10
    # Plaintext on the wire; create_member hashes it into password_hash.
    password: str
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class MemberUpdate(BaseModel):
    """Partial-update payload for a member; unset fields are left untouched."""

    name: Optional[str] = None
    email: Optional[str] = None
    phone: Optional[str] = None
    role: Optional[str] = None
    city: Optional[str] = None
    locality: Optional[str] = None
    max_workload: Optional[int] = None
    is_active: Optional[bool] = None
    # Optional new plaintext password. NOTE(review): the Member model stores
    # password_hash (see create_member) — verify update_member hashes this
    # rather than setattr-ing a raw "password" attribute.
    password: Optional[str] = None
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
class MemberResponse(BaseModel):
    """Member as returned by the admin API (never exposes password_hash)."""

    id: UUID
    department_id: Optional[UUID]
    name: str
    email: str
    phone: Optional[str]
    role: str
    city: Optional[str]
    locality: Optional[str]
    is_active: bool
    current_workload: int
    max_workload: int
    # Only populated by invite_member ("sent" / "not_sent"); None elsewhere.
    invite_status: Optional[str] = None

    class Config:
        # Allow building the model from ORM attribute access.
        from_attributes = True
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
@router.post("/departments", response_model=DepartmentResponse, status_code=status.HTTP_201_CREATED)
async def create_department(
    data: DepartmentCreate,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """
    Create a department (admin only).

    Department codes are stored uppercased and must be unique; a duplicate
    code yields 400.
    """
    # BUG FIX: the row is persisted with the uppercased code, so the
    # uniqueness check must compare the uppercased form too. Previously
    # posting "abc" after "ABC" slipped past the check and created a
    # duplicate code.
    code = data.code.upper()
    existing = await db.execute(select(Department).where(Department.code == code))
    if existing.scalar_one_or_none():
        raise HTTPException(status_code=400, detail="Department code already exists")

    department = Department(
        name=data.name,
        code=code,
        description=data.description,
        categories=data.categories,
        default_sla_hours=data.default_sla_hours,
        escalation_email=data.escalation_email,
    )
    db.add(department)
    await db.flush()
    # Refresh to pick up DB-generated defaults (id, is_active).
    await db.refresh(department)

    return DepartmentResponse(
        id=department.id,
        name=department.name,
        code=department.code,
        description=department.description,
        categories=department.categories,
        default_sla_hours=department.default_sla_hours,
        escalation_email=department.escalation_email,
        is_active=department.is_active,
        member_count=0,  # brand-new department has no members yet
    )
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
@router.get("/departments", response_model=list[DepartmentResponse])
async def list_departments(
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """List all departments ordered by name, each with its member count."""
    result = await db.execute(select(Department).order_by(Department.name))
    departments = result.scalars().all()

    # PERF FIX: the original ran one COUNT query per department (N+1).
    # A single GROUP BY query returns every department's member count at once.
    counts_result = await db.execute(
        select(Member.department_id, func.count(Member.id))
        .group_by(Member.department_id)
    )
    counts = {dept_id: cnt for dept_id, cnt in counts_result.all()}

    return [
        DepartmentResponse(
            id=dept.id,
            name=dept.name,
            code=dept.code,
            description=dept.description,
            categories=dept.categories,
            default_sla_hours=dept.default_sla_hours,
            escalation_email=dept.escalation_email,
            is_active=dept.is_active,
            member_count=counts.get(dept.id, 0),
        )
        for dept in departments
    ]
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
@router.get("/departments/{department_id}", response_model=DepartmentResponse)
async def get_department(
    department_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """Fetch one department by id with its member count; 404 if missing."""
    department = await db.get(Department, department_id)
    if not department:
        raise HTTPException(status_code=404, detail="Department not found")

    # Separate COUNT query — member_count is computed, not stored.
    member_count = await db.execute(
        select(func.count(Member.id)).where(Member.department_id == department.id)
    )
    count = member_count.scalar() or 0

    return DepartmentResponse(
        id=department.id,
        name=department.name,
        code=department.code,
        description=department.description,
        categories=department.categories,
        default_sla_hours=department.default_sla_hours,
        escalation_email=department.escalation_email,
        is_active=department.is_active,
        member_count=count,
    )
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@router.patch("/departments/{department_id}", response_model=DepartmentResponse)
async def update_department(
    department_id: UUID,
    data: DepartmentUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Partially update a department (admin only); only fields present in the
    request body are applied. 404 if the department does not exist."""
    department = await db.get(Department, department_id)
    if not department:
        raise HTTPException(status_code=404, detail="Department not found")

    # exclude_unset: fields the client omitted are not touched, so None can
    # still be set explicitly where the schema allows it.
    update_data = data.model_dump(exclude_unset=True)
    for key, value in update_data.items():
        setattr(department, key, value)

    await db.flush()

    member_count = await db.execute(
        select(func.count(Member.id)).where(Member.department_id == department.id)
    )
    count = member_count.scalar() or 0

    return DepartmentResponse(
        id=department.id,
        name=department.name,
        code=department.code,
        description=department.description,
        categories=department.categories,
        default_sla_hours=department.default_sla_hours,
        escalation_email=department.escalation_email,
        is_active=department.is_active,
        member_count=count,
    )
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
@router.delete("/departments/{department_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_department(
    department_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Delete a department by id (admin only); responds 204 on success, 404 if absent."""
    target = await db.get(Department, department_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Department not found")

    await db.delete(target)
    await db.flush()
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
@router.post("/members/invite", status_code=status.HTTP_201_CREATED)
async def invite_member(
    data: MemberInvite,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """
    Create a member and optionally email them a Supabase invite (admin only).

    The member row is created regardless of whether the invite email sends;
    the response reports the invite outcome separately so the caller can
    retry via POST /members/{id}/send-invite.
    """
    department = await db.get(Department, data.department_id)
    if not department:
        raise HTTPException(status_code=404, detail="Department not found")

    existing = await db.execute(select(Member).where(Member.email == data.email))
    if existing.scalar_one_or_none():
        raise HTTPException(status_code=400, detail="Email already exists")

    invite_result = None
    if data.send_invite:
        # Supabase emails the invite; the link redirects back to the frontend
        # auth callback to finish account setup.
        invite_result = await supabase_auth.invite_user(
            email=data.email,
            redirect_to=f"{settings.frontend_url}/auth/callback"
        )

    # No password_hash is set here — invited members are expected to complete
    # sign-up through the invite flow.
    member = Member(
        department_id=data.department_id,
        name=data.name,
        email=data.email,
        phone=data.phone,
        role=data.role,
        city=data.city,
        locality=data.locality,
        max_workload=data.max_workload,
    )
    db.add(member)
    await db.flush()
    await db.refresh(member)

    return {
        "member": MemberResponse(
            id=member.id,
            department_id=member.department_id,
            name=member.name,
            email=member.email,
            phone=member.phone,
            role=member.role,
            city=member.city,
            locality=member.locality,
            is_active=member.is_active,
            current_workload=member.current_workload,
            max_workload=member.max_workload,
            invite_status="sent" if invite_result and invite_result.get("success") else "not_sent",
        ),
        "invite": invite_result,
        "message": f"Member created. {'Invite email sent!' if invite_result and invite_result.get('success') else 'No invite sent.'}",
    }
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
@router.post("/members", response_model=MemberResponse, status_code=status.HTTP_201_CREATED)
async def create_member(
    data: MemberCreate,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Create a member with a password directly (admin only), bypassing the
    invite flow. 404 if the department is unknown, 400 on duplicate email."""
    department = await db.get(Department, data.department_id)
    if not department:
        raise HTTPException(status_code=404, detail="Department not found")

    existing = await db.execute(select(Member).where(Member.email == data.email))
    if existing.scalar_one_or_none():
        raise HTTPException(status_code=400, detail="Email already exists")

    member = Member(
        department_id=data.department_id,
        name=data.name,
        email=data.email,
        phone=data.phone,
        role=data.role,
        city=data.city,
        locality=data.locality,
        max_workload=data.max_workload,
        # Only the hash is ever persisted; the plaintext stays in the request.
        password_hash=hash_password(data.password),
    )
    db.add(member)
    await db.flush()
    # Refresh to pick up DB-generated defaults (id, is_active, workload).
    await db.refresh(member)

    return MemberResponse(
        id=member.id,
        department_id=member.department_id,
        name=member.name,
        email=member.email,
        phone=member.phone,
        role=member.role,
        city=member.city,
        locality=member.locality,
        is_active=member.is_active,
        current_workload=member.current_workload,
        max_workload=member.max_workload,
    )
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
@router.post("/members/{member_id}/send-invite")
async def send_member_invite(
    member_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """(Re-)send a Supabase invite email to an existing member (admin only).

    Returns 404 for an unknown member, 500 if FRONTEND_URL is unset, and 400
    with the provider's message when the invite fails to send.
    """
    member = await db.get(Member, member_id)
    if not member:
        raise HTTPException(status_code=404, detail="Member not found")

    # The invite link redirects to the frontend; refuse rather than send a
    # broken link when the URL is not configured.
    if not settings.frontend_url:
        raise HTTPException(status_code=500, detail="FRONTEND_URL not configured")

    result = await supabase_auth.invite_user(
        email=member.email,
        redirect_to=f"{settings.frontend_url}/auth/callback"
    )

    if result.get("success"):
        return {
            "success": True,
            "message": f"Invite sent to {member.email}",
            "member_id": str(member.id),
        }
    else:
        raise HTTPException(
            status_code=400,
            detail=result.get("message", "Failed to send invite")
        )
|
| 492 |
+
|
| 493 |
+
|
| 494 |
+
@router.post("/members/{member_id}/magic-link")
async def send_magic_link(
    member_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Send a Supabase magic-link sign-in email to a member (admin only)."""
    target = await db.get(Member, member_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Member not found")

    # Refuse to send a link pointing nowhere when the frontend URL is unset.
    if not settings.frontend_url:
        raise HTTPException(status_code=500, detail="FRONTEND_URL not configured")

    outcome = await supabase_auth.send_magic_link(
        email=target.email,
        redirect_to=f"{settings.frontend_url}/auth/callback"
    )

    if not outcome.get("success"):
        raise HTTPException(
            status_code=400,
            detail=outcome.get("message", "Failed to send magic link")
        )

    return {
        "success": True,
        "message": f"Magic link sent to {target.email}",
    }
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
@router.get("/members", response_model=list[MemberResponse])
async def list_members(
    department_id: Optional[UUID] = None,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """List members ordered by name, optionally filtered to one department."""
    stmt = select(Member).order_by(Member.name)
    if department_id:
        stmt = stmt.where(Member.department_id == department_id)

    rows = (await db.execute(stmt)).scalars().all()

    return [
        MemberResponse(
            id=row.id,
            department_id=row.department_id,
            name=row.name,
            email=row.email,
            phone=row.phone,
            role=row.role,
            city=row.city,
            locality=row.locality,
            is_active=row.is_active,
            current_workload=row.current_workload,
            max_workload=row.max_workload,
        )
        for row in rows
    ]
|
| 553 |
+
|
| 554 |
+
|
| 555 |
+
@router.get("/members/{member_id}", response_model=MemberResponse)
async def get_member(
    member_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """Fetch a single member by id; 404 when no such member exists."""
    record = await db.get(Member, member_id)
    if record is None:
        raise HTTPException(status_code=404, detail="Member not found")

    return MemberResponse(
        id=record.id,
        department_id=record.department_id,
        name=record.name,
        email=record.email,
        phone=record.phone,
        role=record.role,
        city=record.city,
        locality=record.locality,
        is_active=record.is_active,
        current_workload=record.current_workload,
        max_workload=record.max_workload,
    )
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
@router.patch("/members/{member_id}", response_model=MemberResponse)
async def update_member(
    member_id: UUID,
    data: MemberUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Partially update a member (admin only); only fields present in the
    request body are applied. A supplied password is hashed before storage."""
    member = await db.get(Member, member_id)
    if not member:
        raise HTTPException(status_code=404, detail="Member not found")

    update_data = data.model_dump(exclude_unset=True)

    # BUG FIX: "password" is not a column on Member — the model stores
    # password_hash (see create_member). The original setattr loop would have
    # set a stray plaintext "password" attribute and never updated the hash.
    password = update_data.pop("password", None)
    if password:
        member.password_hash = hash_password(password)

    for key, value in update_data.items():
        setattr(member, key, value)

    await db.flush()

    return MemberResponse(
        id=member.id,
        department_id=member.department_id,
        name=member.name,
        email=member.email,
        phone=member.phone,
        role=member.role,
        city=member.city,
        locality=member.locality,
        is_active=member.is_active,
        current_workload=member.current_workload,
        max_workload=member.max_workload,
    )
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
@router.delete("/members/{member_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_member(
    member_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Permanently delete a member by id (admin only); 204 on success, 404 if absent."""
    target = await db.get(Member, member_id)
    if target is None:
        raise HTTPException(status_code=404, detail="Member not found")

    await db.delete(target)
    await db.flush()
|
| 624 |
+
|
| 625 |
+
|
| 626 |
+
@router.get("/stats")
async def get_admin_stats(
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """
    Dashboard summary: entity counts, issues by state, top categories, and a
    7-day reported/resolved activity series (UTC days, newest last).
    """
    # Local imports avoid widening the module header for a single endpoint
    # (and re-import datetime names already available at module scope).
    from Backend.database.models import Issue, Classification
    from datetime import datetime, timedelta

    dept_count = await db.execute(select(func.count(Department.id)))
    member_count = await db.execute(select(func.count(Member.id)))
    issue_count = await db.execute(select(func.count(Issue.id)))
    # "pending" = not yet in progress/resolved per these three states.
    pending_count = await db.execute(
        select(func.count(Issue.id)).where(Issue.state.in_(["reported", "validated", "assigned"]))
    )
    resolved_count = await db.execute(
        select(func.count(Issue.id)).where(Issue.state.in_(["resolved", "closed", "verified"]))
    )
    verification_count = await db.execute(
        select(func.count(Issue.id)).where(Issue.state == "pending_verification")
    )

    # Top 6 classification categories by frequency.
    category_query = (
        select(
            Classification.primary_category,
            func.count(Classification.id).label("count")
        )
        .group_by(Classification.primary_category)
        .order_by(func.count(Classification.id).desc())
        .limit(6)
    )
    category_result = await db.execute(category_query)
    categories = category_result.all()
    issues_by_category = [{"name": cat or "Unknown", "value": cnt} for cat, cnt in categories]

    # 7-day activity window, oldest day first. Naive UTC timestamps are used
    # throughout this module (datetime.utcnow()).
    today = datetime.utcnow().date()
    day_names = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
    issues_activity = []

    # NOTE(review): two COUNT queries per day (14 total) — could be collapsed
    # into two GROUP BY date queries if this endpoint becomes hot.
    for i in range(6, -1, -1):
        day = today - timedelta(days=i)
        day_start = datetime.combine(day, datetime.min.time())
        day_end = datetime.combine(day, datetime.max.time())

        reported_q = await db.execute(
            select(func.count(Issue.id)).where(
                Issue.created_at >= day_start,
                Issue.created_at <= day_end
            )
        )
        resolved_q = await db.execute(
            select(func.count(Issue.id)).where(
                Issue.resolved_at >= day_start,
                Issue.resolved_at <= day_end
            )
        )

        issues_activity.append({
            "name": day_names[day.weekday()],
            "reported": reported_q.scalar() or 0,
            "resolved": resolved_q.scalar() or 0
        })

    return {
        "departments": dept_count.scalar() or 0,
        "members": member_count.scalar() or 0,
        "total_issues": issue_count.scalar() or 0,
        "pending_issues": pending_count.scalar() or 0,
        "resolved_issues": resolved_count.scalar() or 0,
        "verification_needed": verification_count.scalar() or 0,
        "issues_by_category": issues_by_category,
        "issues_activity": issues_activity,
    }
|
| 698 |
+
|
| 699 |
+
|
| 700 |
+
@router.get("/stats/heatmap")
async def get_issue_heatmap(
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """
    Returns city-aggregated issue counts for heatmap visualization.
    """
    # Only open issues with a known city, busiest cities first.
    stmt = (
        select(
            Issue.city,
            func.count(Issue.id).label("count"),
            func.avg(Issue.priority).label("priority_avg")
        )
        .where(Issue.state.notin_(["closed", "resolved", "verified"]))
        .where(Issue.city.isnot(None))
        .group_by(Issue.city)
        .order_by(func.count(Issue.id).desc())
    )
    rows = (await db.execute(stmt)).all()

    return [
        {
            "city": city or "Unknown",
            "count": count,
            # Fall back to mid-scale 3 when AVG returns NULL.
            "priority_avg": round(float(priority_avg or 3), 1),
        }
        for city, count, priority_avg in rows
    ]
|
| 731 |
+
|
| 732 |
+
|
| 733 |
+
@router.get("/stats/escalations", response_model=list[dict])
async def get_escalation_alerts(
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """
    Returns a list of currently escalated issues with details.
    """
    query = (
        select(Issue, Escalation)
        .join(Escalation, Issue.id == Escalation.issue_id)
        # Eager-load classification: it is read below, and a lazy load on an
        # async session would fail (no implicit IO in asyncio SQLAlchemy).
        .options(selectinload(Issue.classification))
        .where(Issue.state == "escalated")
        .order_by(Escalation.created_at.desc())
    )
    result = await db.execute(query)
    rows = result.all()

    alerts = []
    for issue, esc in rows:
        alerts.append({
            "issue_id": issue.id,
            "category": issue.classification.primary_category if issue.classification else "Unknown",
            "priority": issue.priority,
            "escalated_at": esc.created_at,
            "level": esc.to_level,
            "reason": esc.reason,
            "city": issue.city,
            "locality": issue.locality
        })

    # BUG FIX: the original fell off the end without returning, so the
    # endpoint always responded with null instead of the alerts list.
    return alerts
|
| 762 |
+
|
| 763 |
+
|
| 764 |
+
class ManualReviewRequest(BaseModel):
    """Payload for a manual review decision on an issue.

    NOTE(review): no endpoint in this view consumes it — presumably used by a
    route outside this chunk; confirm the valid ``status`` values there.
    """

    status: str
    reason: Optional[str] = None
|
| 767 |
+
|
| 768 |
+
|
| 769 |
+
|
| 770 |
+
class AdminIssueListItem(BaseModel):
    """Compact issue row for the admin list view (see list_admin_issues)."""

    id: UUID
    description: Optional[str]
    state: str
    priority: Optional[int]
    city: Optional[str]
    created_at: datetime
    updated_at: datetime
    # Flattened display names, not ids.
    department: Optional[str]
    assigned_to: Optional[str]
    category: Optional[str]
    sla_deadline: Optional[datetime]
    # URL of the first attached image, if any.
    thumbnail: Optional[str]

    class Config:
        # Allow building the model from ORM attribute access.
        from_attributes = True
|
| 786 |
+
|
| 787 |
+
def issue_to_response(issue: Issue) -> IssueResponse:
    """Map an Issue ORM row (images/classification already loaded) to the
    public IssueResponse schema, converting stored file paths to URLs."""
    image_urls = [get_upload_url(img.file_path) for img in issue.images]
    annotated_urls = [
        get_upload_url(img.annotated_path)
        for img in issue.images
        if img.annotated_path
    ]

    proof_image_url = (
        get_upload_url(issue.proof_image_path) if issue.proof_image_path else None
    )

    classification = issue.classification
    return IssueResponse(
        id=issue.id,
        description=issue.description,
        latitude=issue.latitude,
        longitude=issue.longitude,
        state=IssueState(issue.state),
        priority=issue.priority,
        category=classification.primary_category if classification else None,
        confidence=classification.primary_confidence if classification else None,
        image_urls=image_urls,
        annotated_urls=annotated_urls,
        proof_image_url=proof_image_url,
        validation_source=issue.validation_source,
        is_duplicate=issue.is_duplicate,
        parent_issue_id=issue.parent_issue_id,
        city=issue.city,
        locality=issue.locality,
        full_address=issue.full_address,
        sla_hours=issue.sla_hours,
        sla_deadline=issue.sla_deadline,
        created_at=issue.created_at,
        updated_at=issue.updated_at,
    )
|
| 822 |
+
|
| 823 |
+
@router.get("/issues", response_model=dict)
async def list_admin_issues(
    page: int = Query(1, ge=1),
    limit: int = Query(20, ge=1, le=100),
    status: Optional[str] = None,
    priority: Optional[int] = None,
    department_id: Optional[UUID] = None,
    worker_id: Optional[UUID] = None,
    search: Optional[str] = None,
    sort_by: str = "created_at",
    sort_order: str = "desc",
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """
    Paginated, filterable admin issue list.

    ``status`` accepts a comma-separated list of states; ``search`` matches
    description, city, locality, or the stringified issue id. Returns
    ``{items, total, page, limit, pages}``.
    """
    # Eager-load everything the list item needs — lazy loads would fail on an
    # async session.
    query = (
        select(Issue)
        .options(
            selectinload(Issue.department),
            selectinload(Issue.assigned_member),
            selectinload(Issue.classification),
            selectinload(Issue.images)
        )
    )

    if status:
        statuses = status.split(",")
        query = query.where(Issue.state.in_(statuses))

    if priority is not None:
        query = query.where(Issue.priority == priority)

    if department_id:
        query = query.where(Issue.department_id == department_id)

    if worker_id:
        query = query.where(Issue.assigned_member_id == worker_id)

    if search:
        search_filter = or_(
            Issue.description.ilike(f"%{search}%"),
            Issue.city.ilike(f"%{search}%"),
            Issue.locality.ilike(f"%{search}%"),
            Issue.id.cast(String).ilike(f"%{search}%")
        )
        query = query.where(search_filter)

    # NOTE(review): getattr accepts any Issue attribute name (including
    # relationships) as sort_by; consider validating against a column
    # whitelist. Unknown names fall back to created_at.
    sort_column = getattr(Issue, sort_by, Issue.created_at)
    if sort_order == "asc":
        query = query.order_by(asc(sort_column))
    else:
        query = query.order_by(desc(sort_column))

    # Count over the filtered query (as a subquery) before pagination.
    total_query = select(func.count()).select_from(query.subquery())
    total_result = await db.execute(total_query)
    total = total_result.scalar_one()

    query = query.offset((page - 1) * limit).limit(limit)
    result = await db.execute(query)
    issues = result.scalars().all()

    items = []
    for issue in issues:
        # First attached image doubles as the thumbnail.
        thumb = None
        if issue.images and len(issue.images) > 0:
            thumb = get_upload_url(issue.images[0].file_path)

        items.append(AdminIssueListItem(
            id=issue.id,
            description=issue.description,
            state=issue.state,
            priority=issue.priority,
            city=issue.city,
            created_at=issue.created_at,
            updated_at=issue.updated_at,
            department=issue.department.name if issue.department else None,
            assigned_to=issue.assigned_member.name if issue.assigned_member else None,
            category=issue.classification.primary_category if issue.classification else None,
            sla_deadline=issue.sla_deadline,
            thumbnail=thumb
        ))

    return {
        "items": items,
        "total": total,
        "page": page,
        "limit": limit,
        # Ceiling division for the page count.
        "pages": (total + limit - 1) // limit
    }
|
| 917 |
+
|
| 918 |
+
@router.get("/issues/{issue_id}/details")
async def get_admin_issue_details(
    issue_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """
    Full admin detail view for one issue: the serialized issue plus its
    department, assigned worker, event history (newest first), and duplicates.
    """
    # Eager-load all relationships read below (async session — no lazy IO).
    query = (
        select(Issue)
        .options(
            selectinload(Issue.department),
            selectinload(Issue.classification),
            selectinload(Issue.images),
            selectinload(Issue.events),
            selectinload(Issue.duplicates)
        )
        .where(Issue.id == issue_id)
    )
    result = await db.execute(query)
    issue = result.scalar_one_or_none()

    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")

    # assigned_member is fetched separately by id (not in the options above).
    worker = None
    if issue.assigned_member_id:
        worker = await db.get(Member, issue.assigned_member_id)

    return {
        "issue": issue_to_response(issue),
        "department": {
            "id": issue.department.id,
            "name": issue.department.name
        } if issue.department else None,
        "worker": {
            "id": worker.id,
            "name": worker.name,
            "email": worker.email,
            "workload": worker.current_workload
        } if worker else None,
        "events": [
            {
                "id": e.id,
                "type": e.event_type,
                "agent": e.agent_name,
                "data": e.event_data,
                "created_at": e.created_at
            } for e in sorted(issue.events, key=lambda x: x.created_at, reverse=True)
        ],
        "duplicates": [
            {
                "id": d.id,
                "created_at": d.created_at,
                "status": d.state
            } for d in issue.duplicates
        ]
    }
|
| 975 |
+
|
| 976 |
+
@router.get("/workers/performance")
async def get_worker_performance(
    department_id: Optional[UUID] = None,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_active_user),
):
    """Per-worker performance summary, optionally scoped to one department.

    Returns one row per worker with current/max workload, total resolved
    issues, and a rough efficiency score (resolved per week since the
    worker account was created, clamped to at least one week).

    Improvement: the original issued one COUNT query per worker (N+1);
    this version fetches all counts in a single grouped query.
    """
    q = select(Member).where(Member.role == "worker")
    if department_id:
        q = q.where(Member.department_id == department_id)

    res = await db.execute(q)
    workers = res.scalars().all()
    if not workers:
        return []

    # One grouped aggregate instead of a COUNT round trip per worker.
    counts_result = await db.execute(
        select(Issue.assigned_member_id, func.count(Issue.id))
        .where(
            Issue.assigned_member_id.in_([w.id for w in workers]),
            Issue.state.in_(["resolved", "closed"]),
        )
        .group_by(Issue.assigned_member_id)
    )
    resolved_by_worker = dict(counts_result.all())

    performance_data = []
    for w in workers:
        resolved = resolved_by_worker.get(w.id, 0)
        # Weeks since account creation, never less than 1 to avoid
        # division blow-up for brand-new workers.
        weeks_active = max(1, (datetime.utcnow() - w.created_at).days / 7)
        performance_data.append({
            "id": w.id,
            "name": w.name,
            "active": w.is_active,
            "current_load": w.current_workload,
            "max_load": w.max_workload,
            "resolved_total": resolved,
            "efficiency": round(resolved / weeks_active, 1),
        })

    return performance_data
|
| 1018 |
+
|
| 1019 |
+
@router.patch("/issues/{issue_id}", response_model=IssueResponse)
async def update_issue_details(
    issue_id: UUID,
    data: dict,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Admin patch of an issue: optional ``priority`` and/or
    ``assigned_member_id`` (a worker UUID string, or null to unassign).

    Bug fix: the previous assignee's ``current_workload`` is now
    decremented on reassignment or unassignment. Before, it was only ever
    incremented, so workloads drifted upward permanently. Reassigning the
    same worker no longer double-counts either.

    Raises:
        HTTPException 404: unknown issue.
        HTTPException 400: ``assigned_member_id`` does not match a member.
    """
    issue = await db.get(Issue, issue_id)
    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")

    if "priority" in data:
        issue.priority = data["priority"]

    if "assigned_member_id" in data:
        new_worker_id = data["assigned_member_id"]

        async def _release_previous_worker():
            # Give the outgoing assignee their workload slot back.
            if issue.assigned_member_id:
                prev = await db.get(Member, issue.assigned_member_id)
                if prev and prev.current_workload > 0:
                    prev.current_workload -= 1

        if new_worker_id:
            # str() tolerates both a UUID instance and a string payload.
            worker = await db.get(Member, UUID(str(new_worker_id)))
            if not worker:
                raise HTTPException(status_code=400, detail="Worker not found")
            if worker.id != issue.assigned_member_id:
                await _release_previous_worker()
                worker.current_workload += 1
            issue.assigned_member_id = worker.id
            issue.state = "assigned"
        else:
            await _release_previous_worker()
            issue.assigned_member_id = None

    await db.commit()
    await db.refresh(issue)

    return issue_to_response(issue)
|
| 1055 |
+
|
| 1056 |
+
class ResolutionReviewRequest(BaseModel):
    """Payload for the admin resolution-review endpoint."""
    # Either "approve" or "reject"; any other value yields HTTP 400.
    action: str
    # Optional admin note appended to the issue's resolution_notes.
    comment: Optional[str] = None
|
| 1059 |
+
|
| 1060 |
+
@router.post("/issues/{issue_id}/approve_resolution")
async def approve_resolution(
    issue_id: UUID,
    data: ResolutionReviewRequest,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """Admin review of a worker-submitted resolution.

    ``approve`` finalizes the issue (state -> resolved, completed_at set)
    and frees the assignee's workload slot; ``reject`` sends it back to
    the worker (state -> in_progress). Only issues currently in
    ``pending_verification`` can be reviewed.
    """
    issue = await db.get(Issue, issue_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Issue not found")

    if issue.state != "pending_verification":
        raise HTTPException(status_code=400, detail="Issue is not pending verification.")

    if data.action == "approve":
        issue.state = "resolved"
        issue.completed_at = datetime.utcnow()
        if data.comment:
            issue.resolution_notes = (issue.resolution_notes or "") + f"\nAdmin Note: {data.comment}"

        # The issue is done: release the assignee's workload slot.
        if issue.assigned_member_id:
            assignee = await db.get(Member, issue.assigned_member_id)
            if assignee and assignee.current_workload > 0:
                assignee.current_workload -= 1

        await db.commit()
        return {"message": "Issue resolution approved and marked as resolved."}

    if data.action == "reject":
        issue.state = "in_progress"
        if data.comment:
            issue.resolution_notes = (issue.resolution_notes or "") + f"\n[REJECTED]: {data.comment}"
        await db.commit()
        return {"message": "Issue resolution rejected. Sent back to worker."}

    raise HTTPException(status_code=400, detail="Invalid action.")
|
| 1102 |
+
|
| 1103 |
+
@router.post("/issues/{issue_id}/review")
async def review_issue(
    issue_id: UUID,
    data: ManualReviewRequest,
    db: AsyncSession = Depends(get_db),
    current_user: Member = Depends(get_current_admin),
):
    """
    Manually review an issue.

    - If REJECTED: mark as rejected and store the reason.
    - If APPROVED: auto-assign to the least-loaded active worker
      (scoped to the issue's department when one is set). If no worker
      is available the issue is parked in the "verified" state.

    Cleanups vs. original: PEP8 variable naming (``workers`` instead of
    ``Workers``) and idiomatic SQLAlchemy boolean comparison.
    """
    issue = await db.get(Issue, issue_id)
    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")

    if data.status == "rejected":
        issue.state = "rejected"
        issue.resolution_notes = data.reason or "Rejected during manual review."
        await db.commit()
        return {"message": "Issue rejected successfully"}

    if data.status == "approved":
        # Least-loaded active worker first.
        query = (
            select(Member)
            .where(Member.role == "worker", Member.is_active.is_(True))
            .order_by(Member.current_workload.asc())
        )
        if issue.department_id:
            query = query.where(Member.department_id == issue.department_id)

        result = await db.execute(query)
        workers = result.scalars().all()

        selected_worker = None
        if not workers:
            # No capacity: keep the issue verified so it can be picked up
            # once a worker becomes available.
            issue.state = "verified"
            issue.resolution_notes = "Verified but no workers available for auto-assignment."
        else:
            selected_worker = workers[0]
            issue.assigned_member_id = selected_worker.id
            issue.state = "assigned"
            selected_worker.current_workload += 1
            db.add(selected_worker)

        await db.commit()

        return {
            "message": f"Issue approved. {'Assigned to ' + selected_worker.name if selected_worker else 'No worker available, queued as verified.'}",
            "assigned_to": str(selected_worker.id) if selected_worker else None
        }

    raise HTTPException(status_code=400, detail="Invalid status. Use 'approved' or 'rejected'.")
|
Backend/api/routes/flow.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import json
|
| 3 |
+
from dataclasses import asdict
|
| 4 |
+
from typing import Optional
|
| 5 |
+
from uuid import UUID
|
| 6 |
+
from fastapi import APIRouter, Depends, Query
|
| 7 |
+
from fastapi.responses import StreamingResponse
|
| 8 |
+
from sqlalchemy import select
|
| 9 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 10 |
+
|
| 11 |
+
from Backend.database.connection import get_db
|
| 12 |
+
from Backend.database.models import Issue, IssueEvent
|
| 13 |
+
from Backend.core.flow_tracker import get_flow_tracker, _active_flows
|
| 14 |
+
|
| 15 |
+
router = APIRouter()
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
async def event_generator(issue_id: UUID, timeout: int = 300):
    """Yield Server-Sent-Event frames for one issue's agent-flow progress.

    Frames (each ``data: <json>\\n\\n``): an initial ``connected`` snapshot
    with the steps recorded so far, then live tracker messages until a
    ``flow_completed``/``flow_error`` message, with a ``heartbeat`` every
    30s of silence to keep the connection alive.

    NOTE(review): the ``timeout`` parameter is never used in the body —
    confirm whether an overall stream deadline was intended.
    """
    tracker = get_flow_tracker(issue_id)

    # No tracker means the pipeline never started or already finished and
    # was removed; tell the client rather than hanging.
    if not tracker:
        yield f"data: {json.dumps({'type': 'error', 'message': 'No active flow for this issue'})}\n\n"
        return

    queue = tracker.subscribe()

    try:
        # Initial snapshot so late subscribers see steps already completed.
        start_msg = {
            "type": "connected",
            "issue_id": str(issue_id),
            "message": "Connected to agent flow stream",
            "current_steps": [asdict(s) for s in tracker.flow.steps]
        }
        yield f"data: {json.dumps(start_msg)}\n\n"

        # Flow already finished: emit the terminal frame and stop —
        # no further messages will ever arrive on the queue.
        if tracker.flow.status in ["completed", "error"]:
            yield f"data: {json.dumps({'type': 'flow_' + tracker.flow.status, 'data': tracker.flow.to_dict()})}\n\n"
            return

        while True:
            try:
                # 30s wait bounds the silence between frames so proxies
                # don't drop the idle connection.
                message = await asyncio.wait_for(queue.get(), timeout=30)
                yield f"data: {json.dumps(message)}\n\n"

                if message.get("type") in ["flow_completed", "flow_error"]:
                    break
            except asyncio.TimeoutError:
                yield f"data: {json.dumps({'type': 'heartbeat'})}\n\n"
    finally:
        # Always detach, including on client disconnect (GeneratorExit),
        # so the tracker doesn't keep pushing into a dead queue.
        tracker.unsubscribe(queue)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
@router.get("/flow/{issue_id}")
async def stream_agent_flow(issue_id: UUID):
    """SSE endpoint streaming live agent-flow updates for one issue."""
    sse_headers = {
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
        # Disable reverse-proxy buffering so frames flush immediately.
        "X-Accel-Buffering": "no",
    }
    return StreamingResponse(
        event_generator(issue_id),
        media_type="text/event-stream",
        headers=sse_headers,
    )
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@router.get("/flow/active")
async def list_active_flows():
    """List all in-memory agent flows currently being tracked."""
    # NOTE(review): this route is registered AFTER "/flow/{issue_id}", so a
    # request to GET /flow/active matches the path-parameter route first and
    # fails UUID parsing (422), making this endpoint unreachable. Consider
    # registering it before the parameterized route or renaming the path —
    # confirm intended behavior before changing the URL.
    return {
        "active_flows": [
            {
                "issue_id": str(issue_id),
                "status": tracker.flow.status,
                "steps_count": len(tracker.flow.steps),
                "started_at": tracker.flow.started_at,
            }
            for issue_id, tracker in _active_flows.items()
        ]
    }
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
@router.get("/events/{issue_id}")
async def get_issue_events(
    issue_id: UUID,
    limit: int = Query(50, ge=1, le=200),
    db: AsyncSession = Depends(get_db),
):
    """Return up to ``limit`` persisted agent events for an issue, oldest first."""
    stmt = (
        select(IssueEvent)
        .where(IssueEvent.issue_id == issue_id)
        .order_by(IssueEvent.created_at.asc())
        .limit(limit)
    )
    rows = (await db.execute(stmt)).scalars().all()

    serialized = []
    for ev in rows:
        serialized.append({
            "id": str(ev.id),
            "event_type": ev.event_type,
            "agent_name": ev.agent_name,
            # event_data is stored as a JSON string; decode for the client.
            "event_data": json.loads(ev.event_data) if ev.event_data else None,
            "created_at": ev.created_at.isoformat(),
        })

    return {"issue_id": str(issue_id), "events": serialized}
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@router.get("/timeline/{issue_id}")
async def get_issue_timeline(
    issue_id: UUID,
    db: AsyncSession = Depends(get_db),
):
    """Chronological timeline for an issue: a synthetic creation entry
    followed by every persisted agent event, oldest first."""
    issue = await db.get(Issue, issue_id)
    if issue is None:
        return {"error": "Issue not found"}

    stmt = (
        select(IssueEvent)
        .where(IssueEvent.issue_id == issue_id)
        .order_by(IssueEvent.created_at.asc())
    )
    events = (await db.execute(stmt)).scalars().all()

    # Creation is a synthetic first entry; the rest come from stored events.
    timeline = [{
        "timestamp": issue.created_at.isoformat(),
        "event": "issue_created",
        "agent": "System",
        "details": {
            "latitude": issue.latitude,
            "longitude": issue.longitude,
            "description": issue.description,
        },
    }]

    timeline.extend(
        {
            "timestamp": ev.created_at.isoformat(),
            "event": ev.event_type,
            "agent": ev.agent_name or "Unknown",
            "details": json.loads(ev.event_data) if ev.event_data else {},
        }
        for ev in events
    )

    return {
        "issue_id": str(issue_id),
        "current_state": issue.state,
        "priority": issue.priority,
        "is_duplicate": issue.is_duplicate,
        "timeline": timeline,
    }
|
Backend/api/routes/health.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import APIRouter
|
| 2 |
+
from sqlalchemy import text
|
| 3 |
+
|
| 4 |
+
from Backend.database.connection import async_session_factory
|
| 5 |
+
|
| 6 |
+
router = APIRouter()
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@router.get("/health")
async def health_check():
    """Liveness probe: reports healthy whenever the process is serving."""
    return {"status": "healthy", "service": "city-issue-agent"}
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@router.get("/health/db")
async def db_health_check():
    """Readiness probe: opens a session and runs SELECT 1 against the DB.

    Never raises — connectivity failures are reported in the payload so
    monitoring can distinguish app-up/db-down.
    """
    try:
        async with async_session_factory() as session:
            await session.execute(text("SELECT 1"))
    except Exception as exc:
        return {"status": "unhealthy", "database": "disconnected", "error": str(exc)}
    return {"status": "healthy", "database": "connected"}
|
Backend/api/routes/issues.py
ADDED
|
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from pydantic import BaseModel
|
| 4 |
+
from fastapi import APIRouter, Depends, File, Form, HTTPException, Query, UploadFile, status, BackgroundTasks
|
| 5 |
+
from sqlalchemy import select, func
|
| 6 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 7 |
+
from sqlalchemy.orm import selectinload
|
| 8 |
+
|
| 9 |
+
from Backend.core.schemas import IssueCreate, IssueResponse, IssueListResponse, IssueState
|
| 10 |
+
from Backend.core.flow_tracker import create_flow_tracker, remove_flow_tracker
|
| 11 |
+
from Backend.database.connection import get_db, get_db_context
|
| 12 |
+
from Backend.database.models import Issue, Classification
|
| 13 |
+
from Backend.services.ingestion import IngestionService
|
| 14 |
+
from Backend.agents import (
|
| 15 |
+
VisionAgent,
|
| 16 |
+
GeoDeduplicateAgent,
|
| 17 |
+
PriorityAgent,
|
| 18 |
+
RoutingAgent,
|
| 19 |
+
NotificationAgent,
|
| 20 |
+
)
|
| 21 |
+
from Backend.utils.storage import get_upload_url
|
| 22 |
+
from Backend.core.auth import get_user_id_from_form_token
|
| 23 |
+
from Backend.core.logging import get_logger
|
| 24 |
+
|
| 25 |
+
logger = get_logger(__name__)
|
| 26 |
+
|
| 27 |
+
router = APIRouter()
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def issue_to_response(issue: Issue) -> IssueResponse:
    """Map an ORM Issue (with images/classification already loaded) to the
    public API schema, resolving stored file paths to download URLs."""
    image_urls = [get_upload_url(img.file_path) for img in issue.images]
    annotated_urls = [
        get_upload_url(img.annotated_path)
        for img in issue.images
        if img.annotated_path
    ]

    classification = issue.classification

    if issue.is_duplicate:
        geo_status = "Duplicate"
    elif issue.geo_cluster_id:
        geo_status = "Clustered"
    else:
        geo_status = "Unique Location"

    return IssueResponse(
        id=issue.id,
        description=issue.description,
        latitude=issue.latitude,
        longitude=issue.longitude,
        state=IssueState(issue.state),
        priority=issue.priority,
        category=classification.primary_category if classification else None,
        confidence=classification.primary_confidence if classification else None,
        image_urls=image_urls,
        annotated_urls=annotated_urls,
        validation_source=issue.validation_source,
        is_duplicate=issue.is_duplicate,
        parent_issue_id=issue.parent_issue_id,
        city=issue.city,
        locality=issue.locality,
        full_address=issue.full_address,
        geo_status=geo_status,
        sla_hours=issue.sla_hours,
        sla_deadline=issue.sla_deadline,
        created_at=issue.created_at,
        updated_at=issue.updated_at,
    )
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
async def get_issue_with_relations(db: AsyncSession, issue_id: UUID) -> Issue | None:
    """Fetch one Issue with images and classification eagerly loaded,
    or None if it doesn't exist."""
    stmt = (
        select(Issue)
        .where(Issue.id == issue_id)
        .options(selectinload(Issue.images), selectinload(Issue.classification))
    )
    return (await db.execute(stmt)).scalar_one_or_none()
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
async def run_agent_pipeline(db: AsyncSession, issue_id: UUID, image_paths: list[str], description: Optional[str]):
    """Run the full agent pipeline for a new issue, reporting progress to
    an in-memory flow tracker (consumed by the SSE /flow endpoint).

    Stages: Vision -> GeoDeduplicate -> (if not duplicate) Priority ->
    Routing -> Notification. Zero vision detections short-circuits into a
    "pending_confirmation" state awaiting the user's manual confirmation.

    The tracker is always removed in ``finally``; on failure the error is
    published via ``error_flow`` and re-raised to the caller.
    """
    tracker = create_flow_tracker(issue_id)

    try:
        await tracker.start_step("VisionAgent")
        vision = VisionAgent(db)
        vision_result = await vision.process_issue(issue_id, image_paths, description)

        detection_count = len(vision_result.detections)

        if detection_count == 0:
            # Nothing detected: don't route automatically — park the issue
            # until the reporter confirms they still want to submit it.
            await tracker.complete_step(
                "VisionAgent",
                decision="No issues detected",
                reasoning="0 detections - requires manual confirmation",
                result={
                    "detections": 0,
                    "needs_confirmation": True,
                    "annotated_urls": vision_result.annotated_urls,
                }
            )

            issue = await db.get(Issue, issue_id)
            if issue:
                issue.state = "pending_confirmation"
                issue.validation_source = "pending_manual"
                issue.validation_reason = "No issues detected by AI - awaiting user confirmation"
                await db.flush()

            final_result = {
                "issue_id": str(issue_id),
                "state": "pending_confirmation",
                "needs_confirmation": True,
                "detections": 0,
                "message": "No issues detected. Please confirm if you want to submit for manual review.",
            }
            await tracker.complete_flow(final_result)
            return

        await tracker.complete_step(
            "VisionAgent",
            decision=f"Detected: {vision_result.primary_category.value if vision_result.primary_category else 'Unknown'}",
            reasoning=f"Confidence: {vision_result.primary_confidence:.2%}, {detection_count} detections",
            result=vision_result.model_dump(mode='json')
        )

        await tracker.start_step("GeoDeduplicateAgent")
        geo = GeoDeduplicateAgent(db)
        geo_result = await geo.process_issue(issue_id)
        await tracker.complete_step(
            "GeoDeduplicateAgent",
            decision=f"Status: {geo_result.get('geo_status', 'unknown')}",
            reasoning=f"Nearby issues: {geo_result.get('nearby_count', 0)}",
            result=geo_result
        )

        if not geo_result.get("is_duplicate"):
            await tracker.start_step("PriorityAgent")
            priority = PriorityAgent(db)
            priority_result = await priority.process_issue(issue_id)
            await tracker.complete_step(
                "PriorityAgent",
                decision=f"Priority: {priority_result.get('priority', 'N/A')}",
                reasoning=priority_result.get("reasoning", ""),
                result=priority_result
            )

            await tracker.start_step("RoutingAgent")
            routing = RoutingAgent(db)
            routing_result = await routing.process_issue(issue_id)
            await tracker.complete_step(
                "RoutingAgent",
                decision=f"Routed to: {routing_result.get('department', 'N/A')}",
                reasoning=f"Assigned: {routing_result.get('member', 'N/A')}, SLA: {routing_result.get('sla_hours', 0)}h",
                result=routing_result
            )

            await tracker.start_step("NotificationAgent")
            notification = NotificationAgent(db)
            await notification.notify_assignment(issue_id)
            await tracker.complete_step(
                "NotificationAgent",
                decision="Notifications queued",
                reasoning="Assignment notification sent to assigned member",
                result={"queued": True}
            )
        else:
            # NOTE(review): "GeoDeduplicateAgent" was already completed just
            # above the if; this second complete_step re-completes the same
            # step with a different decision — confirm the tracker treats
            # this as an update rather than an error.
            await tracker.complete_step(
                "GeoDeduplicateAgent",
                decision="Marked as duplicate",
                reasoning=f"Linked to parent: {geo_result.get('parent_issue_id')}",
                result=geo_result
            )

        # Re-read final state for the terminal SSE frame.
        issue = await get_issue_with_relations(db, issue_id)
        final_result = {
            "issue_id": str(issue_id),
            "state": issue.state if issue else "unknown",
            "priority": issue.priority if issue else None,
            "is_duplicate": issue.is_duplicate if issue else False,
        }
        await tracker.complete_flow(final_result)

    except Exception as e:
        await tracker.error_flow(str(e))
        raise
    finally:
        remove_flow_tracker(issue_id)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
async def run_agent_pipeline_background(issue_id: UUID, image_paths: list[str], description: Optional[str]):
    """BackgroundTasks entry point: run the agent pipeline in a fresh DB session."""
    async with get_db_context() as background_session:
        await run_agent_pipeline(background_session, issue_id, image_paths, description)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
@router.post("", response_model=IssueResponse, status_code=status.HTTP_201_CREATED)
async def create_issue(
    background_tasks: BackgroundTasks,
    images: list[UploadFile] = File(...),
    description: Optional[str] = Form(None),
    latitude: float = Form(...),
    longitude: float = Form(...),
    accuracy_meters: Optional[float] = Form(None),
    platform: str = Form(...),
    device_model: Optional[str] = Form(None),
    authorization: Optional[str] = Form(None),
    db: AsyncSession = Depends(get_db),
):
    """Create a new issue from a multipart report and schedule the agent
    pipeline in the background.

    Fix: the original awaited ``get_issue_with_relations`` twice in a row
    before returning — an identical duplicate DB round trip, removed here.
    """
    user_id = get_user_id_from_form_token(authorization)

    data = IssueCreate(
        description=description,
        latitude=latitude,
        longitude=longitude,
        accuracy_meters=accuracy_meters,
        platform=platform,
        device_model=device_model,
    )

    ingestion = IngestionService(db)
    issue, image_paths = await ingestion.create_issue(data, images, user_id)

    # Register the tracker up front so SSE clients can attach before the
    # pipeline starts, and record the two synchronous pre-pipeline steps.
    tracker = create_flow_tracker(issue.id)

    await tracker.start_step("LocationStep")
    await tracker.complete_step(
        "LocationStep",
        decision="Resolved",
        reasoning=f"{latitude:.4f}, {longitude:.4f}",
        # NOTE(review): city is hard-coded — confirm whether geocoding
        # was meant to populate this.
        result={"city": "Mathura"}
    )

    await tracker.start_step("UploadStep")
    await tracker.complete_step(
        "UploadStep",
        decision="Uploaded",
        reasoning=f"{len(images)} images stored securely",
        result={"count": len(images)}
    )

    background_tasks.add_task(run_agent_pipeline_background, issue.id, image_paths, data.description)

    # Single reload with relations for serialization (was duplicated).
    issue = await get_issue_with_relations(db, issue.id)
    return issue_to_response(issue)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
async def run_remaining_pipeline(db: AsyncSession, issue_id: UUID):
    """Resume the pipeline after the user manually confirmed a
    zero-detection issue: GeoDeduplicate -> Priority, then record the
    Routing step as skipped (the issue goes to the manual triage queue
    instead of being auto-routed).

    Tracker is always removed in ``finally``; errors are published via
    ``error_flow`` and re-raised.
    """
    tracker = create_flow_tracker(issue_id)
    try:
        await tracker.start_step("GeoDeduplicateAgent")
        geo = GeoDeduplicateAgent(db)
        geo_result = await geo.process_issue(issue_id)
        await tracker.complete_step(
            "GeoDeduplicateAgent",
            decision=f"Status: {geo_result.get('geo_status', 'unknown')}",
            reasoning=f"Nearby issues: {geo_result.get('nearby_count', 0)}",
            result=geo_result
        )

        if not geo_result.get("is_duplicate"):
            await tracker.start_step("PriorityAgent")
            priority = PriorityAgent(db)
            priority_result = await priority.process_issue(issue_id)
            await tracker.complete_step(
                "PriorityAgent",
                decision=f"Priority: {priority_result.get('priority', 'N/A')}",
                reasoning=priority_result.get("reasoning", ""),
                result=priority_result
            )

            # Routing is intentionally not executed for manually confirmed
            # issues; the step is recorded as skipped for the flow UI.
            await tracker.start_step("RoutingAgent")
            await tracker.complete_step(
                "RoutingAgent",
                decision="Manual Review Requested",
                reasoning="Skipped automatic routing due to 0 detections/manual confirmation. Sent to triage queue.",
                result={"skipped": True, "queue": "manual_triage"}
            )

        else:
            # NOTE(review): "GeoDeduplicateAgent" was already completed above;
            # this re-completes the same step with a different decision —
            # confirm the tracker treats this as an update.
            await tracker.complete_step(
                "GeoDeduplicateAgent",
                decision="Marked as duplicate",
                reasoning=f"Linked to parent: {geo_result.get('parent_issue_id')}",
                result=geo_result
            )

        # Re-read final state for the terminal SSE frame.
        issue = await get_issue_with_relations(db, issue_id)
        final_result = {
            "issue_id": str(issue_id),
            "state": issue.state if issue else "unknown",
            "priority": issue.priority if issue else None,
            "is_duplicate": issue.is_duplicate if issue else False,
        }
        await tracker.complete_flow(final_result)

    except Exception as e:
        await tracker.error_flow(str(e))
        raise
    finally:
        remove_flow_tracker(issue_id)
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
class ConfirmationBody(BaseModel):
    """Payload for /{issue_id}/confirm: the user's yes/no decision after
    the vision step found zero detections."""
    # True -> resume the pipeline; False -> reject the issue.
    confirmed: bool
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
@router.post("/{issue_id}/confirm", response_model=IssueResponse)
async def confirm_issue(
    issue_id: UUID,
    body: ConfirmationBody,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db),
):
    """Apply the user's decision for a zero-detection issue: confirm to
    resume the remaining pipeline, or decline to reject the report."""
    issue = await get_issue_with_relations(db, issue_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Issue not found")

    if not body.confirmed:
        # User abandoned the report: close it out as rejected.
        issue.state = IssueState.REJECTED
        issue.validation_reason = "User rejected manual confirmation"
        issue.resolution_notes = "User cancelled submission after 0 detections were found"
        await db.flush()

        refreshed = await get_issue_with_relations(db, issue_id)
        return issue_to_response(refreshed)

    # User confirmed: mark reported again and resume the remaining agents
    # in the background.
    issue.state = IssueState.REPORTED
    issue.validation_reason = "Manual confirmation by user (0 detections)"
    await db.flush()

    refreshed = await get_issue_with_relations(db, issue_id)

    background_tasks.add_task(pipeline_wrapper_resume, issue_id)

    return issue_to_response(refreshed)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
from Backend.database.connection import get_db_context
|
| 351 |
+
|
| 352 |
+
async def pipeline_wrapper(issue_id: UUID, image_paths: list[str], description: Optional[str]):
    """Background-task shim: run the full agent pipeline in its own session.

    Fix: failures are now logged instead of silently discarded
    (`except Exception: pass` hid every pipeline crash). The SSE stream is
    still informed via ``error_flow`` inside ``run_agent_pipeline``.
    """
    try:
        async with get_db_context() as db:
            await run_agent_pipeline(db, issue_id, image_paths, description)
    except Exception:
        logger.exception("Agent pipeline failed for issue %s", issue_id)
|
| 358 |
+
|
| 359 |
+
async def pipeline_wrapper_resume(issue_id: UUID):
    """Background-task shim: resume the pipeline after manual confirmation.

    Fix: failures are now logged instead of silently discarded
    (`except Exception: pass`).
    """
    try:
        async with get_db_context() as db:
            await run_remaining_pipeline(db, issue_id)
    except Exception:
        logger.exception("Resumed pipeline failed for issue %s", issue_id)
|
| 365 |
+
|
| 366 |
+
@router.post("/stream", status_code=status.HTTP_201_CREATED)
async def create_issue_with_stream(
    background_tasks: BackgroundTasks,
    images: list[UploadFile] = File(...),
    description: Optional[str] = Form(None),
    latitude: float = Form(...),
    longitude: float = Form(...),
    accuracy_meters: Optional[float] = Form(None),
    platform: str = Form(...),
    device_model: Optional[str] = Form(None),
    authorization: Optional[str] = Form(None),
    db: AsyncSession = Depends(get_db),
):
    """Create an issue and launch the agent pipeline in the background.

    Returns immediately with the new issue id and an SSE stream URL the
    client can follow for live pipeline progress.
    """
    user_id = get_user_id_from_form_token(authorization)
    logger.info(f"[/stream] Creating issue - user_id: {user_id}, authorization_present: {bool(authorization)}")

    data = IssueCreate(
        description=description,
        latitude=latitude,
        longitude=longitude,
        accuracy_meters=accuracy_meters,
        platform=platform,
        device_model=device_model,
    )

    ingestion = IngestionService(db)
    issue, image_paths = await ingestion.create_issue(data, images, user_id)
    logger.info(f"[/stream] Issue created: {issue.id} with user_id: {issue.user_id}")

    # Commit before the background task starts so it can see the row in a
    # fresh session.
    await db.commit()

    # Register the SSE flow tracker (called for its side effect only; the
    # returned tracker object is not needed here).
    create_flow_tracker(issue.id)

    background_tasks.add_task(pipeline_wrapper, issue.id, image_paths, data.description)

    return {
        "issue_id": str(issue.id),
        "stream_url": f"/flow/flow/{issue.id}",
        "message": "Issue created. Pipeline started in background.",
    }
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
@router.post("/{issue_id}/process")
async def process_issue_pipeline(
    issue_id: UUID,
    db: AsyncSession = Depends(get_db),
):
    """Synchronously run the full agent pipeline for an existing issue."""
    issue = await get_issue_with_relations(db, issue_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Issue not found")

    paths = [image.file_path for image in issue.images]
    await run_agent_pipeline(db, issue_id, paths, issue.description)

    # Re-read so the response carries the pipeline's updates.
    refreshed = await get_issue_with_relations(db, issue_id)
    return issue_to_response(refreshed)
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
@router.get("/{issue_id}", response_model=IssueResponse)
async def get_issue(
    issue_id: UUID,
    db: AsyncSession = Depends(get_db),
):
    """Fetch a single issue (with its related rows) by id, or 404."""
    found = await get_issue_with_relations(db, issue_id)
    if found is None:
        raise HTTPException(status_code=404, detail="Issue not found")
    return issue_to_response(found)
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
@router.patch("/{issue_id}/resolve")
async def resolve_issue(
    issue_id: UUID,
    resolution_notes: Optional[str] = Form(None),
    db: AsyncSession = Depends(get_db),
):
    """Mark an issue resolved and release the assignee's workload slot."""
    issue = await db.get(Issue, issue_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Issue not found")

    from datetime import datetime

    issue.state = "resolved"
    issue.resolved_at = datetime.utcnow()
    issue.resolution_notes = resolution_notes

    # Free one workload unit on the assigned member, never below zero.
    if issue.assigned_member_id:
        from Backend.database.models import Member

        assignee = await db.get(Member, issue.assigned_member_id)
        if assignee and assignee.current_workload > 0:
            assignee.current_workload -= 1

    await db.flush()

    refreshed = await get_issue_with_relations(db, issue_id)
    return issue_to_response(refreshed)
|
| 464 |
+
|
| 465 |
+
|
| 466 |
+
@router.get("", response_model=IssueListResponse)
async def list_issues(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    state: Optional[IssueState] = None,
    priority: Optional[int] = Query(None, ge=1, le=4),
    department_id: Optional[UUID] = None,
    is_duplicate: Optional[bool] = None,
    user_id: Optional[str] = Query(None),
    db: AsyncSession = Depends(get_db),
):
    """List issues with optional filters and pagination (newest first).

    All supplied filters are AND-ed together. ``total`` counts all rows
    matching the filters, independent of pagination.
    """
    # Collect the filter conditions once so the data query and the count
    # query can never drift apart (previously each filter was applied to
    # both queries by hand, duplicating every condition).
    conditions = []
    if state:
        conditions.append(Issue.state == state.value)
    if priority:
        conditions.append(Issue.priority == priority)
    if department_id:
        conditions.append(Issue.department_id == department_id)
    if is_duplicate is not None:
        conditions.append(Issue.is_duplicate == is_duplicate)
    if user_id:
        conditions.append(Issue.user_id == user_id)

    query = (
        select(Issue)
        .options(selectinload(Issue.images), selectinload(Issue.classification))
        .order_by(Issue.created_at.desc())
    )
    count_query = select(func.count(Issue.id))
    if conditions:
        # Select.where AND-s multiple criteria together.
        query = query.where(*conditions)
        count_query = count_query.where(*conditions)

    offset = (page - 1) * page_size
    query = query.offset(offset).limit(page_size)

    result = await db.execute(query)
    issues = result.scalars().all()

    count_result = await db.execute(count_query)
    total = count_result.scalar() or 0

    return IssueListResponse(
        items=[issue_to_response(issue) for issue in issues],
        total=total,
        page=page,
        page_size=page_size,
    )
|
Backend/api/routes/worker.py
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
from uuid import UUID
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form
|
| 5 |
+
from fastapi.security import OAuth2PasswordBearer
|
| 6 |
+
from pydantic import BaseModel
|
| 7 |
+
from sqlalchemy import select
|
| 8 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 9 |
+
from sqlalchemy.orm import selectinload
|
| 10 |
+
import jwt
|
| 11 |
+
from jwt import PyJWTError
|
| 12 |
+
|
| 13 |
+
from Backend.database.connection import get_db
|
| 14 |
+
from Backend.database.models import Issue, Member
|
| 15 |
+
from Backend.core.logging import get_logger
|
| 16 |
+
from Backend.core.config import settings
|
| 17 |
+
from Backend.utils.storage import save_upload, get_upload_url
|
| 18 |
+
|
| 19 |
+
logger = get_logger(__name__)
|
| 20 |
+
router = APIRouter()
|
| 21 |
+
|
| 22 |
+
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/admin/login")
|
| 23 |
+
|
| 24 |
+
async def get_current_worker(
    token: str = Depends(oauth2_scheme),
    db: AsyncSession = Depends(get_db)
) -> Member:
    """Resolve the bearer token to an active worker (or admin) Member.

    Raises 401 for missing/invalid tokens or unknown/inactive users, and
    403 when the user exists but does not hold a worker-capable role.
    """
    try:
        claims = jwt.decode(token, settings.supabase_jwt_secret, algorithms=["HS256"])
        subject = claims.get("sub")
        if not subject:
            raise HTTPException(status_code=401, detail="Invalid token")

        member = await db.get(Member, UUID(subject))
        if member is None or not member.is_active:
            raise HTTPException(status_code=401, detail="User not found or inactive")

        # Admins may act as workers; everyone else is rejected.
        if member.role not in ["worker", "admin"]:
            raise HTTPException(status_code=403, detail="Not a worker")

        return member
    except PyJWTError:
        # Only JWT decoding errors land here; the HTTPExceptions above
        # pass straight through.
        raise HTTPException(status_code=401, detail="Invalid token")
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class TaskResponse(BaseModel):
    """Worker-facing view of an assigned issue ("task")."""

    id: UUID
    description: Optional[str]
    priority: Optional[int]
    state: str
    city: Optional[str]
    locality: Optional[str]
    full_address: Optional[str]
    latitude: float
    longitude: float
    # Public URLs for the first report image and its annotated variant.
    image_url: Optional[str]
    annotated_url: Optional[str]
    created_at: datetime
    sla_deadline: Optional[datetime]
    category: Optional[str] = None
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@router.get("/tasks", response_model=list[TaskResponse])
async def get_worker_tasks(
    db: AsyncSession = Depends(get_db),
    current_worker: Member = Depends(get_current_worker),
):
    """Return active and recently-resolved tasks assigned to this worker.

    Ordered by priority (unset priorities last), then oldest first.
    """
    result = await db.execute(
        select(Issue)
        .options(selectinload(Issue.images), selectinload(Issue.classification))
        .where(Issue.assigned_member_id == current_worker.id)
        .where(Issue.state.in_(["assigned", "in_progress", "pending_verification", "resolved"]))
        .order_by(Issue.priority.asc().nullslast(), Issue.created_at.asc())
    )
    assigned = result.scalars().all()

    responses: list[TaskResponse] = []
    for item in assigned:
        # Expose the first image, and its annotated variant when present.
        primary_url = None
        overlay_url = None
        if item.images:
            first = item.images[0]
            primary_url = get_upload_url(first.file_path)
            if first.annotated_path:
                overlay_url = get_upload_url(first.annotated_path)

        responses.append(TaskResponse(
            id=item.id,
            description=item.description,
            priority=item.priority,
            state=item.state,
            city=item.city,
            locality=item.locality,
            full_address=item.full_address,
            latitude=item.latitude,
            longitude=item.longitude,
            image_url=primary_url,
            annotated_url=overlay_url,
            created_at=item.created_at,
            sla_deadline=item.sla_deadline,
            category=item.classification.primary_category if item.classification else None,
        ))

    return responses
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
@router.post("/tasks/{task_id}/start")
async def start_task(
    task_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_worker: Member = Depends(get_current_worker),
):
    """Move one of the caller's assigned tasks into ``in_progress``."""
    issue = await db.get(Issue, task_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Task not found")
    if issue.assigned_member_id != current_worker.id:
        raise HTTPException(status_code=403, detail="Not assigned to this task")

    issue.state = "in_progress"
    await db.commit()

    logger.info(f"Worker {current_worker.id} started task {task_id}")
    return {"status": "started"}
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
@router.post("/tasks/{task_id}/complete")
async def complete_task(
    task_id: UUID,
    notes: Optional[str] = Form(None),
    proof_image: UploadFile = File(...),
    db: AsyncSession = Depends(get_db),
    current_worker: Member = Depends(get_current_worker),
):
    """Submit proof of completion; the task moves to pending_verification."""
    issue = await db.get(Issue, task_id)
    if issue is None:
        raise HTTPException(status_code=404, detail="Task not found")
    if issue.assigned_member_id != current_worker.id:
        raise HTTPException(status_code=403, detail="Not assigned to this task")

    # Store the uploaded proof photo under a per-task prefix.
    proof_path = await save_upload(proof_image, f"proofs/{task_id}")

    issue.state = "pending_verification"
    issue.proof_image_path = proof_path
    issue.resolution_notes = notes
    issue.resolved_at = datetime.utcnow()

    await db.commit()

    logger.info(f"Worker {current_worker.id} completed task {task_id}")

    return {
        "status": "completed",
        "proof_url": get_upload_url(proof_path),
    }
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
@router.get("/tasks/{task_id}")
async def get_task_detail(
    task_id: UUID,
    db: AsyncSession = Depends(get_db),
    current_worker: Member = Depends(get_current_worker),
):
    """Full detail view of one task, restricted to its assignee."""
    result = await db.execute(
        select(Issue)
        .options(selectinload(Issue.images), selectinload(Issue.classification))
        .where(Issue.id == task_id)
    )
    issue = result.scalar_one_or_none()

    if issue is None:
        raise HTTPException(status_code=404, detail="Task not found")
    if issue.assigned_member_id != current_worker.id:
        raise HTTPException(status_code=403, detail="Not assigned to this task")

    # First report image plus its annotated variant, when present.
    primary_url = None
    overlay_url = None
    if issue.images:
        first = issue.images[0]
        primary_url = get_upload_url(first.file_path)
        if first.annotated_path:
            overlay_url = get_upload_url(first.annotated_path)

    return {
        "id": str(issue.id),
        "description": issue.description,
        "priority": issue.priority,
        "state": issue.state,
        "city": issue.city,
        "locality": issue.locality,
        "full_address": issue.full_address,
        "latitude": issue.latitude,
        "longitude": issue.longitude,
        "image_url": primary_url,
        "annotated_url": overlay_url,
        "created_at": issue.created_at,
        "sla_deadline": issue.sla_deadline,
        "category": issue.classification.primary_category if issue.classification else None,
        "proof_image_url": get_upload_url(issue.proof_image_path) if issue.proof_image_path else None,
        "resolution_notes": issue.resolution_notes,
        "resolved_at": issue.resolved_at,
    }
|
Backend/core/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .config import settings
|
| 2 |
+
from .schemas import IssuePacket, IssueState, ClassificationResult, PriorityLevel, IssueResponse
|
| 3 |
+
from .events import EventBus, Event, IssueCreated, IssueClassified
|
| 4 |
+
from .logging import get_logger, setup_logging
|
Backend/core/auth.py
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
from dataclasses import dataclass
|
| 3 |
+
from fastapi import Depends, HTTPException, status, Request
|
| 4 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 5 |
+
import jwt
|
| 6 |
+
from jwt.exceptions import InvalidTokenError
|
| 7 |
+
|
| 8 |
+
from Backend.core.config import settings
|
| 9 |
+
from Backend.core.logging import get_logger
|
| 10 |
+
|
| 11 |
+
logger = get_logger(__name__)
|
| 12 |
+
|
| 13 |
+
security = HTTPBearer(auto_error=False)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@dataclass
class AuthenticatedUser:
    """Identity extracted from a verified Supabase JWT."""

    # Supabase user id (the token's "sub" claim).
    id: str
    email: Optional[str] = None
    # Defaults to the least-privileged role when the claim is absent.
    role: str = "user"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def verify_jwt_token(token: str) -> dict:
    """Decode and verify a Supabase access token.

    Verification uses HS256 with the shared JWT secret and requires the
    "authenticated" audience. Raises HTTPException(401) on any invalid or
    expired token.
    """
    try:
        return jwt.decode(
            token,
            settings.supabase_jwt_secret,
            algorithms=["HS256"],
            audience="authenticated",
        )
    except InvalidTokenError as e:
        logger.warning(f"JWT verification failed: {e}")
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> AuthenticatedUser:
    """FastAPI dependency: require and verify a bearer token.

    Raises 401 when no credentials are supplied or the token fails
    verification.
    """
    if not credentials:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Authentication required",
            headers={"WWW-Authenticate": "Bearer"},
        )

    claims = verify_jwt_token(credentials.credentials)
    return AuthenticatedUser(
        id=claims.get("sub", ""),
        email=claims.get("email"),
        role=claims.get("role", "user"),
    )
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
async def get_optional_user(
    credentials: HTTPAuthorizationCredentials = Depends(security),
) -> Optional[AuthenticatedUser]:
    """FastAPI dependency: verify a bearer token if present, else None.

    Invalid tokens are treated the same as missing ones — the request
    proceeds anonymously rather than failing.
    """
    if not credentials:
        return None

    try:
        claims = verify_jwt_token(credentials.credentials)
    except HTTPException:
        return None

    return AuthenticatedUser(
        id=claims.get("sub", ""),
        email=claims.get("email"),
        role=claims.get("role", "user"),
    )
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def get_user_id_from_form_token(authorization: Optional[str]) -> Optional[str]:
    """Best-effort extraction of the Supabase user id from a form-supplied
    ``Authorization`` header value.

    Returns the token's ``sub`` claim, or None when the header is missing,
    malformed, or the token cannot be decoded.
    """
    if not authorization:
        logger.debug("No authorization header provided for form token extraction")
        return None
    if not authorization.startswith("Bearer "):
        logger.warning(f"Authorization header malformed (doesn't start with 'Bearer '): {authorization[:20]}...")
        return None
    try:
        # Strip only the leading scheme. str.replace("Bearer ", "") would
        # also mangle any later occurrence of that substring in the token.
        token = authorization.removeprefix("Bearer ")

        unverified_header = jwt.get_unverified_header(token)
        logger.info(f"JWT header: alg={unverified_header.get('alg')}, typ={unverified_header.get('typ')}")

        try:
            payload = jwt.decode(
                token,
                settings.supabase_jwt_secret,
                algorithms=["HS256"],
                audience="authenticated",
            )
        except jwt.exceptions.InvalidAlgorithmError:
            # SECURITY: this fallback accepts the token WITHOUT signature
            # verification, so the returned user id is spoofable by anyone
            # who can POST to the endpoint. It exists for non-HS256 (e.g.
            # ES256) Supabase tokens; replace with proper JWKS-based
            # verification rather than trusting this path.
            logger.warning("HS256 verification failed, falling back to unverified decode (Supabase already authenticated user)")
            payload = jwt.decode(token, options={"verify_signature": False}, audience="authenticated")

        user_id = payload.get("sub")
        email = payload.get("email")
        logger.info(f"Successfully extracted user_id from form token: {user_id} (email: {email})")
        return user_id
    except InvalidTokenError as e:
        logger.warning(f"JWT decode failed for form token: {e}")
        return None
|
Backend/core/config.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from functools import lru_cache
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from pydantic_settings import BaseSettings, SettingsConfigDict
|
| 5 |
+
from pydantic import field_validator
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / ``.env``."""

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore"
    )

    # --- Database ---
    database_url: str

    # --- Supabase ---
    supabase_url: str
    supabase_key: str
    supabase_jwt_secret: str
    supabase_bucket: str = "city-issues"

    # Optional S3-compatible access to the Supabase storage bucket.
    supabase_s3_endpoint: Optional[str] = None
    supabase_s3_region: str = "ap-southeast-1"
    supabase_s3_access_key: Optional[str] = None
    supabase_s3_secret_key: Optional[str] = None

    # --- Vision model ---
    model_path: Path = Path("Backend/agents/vision/model.pt")
    model_confidence_threshold: float = 0.25
    model_input_size: int = 512

    local_temp_dir: Path = Path("static/temp")

    # --- SLA windows, in hours, per priority level ---
    sla_critical_hours: int = 4
    sla_high_hours: int = 12
    sla_medium_hours: int = 48
    sla_low_hours: int = 168

    # --- API server ---
    api_host: str = "0.0.0.0"
    api_port: int = 8000
    api_workers: int = 4

    # --- Upload limits ---
    max_upload_size_mb: int = 10
    allowed_extensions: set[str] = {"jpg", "jpeg", "png", "webp"}

    # Radius used by the geo-deduplication agent.
    duplicate_radius_meters: float = 50.0

    debug: bool = False

    # --- Third-party integrations / email identities ---
    resend_api_key: Optional[str] = None
    google_client_id: Optional[str] = None
    gemini_api_key: Optional[str] = None
    google_client_secret: Optional[str] = None
    project_id: Optional[str] = None
    sender_email: str = "noreply@urbanlens.city"
    admin_email: str = "admin@urbanlens.city"

    frontend_url: Optional[str] = None

    # --- Auth / CORS ---
    cors_origins: list[str] = []
    jwt_algorithm: str = "HS256"
    jwt_expire_hours: int = 24

    @field_validator("database_url")
    @classmethod
    def validate_database_url(cls, v: str) -> str:
        """Reject connection strings that are not PostgreSQL DSNs."""
        if not v.startswith("postgresql"):
            raise ValueError("DATABASE_URL must be a PostgreSQL connection string")
        return v

    @field_validator("supabase_jwt_secret")
    @classmethod
    def validate_jwt_secret(cls, v: str) -> str:
        """Require a secret long enough for safe HS256 use."""
        if len(v) < 32:
            raise ValueError("SUPABASE_JWT_SECRET must be at least 32 characters")
        return v
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
@lru_cache
def get_settings() -> Settings:
    """Construct Settings once; subsequent calls reuse the cached object."""
    return Settings()


# Module-level singleton imported throughout the backend.
settings = get_settings()
|
Backend/core/events.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from collections import defaultdict
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from typing import Any, Callable, Coroutine, Optional, TypeVar
|
| 5 |
+
from uuid import UUID, uuid4
|
| 6 |
+
from pydantic import BaseModel, Field
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Event(BaseModel):
    """Base class for all pipeline events carried on the EventBus."""

    event_id: UUID = Field(default_factory=uuid4)
    issue_id: UUID
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    metadata: dict[str, Any] = Field(default_factory=dict)

    @property
    def event_type(self) -> str:
        """Dispatch key for handlers: simply the concrete subclass name."""
        return self.__class__.__name__
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class IssueCreated(Event):
    """Published when a new issue (images + location) has been ingested."""

    image_paths: list[str]
    latitude: float
    longitude: float
    description: Optional[str] = None


class IssueClassified(Event):
    """Published after the vision agent classifies an issue."""

    category: str
    confidence: float
    detections_count: int


class IssuePrioritized(Event):
    """Published after a priority level has been assigned."""

    priority: int
    reasoning: str


class IssueAssigned(Event):
    """Published when an issue is routed to a department/ward with an SLA."""

    department: str
    ward: str
    sla_deadline: datetime


class IssueEscalated(Event):
    """Published when an issue moves up an escalation level."""

    from_level: int
    to_level: int
    reason: str


class IssueResolved(Event):
    """Published when an issue is marked resolved."""

    resolved_by: str
    resolution_notes: str
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
E = TypeVar("E", bound=Event)
|
| 56 |
+
Handler = Callable[[E], Coroutine[Any, Any, None]]
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class EventBus:
    """Process-wide singleton pub/sub bus for pipeline events.

    Handlers register per event-class name; published events are queued
    and dispatched by a background task started via ``start()``.

    Removed: the unused ``_lock`` class attribute — an ``asyncio.Lock``
    created at import time that nothing ever acquired (and which, on older
    Pythons, bound itself to whatever loop existed at import).
    """

    _instance: Optional["EventBus"] = None

    def __new__(cls) -> "EventBus":
        # Lazy singleton; state is initialised exactly once on first use.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance._handlers = defaultdict(list)
            cls._instance._queue = asyncio.Queue()
            cls._instance._running = False
        return cls._instance

    def subscribe(self, event_type: type[E], handler: Handler[E]) -> None:
        """Register ``handler`` for all events of the given class."""
        self._handlers[event_type.__name__].append(handler)

    async def publish(self, event: Event) -> None:
        """Enqueue an event for asynchronous dispatch."""
        await self._queue.put(event)

    def publish_sync(self, event: Event) -> None:
        """Enqueue from synchronous code already running inside a loop."""
        asyncio.create_task(self._queue.put(event))

    async def start(self) -> None:
        """Start the background dispatch loop (idempotent)."""
        if self._running:
            return
        self._running = True
        asyncio.create_task(self._process_events())

    async def stop(self) -> None:
        """Signal the dispatch loop to exit after its current poll."""
        self._running = False

    async def _process_events(self) -> None:
        # Poll with a timeout so stop() is honoured within ~1 second.
        while self._running:
            try:
                event = await asyncio.wait_for(self._queue.get(), timeout=1.0)
                handlers = self._handlers.get(event.event_type, [])
                if handlers:
                    # return_exceptions keeps one failing handler from
                    # cancelling its siblings.
                    await asyncio.gather(
                        *[handler(event) for handler in handlers],
                        return_exceptions=True
                    )
                self._queue.task_done()
            except asyncio.TimeoutError:
                continue
            except Exception:
                # Deliberate best-effort: the bus must never die. Errors in
                # dispatch are dropped; consider logging here.
                continue
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
event_bus = EventBus()
|
Backend/core/flow_tracker.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import json
|
| 3 |
+
from datetime import datetime
|
| 4 |
+
from typing import Optional, Callable, Any
|
| 5 |
+
from uuid import UUID
|
| 6 |
+
from dataclasses import dataclass, field, asdict
|
| 7 |
+
|
| 8 |
+
from Backend.core.logging import get_logger
|
| 9 |
+
|
| 10 |
+
logger = get_logger(__name__)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@dataclass
class AgentStep:
    """Progress record for a single agent's run within a pipeline flow."""

    agent_name: str
    # "running" while in flight, then "completed" or "error".
    status: str
    # ISO-8601 timestamps kept as strings for easy JSON transport.
    started_at: str
    completed_at: Optional[str] = None
    duration_ms: Optional[float] = None
    decision: Optional[str] = None
    reasoning: Optional[str] = None
    result: Optional[dict] = None
    error: Optional[str] = None
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@dataclass
class PipelineFlow:
    """Aggregate progress of one issue's agent pipeline run."""

    issue_id: UUID
    # ISO-8601 timestamp of pipeline start.
    started_at: str
    # "running" until the pipeline finishes or fails.
    status: str = "running"
    completed_at: Optional[str] = None
    total_duration_ms: Optional[float] = None
    steps: list[AgentStep] = field(default_factory=list)
    final_result: Optional[dict] = None

    def to_dict(self) -> dict:
        """Return a JSON-serialisable view (UUID rendered as a string)."""
        return {
            "issue_id": str(self.issue_id),
            "started_at": self.started_at,
            "status": self.status,
            "completed_at": self.completed_at,
            "total_duration_ms": self.total_duration_ms,
            "steps": [asdict(s) for s in self.steps],
            "final_result": self.final_result,
        }
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class FlowTracker:
    """Tracks one issue's agent-pipeline progress and broadcasts events.

    Each subscriber gets its own asyncio.Queue; subscribing late replays
    events for steps that already ran, so clients never miss history.
    NOTE(review): assumed to be used from a single asyncio event loop.
    """

    def __init__(self, issue_id: UUID):
        self.flow = PipelineFlow(
            issue_id=issue_id,
            started_at=datetime.utcnow().isoformat(),
        )
        self._start_time = datetime.utcnow()
        self._subscribers: list[asyncio.Queue] = []

    def subscribe(self) -> asyncio.Queue:
        """Register and return a new event queue, pre-filled with history."""
        queue = asyncio.Queue()

        # Replay already-recorded steps so late subscribers catch up.
        # enumerate() replaces the original O(n^2) list.index() call and is
        # also correct when the same agent appears more than once.
        for index, step in enumerate(self.flow.steps):
            if step.started_at:
                queue.put_nowait({
                    "type": "step_started",
                    "timestamp": step.started_at,
                    "data": {
                        "agent_name": step.agent_name,
                        "step_index": index,
                    },
                })

            if step.status in ("completed", "error"):
                queue.put_nowait({
                    "type": "step_completed" if step.status == "completed" else "step_error",
                    "timestamp": step.completed_at,
                    "data": {
                        "agent_name": step.agent_name,
                        "status": step.status,
                        "decision": step.decision,
                        "reasoning": step.reasoning,
                        "result": step.result,
                        "error": step.error,
                    },
                })

        self._subscribers.append(queue)
        return queue

    def unsubscribe(self, queue: asyncio.Queue):
        """Detach a subscriber queue; no-op if it was never subscribed."""
        if queue in self._subscribers:
            self._subscribers.remove(queue)

    async def _broadcast(self, event_type: str, data: dict):
        """Push one event message to every subscriber queue."""
        message = {
            "type": event_type,
            "timestamp": datetime.utcnow().isoformat(),
            "data": data,
        }
        for queue in self._subscribers:
            await queue.put(message)

    async def start_step(self, agent_name: str):
        """Record that *agent_name* started running and notify subscribers."""
        step = AgentStep(
            agent_name=agent_name,
            status="running",
            started_at=datetime.utcnow().isoformat(),
        )
        self.flow.steps.append(step)

        await self._broadcast("step_started", {
            "agent_name": agent_name,
            "step_index": len(self.flow.steps) - 1,
        })

        return step

    async def complete_step(
        self,
        agent_name: str,
        decision: str,
        reasoning: str,
        result: Optional[dict] = None,
        error: Optional[str] = None
    ):
        """Finish the currently-running step for *agent_name*.

        Silently does nothing when no matching running step exists.
        """
        step = next(
            (s for s in self.flow.steps if s.agent_name == agent_name and s.status == "running"),
            None,
        )
        if step is None:
            return

        now = datetime.utcnow()
        step.completed_at = now.isoformat()
        step.status = "error" if error else "completed"
        step.decision = decision
        step.reasoning = reasoning
        step.result = result
        step.error = error

        started = datetime.fromisoformat(step.started_at)
        step.duration_ms = (now - started).total_seconds() * 1000

        # Fix: emit "step_error" for failed steps so live events match the
        # history replayed by subscribe(); previously the live broadcast
        # always said "step_completed" even when the step errored.
        await self._broadcast(
            "step_completed" if step.status == "completed" else "step_error",
            {
                "agent_name": agent_name,
                "status": step.status,
                "decision": decision,
                "reasoning": reasoning,
                "duration_ms": step.duration_ms,
                "result": result,
                "error": error,
            },
        )

    async def complete_flow(self, final_result: dict):
        """Mark the whole pipeline as completed and broadcast the summary."""
        now = datetime.utcnow()
        self.flow.completed_at = now.isoformat()
        self.flow.status = "completed"
        self.flow.total_duration_ms = (now - self._start_time).total_seconds() * 1000
        self.flow.final_result = final_result

        await self._broadcast("flow_completed", self.flow.to_dict())

    async def error_flow(self, error: str):
        """Mark the pipeline as failed and broadcast the error plus final state."""
        now = datetime.utcnow()
        self.flow.completed_at = now.isoformat()
        self.flow.status = "error"
        self.flow.total_duration_ms = (now - self._start_time).total_seconds() * 1000

        await self._broadcast("flow_error", {
            "error": error,
            "flow": self.flow.to_dict(),
        })
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
_active_flows: dict[UUID, FlowTracker] = {}
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def get_flow_tracker(issue_id: UUID) -> Optional[FlowTracker]:
|
| 174 |
+
return _active_flows.get(issue_id)
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def create_flow_tracker(issue_id: UUID) -> FlowTracker:
|
| 178 |
+
if issue_id in _active_flows:
|
| 179 |
+
return _active_flows[issue_id]
|
| 180 |
+
|
| 181 |
+
tracker = FlowTracker(issue_id)
|
| 182 |
+
_active_flows[issue_id] = tracker
|
| 183 |
+
return tracker
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
def remove_flow_tracker(issue_id: UUID):
|
| 187 |
+
if issue_id in _active_flows:
|
| 188 |
+
del _active_flows[issue_id]
|
Backend/core/logging.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import sys
|
| 3 |
+
from contextvars import ContextVar
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
from typing import Any, Optional
|
| 6 |
+
from uuid import UUID
|
| 7 |
+
import json
|
| 8 |
+
|
| 9 |
+
# Request-scoped correlation id, attached to every log line when set.
correlation_id: ContextVar[Optional[str]] = ContextVar("correlation_id", default=None)


class JSONFormatter(logging.Formatter):
    """Render each log record as a single JSON line for machine-parseable logs."""

    def format(self, record: logging.LogRecord) -> str:
        payload = {
            "timestamp": datetime.utcnow().isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "correlation_id": correlation_id.get(),
        }

        # Optional structured fields injected via logging `extra=`.
        if hasattr(record, "issue_id"):
            payload["issue_id"] = str(record.issue_id)
        for attr in ("agent", "decision"):
            if hasattr(record, attr):
                payload[attr] = getattr(record, attr)

        if record.exc_info:
            payload["exception"] = self.formatException(record.exc_info)

        return json.dumps(payload)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class AgentLogger(logging.LoggerAdapter):
    """LoggerAdapter that stamps every record with the owning agent's name."""

    def __init__(self, logger: logging.Logger, agent_name: str):
        super().__init__(logger, {"agent": agent_name})

    def process(self, msg: str, kwargs: dict[str, Any]) -> tuple[str, dict[str, Any]]:
        # Merge our agent name into the caller-supplied `extra` (if any).
        kwargs.setdefault("extra", {})["agent"] = self.extra["agent"]
        return msg, kwargs

    def log_decision(
        self,
        issue_id: UUID,
        decision: str,
        reasoning: str,
        level: int = logging.INFO
    ) -> None:
        """Emit one structured decision line for *issue_id* at *level*."""
        message = f"Decision: {decision} | Reasoning: {reasoning}"
        self.log(level, message, extra={"issue_id": issue_id, "decision": decision})
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def setup_logging(debug: bool = False) -> None:
    """Configure root logging to emit JSON lines on stdout.

    Safe to call more than once: existing root handlers are replaced
    rather than stacked, so log lines are never duplicated.
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG if debug else logging.INFO)

    # Fix: calling this twice previously attached a second StreamHandler
    # and duplicated every log line; replace handlers instead of appending.
    root.handlers.clear()
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(JSONFormatter())
    root.addHandler(handler)

    # Quieten chatty framework loggers.
    logging.getLogger("uvicorn.access").setLevel(logging.WARNING)
    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def get_logger(name: str, agent_name: Optional[str] = None) -> logging.Logger | AgentLogger:
    """Return a plain named logger, or an AgentLogger when *agent_name* is given."""
    base = logging.getLogger(name)
    return AgentLogger(base, agent_name) if agent_name else base
|
Backend/core/schemas.py
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
from enum import IntEnum, StrEnum
|
| 3 |
+
from typing import Optional
|
| 4 |
+
from uuid import UUID, uuid4
|
| 5 |
+
from pydantic import BaseModel, Field, field_validator
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class IssueState(StrEnum):
    """Lifecycle states of a reported civic issue.

    Values are the lowercase strings persisted in ``Issue.state``.
    """

    REPORTED = "reported"                          # just submitted, not yet processed
    PENDING_CONFIRMATION = "pending_confirmation"
    VALIDATED = "validated"                        # accepted by the validation pipeline
    ASSIGNED = "assigned"                          # routed to a department member
    IN_PROGRESS = "in_progress"
    PENDING_VERIFICATION = "pending_verification"
    RESOLVED = "resolved"                          # worker marked the issue done
    VERIFIED = "verified"
    CLOSED = "closed"
    ESCALATED = "escalated"                        # presumably set on SLA breach — confirm with escalation agent
    REJECTED = "rejected"
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class PriorityLevel(IntEnum):
    """Issue priority; lower number means more urgent (1 is highest)."""

    CRITICAL = 1
    HIGH = 2
    MEDIUM = 3
    LOW = 4
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class IssueCategory(StrEnum):
|
| 30 |
+
DAMAGED_ROAD = "Damaged Road Issues"
|
| 31 |
+
POTHOLE = "Pothole Issues"
|
| 32 |
+
ILLEGAL_PARKING = "Illegal Parking Issues"
|
| 33 |
+
BROKEN_SIGN = "Broken Road Sign Issues"
|
| 34 |
+
FALLEN_TREE = "Fallen Trees"
|
| 35 |
+
GARBAGE = "Littering/Garbage on Public Places"
|
| 36 |
+
VANDALISM = "Vandalism Issues"
|
| 37 |
+
DEAD_ANIMAL = "Dead Animal Pollution"
|
| 38 |
+
DAMAGED_CONCRETE = "Damaged Concrete Structures"
|
| 39 |
+
DAMAGED_ELECTRIC = "Damaged Electric Wires and Poles"
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
CLASS_ID_TO_CATEGORY = {
|
| 43 |
+
0: IssueCategory.DAMAGED_ROAD,
|
| 44 |
+
1: IssueCategory.POTHOLE,
|
| 45 |
+
2: IssueCategory.ILLEGAL_PARKING,
|
| 46 |
+
3: IssueCategory.BROKEN_SIGN,
|
| 47 |
+
4: IssueCategory.FALLEN_TREE,
|
| 48 |
+
5: IssueCategory.GARBAGE,
|
| 49 |
+
6: IssueCategory.VANDALISM,
|
| 50 |
+
7: IssueCategory.DEAD_ANIMAL,
|
| 51 |
+
8: IssueCategory.DAMAGED_CONCRETE,
|
| 52 |
+
9: IssueCategory.DAMAGED_ELECTRIC,
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class Coordinates(BaseModel):
    """A GPS fix from the reporting device (lat/lon in degrees)."""

    latitude: float = Field(..., ge=-90, le=90)
    longitude: float = Field(..., ge=-180, le=180)
    # Horizontal accuracy radius in meters, when the device reports one.
    accuracy_meters: Optional[float] = Field(None, ge=0)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class DeviceMetadata(BaseModel):
    """Information about the client device that submitted a report."""

    platform: str = Field(..., max_length=50)  # e.g. mobile OS / web — exact values set by clients
    device_model: Optional[str] = Field(None, max_length=100)
    os_version: Optional[str] = Field(None, max_length=50)
    app_version: Optional[str] = Field(None, max_length=20)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class IssuePacket(BaseModel):
    """Raw issue-report payload bundled with location and device metadata."""

    description: Optional[str] = Field(None, max_length=2000)
    coordinates: Coordinates
    device_metadata: DeviceMetadata
    # NOTE(review): naive UTC timestamp (datetime.utcnow), consistent with
    # the rest of the codebase; consider timezone-aware datetimes.
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    @field_validator("description")
    @classmethod
    def clean_description(cls, v: Optional[str]) -> Optional[str]:
        """Trim whitespace and normalize blank descriptions to None.

        Fix: now matches IssueCreate.clean_description — previously this
        validator returned "" for whitespace-only input instead of None.
        """
        if v is None:
            return None
        cleaned = v.strip()
        return cleaned or None
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class DetectionBox(BaseModel):
    """One object detection produced by the vision model."""

    class_id: int    # model class index; see CLASS_ID_TO_CATEGORY
    class_name: str
    confidence: float = Field(..., ge=0, le=1)
    # Bounding-box coordinates; format (xyxy vs xywh) is not shown here —
    # confirm against the vision agent's output.
    bbox: tuple[float, float, float, float]
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class ClassificationResult(BaseModel):
    """Aggregated vision-model output for one issue."""

    issue_id: UUID
    detections: list[DetectionBox]
    primary_category: Optional[IssueCategory] = None  # derived below when not supplied
    primary_confidence: float = 0.0
    annotated_urls: list[str] = []  # pydantic deep-copies mutable defaults per instance
    inference_time_ms: float
    model_version: str = "1.0"

    def model_post_init(self, __context) -> None:
        """After validation: derive primary category/confidence from the
        highest-confidence detection when not explicitly provided."""
        if self.detections and not self.primary_category:
            best = max(self.detections, key=lambda d: d.confidence)
            self.primary_category = CLASS_ID_TO_CATEGORY.get(best.class_id)
            self.primary_confidence = best.confidence
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class IssueCreate(BaseModel):
    """Flat request body for creating an issue via the API."""

    description: Optional[str] = Field(None, max_length=2000)
    latitude: float = Field(..., ge=-90, le=90)
    longitude: float = Field(..., ge=-180, le=180)
    accuracy_meters: Optional[float] = Field(None, ge=0)
    platform: str = Field(..., max_length=50)
    device_model: Optional[str] = Field(None, max_length=100)

    @field_validator("description")
    @classmethod
    def clean_description(cls, v: Optional[str]) -> Optional[str]:
        """Trim whitespace; whitespace-only descriptions become None."""
        if v is None:
            return None
        cleaned = v.strip()
        return cleaned or None
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class AgentOutput(BaseModel):
    """One pipeline agent's recorded decision, for the issue's audit trail."""

    agent: str
    decision: str
    reasoning: Optional[str] = None
    duration_ms: Optional[float] = None
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
class IssueResponse(BaseModel):
    """Full API representation of an issue, aggregated across pipeline agents."""

    # Core report
    id: UUID
    description: Optional[str]
    latitude: float
    longitude: float
    state: IssueState

    # Prioritization
    priority: Optional[PriorityLevel]
    priority_reason: Optional[str] = None

    # Vision classification
    category: Optional[str]
    confidence: Optional[float]
    detections_count: Optional[int] = None
    image_urls: list[str]
    annotated_urls: list[str] = []
    proof_image_url: Optional[str] = None
    validation_source: Optional[str] = None

    # Geo-deduplication
    is_duplicate: bool = False
    parent_issue_id: Optional[UUID] = None
    nearby_count: Optional[int] = None

    # Reverse geocoding
    city: Optional[str] = None
    locality: Optional[str] = None
    full_address: Optional[str] = None
    geo_status: Optional[str] = None

    # Routing / SLA
    department: Optional[str] = None
    assigned_member: Optional[str] = None
    sla_hours: Optional[int] = None
    sla_deadline: Optional[datetime] = None

    # Agent audit trail
    agent_flow: list[AgentOutput] = []

    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction directly from ORM objects.
        from_attributes = True
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class IssueListResponse(BaseModel):
    """Paginated list of issues returned by list endpoints."""

    items: list[IssueResponse]
    total: int      # total matching rows, not just this page
    page: int
    page_size: int
|
| 169 |
+
|
Backend/core/security.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import Request, Response
|
| 2 |
+
from fastapi.responses import JSONResponse
|
| 3 |
+
from starlette.middleware.base import BaseHTTPMiddleware
|
| 4 |
+
from collections import defaultdict
|
| 5 |
+
import time
|
| 6 |
+
import asyncio
|
| 7 |
+
|
| 8 |
+
from Backend.core.logging import get_logger
|
| 9 |
+
|
| 10 |
+
logger = get_logger(__name__)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SecurityHeadersMiddleware(BaseHTTPMiddleware):
    """Attach standard security headers to every response."""

    # Headers applied unconditionally to all responses.
    _STATIC_HEADERS = {
        "X-Content-Type-Options": "nosniff",
        "X-Frame-Options": "DENY",
        "X-XSS-Protection": "1; mode=block",
        "Referrer-Policy": "strict-origin-when-cross-origin",
        "Permissions-Policy": "geolocation=(self), camera=(self)",
    }

    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)

        for header_name, header_value in self._STATIC_HEADERS.items():
            response.headers[header_name] = header_value

        # HSTS only makes sense when the request actually arrived over TLS.
        if request.url.scheme == "https":
            response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains"

        return response
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Naive in-memory, per-IP sliding-window rate limiter.

    Limits each client IP to ``requests_per_minute`` over a 60s window and
    ``burst_limit`` within any 1s window. Counts are per-process and reset
    on restart (acceptable for a single-instance deployment).
    """

    def __init__(self, app, requests_per_minute: int = 60, burst_limit: int = 10):
        super().__init__(app)
        self.requests_per_minute = requests_per_minute
        self.burst_limit = burst_limit
        self.requests = defaultdict(list)  # ip -> timestamps within the last 60s
        self.lock = asyncio.Lock()

    async def dispatch(self, request: Request, call_next):
        client_ip = request.client.host if request.client else "unknown"
        current_time = time.time()

        async with self.lock:
            # Drop timestamps older than the 60s window.
            window = [
                t for t in self.requests[client_ip]
                if current_time - t < 60
            ]
            if window:
                self.requests[client_ip] = window
            else:
                # Fix: delete empty entries so the dict doesn't grow without
                # bound with one (empty) list per IP ever seen.
                self.requests.pop(client_ip, None)

            if len(window) >= self.requests_per_minute:
                logger.warning(f"Rate limit exceeded for {client_ip}")
                return JSONResponse(
                    status_code=429,
                    content={"detail": "Too many requests. Please slow down."},
                    headers={"Retry-After": "60"}
                )

            recent_requests = [t for t in window if current_time - t < 1]
            if len(recent_requests) >= self.burst_limit:
                logger.warning(f"Burst limit exceeded for {client_ip}")
                return JSONResponse(
                    status_code=429,
                    content={"detail": "Too many requests. Please slow down."},
                    headers={"Retry-After": "1"}
                )

            self.requests[client_ip].append(current_time)

        return await call_next(request)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class RequestValidationMiddleware(BaseHTTPMiddleware):
    """Reject requests whose declared body size exceeds MAX_CONTENT_LENGTH."""

    MAX_CONTENT_LENGTH = 50 * 1024 * 1024  # 50 MB

    async def dispatch(self, request: Request, call_next):
        content_length = request.headers.get("content-length")
        if content_length:
            # Fix: a malformed Content-Length previously raised ValueError
            # inside int() and surfaced as a 500; treat it as a bad request.
            try:
                declared = int(content_length)
            except ValueError:
                return JSONResponse(
                    status_code=400,
                    content={"detail": "Invalid Content-Length header"}
                )
            if declared > self.MAX_CONTENT_LENGTH:
                return JSONResponse(
                    status_code=413,
                    content={"detail": "Request entity too large"}
                )

        return await call_next(request)
|
Backend/database/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .connection import engine, async_session_factory, get_db, get_db_context, init_db, close_db
|
| 2 |
+
from .models import Base, Issue, IssueImage, Classification, IssueEvent, Department, Member, Escalation
|
Backend/database/connection.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import asynccontextmanager
|
| 2 |
+
from typing import AsyncGenerator
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
|
| 4 |
+
from sqlalchemy.pool import NullPool
|
| 5 |
+
|
| 6 |
+
from Backend.core.config import settings
|
| 7 |
+
|
| 8 |
+
# Normalize the configured URL to use the asyncpg driver.
database_url = settings.database_url.replace("postgresql://", "postgresql+asyncpg://")

# NullPool plus disabled prepared-statement caches: looks like this targets a
# transaction-mode pooler (e.g. Supabase pgbouncer), where server-side
# prepared statements and persistent pools misbehave — TODO confirm.
engine = create_async_engine(
    database_url,
    poolclass=NullPool,
    echo=False,
    connect_args={
        "statement_cache_size": 0,
        "prepared_statement_cache_size": 0,
    },
)

# Session factory shared by the FastAPI dependency and ad-hoc contexts.
async_session_factory = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,  # keep ORM objects usable after commit
    autocommit=False,
    autoflush=False,
)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency: yield a session, commit on success, roll back on error.

    The commit runs after the request handler finishes; an exception raised
    by the handler (or by the commit itself) triggers a rollback and is
    re-raised to the caller.
    """
    async with async_session_factory() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@asynccontextmanager
async def get_db_context() -> AsyncGenerator[AsyncSession, None]:
    """Async context manager variant of get_db for use outside request scope
    (background tasks, scripts): commit on success, rollback + re-raise on error."""
    async with async_session_factory() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
async def init_db() -> None:
    """Create all ORM tables that don't exist yet (no migrations)."""
    # Local import — presumably avoids a circular import with models; confirm.
    from Backend.database.models import Base
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
async def close_db() -> None:
    """Dispose of the engine and its connections (application shutdown)."""
    await engine.dispose()
|
Backend/database/init_db.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
from sqlalchemy.ext.asyncio import create_async_engine
|
| 4 |
+
from Backend.core.config import settings
|
| 5 |
+
from Backend.database.models import Base
|
| 6 |
+
from Backend.database.seed import seed_data
|
| 7 |
+
|
| 8 |
+
logging.basicConfig(level=logging.INFO)
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
async def init_models():
    """Create any missing tables, then seed initial data.

    Fix: the old docstring and log line claimed tables were dropped and
    recreated, but no drop_all call runs — existing tables are left intact.
    """
    logger.info("Initializing database...")

    # Use the direct connection (5432) instead of the pooled one (6543):
    # DDL through a transaction-mode pooler is unreliable — TODO confirm.
    database_url = settings.database_url.replace("port=6543", "port=5432").replace("postgresql://", "postgresql+asyncpg://")
    engine = create_async_engine(
        database_url,
        echo=True,
        # asyncpg prepared-statement caching is disabled to match the
        # runtime engine configuration.
        connect_args={
            "statement_cache_size": 0,
            "prepared_statement_cache_size": 0,
        }
    )

    async with engine.begin() as conn:
        logger.info("Creating tables (existing tables are left untouched)...")
        await conn.run_sync(Base.metadata.create_all)

    logger.info("Schema initialized. Seeding data...")
    try:
        await seed_data(engine)
        logger.info("Seeding completed successfully!")
    except Exception as e:
        # Best-effort: a failed seed (e.g. data already present) should not
        # abort initialization.
        logger.error(f"Seeding failed: {e}")

    await engine.dispose()
    logger.info("Database initialization finished.")


if __name__ == "__main__":
    asyncio.run(init_models())
|
Backend/database/models.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from datetime import datetime
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from uuid import UUID, uuid4
|
| 4 |
+
from sqlalchemy import Boolean, DateTime, Float, ForeignKey, Integer, String, Text, func
|
| 5 |
+
from sqlalchemy.dialects.postgresql import UUID as PGUUID
|
| 6 |
+
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""
    pass
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Department(Base):
    """A municipal department that handles one or more issue categories."""

    __tablename__ = "departments"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    name: Mapped[str] = mapped_column(String(100), nullable=False, unique=True)
    code: Mapped[str] = mapped_column(String(20), nullable=False, unique=True)  # short unique identifier
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Categories handled by this department, stored as text — presumably a
    # delimited/JSON list; confirm against the seeding code.
    categories: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    default_sla_hours: Mapped[int] = mapped_column(Integer, default=48)
    escalation_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now())

    members: Mapped[list["Member"]] = relationship(back_populates="department", lazy="selectin")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class Member(Base):
    """A department staff member who can be assigned issues."""

    __tablename__ = "members"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    department_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("departments.id", ondelete="SET NULL"), nullable=True, index=True)

    name: Mapped[str] = mapped_column(String(100), nullable=False)
    email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    phone: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
    # Nullable — presumably members may authenticate externally (Supabase); confirm.
    password_hash: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)

    role: Mapped[str] = mapped_column(String(50), default="worker")
    city: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True)
    locality: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)

    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    # Workload counters — looks like they feed load-balanced assignment in
    # the routing agent; confirm.
    current_workload: Mapped[int] = mapped_column(Integer, default=0)
    max_workload: Mapped[int] = mapped_column(Integer, default=10)

    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now())

    department: Mapped[Optional["Department"]] = relationship(back_populates="members")
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class Issue(Base):
    """A reported civic issue and all pipeline-derived state attached to it."""

    __tablename__ = "issues"

    # Core report
    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    user_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True)  # external auth user id
    description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    latitude: Mapped[float] = mapped_column(Float, nullable=False, index=True)
    longitude: Mapped[float] = mapped_column(Float, nullable=False, index=True)
    accuracy_meters: Mapped[Optional[float]] = mapped_column(Float, nullable=True)

    # Lifecycle / prioritization (state values mirror IssueState)
    state: Mapped[str] = mapped_column(String(20), default="reported", index=True)
    priority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True)
    priority_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Validation outcome
    validation_source: Mapped[Optional[str]] = mapped_column(String(20), nullable=True)
    validation_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # Geo-deduplication
    is_duplicate: Mapped[bool] = mapped_column(Boolean, default=False, index=True)
    parent_issue_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id"), nullable=True)  # self-reference for duplicates
    geo_cluster_id: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True)

    # Reporting device
    platform: Mapped[str] = mapped_column(String(50), nullable=False)
    device_model: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)

    # Routing / location
    department_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("departments.id"), nullable=True)
    assigned_member_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("members.id"), nullable=True)
    city: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True)
    locality: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
    full_address: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    # SLA / escalation
    sla_deadline: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)
    sla_hours: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
    escalation_level: Mapped[int] = mapped_column(Integer, default=0)
    escalated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)

    # Resolution
    resolved_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True, index=True)
    resolution_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    proof_image_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True)

    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now())

    # Relationships (selectin-loaded ones are fetched eagerly with the issue)
    images: Mapped[list["IssueImage"]] = relationship(back_populates="issue", lazy="selectin", foreign_keys="IssueImage.issue_id")
    classification: Mapped[Optional["Classification"]] = relationship(back_populates="issue", uselist=False, lazy="selectin")

    department: Mapped[Optional["Department"]] = relationship("Department", lazy="selectin")
    assigned_member: Mapped[Optional["Member"]] = relationship("Member", foreign_keys=[assigned_member_id], lazy="selectin")

    events: Mapped[list["IssueEvent"]] = relationship(back_populates="issue", lazy="noload")  # loaded explicitly when needed
    duplicates: Mapped[list["Issue"]] = relationship(back_populates="parent_issue", foreign_keys=[parent_issue_id])
    parent_issue: Mapped[Optional["Issue"]] = relationship(back_populates="duplicates", remote_side=[id], foreign_keys=[parent_issue_id])
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class IssueImage(Base):
    """An image attached to an issue (original plus optional annotated copy)."""

    __tablename__ = "issue_images"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True)
    file_path: Mapped[str] = mapped_column(String(500), nullable=False)
    # Path of the detection-annotated variant, set after vision processing.
    annotated_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
    original_filename: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())

    issue: Mapped["Issue"] = relationship(back_populates="images", foreign_keys=[issue_id])
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class Classification(Base):
    """Vision-model classification result for an issue; one row per issue."""

    __tablename__ = "classifications"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    # unique=True enforces the one-to-one relationship mapped by Issue.classification.
    issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), unique=True)

    # Top predicted category and its confidence score; indexed for category queries.
    primary_category: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True)
    primary_confidence: Mapped[float] = mapped_column(Float, default=0.0)

    # Full detection list serialized as text (presumably JSON — TODO confirm against the vision agent).
    detections_json: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
    inference_time_ms: Mapped[float] = mapped_column(Float, default=0.0)
    # Version tag of the model that produced this result.
    model_version: Mapped[str] = mapped_column(String(20), default="1.0")

    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())

    issue: Mapped["Issue"] = relationship(back_populates="classification")
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class IssueEvent(Base):
    """Append-only timeline entry recording one agent/system event for an issue."""

    __tablename__ = "issue_events"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True)

    # Event discriminator; indexed so per-type history queries stay cheap.
    event_type: Mapped[str] = mapped_column(String(50), nullable=False, index=True)
    # Name of the agent that emitted the event, if any.
    agent_name: Mapped[Optional[str]] = mapped_column(String(50), nullable=True)
    # Free-form payload serialized as text (presumably JSON — TODO confirm).
    event_data: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True)

    # Issue.events uses lazy="noload"; load events explicitly when needed.
    issue: Mapped["Issue"] = relationship(back_populates="events")
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
class Escalation(Base):
    """Audit record of one escalation step (from_level -> to_level) for an issue."""

    __tablename__ = "escalations"

    id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4)
    issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True)

    # Escalation level transition and the human-readable justification.
    from_level: Mapped[int] = mapped_column(Integer, nullable=False)
    to_level: Mapped[int] = mapped_column(Integer, nullable=False)
    reason: Mapped[str] = mapped_column(Text, nullable=False)

    # "system" for automatic escalations; presumably an actor identifier otherwise — TODO confirm.
    escalated_by: Mapped[str] = mapped_column(String(50), default="system")
    # Recipients notified for this escalation, serialized as text — TODO confirm format (JSON vs CSV).
    notified_emails: Mapped[Optional[str]] = mapped_column(Text, nullable=True)

    created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now())
|
Backend/database/seed.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
import uuid

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession
from sqlalchemy.orm import sessionmaker

from Backend.database.models import Department, Member
|
| 6 |
+
|
| 7 |
+
logger = logging.getLogger(__name__)
|
| 8 |
+
|
| 9 |
+
async def seed_data(engine: AsyncEngine) -> None:
    """Seed baseline reference data: three departments and one officer each.

    Idempotent: if any Department row already exists the function returns
    without inserting, so it is safe to call on every application startup
    (the original version inserted duplicates on each run).

    Args:
        engine: async engine bound to the application database.
    """
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

    async with async_session() as session:
        # Guard against duplicate seeding when the app restarts.
        existing = await session.execute(select(Department.id).limit(1))
        if existing.first() is not None:
            logger.info("Seed data already present; skipping.")
            return

        # Pre-generate ids so member rows can reference their department
        # before anything is flushed.
        pwd_id = uuid.uuid4()
        sanitation_id = uuid.uuid4()
        traffic_id = uuid.uuid4()

        departments = [
            Department(
                id=pwd_id,
                name="Public Works Department",
                code="PWD",
                description="Roads, Potholes, Infrastructure",
                default_sla_hours=48,
                escalation_email="pwd_head@city.gov",
            ),
            Department(
                id=sanitation_id,
                name="Sanitation Department",
                code="SANITATION",
                description="Garbage, Cleaning, Waste",
                default_sla_hours=24,
                escalation_email="sanitation_head@city.gov",
            ),
            Department(
                id=traffic_id,
                name="Traffic Department",
                code="TRAFFIC",
                description="Signals, Signs, Illegal Parking",
                default_sla_hours=12,
                escalation_email="traffic_head@city.gov",
            ),
        ]
        session.add_all(departments)

        members = [
            Member(
                department_id=pwd_id,
                name="Ramesh Kumar",
                email="ramesh.pwd@city.gov",
                role="officer",
                city="New Delhi",
                locality="Connaught Place",
                max_workload=10,
            ),
            Member(
                department_id=sanitation_id,
                name="Suresh Singh",
                email="suresh.sanitation@city.gov",
                role="officer",
                city="New Delhi",
                locality="Karol Bagh",
                max_workload=15,
            ),
            Member(
                department_id=traffic_id,
                name="Priya Sharma",
                email="priya.traffic@city.gov",
                role="officer",
                city="New Delhi",
                locality="Lajpat Nagar",
                max_workload=12,
            ),
        ]
        session.add_all(members)

        await session.commit()
        logger.info("Seeded 3 departments and 3 members.")
|
Backend/main.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import uvicorn
from Backend.core.config import settings

def main():
    """Launch the FastAPI application with uvicorn, using host/port from settings."""
    uvicorn.run(
        # Import string (not an app object) is required for reload to work.
        "Backend.api:app",
        host=settings.api_host,
        port=settings.api_port,
        # NOTE(review): reload=True is a development setting (spawns a file
        # watcher); confirm it is intended for this deployment.
        reload=True,
        # Reload mode supports only a single worker process.
        workers=1,
    )

if __name__ == "__main__":
    main()
|
Backend/orchestration/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .base import BaseAgent
|
Backend/orchestration/base.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from abc import ABC, abstractmethod
from typing import Any, TypeVar
from uuid import UUID

from Backend.core.events import Event, EventBus, event_bus
from Backend.core.logging import AgentLogger, get_logger

# Event subtype a concrete agent handles.
E = TypeVar("E", bound=Event)


class BaseAgent(ABC):
    """Common base for event-driven agents.

    Owns a per-agent structured logger and a reference to the process-wide
    event bus, and defines the async handle() contract subclasses implement.
    """

    def __init__(self, name: str):
        self.name = name
        # Logger is tagged with the agent name so logged decisions are attributable.
        self.logger: AgentLogger = get_logger(f"agent.{name}", agent_name=name)
        # Shared singleton bus imported from Backend.core.events.
        self._event_bus = event_bus

    def subscribe(self, event_type: type[E]) -> None:
        """Register this agent's handle() as a subscriber for event_type."""
        self._event_bus.subscribe(event_type, self.handle)

    @abstractmethod
    async def handle(self, event: E) -> None:
        """Process one event; implemented by each concrete agent."""
        pass

    def log_decision(self, issue_id: UUID, decision: str, reasoning: str) -> None:
        """Record an agent decision (with its reasoning) via the structured logger."""
        self.logger.log_decision(issue_id, decision, reasoning)
|
Backend/requirements.txt
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
fastapi>=0.109.0
|
| 2 |
+
uvicorn[standard]>=0.27.0
|
| 3 |
+
pydantic>=2.5.0
|
| 4 |
+
pydantic-settings>=2.1.0
|
| 5 |
+
email-validator>=2.0.0
|
| 6 |
+
sqlalchemy[asyncio]>=2.0.25
|
| 7 |
+
asyncpg>=0.29.0
|
| 8 |
+
aiofiles>=23.2.1
|
| 9 |
+
aiohttp>=3.9.0
|
| 10 |
+
python-multipart>=0.0.6
|
| 11 |
+
PyJWT>=2.8.0
|
| 12 |
+
bcrypt>=4.1.0
|
| 13 |
+
numpy<2
|
| 14 |
+
opencv-python-headless==4.11.0.86
|
| 15 |
+
torch>=2.1.0,<3
|
| 16 |
+
torchvision>=0.16.0,<1
|
| 17 |
+
ultralytics>=8.1.0
|
| 18 |
+
resend>=2.0.0
|
| 19 |
+
google-generativeai>=0.8.3
|
Backend/services/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .ingestion import IngestionService
|
| 2 |
+
from .vision import VisionService
|
Backend/services/email.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import resend
|
| 2 |
+
from typing import List
|
| 3 |
+
from Backend.core.config import settings
|
| 4 |
+
from Backend.core.logging import get_logger
|
| 5 |
+
|
| 6 |
+
logger = get_logger(__name__)
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class EmailService:
    """Transactional email notifications via the Resend API.

    If no Resend API key is configured, every send becomes a logged no-op
    that returns False.
    """

    def __init__(self):
        self.sender_email = settings.sender_email
        if settings.resend_api_key:
            resend.api_key = settings.resend_api_key
        else:
            logger.warning("Resend API key not configured")

    async def send_email(
        self,
        to: List[str],
        subject: str,
        body: str,
        html: bool = False
    ) -> bool:
        """Send one email per recipient.

        Bug fix vs. the previous version: a failure for one recipient no
        longer aborts the loop, so the remaining recipients still receive
        the email. Returns True only if every recipient succeeded.
        """
        if not settings.resend_api_key:
            logger.warning("Resend API key not configured. Email not sent.")
            logger.info(f"Would send email to {to}: {subject}")
            return False

        all_sent = True
        for recipient in to:
            try:
                params = {
                    "from": self.sender_email,
                    "to": [recipient],
                    "subject": subject,
                }

                # Resend accepts either an html or a text body, not both here.
                if html:
                    params["html"] = body
                else:
                    params["text"] = body

                # NOTE(review): resend.Emails.send is a blocking HTTP call
                # inside an async method; fine for low volume, consider
                # run_in_executor for bulk sends.
                resend.Emails.send(params)
                logger.info(f"Email sent successfully to {recipient}")
            except Exception as e:
                logger.error(f"Failed to send email to {recipient}: {e}")
                all_sent = False

        return all_sent

    async def send_assignment_email(
        self,
        worker_email: str,
        worker_name: str,
        issue_id: str,
        category: str,
        priority: str,
        location: str,
        description: str
    ) -> bool:
        """Notify a field worker that an issue has been assigned to them."""
        subject = f"🔔 New Task Assigned: {category} [{priority}]"

        body = f"""
Hello {worker_name},

You have been assigned a new task in UrbanLens.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}
Priority: {priority}
Location: {location}
Description: {description or 'No description provided'}

NEXT STEPS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
1. Review the issue details in your worker dashboard
2. Navigate to the location
3. Resolve the issue
4. Upload proof of resolution

Thank you for your service!

UrbanLens Team
"Governance at the Speed of Software"
"""

        return await self.send_email([worker_email], subject, body)

    async def send_manual_review_email(
        self,
        issue_id: str,
        reason: str,
        category: str,
        location: str,
        image_url: str
    ) -> bool:
        """Ask the admin team (settings.admin_email) to review an issue manually."""
        subject = f"⚠️ Manual Review Required: {category}"

        body = f"""
Admin Team,

An issue requires manual review in UrbanLens.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}
Location: {location}
Reason: {reason}

Image: {image_url}

ACTION REQUIRED
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Please review this issue in the admin dashboard and take appropriate action.

UrbanLens System
"""

        return await self.send_email([settings.admin_email], subject, body)

    async def send_completion_email(
        self,
        user_email: str,
        issue_id: str,
        category: str,
        location: str,
        resolution_notes: str
    ) -> bool:
        """Tell the reporting citizen their issue has been resolved."""
        subject = f"✅ Your Report Has Been Resolved: {category}"

        body = f"""
Dear Citizen,

Great news! Your reported issue has been resolved.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}
Location: {location}
Resolution: {resolution_notes or 'Issue has been successfully addressed'}

FEEDBACK
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
We value your input! Please confirm if the issue has been resolved by checking the app.

Thank you for making our city better!

UrbanLens Team
"Governance at the Speed of Software"
"""

        return await self.send_email([user_email], subject, body)

    async def send_escalation_email(
        self,
        admin_email: str,
        issue_id: str,
        category: str,
        priority: str,
        reason: str,
        escalation_level: int
    ) -> bool:
        """Alert an admin that an issue has been escalated to a higher level."""
        subject = f"🚨 ESCALATION LEVEL {escalation_level}: {category}"

        body = f"""
URGENT: Issue Escalation

An issue has been escalated and requires immediate attention.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}
Priority: {priority}
Escalation Level: {escalation_level}
Reason: {reason}

IMMEDIATE ACTION REQUIRED
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Please review and address this issue immediately in the admin dashboard.

UrbanLens System
"""

        return await self.send_email([admin_email], subject, body)

    async def send_confirmation_request_email(
        self,
        user_email: str,
        issue_id: str,
        category: str,
        confirmation_link: str
    ) -> bool:
        """Ask the reporter to confirm a resolution via the given link."""
        subject = f"🔍 Please Confirm: Is This Issue Resolved?"

        body = f"""
Dear Citizen,

Your reported issue has been marked as resolved by our team.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}

CONFIRMATION NEEDED
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Please confirm if the issue has been properly resolved:
{confirmation_link}

Your feedback helps us improve our service quality.

Thank you!

UrbanLens Team
"""

        return await self.send_email([user_email], subject, body)

    async def send_issue_accepted_email(
        self,
        user_email: str,
        issue_id: str,
        category: str,
        priority: str,
        location: str,
        accepted_by: str = "automatic",
        tracking_url: str | None = None
    ) -> bool:
        """Confirm to the reporter that their issue was accepted.

        accepted_by: "automatic" or anything else (treated as manual review).
        tracking_url: optional link shown to the citizen; falls back to a
        generic app hint when None.
        """
        acceptance_type = "automatically" if accepted_by == "automatic" else "manually by our team"
        subject = f"✓ Your Report Has Been Accepted: {category}"

        body = f"""
Dear Citizen,

Thank you for reporting an issue! Your report has been accepted {acceptance_type}.

ISSUE DETAILS
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Issue ID: {issue_id}
Category: {category}
Priority: {priority}
Location: {location}

WHAT HAPPENS NEXT
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
1. Your issue has been assigned to the appropriate department
2. A field worker will be dispatched to address it
3. You will receive updates on the progress
4. Once resolved, you'll get a confirmation notification

TRACK YOUR REPORT
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
{tracking_url or 'Check the UrbanLens app for real-time updates'}

Thank you for helping make our city better!

UrbanLens Team
"Governance at the Speed of Software"
"""

        return await self.send_email([user_email], subject, body)


# Module-level singleton used by callers across the backend.
email_service = EmailService()
|
Backend/services/geocoding.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import aiohttp
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
|
| 5 |
+
from Backend.core.logging import get_logger
|
| 6 |
+
|
| 7 |
+
logger = get_logger(__name__)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@dataclass
class LocationInfo:
    """Structured result of a reverse-geocode lookup; every field is optional."""
    city: Optional[str] = None
    locality: Optional[str] = None
    district: Optional[str] = None
    state: Optional[str] = None
    country: Optional[str] = None
    pincode: Optional[str] = None
    full_address: Optional[str] = None


class GeocodingService:
    """Reverse geocoding backed by the public OpenStreetMap Nominatim endpoint."""

    NOMINATIM_URL = "https://nominatim.openstreetmap.org/reverse"

    async def reverse_geocode(self, latitude: float, longitude: float) -> LocationInfo:
        """Resolve coordinates to an address.

        Returns an empty LocationInfo on any HTTP error, timeout, or
        unexpected exception rather than raising.
        """
        query = {
            "lat": latitude,
            "lon": longitude,
            "format": "json",
            "addressdetails": 1,
            "zoom": 18,
        }
        # Nominatim's usage policy requires an identifying User-Agent.
        request_headers = {"User-Agent": "CityIssueResolutionAgent/1.0"}

        try:
            async with aiohttp.ClientSession() as http:
                async with http.get(
                    self.NOMINATIM_URL,
                    params=query,
                    headers=request_headers,
                    timeout=aiohttp.ClientTimeout(total=10),
                ) as response:
                    if response.status != 200:
                        logger.warning(f"Geocoding failed: {response.status}")
                        return LocationInfo()
                    payload = await response.json()
                    return self._parse_response(payload)
        except Exception as e:
            logger.error(f"Geocoding error: {e}")
            return LocationInfo()

    def _parse_response(self, data: dict) -> LocationInfo:
        """Map Nominatim's address payload onto LocationInfo fields.

        Nominatim uses different keys depending on place type, so several
        fields fall back through a chain of candidate keys.
        """
        address = data.get("address", {})

        return LocationInfo(
            city=(
                address.get("city")
                or address.get("town")
                or address.get("municipality")
                or address.get("village")
                or address.get("suburb")
            ),
            locality=(
                address.get("suburb")
                or address.get("neighbourhood")
                or address.get("quarter")
                or address.get("borough")
            ),
            district=(
                address.get("county")
                or address.get("district")
                or address.get("state_district")
            ),
            state=address.get("state"),
            country=address.get("country"),
            pincode=address.get("postcode"),
            full_address=data.get("display_name"),
        )

    async def get_city_from_coordinates(self, latitude: float, longitude: float) -> Optional[str]:
        """Convenience helper: best available city-like name, or None."""
        location = await self.reverse_geocode(latitude, longitude)
        return location.city or location.locality or location.district


geocoding_service = GeocodingService()
|
Backend/services/ingestion.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from uuid import UUID
|
| 2 |
+
from fastapi import UploadFile
|
| 3 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 4 |
+
|
| 5 |
+
from Backend.core.events import event_bus, IssueCreated
|
| 6 |
+
from Backend.core.logging import get_logger
|
| 7 |
+
from Backend.core.schemas import IssueCreate, IssueState
|
| 8 |
+
from Backend.database.models import Issue, IssueImage
|
| 9 |
+
from Backend.services.geocoding import geocoding_service
|
| 10 |
+
from Backend.utils.storage import save_upload, get_upload_url, validate_file_extension, validate_file_size
|
| 11 |
+
|
| 12 |
+
logger = get_logger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class IngestionService:
    """Creates Issue rows from citizen reports.

    Validates uploads, reverse-geocodes the location, persists the issue and
    its images, and publishes an IssueCreated event for downstream subscribers.
    """

    def __init__(self, db: AsyncSession):
        # Session is owned by the caller; this service only flushes, never commits.
        self.db = db

    async def create_issue(
        self,
        data: IssueCreate,
        images: list[UploadFile],
        user_id: str | None = None
    ) -> tuple[Issue, list[str]]:
        """Persist a new issue with its images and emit IssueCreated.

        Returns the ORM Issue and the list of stored image file paths.
        Raises ValueError when no images are given or an extension is invalid.
        """
        if not images:
            raise ValueError("At least one image is required")

        # NOTE(review): only the file extension is checked here;
        # validate_file_size is imported by this module but never called —
        # confirm whether size validation was intended.
        for image in images:
            if not validate_file_extension(image.filename or ""):
                raise ValueError(f"Invalid file extension: {image.filename}")

        # Best-effort lookup: fields may be None when geocoding fails.
        location_info = await geocoding_service.reverse_geocode(
            data.latitude, data.longitude
        )

        logger.info(f"Location resolved: {location_info.city}, {location_info.locality}")

        final_description = data.description or "Issue reported"

        issue = Issue(
            user_id=user_id,
            description=final_description,
            latitude=data.latitude,
            longitude=data.longitude,
            accuracy_meters=data.accuracy_meters,
            platform=data.platform,
            device_model=data.device_model,
            state=IssueState.REPORTED,
            city=location_info.city,
            locality=location_info.locality,
            full_address=location_info.full_address,
        )

        self.db.add(issue)
        # Flush so issue.id exists for the image rows and the event payload.
        await self.db.flush()

        image_paths = []
        for image in images:
            # Each issue gets its own storage subfolder keyed by issue id.
            file_path = await save_upload(image, subfolder=str(issue.id))

            issue_image = IssueImage(
                issue_id=issue.id,
                file_path=file_path,
                original_filename=image.filename,
            )
            self.db.add(issue_image)
            image_paths.append(file_path)

        await self.db.flush()

        # Publish on the shared event bus so subscribers can react to the new issue.
        event = IssueCreated(
            issue_id=issue.id,
            image_paths=image_paths,
            latitude=issue.latitude,
            longitude=issue.longitude,
            description=issue.description,
        )
        await event_bus.publish(event)

        logger.info(f"Issue created: {issue.id} in {issue.city}")

        return issue, image_paths

    async def get_issue(self, issue_id: UUID) -> Issue | None:
        """Fetch an issue by primary key, or None if absent."""
        return await self.db.get(Issue, issue_id)
|
Backend/services/supabase_auth.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import aiohttp
|
| 2 |
+
from typing import Optional
|
| 3 |
+
from Backend.core.config import settings
|
| 4 |
+
from Backend.core.logging import get_logger
|
| 5 |
+
|
| 6 |
+
logger = get_logger(__name__)
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class SupabaseAuthService:
    """Thin async client for the Supabase GoTrue admin/auth REST endpoints.

    All calls use the service-role key from settings; result dicts carry a
    "success" flag plus a human-readable "message" on failure.
    """

    def __init__(self):
        self.url = settings.supabase_url
        self.service_key = settings.supabase_key
        # Supabase expects the key both as `apikey` and as a bearer token.
        self.headers = {
            "apikey": self.service_key,
            "Authorization": f"Bearer {self.service_key}",
            "Content-Type": "application/json",
        }

    async def invite_user(self, email: str, redirect_to: Optional[str] = None) -> dict:
        """Send an invitation email; optionally redirect the user after signup."""
        invite_url = f"{self.url}/auth/v1/invite"

        payload = {
            "email": email,
        }

        if redirect_to:
            payload["options"] = {"redirectTo": redirect_to}

        async with aiohttp.ClientSession() as session:
            async with session.post(invite_url, json=payload, headers=self.headers) as response:
                result = await response.json()

                if response.status == 200:
                    logger.info(f"Invite sent to {email}")
                    return {
                        "success": True,
                        "message": f"Invitation email sent to {email}",
                        "user_id": result.get("id"),
                        "email": email,
                    }
                else:
                    error_msg = result.get("msg") or result.get("message") or str(result)
                    logger.error(f"Failed to invite {email}: {error_msg}")
                    return {
                        "success": False,
                        "message": error_msg,
                        "email": email,
                    }

    async def create_user(self, email: str, password: str, user_metadata: Optional[dict] = None) -> dict:
        """Create a confirmed user directly via the admin API."""
        create_url = f"{self.url}/auth/v1/admin/users"

        payload = {
            "email": email,
            "password": password,
            # Skip the confirmation email; the account is immediately usable.
            "email_confirm": True,
        }

        if user_metadata:
            payload["user_metadata"] = user_metadata

        async with aiohttp.ClientSession() as session:
            async with session.post(create_url, json=payload, headers=self.headers) as response:
                result = await response.json()

                if response.status in [200, 201]:
                    logger.info(f"User created: {email}")
                    return {
                        "success": True,
                        "user_id": result.get("id"),
                        "email": email,
                    }
                else:
                    error_msg = result.get("msg") or result.get("message") or str(result)
                    # Consistency fix: log the failure and include "email" in
                    # the result, matching invite_user's failure shape.
                    logger.error(f"Failed to create user {email}: {error_msg}")
                    return {
                        "success": False,
                        "message": error_msg,
                        "email": email,
                    }

    async def send_magic_link(self, email: str, redirect_to: Optional[str] = None) -> dict:
        """Send a passwordless magic-link sign-in email."""
        magic_url = f"{self.url}/auth/v1/magiclink"

        payload = {"email": email}

        if redirect_to:
            payload["options"] = {"redirectTo": redirect_to}

        async with aiohttp.ClientSession() as session:
            async with session.post(magic_url, json=payload, headers=self.headers) as response:
                if response.status == 200:
                    return {
                        "success": True,
                        "message": f"Magic link sent to {email}",
                    }
                else:
                    result = await response.json()
                    return {
                        "success": False,
                        "message": result.get("msg") or str(result),
                    }

    async def get_user(self, user_id: str) -> Optional[dict]:
        """Fetch a user record by id, or None on any non-200 response."""
        user_url = f"{self.url}/auth/v1/admin/users/{user_id}"

        async with aiohttp.ClientSession() as session:
            async with session.get(user_url, headers=self.headers) as response:
                if response.status == 200:
                    return await response.json()
                return None

    async def delete_user(self, user_id: str) -> bool:
        """Delete a user by id; True iff the API answered 200."""
        delete_url = f"{self.url}/auth/v1/admin/users/{user_id}"

        async with aiohttp.ClientSession() as session:
            async with session.delete(delete_url, headers=self.headers) as response:
                return response.status == 200


# Module-level singleton used by callers across the backend.
supabase_auth = SupabaseAuthService()
|
Backend/services/vision.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from Backend.agents.vision import VisionAgent

# Alias so callers can import VisionService from Backend.services; the
# implementation lives in the vision agent.
VisionService = VisionAgent
|