diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..7225b622b871a92768685c05de2ab59e57c1c970 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,37 @@ +.git +.github +.venv +.env +__pycache__ +*.pyc +*.pyo +*.pyd +.Python +*.so +*.egg +*.egg-info +dist +build +.vscode +.idea +*.log +.DS_Store +Thumbs.db +node_modules +Frontend +User +Dataset +Dataset_Merged +Model/runs +runs +noupload +archive.zip +Backendbackup +design-system +docs +infra +migrations +createadmin.py +generate_password_hash.py +start.js +start_system.bat diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..865a15474d49f3e29b46abb5a6bc21d496b4bebd --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..d6bab887f3ca5359738b71e94d1982b5609784f2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,100 @@ + +.env +.env.* +!.env.example +!.env.sample + +*.pem +*.key +*.p12 +*.pfx +*.crt +*.cer +*.der + +*.log + +**/__pycache__/ +*.py[cod] +*$py.class +.pytest_cache/ +.mypy_cache/ +.ruff_cache/ +.coverage +htmlcov/ +.tox/ + +.venv/ +venv/ +env/ +ENV/ + +# Backend cache directories are covered by the global pattern +Backendbackup/ + +node_modules/ +.next/ +out/ +dist/ +build/ + +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* + +.DS_Store +Thumbs.db + +.vscode/ +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + +Frontend/.env +Frontend/.env.* + +User/.env +User/.env.* + +User/.expo/ +User/.metro-cache/ +User/.cache/ + +User/android/.gradle/ +User/android/build/ +User/android/app/build/ +User/android/local.properties +User/android/app/release/ +User/android/app/debug/ + +static/temp/ +Backend/static/temp/ + +runs/ +Model/runs/ +Model/test_predictions/ +Dataset/ +Dataset_Merged/ + +*.pt +*.onnx +*.torchscript +*.engine +*.tflite +*.weights + + +noupload/ +infra/env/ +infra/**/dev.env +infra/**/prod.env +infra/cloudflared/*.json + +archive.zip + +*.sql +!model.pt +.agent +design-system \ No newline at end of file diff --git a/Backend/.env.example b/Backend/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..3a8453d066261206e65de7acaae0e79dff4cbccd --- /dev/null +++ b/Backend/.env.example @@ -0,0 +1,7 @@ +DATABASE_URL= +SUPABASE_URL= +SUPABASE_KEY= +SUPABASE_JWT_SECRET= +SUPABASE_BUCKET=city-issues +GEMINI_API_KEY= +FRONTEND_URL= diff --git a/Backend/agents/__init__.py b/Backend/agents/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..32b6bdd70f0e8ad5afc2318f3e456b7ec36f9144 --- /dev/null +++ b/Backend/agents/__init__.py @@ -0,0 +1,6 @@ +from .vision import VisionAgent +from .geoDeduplicate import GeoDeduplicateAgent +from .priority import PriorityAgent +from .routing import RoutingAgent +from .escalation import EscalationAgent +from .notification import NotificationAgent diff --git a/Backend/agents/escalation/__init__.py b/Backend/agents/escalation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..73228911293c827ee5c537da9579e1c22c030574 --- /dev/null +++ b/Backend/agents/escalation/__init__.py @@ -0,0 +1 @@ +from .agent import EscalationAgent, IssueEscalated diff --git
a/Backend/agents/escalation/agent.py b/Backend/agents/escalation/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..c5a7178b78345c74bce072914501b0908be5071e --- /dev/null +++ b/Backend/agents/escalation/agent.py @@ -0,0 +1,178 @@ +import json +from datetime import datetime +from typing import Optional +from uuid import UUID +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +import google.generativeai as genai + +from Backend.core.events import event_bus, Event +from Backend.core.logging import get_logger +from Backend.core.config import settings +from Backend.database.models import Issue, IssueEvent, Escalation, Department, Member +from Backend.orchestration.base import BaseAgent + +logger = get_logger(__name__, agent_name="EscalationAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + + +class IssueEscalated(Event): + from_level: int + to_level: int + reason: str + hours_overdue: float + + +class EscalationAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("EscalationAgent") + self.db = db + if settings.gemini_api_key: + self.model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.model = None + + async def should_escalate(self, issue: Issue) -> tuple[bool, int, str]: + if not issue.sla_deadline: + return False, 0, "No SLA deadline set" + + if not self.model: + return False, 0, "Gemini API not configured" + + now = datetime.utcnow() + hours_since_creation = (now - issue.created_at).total_seconds() / 3600 + hours_until_deadline = (issue.sla_deadline - now).total_seconds() / 3600 + + prompt = f"""Analyze civic issue escalation: + +Issue State: {issue.state} +Priority: {issue.priority} (1=Critical, 2=High, 3=Medium, 4=Low) +Current Escalation Level: {issue.escalation_level} +Hours Since Creation: {hours_since_creation:.1f} +Hours Until Deadline: {hours_until_deadline:.1f} +Category: {issue.description[:100] if issue.description else 'N/A'} + +Determine if escalation is needed. 
Consider: +- SLA breach (negative deadline hours) +- Priority urgency +- Time criticality + +Return ONLY valid JSON: +{{"should_escalate": true/false, "new_level": 0-3, "reason": "max 80 chars"}}""" + + try: + response = self.model.generate_content(prompt) + result = json.loads(response.text.replace("```json", "").replace("```", "").strip()) + return result.get("should_escalate", False), result.get("new_level", issue.escalation_level), result.get("reason", "Analysis completed") + except Exception as e: + logger.error(f"Gemini escalation analysis failed: {e}") + return False, issue.escalation_level, "Analysis error" + + async def get_escalation_targets(self, issue: Issue) -> list[str]: + targets = [] + + if issue.department_id: + query = select(Department).where(Department.id == issue.department_id) + result = await self.db.execute(query) + dept = result.scalar_one_or_none() + if dept and dept.escalation_email: + targets.append(dept.escalation_email) + + if issue.assigned_member_id: + query = select(Member).where(Member.id == issue.assigned_member_id) + result = await self.db.execute(query) + member = result.scalar_one_or_none() + if member: + targets.append(member.email) + + return targets + + async def process_issue(self, issue_id: UUID) -> dict: + issue = await self.db.get(Issue, issue_id) + if not issue: + return {"error": "Issue not found"} + + if issue.state in ["resolved", "verified", "closed"]: + return {"skipped": True, "reason": "Issue already resolved"} + + should_esc, new_level, reason = await self.should_escalate(issue) + + if not should_esc: + return {"escalated": False, "reason": reason} + + old_level = issue.escalation_level + issue.escalation_level = new_level + issue.escalated_at = datetime.utcnow() + issue.state = "escalated" + + targets = await self.get_escalation_targets(issue) + + escalation = Escalation( + issue_id=issue_id, + from_level=old_level, + to_level=new_level, + reason=reason, + escalated_by="EscalationAgent", + notified_emails=",".join(targets) if targets else None, + ) + self.db.add(escalation) + + self.log_decision( + issue_id=issue_id, + decision=f"Escalated from level {old_level} to {new_level}", + reasoning=reason + ) + + event_record = IssueEvent( + issue_id=issue_id, + event_type="escalated", + agent_name=self.name, + event_data=json.dumps({ + "from_level": old_level, + "to_level": new_level, + "reason": reason, + "notified": targets, + }) + ) + self.db.add(event_record) + await self.db.flush() + + esc_event = IssueEscalated( + issue_id=issue_id, + from_level=old_level, + to_level=new_level, + reason=reason, + hours_overdue=0, + ) + await event_bus.publish(esc_event) + + return { + "escalated": True, + "from_level": old_level, + "to_level": new_level, + "reason": reason, + "notified": targets, + } + + async def check_all_pending(self) -> list[dict]: + query = ( + select(Issue) + .where(Issue.state.in_(["assigned", "in_progress", "escalated"])) + .where(Issue.is_duplicate == False) + .where(Issue.sla_deadline.isnot(None)) + ) + result = await self.db.execute(query) + issues = result.scalars().all() + + results = [] + for issue in issues: + result = await self.process_issue(issue.id) + if result.get("escalated"): + results.append(result) + + return results + + async def handle(self, event) -> None: + await self.process_issue(event.issue_id) diff --git a/Backend/agents/geoDeduplicate/__init__.py b/Backend/agents/geoDeduplicate/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..06367c8909480f29be32d78857e648fd263adf9e --- 
/dev/null +++ b/Backend/agents/geoDeduplicate/__init__.py @@ -0,0 +1 @@ +from .agent import GeoDeduplicateAgent, IssueDeduplicated diff --git a/Backend/agents/geoDeduplicate/agent.py b/Backend/agents/geoDeduplicate/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..77591b78c49cf7c23bc2c49ab71c4ecaf4530849 --- /dev/null +++ b/Backend/agents/geoDeduplicate/agent.py @@ -0,0 +1,225 @@ +import json +from typing import Optional +from uuid import UUID +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +import google.generativeai as genai + +from Backend.core.config import settings +from Backend.core.events import event_bus, IssueClassified, Event +from Backend.core.logging import get_logger +from Backend.database.models import Issue, IssueEvent, Classification +from Backend.utils.geo import haversine_distance, get_bounding_box +from Backend.orchestration.base import BaseAgent + +logger = get_logger(__name__, agent_name="GeoDeduplicateAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + + +class IssueDeduplicated(Event): + is_duplicate: bool + parent_issue_id: Optional[UUID] = None + cluster_id: Optional[str] = None + nearby_count: int = 0 + + +class GeoDeduplicateAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("GeoDeduplicateAgent") + self.db = db + self.radius_meters = settings.duplicate_radius_meters + if settings.gemini_api_key: + self.model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.model = None + + async def semantic_similarity(self, desc1: str, desc2: str, cat1: str, cat2: str) -> float: + if not self.model: + return 0.5 + + prompt = f"""Rate semantic similarity (0.0-1.0) between civic issue reports: + +Issue A: +Category: {cat1} +Description: {desc1[:200] if desc1 else 'N/A'} + +Issue B: +Category: {cat2} +Description: {desc2[:200] if desc2 else 'N/A'} + +Consider: +- Same problem type? +- Same physical location context? +- Same infrastructure element? 
+ +Return ONLY a decimal number between 0.0 and 1.0.""" + + try: + response = self.model.generate_content(prompt) + score = float(response.text.strip()) + return max(0.0, min(1.0, score)) + except Exception as e: + logger.error(f"Gemini similarity failed: {e}") + return 0.5 + + async def find_nearby_issues( + self, + latitude: float, + longitude: float, + exclude_id: UUID, + category: Optional[str] = None + ) -> list[tuple[Issue, float]]: + min_lat, max_lat, min_lon, max_lon = get_bounding_box( + latitude, longitude, self.radius_meters + ) + + query = ( + select(Issue) + .options(selectinload(Issue.classification)) + .where(Issue.latitude >= min_lat) + .where(Issue.latitude <= max_lat) + .where(Issue.longitude >= min_lon) + .where(Issue.longitude <= max_lon) + .where(Issue.id != exclude_id) + .where(Issue.state.in_(["reported", "validated", "assigned", "in_progress"])) + .where(Issue.is_duplicate == False) + ) + + result = await self.db.execute(query) + candidates = result.scalars().all() + + nearby = [] + for issue in candidates: + distance = haversine_distance( + latitude, longitude, + issue.latitude, issue.longitude + ) + if distance <= self.radius_meters: + if category and issue.classification: + if issue.classification.primary_category == category: + nearby.append((issue, distance)) + else: + nearby.append((issue, distance)) + + return sorted(nearby, key=lambda x: x[1]) + + async def check_duplicate( + self, + issue_id: UUID, + latitude: float, + longitude: float, + category: Optional[str] = None, + description: Optional[str] = None + ) -> tuple[bool, Optional[UUID], list[tuple[Issue, float]]]: + nearby = await self.find_nearby_issues( + latitude, longitude, issue_id, category + ) + + if not nearby: + return False, None, [] + + best_match = None + highest_score = 0.0 + + for issue, distance in nearby: + if issue.classification and category: + cat1 = category + cat2 = issue.classification.primary_category + desc1 = description or "" + desc2 = issue.description or "" + + similarity = await self.semantic_similarity(desc1, desc2, cat1, cat2) + + if similarity > highest_score: + highest_score = similarity + best_match = issue + + if highest_score > 0.75 and best_match: + return True, best_match.id, nearby + + return False, None, nearby + + async def process_issue(self, issue_id: UUID) -> dict: + query = ( + select(Issue) + .options(selectinload(Issue.classification)) + .where(Issue.id == issue_id) + ) + result = await self.db.execute(query) + issue = result.scalar_one_or_none() + if not issue: + return {"error": "Issue not found"} + + category = None + if issue.classification: + category = issue.classification.primary_category + + is_duplicate, parent_id, nearby = await self.check_duplicate( + issue.id, + issue.latitude, + issue.longitude, + category, + issue.description + ) + + if is_duplicate and parent_id: + issue.is_duplicate = True + issue.parent_issue_id = parent_id + issue.geo_status = "duplicate" + issue.geo_cluster_id = str(parent_id) + + parent = await self.db.get(Issue, parent_id) + if parent and issue.priority and parent.priority: + if issue.priority < parent.priority: + parent.priority = issue.priority + + self.log_decision( + issue_id=issue_id, + decision="Marked as duplicate", + reasoning=f"Found {len(nearby)} nearby issues within {self.radius_meters}m, linked to parent {parent_id}" + ) + else: + issue.is_duplicate = False + issue.geo_status = "unique" + + self.log_decision( + issue_id=issue_id, + decision="Marked as unique", + reasoning=f"No similar issues found within 
{self.radius_meters}m radius" + ) + + event_record = IssueEvent( + issue_id=issue_id, + event_type="geo_deduplicated", + agent_name=self.name, + event_data=json.dumps({ + "is_duplicate": is_duplicate, + "parent_issue_id": str(parent_id) if parent_id else None, + "nearby_count": len(nearby), + "radius_meters": self.radius_meters, + }) + ) + self.db.add(event_record) + await self.db.flush() + + dedup_event = IssueDeduplicated( + issue_id=issue_id, + is_duplicate=is_duplicate, + parent_issue_id=parent_id, + cluster_id=str(parent_id) if parent_id else None, + nearby_count=len(nearby), + ) + await event_bus.publish(dedup_event) + + return { + "is_duplicate": is_duplicate, + "parent_issue_id": str(parent_id) if parent_id else None, + "nearby_count": len(nearby), + "geo_status": issue.geo_status, + } + + async def handle(self, event: IssueClassified) -> None: + await self.process_issue(event.issue_id) diff --git a/Backend/agents/notification/__init__.py b/Backend/agents/notification/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..52c9d613c257826be6c8329bc579f34aa0404660 --- /dev/null +++ b/Backend/agents/notification/__init__.py @@ -0,0 +1 @@ +from .agent import NotificationAgent, NotificationSent diff --git a/Backend/agents/notification/agent.py b/Backend/agents/notification/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..e53077269a188092cb7a406f046037e0a1f02d60 --- /dev/null +++ b/Backend/agents/notification/agent.py @@ -0,0 +1,333 @@ +import json +from datetime import datetime +from typing import Optional +from uuid import UUID + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from Backend.core.events import Event, event_bus +from Backend.core.logging import get_logger +from Backend.core.config import settings +from Backend.database.models import Classification, Issue, IssueEvent, Member +from Backend.orchestration.base import BaseAgent +from Backend.services.email import email_service + +logger = get_logger(__name__, agent_name="NotificationAgent") + + +class NotificationSent(Event): + notification_type: str + recipients: list[str] + message: str + + +class NotificationAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("NotificationAgent") + self.db = db + self.pending_notifications: list[dict] = [] + + async def get_issue_with_classification(self, issue_id: UUID) -> Optional[Issue]: + query = ( + select(Issue) + .options(selectinload(Issue.classification)) + .where(Issue.id == issue_id) + ) + result = await self.db.execute(query) + return result.scalar_one_or_none() + + def format_issue_summary(self, issue: Issue) -> str: + category = ( + issue.classification.primary_category if issue.classification else "Unknown" + ) + priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"} + priority_str = priority_map.get(issue.priority, "UNKNOWN") + + return ( + f"Issue #{str(issue.id)[:8]}\n" + f"Category: {category}\n" + f"Priority: {priority_str}\n" + f"Location: ({issue.latitude:.4f}, {issue.longitude:.4f})\n" + f"Description: {issue.description or 'No description'}\n" + f"State: {issue.state}" + ) + + async def queue_notification( + self, + notification_type: str, + recipients: list[str], + subject: str, + message: str, + issue_id: Optional[UUID] = None, + ): + notification = { + "type": notification_type, + "recipients": recipients, + "subject": subject, + "message": message, + "issue_id": str(issue_id) if issue_id else None, + 
"queued_at": datetime.utcnow().isoformat(), + } + self.pending_notifications.append(notification) + + logger.info(f"Notification queued: {notification_type} to {recipients}") + + if issue_id: + event_record = IssueEvent( + issue_id=issue_id, + event_type="notification_queued", + agent_name=self.name, + event_data=json.dumps(notification), + ) + self.db.add(event_record) + await self.db.flush() + + return notification + + async def notify_assignment(self, issue_id: UUID): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + recipients = [] + worker_name = "Worker" + + if issue.assigned_member_id: + query = select(Member).where(Member.id == issue.assigned_member_id) + result = await self.db.execute(query) + member = result.scalar_one_or_none() + if member: + recipients.append(member.email) + worker_name = member.name + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"} + priority_str = priority_map.get(issue.priority, "UNKNOWN") + location = f"({issue.latitude:.4f}, {issue.longitude:.4f})" + + try: + await email_service.send_assignment_email( + worker_email=member.email, + worker_name=worker_name, + issue_id=str(issue.id)[:8], + category=category, + priority=priority_str, + location=location, + description=issue.description or "No description" + ) + logger.info(f"Assignment email sent to {member.email}") + except Exception as e: + logger.error(f"Failed to send assignment email: {e}") + + if recipients: + summary = self.format_issue_summary(issue) + await self.queue_notification( + notification_type="assignment", + recipients=recipients, + subject=f"New Issue Assigned: #{str(issue.id)[:8]}", + message=f"You have been assigned a new issue:\n\n{summary}", + issue_id=issue_id, + ) + + async def notify_escalation(self, issue_id: UUID, reason: str, targets: list[str]): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"} + priority_str = priority_map.get(issue.priority, "UNKNOWN") + + for target in targets: + try: + await email_service.send_escalation_email( + admin_email=target, + issue_id=str(issue.id)[:8], + category=category, + priority=priority_str, + reason=reason, + escalation_level=issue.escalation_level + ) + logger.info(f"Escalation email sent to {target}") + except Exception as e: + logger.error(f"Failed to send escalation email: {e}") + + summary = self.format_issue_summary(issue) + await self.queue_notification( + notification_type="escalation", + recipients=targets, + subject=f"ESCALATION: Issue #{str(issue.id)[:8]} - Level {issue.escalation_level}", + message=f"Issue has been escalated:\n\nReason: {reason}\n\n{summary}", + issue_id=issue_id, + ) + + async def notify_resolution(self, issue_id: UUID): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + location = f"({issue.latitude:.4f}, {issue.longitude:.4f})" + + if issue.user_id: + try: + await email_service.send_completion_email( + user_email=issue.user_id, + issue_id=str(issue.id)[:8], + category=category, + location=location, + resolution_notes=issue.resolution_notes or "Issue resolved successfully" + ) + logger.info(f"Resolution email 
sent to user {issue.user_id}") + except Exception as e: + logger.error(f"Failed to send resolution email: {e}") + + await self.queue_notification( + notification_type="resolution", + recipients=[settings.admin_email], + subject=f"Issue Resolved: #{str(issue.id)[:8]}", + message=f"Issue has been marked as resolved.\n\n{self.format_issue_summary(issue)}", + issue_id=issue_id, + ) + + async def notify_manual_review(self, issue_id: UUID, reason: str): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + location = f"({issue.latitude:.4f}, {issue.longitude:.4f})" + image_url = f"{settings.supabase_url}/storage/v1/object/public/{settings.supabase_bucket}/{issue.id}/original.jpg" + + try: + await email_service.send_manual_review_email( + issue_id=str(issue.id)[:8], + reason=reason, + category=category, + location=location, + image_url=image_url + ) + logger.info(f"Manual review email sent to admin") + except Exception as e: + logger.error(f"Failed to send manual review email: {e}") + + await self.queue_notification( + notification_type="manual_review", + recipients=[settings.admin_email], + subject=f"Manual Review Required: #{str(issue.id)[:8]}", + message=f"Issue requires manual review.\n\nReason: {reason}\n\n{self.format_issue_summary(issue)}", + issue_id=issue_id, + ) + + async def notify_user_confirmation(self, issue_id: UUID): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + confirmation_link = f"https://app.urbanlens.city/confirm/{issue.id}" + + if issue.user_id: + try: + await email_service.send_confirmation_request_email( + user_email=issue.user_id, + issue_id=str(issue.id)[:8], + category=category, + confirmation_link=confirmation_link + ) + logger.info(f"Confirmation request email sent to user {issue.user_id}") + except Exception as e: + logger.error(f"Failed to send confirmation email: {e}") + + await self.queue_notification( + notification_type="user_confirmation", + recipients=[issue.user_id] if issue.user_id else [], + subject=f"Please Confirm Resolution: #{str(issue.id)[:8]}", + message=f"Please confirm if this issue has been resolved.\n\n{self.format_issue_summary(issue)}", + issue_id=issue_id, + ) + + async def notify_issue_accepted(self, issue_id: UUID, accepted_by: str = "automatic"): + issue = await self.get_issue_with_classification(issue_id) + if not issue: + return + + category = ( + issue.classification.primary_category + if issue.classification + else "Unknown" + ) + priority_map = {1: "CRITICAL", 2: "HIGH", 3: "MEDIUM", 4: "LOW"} + priority_str = priority_map.get(issue.priority, "UNKNOWN") + location = f"({issue.latitude:.4f}, {issue.longitude:.4f})" + tracking_url = f"https://app.urbanlens.city/track/{issue.id}" + + if issue.user_id: + try: + await email_service.send_issue_accepted_email( + user_email=issue.user_id, + issue_id=str(issue.id)[:8], + category=category, + priority=priority_str, + location=location, + accepted_by=accepted_by, + tracking_url=tracking_url + ) + logger.info(f"Issue accepted email sent to user {issue.user_id} ({accepted_by})") + except Exception as e: + logger.error(f"Failed to send issue accepted email: {e}") + + await self.queue_notification( + notification_type="issue_accepted", + recipients=[issue.user_id] if issue.user_id else [], + subject=f"Issue Accepted: 
#{str(issue.id)[:8]}", + message=f"Your issue has been accepted {accepted_by}.\n\n{self.format_issue_summary(issue)}", + issue_id=issue_id, + ) + + async def process_issue( + self, issue_id: UUID, event_type: str = "assignment" + ) -> dict: + if event_type == "assignment": + await self.notify_assignment(issue_id) + elif event_type == "resolution": + await self.notify_resolution(issue_id) + elif event_type == "escalation": + await self.notify_escalation(issue_id, "SLA breach or priority escalation", [settings.admin_email]) + elif event_type == "manual_review": + await self.notify_manual_review(issue_id, "Requires admin attention") + elif event_type == "user_confirmation": + await self.notify_user_confirmation(issue_id) + elif event_type == "issue_accepted": + accepted_by = "automatic" + await self.notify_issue_accepted(issue_id, accepted_by) + elif event_type == "issue_accepted_manual": + await self.notify_issue_accepted(issue_id, "manual") + + return {"queued": len(self.pending_notifications)} + + async def handle(self, event) -> None: + event_type = getattr(event, "notification_type", "assignment") + await self.process_issue(event.issue_id, event_type) diff --git a/Backend/agents/priority/__init__.py b/Backend/agents/priority/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f5f7c8a6c8aa1841babfa958c7e0eede48b339f6 --- /dev/null +++ b/Backend/agents/priority/__init__.py @@ -0,0 +1 @@ +from .agent import PriorityAgent, IssuePrioritized diff --git a/Backend/agents/priority/agent.py b/Backend/agents/priority/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..9dff5ff00f64ca02a0040ccbe61bafc43602f335 --- /dev/null +++ b/Backend/agents/priority/agent.py @@ -0,0 +1,144 @@ +import json +from typing import Optional +from uuid import UUID +from sqlalchemy import select, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +import google.generativeai as genai + +from Backend.core.config import settings +from Backend.core.events import event_bus, Event +from Backend.core.logging import get_logger +from Backend.database.models import Issue, IssueEvent, Classification +from Backend.orchestration.base import BaseAgent + +logger = get_logger(__name__, agent_name="PriorityAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + + +class IssuePrioritized(Event): + priority: int + reasoning: str + + +class PriorityAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("PriorityAgent") + self.db = db + if settings.gemini_api_key: + self.model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.model = None + + async def calculate_priority( + self, + category: Optional[str], + confidence: float, + is_duplicate: bool, + duplicate_count: int = 0, + description: Optional[str] = None, + city: Optional[str] = None + ) -> tuple[int, str]: + if not self.model: + return 3, "Gemini API not configured" + + prompt = f"""Assign priority for civic infrastructure issue: + +Category: {category or 'Unknown'} +AI Confidence: {confidence:.1%} +Duplicate Reports: {duplicate_count} +Location: {city or 'Unknown'} +Description: {description[:200] if description else 'N/A'} + +Priority Scale: +1 = CRITICAL (Public safety, electrical hazards, major hazards) +2 = HIGH (Potholes, road damage, fallen trees) +3 = MEDIUM (Garbage, broken signs, minor structures) +4 = LOW (Parking violations, minor vandalism) + +Consider safety impact, infrastructure criticality, and community 
accessibility. + +Return ONLY valid JSON: +{{"priority": 1-4, "reasoning": "max 80 chars"}}""" + + try: + response = self.model.generate_content(prompt) + result = json.loads(response.text.replace("```json", "").replace("```", "").strip()) + return result.get("priority", 3), result.get("reasoning", "Priority assigned") + except Exception as e: + logger.error(f"Gemini priority calculation failed: {e}") + return 3, "Analysis error" + + async def process_issue(self, issue_id: UUID) -> dict: + query = ( + select(Issue) + .options(selectinload(Issue.classification)) + .where(Issue.id == issue_id) + ) + result = await self.db.execute(query) + issue = result.scalar_one_or_none() + if not issue: + return {"error": "Issue not found"} + + if issue.is_duplicate: + self.log_decision( + issue_id=issue_id, + decision="Skipped prioritization", + reasoning="Issue is a duplicate, priority inherited from parent" + ) + return {"skipped": True, "reason": "duplicate"} + + category = None + confidence = 0.0 + if issue.classification: + category = issue.classification.primary_category + confidence = issue.classification.primary_confidence + + dup_count_result = await self.db.execute( + select(func.count(Issue.id)).where(Issue.parent_issue_id == issue_id) + ) + duplicate_count = dup_count_result.scalar() or 0 + + priority, reasoning = await self.calculate_priority( + category, confidence, issue.is_duplicate, duplicate_count, issue.description, issue.city + ) + + issue.priority = priority + issue.priority_reason = reasoning + + self.log_decision( + issue_id=issue_id, + decision=f"Priority set to {priority}", + reasoning=reasoning + ) + + event_record = IssueEvent( + issue_id=issue_id, + event_type="prioritized", + agent_name=self.name, + event_data=json.dumps({ + "priority": priority, + "reasoning": reasoning, + "category": category, + "confidence": confidence, + }) + ) + self.db.add(event_record) + await self.db.flush() + + priority_event = IssuePrioritized( + issue_id=issue_id, + priority=priority, + reasoning=reasoning, + ) + await event_bus.publish(priority_event) + + return { + "priority": priority, + "reasoning": reasoning, + } + + async def handle(self, event) -> None: + await self.process_issue(event.issue_id) diff --git a/Backend/agents/routing/__init__.py b/Backend/agents/routing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..60e44db9e82f92ea590cb855ea2faf76a4b66448 --- /dev/null +++ b/Backend/agents/routing/__init__.py @@ -0,0 +1 @@ +from .agent import RoutingAgent, IssueAssigned diff --git a/Backend/agents/routing/agent.py b/Backend/agents/routing/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..cbce1b569f74685bdb0da0693f2a6ace0eff6387 --- /dev/null +++ b/Backend/agents/routing/agent.py @@ -0,0 +1,222 @@ +import json +from datetime import datetime, timedelta +from typing import Optional +from uuid import UUID +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +import google.generativeai as genai + +from Backend.core.config import settings +from Backend.core.events import event_bus, Event +from Backend.core.logging import get_logger +from Backend.database.models import Issue, IssueEvent, Department, Member, Classification +from Backend.orchestration.base import BaseAgent + +logger = get_logger(__name__, agent_name="RoutingAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + +PRIORITY_SLA_HOURS = { + 1: 4, + 2: 12, + 3: 48, + 4: 168, 
+} + + +class IssueAssigned(Event): + department_code: str + member_id: Optional[UUID] = None + member_name: Optional[str] = None + sla_deadline: datetime + sla_hours: int + + +class RoutingAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("RoutingAgent") + self.db = db + if settings.gemini_api_key: + self.model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.model = None + + async def find_department(self, category: Optional[str], description: Optional[str] = None) -> Optional[Department]: + query = select(Department).where(Department.is_active == True) + result = await self.db.execute(query) + departments = result.scalars().all() + + if not departments: + return None + + if not self.model or not category: + return departments[0] + + dept_info = "\n".join([f"- {d.code}: {d.name} ({d.categories})" for d in departments]) + + prompt = f"""Route civic issue to correct department: + +Issue Category: {category} +Description: {description[:150] if description else 'N/A'} + +Available Departments: +{dept_info} + +Return ONLY the department CODE (e.g., PWD, TRAFFIC, SANITATION)""" + + try: + response = self.model.generate_content(prompt) + dept_code = response.text.strip().upper() + + for dept in departments: + if dept.code == dept_code: + return dept + except Exception as e: + logger.error(f"Gemini routing failed: {e}") + + return departments[0] + + async def find_available_member( + self, + department_id: UUID, + city: Optional[str] = None, + locality: Optional[str] = None + ) -> Optional[Member]: + base_query = ( + select(Member) + .where(Member.department_id == department_id) + .where(Member.is_active == True) + .where(Member.current_workload < Member.max_workload) + ) + + if city: + city_query = base_query.where(Member.city.ilike(f"%{city}%")) + result = await self.db.execute(city_query.order_by(Member.current_workload.asc())) + member = result.scalars().first() + if member: + logger.info(f"Found member in city: {city}") + return member + + if locality: + locality_query = base_query.where(Member.locality.ilike(f"%{locality}%")) + result = await self.db.execute(locality_query.order_by(Member.current_workload.asc())) + member = result.scalars().first() + if member: + logger.info(f"Found member in locality: {locality}") + return member + + result = await self.db.execute(base_query.order_by(Member.current_workload.asc())) + member = result.scalars().first() + if member: + logger.info(f"Assigned to available member (no location match)") + return member + + def calculate_sla(self, priority: int, department: Optional[Department]) -> tuple[int, datetime]: + base_hours = PRIORITY_SLA_HOURS.get(priority, 48) + + if department and department.default_sla_hours: + base_hours = min(base_hours, department.default_sla_hours) + + deadline = datetime.utcnow() + timedelta(hours=base_hours) + return base_hours, deadline + + async def process_issue(self, issue_id: UUID) -> dict: + query = ( + select(Issue) + .options(selectinload(Issue.classification)) + .where(Issue.id == issue_id) + ) + result = await self.db.execute(query) + issue = result.scalar_one_or_none() + if not issue: + return {"error": "Issue not found"} + + if issue.is_duplicate: + self.log_decision( + issue_id=issue_id, + decision="Skipped routing", + reasoning="Issue is a duplicate" + ) + return {"skipped": True, "reason": "duplicate"} + + category = issue.classification.primary_category if issue.classification else None + priority = issue.priority or 3 + + department = await self.find_department(category, 
issue.description) + + member = None + if department: + member = await self.find_available_member( + department.id, + city=issue.city, + locality=issue.locality + ) + if member: + member.current_workload += 1 + + sla_hours, sla_deadline = self.calculate_sla(priority, department) + + issue.department_id = department.id if department else None + issue.assigned_member_id = member.id if member else None + issue.sla_hours = sla_hours + issue.sla_deadline = sla_deadline + issue.state = "assigned" + + dept_code = department.code if department else "UNASSIGNED" + member_name = member.name if member else "Unassigned" + member_city = member.city if member else "N/A" + + reasoning = f"Category '{category}' → {dept_code}" + if issue.city: + reasoning += f", Issue location: {issue.city}" + if member: + reasoning += f", Member location: {member_city}" + reasoning += f", SLA: {sla_hours}h" + + self.log_decision( + issue_id=issue_id, + decision=f"Routed to {dept_code} → {member_name}", + reasoning=reasoning + ) + + event_record = IssueEvent( + issue_id=issue_id, + event_type="assigned", + agent_name=self.name, + event_data=json.dumps({ + "department_code": dept_code, + "member_id": str(member.id) if member else None, + "member_name": member_name, + "issue_city": issue.city, + "issue_locality": issue.locality, + "member_city": member.city if member else None, + "sla_hours": sla_hours, + "sla_deadline": sla_deadline.isoformat(), + }) + ) + self.db.add(event_record) + await self.db.flush() + + assign_event = IssueAssigned( + issue_id=issue_id, + department_code=dept_code, + member_id=member.id if member else None, + member_name=member_name, + sla_deadline=sla_deadline, + sla_hours=sla_hours, + ) + await event_bus.publish(assign_event) + + return { + "department": dept_code, + "member": member_name, + "issue_city": issue.city, + "issue_locality": issue.locality, + "sla_hours": sla_hours, + "sla_deadline": sla_deadline.isoformat(), + } + + async def handle(self, event) -> None: + await self.process_issue(event.issue_id) diff --git a/Backend/agents/sla/agent.py b/Backend/agents/sla/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..42a80f2470392d674638d31c42fc8a8b58a58b83 --- /dev/null +++ b/Backend/agents/sla/agent.py @@ -0,0 +1,157 @@ +import json +from datetime import datetime +from typing import Optional +from uuid import UUID +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +import google.generativeai as genai + +from Backend.core.events import event_bus, Event +from Backend.core.logging import get_logger +from Backend.core.config import settings +from Backend.database.models import Issue, IssueEvent, Member, Department +from Backend.orchestration.base import BaseAgent + +logger = get_logger(__name__, agent_name="SLAAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + + +class SLAWarning(Event): + hours_remaining: float + threshold_hours: float + warning_level: str + assigned_email: Optional[str] = None + + +class SLAAgent(BaseAgent): + def __init__(self, db: AsyncSession): + super().__init__("SLAAgent") + self.db = db + if settings.gemini_api_key: + self.model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.model = None + + async def check_sla_status(self, issue: Issue) -> tuple[bool, str, Optional[str]]: + """ + Checks if an issue needs an SLA warning. 
+ Returns: (needs_warning, warning_type, reason) + """ + if not issue.sla_deadline or issue.state in ["resolved", "verified", "closed", "escalated"]: + return False, "", None + + if not self.model: + now = datetime.utcnow() + hours_remaining = (issue.sla_deadline - now).total_seconds() / 3600 + total_sla_hours = issue.sla_hours or 48 + + if 0 < hours_remaining <= (total_sla_hours * 0.5) and hours_remaining > (total_sla_hours * 0.2): + return True, "warning", f"50% SLA time remaining ({hours_remaining:.1f}h)" + elif 0 < hours_remaining <= (total_sla_hours * 0.2): + return True, "critical", f"Critical: Less than 20% SLA time remaining ({hours_remaining:.1f}h)" + return False, "", None + + now = datetime.utcnow() + hours_remaining = (issue.sla_deadline - now).total_seconds() / 3600 + total_sla_hours = issue.sla_hours or 48 + hours_elapsed = total_sla_hours - hours_remaining + + prompt = f"""Assess SLA status for civic issue: + +Priority: {issue.priority} (1=Critical, 2=High, 3=Medium, 4=Low) +State: {issue.state} +Total SLA Hours: {total_sla_hours} +Hours Elapsed: {hours_elapsed:.1f} +Hours Remaining: {hours_remaining:.1f} +Time Used: {(hours_elapsed/total_sla_hours*100):.1f}% + +Determine if warning is needed: +- "none": No warning needed (>50% time remaining) +- "warning": Warning level (20-50% time remaining) +- "critical": Critical warning (<20% time remaining) + +Return ONLY valid JSON: +{{"warning_level": "none/warning/critical", "reason": "max 60 chars"}}""" + + try: + response = self.model.generate_content(prompt) + result = json.loads(response.text.replace("```json", "").replace("```", "").strip()) + level = result.get("warning_level", "none") + reason = result.get("reason", "SLA assessment completed") + + if level == "none": + return False, "", None + return True, level, reason + except Exception as e: + logger.error(f"Gemini SLA check failed: {e}") + if 0 < hours_remaining <= (total_sla_hours * 0.2): + return True, "critical", f"Less than 20% SLA time remaining" + elif 0 < hours_remaining <= (total_sla_hours * 0.5): + return True, "warning", f"50% SLA time remaining" + return False, "", None + + async def process_issue(self, issue_id: UUID) -> dict: + issue = await self.db.get(Issue, issue_id) + if not issue: + return {"error": "Issue not found"} + + needs_warning, level, reason = await self.check_sla_status(issue) + + if not needs_warning: + return {"status": "ok"} + + + assigned_email = None + if issue.assigned_member_id: + member = await self.db.get(Member, issue.assigned_member_id) + if member: + assigned_email = member.email + + + warning_event = SLAWarning( + issue_id=issue_id, + hours_remaining=(issue.sla_deadline - datetime.utcnow()).total_seconds() / 3600, + threshold_hours=0, + warning_level=level, + assigned_email=assigned_email + ) + await event_bus.publish(warning_event) + + + event_record = IssueEvent( + issue_id=issue_id, + event_type=f"sla_{level}", + agent_name=self.name, + event_data=json.dumps({ + "hours_remaining": warning_event.hours_remaining, + "level": level, + "reason": reason + }) + ) + self.db.add(event_record) + await self.db.flush() + + return {"warning_sent": True, "level": level, "recipient": assigned_email} + + async def check_all_active(self) -> list[dict]: + """Scans all active issues for SLA breaches.""" + query = select(Issue).where( + Issue.state.in_(["assigned", "in_progress"]), + Issue.sla_deadline.isnot(None) + ) + result = await self.db.execute(query) + issues = result.scalars().all() + + results = [] + for issue in issues: + res = await 
self.process_issue(issue.id) + if res.get("warning_sent"): + results.append(res) + return results + + async def handle(self, event) -> None: + + + pass diff --git a/Backend/agents/vision/__init__.py b/Backend/agents/vision/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..20c9f645f580b806a355f32affbfa51b59dbe9bc --- /dev/null +++ b/Backend/agents/vision/__init__.py @@ -0,0 +1 @@ +from .agent import VisionAgent diff --git a/Backend/agents/vision/agent.py b/Backend/agents/vision/agent.py new file mode 100644 index 0000000000000000000000000000000000000000..30eb6b9c11873a966c02aa2c84a27a50674d9180 --- /dev/null +++ b/Backend/agents/vision/agent.py @@ -0,0 +1,296 @@ +import json +import time +import cv2 +import numpy as np +import google.generativeai as genai +from pathlib import Path +from typing import Optional +from uuid import UUID + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from Backend.core.config import settings +from Backend.core.events import event_bus, IssueClassified, IssueCreated +from Backend.core.logging import get_logger +from Backend.core.schemas import ClassificationResult, DetectionBox, CLASS_ID_TO_CATEGORY, IssueCategory +from Backend.database.models import Classification, Issue, IssueImage, IssueEvent +from Backend.orchestration.base import BaseAgent +from Backend.utils.fuzzy_match import auto_validate_issue +from Backend.utils.storage import save_bytes, download_from_supabase, get_upload_url + +logger = get_logger(__name__, agent_name="VisionAgent") + +if settings.gemini_api_key: + genai.configure(api_key=settings.gemini_api_key) + + +class VisionAgent(BaseAgent): + _model = None + + def __init__(self, db: Optional[AsyncSession] = None): + super().__init__("VisionAgent") + self.db = db + if settings.gemini_api_key: + self.gemini_model = genai.GenerativeModel('gemma-3-27b-it') + else: + self.gemini_model = None + + @classmethod + def load_model(cls): + if cls._model is None: + from ultralytics import YOLO + model_path = settings.model_path + if not model_path.exists(): + raise FileNotFoundError(f"Model not found: {model_path}") + cls._model = YOLO(str(model_path)) + logger.info(f"YOLO model loaded from {model_path}") + return cls._model + + @classmethod + def get_model(cls): + if cls._model is None: + cls.load_model() + return cls._model + + async def download_image(self, remote_path: str) -> bytes: + return await download_from_supabase(remote_path) + + async def save_annotated(self, results, original_path: str, subfolder: str) -> str: + im_array = results[0].plot() + + original_name = Path(original_path).stem + annotated_filename = f"annotated_{original_name}.jpg" + + _, buffer = cv2.imencode('.jpg', im_array, [cv2.IMWRITE_JPEG_QUALITY, 90]) + image_bytes = buffer.tobytes() + + remote_path = await save_bytes(image_bytes, annotated_filename, subfolder=subfolder) + return remote_path + + async def run_inference(self, image_data: bytes) -> tuple[list, float]: + model = self.get_model() + + nparr = np.frombuffer(image_data, np.uint8) + img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + if img is None: + raise ValueError("Invalid image data") + + start_time = time.perf_counter() + results = model.predict( + source=img, + conf=settings.model_confidence_threshold, + imgsz=settings.model_input_size, + verbose=False, + ) + inference_time = (time.perf_counter() - start_time) * 1000 + + return results, inference_time + + async def gemini_classify_image( + self, + image_data: bytes, + description: Optional[str] = None + 
) -> tuple[Optional[IssueCategory], float, Optional[str]]: + if not self.gemini_model: + return None, 0.0, None + + allowed = [ + {"class_id": k, "class_name": v.value} + for k, v in CLASS_ID_TO_CATEGORY.items() + ] + prompt = ( + "Classify the photo into exactly one of the allowed categories. " + "Return ONLY valid JSON with keys: class_id (int), confidence (0.0-1.0), reasoning (max 80 chars).\n\n" + f"Allowed categories: {json.dumps(allowed)}\n" + f"User description: {(description or '')[:200]}" + ) + + try: + response = self.gemini_model.generate_content( + [ + {"text": prompt}, + { + "inline_data": { + "mime_type": "image/jpeg", + "data": image_data, + } + }, + ] + ) + text = (response.text or "").replace("```json", "").replace("```", "").strip() + data = json.loads(text) + class_id = data.get("class_id") + confidence = float(data.get("confidence", 0.0)) + reasoning = data.get("reasoning") + if not isinstance(class_id, int): + return None, 0.0, None + category = CLASS_ID_TO_CATEGORY.get(class_id) + if not category: + return None, 0.0, None + confidence = max(0.0, min(1.0, confidence)) + return category, confidence, reasoning + except Exception as e: + logger.error(f"Gemini vision classification failed: {e}") + return None, 0.0, None + + def extract_detections(self, results) -> list[DetectionBox]: + detections = [] + for result in results: + boxes = result.boxes + if boxes is not None: + for i in range(len(boxes)): + class_id = int(boxes.cls[i].item()) + confidence = float(boxes.conf[i].item()) + bbox = tuple(boxes.xyxy[i].tolist()) + + category = CLASS_ID_TO_CATEGORY.get(class_id) + if category: + detections.append(DetectionBox( + class_id=class_id, + class_name=category.value, + confidence=confidence, + bbox=bbox, + )) + return detections + + async def classify_image( + self, + image_path: str, + subfolder: str = "", + description: Optional[str] = None + ) -> tuple[list[DetectionBox], str, Optional[IssueCategory], float, Optional[str]]: + image_data = await self.download_image(image_path) + results, inference_time = await self.run_inference(image_data) + annotated_path = await self.save_annotated(results, image_path, subfolder) + detections = self.extract_detections(results) + + gemini_category = None + gemini_confidence = 0.0 + gemini_reasoning = None + if self.gemini_model and (not detections or max(d.confidence for d in detections) < 0.5): + gemini_category, gemini_confidence, gemini_reasoning = await self.gemini_classify_image( + image_data=image_data, + description=description + ) + + logger.info(f"Inference completed in {inference_time:.2f}ms, {len(detections)} detections") + return detections, annotated_path, gemini_category, gemini_confidence, gemini_reasoning + + async def process_issue( + self, + issue_id: UUID, + image_paths: list[str], + description: Optional[str] = None + ) -> ClassificationResult: + all_detections = [] + annotated_paths = [] + total_time = 0.0 + subfolder = str(issue_id) + + gemini_best_category = None + gemini_best_confidence = 0.0 + gemini_best_reasoning = None + + for path in image_paths: + start = time.perf_counter() + detections, annotated_path, gemini_category, gemini_confidence, gemini_reasoning = await self.classify_image( + path, + subfolder=subfolder, + description=description + ) + total_time += (time.perf_counter() - start) * 1000 + all_detections.extend(detections) + annotated_paths.append(annotated_path) + + if gemini_category and gemini_confidence > gemini_best_confidence: + gemini_best_category = gemini_category + gemini_best_confidence = 
gemini_confidence + gemini_best_reasoning = gemini_reasoning + + if self.db: + query = select(IssueImage).where(IssueImage.file_path == path) + result = await self.db.execute(query) + image_record = result.scalar_one_or_none() + if image_record: + image_record.annotated_path = annotated_path + + result = ClassificationResult( + issue_id=issue_id, + detections=all_detections, + annotated_urls=[get_upload_url(p) for p in annotated_paths], + inference_time_ms=total_time, + ) + + if gemini_best_category and (not result.primary_category or result.primary_confidence < 0.5): + result.primary_category = gemini_best_category + result.primary_confidence = gemini_best_confidence + + detected_categories = list(set(d.class_name for d in all_detections)) + auto_validated, validation_reason = auto_validate_issue(description, detected_categories) + + validation_source = "auto" if auto_validated else "pending_manual" + new_state = "validated" if auto_validated else "reported" + + self.log_decision( + issue_id=issue_id, + decision=f"Validation: {validation_source}", + reasoning=validation_reason + ) + + if self.db: + classification = Classification( + issue_id=issue_id, + primary_category=result.primary_category.value if result.primary_category else None, + primary_confidence=result.primary_confidence, + detections_json=json.dumps([d.model_dump() for d in all_detections]), + inference_time_ms=total_time, + ) + self.db.add(classification) + + issue = await self.db.get(Issue, issue_id) + if issue: + issue.state = new_state + issue.validation_source = validation_source + issue.validation_reason = validation_reason + + event_record = IssueEvent( + issue_id=issue_id, + event_type="classified", + agent_name=self.name, + event_data=json.dumps({ + "category": result.primary_category.value if result.primary_category else None, + "confidence": result.primary_confidence, + "detections_count": len(all_detections), + "validation_source": validation_source, + "annotated_images": annotated_paths, + "gemini_category": gemini_best_category.value if gemini_best_category else None, + "gemini_confidence": gemini_best_confidence, + "gemini_reasoning": gemini_best_reasoning, + }) + ) + self.db.add(event_record) + await self.db.flush() + + if result.primary_category: + event = IssueClassified( + issue_id=issue_id, + category=result.primary_category.value, + confidence=result.primary_confidence, + detections_count=len(all_detections), + metadata={ + "validation_source": validation_source, + "validation_reason": validation_reason, + "annotated_images": [get_upload_url(p) for p in annotated_paths], + } + ) + await event_bus.publish(event) + + return result + + async def handle(self, event: IssueCreated) -> None: + await self.process_issue( + event.issue_id, + event.image_paths, + event.description + ) diff --git a/Backend/agents/vision/model.pt b/Backend/agents/vision/model.pt new file mode 100644 index 0000000000000000000000000000000000000000..1b5e63886c5b0f71f7b68e30d6cacdc2dc28fa39 --- /dev/null +++ b/Backend/agents/vision/model.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55bb189306a9882c84fb471b9cc81e2ba48363d1a4c49ccf914e9a08cde01c24 +size 22512426 diff --git a/Backend/api/__init__.py b/Backend/api/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..793f52dc26724b68376dd0adda737ab5129343a6 --- /dev/null +++ b/Backend/api/__init__.py @@ -0,0 +1,3 @@ +from .app import create_app + +app = create_app() diff --git a/Backend/api/app.py b/Backend/api/app.py new file mode 
100644 index 0000000000000000000000000000000000000000..ec0722e34cee1e5d61fd240a4bbf43e314277176 --- /dev/null +++ b/Backend/api/app.py @@ -0,0 +1,126 @@ +from contextlib import asynccontextmanager +from pathlib import Path +from fastapi import FastAPI, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse, FileResponse +from fastapi.staticfiles import StaticFiles + +from Backend.core.config import settings +from Backend.core.events import event_bus +from Backend.core.logging import setup_logging, get_logger +from Backend.core.security import SecurityHeadersMiddleware, RateLimitMiddleware, RequestValidationMiddleware +from Backend.database.connection import init_db, close_db +from Backend.api.routes import api_router + +logger = get_logger(__name__) + +STATIC_DIR = Path("static") + +@asynccontextmanager +async def lifespan(app: FastAPI): + setup_logging(debug=settings.debug) + logger.info("Starting City Issue Resolution Agent") + + await init_db() + logger.info("Database initialized") + + await event_bus.start() + logger.info("Event bus started") + + + from Backend.agents.vision import VisionAgent + try: + VisionAgent.load_model() + logger.info("Vision model loaded") + except Exception as e: + logger.warning(f"Vision model failed to load: {e}. Running in mock mode.") + + + import asyncio + from Backend.database.connection import get_db_context + from Backend.agents.escalation.agent import EscalationAgent + from Backend.agents.sla.agent import SLAAgent + + async def run_periodic_checks(): + while True: + try: + logger.info("Running periodic SLA and Escalation checks...") + async with get_db_context() as db: + + esc_agent = EscalationAgent(db) + await esc_agent.check_all_pending() + + + sla_agent = SLAAgent(db) + await sla_agent.check_all_active() + except Exception as e: + logger.error(f"Error in background task: {e}") + + + await asyncio.sleep(900) + + task = asyncio.create_task(run_periodic_checks()) + + yield + + task.cancel() + await event_bus.stop() + await close_db() + logger.info("Shutdown complete") + + +def create_app() -> FastAPI: + app = FastAPI( + title="City Issue Resolution Agent", + description="Autonomous urban issue detection and resolution platform", + version="1.0.0", + lifespan=lifespan, + root_path="", + ) + + # CORS must be added first + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=False, + allow_methods=["*"], + allow_headers=["*"], + expose_headers=["*"], + ) + + app.add_middleware(SecurityHeadersMiddleware) + app.add_middleware(RateLimitMiddleware, requests_per_minute=120, burst_limit=20) + app.add_middleware(RequestValidationMiddleware) + + + settings.local_temp_dir.mkdir(parents=True, exist_ok=True) + STATIC_DIR.mkdir(parents=True, exist_ok=True) + + app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static") + + app.include_router(api_router) + + @app.get("/") + async def root(): + return FileResponse(STATIC_DIR / "flow.html") + + @app.get("/dashboard") + async def dashboard(): + return FileResponse(STATIC_DIR / "flow.html") + + @app.exception_handler(ValueError) + async def value_error_handler(request: Request, exc: ValueError): + return JSONResponse( + status_code=400, + content={"detail": str(exc)} + ) + + @app.exception_handler(Exception) + async def general_exception_handler(request: Request, exc: Exception): + logger.error(f"Unhandled exception: {exc}", exc_info=True) + return JSONResponse( + status_code=500, + content={"detail": "Internal server error"} + ) 
+ + return app diff --git a/Backend/api/routes/__init__.py b/Backend/api/routes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..88c950a91cf2ebff37d660acf5da3a5bf306d1e1 --- /dev/null +++ b/Backend/api/routes/__init__.py @@ -0,0 +1,16 @@ +from fastapi import APIRouter + +from .health import router as health_router +from .issues import router as issues_router +from .admin import router as admin_router +from .flow import router as flow_router +from .worker import router as worker_router + +api_router = APIRouter() + +api_router.include_router(health_router, prefix="/health", tags=["Health"]) +api_router.include_router(issues_router, prefix="/issues", tags=["Issues"]) +api_router.include_router(admin_router, prefix="/admin", tags=["Admin"]) +api_router.include_router(flow_router, prefix="/flow", tags=["Agent Flow"]) +api_router.include_router(worker_router, prefix="/worker", tags=["Worker"]) + diff --git a/Backend/api/routes/admin.py b/Backend/api/routes/admin.py new file mode 100644 index 0000000000000000000000000000000000000000..0f43802afbe2128debb1ce033e3dda77b2ddd1db --- /dev/null +++ b/Backend/api/routes/admin.py @@ -0,0 +1,1160 @@ +from typing import Optional, List +from uuid import UUID +from datetime import datetime, timedelta +from fastapi import APIRouter, Depends, HTTPException, status, Query +from pydantic import BaseModel, EmailStr +from sqlalchemy import select, func, or_, desc, asc +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload, aliased +import bcrypt +import jwt + +from Backend.database.connection import get_db +from Backend.database.models import Department, Member, Issue, Escalation, Classification, IssueEvent, IssueImage +from Backend.core.config import settings +from Backend.core.logging import get_logger +from Backend.core.schemas import IssueResponse, IssueState +from Backend.utils.storage import get_upload_url + +logger = get_logger(__name__) +router = APIRouter() + +SECRET_KEY = settings.supabase_jwt_secret +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_HOURS = 24 + + +def hash_password(password: str) -> str: + return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode() + + +def verify_password(password: str, password_hash: str) -> bool: + return bcrypt.checkpw(password.encode(), password_hash.encode()) + + +def create_access_token(member_id: UUID, role: str) -> str: + expire = datetime.utcnow() + timedelta(hours=ACCESS_TOKEN_EXPIRE_HOURS) + payload = { + "sub": str(member_id), + "role": role, + "exp": expire, + "iat": datetime.utcnow(), + } + return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM) + + +class LoginRequest(BaseModel): + email: str + password: str + expected_role: Optional[str] = None + + +class LoginResponse(BaseModel): + access_token: str + token_type: str = "bearer" + user: dict + + + +from fastapi.security import OAuth2PasswordBearer +from jwt import PyJWTError + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/admin/login") + +async def get_current_user(token: str = Depends(oauth2_scheme), db: AsyncSession = Depends(get_db)): + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + member_id: str = payload.get("sub") + if member_id is None: + raise HTTPException(status_code=401, detail="Invalid authentication credentials") + except PyJWTError: + raise HTTPException(status_code=401, detail="Invalid authentication credentials") + + member = await db.get(Member, UUID(member_id)) + if member is None: + raise HTTPException(status_code=401, detail="User not 
found") + return member + +async def get_current_active_user(current_user: Member = Depends(get_current_user)): + if not current_user.is_active: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user + +async def get_current_admin(current_user: Member = Depends(get_current_active_user)): + if current_user.role != "admin": + raise HTTPException(status_code=403, detail="Not authorized") + return current_user + + +@router.post("/login", response_model=LoginResponse) +async def staff_login( + data: LoginRequest, + db: AsyncSession = Depends(get_db), +): + member = await db.execute( + select(Member).where(Member.email == data.email, Member.is_active == True) + ) + member = member.scalar_one_or_none() + + if not member or not member.password_hash: + raise HTTPException(status_code=401, detail="Invalid email or password") + + if not verify_password(data.password, member.password_hash): + raise HTTPException(status_code=401, detail="Invalid email or password") + + if data.expected_role: + if data.expected_role == "admin" and member.role != "admin": + raise HTTPException(status_code=403, detail="Access denied. You are not an admin.") + if data.expected_role == "worker" and member.role == "admin": + raise HTTPException(status_code=403, detail="Admins should login as Admin, not Worker.") + + access_token = create_access_token(member.id, member.role) + + return LoginResponse( + access_token=access_token, + user={ + "id": str(member.id), + "name": member.name, + "email": member.email, + "role": member.role, + "department_id": str(member.department_id) if member.department_id else None, + }, + ) + + +class DepartmentCreate(BaseModel): + name: str + code: str + description: Optional[str] = None + categories: Optional[str] = None + default_sla_hours: int = 48 + escalation_email: Optional[str] = None + + +class DepartmentUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + categories: Optional[str] = None + default_sla_hours: Optional[int] = None + escalation_email: Optional[str] = None + is_active: Optional[bool] = None + + +class DepartmentResponse(BaseModel): + id: UUID + name: str + code: str + description: Optional[str] + categories: Optional[str] + default_sla_hours: int + escalation_email: Optional[str] + is_active: bool + member_count: int = 0 + + class Config: + from_attributes = True + + +class MemberInvite(BaseModel): + department_id: UUID + name: str + email: str + phone: Optional[str] = None + role: str = "officer" + city: Optional[str] = None + locality: Optional[str] = None + max_workload: int = 10 + send_invite: bool = True + + +class MemberCreate(BaseModel): + department_id: UUID + name: str + email: str + phone: Optional[str] = None + role: str = "worker" + city: Optional[str] = None + locality: Optional[str] = None + max_workload: int = 10 + password: str + + +class MemberUpdate(BaseModel): + name: Optional[str] = None + email: Optional[str] = None + phone: Optional[str] = None + role: Optional[str] = None + city: Optional[str] = None + locality: Optional[str] = None + max_workload: Optional[int] = None + is_active: Optional[bool] = None + password: Optional[str] = None + + +class MemberResponse(BaseModel): + id: UUID + department_id: Optional[UUID] + name: str + email: str + phone: Optional[str] + role: str + city: Optional[str] + locality: Optional[str] + is_active: bool + current_workload: int + max_workload: int + invite_status: Optional[str] = None + + class Config: + from_attributes = True + + + + + 
+@router.post("/departments", response_model=DepartmentResponse, status_code=status.HTTP_201_CREATED) +async def create_department( + data: DepartmentCreate, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + + existing = await db.execute(select(Department).where(Department.code == data.code)) + if existing.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Department code already exists") + + department = Department( + name=data.name, + code=data.code.upper(), + description=data.description, + categories=data.categories, + default_sla_hours=data.default_sla_hours, + escalation_email=data.escalation_email, + ) + db.add(department) + await db.flush() + await db.refresh(department) + + return DepartmentResponse( + id=department.id, + name=department.name, + code=department.code, + description=department.description, + categories=department.categories, + default_sla_hours=department.default_sla_hours, + escalation_email=department.escalation_email, + is_active=department.is_active, + member_count=0, + ) + + +@router.get("/departments", response_model=list[DepartmentResponse]) +async def list_departments( + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + query = select(Department).order_by(Department.name) + result = await db.execute(query) + departments = result.scalars().all() + + response = [] + for dept in departments: + member_count = await db.execute( + select(func.count(Member.id)).where(Member.department_id == dept.id) + ) + count = member_count.scalar() or 0 + + response.append(DepartmentResponse( + id=dept.id, + name=dept.name, + code=dept.code, + description=dept.description, + categories=dept.categories, + default_sla_hours=dept.default_sla_hours, + escalation_email=dept.escalation_email, + is_active=dept.is_active, + member_count=count, + )) + + return response + + +@router.get("/departments/{department_id}", response_model=DepartmentResponse) +async def get_department( + department_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + department = await db.get(Department, department_id) + if not department: + raise HTTPException(status_code=404, detail="Department not found") + + member_count = await db.execute( + select(func.count(Member.id)).where(Member.department_id == department.id) + ) + count = member_count.scalar() or 0 + + return DepartmentResponse( + id=department.id, + name=department.name, + code=department.code, + description=department.description, + categories=department.categories, + default_sla_hours=department.default_sla_hours, + escalation_email=department.escalation_email, + is_active=department.is_active, + member_count=count, + ) + + +@router.patch("/departments/{department_id}", response_model=DepartmentResponse) +async def update_department( + department_id: UUID, + data: DepartmentUpdate, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + department = await db.get(Department, department_id) + if not department: + raise HTTPException(status_code=404, detail="Department not found") + + update_data = data.model_dump(exclude_unset=True) + for key, value in update_data.items(): + setattr(department, key, value) + + await db.flush() + + member_count = await db.execute( + select(func.count(Member.id)).where(Member.department_id == department.id) + ) + count = member_count.scalar() or 0 + + return DepartmentResponse( + id=department.id, + 
name=department.name, + code=department.code, + description=department.description, + categories=department.categories, + default_sla_hours=department.default_sla_hours, + escalation_email=department.escalation_email, + is_active=department.is_active, + member_count=count, + ) + + +@router.delete("/departments/{department_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_department( + department_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + department = await db.get(Department, department_id) + if not department: + raise HTTPException(status_code=404, detail="Department not found") + + await db.delete(department) + await db.flush() + + +@router.post("/members/invite", status_code=status.HTTP_201_CREATED) +async def invite_member( + data: MemberInvite, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + department = await db.get(Department, data.department_id) + if not department: + raise HTTPException(status_code=404, detail="Department not found") + + existing = await db.execute(select(Member).where(Member.email == data.email)) + if existing.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Email already exists") + + invite_result = None + if data.send_invite: + invite_result = await supabase_auth.invite_user( + email=data.email, + redirect_to=f"{settings.frontend_url}/auth/callback" + ) + + member = Member( + department_id=data.department_id, + name=data.name, + email=data.email, + phone=data.phone, + role=data.role, + city=data.city, + locality=data.locality, + max_workload=data.max_workload, + ) + db.add(member) + await db.flush() + await db.refresh(member) + + return { + "member": MemberResponse( + id=member.id, + department_id=member.department_id, + name=member.name, + email=member.email, + phone=member.phone, + role=member.role, + city=member.city, + locality=member.locality, + is_active=member.is_active, + current_workload=member.current_workload, + max_workload=member.max_workload, + invite_status="sent" if invite_result and invite_result.get("success") else "not_sent", + ), + "invite": invite_result, + "message": f"Member created. {'Invite email sent!' 
if invite_result and invite_result.get('success') else 'No invite sent.'}", + } + + + + + +@router.post("/members", response_model=MemberResponse, status_code=status.HTTP_201_CREATED) +async def create_member( + data: MemberCreate, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + + department = await db.get(Department, data.department_id) + if not department: + raise HTTPException(status_code=404, detail="Department not found") + + existing = await db.execute(select(Member).where(Member.email == data.email)) + if existing.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Email already exists") + + member = Member( + department_id=data.department_id, + name=data.name, + email=data.email, + phone=data.phone, + role=data.role, + city=data.city, + locality=data.locality, + max_workload=data.max_workload, + password_hash=hash_password(data.password), + ) + db.add(member) + await db.flush() + await db.refresh(member) + + + return MemberResponse( + id=member.id, + department_id=member.department_id, + name=member.name, + email=member.email, + phone=member.phone, + role=member.role, + city=member.city, + locality=member.locality, + is_active=member.is_active, + current_workload=member.current_workload, + max_workload=member.max_workload, + ) + + +@router.post("/members/{member_id}/send-invite") +async def send_member_invite( + member_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + member = await db.get(Member, member_id) + if not member: + raise HTTPException(status_code=404, detail="Member not found") + + if not settings.frontend_url: + raise HTTPException(status_code=500, detail="FRONTEND_URL not configured") + + result = await supabase_auth.invite_user( + email=member.email, + redirect_to=f"{settings.frontend_url}/auth/callback" + ) + + if result.get("success"): + return { + "success": True, + "message": f"Invite sent to {member.email}", + "member_id": str(member.id), + } + else: + raise HTTPException( + status_code=400, + detail=result.get("message", "Failed to send invite") + ) + + +@router.post("/members/{member_id}/magic-link") +async def send_magic_link( + member_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + member = await db.get(Member, member_id) + if not member: + raise HTTPException(status_code=404, detail="Member not found") + + if not settings.frontend_url: + raise HTTPException(status_code=500, detail="FRONTEND_URL not configured") + + result = await supabase_auth.send_magic_link( + email=member.email, + redirect_to=f"{settings.frontend_url}/auth/callback" + ) + + if result.get("success"): + return { + "success": True, + "message": f"Magic link sent to {member.email}", + } + else: + raise HTTPException( + status_code=400, + detail=result.get("message", "Failed to send magic link") + ) + + +@router.get("/members", response_model=list[MemberResponse]) +async def list_members( + department_id: Optional[UUID] = None, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + query = select(Member).order_by(Member.name) + if department_id: + query = query.where(Member.department_id == department_id) + + result = await db.execute(query) + members = result.scalars().all() + + return [ + MemberResponse( + id=m.id, + department_id=m.department_id, + name=m.name, + email=m.email, + phone=m.phone, + role=m.role, + city=m.city, + locality=m.locality, + 
is_active=m.is_active, + current_workload=m.current_workload, + max_workload=m.max_workload, + ) + for m in members + ] + + +@router.get("/members/{member_id}", response_model=MemberResponse) +async def get_member( + member_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + member = await db.get(Member, member_id) + if not member: + raise HTTPException(status_code=404, detail="Member not found") + + return MemberResponse( + id=member.id, + department_id=member.department_id, + name=member.name, + email=member.email, + phone=member.phone, + role=member.role, + city=member.city, + locality=member.locality, + is_active=member.is_active, + current_workload=member.current_workload, + max_workload=member.max_workload, + ) + + +@router.patch("/members/{member_id}", response_model=MemberResponse) +async def update_member( + member_id: UUID, + data: MemberUpdate, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + member = await db.get(Member, member_id) + if not member: + raise HTTPException(status_code=404, detail="Member not found") + + update_data = data.model_dump(exclude_unset=True) + for key, value in update_data.items(): + setattr(member, key, value) + + await db.flush() + + return MemberResponse( + id=member.id, + department_id=member.department_id, + name=member.name, + email=member.email, + phone=member.phone, + role=member.role, + city=member.city, + locality=member.locality, + is_active=member.is_active, + current_workload=member.current_workload, + max_workload=member.max_workload, + ) + + +@router.delete("/members/{member_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_member( + member_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + member = await db.get(Member, member_id) + if not member: + raise HTTPException(status_code=404, detail="Member not found") + + await db.delete(member) + await db.flush() + + +@router.get("/stats") +async def get_admin_stats( + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + from Backend.database.models import Issue, Classification + from datetime import datetime, timedelta + + dept_count = await db.execute(select(func.count(Department.id))) + member_count = await db.execute(select(func.count(Member.id))) + issue_count = await db.execute(select(func.count(Issue.id))) + pending_count = await db.execute( + select(func.count(Issue.id)).where(Issue.state.in_(["reported", "validated", "assigned"])) + ) + resolved_count = await db.execute( + select(func.count(Issue.id)).where(Issue.state.in_(["resolved", "closed", "verified"])) + ) + verification_count = await db.execute( + select(func.count(Issue.id)).where(Issue.state == "pending_verification") + ) + + category_query = ( + select( + Classification.primary_category, + func.count(Classification.id).label("count") + ) + .group_by(Classification.primary_category) + .order_by(func.count(Classification.id).desc()) + .limit(6) + ) + category_result = await db.execute(category_query) + categories = category_result.all() + issues_by_category = [{"name": cat or "Unknown", "value": cnt} for cat, cnt in categories] + + today = datetime.utcnow().date() + day_names = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] + issues_activity = [] + + for i in range(6, -1, -1): + day = today - timedelta(days=i) + day_start = datetime.combine(day, datetime.min.time()) + day_end = datetime.combine(day, 
datetime.max.time()) + + reported_q = await db.execute( + select(func.count(Issue.id)).where( + Issue.created_at >= day_start, + Issue.created_at <= day_end + ) + ) + resolved_q = await db.execute( + select(func.count(Issue.id)).where( + Issue.resolved_at >= day_start, + Issue.resolved_at <= day_end + ) + ) + + issues_activity.append({ + "name": day_names[day.weekday()], + "reported": reported_q.scalar() or 0, + "resolved": resolved_q.scalar() or 0 + }) + + return { + "departments": dept_count.scalar() or 0, + "members": member_count.scalar() or 0, + "total_issues": issue_count.scalar() or 0, + "pending_issues": pending_count.scalar() or 0, + "resolved_issues": resolved_count.scalar() or 0, + "verification_needed": verification_count.scalar() or 0, + "issues_by_category": issues_by_category, + "issues_activity": issues_activity, + } + + +@router.get("/stats/heatmap") +async def get_issue_heatmap( + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + """ + Returns city-aggregated issue counts for heatmap visualization. + """ + query = ( + select( + Issue.city, + func.count(Issue.id).label("count"), + func.avg(Issue.priority).label("priority_avg") + ) + .where(Issue.state.notin_(["closed", "resolved", "verified"])) + .where(Issue.city.isnot(None)) + .group_by(Issue.city) + .order_by(func.count(Issue.id).desc()) + ) + result = await db.execute(query) + rows = result.all() + + heatmap_data = [] + for city, count, priority_avg in rows: + heatmap_data.append({ + "city": city or "Unknown", + "count": count, + "priority_avg": round(float(priority_avg or 3), 1) + }) + + return heatmap_data + + +@router.get("/stats/escalations", response_model=list[dict]) +async def get_escalation_alerts( + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + """ + Returns a list of currently escalated issues with details. 
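+ Each item includes issue_id, category, priority, escalated_at, escalation level and reason, plus city/locality.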
+ """ + query = ( + select(Issue, Escalation) + .join(Escalation, Issue.id == Escalation.issue_id) + .where(Issue.state == "escalated") + .order_by(Escalation.created_at.desc()) + ) + result = await db.execute(query) + rows = result.all() + + alerts = [] + for issue, esc in rows: + alerts.append({ + "issue_id": issue.id, + "category": issue.classification.primary_category if issue.classification else "Unknown", + "priority": issue.priority, + "escalated_at": esc.created_at, + "level": esc.to_level, + "reason": esc.reason, + "city": issue.city, + "locality": issue.locality + }) + + +class ManualReviewRequest(BaseModel): + status: str + reason: Optional[str] = None + + + +class AdminIssueListItem(BaseModel): + id: UUID + description: Optional[str] + state: str + priority: Optional[int] + city: Optional[str] + created_at: datetime + updated_at: datetime + department: Optional[str] + assigned_to: Optional[str] + category: Optional[str] + sla_deadline: Optional[datetime] + thumbnail: Optional[str] + + class Config: + from_attributes = True + +def issue_to_response(issue: Issue) -> IssueResponse: + image_urls = [] + annotated_urls = [] + for img in issue.images: + image_urls.append(get_upload_url(img.file_path)) + if img.annotated_path: + annotated_urls.append(get_upload_url(img.annotated_path)) + + proof_image_url = None + if issue.proof_image_path: + proof_image_url = get_upload_url(issue.proof_image_path) + + return IssueResponse( + id=issue.id, + description=issue.description, + latitude=issue.latitude, + longitude=issue.longitude, + state=IssueState(issue.state), + priority=issue.priority, + category=issue.classification.primary_category if issue.classification else None, + confidence=issue.classification.primary_confidence if issue.classification else None, + image_urls=image_urls, + annotated_urls=annotated_urls, + proof_image_url=proof_image_url, + validation_source=issue.validation_source, + is_duplicate=issue.is_duplicate, + parent_issue_id=issue.parent_issue_id, + city=issue.city, + locality=issue.locality, + full_address=issue.full_address, + sla_hours=issue.sla_hours, + sla_deadline=issue.sla_deadline, + created_at=issue.created_at, + updated_at=issue.updated_at, + ) + +@router.get("/issues", response_model=dict) +async def list_admin_issues( + page: int = Query(1, ge=1), + limit: int = Query(20, ge=1, le=100), + status: Optional[str] = None, + priority: Optional[int] = None, + department_id: Optional[UUID] = None, + worker_id: Optional[UUID] = None, + search: Optional[str] = None, + sort_by: str = "created_at", + sort_order: str = "desc", + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + query = ( + select(Issue) + .options( + selectinload(Issue.department), + selectinload(Issue.assigned_member), + selectinload(Issue.classification), + selectinload(Issue.images) + ) + ) + + + if status: + statuses = status.split(",") + query = query.where(Issue.state.in_(statuses)) + + if priority is not None: + query = query.where(Issue.priority == priority) + + if department_id: + query = query.where(Issue.department_id == department_id) + + if worker_id: + query = query.where(Issue.assigned_member_id == worker_id) + + if search: + search_filter = or_( + Issue.description.ilike(f"%{search}%"), + Issue.city.ilike(f"%{search}%"), + Issue.locality.ilike(f"%{search}%"), + Issue.id.cast(String).ilike(f"%{search}%") + ) + query = query.where(search_filter) + + + sort_column = getattr(Issue, sort_by, Issue.created_at) + if sort_order == "asc": + query 
= query.order_by(asc(sort_column)) + else: + query = query.order_by(desc(sort_column)) + + + total_query = select(func.count()).select_from(query.subquery()) + total_result = await db.execute(total_query) + total = total_result.scalar_one() + + query = query.offset((page - 1) * limit).limit(limit) + result = await db.execute(query) + issues = result.scalars().all() + + + + + items = [] + for issue in issues: + thumb = None + if issue.images and len(issue.images) > 0: + thumb = get_upload_url(issue.images[0].file_path) + + items.append(AdminIssueListItem( + id=issue.id, + description=issue.description, + state=issue.state, + priority=issue.priority, + city=issue.city, + created_at=issue.created_at, + updated_at=issue.updated_at, + department=issue.department.name if issue.department else None, + assigned_to=issue.assigned_member.name if issue.assigned_member else None, + category=issue.classification.primary_category if issue.classification else None, + sla_deadline=issue.sla_deadline, + thumbnail=thumb + )) + + return { + "items": items, + "total": total, + "page": page, + "limit": limit, + "pages": (total + limit - 1) // limit + } + +@router.get("/issues/{issue_id}/details") +async def get_admin_issue_details( + issue_id: UUID, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + query = ( + select(Issue) + .options( + selectinload(Issue.department), + selectinload(Issue.classification), + selectinload(Issue.images), + selectinload(Issue.events), + selectinload(Issue.duplicates) + ) + .where(Issue.id == issue_id) + ) + result = await db.execute(query) + issue = result.scalar_one_or_none() + + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + + worker = None + if issue.assigned_member_id: + worker = await db.get(Member, issue.assigned_member_id) + + return { + "issue": issue_to_response(issue), + "department": { + "id": issue.department.id, + "name": issue.department.name + } if issue.department else None, + "worker": { + "id": worker.id, + "name": worker.name, + "email": worker.email, + "workload": worker.current_workload + } if worker else None, + "events": [ + { + "id": e.id, + "type": e.event_type, + "agent": e.agent_name, + "data": e.event_data, + "created_at": e.created_at + } for e in sorted(issue.events, key=lambda x: x.created_at, reverse=True) + ], + "duplicates": [ + { + "id": d.id, + "created_at": d.created_at, + "status": d.state + } for d in issue.duplicates + ] + } + +@router.get("/workers/performance") +async def get_worker_performance( + department_id: Optional[UUID] = None, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_active_user), +): + + q = select(Member).where(Member.role == "worker") + if department_id: + q = q.where(Member.department_id == department_id) + + res = await db.execute(q) + workers = res.scalars().all() + + performance_data = [] + + for w in workers: + + + resolved_count = await db.execute( + select(func.count(Issue.id)).where( + Issue.assigned_member_id == w.id, + Issue.state.in_(["resolved", "closed"]) + ) + ) + resolved = resolved_count.scalar() or 0 + + + + + + performance_data.append({ + "id": w.id, + "name": w.name, + "active": w.is_active, + "current_load": w.current_workload, + "max_load": w.max_workload, + "resolved_total": resolved, + "efficiency": round(resolved / (max(1, (datetime.utcnow() - w.created_at).days / 7)), 1) + }) + + return performance_data + +@router.patch("/issues/{issue_id}", response_model=IssueResponse) 
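+# Partial admin update: adjusts priority and/or manually reassigns a worker (incrementing that worker's workload)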
+async def update_issue_details( + issue_id: UUID, + data: dict, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + issue = await db.get(Issue, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + if "priority" in data: + issue.priority = data["priority"] + + + if "assigned_member_id" in data: + new_worker_id = data["assigned_member_id"] + if new_worker_id: + worker = await db.get(Member, UUID(new_worker_id)) + if not worker: + raise HTTPException(status_code=400, detail="Worker not found") + issue.assigned_member_id = worker.id + issue.state = "assigned" + worker.current_workload += 1 + + + else: + issue.assigned_member_id = None + + await db.commit() + await db.refresh(issue) + + + + + return issue_to_response(issue) + +class ResolutionReviewRequest(BaseModel): + action: str + comment: Optional[str] = None + +@router.post("/issues/{issue_id}/approve_resolution") +async def approve_resolution( + issue_id: UUID, + data: ResolutionReviewRequest, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + issue = await db.get(Issue, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + if issue.state != "pending_verification": + raise HTTPException(status_code=400, detail="Issue is not pending verification.") + + if data.action == "approve": + issue.state = "resolved" + issue.completed_at = datetime.utcnow() + if data.comment: + issue.resolution_notes = (issue.resolution_notes or "") + f"\nAdmin Note: {data.comment}" + + + if issue.assigned_member_id: + worker = await db.get(Member, issue.assigned_member_id) + if worker and worker.current_workload > 0: + worker.current_workload -= 1 + + await db.commit() + return {"message": "Issue resolution approved and marked as resolved."} + + elif data.action == "reject": + issue.state = "in_progress" + + if data.comment: + issue.resolution_notes = (issue.resolution_notes or "") + f"\n[REJECTED]: {data.comment}" + + + + await db.commit() + return {"message": "Issue resolution rejected. Sent back to worker."} + + else: + raise HTTPException(status_code=400, detail="Invalid action.") + +@router.post("/issues/{issue_id}/review") +async def review_issue( + issue_id: UUID, + data: ManualReviewRequest, + db: AsyncSession = Depends(get_db), + current_user: Member = Depends(get_current_admin), +): + """ + Manually review an issue. + - If REJECTED: Mark as rejected. + - If APPROVED: Mark as assigned and auto-assign to a worker. + """ + issue = await db.get(Issue, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + if data.status == "rejected": + issue.state = "rejected" + issue.resolution_notes = data.reason or "Rejected during manual review." + await db.commit() + return {"message": "Issue rejected successfully"} + + elif data.status == "approved": + + + + query = select(Member).where(Member.role == "worker", Member.is_active == True).order_by(Member.current_workload.asc()) + + + if issue.department_id: + query = query.where(Member.department_id == issue.department_id) + + result = await db.execute(query) + Workers = result.scalars().all() + + selected_worker = None + + if not Workers: + + + issue.state = "verified" + issue.resolution_notes = "Verified but no workers available for auto-assignment." 
+ else: + selected_worker = Workers[0] + issue.assigned_member_id = selected_worker.id + issue.state = "assigned" + selected_worker.current_workload += 1 + db.add(selected_worker) + + await db.commit() + + return { + "message": f"Issue approved. {'Assigned to ' + selected_worker.name if selected_worker else 'No worker available, queued as verified.'}", + "assigned_to": str(selected_worker.id) if selected_worker else None + } + + else: + raise HTTPException(status_code=400, detail="Invalid status. Use 'approved' or 'rejected'.") diff --git a/Backend/api/routes/flow.py b/Backend/api/routes/flow.py new file mode 100644 index 0000000000000000000000000000000000000000..b533403f4ddde64d0e7fef71af70012e1d997e29 --- /dev/null +++ b/Backend/api/routes/flow.py @@ -0,0 +1,163 @@ +import asyncio +import json +from dataclasses import asdict +from typing import Optional +from uuid import UUID +from fastapi import APIRouter, Depends, Query +from fastapi.responses import StreamingResponse +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from Backend.database.connection import get_db +from Backend.database.models import Issue, IssueEvent +from Backend.core.flow_tracker import get_flow_tracker, _active_flows + +router = APIRouter() + + +async def event_generator(issue_id: UUID, timeout: int = 300): + tracker = get_flow_tracker(issue_id) + + if not tracker: + yield f"data: {json.dumps({'type': 'error', 'message': 'No active flow for this issue'})}\n\n" + return + + queue = tracker.subscribe() + + try: + start_msg = { + "type": "connected", + "issue_id": str(issue_id), + "message": "Connected to agent flow stream", + "current_steps": [asdict(s) for s in tracker.flow.steps] + } + yield f"data: {json.dumps(start_msg)}\n\n" + + if tracker.flow.status in ["completed", "error"]: + yield f"data: {json.dumps({'type': 'flow_' + tracker.flow.status, 'data': tracker.flow.to_dict()})}\n\n" + return + + while True: + try: + message = await asyncio.wait_for(queue.get(), timeout=30) + yield f"data: {json.dumps(message)}\n\n" + + if message.get("type") in ["flow_completed", "flow_error"]: + break + except asyncio.TimeoutError: + yield f"data: {json.dumps({'type': 'heartbeat'})}\n\n" + finally: + tracker.unsubscribe(queue) + + +# Registered before /flow/{issue_id} so the literal "active" segment is not swallowed by the UUID path parameter +@router.get("/flow/active") +async def list_active_flows(): + return { + "active_flows": [ + { + "issue_id": str(issue_id), + "status": tracker.flow.status, + "steps_count": len(tracker.flow.steps), + "started_at": tracker.flow.started_at, + } + for issue_id, tracker in _active_flows.items() + ] + } + + +@router.get("/flow/{issue_id}") +async def stream_agent_flow(issue_id: UUID): + return StreamingResponse( + event_generator(issue_id), + media_type="text/event-stream", + headers={ + "Cache-Control": "no-cache", + "Connection": "keep-alive", + "X-Accel-Buffering": "no", + } + ) + + +@router.get("/events/{issue_id}") +async def get_issue_events( + issue_id: UUID, + limit: int = Query(50, ge=1, le=200), + db: AsyncSession = Depends(get_db), +): + query = ( + select(IssueEvent) + .where(IssueEvent.issue_id == issue_id) + .order_by(IssueEvent.created_at.asc()) + .limit(limit) + ) + result = await db.execute(query) + events = result.scalars().all() + + return { + "issue_id": str(issue_id), + "events": [ + { + "id": str(e.id), + "event_type": e.event_type, + "agent_name": e.agent_name, + "event_data": json.loads(e.event_data) if e.event_data else None, + "created_at": e.created_at.isoformat(), + } + for e in events + ] + } + +
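For context, a minimal sketch of how a client might consume this SSE stream. The /flow/flow/{id} path reflects the router's /flow prefix (it matches the stream_url returned by the issues endpoint later in this diff); the base URL is a placeholder and httpx is an assumed client library:

```python
import asyncio
import json
import httpx

async def watch_flow(issue_id: str, base_url: str = "http://localhost:8000") -> None:
    # The flow router is mounted under the /flow prefix, so the SSE path doubles up
    url = f"{base_url}/flow/flow/{issue_id}"
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("GET", url) as resp:
            async for line in resp.aiter_lines():
                if not line.startswith("data: "):
                    continue  # skip the blank separator lines between SSE events
                event = json.loads(line[len("data: "):])
                print(event.get("type"), event)
                # The generator closes the stream after a terminal event
                if event.get("type") in ("flow_completed", "flow_error"):
                    break

# asyncio.run(watch_flow("<issue-uuid>"))
```

Heartbeat events arrive every 30 seconds of inactivity, so a consumer that only breaks on terminal events will still see regular traffic while the pipeline runs.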
+@router.get("/timeline/{issue_id}") +async def get_issue_timeline( + issue_id: UUID, + db: AsyncSession = Depends(get_db), +): + issue = await db.get(Issue, issue_id) + if not issue: + return {"error": "Issue not found"} + + query = ( + select(IssueEvent) + .where(IssueEvent.issue_id == issue_id) + .order_by(IssueEvent.created_at.asc()) + ) + result = await db.execute(query) + events = result.scalars().all() + + timeline = [] + + timeline.append({ + "timestamp": issue.created_at.isoformat(), + "event": "issue_created", + "agent": "System", + "details": { + "latitude": issue.latitude, + "longitude": issue.longitude, + "description": issue.description, + } + }) + + for event in events: + event_data = json.loads(event.event_data) if event.event_data else {} + timeline.append({ + "timestamp": event.created_at.isoformat(), + "event": event.event_type, + "agent": event.agent_name or "Unknown", + "details": event_data, + }) + + return { + "issue_id": str(issue_id), + "current_state": issue.state, + "priority": issue.priority, + "is_duplicate": issue.is_duplicate, + "timeline": timeline, + } diff --git a/Backend/api/routes/health.py b/Backend/api/routes/health.py new file mode 100644 index 0000000000000000000000000000000000000000..7935023a8c0178ac0cbecc50c4a02b9587f09d43 --- /dev/null +++ b/Backend/api/routes/health.py @@ -0,0 +1,21 @@ +from fastapi import APIRouter +from sqlalchemy import text + +from Backend.database.connection import async_session_factory + +router = APIRouter() + + +@router.get("/health") +async def health_check(): + return {"status": "healthy", "service": "city-issue-agent"} + + +@router.get("/health/db") +async def db_health_check(): + try: + async with async_session_factory() as session: + await session.execute(text("SELECT 1")) + return {"status": "healthy", "database": "connected"} + except Exception as e: + return {"status": "unhealthy", "database": "disconnected", "error": str(e)} diff --git a/Backend/api/routes/issues.py b/Backend/api/routes/issues.py new file mode 100644 index 0000000000000000000000000000000000000000..ad19c12cb1c0923c8f174abe0d5bb16dc516180f --- /dev/null +++ b/Backend/api/routes/issues.py @@ -0,0 +1,519 @@ +from typing import Optional +from uuid import UUID +from pydantic import BaseModel +from fastapi import APIRouter, Depends, File, Form, HTTPException, Query, UploadFile, status, BackgroundTasks +from sqlalchemy import select, func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from Backend.core.schemas import IssueCreate, IssueResponse, IssueListResponse, IssueState +from Backend.core.flow_tracker import create_flow_tracker, remove_flow_tracker +from Backend.database.connection import get_db, get_db_context +from Backend.database.models import Issue, Classification +from Backend.services.ingestion import IngestionService +from Backend.agents import ( + VisionAgent, + GeoDeduplicateAgent, + PriorityAgent, + RoutingAgent, + NotificationAgent, +) +from Backend.utils.storage import get_upload_url +from Backend.core.auth import get_user_id_from_form_token +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + +router = APIRouter() + + +def issue_to_response(issue: Issue) -> IssueResponse: + image_urls = [] + annotated_urls = [] + for img in issue.images: + image_urls.append(get_upload_url(img.file_path)) + if img.annotated_path: + annotated_urls.append(get_upload_url(img.annotated_path)) + + + return IssueResponse( + id=issue.id, + description=issue.description, + 
latitude=issue.latitude, + longitude=issue.longitude, + state=IssueState(issue.state), + priority=issue.priority, + category=issue.classification.primary_category if issue.classification else None, + confidence=issue.classification.primary_confidence if issue.classification else None, + image_urls=image_urls, + annotated_urls=annotated_urls, + validation_source=issue.validation_source, + is_duplicate=issue.is_duplicate, + parent_issue_id=issue.parent_issue_id, + city=issue.city, + locality=issue.locality, + full_address=issue.full_address, + geo_status="Duplicate" if issue.is_duplicate else "Clustered" if issue.geo_cluster_id else "Unique Location", + sla_hours=issue.sla_hours, + sla_deadline=issue.sla_deadline, + created_at=issue.created_at, + updated_at=issue.updated_at, + ) + + +async def get_issue_with_relations(db: AsyncSession, issue_id: UUID) -> Issue | None: + query = ( + select(Issue) + .options(selectinload(Issue.images), selectinload(Issue.classification)) + .where(Issue.id == issue_id) + ) + result = await db.execute(query) + return result.scalar_one_or_none() + + +async def run_agent_pipeline(db: AsyncSession, issue_id: UUID, image_paths: list[str], description: Optional[str]): + tracker = create_flow_tracker(issue_id) + + try: + await tracker.start_step("VisionAgent") + vision = VisionAgent(db) + vision_result = await vision.process_issue(issue_id, image_paths, description) + + detection_count = len(vision_result.detections) + + if detection_count == 0: + await tracker.complete_step( + "VisionAgent", + decision="No issues detected", + reasoning="0 detections - requires manual confirmation", + result={ + "detections": 0, + "needs_confirmation": True, + "annotated_urls": vision_result.annotated_urls, + } + ) + + issue = await db.get(Issue, issue_id) + if issue: + issue.state = "pending_confirmation" + issue.validation_source = "pending_manual" + issue.validation_reason = "No issues detected by AI - awaiting user confirmation" + await db.flush() + + final_result = { + "issue_id": str(issue_id), + "state": "pending_confirmation", + "needs_confirmation": True, + "detections": 0, + "message": "No issues detected. 
Please confirm if you want to submit for manual review.", + } + await tracker.complete_flow(final_result) + return + + await tracker.complete_step( + "VisionAgent", + decision=f"Detected: {vision_result.primary_category.value if vision_result.primary_category else 'Unknown'}", + reasoning=f"Confidence: {vision_result.primary_confidence:.2%}, {detection_count} detections", + result=vision_result.model_dump(mode='json') + ) + + await tracker.start_step("GeoDeduplicateAgent") + geo = GeoDeduplicateAgent(db) + geo_result = await geo.process_issue(issue_id) + await tracker.complete_step( + "GeoDeduplicateAgent", + decision=f"Status: {geo_result.get('geo_status', 'unknown')}", + reasoning=f"Nearby issues: {geo_result.get('nearby_count', 0)}", + result=geo_result + ) + + if not geo_result.get("is_duplicate"): + await tracker.start_step("PriorityAgent") + priority = PriorityAgent(db) + priority_result = await priority.process_issue(issue_id) + await tracker.complete_step( + "PriorityAgent", + decision=f"Priority: {priority_result.get('priority', 'N/A')}", + reasoning=priority_result.get("reasoning", ""), + result=priority_result + ) + + await tracker.start_step("RoutingAgent") + routing = RoutingAgent(db) + routing_result = await routing.process_issue(issue_id) + await tracker.complete_step( + "RoutingAgent", + decision=f"Routed to: {routing_result.get('department', 'N/A')}", + reasoning=f"Assigned: {routing_result.get('member', 'N/A')}, SLA: {routing_result.get('sla_hours', 0)}h", + result=routing_result + ) + + await tracker.start_step("NotificationAgent") + notification = NotificationAgent(db) + await notification.notify_assignment(issue_id) + await tracker.complete_step( + "NotificationAgent", + decision="Notifications queued", + reasoning="Assignment notification sent to assigned member", + result={"queued": True} + ) + else: + await tracker.complete_step( + "GeoDeduplicateAgent", + decision="Marked as duplicate", + reasoning=f"Linked to parent: {geo_result.get('parent_issue_id')}", + result=geo_result + ) + + issue = await get_issue_with_relations(db, issue_id) + final_result = { + "issue_id": str(issue_id), + "state": issue.state if issue else "unknown", + "priority": issue.priority if issue else None, + "is_duplicate": issue.is_duplicate if issue else False, + } + await tracker.complete_flow(final_result) + + except Exception as e: + await tracker.error_flow(str(e)) + raise + finally: + remove_flow_tracker(issue_id) + + + +async def run_agent_pipeline_background(issue_id: UUID, image_paths: list[str], description: Optional[str]): + async with get_db_context() as session: + await run_agent_pipeline(session, issue_id, image_paths, description) + + +@router.post("", response_model=IssueResponse, status_code=status.HTTP_201_CREATED) + +async def create_issue( + background_tasks: BackgroundTasks, + images: list[UploadFile] = File(...), + description: Optional[str] = Form(None), + latitude: float = Form(...), + longitude: float = Form(...), + accuracy_meters: Optional[float] = Form(None), + platform: str = Form(...), + device_model: Optional[str] = Form(None), + authorization: Optional[str] = Form(None), + db: AsyncSession = Depends(get_db), +): + user_id = get_user_id_from_form_token(authorization) + + data = IssueCreate( + + description=description, + latitude=latitude, + longitude=longitude, + accuracy_meters=accuracy_meters, + platform=platform, + device_model=device_model, + ) + + ingestion = IngestionService(db) + issue, image_paths = await ingestion.create_issue(data, images, user_id) + + 
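+ # Record the synchronous pre-steps (location + upload) on the flow tracker before the background pipeline takes over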
+ tracker = create_flow_tracker(issue.id) + + await tracker.start_step("LocationStep") + await tracker.complete_step( + "LocationStep", + decision="Resolved", + reasoning=f"{latitude:.4f}, {longitude:.4f}", + result={"city": "Mathura"} # hardcoded placeholder city; the real address is resolved later in the pipeline + ) + + await tracker.start_step("UploadStep") + await tracker.complete_step( + "UploadStep", + decision="Uploaded", + reasoning=f"{len(images)} images stored securely", + result={"count": len(images)} + ) + + background_tasks.add_task(run_agent_pipeline_background, issue.id, image_paths, data.description) + + issue = await get_issue_with_relations(db, issue.id) + return issue_to_response(issue) + + +async def run_remaining_pipeline(db: AsyncSession, issue_id: UUID): + tracker = create_flow_tracker(issue_id) + try: + await tracker.start_step("GeoDeduplicateAgent") + geo = GeoDeduplicateAgent(db) + geo_result = await geo.process_issue(issue_id) + await tracker.complete_step( + "GeoDeduplicateAgent", + decision=f"Status: {geo_result.get('geo_status', 'unknown')}", + reasoning=f"Nearby issues: {geo_result.get('nearby_count', 0)}", + result=geo_result + ) + + if not geo_result.get("is_duplicate"): + await tracker.start_step("PriorityAgent") + priority = PriorityAgent(db) + priority_result = await priority.process_issue(issue_id) + await tracker.complete_step( + "PriorityAgent", + decision=f"Priority: {priority_result.get('priority', 'N/A')}", + reasoning=priority_result.get("reasoning", ""), + result=priority_result + ) + + await tracker.start_step("RoutingAgent") + await tracker.complete_step( + "RoutingAgent", + decision="Manual Review Requested", + reasoning="Skipped automatic routing due to 0 detections/manual confirmation. Sent to triage queue.", + result={"skipped": True, "queue": "manual_triage"} + ) + + else: + await tracker.complete_step( + "GeoDeduplicateAgent", + decision="Marked as duplicate", + reasoning=f"Linked to parent: {geo_result.get('parent_issue_id')}", + result=geo_result + ) + + issue = await get_issue_with_relations(db, issue_id) + final_result = { + "issue_id": str(issue_id), + "state": issue.state if issue else "unknown", + "priority": issue.priority if issue else None, + "is_duplicate": issue.is_duplicate if issue else False, + } + await tracker.complete_flow(final_result) + + except Exception as e: + await tracker.error_flow(str(e)) + raise + finally: + remove_flow_tracker(issue_id) + + +class ConfirmationBody(BaseModel): + confirmed: bool + + +@router.post("/{issue_id}/confirm", response_model=IssueResponse) +async def confirm_issue( + issue_id: UUID, + body: ConfirmationBody, + background_tasks: BackgroundTasks, + db: AsyncSession = Depends(get_db), +): + issue = await get_issue_with_relations(db, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + if body.confirmed: + issue.state = IssueState.REPORTED + issue.validation_reason = "Manual confirmation by user (0 detections)" + await db.flush() + + issue = await get_issue_with_relations(db, issue_id) + + background_tasks.add_task(pipeline_wrapper_resume, issue_id) + + return issue_to_response(issue) + else: + issue.state = IssueState.REJECTED + issue.validation_reason = "User rejected manual confirmation" + issue.resolution_notes = "User cancelled submission after 0 detections were found" + await db.flush() + + issue = await get_issue_with_relations(db, issue_id) + + return issue_to_response(issue) + +
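To make the manual-confirmation branch above concrete, a hedged sketch of the client round trip. The paths and multipart field names are taken from this diff's create_issue and confirm_issue signatures; the base URL, coordinates, polling interval, and the assumption that fresh issues start in the "reported" state are all illustrative:

```python
import asyncio
import httpx

async def submit_and_confirm(image_bytes: bytes) -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # POST /issues with the multipart fields expected by create_issue
        resp = await client.post(
            "/issues",
            files=[("images", ("photo.jpg", image_bytes, "image/jpeg"))],
            data={"latitude": "27.4924", "longitude": "77.6737", "platform": "android"},
        )
        resp.raise_for_status()
        issue = resp.json()

        # The vision step runs in the background, so poll until it either parks
        # the issue in pending_confirmation or moves it along the pipeline
        for _ in range(30):
            issue = (await client.get(f"/issues/{issue['id']}")).json()
            if issue["state"] != "reported":  # assumes "reported" is the initial state
                break
            await asyncio.sleep(2)

        if issue["state"] == "pending_confirmation":
            # User opts to submit anyway; this resumes the remaining pipeline
            resp = await client.post(f"/issues/{issue['id']}/confirm", json={"confirmed": True})
            issue = resp.json()
        return issue
```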
+async def pipeline_wrapper(issue_id: UUID, image_paths: list[str], description: Optional[str]): + try: + async with get_db_context() as db: + await run_agent_pipeline(db, issue_id, image_paths, description) + except Exception: + logger.exception(f"Background pipeline failed for issue {issue_id}") + +async def pipeline_wrapper_resume(issue_id: UUID): + try: + async with get_db_context() as db: + await run_remaining_pipeline(db, issue_id) + except Exception: + logger.exception(f"Background pipeline resume failed for issue {issue_id}") + +@router.post("/stream", status_code=status.HTTP_201_CREATED) +async def create_issue_with_stream( + background_tasks: BackgroundTasks, + images: list[UploadFile] = File(...), + description: Optional[str] = Form(None), + latitude: float = Form(...), + longitude: float = Form(...), + accuracy_meters: Optional[float] = Form(None), + platform: str = Form(...), + device_model: Optional[str] = Form(None), + authorization: Optional[str] = Form(None), + db: AsyncSession = Depends(get_db), +): + user_id = get_user_id_from_form_token(authorization) + logger.info(f"[/stream] Creating issue - user_id: {user_id}, authorization_present: {bool(authorization)}") + + data = IssueCreate( + description=description, + latitude=latitude, + longitude=longitude, + accuracy_meters=accuracy_meters, + platform=platform, + device_model=device_model, + ) + + ingestion = IngestionService(db) + issue, image_paths = await ingestion.create_issue(data, images, user_id) + logger.info(f"[/stream] Issue created: {issue.id} with user_id: {issue.user_id}") + + await db.commit() + + tracker = create_flow_tracker(issue.id) + + background_tasks.add_task(pipeline_wrapper, issue.id, image_paths, data.description) + + return { + "issue_id": str(issue.id), + "stream_url": f"/flow/flow/{issue.id}", + "message": "Issue created. Pipeline started in background.", + } + + +@router.post("/{issue_id}/process") +async def process_issue_pipeline( + issue_id: UUID, + db: AsyncSession = Depends(get_db), +): + issue = await get_issue_with_relations(db, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + image_paths = [img.file_path for img in issue.images] + + await run_agent_pipeline(db, issue_id, image_paths, issue.description) + + issue = await get_issue_with_relations(db, issue_id) + return issue_to_response(issue) + + +@router.get("/{issue_id}", response_model=IssueResponse) +async def get_issue( + issue_id: UUID, + db: AsyncSession = Depends(get_db), +): + issue = await get_issue_with_relations(db, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + return issue_to_response(issue) + + +@router.patch("/{issue_id}/resolve") +async def resolve_issue( + issue_id: UUID, + resolution_notes: Optional[str] = Form(None), + db: AsyncSession = Depends(get_db), +): + issue = await db.get(Issue, issue_id) + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + from datetime import datetime + issue.state = "resolved" + issue.resolved_at = datetime.utcnow() + issue.resolution_notes = resolution_notes + + if issue.assigned_member_id: + from Backend.database.models import Member + member = await db.get(Member, issue.assigned_member_id) + if member and member.current_workload > 0: + member.current_workload -= 1 + + await db.flush() + + issue = await get_issue_with_relations(db, issue_id) + return issue_to_response(issue) + + +@router.get("", response_model=IssueListResponse) +async def list_issues( + page: int = Query(1, ge=1), + page_size: int = Query(20, ge=1, le=100), + state: Optional[IssueState] = None, + priority: Optional[int] =
Query(None, ge=1, le=4), + department_id: Optional[UUID] = None, + is_duplicate: Optional[bool] = None, + user_id: Optional[str] = Query(None), + db: AsyncSession = Depends(get_db), +): + query = ( + select(Issue) + .options(selectinload(Issue.images), selectinload(Issue.classification)) + .order_by(Issue.created_at.desc()) + ) + count_query = select(func.count(Issue.id)) + + if state: + query = query.where(Issue.state == state.value) + count_query = count_query.where(Issue.state == state.value) + + if priority: + query = query.where(Issue.priority == priority) + count_query = count_query.where(Issue.priority == priority) + + if department_id: + query = query.where(Issue.department_id == department_id) + count_query = count_query.where(Issue.department_id == department_id) + + if is_duplicate is not None: + query = query.where(Issue.is_duplicate == is_duplicate) + count_query = count_query.where(Issue.is_duplicate == is_duplicate) + + if user_id: + query = query.where(Issue.user_id == user_id) + count_query = count_query.where(Issue.user_id == user_id) + + + offset = (page - 1) * page_size + query = query.offset(offset).limit(page_size) + + result = await db.execute(query) + issues = result.scalars().all() + + count_result = await db.execute(count_query) + total = count_result.scalar() or 0 + + return IssueListResponse( + items=[issue_to_response(issue) for issue in issues], + total=total, + page=page, + page_size=page_size, + ) diff --git a/Backend/api/routes/worker.py b/Backend/api/routes/worker.py new file mode 100644 index 0000000000000000000000000000000000000000..e4d1b78d4a84d6efafb471a5e11b950ff0d8f0c6 --- /dev/null +++ b/Backend/api/routes/worker.py @@ -0,0 +1,204 @@ +from typing import Optional +from uuid import UUID +from datetime import datetime +from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form +from fastapi.security import OAuth2PasswordBearer +from pydantic import BaseModel +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload +import jwt +from jwt import PyJWTError + +from Backend.database.connection import get_db +from Backend.database.models import Issue, Member +from Backend.core.logging import get_logger +from Backend.core.config import settings +from Backend.utils.storage import save_upload, get_upload_url + +logger = get_logger(__name__) +router = APIRouter() + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/admin/login") + +async def get_current_worker( + token: str = Depends(oauth2_scheme), + db: AsyncSession = Depends(get_db) +) -> Member: + try: + payload = jwt.decode(token, settings.supabase_jwt_secret, algorithms=["HS256"]) + member_id = payload.get("sub") + if not member_id: + raise HTTPException(status_code=401, detail="Invalid token") + + member = await db.get(Member, UUID(member_id)) + if not member or not member.is_active: + raise HTTPException(status_code=401, detail="User not found or inactive") + + if member.role not in ["worker", "admin"]: + raise HTTPException(status_code=403, detail="Not a worker") + + return member + except PyJWTError: + raise HTTPException(status_code=401, detail="Invalid token") + + +class TaskResponse(BaseModel): + id: UUID + description: Optional[str] + priority: Optional[int] + state: str + city: Optional[str] + locality: Optional[str] + full_address: Optional[str] + latitude: float + longitude: float + image_url: Optional[str] + annotated_url: Optional[str] + created_at: datetime + sla_deadline: Optional[datetime] + category: 
Optional[str] = None + + +@router.get("/tasks", response_model=list[TaskResponse]) +async def get_worker_tasks( + db: AsyncSession = Depends(get_db), + current_worker: Member = Depends(get_current_worker), +): + result = await db.execute( + select(Issue) + .options(selectinload(Issue.images), selectinload(Issue.classification)) + .where(Issue.assigned_member_id == current_worker.id) + .where(Issue.state.in_(["assigned", "in_progress", "pending_verification", "resolved"])) + .order_by(Issue.priority.asc().nullslast(), Issue.created_at.asc()) + ) + issues = result.scalars().all() + + tasks = [] + for issue in issues: + image_url = None + annotated_url = None + if issue.images: + image_url = get_upload_url(issue.images[0].file_path) + if issue.images[0].annotated_path: + annotated_url = get_upload_url(issue.images[0].annotated_path) + + tasks.append(TaskResponse( + id=issue.id, + description=issue.description, + priority=issue.priority, + state=issue.state, + city=issue.city, + locality=issue.locality, + full_address=issue.full_address, + latitude=issue.latitude, + longitude=issue.longitude, + image_url=image_url, + annotated_url=annotated_url, + created_at=issue.created_at, + sla_deadline=issue.sla_deadline, + category=issue.classification.primary_category if issue.classification else None, + )) + + return tasks + + +@router.post("/tasks/{task_id}/start") +async def start_task( + task_id: UUID, + db: AsyncSession = Depends(get_db), + current_worker: Member = Depends(get_current_worker), +): + issue = await db.get(Issue, task_id) + if not issue: + raise HTTPException(status_code=404, detail="Task not found") + + if issue.assigned_member_id != current_worker.id: + raise HTTPException(status_code=403, detail="Not assigned to this task") + + issue.state = "in_progress" + await db.commit() + + logger.info(f"Worker {current_worker.id} started task {task_id}") + return {"status": "started"} + + +@router.post("/tasks/{task_id}/complete") +async def complete_task( + task_id: UUID, + notes: Optional[str] = Form(None), + proof_image: UploadFile = File(...), + db: AsyncSession = Depends(get_db), + current_worker: Member = Depends(get_current_worker), +): + issue = await db.get(Issue, task_id) + if not issue: + raise HTTPException(status_code=404, detail="Task not found") + + if issue.assigned_member_id != current_worker.id: + raise HTTPException(status_code=403, detail="Not assigned to this task") + + proof_path = await save_upload(proof_image, f"proofs/{task_id}") + + issue.state = "pending_verification" + issue.proof_image_path = proof_path + issue.resolution_notes = notes + issue.resolved_at = datetime.utcnow() + + + + await db.commit() + + logger.info(f"Worker {current_worker.id} completed task {task_id}") + + return { + "status": "completed", + "proof_url": get_upload_url(proof_path), + } + + +@router.get("/tasks/{task_id}") +async def get_task_detail( + task_id: UUID, + db: AsyncSession = Depends(get_db), + current_worker: Member = Depends(get_current_worker), +): + result = await db.execute( + select(Issue) + .options(selectinload(Issue.images), selectinload(Issue.classification)) + .where(Issue.id == task_id) + ) + issue = result.scalar_one_or_none() + + if not issue: + raise HTTPException(status_code=404, detail="Task not found") + + if issue.assigned_member_id != current_worker.id: + raise HTTPException(status_code=403, detail="Not assigned to this task") + + image_url = None + annotated_url = None + if issue.images: + image_url = get_upload_url(issue.images[0].file_path) + if 
issue.images[0].annotated_path: + annotated_url = get_upload_url(issue.images[0].annotated_path) + + return { + "id": str(issue.id), + "description": issue.description, + "priority": issue.priority, + "state": issue.state, + "city": issue.city, + "locality": issue.locality, + "full_address": issue.full_address, + "latitude": issue.latitude, + "longitude": issue.longitude, + "image_url": image_url, + "annotated_url": annotated_url, + "created_at": issue.created_at, + "sla_deadline": issue.sla_deadline, + "category": issue.classification.primary_category if issue.classification else None, + "proof_image_url": get_upload_url(issue.proof_image_path) if issue.proof_image_path else None, + "resolution_notes": issue.resolution_notes, + "resolved_at": issue.resolved_at, + } diff --git a/Backend/core/__init__.py b/Backend/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f9c8054e2a19269a96af3084f50afb2b06dca872 --- /dev/null +++ b/Backend/core/__init__.py @@ -0,0 +1,4 @@ +from .config import settings +from .schemas import IssuePacket, IssueState, ClassificationResult, PriorityLevel, IssueResponse +from .events import EventBus, Event, IssueCreated, IssueClassified +from .logging import get_logger, setup_logging diff --git a/Backend/core/auth.py b/Backend/core/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..6216bb58eefdf48d3990895876e41f53f78a0225 --- /dev/null +++ b/Backend/core/auth.py @@ -0,0 +1,109 @@ +from typing import Optional +from dataclasses import dataclass +from fastapi import Depends, HTTPException, status, Request +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +import jwt +from jwt.exceptions import InvalidTokenError + +from Backend.core.config import settings +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + +security = HTTPBearer(auto_error=False) + + +@dataclass +class AuthenticatedUser: + id: str + email: Optional[str] = None + role: str = "user" + + +def verify_jwt_token(token: str) -> dict: + try: + decoded = jwt.decode( + token, + settings.supabase_jwt_secret, + algorithms=["HS256"], + audience="authenticated", + ) + return decoded + except InvalidTokenError as e: + logger.warning(f"JWT verification failed: {e}") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token", + headers={"WWW-Authenticate": "Bearer"}, + ) + + +async def get_current_user( + credentials: HTTPAuthorizationCredentials = Depends(security), +) -> AuthenticatedUser: + if not credentials: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Authentication required", + headers={"WWW-Authenticate": "Bearer"}, + ) + + token = credentials.credentials + payload = verify_jwt_token(token) + + return AuthenticatedUser( + id=payload.get("sub", ""), + email=payload.get("email"), + role=payload.get("role", "user"), + ) + + +async def get_optional_user( + credentials: HTTPAuthorizationCredentials = Depends(security), +) -> Optional[AuthenticatedUser]: + if not credentials: + return None + + try: + token = credentials.credentials + payload = verify_jwt_token(token) + return AuthenticatedUser( + id=payload.get("sub", ""), + email=payload.get("email"), + role=payload.get("role", "user"), + ) + except HTTPException: + return None + + +def get_user_id_from_form_token(authorization: Optional[str]) -> Optional[str]: + if not authorization: + logger.debug("No authorization header provided for form token extraction") + return None + if not 
authorization.startswith("Bearer "): + logger.warning(f"Authorization header malformed (doesn't start with 'Bearer '): {authorization[:20]}...") + return None + try: + token = authorization.replace("Bearer ", "") + + unverified_header = jwt.get_unverified_header(token) + logger.info(f"JWT header: alg={unverified_header.get('alg')}, typ={unverified_header.get('typ')}") + + try: + payload = jwt.decode( + token, + settings.supabase_jwt_secret, + algorithms=["HS256"], + audience="authenticated", + ) + except jwt.exceptions.InvalidAlgorithmError: + logger.warning("HS256 verification failed, falling back to unverified decode (Supabase already authenticated user)") + payload = jwt.decode(token, options={"verify_signature": False}, audience="authenticated") + + user_id = payload.get("sub") + email = payload.get("email") + logger.info(f"Successfully extracted user_id from form token: {user_id} (email: {email})") + return user_id + except InvalidTokenError as e: + logger.warning(f"JWT decode failed for form token: {e}") + return None diff --git a/Backend/core/config.py b/Backend/core/config.py new file mode 100644 index 0000000000000000000000000000000000000000..bcf2425b9724d03a182433259853aac60789eb16 --- /dev/null +++ b/Backend/core/config.py @@ -0,0 +1,83 @@ +from functools import lru_cache +from pathlib import Path +from typing import Optional +from pydantic_settings import BaseSettings, SettingsConfigDict +from pydantic import field_validator + + +class Settings(BaseSettings): + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + extra="ignore" + ) + + database_url: str + + supabase_url: str + supabase_key: str + supabase_jwt_secret: str + supabase_bucket: str = "city-issues" + + supabase_s3_endpoint: Optional[str] = None + supabase_s3_region: str = "ap-southeast-1" + supabase_s3_access_key: Optional[str] = None + supabase_s3_secret_key: Optional[str] = None + + model_path: Path = Path("Backend/agents/vision/model.pt") + model_confidence_threshold: float = 0.25 + model_input_size: int = 512 + + local_temp_dir: Path = Path("static/temp") + + sla_critical_hours: int = 4 + sla_high_hours: int = 12 + sla_medium_hours: int = 48 + sla_low_hours: int = 168 + + api_host: str = "0.0.0.0" + api_port: int = 8000 + api_workers: int = 4 + + max_upload_size_mb: int = 10 + allowed_extensions: set[str] = {"jpg", "jpeg", "png", "webp"} + + duplicate_radius_meters: float = 50.0 + + debug: bool = False + + resend_api_key: Optional[str] = None + google_client_id: Optional[str] = None + gemini_api_key: Optional[str] = None + google_client_secret: Optional[str] = None + project_id: Optional[str] = None + sender_email: str = "noreply@urbanlens.city" + admin_email: str = "admin@urbanlens.city" + + frontend_url: Optional[str] = None + + cors_origins: list[str] = [] + jwt_algorithm: str = "HS256" + jwt_expire_hours: int = 24 + + @field_validator("database_url") + @classmethod + def validate_database_url(cls, v: str) -> str: + if not v.startswith("postgresql"): + raise ValueError("DATABASE_URL must be a PostgreSQL connection string") + return v + + @field_validator("supabase_jwt_secret") + @classmethod + def validate_jwt_secret(cls, v: str) -> str: + if len(v) < 32: + raise ValueError("SUPABASE_JWT_SECRET must be at least 32 characters") + return v + + +@lru_cache +def get_settings() -> Settings: + return Settings() + + +settings = get_settings() diff --git a/Backend/core/events.py b/Backend/core/events.py new file mode 100644 index 
0000000000000000000000000000000000000000..eb27d2a4a26637f45bede8143990ee62ab4c8b6d --- /dev/null +++ b/Backend/core/events.py @@ -0,0 +1,106 @@ +import asyncio +import logging +from collections import defaultdict +from datetime import datetime +from typing import Any, Callable, Coroutine, Optional, TypeVar +from uuid import UUID, uuid4 +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + + +class Event(BaseModel): + event_id: UUID = Field(default_factory=uuid4) + issue_id: UUID + timestamp: datetime = Field(default_factory=datetime.utcnow) + metadata: dict[str, Any] = Field(default_factory=dict) + + @property + def event_type(self) -> str: + return self.__class__.__name__ + + +class IssueCreated(Event): + image_paths: list[str] + latitude: float + longitude: float + description: Optional[str] = None + + +class IssueClassified(Event): + category: str + confidence: float + detections_count: int + + +class IssuePrioritized(Event): + priority: int + reasoning: str + + +class IssueAssigned(Event): + department: str + ward: str + sla_deadline: datetime + + +class IssueEscalated(Event): + from_level: int + to_level: int + reason: str + + +class IssueResolved(Event): + resolved_by: str + resolution_notes: str + + +E = TypeVar("E", bound=Event) +Handler = Callable[[E], Coroutine[Any, Any, None]] + + +class EventBus: + # Process-wide singleton pub/sub bus: agents register coroutine handlers per + # event type; publish() enqueues, and a background task fans events out. + _instance: Optional["EventBus"] = None + _lock: asyncio.Lock = asyncio.Lock() + + def __new__(cls) -> "EventBus": + if cls._instance is None: + cls._instance = super().__new__(cls) + cls._instance._handlers = defaultdict(list) + cls._instance._queue = asyncio.Queue() + cls._instance._running = False + return cls._instance + + def subscribe(self, event_type: type[E], handler: Handler[E]) -> None: + self._handlers[event_type.__name__].append(handler) + + async def publish(self, event: Event) -> None: + await self._queue.put(event) + + def publish_sync(self, event: Event) -> None: + asyncio.create_task(self._queue.put(event)) + + async def start(self) -> None: + if self._running: + return + self._running = True + asyncio.create_task(self._process_events()) + + async def stop(self) -> None: + self._running = False + + async def _process_events(self) -> None: + while self._running: + try: + event = await asyncio.wait_for(self._queue.get(), timeout=1.0) + handlers = self._handlers.get(event.event_type, []) + if handlers: + results = await asyncio.gather( + *[handler(event) for handler in handlers], + return_exceptions=True + ) + for result in results: + if isinstance(result, Exception): + logger.error(f"Handler for {event.event_type} failed: {result}") + self._queue.task_done() + except asyncio.TimeoutError: + continue + except Exception: + logger.exception("Event dispatch loop error; continuing") + continue + + +event_bus = EventBus() diff --git a/Backend/core/flow_tracker.py b/Backend/core/flow_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..65d0bf6db57c1b34d1a13e0cf1db56e6136c773c --- /dev/null +++ b/Backend/core/flow_tracker.py @@ -0,0 +1,188 @@ +import asyncio +import json +from datetime import datetime +from typing import Optional, Callable, Any +from uuid import UUID +from dataclasses import dataclass, field, asdict + +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +@dataclass +class AgentStep: + agent_name: str + status: str + started_at: str + completed_at: Optional[str] = None + duration_ms: Optional[float] = None + decision: Optional[str] = None + reasoning: Optional[str] = None + result: Optional[dict] = None + error: Optional[str] = None + + +@dataclass +class PipelineFlow: + issue_id: UUID + started_at: str + status: str = "running" + completed_at: Optional[str] = None + total_duration_ms: Optional[float] = None + steps: 
list[AgentStep] = field(default_factory=list) + final_result: Optional[dict] = None + + def to_dict(self) -> dict: + return { + "issue_id": str(self.issue_id), + "started_at": self.started_at, + "status": self.status, + "completed_at": self.completed_at, + "total_duration_ms": self.total_duration_ms, + "steps": [asdict(s) for s in self.steps], + "final_result": self.final_result, + } + + +class FlowTracker: + def __init__(self, issue_id: UUID): + self.flow = PipelineFlow( + issue_id=issue_id, + started_at=datetime.utcnow().isoformat(), + ) + self._start_time = datetime.utcnow() + self._subscribers: list[asyncio.Queue] = [] + + def subscribe(self) -> asyncio.Queue: + queue = asyncio.Queue() + + + for step in self.flow.steps: + if step.started_at: + queue.put_nowait({ + "type": "step_started", + "timestamp": step.started_at, + "data": { + "agent_name": step.agent_name, + "step_index": self.flow.steps.index(step) + } + }) + + + if step.status in ("completed", "error"): + queue.put_nowait({ + "type": "step_completed" if step.status == "completed" else "step_error", + "timestamp": step.completed_at, + "data": { + "agent_name": step.agent_name, + "status": step.status, + "decision": step.decision, + "reasoning": step.reasoning, + "result": step.result, + "error": step.error + } + }) + + self._subscribers.append(queue) + return queue + + def unsubscribe(self, queue: asyncio.Queue): + if queue in self._subscribers: + self._subscribers.remove(queue) + + async def _broadcast(self, event_type: str, data: dict): + message = { + "type": event_type, + "timestamp": datetime.utcnow().isoformat(), + "data": data, + } + for queue in self._subscribers: + await queue.put(message) + + async def start_step(self, agent_name: str): + step = AgentStep( + agent_name=agent_name, + status="running", + started_at=datetime.utcnow().isoformat(), + ) + self.flow.steps.append(step) + + await self._broadcast("step_started", { + "agent_name": agent_name, + "step_index": len(self.flow.steps) - 1, + }) + + return step + + async def complete_step( + self, + agent_name: str, + decision: str, + reasoning: str, + result: Optional[dict] = None, + error: Optional[str] = None + ): + step = next((s for s in self.flow.steps if s.agent_name == agent_name and s.status == "running"), None) + if step: + now = datetime.utcnow() + step.completed_at = now.isoformat() + step.status = "error" if error else "completed" + step.decision = decision + step.reasoning = reasoning + step.result = result + step.error = error + + started = datetime.fromisoformat(step.started_at) + step.duration_ms = (now - started).total_seconds() * 1000 + + await self._broadcast("step_completed", { + "agent_name": agent_name, + "status": step.status if step else "unknown", + "decision": decision, + "reasoning": reasoning, + "duration_ms": step.duration_ms if step else 0, + "result": result, + "error": error, + }) + + async def complete_flow(self, final_result: dict): + now = datetime.utcnow() + self.flow.completed_at = now.isoformat() + self.flow.status = "completed" + self.flow.total_duration_ms = (now - self._start_time).total_seconds() * 1000 + self.flow.final_result = final_result + + await self._broadcast("flow_completed", self.flow.to_dict()) + + async def error_flow(self, error: str): + now = datetime.utcnow() + self.flow.completed_at = now.isoformat() + self.flow.status = "error" + self.flow.total_duration_ms = (now - self._start_time).total_seconds() * 1000 + + await self._broadcast("flow_error", { + "error": error, + "flow": self.flow.to_dict(), + }) + + 
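+# Illustrative usage sketch (an assumption about the wiring; the orchestrator is expected to drive these calls): +# +#     tracker = create_flow_tracker(issue_id)        # registry helpers defined below +#     queue = tracker.subscribe()                    # e.g. an SSE endpoint drains this queue +#     await tracker.start_step("VisionAgent") +#     await tracker.complete_step("VisionAgent", decision="classified", reasoning="1 detection", result={"category": "Pothole Issues"}) +#     await tracker.complete_flow({"state": "assigned"}) +#     remove_flow_tracker(issue_id)                  # drop the in-memory entry once the stream ends + 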
+_active_flows: dict[UUID, FlowTracker] = {} + + +def get_flow_tracker(issue_id: UUID) -> Optional[FlowTracker]: + return _active_flows.get(issue_id) + + +def create_flow_tracker(issue_id: UUID) -> FlowTracker: + if issue_id in _active_flows: + return _active_flows[issue_id] + + tracker = FlowTracker(issue_id) + _active_flows[issue_id] = tracker + return tracker + + +def remove_flow_tracker(issue_id: UUID): + if issue_id in _active_flows: + del _active_flows[issue_id] diff --git a/Backend/core/logging.py b/Backend/core/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..0adb97e77054411e1729d7eba1da3f2d81d3be3d --- /dev/null +++ b/Backend/core/logging.py @@ -0,0 +1,77 @@ +import logging +import sys +from contextvars import ContextVar +from datetime import datetime +from typing import Any, Optional +from uuid import UUID +import json + +correlation_id: ContextVar[Optional[str]] = ContextVar("correlation_id", default=None) + + +class JSONFormatter(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + log_data = { + "timestamp": datetime.utcnow().isoformat(), + "level": record.levelname, + "logger": record.name, + "message": record.getMessage(), + "correlation_id": correlation_id.get(), + } + + if hasattr(record, "issue_id"): + log_data["issue_id"] = str(record.issue_id) + + if hasattr(record, "agent"): + log_data["agent"] = record.agent + + if hasattr(record, "decision"): + log_data["decision"] = record.decision + + if record.exc_info: + log_data["exception"] = self.formatException(record.exc_info) + + return json.dumps(log_data) + + +class AgentLogger(logging.LoggerAdapter): + def __init__(self, logger: logging.Logger, agent_name: str): + super().__init__(logger, {"agent": agent_name}) + + def process(self, msg: str, kwargs: dict[str, Any]) -> tuple[str, dict[str, Any]]: + extra = kwargs.get("extra", {}) + extra["agent"] = self.extra["agent"] + kwargs["extra"] = extra + return msg, kwargs + + def log_decision( + self, + issue_id: UUID, + decision: str, + reasoning: str, + level: int = logging.INFO + ) -> None: + self.log( + level, + f"Decision: {decision} | Reasoning: {reasoning}", + extra={"issue_id": issue_id, "decision": decision} + ) + + +def setup_logging(debug: bool = False) -> None: + root = logging.getLogger() + root.setLevel(logging.DEBUG if debug else logging.INFO) + + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter(JSONFormatter()) + root.addHandler(handler) + + logging.getLogger("uvicorn.access").setLevel(logging.WARNING) + logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING) + + +def get_logger(name: str, agent_name: Optional[str] = None) -> logging.Logger | AgentLogger: + logger = logging.getLogger(name) + if agent_name: + return AgentLogger(logger, agent_name) + return logger diff --git a/Backend/core/schemas.py b/Backend/core/schemas.py new file mode 100644 index 0000000000000000000000000000000000000000..0cc504e98cd25b90fde8a2e42676e45a2d4de157 --- /dev/null +++ b/Backend/core/schemas.py @@ -0,0 +1,169 @@ +from datetime import datetime +from enum import IntEnum, StrEnum +from typing import Optional +from uuid import UUID, uuid4 +from pydantic import BaseModel, Field, field_validator + + +class IssueState(StrEnum): + REPORTED = "reported" + PENDING_CONFIRMATION = "pending_confirmation" + VALIDATED = "validated" + ASSIGNED = "assigned" + IN_PROGRESS = "in_progress" + PENDING_VERIFICATION = "pending_verification" + RESOLVED = "resolved" + VERIFIED = "verified" + CLOSED = "closed" + ESCALATED = 
"escalated" + REJECTED = "rejected" + + +class PriorityLevel(IntEnum): + CRITICAL = 1 + HIGH = 2 + MEDIUM = 3 + LOW = 4 + + +class IssueCategory(StrEnum): + DAMAGED_ROAD = "Damaged Road Issues" + POTHOLE = "Pothole Issues" + ILLEGAL_PARKING = "Illegal Parking Issues" + BROKEN_SIGN = "Broken Road Sign Issues" + FALLEN_TREE = "Fallen Trees" + GARBAGE = "Littering/Garbage on Public Places" + VANDALISM = "Vandalism Issues" + DEAD_ANIMAL = "Dead Animal Pollution" + DAMAGED_CONCRETE = "Damaged Concrete Structures" + DAMAGED_ELECTRIC = "Damaged Electric Wires and Poles" + + +CLASS_ID_TO_CATEGORY = { + 0: IssueCategory.DAMAGED_ROAD, + 1: IssueCategory.POTHOLE, + 2: IssueCategory.ILLEGAL_PARKING, + 3: IssueCategory.BROKEN_SIGN, + 4: IssueCategory.FALLEN_TREE, + 5: IssueCategory.GARBAGE, + 6: IssueCategory.VANDALISM, + 7: IssueCategory.DEAD_ANIMAL, + 8: IssueCategory.DAMAGED_CONCRETE, + 9: IssueCategory.DAMAGED_ELECTRIC, +} + + +class Coordinates(BaseModel): + latitude: float = Field(..., ge=-90, le=90) + longitude: float = Field(..., ge=-180, le=180) + accuracy_meters: Optional[float] = Field(None, ge=0) + + +class DeviceMetadata(BaseModel): + platform: str = Field(..., max_length=50) + device_model: Optional[str] = Field(None, max_length=100) + os_version: Optional[str] = Field(None, max_length=50) + app_version: Optional[str] = Field(None, max_length=20) + + +class IssuePacket(BaseModel): + description: Optional[str] = Field(None, max_length=2000) + coordinates: Coordinates + device_metadata: DeviceMetadata + timestamp: datetime = Field(default_factory=datetime.utcnow) + + @field_validator("description") + @classmethod + def clean_description(cls, v: Optional[str]) -> Optional[str]: + if v: + return v.strip() + return v + + +class DetectionBox(BaseModel): + class_id: int + class_name: str + confidence: float = Field(..., ge=0, le=1) + bbox: tuple[float, float, float, float] + + +class ClassificationResult(BaseModel): + issue_id: UUID + detections: list[DetectionBox] + primary_category: Optional[IssueCategory] = None + primary_confidence: float = 0.0 + annotated_urls: list[str] = [] + inference_time_ms: float + model_version: str = "1.0" + + def model_post_init(self, __context) -> None: + if self.detections and not self.primary_category: + best = max(self.detections, key=lambda d: d.confidence) + self.primary_category = CLASS_ID_TO_CATEGORY.get(best.class_id) + self.primary_confidence = best.confidence + + +class IssueCreate(BaseModel): + description: Optional[str] = Field(None, max_length=2000) + latitude: float = Field(..., ge=-90, le=90) + longitude: float = Field(..., ge=-180, le=180) + accuracy_meters: Optional[float] = Field(None, ge=0) + platform: str = Field(..., max_length=50) + device_model: Optional[str] = Field(None, max_length=100) + + @field_validator("description") + @classmethod + def clean_description(cls, v: Optional[str]) -> Optional[str]: + if v is None: + return None + cleaned = v.strip() + return cleaned or None + + +class AgentOutput(BaseModel): + agent: str + decision: str + reasoning: Optional[str] = None + duration_ms: Optional[float] = None + + +class IssueResponse(BaseModel): + id: UUID + description: Optional[str] + latitude: float + longitude: float + state: IssueState + priority: Optional[PriorityLevel] + priority_reason: Optional[str] = None + category: Optional[str] + confidence: Optional[float] + detections_count: Optional[int] = None + image_urls: list[str] + annotated_urls: list[str] = [] + proof_image_url: Optional[str] = None + validation_source: Optional[str] 
= None + is_duplicate: bool = False + parent_issue_id: Optional[UUID] = None + nearby_count: Optional[int] = None + city: Optional[str] = None + locality: Optional[str] = None + full_address: Optional[str] = None + geo_status: Optional[str] = None + department: Optional[str] = None + assigned_member: Optional[str] = None + sla_hours: Optional[int] = None + sla_deadline: Optional[datetime] = None + agent_flow: list[AgentOutput] = [] + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class IssueListResponse(BaseModel): + items: list[IssueResponse] + total: int + page: int + page_size: int + diff --git a/Backend/core/security.py b/Backend/core/security.py new file mode 100644 index 0000000000000000000000000000000000000000..85cafcf2f63a4c3b73f4d17a5c2a1c3c2ca83339 --- /dev/null +++ b/Backend/core/security.py @@ -0,0 +1,80 @@ +from fastapi import Request, Response +from fastapi.responses import JSONResponse +from starlette.middleware.base import BaseHTTPMiddleware +from collections import defaultdict +import time +import asyncio + +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + response = await call_next(request) + + response.headers["X-Content-Type-Options"] = "nosniff" + response.headers["X-Frame-Options"] = "DENY" + response.headers["X-XSS-Protection"] = "1; mode=block" + response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" + response.headers["Permissions-Policy"] = "geolocation=(self), camera=(self)" + + if request.url.scheme == "https": + response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains" + + return response + + +class RateLimitMiddleware(BaseHTTPMiddleware): + def __init__(self, app, requests_per_minute: int = 60, burst_limit: int = 10): + super().__init__(app) + self.requests_per_minute = requests_per_minute + self.burst_limit = burst_limit + self.requests = defaultdict(list) + self.lock = asyncio.Lock() + + async def dispatch(self, request: Request, call_next): + client_ip = request.client.host if request.client else "unknown" + current_time = time.time() + + async with self.lock: + self.requests[client_ip] = [ + t for t in self.requests[client_ip] + if current_time - t < 60 + ] + + if len(self.requests[client_ip]) >= self.requests_per_minute: + logger.warning(f"Rate limit exceeded for {client_ip}") + return JSONResponse( + status_code=429, + content={"detail": "Too many requests. Please slow down."}, + headers={"Retry-After": "60"} + ) + + recent_requests = [t for t in self.requests[client_ip] if current_time - t < 1] + if len(recent_requests) >= self.burst_limit: + logger.warning(f"Burst limit exceeded for {client_ip}") + return JSONResponse( + status_code=429, + content={"detail": "Too many requests. 
Please slow down."}, + headers={"Retry-After": "1"} + ) + + self.requests[client_ip].append(current_time) + + return await call_next(request) + + +class RequestValidationMiddleware(BaseHTTPMiddleware): + MAX_CONTENT_LENGTH = 50 * 1024 * 1024 + + async def dispatch(self, request: Request, call_next): + content_length = request.headers.get("content-length") + if content_length and int(content_length) > self.MAX_CONTENT_LENGTH: + return JSONResponse( + status_code=413, + content={"detail": "Request entity too large"} + ) + + return await call_next(request) diff --git a/Backend/database/__init__.py b/Backend/database/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..52c22126838e816cba3a68cd6830755e7a15c640 --- /dev/null +++ b/Backend/database/__init__.py @@ -0,0 +1,2 @@ +from .connection import engine, async_session_factory, get_db, get_db_context, init_db, close_db +from .models import Base, Issue, IssueImage, Classification, IssueEvent, Department, Member, Escalation diff --git a/Backend/database/connection.py b/Backend/database/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..b2bc48f6560e41f6d870beb76e4a14edf6cdbea8 --- /dev/null +++ b/Backend/database/connection.py @@ -0,0 +1,57 @@ +from contextlib import asynccontextmanager +from typing import AsyncGenerator +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.pool import NullPool + +from Backend.core.config import settings + +database_url = settings.database_url.replace("postgresql://", "postgresql+asyncpg://") + +engine = create_async_engine( + database_url, + poolclass=NullPool, + echo=False, + connect_args={ + "statement_cache_size": 0, + "prepared_statement_cache_size": 0, + }, +) + +async_session_factory = async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, + autocommit=False, + autoflush=False, +) + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + async with async_session_factory() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + + +@asynccontextmanager +async def get_db_context() -> AsyncGenerator[AsyncSession, None]: + async with async_session_factory() as session: + try: + yield session + await session.commit() + except Exception: + await session.rollback() + raise + + +async def init_db() -> None: + from Backend.database.models import Base + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + +async def close_db() -> None: + await engine.dispose() diff --git a/Backend/database/init_db.py b/Backend/database/init_db.py new file mode 100644 index 0000000000000000000000000000000000000000..a82b187e9f55b5ec52d94d76a1028eb861b50b2e --- /dev/null +++ b/Backend/database/init_db.py @@ -0,0 +1,46 @@ +import asyncio +import logging +from sqlalchemy.ext.asyncio import create_async_engine +from Backend.core.config import settings +from Backend.database.models import Base +from Backend.database.seed import seed_data + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +async def init_models(): + """Drops and recreates all tables, then seeds initial data.""" + logger.info("Initializing database...") + + + database_url = settings.database_url.replace("port=6543", "port=5432").replace("postgresql://", "postgresql+asyncpg://") + engine = create_async_engine( + database_url, + echo=True, + connect_args={ + "statement_cache_size": 0, + 
"prepared_statement_cache_size": 0, + } + ) + + async with engine.begin() as conn: + logger.info("Dropping existing tables...") + await conn.run_sync(Base.metadata.drop_all) + + logger.info("Creating new tables...") + await conn.run_sync(Base.metadata.create_all) + + logger.info("Schema initialized. Seeding data...") + try: + await seed_data(engine) + logger.info("Seeding completed successfully!") + except Exception as e: + logger.error(f"Seeding failed: {e}") + + await engine.dispose() + logger.info("Database initialization finished.") + +if __name__ == "__main__": + asyncio.run(init_models()) diff --git a/Backend/database/models.py b/Backend/database/models.py new file mode 100644 index 0000000000000000000000000000000000000000..64f1f8ab401f9042c3424be735c76a29b4e37571 --- /dev/null +++ b/Backend/database/models.py @@ -0,0 +1,174 @@ +from datetime import datetime +from typing import Optional +from uuid import UUID, uuid4 +from sqlalchemy import Boolean, DateTime, Float, ForeignKey, Integer, String, Text, func +from sqlalchemy.dialects.postgresql import UUID as PGUUID +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship + + +class Base(DeclarativeBase): + pass + + +class Department(Base): + __tablename__ = "departments" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4) + name: Mapped[str] = mapped_column(String(100), nullable=False, unique=True) + code: Mapped[str] = mapped_column(String(20), nullable=False, unique=True) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + categories: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + default_sla_hours: Mapped[int] = mapped_column(Integer, default=48) + escalation_email: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + + is_active: Mapped[bool] = mapped_column(Boolean, default=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now()) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now()) + + members: Mapped[list["Member"]] = relationship(back_populates="department", lazy="selectin") + + +class Member(Base): + __tablename__ = "members" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4) + department_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("departments.id", ondelete="SET NULL"), nullable=True, index=True) + + name: Mapped[str] = mapped_column(String(100), nullable=False) + email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) + phone: Mapped[Optional[str]] = mapped_column(String(20), nullable=True) + password_hash: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + + role: Mapped[str] = mapped_column(String(50), default="worker") + city: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True) + locality: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) + + is_active: Mapped[bool] = mapped_column(Boolean, default=True) + current_workload: Mapped[int] = mapped_column(Integer, default=0) + max_workload: Mapped[int] = mapped_column(Integer, default=10) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now()) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now()) + + department: Mapped[Optional["Department"]] = relationship(back_populates="members") + + +class Issue(Base): + __tablename__ = "issues" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, 
default=uuid4) + user_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True, index=True) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + latitude: Mapped[float] = mapped_column(Float, nullable=False, index=True) + longitude: Mapped[float] = mapped_column(Float, nullable=False, index=True) + accuracy_meters: Mapped[Optional[float]] = mapped_column(Float, nullable=True) + + state: Mapped[str] = mapped_column(String(20), default="reported", index=True) + priority: Mapped[Optional[int]] = mapped_column(Integer, nullable=True, index=True) + priority_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + validation_source: Mapped[Optional[str]] = mapped_column(String(20), nullable=True) + validation_reason: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + is_duplicate: Mapped[bool] = mapped_column(Boolean, default=False, index=True) + parent_issue_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id"), nullable=True) + geo_cluster_id: Mapped[Optional[str]] = mapped_column(String(50), nullable=True, index=True) + + platform: Mapped[str] = mapped_column(String(50), nullable=False) + device_model: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) + + department_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("departments.id"), nullable=True) + assigned_member_id: Mapped[Optional[UUID]] = mapped_column(PGUUID(as_uuid=True), ForeignKey("members.id"), nullable=True) + city: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True) + locality: Mapped[Optional[str]] = mapped_column(String(100), nullable=True) + full_address: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + sla_deadline: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + sla_hours: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) + escalation_level: Mapped[int] = mapped_column(Integer, default=0) + escalated_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + + resolved_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True, index=True) + resolution_notes: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + proof_image_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) + completed_at: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True) + updated_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), onupdate=func.now()) + + + images: Mapped[list["IssueImage"]] = relationship(back_populates="issue", lazy="selectin", foreign_keys="IssueImage.issue_id") + classification: Mapped[Optional["Classification"]] = relationship(back_populates="issue", uselist=False, lazy="selectin") + + department: Mapped[Optional["Department"]] = relationship("Department", lazy="selectin") + assigned_member: Mapped[Optional["Member"]] = relationship("Member", foreign_keys=[assigned_member_id], lazy="selectin") + + events: Mapped[list["IssueEvent"]] = relationship(back_populates="issue", lazy="noload") + duplicates: Mapped[list["Issue"]] = relationship(back_populates="parent_issue", foreign_keys=[parent_issue_id]) + parent_issue: Mapped[Optional["Issue"]] = relationship(back_populates="duplicates", remote_side=[id], foreign_keys=[parent_issue_id]) + + +class IssueImage(Base): + __tablename__ = "issue_images" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), 
primary_key=True, default=uuid4) + issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True) + file_path: Mapped[str] = mapped_column(String(500), nullable=False) + annotated_path: Mapped[Optional[str]] = mapped_column(String(500), nullable=True) + original_filename: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now()) + + issue: Mapped["Issue"] = relationship(back_populates="images", foreign_keys=[issue_id]) + + +class Classification(Base): + __tablename__ = "classifications" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4) + issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), unique=True) + + primary_category: Mapped[Optional[str]] = mapped_column(String(100), nullable=True, index=True) + primary_confidence: Mapped[float] = mapped_column(Float, default=0.0) + + detections_json: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + inference_time_ms: Mapped[float] = mapped_column(Float, default=0.0) + model_version: Mapped[str] = mapped_column(String(20), default="1.0") + + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now()) + + issue: Mapped["Issue"] = relationship(back_populates="classification") + + +class IssueEvent(Base): + __tablename__ = "issue_events" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4) + issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True) + + event_type: Mapped[str] = mapped_column(String(50), nullable=False, index=True) + agent_name: Mapped[Optional[str]] = mapped_column(String(50), nullable=True) + event_data: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now(), index=True) + + issue: Mapped["Issue"] = relationship(back_populates="events") + + +class Escalation(Base): + __tablename__ = "escalations" + + id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), primary_key=True, default=uuid4) + issue_id: Mapped[UUID] = mapped_column(PGUUID(as_uuid=True), ForeignKey("issues.id", ondelete="CASCADE"), index=True) + + from_level: Mapped[int] = mapped_column(Integer, nullable=False) + to_level: Mapped[int] = mapped_column(Integer, nullable=False) + reason: Mapped[str] = mapped_column(Text, nullable=False) + + escalated_by: Mapped[str] = mapped_column(String(50), default="system") + notified_emails: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + + created_at: Mapped[datetime] = mapped_column(DateTime, default=func.now()) diff --git a/Backend/database/seed.py b/Backend/database/seed.py new file mode 100644 index 0000000000000000000000000000000000000000..e5dc580426bf41d3a3d73d502b650908b4675c18 --- /dev/null +++ b/Backend/database/seed.py @@ -0,0 +1,83 @@ +import logging +import uuid +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession +from sqlalchemy.orm import sessionmaker +from Backend.database.models import Department, Member + +logger = logging.getLogger(__name__) + +async def seed_data(engine: AsyncEngine): + async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + + async with async_session() as session: + + pwd_id = uuid.uuid4() + sanitation_id = uuid.uuid4() + traffic_id = uuid.uuid4() + + departments = [ + Department( + id=pwd_id, + name="Public Works 
Department", + code="PWD", + description="Roads, Potholes, Infrastructure", + default_sla_hours=48, + escalation_email="pwd_head@city.gov" + ), + Department( + id=sanitation_id, + name="Sanitation Department", + code="SANITATION", + description="Garbage, Cleaning, Waste", + default_sla_hours=24, + escalation_email="sanitation_head@city.gov" + ), + Department( + id=traffic_id, + name="Traffic Department", + code="TRAFFIC", + description="Signals, Signs, Illegal Parking", + default_sla_hours=12, + escalation_email="traffic_head@city.gov" + ) + ] + + for dept in departments: + session.add(dept) + + + members = [ + Member( + department_id=pwd_id, + name="Ramesh Kumar", + email="ramesh.pwd@city.gov", + role="officer", + city="New Delhi", + locality="Connaught Place", + max_workload=10 + ), + Member( + department_id=sanitation_id, + name="Suresh Singh", + email="suresh.sanitation@city.gov", + role="officer", + city="New Delhi", + locality="Karol Bagh", + max_workload=15 + ), + Member( + department_id=traffic_id, + name="Priya Sharma", + email="priya.traffic@city.gov", + role="officer", + city="New Delhi", + locality="Lajpat Nagar", + max_workload=12 + ) + ] + + for member in members: + session.add(member) + + await session.commit() + logger.info("Seeded 3 departments and 3 members.") diff --git a/Backend/main.py b/Backend/main.py new file mode 100644 index 0000000000000000000000000000000000000000..e9b0d89c323d006198e74c399c4106866e10526a --- /dev/null +++ b/Backend/main.py @@ -0,0 +1,14 @@ +import uvicorn +from Backend.core.config import settings + +def main(): + uvicorn.run( + "Backend.api:app", + host=settings.api_host, + port=settings.api_port, + reload=True, + workers=1, + ) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/Backend/orchestration/__init__.py b/Backend/orchestration/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f3b8236fe247150bb2137b3d29cb463bb37b39bb --- /dev/null +++ b/Backend/orchestration/__init__.py @@ -0,0 +1 @@ +from .base import BaseAgent diff --git a/Backend/orchestration/base.py b/Backend/orchestration/base.py new file mode 100644 index 0000000000000000000000000000000000000000..77106490a992e3494707b0c78378b673421e4fba --- /dev/null +++ b/Backend/orchestration/base.py @@ -0,0 +1,25 @@ +from abc import ABC, abstractmethod +from typing import Any, TypeVar +from uuid import UUID + +from Backend.core.events import Event, EventBus, event_bus +from Backend.core.logging import AgentLogger, get_logger + +E = TypeVar("E", bound=Event) + + +class BaseAgent(ABC): + def __init__(self, name: str): + self.name = name + self.logger: AgentLogger = get_logger(f"agent.{name}", agent_name=name) + self._event_bus = event_bus + + def subscribe(self, event_type: type[E]) -> None: + self._event_bus.subscribe(event_type, self.handle) + + @abstractmethod + async def handle(self, event: E) -> None: + pass + + def log_decision(self, issue_id: UUID, decision: str, reasoning: str) -> None: + self.logger.log_decision(issue_id, decision, reasoning) diff --git a/Backend/requirements.txt b/Backend/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..1db9352dec3750f5b8c88ec54f1ddc28029a5e72 --- /dev/null +++ b/Backend/requirements.txt @@ -0,0 +1,19 @@ +fastapi>=0.109.0 +uvicorn[standard]>=0.27.0 +pydantic>=2.5.0 +pydantic-settings>=2.1.0 +email-validator>=2.0.0 +sqlalchemy[asyncio]>=2.0.25 +asyncpg>=0.29.0 +aiofiles>=23.2.1 +aiohttp>=3.9.0 +python-multipart>=0.0.6 +PyJWT>=2.8.0 +bcrypt>=4.1.0 +numpy<2 
+opencv-python-headless==4.11.0.86 +torch>=2.1.0,<3 +torchvision>=0.16.0,<1 +ultralytics>=8.1.0 +resend>=2.0.0 +google-generativeai>=0.8.3 diff --git a/Backend/services/__init__.py b/Backend/services/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..61af75ae71c9c22389b7eea32a7df0a96b303e2c --- /dev/null +++ b/Backend/services/__init__.py @@ -0,0 +1,2 @@ +from .ingestion import IngestionService +from .vision import VisionService diff --git a/Backend/services/email.py b/Backend/services/email.py new file mode 100644 index 0000000000000000000000000000000000000000..1224956d953862791fb79cf972a43339846d68d9 --- /dev/null +++ b/Backend/services/email.py @@ -0,0 +1,273 @@ +import resend +from typing import List +from Backend.core.config import settings +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +class EmailService: + def __init__(self): + self.sender_email = settings.sender_email + if settings.resend_api_key: + resend.api_key = settings.resend_api_key + else: + logger.warning("Resend API key not configured") + + async def send_email( + self, + to: List[str], + subject: str, + body: str, + html: bool = False + ) -> bool: + if not settings.resend_api_key: + logger.warning("Resend API key not configured. Email not sent.") + logger.info(f"Would send email to {to}: {subject}") + return False + + try: + for recipient in to: + try: + params = { + "from": self.sender_email, + "to": [recipient], + "subject": subject, + } + + if html: + params["html"] = body + else: + params["text"] = body + + resend.Emails.send(params) + logger.info(f"Email sent successfully to {recipient}") + except Exception as e: + logger.error(f"Failed to send email to {recipient}: {e}") + return False + + return True + + except Exception as e: + logger.error(f"Email service error: {e}") + return False + + async def send_assignment_email( + self, + worker_email: str, + worker_name: str, + issue_id: str, + category: str, + priority: str, + location: str, + description: str + ): + subject = f"🔔 New Task Assigned: {category} [{priority}]" + + body = f""" +Hello {worker_name}, + +You have been assigned a new task in UrbanLens. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} +Priority: {priority} +Location: {location} +Description: {description or 'No description provided'} + +NEXT STEPS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +1. Review the issue details in your worker dashboard +2. Navigate to the location +3. Resolve the issue +4. Upload proof of resolution + +Thank you for your service! + +UrbanLens Team +"Governance at the Speed of Software" +""" + + return await self.send_email([worker_email], subject, body) + + async def send_manual_review_email( + self, + issue_id: str, + reason: str, + category: str, + location: str, + image_url: str + ): + subject = f"⚠️ Manual Review Required: {category}" + + body = f""" +Admin Team, + +An issue requires manual review in UrbanLens. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} +Location: {location} +Reason: {reason} + +Image: {image_url} + +ACTION REQUIRED +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Please review this issue in the admin dashboard and take appropriate action. 
+ +UrbanLens System +""" + + return await self.send_email([settings.admin_email], subject, body) + + async def send_completion_email( + self, + user_email: str, + issue_id: str, + category: str, + location: str, + resolution_notes: str + ): + subject = f"✅ Your Report Has Been Resolved: {category}" + + body = f""" +Dear Citizen, + +Great news! Your reported issue has been resolved. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} +Location: {location} +Resolution: {resolution_notes or 'Issue has been successfully addressed'} + +FEEDBACK +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +We value your input! Please confirm if the issue has been resolved by checking the app. + +Thank you for making our city better! + +UrbanLens Team +"Governance at the Speed of Software" +""" + + return await self.send_email([user_email], subject, body) + + async def send_escalation_email( + self, + admin_email: str, + issue_id: str, + category: str, + priority: str, + reason: str, + escalation_level: int + ): + subject = f"🚨 ESCALATION LEVEL {escalation_level}: {category}" + + body = f""" +URGENT: Issue Escalation + +An issue has been escalated and requires immediate attention. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} +Priority: {priority} +Escalation Level: {escalation_level} +Reason: {reason} + +IMMEDIATE ACTION REQUIRED +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Please review and address this issue immediately in the admin dashboard. + +UrbanLens System +""" + + return await self.send_email([admin_email], subject, body) + + async def send_confirmation_request_email( + self, + user_email: str, + issue_id: str, + category: str, + confirmation_link: str + ): + subject = "🔍 Please Confirm: Is This Issue Resolved?" + + body = f""" +Dear Citizen, + +Your reported issue has been marked as resolved by our team. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} + +CONFIRMATION NEEDED +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Please confirm if the issue has been properly resolved: +{confirmation_link} + +Your feedback helps us improve our service quality. + +Thank you! + +UrbanLens Team +""" + + return await self.send_email([user_email], subject, body) + + async def send_issue_accepted_email( + self, + user_email: str, + issue_id: str, + category: str, + priority: str, + location: str, + accepted_by: str = "automatic", + tracking_url: str | None = None + ): + acceptance_type = "automatically" if accepted_by == "automatic" else "manually by our team" + subject = f"✓ Your Report Has Been Accepted: {category}" + + body = f""" +Dear Citizen, + +Thank you for reporting an issue! Your report has been accepted {acceptance_type}. + +ISSUE DETAILS +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +Issue ID: {issue_id} +Category: {category} +Priority: {priority} +Location: {location} + +WHAT HAPPENS NEXT +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +1. Your issue has been assigned to the appropriate department +2. A field worker will be dispatched to address it +3. You will receive updates on the progress +4. Once resolved, you'll get a confirmation notification + +TRACK YOUR REPORT +━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +{tracking_url or 'Check the UrbanLens app for real-time updates'} + +Thank you for helping make our city better! 
+ +UrbanLens Team +"Governance at the Speed of Software" +""" + + return await self.send_email([user_email], subject, body) + + +email_service = EmailService() diff --git a/Backend/services/geocoding.py b/Backend/services/geocoding.py new file mode 100644 index 0000000000000000000000000000000000000000..a8e36ef3f5c589ef641b2f550eba2db8e36f6c22 --- /dev/null +++ b/Backend/services/geocoding.py @@ -0,0 +1,100 @@ +import aiohttp +from typing import Optional +from dataclasses import dataclass + +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +@dataclass +class LocationInfo: + city: Optional[str] = None + locality: Optional[str] = None + district: Optional[str] = None + state: Optional[str] = None + country: Optional[str] = None + pincode: Optional[str] = None + full_address: Optional[str] = None + + +class GeocodingService: + NOMINATIM_URL = "https://nominatim.openstreetmap.org/reverse" + + async def reverse_geocode(self, latitude: float, longitude: float) -> LocationInfo: + params = { + "lat": latitude, + "lon": longitude, + "format": "json", + "addressdetails": 1, + "zoom": 18, + } + + headers = { + "User-Agent": "CityIssueResolutionAgent/1.0" + } + + try: + async with aiohttp.ClientSession() as session: + async with session.get( + self.NOMINATIM_URL, + params=params, + headers=headers, + timeout=aiohttp.ClientTimeout(total=10) + ) as response: + if response.status == 200: + data = await response.json() + return self._parse_response(data) + else: + logger.warning(f"Geocoding failed: {response.status}") + return LocationInfo() + except Exception as e: + logger.error(f"Geocoding error: {e}") + return LocationInfo() + + def _parse_response(self, data: dict) -> LocationInfo: + address = data.get("address", {}) + + city = ( + address.get("city") or + address.get("town") or + address.get("municipality") or + address.get("village") or + address.get("suburb") + ) + + locality = ( + address.get("suburb") or + address.get("neighbourhood") or + address.get("quarter") or + address.get("borough") + ) + + district = ( + address.get("county") or + address.get("district") or + address.get("state_district") + ) + + state = address.get("state") + country = address.get("country") + pincode = address.get("postcode") + + full_address = data.get("display_name") + + return LocationInfo( + city=city, + locality=locality, + district=district, + state=state, + country=country, + pincode=pincode, + full_address=full_address, + ) + + async def get_city_from_coordinates(self, latitude: float, longitude: float) -> Optional[str]: + location = await self.reverse_geocode(latitude, longitude) + return location.city or location.locality or location.district + + +geocoding_service = GeocodingService() diff --git a/Backend/services/ingestion.py b/Backend/services/ingestion.py new file mode 100644 index 0000000000000000000000000000000000000000..fac4905b717a56a4c2b6983c015a8329d681bc46 --- /dev/null +++ b/Backend/services/ingestion.py @@ -0,0 +1,85 @@ +from uuid import UUID +from fastapi import UploadFile +from sqlalchemy.ext.asyncio import AsyncSession + +from Backend.core.events import event_bus, IssueCreated +from Backend.core.logging import get_logger +from Backend.core.schemas import IssueCreate, IssueState +from Backend.database.models import Issue, IssueImage +from Backend.services.geocoding import geocoding_service +from Backend.utils.storage import save_upload, get_upload_url, validate_file_extension, validate_file_size + +logger = get_logger(__name__) + + +class IngestionService: + def 
__init__(self, db: AsyncSession): + self.db = db + + async def create_issue( + self, + data: IssueCreate, + images: list[UploadFile], + user_id: str | None = None + ) -> tuple[Issue, list[str]]: + if not images: + raise ValueError("At least one image is required") + + for image in images: + if not validate_file_extension(image.filename or ""): + raise ValueError(f"Invalid file extension: {image.filename}") + + location_info = await geocoding_service.reverse_geocode( + data.latitude, data.longitude + ) + + logger.info(f"Location resolved: {location_info.city}, {location_info.locality}") + + final_description = data.description or "Issue reported" + + issue = Issue( + user_id=user_id, + description=final_description, + latitude=data.latitude, + longitude=data.longitude, + accuracy_meters=data.accuracy_meters, + platform=data.platform, + device_model=data.device_model, + state=IssueState.REPORTED, + city=location_info.city, + locality=location_info.locality, + full_address=location_info.full_address, + ) + + self.db.add(issue) + await self.db.flush() + + image_paths = [] + for image in images: + file_path = await save_upload(image, subfolder=str(issue.id)) + + issue_image = IssueImage( + issue_id=issue.id, + file_path=file_path, + original_filename=image.filename, + ) + self.db.add(issue_image) + image_paths.append(file_path) + + await self.db.flush() + + event = IssueCreated( + issue_id=issue.id, + image_paths=image_paths, + latitude=issue.latitude, + longitude=issue.longitude, + description=issue.description, + ) + await event_bus.publish(event) + + logger.info(f"Issue created: {issue.id} in {issue.city}") + + return issue, image_paths + + async def get_issue(self, issue_id: UUID) -> Issue | None: + return await self.db.get(Issue, issue_id) diff --git a/Backend/services/supabase_auth.py b/Backend/services/supabase_auth.py new file mode 100644 index 0000000000000000000000000000000000000000..5404c20938ff70a35e393e1caa37385e8290a026 --- /dev/null +++ b/Backend/services/supabase_auth.py @@ -0,0 +1,119 @@ +import aiohttp +from typing import Optional +from Backend.core.config import settings +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +class SupabaseAuthService: + def __init__(self): + self.url = settings.supabase_url + self.service_key = settings.supabase_key + self.headers = { + "apikey": self.service_key, + "Authorization": f"Bearer {self.service_key}", + "Content-Type": "application/json", + } + + async def invite_user(self, email: str, redirect_to: Optional[str] = None) -> dict: + invite_url = f"{self.url}/auth/v1/invite" + + payload = { + "email": email, + } + + if redirect_to: + payload["options"] = {"redirectTo": redirect_to} + + async with aiohttp.ClientSession() as session: + async with session.post(invite_url, json=payload, headers=self.headers) as response: + result = await response.json() + + if response.status == 200: + logger.info(f"Invite sent to {email}") + return { + "success": True, + "message": f"Invitation email sent to {email}", + "user_id": result.get("id"), + "email": email, + } + else: + error_msg = result.get("msg") or result.get("message") or str(result) + logger.error(f"Failed to invite {email}: {error_msg}") + return { + "success": False, + "message": error_msg, + "email": email, + } + + async def create_user(self, email: str, password: str, user_metadata: Optional[dict] = None) -> dict: + create_url = f"{self.url}/auth/v1/admin/users" + + payload = { + "email": email, + "password": password, + "email_confirm": True, + } + + if 
user_metadata: + payload["user_metadata"] = user_metadata + + async with aiohttp.ClientSession() as session: + async with session.post(create_url, json=payload, headers=self.headers) as response: + result = await response.json() + + if response.status in [200, 201]: + logger.info(f"User created: {email}") + return { + "success": True, + "user_id": result.get("id"), + "email": email, + } + else: + error_msg = result.get("msg") or result.get("message") or str(result) + return { + "success": False, + "message": error_msg, + } + + async def send_magic_link(self, email: str, redirect_to: Optional[str] = None) -> dict: + magic_url = f"{self.url}/auth/v1/magiclink" + + payload = {"email": email} + + if redirect_to: + payload["options"] = {"redirectTo": redirect_to} + + async with aiohttp.ClientSession() as session: + async with session.post(magic_url, json=payload, headers=self.headers) as response: + if response.status == 200: + return { + "success": True, + "message": f"Magic link sent to {email}", + } + else: + result = await response.json() + return { + "success": False, + "message": result.get("msg") or str(result), + } + + async def get_user(self, user_id: str) -> Optional[dict]: + user_url = f"{self.url}/auth/v1/admin/users/{user_id}" + + async with aiohttp.ClientSession() as session: + async with session.get(user_url, headers=self.headers) as response: + if response.status == 200: + return await response.json() + return None + + async def delete_user(self, user_id: str) -> bool: + delete_url = f"{self.url}/auth/v1/admin/users/{user_id}" + + async with aiohttp.ClientSession() as session: + async with session.delete(delete_url, headers=self.headers) as response: + return response.status == 200 + + +supabase_auth = SupabaseAuthService() diff --git a/Backend/services/vision.py b/Backend/services/vision.py new file mode 100644 index 0000000000000000000000000000000000000000..c5dbb82b71bd4fc2ca692dc796a8282b8b373556 --- /dev/null +++ b/Backend/services/vision.py @@ -0,0 +1,3 @@ +from Backend.agents.vision import VisionAgent + +VisionService = VisionAgent diff --git a/Backend/utils/__init__.py b/Backend/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..75d0938600f3ab26afd72a4cb4b4b63d81faab7a --- /dev/null +++ b/Backend/utils/__init__.py @@ -0,0 +1,3 @@ +from .geo import haversine_distance, is_within_radius, find_nearby_issues +from .storage import save_upload, generate_filename, get_upload_url, save_bytes, download_from_supabase +from .fuzzy_match import auto_validate_issue, match_description_to_category diff --git a/Backend/utils/fuzzy_match.py b/Backend/utils/fuzzy_match.py new file mode 100644 index 0000000000000000000000000000000000000000..02347d0714253494c9cc1f69a320abd30734fef7 --- /dev/null +++ b/Backend/utils/fuzzy_match.py @@ -0,0 +1,113 @@ +from difflib import SequenceMatcher +from typing import Optional + +CATEGORY_KEYWORDS: dict[str, list[str]] = { + "Damaged Road Issues": [ + "road", "damage", "damaged", "broken", "crack", "cracked", "pavement", + "asphalt", "street", "highway", "lane", "surface", "rough", "uneven" + ], + "Pothole Issues": [ + "pothole", "hole", "pit", "crater", "dip", "depression", "bump", + "cavity", "hollow", "gap" + ], + "Illegal Parking Issues": [ + "parking", "parked", "car", "vehicle", "illegal", "wrong", "blocking", + "obstruction", "no parking", "double park", "sidewalk" + ], + "Broken Road Sign Issues": [ + "sign", "signboard", "traffic sign", "road sign", "broken sign", + "fallen sign", "damaged sign", "missing sign", 
"bent" + ], + "Fallen Trees": [ + "tree", "fallen", "branch", "trunk", "uprooted", "collapsed", + "blocking", "storm", "wind", "timber" + ], + "Littering/Garbage on Public Places": [ + "garbage", "trash", "litter", "waste", "rubbish", "dump", "dirty", + "filth", "debris", "plastic", "pile", "mess", "junk", "disposal" + ], + "Vandalism Issues": [ + "vandal", "graffiti", "spray", "paint", "defaced", "broken", + "smashed", "destroyed", "damaged property", "torn" + ], + "Dead Animal Pollution": [ + "dead", "animal", "carcass", "body", "corpse", "rotting", "smell", + "stink", "dog", "cat", "bird", "cow", "roadkill" + ], + "Damaged Concrete Structures": [ + "concrete", "structure", "wall", "pillar", "bridge", "flyover", + "footpath", "sidewalk", "curb", "crack", "broken" + ], + "Damaged Electric Wires and Poles": [ + "electric", "wire", "pole", "cable", "power", "electricity", + "hanging", "exposed", "sparking", "transformer", "light pole" + ], +} + + +def normalize_text(text: str) -> str: + return text.lower().strip() + + +def calculate_similarity(s1: str, s2: str) -> float: + return SequenceMatcher(None, s1.lower(), s2.lower()).ratio() + + +def fuzzy_match_word(word: str, keywords: list[str], threshold: float = 0.7) -> bool: + word = normalize_text(word) + for keyword in keywords: + if word == keyword: + return True + if len(word) >= 4 and calculate_similarity(word, keyword) >= threshold: + return True + if keyword in word or word in keyword: + return True + return False + + +def match_description_to_category( + description: Optional[str], + detected_category: str, + threshold: float = 0.6 +) -> tuple[bool, float, list[str]]: + if not description: + return False, 0.0, [] + + keywords = CATEGORY_KEYWORDS.get(detected_category, []) + if not keywords: + return False, 0.0, [] + + words = normalize_text(description).replace(",", " ").replace(".", " ").split() + + matched_words = [] + for word in words: + if len(word) < 3: + continue + if fuzzy_match_word(word, keywords): + matched_words.append(word) + + if not words: + return False, 0.0, [] + + match_score = len(matched_words) / max(len(words), 1) + is_match = len(matched_words) >= 1 or match_score >= threshold + + return is_match, match_score, matched_words + + +def auto_validate_issue( + description: Optional[str], + detected_categories: list[str], + confidence_threshold: float = 0.5 +) -> tuple[bool, str]: + if not description or not detected_categories: + return False, "No description or no detections for auto-validation" + + for category in detected_categories: + is_match, score, matched_words = match_description_to_category( + description, category + ) + if is_match: + return True, f"Auto-validated: '{category}' matched with keywords: {matched_words}" + + return False, f"Manual verification required: no match between description and detected categories {detected_categories}" diff --git a/Backend/utils/geo.py b/Backend/utils/geo.py new file mode 100644 index 0000000000000000000000000000000000000000..ddf406cd5a4f17a5a95f95eaeadf2b09988906d5 --- /dev/null +++ b/Backend/utils/geo.py @@ -0,0 +1,52 @@ +from math import radians, cos, sin, asin, sqrt +from typing import Sequence +from uuid import UUID + + +def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: + R = 6371000 + + lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2]) + + dlat = lat2 - lat1 + dlon = lon2 - lon1 + + a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 + c = 2 * asin(sqrt(a)) + + return R * c + + +def is_within_radius( 
+ lat1: float, lon1: float, + lat2: float, lon2: float, + radius_meters: float +) -> bool: + return haversine_distance(lat1, lon1, lat2, lon2) <= radius_meters + + +def find_nearby_issues( + target_lat: float, + target_lon: float, + issues: Sequence[tuple[UUID, float, float]], + radius_meters: float +) -> list[tuple[UUID, float]]: + nearby = [] + for issue_id, lat, lon in issues: + distance = haversine_distance(target_lat, target_lon, lat, lon) + if distance <= radius_meters: + nearby.append((issue_id, distance)) + return sorted(nearby, key=lambda x: x[1]) + + +def get_bounding_box(lat: float, lon: float, radius_meters: float) -> tuple[float, float, float, float]: + R = 6371000 + lat_delta = (radius_meters / R) * (180 / 3.14159265359) + lon_delta = lat_delta / cos(radians(lat)) + + return ( + lat - lat_delta, + lat + lat_delta, + lon - lon_delta, + lon + lon_delta + ) diff --git a/Backend/utils/storage.py b/Backend/utils/storage.py new file mode 100644 index 0000000000000000000000000000000000000000..4a678ff4a61ff2f6e94bad7326c0039de583c3b8 --- /dev/null +++ b/Backend/utils/storage.py @@ -0,0 +1,108 @@ +import aiofiles +import aiohttp +from pathlib import Path +from uuid import uuid4 +from typing import Optional +from fastapi import UploadFile + +from Backend.core.config import settings +from Backend.core.logging import get_logger + +logger = get_logger(__name__) + + +def generate_filename(original_filename: str) -> str: + ext = Path(original_filename).suffix.lower() + if not ext: + ext = ".jpg" + return f"{uuid4().hex}{ext}" + + +def get_supabase_public_url(file_path: str) -> str: + return f"{settings.supabase_url}/storage/v1/object/public/{settings.supabase_bucket}/{file_path}" + + +async def upload_to_supabase(file_data: bytes, remote_path: str, content_type: str = "image/jpeg") -> str: + url = f"{settings.supabase_url}/storage/v1/object/{settings.supabase_bucket}/{remote_path}" + + headers = { + "Authorization": f"Bearer {settings.supabase_key}", + "Content-Type": content_type, + "x-upsert": "true", + } + + async with aiohttp.ClientSession() as session: + async with session.post(url, data=file_data, headers=headers) as response: + if response.status not in (200, 201): + error_text = await response.text() + logger.error(f"Supabase upload failed: {response.status} - {error_text}") + raise Exception(f"Failed to upload to Supabase: {error_text}") + + logger.info(f"Uploaded to Supabase: {remote_path}") + return get_supabase_public_url(remote_path) + + +async def save_upload(file: UploadFile, subfolder: str = "") -> str: + filename = generate_filename(file.filename or "image.jpg") + + if subfolder: + remote_path = f"{subfolder}/{filename}" + else: + remote_path = filename + + content = await file.read() + await file.seek(0) + + content_type = file.content_type or "image/jpeg" + + public_url = await upload_to_supabase(content, remote_path, content_type) + + return remote_path + + +async def save_bytes(data: bytes, filename: str, subfolder: str = "", content_type: str = "image/jpeg") -> str: + if subfolder: + remote_path = f"{subfolder}/{filename}" + else: + remote_path = filename + + public_url = await upload_to_supabase(data, remote_path, content_type) + + return remote_path + + +async def save_local_temp(data: bytes, filename: str) -> str: + temp_dir = settings.local_temp_dir + temp_dir.mkdir(parents=True, exist_ok=True) + + file_path = temp_dir / filename + async with aiofiles.open(file_path, "wb") as f: + await f.write(data) + + return str(file_path) + + +async def 
download_from_supabase(remote_path: str) -> bytes: + url = get_supabase_public_url(remote_path) + + async with aiohttp.ClientSession() as session: + async with session.get(url) as response: + if response.status != 200: + raise Exception(f"Failed to download from Supabase: {response.status}") + return await response.read() + + +def get_upload_url(file_path: str) -> str: + if file_path.startswith("http"): + return file_path + return get_supabase_public_url(file_path) + + +def validate_file_extension(filename: str) -> bool: + ext = Path(filename).suffix.lower().lstrip(".") + return ext in settings.allowed_extensions + + +def validate_file_size(size: int) -> bool: + max_bytes = settings.max_upload_size_mb * 1024 * 1024 + return size <= max_bytes diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..d647b656a65ffb591cb4fd2a19d317aef5ea57da --- /dev/null +++ b/Dockerfile @@ -0,0 +1,28 @@ +FROM python:3.11-slim + +WORKDIR /app + +RUN apt-get update && apt-get install -y \ + libgl1 \ + libglib2.0-0 \ + libsm6 \ + libxext6 \ + libxrender-dev \ + libgomp1 \ + git \ + && rm -rf /var/lib/apt/lists/* + +COPY Backend/requirements.txt /app/Backend/requirements.txt +RUN pip install --no-cache-dir -r /app/Backend/requirements.txt + +COPY Backend/ /app/Backend/ +COPY static/ /app/static/ + +RUN mkdir -p /app/static/temp + +ENV PYTHONPATH=/app +ENV PYTHONUNBUFFERED=1 + +EXPOSE 7860 + +CMD ["python", "-m", "uvicorn", "Backend.api:app", "--host", "0.0.0.0", "--port", "7860", "--forwarded-allow-ips", "*"] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..40b64207d165c90289f250f4171724aea71f9c92 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 UrbanLens Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f9458395a79a7c40ed70cb05cdf066d1ad27f55a --- /dev/null +++ b/README.md @@ -0,0 +1,264 @@ +--- +title: city_issue +emoji: 🏙️ +colorFrom: blue +colorTo: green +sdk: docker +app_port: 7860 +pinned: false +--- + +# UrbanLens + +```text + _ _ _ _ + | | | | | | | | + | | | |_ __| |__ __ _ _ __ | | ___ _ __ ___ + | | | | '__| '_ \ / _` | '_ \| | / _ \ '_ \/ __| + | |__| | | | |_) | (_| | | | | |___| __/ | | \__ \ + \____/|_| |_.__/ \__,_|_| |_|______\___|_| |_|___/ +``` + +
+ +![Repo Size](https://img.shields.io/github/repo-size/0xarchit/UrbanLens?style=for-the-badge&color=2ecc71) +![Last Commit](https://img.shields.io/github/last-commit/0xarchit/UrbanLens?style=for-the-badge&color=3498db) +![Issues](https://img.shields.io/github/issues/0xarchit/UrbanLens?style=for-the-badge&color=e74c3c) +![Pull Requests](https://img.shields.io/github/issues-pr/0xarchit/UrbanLens?style=for-the-badge&color=9b59b6) + +
+
+**"Governance at the Speed of Software."**
+
+## ❏ Idea Brief
+UrbanLens is an autonomous, event-driven operating system for smart cities that transforms civil infrastructure maintenance from reactive to proactive. Unlike traditional 311 systems that rely on manual triage, UrbanLens leverages AI agents to instantly detect, validate, and route urban issues, such as potholes, illegal dumping, and damaged signage, without human fatigue or bias. By using citizens as real-time sensors and autonomous agents as the nervous system, UrbanLens enables city-scale, self-healing infrastructure.
+
+## ❏ System Internals: The "Issue Packet"
+Every interaction in UrbanLens starts with an **Issue Packet**: an immutable, atomic unit of civic data (a minimal sketch of this packet appears after Stage 1, below).
+- **Evidence:** Primary visual proof (Image/Video) captured via mandatory live camera.
+- **Context:** High-precision GPS (<10m accuracy), Compass Heading, and Device Metadata.
+- **Intent:** User-provided description, enhanced by NLP.
+
+### Anti-Fraud Enforcement
+UrbanLens implements a "Spot-Check" protocol to ensure data integrity at the source:
+1. **Live Camera Only:** The mobile app restricts gallery access. Users MUST capture photos live, preventing the repurposing of old or internet images.
+2. **GPS Precision Lock:** Submission is blocked unless GPS accuracy is better than **10 meters**.
+3. **Identity Binding:** All reports are cryptographically linked to a verified Google Identity (Supabase Auth).
+
+## ❏ The Problem
+Traditional urban governance is plagued by:
+- **Manual Bottlenecks:** Every report sits in a queue waiting for human categorization.
+- **Redundancy:** Multiple citizens report the same issue, creating duplicate tickets and wasting resources.
+- **Data Black Holes:** Citizens rarely receive feedback on their reports, leading to civic frustration.
+- **Subjective Prioritization:** Urgent issues on main roads are often treated the same as minor issues in quiet alleys.
+
+## ❏ The Solution: UrbanLens
+UrbanLens introduces the **"Issue Packet"**, an immutable unit of data containing visual evidence, GPS metadata, and intent. This packet triggers an autonomous chain reaction across a specialized agent pipeline.
+
+### System Architecture
+```mermaid
+graph TD
+    A[Citizen Mobile App] -->|Issue Packet| B[FastAPI Backend]
+    B --> C[Event Bus]
+
+    subgraph "Autonomous Agent Pipeline"
+        C --> D[Vision Agent]
+        D -->|Annotated Data| E[Geo-Deduplicate Agent]
+        E -->|Clustered Info| F[Priority Agent]
+        F -->|Urgency Level| G[Routing Agent]
+        G -->|Assignment| H[Notification Agent]
+    end
+
+    H --> I[Worker Dashboard]
+    H --> J[Admin Portal]
+    H --> K[Citizen Updates]
+
+    subgraph "Data Layer"
+        L[(PostgreSQL + PostGIS)]
+        M[(Supabase Storage)]
+    end
+
+    D -.-> M
+    E -.-> L
+```
+
+## ❏ The Autonomous Pipeline (Deep Dive)
+The system acts as a nervous system in which agents react to the "Issue Packet" in real time.
+
+### Stage 1: The Senses (Input & Validation)
+- **Vision Agent:** The "Eyes". Uses a fine-tuned **YOLOv8** model to scan incoming images.
+  - **Rejection:** Automatically discards irrelevant images (e.g., selfies, blurry photos).
+  - **Classification:** Identifies defects (Pothole, Debris, Graffiti) with confidence scores.
+- **Geo-Temporal Deduplication Agent:** The "Memory".
+  - **Clustering:** Queries the geospatial index for similar reports within `X` meters and `Y` hours.
+  - **Merging:** Instead of creating duplicates, it merges reports into a single "Cluster", increasing its urgency score (sketched below, after this list).
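+
+For concreteness, the Issue Packet described under **System Internals** above can be modeled as a frozen dataclass. This is a minimal sketch; the field names are illustrative assumptions, not the actual schema in `Backend/database/models`:
+
+```python
+# Hedged sketch of an Issue Packet; field names are assumptions.
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Optional
+from uuid import UUID
+
+@dataclass(frozen=True)  # frozen: a packet is immutable once captured
+class IssuePacket:
+    issue_id: UUID
+    image_path: str        # Evidence: live-captured photo (gallery uploads are blocked)
+    latitude: float        # Context: high-precision GPS fix
+    longitude: float
+    gps_accuracy_m: float  # submission requires accuracy better than 10 m
+    compass_heading: float
+    captured_at: datetime
+    reporter_id: UUID      # Identity: bound to a verified Supabase Auth user
+    description: Optional[str] = None  # Intent: free text, NLP-enhanced downstream
+```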
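+
+The geo-temporal clustering step can be expressed with the `find_nearby_issues` helper this patch adds in `Backend/utils/geo.py`, with `X` and `Y` supplied as a radius and time window. The 50 m radius, 48 h window, and input shape below are illustrative assumptions, not the agent's actual configuration:
+
+```python
+# Minimal sketch of geo-temporal duplicate candidate lookup (assumed parameters).
+from datetime import datetime, timedelta
+from uuid import UUID
+
+from Backend.utils.geo import find_nearby_issues
+
+def candidate_duplicates(
+    lat: float,
+    lon: float,
+    recent: list[tuple[UUID, float, float, datetime]],  # (id, lat, lon, created_at)
+    radius_m: float = 50.0,
+    window: timedelta = timedelta(hours=48),
+) -> list[tuple[UUID, float]]:
+    """Return (issue_id, distance_m) pairs inside the radius and window, nearest first."""
+    cutoff = datetime.utcnow() - window
+    in_window = [(issue_id, la, lo) for issue_id, la, lo, ts in recent if ts >= cutoff]
+    return find_nearby_issues(lat, lon, in_window, radius_m)
+```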
+
+### Stage 2: The Brain (Decision Making)
+- **Priority Agent:** The "Judge".
+  - **Context Awareness:** Combines Vision Confidence + Location Context (e.g., "Near School") + Repeat Count (a scoring sketch follows after Stage 3).
+  - **SLA Setting:** Assigns dynamic deadlines (e.g., 4 hours for Critical).
+- **Routing Agent:** The "Dispatcher".
+  - **Logic:** Matches issue category to Department (Roads vs Sanitation) and assigns the issue to specific workers based on geolocation and load.
+
+### Stage 3: The Enforcers (Execution)
+- **SLA Watchdog Agent:** The "Timekeeper".
+  - **AI Monitoring:** Analyzes the *context* of delayed issues, not just the timer.
+  - **Escalation:** Triggers warnings when 50% and then 20% of the SLA time remains (see the second sketch after this list).
+- **Notification Agent:** The "Messenger".
+  - **Omnichannel:** Pushes updates to the Citizen (App) and Worker (Task List) simultaneously and sends email notifications to the respective recipients.
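+
+The Priority Agent's signal combination can be read as a weighted score mapped onto the 1-4 priority scale (1 = Critical, 4 = Low). A minimal sketch, where the function name, weights, and thresholds are illustrative assumptions rather than the agent's actual tuning:
+
+```python
+# Hypothetical priority scoring; weights and thresholds are assumptions.
+def priority_score(vision_confidence: float, near_sensitive_site: bool, repeat_count: int) -> int:
+    """Map combined signals onto the 1-4 priority scale (1=Critical ... 4=Low)."""
+    score = vision_confidence                     # 0..1 from the YOLOv8 detection
+    score += 0.3 if near_sensitive_site else 0.0  # location context, e.g. "Near School"
+    score += 0.1 * min(repeat_count, 5)           # reports merged by the dedup agent
+    if score >= 1.2:
+        return 1  # Critical
+    if score >= 0.9:
+        return 2  # High
+    if score >= 0.6:
+        return 3  # Medium
+    return 4      # Low
+```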
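+
+The watchdog's two warning points reduce to a fraction-of-time-remaining check against the SLA deadline. A minimal sketch, assuming a hypothetical function name and return labels (not the Backend's actual implementation):
+
+```python
+# Sketch of the 50% / 20% SLA warning thresholds described above.
+from datetime import datetime
+
+def sla_alert_level(created_at: datetime, deadline: datetime, now: datetime) -> str:
+    """'warning' at <=50% of the SLA window left, 'critical' at <=20%, 'breached' past it."""
+    total = (deadline - created_at).total_seconds()
+    remaining = (deadline - now).total_seconds()
+    if total <= 0 or remaining <= 0:
+        return "breached"  # past the deadline: hand off to the Escalation Agent
+    fraction_left = remaining / total
+    if fraction_left <= 0.20:
+        return "critical"
+    if fraction_left <= 0.50:
+        return "warning"
+    return "ok"
+```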
+
+## ❏ Project Showcase
+
+*(Screenshot galleries; images live in the repository's `assets/` directory.)*
+
+- **User Mobile App Interface:** Mobile Interface, Reporting Interface
+- **Admin Command Center:** Admin Dashboard, Geospatial Heatmap, Issue Management, AI Manual Review, Workforce Management, Department Control, Worker Requests
+- **Worker & Resolution Portal:** Worker Dashboard, Resolution Protocol, Evidence Submission, Task Completion
+- **Web Portal for Citizen:** Landing Page, User Dashboard
+- **Autonomous Pipeline:** Pipeline Overview
+
+## ❏ Client Ecosystem
+
+### 1. Citizen Mobile App (The Sensors)
+*Built with React Native + Expo (TypeScript)*
+- **Offline-First:** (Experimental Beta) Caches reports locally and syncs when the connection returns.
+- **Real-Time Tracking:** Server-driven events update the "Processing" screen live as agents complete their tasks.
+- **Gamification:** (Planned) Civic points for verified reports.
+
+### 2. Admin Command Center (The Control)
+*Built with Next.js 16 (App Router) + Tailwind CSS*
+- **Role-Based Access Control (RBAC):**
+  - **Super Admin:** System configuration.
+  - **Worker Dashboard:** Issue submission and resolution.
+- **Visual Intelligence:** Heatmaps and density plots to identify crumbling infrastructure zones.
+
+### 3. Worker Interface (The Hands)
+*Mobile-First Web View*
+- **Task List:** Simple, priority-sorted list of jobs.
+- **Navigation:** One-tap deep link to Google Maps.
+- **Proof of Resolution:** Workers **cannot close a ticket** without uploading a photo. The Vision Agent verifies this photo against the original to confirm the fix.
+
+## ❏ Modular Monolith Architecture
+
+The codebase is structured to scale from a monolith to microservices easily.
+
+```text
+/
+├── Backend/           # The Core Logic (FastAPI + Async SQLAlchemy)
+│   ├── agents/        # 🧠 The Brain: 7 Autonomous Agents (Vision, Geo, SLA, Priority, Routing, Notification, Escalation)
+│   ├── api/           # Stateless REST Endpoints (Routes)
+│   ├── core/          # Shared Infra (Event Bus, Config)
+│   ├── orchestration/ # Agent Base Classes & Workflow Managers
+│   ├── database/      # Database Models & Schemas
+│   └── services/      # Services (Email, Authentication, etc.)
+├── User/              # 📱 Citizen Mobile App (Expo/React Native)
+├── Frontend/          # 💻 Admin & Worker Portals (Next.js 16)
+├── infra/             # ☁️ Docker & Deployment Config
+└── assets/            # Project Screenshots & Media
+```
+
+## ❏ Tech Stack
+
+- **Backend:** FastAPI, Python, SQLAlchemy, PostgreSQL (PostGIS)
+- **AI/ML:** YOLOv8s (Fine-tuned for urban defects)
+- **Frontend:** Next.js (User/Admin/Worker Web Dashboards), Tailwind CSS
+- **Mobile:** React Native, Expo, TypeScript
+- **Infrastructure:** Supabase (Auth, Storage), Docker
+
+## ❏ Key Features
+- **Anti-Fraud Reporting:** Mandatory live camera and high-precision GPS lock to prevent fake reports.
+- **Real-Time Tracking:** Server-driven progress visualization for citizens.
+- **Proof of Resolution:** Workers must upload "After" photos to close tickets.
+- **Heatmaps:** Data-driven insights for city administrators to identify systemic issues.
+
+## ❏ Agent Pipeline Details
+
+**Stage 1: Input & Validation**
+- **Vision Agent:** Scans images, detects objects (potholes, garbage, debris), rejects spam, classifies issues.
+- **Geo-Temporal Deduplication Agent:** Checks for duplicate issues in space/time, merges reports, increases urgency for repeated reports.
+
+**Stage 2: Decision Making**
+- **Priority Agent:** Assigns severity (CRITICAL, HIGH, MEDIUM, LOW) and SLA deadlines based on context.
+- **Routing Agent:** Assigns issues to the correct department and worker, optimizes resource allocation.
+
+**Stage 3: Execution & Follow-up**
+- **SLA Watchdog Agent:** Monitors deadlines, triggers escalation if breached.
+- **Escalation Agent:** Handles overdue issues, reassigns to supervisors, flags for transparency.
+- **Notification Agent:** Sends updates to citizens and workers via multiple channels.
+
+## ❏ Roadmap & Future Vision
+
+### Phase 1: The Foundation (Completed)
+- Autonomous Agent Pipeline (Vision, Geo, Priority, Routing, etc.).
+- Cross-Platform Ecosystem (Citizen App, Admin Portal, Worker View). + +### Phase 2: Predictive Governance +- **Predictive Maintenance:** Using historical data to predict potholes before they form (e.g., "Road X cracks every March"). +- **IoT Fusion:** Integrating direct feeds from smart bins and streetlights. + +### Phase 3: Gamification +- **Civic Reputation:** Leaderboards for top contributing citizens. +- **Incentives:** Tax credits or transit passes for verified infrastructure reporting. + +--- +*Built by BitBots for a smarter, more responsive city.* diff --git a/User/android/.gitignore b/User/android/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..8a6be077181fc1c016eb71a0e426bd0525a90fea --- /dev/null +++ b/User/android/.gitignore @@ -0,0 +1,16 @@ +# OSX +# +.DS_Store + +# Android/IntelliJ +# +build/ +.idea +.gradle +local.properties +*.iml +*.hprof +.cxx/ + +# Bundle artifacts +*.jsbundle diff --git a/User/android/app/build.gradle b/User/android/app/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..a47e144e2c2008cca91a096eb05e53dec2e43bc4 --- /dev/null +++ b/User/android/app/build.gradle @@ -0,0 +1,197 @@ +apply plugin: "com.android.application" +apply plugin: "org.jetbrains.kotlin.android" +apply plugin: "com.facebook.react" + +def projectRoot = rootDir.getAbsoluteFile().getParentFile().getAbsolutePath() + +/** + * This is the configuration block to customize your React Native Android app. + * By default you don't need to apply any configuration, just uncomment the lines you need. + */ +react { + entryFile = file(["node", "-e", "require('expo/scripts/resolveAppEntry')", projectRoot, "android", "absolute"].execute(null, rootDir).text.trim()) + reactNativeDir = new File(["node", "--print", "require.resolve('react-native/package.json')"].execute(null, rootDir).text.trim()).getParentFile().getAbsoluteFile() + hermesCommand = new File(["node", "--print", "require.resolve('react-native/package.json')"].execute(null, rootDir).text.trim()).getParentFile().getAbsolutePath() + "/sdks/hermesc/%OS-BIN%/hermesc" + codegenDir = new File(["node", "--print", "require.resolve('@react-native/codegen/package.json', { paths: [require.resolve('react-native/package.json')] })"].execute(null, rootDir).text.trim()).getParentFile().getAbsoluteFile() + + enableBundleCompression = (findProperty('android.enableBundleCompression') ?: false).toBoolean() + // Use Expo CLI to bundle the app, this ensures the Metro config + // works correctly with Expo projects. + cliFile = new File(["node", "--print", "require.resolve('@expo/cli', { paths: [require.resolve('expo/package.json')] })"].execute(null, rootDir).text.trim()) + bundleCommand = "export:embed" + + /* Folders */ + // The root of your project, i.e. where "package.json" lives. Default is '../..' + // root = file("../../") + // The folder where the react-native NPM package is. Default is ../../node_modules/react-native + // reactNativeDir = file("../../node_modules/react-native") + // The folder where the react-native Codegen package is. Default is ../../node_modules/@react-native/codegen + // codegenDir = file("../../node_modules/@react-native/codegen") + + /* Variants */ + // The list of variants to that are debuggable. For those we're going to + // skip the bundling of the JS bundle and the assets. By default is just 'debug'. + // If you add flavors like lite, prod, etc. you'll have to list your debuggableVariants. 
+    // debuggableVariants = ["liteDebug", "prodDebug"]
+
+    /* Bundling */
+    // A list containing the node command and its flags. Default is just 'node'.
+    // nodeExecutableAndArgs = ["node"]
+
+    //
+    // The path to the CLI configuration file. Default is empty.
+    // bundleConfig = file(../rn-cli.config.js)
+    //
+    // The name of the generated asset file containing your JS bundle
+    // bundleAssetName = "MyApplication.android.bundle"
+    //
+    // The entry file for bundle generation. Default is 'index.android.js' or 'index.js'
+    // entryFile = file("../js/MyApplication.android.js")
+    //
+    // A list of extra flags to pass to the 'bundle' commands.
+    // See https://github.com/react-native-community/cli/blob/main/docs/commands.md#bundle
+    // extraPackagerArgs = []
+
+    /* Hermes Commands */
+    // The hermes compiler command to run. By default it is 'hermesc'
+    // hermesCommand = "$rootDir/my-custom-hermesc/bin/hermesc"
+    //
+    // The list of flags to pass to the Hermes compiler. By default is "-O", "-output-source-map"
+    // hermesFlags = ["-O", "-output-source-map"]
+
+    /* Autolinking */
+    autolinkLibrariesWithApp()
+}
+
+/**
+ * Set this to true in release builds to optimize the app using [R8](https://developer.android.com/topic/performance/app-optimization/enable-app-optimization).
+ */
+def enableMinifyInReleaseBuilds = false // DISABLED temporarily for debugging
+
+/**
+ * The preferred build flavor of JavaScriptCore (JSC)
+ *
+ * For example, to use the international variant, you can use:
+ * `def jscFlavor = 'org.webkit:android-jsc-intl:+'`
+ *
+ * The international variant includes ICU i18n library and necessary data
+ * allowing to use e.g. `Date.toLocaleString` and `String.localeCompare` that
+ * give correct results when using with locales other than en-US. Note that
+ * this variant is about 6MiB larger per architecture than default.
+ */
+def jscFlavor = 'io.github.react-native-community:jsc-android:2026004.+'
+
+android {
+    ndkVersion rootProject.ext.ndkVersion
+
+    buildToolsVersion rootProject.ext.buildToolsVersion
+    compileSdk rootProject.ext.compileSdkVersion
+
+    namespace 'com.cityissue.reporter'
+    defaultConfig {
+        applicationId 'com.cityissue.reporter'
+        minSdkVersion rootProject.ext.minSdkVersion
+        targetSdkVersion rootProject.ext.targetSdkVersion
+        versionCode 1
+        versionName "1.0.0"
+
+        buildConfigField "String", "REACT_NATIVE_RELEASE_LEVEL", "\"${findProperty('reactNativeReleaseLevel') ?: 'stable'}\""
+    }
+
+    // ABI Splitting: Generates separate APKs for different CPU architectures.
+    // This removes unused native libraries (like x86 for emulators) from your phone's APK.
+    splits {
+        abi {
+            reset()
+            enable true
+            universalApk false // If true, also generates a heavy universal APK
+            include "armeabi-v7a", "arm64-v8a" // Only include real device architectures
+        }
+    }
+
+    signingConfigs {
+        debug {
+            storeFile file('debug.keystore')
+            storePassword 'android'
+            keyAlias 'androiddebugkey'
+            keyPassword 'android'
+        }
+    }
+    buildTypes {
+        debug {
+            signingConfig signingConfigs.debug
+        }
+        release {
+            // Caution! In production, you need to generate your own keystore file.
+            // see https://reactnative.dev/docs/signed-apk-android.
+            signingConfig signingConfigs.debug
+
+            // Disable R8/ProGuard for now to fix crash
+            minifyEnabled false
+            shrinkResources false
+
+            proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro"
+        }
+    }
+    packagingOptions {
+        jniLibs {
+            def enableLegacyPackaging = findProperty('expo.useLegacyPackaging') ?: 'false'
+            useLegacyPackaging enableLegacyPackaging.toBoolean()
+        }
+    }
+    androidResources {
+        ignoreAssetsPattern '!.svn:!.git:!.ds_store:!*.scc:!CVS:!thumbs.db:!picasa.ini:!*~'
+    }
+}
+
+// Apply static values from `gradle.properties` to the `android.packagingOptions`
+// Accepts values in comma delimited lists, example:
+// android.packagingOptions.pickFirsts=/LICENSE,**/picasa.ini
+["pickFirsts", "excludes", "merges", "doNotStrip"].each { prop ->
+    // Split option: 'foo,bar' -> ['foo', 'bar']
+    def options = (findProperty("android.packagingOptions.$prop") ?: "").split(",");
+    // Trim all elements in place.
+    for (i in 0..<options.size()) options[i] = options[i].trim();
+    // `[] - ""` is essentially `[""].filter(Boolean)` removing all empty strings.
+    options -= ""
+
+    if (options.length > 0) {
+        println "android.packagingOptions.$prop += $options ($options.length)"
+        // Ex: android.packagingOptions.pickFirsts += '**/SCCS/**'
+        options.each {
+            android.packagingOptions[prop] += it
+        }
+    }
+}
+
+dependencies {
+    // The version of react-native is set by the React Native Gradle Plugin
+    implementation("com.facebook.react:react-android")
+
+    def isGifEnabled = (findProperty('expo.gif.enabled') ?: "") == "true";
+    def isWebpEnabled = (findProperty('expo.webp.enabled') ?: "") == "true";
+    def isWebpAnimatedEnabled = (findProperty('expo.webp.animated') ?: "") == "true";
+
+    if (isGifEnabled) {
+        // For animated gif support
+        implementation("com.facebook.fresco:animated-gif:${expoLibs.versions.fresco.get()}")
+    }
+
+    if (isWebpEnabled) {
+        // For webp support
+        implementation("com.facebook.fresco:webpsupport:${expoLibs.versions.fresco.get()}")
+        if (isWebpAnimatedEnabled) {
+            // Animated webp support
+            implementation("com.facebook.fresco:animated-webp:${expoLibs.versions.fresco.get()}")
+        }
+    }
+
+    if (hermesEnabled.toBoolean()) {
+        implementation("com.facebook.react:hermes-android")
+    } else {
+        implementation jscFlavor
+    }
+}
diff --git a/User/android/app/debug.keystore b/User/android/app/debug.keystore
new file mode 100644
index 0000000000000000000000000000000000000000..364e105ed39fbfd62001429a68140672b06ec0de
Binary files /dev/null and b/User/android/app/debug.keystore differ
diff --git a/User/android/app/proguard-rules.pro b/User/android/app/proguard-rules.pro
new file mode 100644
index 0000000000000000000000000000000000000000..551eb41da24268637f6d9ceb38ea1b5359bf734d
--- /dev/null
+++ b/User/android/app/proguard-rules.pro
@@ -0,0 +1,14 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in /usr/local/Cellar/android-sdk/24.3.3/tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# react-native-reanimated +-keep class com.swmansion.reanimated.** { *; } +-keep class com.facebook.react.turbomodule.** { *; } + +# Add any project specific keep options here: diff --git a/User/android/app/src/debug/AndroidManifest.xml b/User/android/app/src/debug/AndroidManifest.xml new file mode 100644 index 0000000000000000000000000000000000000000..3ec2507bab7c58dda1f8467f9137b02bdbd87b50 --- /dev/null +++ b/User/android/app/src/debug/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + + + diff --git a/User/android/app/src/debugOptimized/AndroidManifest.xml b/User/android/app/src/debugOptimized/AndroidManifest.xml new file mode 100644 index 0000000000000000000000000000000000000000..3ec2507bab7c58dda1f8467f9137b02bdbd87b50 --- /dev/null +++ b/User/android/app/src/debugOptimized/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + + + diff --git a/User/android/app/src/main/AndroidManifest.xml b/User/android/app/src/main/AndroidManifest.xml new file mode 100644 index 0000000000000000000000000000000000000000..e260e7f09343b48433d178ab24f4db1fa903cd61 --- /dev/null +++ b/User/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/User/android/app/src/main/java/com/cityissue/reporter/MainActivity.kt b/User/android/app/src/main/java/com/cityissue/reporter/MainActivity.kt new file mode 100644 index 0000000000000000000000000000000000000000..ac603c9b0cb80ee29c647376f7e47521f8c6c23f --- /dev/null +++ b/User/android/app/src/main/java/com/cityissue/reporter/MainActivity.kt @@ -0,0 +1,61 @@ +package com.cityissue.reporter + +import android.os.Build +import android.os.Bundle + +import com.facebook.react.ReactActivity +import com.facebook.react.ReactActivityDelegate +import com.facebook.react.defaults.DefaultNewArchitectureEntryPoint.fabricEnabled +import com.facebook.react.defaults.DefaultReactActivityDelegate + +import expo.modules.ReactActivityDelegateWrapper + +class MainActivity : ReactActivity() { + override fun onCreate(savedInstanceState: Bundle?) { + // Set the theme to AppTheme BEFORE onCreate to support + // coloring the background, status bar, and navigation bar. + // This is required for expo-splash-screen. + setTheme(R.style.AppTheme); + super.onCreate(null) + } + + /** + * Returns the name of the main component registered from JavaScript. This is used to schedule + * rendering of the component. + */ + override fun getMainComponentName(): String = "main" + + /** + * Returns the instance of the [ReactActivityDelegate]. We use [DefaultReactActivityDelegate] + * which allows you to enable New Architecture with a single boolean flags [fabricEnabled] + */ + override fun createReactActivityDelegate(): ReactActivityDelegate { + return ReactActivityDelegateWrapper( + this, + BuildConfig.IS_NEW_ARCHITECTURE_ENABLED, + object : DefaultReactActivityDelegate( + this, + mainComponentName, + fabricEnabled + ){}) + } + + /** + * Align the back button behavior with Android S + * where moving root activities to background instead of finishing activities. + * @see onBackPressed + */ + override fun invokeDefaultOnBackPressed() { + if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.R) { + if (!moveTaskToBack(false)) { + // For non-root activities, use the default implementation to finish them. 
+ super.invokeDefaultOnBackPressed() + } + return + } + + // Use the default back button implementation on Android S + // because it's doing more than [Activity.moveTaskToBack] in fact. + super.invokeDefaultOnBackPressed() + } +} diff --git a/User/android/app/src/main/java/com/cityissue/reporter/MainApplication.kt b/User/android/app/src/main/java/com/cityissue/reporter/MainApplication.kt new file mode 100644 index 0000000000000000000000000000000000000000..242085d0fb686884420c8f093d6e9c2a659638e1 --- /dev/null +++ b/User/android/app/src/main/java/com/cityissue/reporter/MainApplication.kt @@ -0,0 +1,56 @@ +package com.cityissue.reporter + +import android.app.Application +import android.content.res.Configuration + +import com.facebook.react.PackageList +import com.facebook.react.ReactApplication +import com.facebook.react.ReactNativeApplicationEntryPoint.loadReactNative +import com.facebook.react.ReactNativeHost +import com.facebook.react.ReactPackage +import com.facebook.react.ReactHost +import com.facebook.react.common.ReleaseLevel +import com.facebook.react.defaults.DefaultNewArchitectureEntryPoint +import com.facebook.react.defaults.DefaultReactNativeHost + +import expo.modules.ApplicationLifecycleDispatcher +import expo.modules.ReactNativeHostWrapper + +class MainApplication : Application(), ReactApplication { + + override val reactNativeHost: ReactNativeHost = ReactNativeHostWrapper( + this, + object : DefaultReactNativeHost(this) { + override fun getPackages(): List = + PackageList(this).packages.apply { + // Packages that cannot be autolinked yet can be added manually here, for example: + // add(MyReactNativePackage()) + } + + override fun getJSMainModuleName(): String = ".expo/.virtual-metro-entry" + + override fun getUseDeveloperSupport(): Boolean = BuildConfig.DEBUG + + override val isNewArchEnabled: Boolean = BuildConfig.IS_NEW_ARCHITECTURE_ENABLED + } + ) + + override val reactHost: ReactHost + get() = ReactNativeHostWrapper.createReactHost(applicationContext, reactNativeHost) + + override fun onCreate() { + super.onCreate() + DefaultNewArchitectureEntryPoint.releaseLevel = try { + ReleaseLevel.valueOf(BuildConfig.REACT_NATIVE_RELEASE_LEVEL.uppercase()) + } catch (e: IllegalArgumentException) { + ReleaseLevel.STABLE + } + loadReactNative(this) + ApplicationLifecycleDispatcher.onApplicationCreate(this) + } + + override fun onConfigurationChanged(newConfig: Configuration) { + super.onConfigurationChanged(newConfig) + ApplicationLifecycleDispatcher.onConfigurationChanged(this, newConfig) + } +} diff --git a/User/android/app/src/main/res/drawable-hdpi/splashscreen_logo.png b/User/android/app/src/main/res/drawable-hdpi/splashscreen_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..95c473f2e9abfe7b09c616cf849c360cf921cc09 Binary files /dev/null and b/User/android/app/src/main/res/drawable-hdpi/splashscreen_logo.png differ diff --git a/User/android/app/src/main/res/drawable-mdpi/splashscreen_logo.png b/User/android/app/src/main/res/drawable-mdpi/splashscreen_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..7f3d204efc521270cf31cfa73323cc53fa391b23 Binary files /dev/null and b/User/android/app/src/main/res/drawable-mdpi/splashscreen_logo.png differ diff --git a/User/android/app/src/main/res/drawable-xhdpi/splashscreen_logo.png b/User/android/app/src/main/res/drawable-xhdpi/splashscreen_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..76326f0e89641db10ae370c421b793a896e308e4 Binary files /dev/null and 
b/User/android/app/src/main/res/drawable-xhdpi/splashscreen_logo.png differ diff --git a/User/android/app/src/main/res/drawable-xxhdpi/splashscreen_logo.png b/User/android/app/src/main/res/drawable-xxhdpi/splashscreen_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..118ccc2f23569df1b2fabdc4894c4683bb4bd11b Binary files /dev/null and b/User/android/app/src/main/res/drawable-xxhdpi/splashscreen_logo.png differ diff --git a/User/android/app/src/main/res/drawable-xxxhdpi/splashscreen_logo.png b/User/android/app/src/main/res/drawable-xxxhdpi/splashscreen_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..937f184bc6e03174c5e968b0b3744fba3e050bdb Binary files /dev/null and b/User/android/app/src/main/res/drawable-xxxhdpi/splashscreen_logo.png differ diff --git a/User/android/app/src/main/res/drawable/ic_launcher_background.xml b/User/android/app/src/main/res/drawable/ic_launcher_background.xml new file mode 100644 index 0000000000000000000000000000000000000000..883b2a080f3a9981ce0fdc780f34f24ede704236 --- /dev/null +++ b/User/android/app/src/main/res/drawable/ic_launcher_background.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/User/android/app/src/main/res/drawable/rn_edit_text_material.xml b/User/android/app/src/main/res/drawable/rn_edit_text_material.xml new file mode 100644 index 0000000000000000000000000000000000000000..5c25e728ea2ce7724bd9ef08c87ce7ee89c77103 --- /dev/null +++ b/User/android/app/src/main/res/drawable/rn_edit_text_material.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + diff --git a/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml new file mode 100644 index 0000000000000000000000000000000000000000..3941bea9b98bb1a65d31fb061f111dfd77fa0993 --- /dev/null +++ b/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml new file mode 100644 index 0000000000000000000000000000000000000000..3941bea9b98bb1a65d31fb061f111dfd77fa0993 --- /dev/null +++ b/User/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/User/android/app/src/main/res/mipmap-hdpi/ic_launcher.webp b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..d129621b659bcbfc379b1aa240475c104582a194 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher.webp differ diff --git a/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.webp b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.webp new file mode 100644 index 0000000000000000000000000000000000000000..ac03dbf69f0d7bfe4cd103b20ae5228aec2ce383 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.webp differ diff --git a/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..2074822b444bd1baf48e1a00faea49277d22b448 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp differ diff --git a/User/android/app/src/main/res/mipmap-mdpi/ic_launcher.webp 
b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..3984726b0c20dd4660809460da2520209cd2f592 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher.webp differ diff --git a/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.webp b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.webp new file mode 100644 index 0000000000000000000000000000000000000000..e1173a94d67fef8ecc8cae08697f56bb19f864bb Binary files /dev/null and b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.webp differ diff --git a/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..7b7b044620147ac20d670a05894ad1cb5c97fdd2 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp differ diff --git a/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..31ae65fb28b454c9abb4f1f92f18783de2516ea4 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp differ diff --git a/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.webp b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.webp new file mode 100644 index 0000000000000000000000000000000000000000..ff086fdc34ead5977beba4d9cf87c63b8b3a5c60 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.webp differ diff --git a/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..06cfffb4b78ffd58b2e4bd07db088c10b887d444 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp differ diff --git a/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..1cdf96344a346e768979c9da941ea0bd85b1ee8b Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp differ diff --git a/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.webp b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.webp new file mode 100644 index 0000000000000000000000000000000000000000..f7f1d06908af8ba1320d9aa6474f792883db430a Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.webp differ diff --git a/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..fc94d98510e8a9d5d32e6842b423eb831d69fac8 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp differ diff --git a/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..69a387051429cdcdb52899d82ae05d77463ade97 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp differ diff --git 
a/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.webp b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.webp new file mode 100644 index 0000000000000000000000000000000000000000..49a464ee361fe64ce9d076716724a756c521d807 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.webp differ diff --git a/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..7d91f323ec944e76d01993bbe7b78f362e7d3dc5 Binary files /dev/null and b/User/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp differ diff --git a/User/android/app/src/main/res/values-night/colors.xml b/User/android/app/src/main/res/values-night/colors.xml new file mode 100644 index 0000000000000000000000000000000000000000..3c05de5be81a972ce887d0e26d2c9cc4155c303d --- /dev/null +++ b/User/android/app/src/main/res/values-night/colors.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/User/android/app/src/main/res/values/colors.xml b/User/android/app/src/main/res/values/colors.xml new file mode 100644 index 0000000000000000000000000000000000000000..f268c9e7a027cf119b80e904979b2336886348cd --- /dev/null +++ b/User/android/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ + + #0F172A + #0F172A + #023c69 + #0F172A + \ No newline at end of file diff --git a/User/android/app/src/main/res/values/strings.xml b/User/android/app/src/main/res/values/strings.xml new file mode 100644 index 0000000000000000000000000000000000000000..99e86b607011bca4396116c876bebbe71bba6a6f --- /dev/null +++ b/User/android/app/src/main/res/values/strings.xml @@ -0,0 +1,5 @@ + + City Issue Reporter + contain + false + \ No newline at end of file diff --git a/User/android/app/src/main/res/values/styles.xml b/User/android/app/src/main/res/values/styles.xml new file mode 100644 index 0000000000000000000000000000000000000000..b675006853bd6bf469f93ee40d4d4f21f5256756 --- /dev/null +++ b/User/android/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + \ No newline at end of file diff --git a/User/android/app/src/main/res/xml/network_security_config.xml b/User/android/app/src/main/res/xml/network_security_config.xml new file mode 100644 index 0000000000000000000000000000000000000000..2f952c071ce628b4eeb4892100690d5ac4dc2f6d --- /dev/null +++ b/User/android/app/src/main/res/xml/network_security_config.xml @@ -0,0 +1,14 @@ + + + + + + + + + 0xarchit-city-issue.hf.space + huggingface.co + localhost + 10.0.2.2 + + diff --git a/User/android/build.gradle b/User/android/build.gradle new file mode 100644 index 0000000000000000000000000000000000000000..0554dd156c12f9be47224be2fe763937962a4fb2 --- /dev/null +++ b/User/android/build.gradle @@ -0,0 +1,24 @@ +// Top-level build file where you can add configuration options common to all sub-projects/modules. 
+ +buildscript { + repositories { + google() + mavenCentral() + } + dependencies { + classpath('com.android.tools.build:gradle') + classpath('com.facebook.react:react-native-gradle-plugin') + classpath('org.jetbrains.kotlin:kotlin-gradle-plugin') + } +} + +allprojects { + repositories { + google() + mavenCentral() + maven { url 'https://www.jitpack.io' } + } +} + +apply plugin: "expo-root-project" +apply plugin: "com.facebook.react.rootproject" diff --git a/User/android/gradle.properties b/User/android/gradle.properties new file mode 100644 index 0000000000000000000000000000000000000000..21749d36557e7b8dd98c6de590c798d8c161e91e --- /dev/null +++ b/User/android/gradle.properties @@ -0,0 +1,67 @@ +# Project-wide Gradle settings. + +# IDE (e.g. Android Studio) users: +# Gradle settings configured through the IDE *will override* +# any settings specified in this file. + +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html + +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +# Default value: -Xmx512m -XX:MaxMetaspaceSize=256m +org.gradle.jvmargs=-Xmx2048m -XX:MaxMetaspaceSize=512m + +# When configured, Gradle will run in incubating parallel mode. +# This option should only be used with decoupled projects. More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +org.gradle.parallel=true + +# AndroidX package structure to make it clearer which packages are bundled with the +# Android operating system, and which are packaged with your app's APK +# https://developer.android.com/topic/libraries/support-library/androidx-rn +android.useAndroidX=true + +# Enable AAPT2 PNG crunching +android.enablePngCrunchInReleaseBuilds=true + +# Use this property to specify which architecture you want to build. +# You can also override it from the CLI using +# ./gradlew -PreactNativeArchitectures=x86_64 +reactNativeArchitectures=armeabi-v7a,arm64-v8a,x86,x86_64 + +# Use this property to enable support to the new architecture. +# This will allow you to use TurboModules and the Fabric render in +# your application. You should enable this flag either if you want +# to write custom TurboModules/Fabric components OR use libraries that +# are providing them. +newArchEnabled=true + +# Use this property to enable or disable the Hermes JS engine. +# If set to false, you will be using JSC instead. +hermesEnabled=true + +# Use this property to enable edge-to-edge display support. +# This allows your app to draw behind system bars for an immersive UI. +# Note: Only works with ReactActivity and should not be used with custom Activity. +edgeToEdgeEnabled=true + +# Enable GIF support in React Native images (~200 B increase) +expo.gif.enabled=true +# Enable webp support in React Native images (~85 KB increase) +expo.webp.enabled=true +# Enable animated webp support (~3.4 MB increase) +# Disabled by default because iOS doesn't support animated webp +expo.webp.animated=false + +# Enable network inspector +EX_DEV_CLIENT_NETWORK_INSPECTOR=true + +# Use legacy packaging to compress native libraries in the resulting APK. +expo.useLegacyPackaging=false + +# Specifies whether the app is configured to use edge-to-edge via the app config or plugin +# WARNING: This property has been deprecated and will be removed in Expo SDK 55. 
Use `edgeToEdgeEnabled` or `react.edgeToEdgeEnabled` to determine whether the project is using edge-to-edge.
+expo.edgeToEdgeEnabled=true
diff --git a/User/android/gradle/wrapper/gradle-wrapper.jar b/User/android/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000000000000000000000000000000000000..1b33c55baabb587c669f562ae36f953de2481846
Binary files /dev/null and b/User/android/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/User/android/gradle/wrapper/gradle-wrapper.properties b/User/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000000000000000000000000000000000000..d4081da476bb3ee087bb538a0bb6718958af29a4
--- /dev/null
+++ b/User/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,7 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
+networkTimeout=10000
+validateDistributionUrl=true
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/User/android/gradlew b/User/android/gradlew
new file mode 100644
index 0000000000000000000000000000000000000000..7f94d3d4777766bfb6d45df1d8f45c3f3dce4855
--- /dev/null
+++ b/User/android/gradlew
@@ -0,0 +1,251 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+
+##############################################################################
+#
+#   Gradle start up script for POSIX generated by Gradle.
+#
+#   Important for running:
+#
+#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+#       noncompliant, but you have some other compliant shell such as ksh or
+#       bash, then to run this script, type that shell name before the whole
+#       command line, like:
+#
+#           ksh Gradle
+#
+#       Busybox and similar reduced shells will NOT work, because this script
+#       requires all of these POSIX shell features:
+#         * functions;
+#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+#         * compound commands having a testable exit status, especially «case»;
+#         * various built-in commands including «command», «set», and «ulimit».
+#
+#   Important for patching:
+#
+#   (2) This script targets any POSIX shell, so it avoids extensions provided
+#       by Bash, Ksh, etc; in particular arrays are avoided.
+#
+#       The "traditional" practice of packing multiple parameters into a
+#       space-separated string is a well documented source of bugs and security
+#       problems, so this is (mostly) avoided, by progressively accumulating
+#       options in "$@", and eventually passing that to Java.
+#
+#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+#       see the in-line comments for details.
+#
+#       There are tweaks for specific operating systems such as AIX, CygWin,
+#       Darwin, MinGW, and NonStop.
+# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH="\\\"\\\"" + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/User/android/gradlew.bat b/User/android/gradlew.bat new file mode 100644 index 0000000000000000000000000000000000000000..db3a6ac207e507b0bc1635a9f2c18d3b174e682e --- /dev/null +++ b/User/android/gradlew.bat @@ -0,0 +1,94 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH= + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
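+rem For example (illustrative note, not from the upstream template): running
+rem "set GRADLE_EXIT_CONSOLE=1" before invoking gradlew.bat makes the failure
+rem path below use "exit %EXIT_CODE%", ending the hosting cmd.exe with the
+rem script's code, instead of returning via "exit /b".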
+set EXIT_CODE=%ERRORLEVEL%
+if %EXIT_CODE% equ 0 set EXIT_CODE=1
+if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
+exit /b %EXIT_CODE%
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/User/android/settings.gradle b/User/android/settings.gradle
new file mode 100644
index 0000000000000000000000000000000000000000..556d0d71061e5e33dc2b0e0e403036b767cc26bf
--- /dev/null
+++ b/User/android/settings.gradle
@@ -0,0 +1,39 @@
+pluginManagement {
+  def reactNativeGradlePlugin = new File(
+    providers.exec {
+      workingDir(rootDir)
+      commandLine("node", "--print", "require.resolve('@react-native/gradle-plugin/package.json', { paths: [require.resolve('react-native/package.json')] })")
+    }.standardOutput.asText.get().trim()
+  ).getParentFile().absolutePath
+  includeBuild(reactNativeGradlePlugin)
+
+  def expoPluginsPath = new File(
+    providers.exec {
+      workingDir(rootDir)
+      commandLine("node", "--print", "require.resolve('expo-modules-autolinking/package.json', { paths: [require.resolve('expo/package.json')] })")
+    }.standardOutput.asText.get().trim(),
+    "../android/expo-gradle-plugin"
+  ).absolutePath
+  includeBuild(expoPluginsPath)
+}
+
+plugins {
+  id("com.facebook.react.settings")
+  id("expo-autolinking-settings")
+}
+
+extensions.configure(com.facebook.react.ReactSettingsExtension) { ex ->
+  if (System.getenv('EXPO_USE_COMMUNITY_AUTOLINKING') == '1') {
+    ex.autolinkLibrariesFromCommand()
+  } else {
+    ex.autolinkLibrariesFromCommand(expoAutolinking.rnConfigCommand)
+  }
+}
+expoAutolinking.useExpoModules()
+
+rootProject.name = 'City Issue Reporter'
+
+expoAutolinking.useExpoVersionCatalog()
+
+include ':app'
+includeBuild(expoAutolinking.reactNativeGradlePlugin)
diff --git a/static/flow.html b/static/flow.html
new file mode 100644
index 0000000000000000000000000000000000000000..863b4ae1087d974306f711cc9f8879236a44fbbc
--- /dev/null
+++ b/static/flow.html
@@ -0,0 +1,888 @@
+<!-- NOTE: the 888-line markup of static/flow.html was garbled in extraction
+     (tags stripped); only the visible text survives. Recoverable content: -->
+<!-- Title: "Autonomous Agent Pipeline | Mission Control" -->
+<!-- Header: "Autonomous Agent Control" with a "System Ready" status badge -->
+<!-- Issue form: image upload ("Click to upload image") plus further inputs
+     and a submit control (labels lost) -->
+<!-- Panel: "Execution Pipeline", initially "Submit an issue to start." -->
+<!-- Panel: "Live Intelligence", showing "ID: Pending" and two image slots:
+     "ORIGINAL EVIDENCE" ("Awaiting Image...") and "AI DETECTION" ("Analysis Pending...") -->
+<!-- Panel: "System Logs", initially "Waiting for stream..." -->
\ No newline at end of file
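
Aside: the "Live Intelligence" and "System Logs" panels above imply that flow.html consumes a push stream from the backend. Below is a minimal sketch of one way to serve such a stream, assuming a FastAPI backend and Server-Sent Events; the /stream/logs path, the publish_log() helper, and the payload shape are all hypothetical illustrations, not taken from this diff.

# Hypothetical sketch, not part of this diff: a Server-Sent Events endpoint
# that a dashboard page like static/flow.html could subscribe to.
import asyncio
import json

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()
log_queue: "asyncio.Queue[dict]" = asyncio.Queue()


async def publish_log(agent: str, message: str) -> None:
    # The pipeline would call this (hypothetically) whenever an agent logs a step.
    await log_queue.put({"agent": agent, "message": message})


@app.get("/stream/logs")  # endpoint path is an assumption
async def stream_logs() -> StreamingResponse:
    async def event_source():
        # Each SSE frame is "data: <json>\n\n"; the browser's EventSource API
        # parses the frames and hands the JSON payload to the page.
        while True:
            entry = await log_queue.get()
            yield f"data: {json.dumps(entry)}\n\n"

    return StreamingResponse(event_source(), media_type="text/event-stream")

On the page side, new EventSource('/stream/logs') with an onmessage handler appending each payload to the "System Logs" panel would complete the loop.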