Spaces:
Running
Running
Dashm committed on
Commit ·
26d82f3
0
Parent(s):
Initial commit — RetailTalk backend for HuggingFace Spaces
Browse files- .gitignore +57 -0
- Dockerfile +34 -0
- README.md +19 -0
- backend/.env.example +17 -0
- backend/config.py +47 -0
- backend/database.py +191 -0
- backend/download_models.py +37 -0
- backend/main.py +110 -0
- backend/models/__init__.py +0 -0
- backend/models/bert_service.py +106 -0
- backend/models/classifier.py +153 -0
- backend/models/intent_service.py +148 -0
- backend/models/query_rewriter.py +456 -0
- backend/models/ranker.py +86 -0
- backend/models/slot_service.py +236 -0
- backend/requirements.txt +26 -0
- backend/routes/__init__.py +0 -0
- backend/routes/admin.py +2273 -0
- backend/routes/auth.py +339 -0
- backend/routes/cart.py +391 -0
- backend/routes/contacts.py +66 -0
- backend/routes/delivery.py +601 -0
- backend/routes/insights.py +175 -0
- backend/routes/manager.py +745 -0
- backend/routes/products.py +341 -0
- backend/routes/restock.py +402 -0
- backend/routes/search.py +417 -0
- backend/routes/transactions.py +955 -0
- backend/routes/wishlist.py +362 -0
- backend/trained_model/intent_classifier/config.json +30 -0
- backend/trained_model/intent_classifier/label_map.json +6 -0
- backend/trained_model/ranker/config.json +32 -0
- backend/trained_model/ranker/special_tokens_map.json +37 -0
- backend/trained_model/ranker/tokenizer.json +0 -0
- backend/trained_model/ranker/tokenizer_config.json +58 -0
- backend/trained_model/ranker/vocab.txt +0 -0
- backend/trained_model/slot_extractor/config.json +50 -0
- backend/trained_model/slot_extractor/tag_map.json +23 -0
.gitignore
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ========================
|
| 2 |
+
# Secrets
|
| 3 |
+
# ========================
|
| 4 |
+
.env
|
| 5 |
+
.env.local
|
| 6 |
+
.env.*.local
|
| 7 |
+
|
| 8 |
+
# ========================
|
| 9 |
+
# Large model weights — stored in HuggingFace Model Hub instead
|
| 10 |
+
# Downloaded at Docker build time via download_models.py
|
| 11 |
+
# ========================
|
| 12 |
+
backend/trained_model/intent_classifier/model.pt
|
| 13 |
+
backend/trained_model/slot_extractor/model.pt
|
| 14 |
+
backend/trained_model/ranker/model.safetensors
|
| 15 |
+
backend/trained_model/pytorch_model.bin
|
| 16 |
+
checkpoint.pt
|
| 17 |
+
*.pth
|
| 18 |
+
*.onnx
|
| 19 |
+
*.h5
|
| 20 |
+
|
| 21 |
+
# ========================
|
| 22 |
+
# Python
|
| 23 |
+
# ========================
|
| 24 |
+
__pycache__/
|
| 25 |
+
*.py[cod]
|
| 26 |
+
*$py.class
|
| 27 |
+
*.so
|
| 28 |
+
*.egg-info/
|
| 29 |
+
dist/
|
| 30 |
+
build/
|
| 31 |
+
venv/
|
| 32 |
+
.venv/
|
| 33 |
+
env/
|
| 34 |
+
|
| 35 |
+
# ========================
|
| 36 |
+
# IDE & OS
|
| 37 |
+
# ========================
|
| 38 |
+
.vscode/
|
| 39 |
+
.idea/
|
| 40 |
+
*.swp
|
| 41 |
+
*.swo
|
| 42 |
+
*~
|
| 43 |
+
Thumbs.db
|
| 44 |
+
.DS_Store
|
| 45 |
+
|
| 46 |
+
# ========================
|
| 47 |
+
# Logs
|
| 48 |
+
# ========================
|
| 49 |
+
*.log
|
| 50 |
+
|
| 51 |
+
# ========================
|
| 52 |
+
# Not needed for HF Space deployment
|
| 53 |
+
# ========================
|
| 54 |
+
frontend/
|
| 55 |
+
database/
|
| 56 |
+
backend/test_*.py
|
| 57 |
+
backend/Dockerfile
|
Dockerfile
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
FROM python:3.11-slim

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    && rm -rf /var/lib/apt/lists/*

# Install CPU-only PyTorch first (prevents pip from pulling GPU version)
RUN pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu

# Install Python dependencies
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy backend application code
COPY backend/ ./backend/

# Download large model weights from HuggingFace Model Hub.
# HF_TOKEN is passed as a build arg from Space settings. Build args are
# exposed to RUN commands in this stage as environment variables, so there
# is no need to bake the secret into the final image with ENV (which would
# leak it to anyone who can inspect the image). NOTE: ARG values still show
# up in `docker history`; a BuildKit secret mount would hide them entirely.
ARG HF_TOKEN
RUN python backend/download_models.py

# HuggingFace Spaces runs as non-root user — fix permissions
RUN chmod -R 777 /app

WORKDIR /app/backend

# HuggingFace Spaces requires port 7860
EXPOSE 7860

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
|
README.md
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: RetailTalk
|
| 3 |
+
emoji: 🛍️
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: indigo
|
| 6 |
+
sdk: docker
|
| 7 |
+
pinned: false
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
# RetailTalk Backend
|
| 11 |
+
|
| 12 |
+
FastAPI backend for RetailTalk — an NLP-powered e-commerce product search engine.
|
| 13 |
+
|
| 14 |
+
## Models included
|
| 15 |
+
- BERT multilingual embeddings
|
| 16 |
+
- Intent classifier
|
| 17 |
+
- Slot extractor (NER)
|
| 18 |
+
- CrossEncoder ranker
|
| 19 |
+
- ESCI classifier (Exact/Substitute/Complement/Irrelevant)
|
backend/.env.example
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copy these to HuggingFace Space Secrets (Settings > Variables and Secrets)
|
| 2 |
+
# Never commit the actual .env file
|
| 3 |
+
|
| 4 |
+
SUPABASE_URL=https://your-project.supabase.co
|
| 5 |
+
SUPABASE_KEY=your-anon-public-key
|
| 6 |
+
SUPABASE_SERVICE_KEY=your-service-role-key
|
| 7 |
+
DATABASE_URL=postgresql://postgres:your-password@db.your-project.supabase.co:5432/postgres
|
| 8 |
+
|
| 9 |
+
JWT_SECRET=change-this-to-a-random-secure-string
|
| 10 |
+
|
| 11 |
+
# Model paths inside container (defaults already set in config.py — no need to change)
|
| 12 |
+
# CLASSIFIER_MODEL_PATH=trained_model/pytorch_model.bin
|
| 13 |
+
# RANKER_MODEL_PATH=trained_model/ranker
|
| 14 |
+
# INTENT_MODEL_PATH=trained_model/intent_classifier
|
| 15 |
+
# SLOT_MODEL_PATH=trained_model/slot_extractor
|
| 16 |
+
|
| 17 |
+
DEBUG=false
|
backend/config.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Application configuration — loads settings from environment variables.
Set these in your HuggingFace Space secrets (Settings > Variables and Secrets).
"""

import os
from dotenv import load_dotenv

# Populate os.environ from a local .env file when present (development);
# in the deployed Space the secrets are injected by the runtime instead.
load_dotenv()

# Supabase
SUPABASE_URL = os.getenv("SUPABASE_URL", "")
SUPABASE_KEY = os.getenv("SUPABASE_KEY", "")
SUPABASE_SERVICE_KEY = os.getenv("SUPABASE_SERVICE_KEY", "")

# Database (direct connection for pgvector queries)
DATABASE_URL = os.getenv("DATABASE_URL", "")

# JWT Auth
# NOTE(review): the fallback secret is insecure — make sure JWT_SECRET is
# always overridden in production.
JWT_SECRET = os.getenv("JWT_SECRET", "change-this-in-production")
JWT_ALGORITHM = "HS256"
JWT_EXPIRATION_HOURS = 24

# ML Models — all paths relative to /app/backend inside the container
BERT_MODEL_NAME = os.getenv("BERT_MODEL_NAME", "bert-base-multilingual-uncased")
CLASSIFIER_MODEL_PATH = os.getenv("CLASSIFIER_MODEL_PATH", "trained_model/pytorch_model.bin")
RANKER_MODEL_PATH = os.getenv("RANKER_MODEL_PATH", "trained_model/ranker")
INTENT_MODEL_PATH = os.getenv("INTENT_MODEL_PATH", "trained_model/intent_classifier")
SLOT_MODEL_PATH = os.getenv("SLOT_MODEL_PATH", "trained_model/slot_extractor")

# Tokenizer truncation lengths and embedding width used by the services.
BERT_MAX_LENGTH = 256
BERT_EMBEDDING_DIM = 768
INTENT_MAX_LENGTH = int(os.getenv("INTENT_MAX_LENGTH", "128"))
SLOT_MAX_LENGTH = int(os.getenv("SLOT_MAX_LENGTH", "128"))

# Score blending weights (must sum to 1.0)
RANKER_WEIGHT = float(os.getenv("RANKER_WEIGHT", "0.4"))
CLASSIFIER_WEIGHT = float(os.getenv("CLASSIFIER_WEIGHT", "0.25"))
SIMILARITY_WEIGHT = float(os.getenv("SIMILARITY_WEIGHT", "0.35"))

# Search
# Presumably: candidate pool size fetched from pgvector before re-ranking,
# and the cap on final results — confirm against routes/search.py usage.
SEARCH_TOP_K_CANDIDATES = 50
SEARCH_MAX_RESULTS = 20

# App
APP_NAME = "RetailTalk"
DEBUG = os.getenv("DEBUG", "false").lower() == "true"
backend/database.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database connection helpers for Supabase.
|
| 3 |
+
Uses psycopg (v3) for direct PostgreSQL/pgvector queries (search),
|
| 4 |
+
and supabase-py for standard CRUD operations.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import psycopg
|
| 8 |
+
from psycopg.rows import dict_row
|
| 9 |
+
import numpy as np
|
| 10 |
+
from supabase import create_client, Client
|
| 11 |
+
from config import SUPABASE_URL, SUPABASE_KEY, DATABASE_URL
|
| 12 |
+
|
| 13 |
+
# --- Supabase Client (for CRUD operations) ---
|
| 14 |
+
|
| 15 |
+
_supabase_client: Client = None
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def get_supabase() -> Client:
    """Return the shared Supabase client used for standard CRUD operations.

    The client is created lazily on first call and cached in a module-level
    global so every subsequent request reuses the same instance.
    """
    global _supabase_client
    if _supabase_client is not None:
        return _supabase_client
    _supabase_client = create_client(SUPABASE_URL, SUPABASE_KEY)
    return _supabase_client
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# --- Direct PostgreSQL connection (for pgvector queries) ---
|
| 27 |
+
|
| 28 |
+
def get_db_connection():
    """Open a fresh psycopg (v3) connection for pgvector SQL.

    Rows come back as dicts (row_factory=dict_row); the caller is
    responsible for closing the connection.
    """
    return psycopg.connect(DATABASE_URL, row_factory=dict_row)
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def close_db_pool():
    """No-op shutdown hook.

    Connections are opened and closed per call in this module, so there is
    no pool to tear down; this exists only so the app's shutdown path has
    something to invoke.
    """
    return None
| 37 |
+
|
| 38 |
+
|
| 39 |
+
# --- Helper: embedding <-> database conversion ---
|
| 40 |
+
|
| 41 |
+
def embedding_to_pgvector(embedding: np.ndarray) -> str:
    """Serialize a numpy vector into pgvector's text literal, e.g. '[0.10000000,0.20000000]'."""
    components = (f"{value:.8f}" for value in embedding.tolist())
    return f"[{','.join(components)}]"
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def pgvector_to_embedding(pgvector_str: str) -> np.ndarray:
    """Parse a pgvector text literal ('[a,b,...]') back into a float32 numpy array."""
    return np.array(pgvector_str.strip("[]").split(","), dtype=np.float32)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
# --- pgvector similarity search ---
|
| 53 |
+
|
| 54 |
+
def search_similar_products(query_embedding: np.ndarray, top_k: int = 50):
    """
    Return the top_k products most similar to *query_embedding*, ordered by
    pgvector cosine distance (the `<=>` operator).

    Only active, approved, in-stock products with a stored embedding are
    considered. Each result dict carries the product's core fields plus its
    stored embedding (as a numpy array) and a cosine-similarity score.
    """
    embedding_str = embedding_to_pgvector(query_embedding)

    sql = """
        SELECT
            id, seller_id, title, description, price, stock, images,
            embedding::text as embedding_text,
            1 - (embedding <=> %s::vector) as similarity
        FROM products
        WHERE is_active = true AND status = 'approved' AND stock > 0 AND embedding IS NOT NULL
        ORDER BY embedding <=> %s::vector
        LIMIT %s
    """

    conn = get_db_connection()
    try:
        with conn.cursor() as cur:
            cur.execute(sql, (embedding_str, embedding_str, top_k))
            rows = cur.fetchall()
    finally:
        conn.close()

    return [
        {
            "id": str(row["id"]),
            "seller_id": str(row["seller_id"]),
            "title": row["title"],
            "description": row["description"],
            "price": float(row["price"]),
            "stock": int(row["stock"]),
            "images": row["images"] or [],
            "embedding": pgvector_to_embedding(row["embedding_text"]),
            "similarity": float(row["similarity"]),
        }
        for row in rows
    ]
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def search_similar_products_filtered(
    query_embedding: np.ndarray,
    top_k: int = 50,
    price_min: float = None,
    price_max: float = None,
    brand: str = None,
    color: str = None,
):
    """
    Find the top_k most similar products with optional structured filters.
    Extends search_similar_products with WHERE clauses from query rewriting.

    All filter values are passed as bound parameters — never interpolated
    into the SQL string — so user-derived brand/color text cannot inject SQL.
    """
    conn = get_db_connection()
    embedding_str = embedding_to_pgvector(query_embedding)

    # Build dynamic WHERE clause; base conditions mirror search_similar_products.
    conditions = ["is_active = true", "status = 'approved'", "stock > 0", "embedding IS NOT NULL"]
    filter_params = []

    # NOTE(review): price bounds are exclusive (> / <) — confirm callers don't
    # expect products priced exactly at price_min/price_max to be included.
    if price_min is not None:
        conditions.append("price > %s")
        filter_params.append(price_min)

    if price_max is not None:
        conditions.append("price < %s")
        filter_params.append(price_max)

    # brand/color have no dedicated columns; match them as case-insensitive
    # substrings of the title or description instead.
    for term in (brand, color):
        if term:
            conditions.append("(title ILIKE %s OR description ILIKE %s)")
            filter_params.extend([f"%{term}%", f"%{term}%"])

    where_clause = " AND ".join(conditions)

    # Params must match SQL placeholder order:
    # 1) embedding for SELECT similarity, 2) filter params for WHERE,
    # 3) embedding for ORDER BY, 4) top_k for LIMIT
    params = [embedding_str] + filter_params + [embedding_str, top_k]

    query = f"""
        SELECT
            id, seller_id, title, description, price, stock, images,
            embedding::text as embedding_text,
            1 - (embedding <=> %s::vector) as similarity
        FROM products
        WHERE {where_clause}
        ORDER BY embedding <=> %s::vector
        LIMIT %s
    """

    try:
        with conn.cursor() as cur:
            cur.execute(query, tuple(params))
            rows = cur.fetchall()
    finally:
        conn.close()

    results = []
    for row in rows:
        results.append({
            "id": str(row["id"]),
            "seller_id": str(row["seller_id"]),
            "title": row["title"],
            "description": row["description"],
            "price": float(row["price"]),
            "stock": int(row["stock"]),
            "images": row["images"] or [],
            "embedding": pgvector_to_embedding(row["embedding_text"]),
            "similarity": float(row["similarity"]),
        })

    return results
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def store_product_embedding(product_id: str, embedding: np.ndarray):
    """Write *embedding* into products.embedding for the given product id."""
    update_sql = """
        UPDATE products SET embedding = %s::vector WHERE id = %s
    """
    vector_literal = embedding_to_pgvector(embedding)
    conn = get_db_connection()
    try:
        with conn.cursor() as cur:
            cur.execute(update_sql, (vector_literal, product_id))
        conn.commit()
    finally:
        conn.close()
|
backend/download_models.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Downloads large model weights from HuggingFace Model Hub at build time.
Run once during Docker build — weights are baked into the image.
Requires HF_TOKEN env var if the model repo is private.
"""

import os
from huggingface_hub import hf_hub_download

REPO_ID = "dashm/retailtalk-models"
TOKEN = os.getenv("HF_TOKEN", None)

# (local path relative to this file, filename inside the Hub repo)
files = [
    ("trained_model/intent_classifier/model.pt", "intent_classifier/model.pt"),
    ("trained_model/slot_extractor/model.pt", "slot_extractor/model.pt"),
    ("trained_model/ranker/model.safetensors", "ranker/model.safetensors"),
    ("trained_model/pytorch_model.bin", "pytorch_model.bin"),
]


def main() -> None:
    """Download each missing weight file from the Hub into trained_model/."""
    base_dir = os.path.dirname(os.path.abspath(__file__))

    for local_rel, hub_filename in files:
        local_path = os.path.join(base_dir, local_rel)
        if os.path.exists(local_path):
            print(f"[skip] {local_rel} already exists")
            continue
        print(f"[download] {hub_filename} -> {local_rel}")
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        # local_dir preserves the repo-relative path, so a Hub file like
        # "intent_classifier/model.pt" lands at
        # trained_model/intent_classifier/model.pt — matching local_rel.
        hf_hub_download(
            repo_id=REPO_ID,
            filename=hub_filename,
            local_dir=os.path.join(base_dir, "trained_model"),
            token=TOKEN,
        )
        print(f"[done] {local_rel}")

    print("[OK] All models downloaded.")


# Guarded so importing this module never triggers network downloads;
# the Dockerfile's `python backend/download_models.py` still works.
if __name__ == "__main__":
    main()
backend/main.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
RetailTalk — FastAPI Backend Entry Point
|
| 3 |
+
|
| 4 |
+
This is the main file that starts the API server.
|
| 5 |
+
Run with: uvicorn main:app --reload
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from fastapi import FastAPI
|
| 9 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 10 |
+
from contextlib import asynccontextmanager
|
| 11 |
+
|
| 12 |
+
from config import APP_NAME, DEBUG
|
| 13 |
+
from database import close_db_pool
|
| 14 |
+
from routes import auth, products, search, transactions, admin, insights, contacts, cart, delivery, manager, restock, wishlist
|
| 15 |
+
|
| 16 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Startup and shutdown events.

    Everything before `yield` runs once before the app starts serving;
    everything after runs once on shutdown.
    """
    # --- Startup ---
    print(f"[START] Starting {APP_NAME} backend...")

    # Load ML models. The services are imported here inside the hook rather
    # than at module top — presumably to defer heavy torch/transformers
    # initialization until startup; confirm before moving them.
    from models.bert_service import bert_service
    from models.classifier import classifier_service
    from models.ranker import ranker_service
    from models.intent_service import intent_service
    from models.slot_service import slot_service
    from models.query_rewriter import query_rewriter

    print("[ML] Loading BERT model...")
    bert_service.load()
    print("[ML] Loading classifier model...")
    classifier_service.load()
    print("[ML] Loading ranker model (optional)...")
    ranker_service.load()
    print("[ML] Loading intent classifier (optional)...")
    intent_service.load()
    print("[ML] Loading slot extractor (optional)...")
    slot_service.load()

    # Initialize query rewriter with loaded services — must happen after
    # intent_service.load() and slot_service.load() above.
    query_rewriter.init(intent_service, slot_service)
    print("[ML] Query rewriter initialized")

    print(f"[OK] {APP_NAME} backend ready!")

    yield  # the application serves requests between startup and shutdown

    # --- Shutdown ---
    print("[STOP] Shutting down...")
    close_db_pool()
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# Create FastAPI app
|
| 55 |
+
# Create FastAPI app
app = FastAPI(
    title=f"{APP_NAME} API",
    description=(
        "An NLP for querying e-commerce product. "
        "Uses BERT embeddings + CrossEncoder ranking + QueryProductClassifier to "
        "find, rank, and classify product search results "
        "as Exact / Substitute / Complement / Irrelevant."
    ),
    version="1.0.0",
    lifespan=lifespan,
)

# CORS — allow frontend to call the API
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Register every route group with the app.
for _route_module in (
    auth, products, search, transactions, admin, insights,
    contacts, cart, delivery, manager, restock, wishlist,
):
    app.include_router(_route_module.router)
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@app.get("/", tags=["Health"])
async def root():
    """Health check endpoint."""
    from models.bert_service import bert_service
    from models.classifier import classifier_service
    from models.ranker import ranker_service
    from models.intent_service import intent_service
    from models.slot_service import slot_service

    services = {
        "bert": bert_service,
        "classifier": classifier_service,
        "ranker": ranker_service,
        "intent": intent_service,
        "slot": slot_service,
    }
    return {
        "app": APP_NAME,
        "status": "running",
        "ml_status": {
            name: "loaded" if service._loaded else "not loaded"
            for name, service in services.items()
        },
    }
|
| 110 |
+
|
backend/models/__init__.py
ADDED
|
File without changes
|
backend/models/bert_service.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
BERT Embedding Service — computes text embeddings using a pretrained BERT model.
|
| 3 |
+
Loaded once at startup, reused for all requests.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import torch
|
| 7 |
+
import torch.nn.functional as F
|
| 8 |
+
import numpy as np
|
| 9 |
+
from transformers import BertModel, BertTokenizer
|
| 10 |
+
from config import BERT_MODEL_NAME, BERT_MAX_LENGTH
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class BertEmbeddingService:
    """Singleton service that computes BERT embeddings for text.

    Call load() once at startup; afterwards compute_embedding /
    compute_embeddings_batch share one tokenize -> forward -> pool pipeline.
    """

    def __init__(self):
        self.model = None       # BertModel, set by load()
        self.tokenizer = None   # BertTokenizer, set by load()
        self.device = None      # torch.device chosen in load()
        self._loaded = False

    def load(self):
        """Load the BERT model and tokenizer. Call once at app startup; idempotent."""
        if self._loaded:
            return

        print(f"[BertService] Loading BERT model: {BERT_MODEL_NAME}...")
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model = BertModel.from_pretrained(BERT_MODEL_NAME)
        self.tokenizer = BertTokenizer.from_pretrained(BERT_MODEL_NAME)
        self.model.to(self.device)
        self.model.eval()
        self._loaded = True
        print(f"[BertService] Model loaded on {self.device}")

    def _pool_summary(self, last_hidden_states, pool_op="max"):
        """Pool hidden states (batch, seq, dim) over the sequence axis -> (batch, dim).

        NOTE(review): pooling runs over *all* token positions, including the
        padding added by padding="max_length" — confirm this matches how the
        stored product embeddings and downstream classifier were trained.
        """
        seq_len = last_hidden_states.size()[1]
        hidden_p = last_hidden_states.permute(0, 2, 1)  # (batch, dim, seq)
        pool_fn = F.max_pool1d if pool_op == "max" else F.avg_pool1d
        return pool_fn(hidden_p, kernel_size=seq_len).squeeze(-1)

    def _encode(self, texts: list[str]):
        """Shared pipeline: tokenize *texts*, run the frozen model, pool to (N, dim).

        Raises RuntimeError if load() has not been called.
        """
        if not self._loaded:
            raise RuntimeError("BertService not loaded. Call load() first.")

        tokens = self.tokenizer(
            texts,
            padding="max_length",
            truncation=True,
            max_length=BERT_MAX_LENGTH,
            return_attention_mask=True,
            return_tensors="pt",
        )
        inputs = {
            "input_ids": tokens["input_ids"].to(self.device),
            "attention_mask": tokens["attention_mask"].to(self.device),
            "token_type_ids": tokens["token_type_ids"].to(self.device),
        }
        with torch.no_grad():
            output = self.model(**inputs)
            pooled = self._pool_summary(output[0])
        return pooled

    def compute_embedding(self, text: str) -> np.ndarray:
        """Compute a single BERT embedding for *text*. Returns shape (768,)."""
        return self._encode([text]).detach().cpu().numpy().squeeze(0)

    def compute_embeddings_batch(self, texts: list[str]) -> np.ndarray:
        """Compute BERT embeddings for a batch of texts. Returns shape (N, 768)."""
        return self._encode(texts).detach().cpu().numpy()


# Global singleton instance — import this and call .load() once at startup.
bert_service = BertEmbeddingService()
|
backend/models/classifier.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Classifier Service — loads the trained QueryProductClassifier model
|
| 3 |
+
and classifies (query, product) pairs into E/S/C/I labels.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import torch
|
| 8 |
+
import torch.nn as nn
|
| 9 |
+
import numpy as np
|
| 10 |
+
from config import CLASSIFIER_MODEL_PATH, BERT_EMBEDDING_DIM
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class QueryProductClassifier(nn.Module):
    """
    Feed-forward classifier that takes concatenated query + product embeddings
    and classifies into E/S/C/I categories.
    This is a copy of the trained model architecture from the ESCI project.
    Must stay in sync with classification_identification/query_product/classifier_model.py
    """

    def __init__(self, size_pretrained=768, dense_hidden_dim=256, num_dense_layers=2, num_labels=4, dropout_rate=0.1):
        super(QueryProductClassifier, self).__init__()
        # Binary problems collapse to a single logit; multi-class keeps num_labels.
        self.num_labels = 1 if num_labels <= 2 else num_labels
        # Input width: query and product vectors concatenated side by side.
        self.size_pretrained = size_pretrained * 2

        self.dropout_embedding = nn.Dropout(dropout_rate)

        # Stack of Linear -> BatchNorm -> ReLU -> Dropout blocks, then the head.
        # Layer order must stay identical for checkpoint state_dict keys to match.
        stack = []
        in_dim = self.size_pretrained
        for _ in range(num_dense_layers):
            stack += [
                nn.Linear(in_dim, dense_hidden_dim, bias=True),
                nn.BatchNorm1d(dense_hidden_dim),
                nn.ReLU(),
                nn.Dropout(dropout_rate),
            ]
            in_dim = dense_hidden_dim
        stack.append(nn.Linear(in_dim, self.num_labels))
        self.fc = nn.Sequential(*stack)

    def forward(self, query_embedding, product_embedding):
        """Return logits of shape (batch, num_labels), squeezed to (batch,) when num_labels == 1."""
        combined = torch.cat((query_embedding, product_embedding), 1)
        combined = self.dropout_embedding(combined)
        return self.fc(combined).squeeze(-1)
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Label mapping
# NOTE(review): class ids must match the label order used when the
# QueryProductClassifier checkpoint was trained — confirm against the
# ESCI training pipeline before reordering.
CLASS_ID_TO_LABEL = {
    0: "Exact",
    1: "Substitute",
    2: "Complement",
    3: "Irrelevant",
}

# Priority for sorting (lower = more relevant = shown first)
LABEL_PRIORITY = {
    "Exact": 0,
    "Substitute": 1,
    "Complement": 2,
    "Irrelevant": 3,
}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class ClassifierService:
    """Singleton service that classifies (query, product) embedding pairs.

    Wraps the trained QueryProductClassifier. When the checkpoint file is
    missing the service stays unloaded and every call returns a neutral
    "Unknown" result, so search can fall back to similarity-only ranking.
    """

    # Template returned when the model is unavailable; always copied before
    # returning so callers can safely mutate their result dicts.
    _UNKNOWN = {"label": "Unknown", "confidence": 0.0, "class_id": -1}

    def __init__(self):
        self.model = None       # QueryProductClassifier, set by load()
        self.device = None      # torch.device, set by load()
        self._loaded = False    # True once the checkpoint is in memory

    def load(self):
        """Load the trained classifier model. Call once at app startup.

        Idempotent: a second call is a no-op. If the checkpoint file does
        not exist, logs a warning and leaves the service unloaded.
        """
        if self._loaded:
            return

        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        model_path = CLASSIFIER_MODEL_PATH
        if not os.path.exists(model_path):
            print(f"[ClassifierService] WARNING: Model file not found at {model_path}")
            print("[ClassifierService] Search will use similarity-only ranking (no E/S/C/I classification)")
            return

        print(f"[ClassifierService] Loading classifier from {model_path}...")
        self.model = QueryProductClassifier(
            size_pretrained=BERT_EMBEDDING_DIM,
            num_labels=4,
        )
        self.model.load_state_dict(torch.load(model_path, map_location=self.device))
        self.model.to(self.device)
        self.model.eval()
        self._loaded = True
        print(f"[ClassifierService] Classifier loaded on {self.device}")

    def classify(self, query_embedding: np.ndarray, product_embedding: np.ndarray) -> dict:
        """
        Classify a single (query, product) pair.

        Args:
            query_embedding: 1-D embedding vector for the query.
            product_embedding: 1-D embedding vector for the product.

        Returns: {"label": "Exact", "confidence": 0.92, "class_id": 0},
        or the "Unknown" fallback when the model is not loaded.
        """
        if not self._loaded:
            return dict(self._UNKNOWN)

        q = torch.tensor(query_embedding).float().unsqueeze(0).to(self.device)
        p = torch.tensor(product_embedding).float().unsqueeze(0).to(self.device)

        with torch.no_grad():
            logits = self.model(q, p)
            probabilities = torch.softmax(logits, dim=1)
            class_id = torch.argmax(probabilities, dim=1).item()
            confidence = probabilities[0][class_id].item()

        return {
            "label": CLASS_ID_TO_LABEL[class_id],
            "confidence": round(confidence, 4),
            "class_id": class_id,
        }

    def classify_batch(self, query_embedding: np.ndarray, product_embeddings: np.ndarray) -> list[dict]:
        """
        Classify a query against multiple products at once.

        Args:
            query_embedding: shape (768,)
            product_embeddings: shape (N, 768)

        Returns a list of N classification dicts, each with the winning
        label/confidence plus the full per-class probability breakdown.
        When the model is not loaded, returns N independent "Unknown" dicts
        (fresh copies — never aliases of one shared dict).
        """
        if not self._loaded:
            # One fresh dict per product; `[d] * n` would alias a single dict
            # and let a mutation through one entry corrupt all of them.
            return [dict(self._UNKNOWN) for _ in range(len(product_embeddings))]

        n = product_embeddings.shape[0]
        if n == 0:
            # Guard: an empty batch would otherwise hit BatchNorm with 0 rows.
            return []

        # Repeat the query embedding N times to pair it with every product.
        q = torch.tensor(np.tile(query_embedding, (n, 1))).float().to(self.device)
        p = torch.tensor(product_embeddings).float().to(self.device)

        with torch.no_grad():
            logits = self.model(q, p)
            probabilities = torch.softmax(logits, dim=1)
            class_ids = torch.argmax(probabilities, dim=1).cpu().numpy()
            confidences = probabilities.max(dim=1).values.cpu().numpy()

        all_probs = probabilities.cpu().numpy()

        results = []
        for i in range(n):
            results.append({
                "label": CLASS_ID_TO_LABEL[int(class_ids[i])],
                "confidence": round(float(confidences[i]), 4),
                "class_id": int(class_ids[i]),
                "exact_prob": round(float(all_probs[i][0]), 4),
                "substitute_prob": round(float(all_probs[i][1]), 4),
                "complement_prob": round(float(all_probs[i][2]), 4),
                "irrelevant_prob": round(float(all_probs[i][3]), 4),
            })
        return results
|
| 151 |
+
|
| 152 |
+
# Global singleton instance — import this (not the class) from routes;
# main.py is expected to call classifier_service.load() once at startup.
classifier_service = ClassifierService()
|
backend/models/intent_service.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Intent Classification Service — runs the trained IntentClassifier model
|
| 3 |
+
for real-time inference on user queries.
|
| 4 |
+
|
| 5 |
+
Loaded once at startup, reused for all search requests.
|
| 6 |
+
Intents: single_search, multi_search, filtered_search, free_form (multi-label)
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import json
|
| 11 |
+
import torch
|
| 12 |
+
import torch.nn as nn
|
| 13 |
+
import numpy as np
|
| 14 |
+
from transformers import BertModel, BertTokenizer
|
| 15 |
+
from config import INTENT_MODEL_PATH, BERT_MODEL_NAME, INTENT_MAX_LENGTH
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class IntentClassifier(nn.Module):
    """
    BERT + classification head for multi-label intent classification.
    Must match the architecture used during training.

    NOTE: attribute names (``bert``, ``dropout``, ``fc1``, ``fc2``) are part
    of the saved state_dict keys — renaming them breaks checkpoint loading.
    """

    def __init__(self, bert_model_name="bert-base-multilingual-uncased", num_intents=4):
        super().__init__()
        # Pretrained multilingual BERT encoder (768-d hidden size assumed by fc1).
        self.bert = BertModel.from_pretrained(bert_model_name)
        self.dropout = nn.Dropout(0.3)
        self.fc1 = nn.Linear(768, 256)
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(256, num_intents)

    def forward(self, input_ids, attention_mask):
        """Return per-intent logits for a batch of tokenized queries.

        Multi-label: the caller applies sigmoid (not softmax) to the logits.
        """
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        cls_output = outputs.last_hidden_state[:, 0, :]  # [CLS] token
        x = self.dropout(cls_output)
        x = self.relu(self.fc1(x))
        x = self.dropout(x)
        logits = self.fc2(x)
        return logits
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class IntentService:
    """Singleton service for intent classification at inference time.

    Loads the fine-tuned IntentClassifier once at startup; `predict` then
    returns multi-label intent probabilities for each user query. When the
    checkpoint is missing, `predict` degrades to an empty result.
    """

    def __init__(self):
        self.model = None        # IntentClassifier, set by load()
        self.tokenizer = None    # BertTokenizer, set by load()
        self.device = None       # torch.device, set by load()
        self.label_names = []    # intent names, index-aligned with model outputs
        self._loaded = False     # True once the checkpoint is in memory

    def load(self):
        """Load the trained intent classifier. Call once at app startup.

        Idempotent. Label names are resolved in priority order:
        config.json -> label_map.json -> hard-coded defaults.
        """
        if self._loaded:
            return

        model_dir = INTENT_MODEL_PATH

        model_path = os.path.join(model_dir, "model.pt")
        label_map_path = os.path.join(model_dir, "label_map.json")
        config_path = os.path.join(model_dir, "config.json")

        if not os.path.exists(model_path):
            print(f"[IntentService] WARNING: Model not found at {model_path}")
            print("[IntentService] Intent classification will be unavailable.")
            return

        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # Load config for label names (explicit utf-8: JSON files are utf-8
        # regardless of the platform's default locale encoding).
        if os.path.exists(config_path):
            with open(config_path, "r", encoding="utf-8") as f:
                config = json.load(f)
            self.label_names = config.get("label_names", [])
            num_intents = config.get("num_intents", 4)
        elif os.path.exists(label_map_path):
            with open(label_map_path, "r", encoding="utf-8") as f:
                label_map = json.load(f)
            # label_map is {name: index}; sort names by index to align with logits.
            self.label_names = sorted(label_map.keys(), key=lambda k: label_map[k])
            num_intents = len(self.label_names)
        else:
            self.label_names = ["single_search", "multi_search", "filtered_search", "free_form"]
            num_intents = 4

        print(f"[IntentService] Loading intent classifier ({num_intents} intents)...")
        self.model = IntentClassifier(
            bert_model_name=BERT_MODEL_NAME,
            num_intents=num_intents,
        )

        # SECURITY: weights_only=False unpickles arbitrary Python objects.
        # Only acceptable because the checkpoint ships with this repo; never
        # point INTENT_MODEL_PATH at an untrusted download.
        checkpoint = torch.load(model_path, map_location=self.device, weights_only=False)
        self.model.load_state_dict(checkpoint["model_state_dict"])
        self.model.to(self.device)
        self.model.eval()

        self.tokenizer = BertTokenizer.from_pretrained(BERT_MODEL_NAME)
        self._loaded = True
        print(f"[IntentService] Intent classifier loaded on {self.device}")
        print(f"[IntentService] Labels: {self.label_names}")

    def predict(self, query: str, threshold: float = 0.5) -> dict:
        """
        Classify a query's intents (multi-label).

        Args:
            query: raw user query text.
            threshold: sigmoid probability above which an intent is "active".

        Returns:
            {
                "intents": ["single_search", "filtered_search"],
                "probabilities": {
                    "single_search": 0.92,
                    "multi_search": 0.03,
                    "filtered_search": 0.87,
                    "free_form": 0.01
                }
            }
        Returns empty lists/dicts when the model is not loaded.
        """
        if not self._loaded:
            return {"intents": [], "probabilities": {}}

        tokens = self.tokenizer(
            query,
            padding="max_length",
            truncation=True,
            max_length=INTENT_MAX_LENGTH,
            return_tensors="pt",
        )

        input_ids = tokens["input_ids"].to(self.device)
        attention_mask = tokens["attention_mask"].to(self.device)

        with torch.no_grad():
            logits = self.model(input_ids, attention_mask)
            # Sigmoid per label: intents are independent (multi-label).
            probs = torch.sigmoid(logits).cpu().numpy()[0]

        probabilities = {
            name: round(float(probs[i]), 4) for i, name in enumerate(self.label_names)
        }
        active_intents = [
            name for i, name in enumerate(self.label_names) if probs[i] > threshold
        ]

        return {
            "intents": active_intents,
            "probabilities": probabilities,
        }
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
# Global singleton — routes import this instance; main.py is expected to
# call intent_service.load() once at startup.
intent_service = IntentService()
|
backend/models/query_rewriter.py
ADDED
|
@@ -0,0 +1,456 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Query Rewriter — transforms raw user queries into structured search inputs
|
| 3 |
+
using intent classification and slot extraction.
|
| 4 |
+
|
| 5 |
+
Takes: raw query + intents + extracted slots
|
| 6 |
+
Returns: rewritten search text + structured filters + metadata
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import re
|
| 10 |
+
from dataclasses import dataclass, field
|
| 11 |
+
from typing import Optional
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@dataclass
class SearchGroup:
    """A single search sub-query with its own text and filters."""
    # Cleaned text for one product search (used for embedding + keyword match).
    search_text: str
    # Structured filters (price_max, brand, ...) scoped to this sub-query only.
    filters: dict = field(default_factory=dict)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@dataclass
class RewrittenQuery:
    """Output of the query rewriter.

    A compound query ("chips and soda under 100") carries one SearchGroup
    per product in `search_groups`; a simple query has a single group that
    mirrors `search_text` / `filters`.
    """
    search_text: str  # Cleaned query for BERT embedding + keyword matching
    filters: dict  # Structured filters for Supabase WHERE clauses
    original_query: str  # The raw user input
    intents: list = field(default_factory=list)  # Detected intents
    slots: dict = field(default_factory=dict)  # All extracted slots
    is_rewritten: bool = False  # Whether any rewriting was applied
    search_groups: list = field(default_factory=list)  # List[SearchGroup] for compound queries
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Slot types that represent product names (included in search text)
PRODUCT_SLOTS = {"PRODUCT1", "PRODUCT2"}

# Slot types that become Supabase filters (excluded from search text)
FILTER_SLOTS = {"PRICE_MIN", "PRICE_MAX", "PRICE_MOD", "BRAND", "COLOR",
                "SIZE", "RATING_MIN", "RATING_MOD"}

# Modifier words to strip from search text (common non-product words).
# Mixed English and Filipino/Taglish stopwords — queries are bilingual.
# Compared against lowercased tokens (see the fallback cleanup in rewrite()).
MODIFIER_WORDS = {
    "under", "below", "less", "than", "above", "over", "more",
    "at", "least", "most", "around", "between", "and",
    "cheaper", "cheapest", "expensive",
    "budget", "affordable", "cheap", "pricey",
    "minimum", "maximum", "max", "min",
    "rating", "rated", "stars", "star",
    "i", "want", "need", "looking", "for", "find", "show", "me",
    "the", "a", "an", "of", "with", "in", "na", "ng", "ang", "yung",
    "paano", "saan", "ano", "may", "gusto", "ko", "hanap",
    "magkano", "pesos", "peso", "php",
}
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Conjunction patterns for compound query splitting.
# Applied with re.split — patterns must NOT contain capturing groups,
# because re.split returns captured text as extra list elements
# (e.g. r'(na\s+)?' would inject a spurious "na" part into the split).
COMPOUND_CONJUNCTIONS = [
    r'\s+and\s+',
    r'\s+at\s+saka\s+',
    r'\s+tapos\s+',
    r'\s+tsaka\s+',
    r'\s+pati\s+(?:na\s+)?',  # non-capturing: optional "na" after "pati"
]
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def split_compound_query(query: str) -> list[str]:
    """
    Split a compound query on conjunctions ('and', 'at saka', etc.)
    into separate product searches.

    "party items less than 300 and shoes for kids less 200"
    -> ["party items less than 300", "shoes for kids less 200"]

    "peanut butter and jelly under 200"
    -> ["peanut butter", "jelly under 200"]

    Only the FIRST conjunction pattern that produces two or more non-empty
    pieces wins; otherwise the query is returned unchanged as a one-element
    list.
    """
    for pattern in COMPOUND_CONJUNCTIONS:
        pieces = re.split(pattern, query, flags=re.IGNORECASE)
        if len(pieces) < 2:
            continue
        non_empty = [piece.strip() for piece in pieces if piece and piece.strip()]
        if len(non_empty) >= 2:
            return non_empty
    return [query]
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def split_sentences(query: str) -> list[str]:
    """
    Split a multi-sentence or compound query into individual sub-queries.

    Three passes, each refining the previous:
    1. Sentence-ending punctuation: '.' followed by whitespace + a letter
       (so decimals like "3.5" survive), or '?' / '!'.
    2. Commas, treated as product separators when they yield 2+ pieces.
    3. Conjunctions ('and', 'at saka', ...) via split_compound_query.
    """
    raw_parts = re.split(r'\.(?=\s+[A-Za-z])|[?!](?:\s+|$)', query)
    sentences = []
    for part in raw_parts:
        if part and part.strip():
            trimmed = part.strip().rstrip('.!?')
            if trimmed:
                sentences.append(trimmed)
    if not sentences:
        sentences = [query.strip()]

    # Treat commas as product separators only when splitting actually
    # produces at least two non-empty pieces; otherwise keep the sentence.
    pieces = []
    for sentence in sentences:
        fields = [f.strip() for f in sentence.split(',') if f and f.strip()]
        if len(fields) >= 2:
            pieces.extend(fields)
        else:
            pieces.append(sentence)

    # Finally expand each piece on conjunctions ('and', 'tapos', ...).
    expanded = []
    for piece in pieces:
        expanded.extend(split_compound_query(piece))

    return expanded if expanded else [query.strip()]
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _merge_rewritten_queries(
    sub_queries: list[RewrittenQuery],
    original_query: str,
) -> RewrittenQuery:
    """Merge multiple per-sentence RewrittenQuery results into one.

    Two merge modes:
    - COMPOUND: sub-queries have distinct search texts -> keep each as its
      own SearchGroup with independent filters; top-level filters stay empty.
    - SINGLE GROUP: all sub-queries collapsed to one text -> union slots,
      intersect/tighten numeric filters, and dedupe search words.
    """
    if len(sub_queries) == 1:
        # Trivial merge: wrap the lone result in its own search group.
        rq = sub_queries[0]
        rq.search_groups = [SearchGroup(search_text=rq.search_text, filters=rq.filters)]
        return rq

    # Union of all intents (deduplicated, preserving order)
    merged_intents = list(dict.fromkeys(
        intent for rq in sub_queries for intent in rq.intents
    ))

    # Each sub-query that originated from a separator (dot, comma, 'and') represents
    # a distinct product search — always keep them as independent search groups.
    # We only collapse to a single group when all sub-queries share the same search
    # text (i.e., splitting produced no meaningful separation).
    distinct_texts = len({rq.search_text for rq in sub_queries}) > 1

    if distinct_texts:
        # ── COMPOUND QUERY ──
        # Each sub-query becomes its own SearchGroup with independent filters.
        search_groups = [
            SearchGroup(search_text=rq.search_text, filters=rq.filters)
            for rq in sub_queries
        ]
        # Re-number product slots (PRODUCT1, PRODUCT2, ...) across sub-queries
        # so the merged slot dict keeps one entry per distinct product.
        merged_slots = {}
        product_idx = 1
        for rq in sub_queries:
            for key, value in rq.slots.items():
                if key in ("PRODUCT1", "PRODUCT2"):
                    slot_key = f"PRODUCT{product_idx}"
                    if slot_key not in merged_slots:
                        merged_slots[slot_key] = value
                        product_idx += 1
                elif key not in merged_slots:
                    merged_slots[key] = value
        # Display-only joined text; per-group texts drive the actual searches.
        search_text = " | ".join(g.search_text for g in search_groups)
        return RewrittenQuery(
            search_text=search_text,
            filters={},
            original_query=original_query,
            intents=merged_intents,
            slots=merged_slots,
            is_rewritten=True,
            search_groups=search_groups,
        )

    # ── SINGLE GROUP (original merge behavior) ──
    # First-come-first-served slot union; product slots fill PRODUCT1 then PRODUCT2.
    merged_slots = {}
    for rq in sub_queries:
        for key, value in rq.slots.items():
            if key in ("PRODUCT1", "PRODUCT2"):
                if "PRODUCT1" not in merged_slots:
                    merged_slots["PRODUCT1"] = value
                elif "PRODUCT2" not in merged_slots:
                    merged_slots["PRODUCT2"] = value
            elif key not in merged_slots:
                merged_slots[key] = value

    # Numeric filters combine toward the TIGHTER constraint:
    # highest price_min / rating_min, lowest price_max.
    merged_filters = {}
    for rq in sub_queries:
        for key, value in rq.filters.items():
            if key not in merged_filters:
                merged_filters[key] = value
            elif key == "price_min":
                merged_filters[key] = max(merged_filters[key], value)
            elif key == "price_max":
                merged_filters[key] = min(merged_filters[key], value)
            elif key == "rating_min":
                merged_filters[key] = max(merged_filters[key], value)

    # Concatenate search texts, dropping case-insensitive duplicate words
    # while preserving first-seen order and original casing.
    seen = set()
    search_parts = []
    for rq in sub_queries:
        for word in rq.search_text.split():
            lower = word.lower()
            if lower not in seen:
                seen.add(lower)
                search_parts.append(word)
    search_text = " ".join(search_parts)

    return RewrittenQuery(
        search_text=search_text,
        filters=merged_filters,
        original_query=original_query,
        intents=merged_intents,
        slots=merged_slots,
        is_rewritten=any(rq.is_rewritten for rq in sub_queries),
        search_groups=[SearchGroup(search_text=search_text, filters=merged_filters)],
    )
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def _parse_price(value: str) -> Optional[float]:
|
| 211 |
+
"""Try to parse a numeric value from a price slot."""
|
| 212 |
+
clean = re.sub(r"[^\d.]", "", value)
|
| 213 |
+
try:
|
| 214 |
+
return float(clean)
|
| 215 |
+
except ValueError:
|
| 216 |
+
return None
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def _detect_price_direction(query: str) -> Optional[str]:
|
| 220 |
+
"""
|
| 221 |
+
Detect whether the user's price intent is a minimum or maximum
|
| 222 |
+
based on modifier words in the raw query.
|
| 223 |
+
|
| 224 |
+
Returns "min", "max", or None if ambiguous.
|
| 225 |
+
"""
|
| 226 |
+
q = query.lower()
|
| 227 |
+
# Patterns that indicate a MINIMUM price ("more than X", "above X", etc.)
|
| 228 |
+
min_patterns = [
|
| 229 |
+
r"\bmore\s+than\b", r"\babove\b", r"\bover\b", r"\bat\s+least\b",
|
| 230 |
+
r"\bhigher\s+than\b", r"\bstarting\b", r"\bfrom\b",
|
| 231 |
+
r"\bexpensive\b", r"\bpricey\b",
|
| 232 |
+
# Filipino
|
| 233 |
+
r"\bhigit\s+sa\b", r"\bmula\s+sa\b",
|
| 234 |
+
]
|
| 235 |
+
# Patterns that indicate a MAXIMUM price ("less than X", "under X", etc.)
|
| 236 |
+
max_patterns = [
|
| 237 |
+
r"\bless\s+than\b", r"\bunder\b", r"\bbelow\b", r"\bat\s+most\b",
|
| 238 |
+
r"\bcheaper\s+than\b", r"\bbudget\b", r"\bcheap\b", r"\baffordable\b",
|
| 239 |
+
# Filipino
|
| 240 |
+
r"\bmura\b", r"\bmababa\b",
|
| 241 |
+
]
|
| 242 |
+
for pat in min_patterns:
|
| 243 |
+
if re.search(pat, q):
|
| 244 |
+
return "min"
|
| 245 |
+
for pat in max_patterns:
|
| 246 |
+
if re.search(pat, q):
|
| 247 |
+
return "max"
|
| 248 |
+
return None
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def rewrite(query: str, intents: list[str], slots: dict) -> RewrittenQuery:
    """
    Rewrite a user query based on detected intents and extracted slots.

    Logic:
    - For free_form queries (and no other intents): pass through as-is
    - For filtered_search: extract filter slots into structured filters,
      build search text from product slots only
    - For single_search / multi_search: build search text from product slots,
      include brand/color in search text too

    The caller's `slots` dict is never mutated: price-direction swaps and
    the regex fallback below operate on a private copy, and that copy is
    what the returned RewrittenQuery carries in `.slots`.
    """
    # Copy up-front so the pop/assign corrections below cannot leak back
    # into the slot service's result as a side effect.
    slots = dict(slots)

    # Default: use original query as-is
    result = RewrittenQuery(
        search_text=query.strip(),
        filters={},
        original_query=query.strip(),
        intents=intents,
        slots=slots,
    )

    # If no intents or slots were extracted, return original query
    if not intents and not slots:
        return result

    # Free-form intent with no product slots: pass through as-is
    # (e.g., "pano magluto ng adobo" — not a product search)
    if "free_form" in intents and len(intents) == 1 and not slots:
        return result

    # --- Correct price slot direction ---
    # The NER model may tag the price value as PRICE_MAX when the user
    # actually means "more than X" (a minimum), or vice versa.
    # Use modifier words in the raw query to fix this.
    direction = _detect_price_direction(query)

    has_min = "PRICE_MIN" in slots
    has_max = "PRICE_MAX" in slots

    if direction == "min" and has_max and not has_min:
        # NER said PRICE_MAX but user said "more than" → swap to PRICE_MIN
        slots["PRICE_MIN"] = slots.pop("PRICE_MAX")
    elif direction == "max" and has_min and not has_max:
        # NER said PRICE_MIN but user said "under" → swap to PRICE_MAX
        slots["PRICE_MAX"] = slots.pop("PRICE_MIN")

    # --- Regex fallback: extract price if NER missed it ---
    # Covers patterns like "less than 30", "under 500", "more than 100", etc.
    if direction is not None and "PRICE_MIN" not in slots and "PRICE_MAX" not in slots:
        price_match = re.search(r'(\d+(?:\.\d+)?)', query)
        if price_match:
            price_val = price_match.group(1)
            if direction == "max":
                slots["PRICE_MAX"] = price_val
                print(f"[QueryRewriter] Regex fallback: PRICE_MAX={price_val} (from '{query}')")
            elif direction == "min":
                slots["PRICE_MIN"] = price_val
                print(f"[QueryRewriter] Regex fallback: PRICE_MIN={price_val} (from '{query}')")

    # --- Build structured filters from slots ---
    filters = {}

    price_max = slots.get("PRICE_MAX")
    if price_max:
        parsed = _parse_price(price_max)
        if parsed is not None:
            filters["price_max"] = parsed

    price_min = slots.get("PRICE_MIN")
    if price_min:
        parsed = _parse_price(price_min)
        if parsed is not None:
            filters["price_min"] = parsed

    brand = slots.get("BRAND")
    if brand:
        filters["brand"] = brand.strip()

    color = slots.get("COLOR")
    if color:
        filters["color"] = color.strip()

    size = slots.get("SIZE")
    if size:
        filters["size"] = size.strip()

    rating_min = slots.get("RATING_MIN")
    if rating_min:
        # _parse_price is a generic "extract a number" helper; reused for ratings.
        parsed = _parse_price(rating_min)
        if parsed is not None:
            filters["rating_min"] = parsed

    # --- Build search text ---
    search_parts = []

    # Include product names
    for slot_type in ["PRODUCT1", "PRODUCT2"]:
        if slot_type in slots:
            search_parts.append(slots[slot_type].strip())

    # Include brand in search text (helps BERT + keyword matching)
    if brand:
        search_parts.insert(0, brand.strip())

    # Include color in search text (helps keyword matching)
    if color:
        search_parts.insert(0, color.strip())

    # If we have product-related slots, use them as the search text
    if search_parts:
        search_text = " ".join(search_parts)
    else:
        # No product slots found — clean the original query:
        # drop modifier words and bare integers (price values).
        words = query.strip().split()
        cleaned = [
            w for w in words
            if w.lower() not in MODIFIER_WORDS
            and not re.match(r"^\d+$", w)
        ]
        search_text = " ".join(cleaned) if cleaned else query.strip()

    result.search_text = search_text
    result.filters = filters
    result.is_rewritten = bool(filters) or (search_text != query.strip())

    return result
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
class QueryRewriterService:
|
| 381 |
+
"""
|
| 382 |
+
Orchestrates intent classification + slot extraction + query rewriting.
|
| 383 |
+
This is the main entry point called from the search route.
|
| 384 |
+
"""
|
| 385 |
+
|
| 386 |
+
def __init__(self):
|
| 387 |
+
self._intent_service = None
|
| 388 |
+
self._slot_service = None
|
| 389 |
+
|
| 390 |
+
def init(self, intent_service, slot_service):
|
| 391 |
+
"""Initialize with references to the intent and slot services."""
|
| 392 |
+
self._intent_service = intent_service
|
| 393 |
+
self._slot_service = slot_service
|
| 394 |
+
|
| 395 |
+
def process(self, query: str) -> RewrittenQuery:
|
| 396 |
+
"""
|
| 397 |
+
Full query rewriting pipeline with multi-sentence support:
|
| 398 |
+
1. Split query into sentences
|
| 399 |
+
2. Process each sentence (intent + slot + rewrite)
|
| 400 |
+
3. Merge results into a single RewrittenQuery
|
| 401 |
+
|
| 402 |
+
Returns RewrittenQuery with search_text, filters, intents, and slots.
|
| 403 |
+
"""
|
| 404 |
+
sentences = split_sentences(query)
|
| 405 |
+
|
| 406 |
+
if len(sentences) == 1:
|
| 407 |
+
# Single sentence: no splitting overhead
|
| 408 |
+
result = self._process_single(sentences[0])
|
| 409 |
+
result.original_query = query.strip()
|
| 410 |
+
result.search_groups = [SearchGroup(search_text=result.search_text, filters=result.filters)]
|
| 411 |
+
self._log(query, result)
|
| 412 |
+
return result
|
| 413 |
+
|
| 414 |
+
# Multiple sentences: process each independently, then merge
|
| 415 |
+
sub_results = [self._process_single(s) for s in sentences]
|
| 416 |
+
merged = _merge_rewritten_queries(sub_results, original_query=query.strip())
|
| 417 |
+
|
| 418 |
+
if merged.is_rewritten:
|
| 419 |
+
print(f"[QueryRewriter] '{query}' -> '{merged.search_text}'")
|
| 420 |
+
print(f"[QueryRewriter] Sentences: {sentences}")
|
| 421 |
+
print(f"[QueryRewriter] Intents: {merged.intents}")
|
| 422 |
+
print(f"[QueryRewriter] Slots: {merged.slots}")
|
| 423 |
+
print(f"[QueryRewriter] Filters: {merged.filters}")
|
| 424 |
+
|
| 425 |
+
return merged
|
| 426 |
+
|
| 427 |
+
def _process_single(self, sentence: str) -> RewrittenQuery:
|
| 428 |
+
"""Process a single sentence through intent + slot + rewrite."""
|
| 429 |
+
# Step 1: Classify intents
|
| 430 |
+
intent_result = {"intents": [], "probabilities": {}}
|
| 431 |
+
if self._intent_service and self._intent_service._loaded:
|
| 432 |
+
intent_result = self._intent_service.predict(sentence)
|
| 433 |
+
|
| 434 |
+
# Step 2: Extract slots
|
| 435 |
+
slot_result = {"slots": {}, "tagged_tokens": []}
|
| 436 |
+
if self._slot_service and self._slot_service._loaded:
|
| 437 |
+
slot_result = self._slot_service.extract(sentence)
|
| 438 |
+
|
| 439 |
+
# Step 3: Rewrite
|
| 440 |
+
return rewrite(
|
| 441 |
+
query=sentence,
|
| 442 |
+
intents=intent_result["intents"],
|
| 443 |
+
slots=slot_result["slots"],
|
| 444 |
+
)
|
| 445 |
+
|
| 446 |
+
def _log(self, query: str, result: RewrittenQuery):
|
| 447 |
+
"""Log rewriting details for debugging."""
|
| 448 |
+
if result.is_rewritten:
|
| 449 |
+
print(f"[QueryRewriter] '{query}' -> '{result.search_text}'")
|
| 450 |
+
print(f"[QueryRewriter] Intents: {result.intents}")
|
| 451 |
+
print(f"[QueryRewriter] Slots: {result.slots}")
|
| 452 |
+
print(f"[QueryRewriter] Filters: {result.filters}")
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
# Global singleton — shared by the app; model services are attached later
# via query_rewriter.init(...) at startup.
query_rewriter = QueryRewriterService()
|
backend/models/ranker.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Ranker Service — loads the trained CrossEncoder model and scores
|
| 3 |
+
(query, product_title) pairs for relevance ranking.
|
| 4 |
+
Loaded once at startup, reused for all requests.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import torch
|
| 9 |
+
import numpy as np
|
| 10 |
+
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
| 11 |
+
from config import RANKER_MODEL_PATH
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class RankerService:
    """Singleton service that scores query-product relevance using a CrossEncoder."""

    def __init__(self):
        # All state is populated by load(); until then rank() returns zeros.
        self.model = None
        self.tokenizer = None
        self.device = None
        self._loaded = False

    def load(self):
        """Load the trained CrossEncoder model and tokenizer. Call once at app startup."""
        if self._loaded:
            return

        model_path = RANKER_MODEL_PATH
        if not os.path.exists(model_path):
            # Missing weights are not fatal: search degrades gracefully.
            print(f"[RankerService] WARNING: Model not found at {model_path}")
            print("[RankerService] Search will use classification-only ranking (no CrossEncoder re-ranking)")
            return

        print(f"[RankerService] Loading CrossEncoder from {model_path}...")
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model = AutoModelForSequenceClassification.from_pretrained(model_path).to(self.device)
        self.tokenizer = AutoTokenizer.from_pretrained(model_path)
        self.model.eval()
        self._loaded = True
        print(f"[RankerService] CrossEncoder loaded on {self.device}")

    def rank(self, query: str, product_titles: list[str], batch_size: int = 64) -> np.ndarray:
        """
        Score a query against multiple product titles using the CrossEncoder.
        Returns a numpy array of relevance scores, shape (N,).
        Higher score = more relevant.
        """
        if not self._loaded:
            # Model unavailable: neutral scores keep downstream ranking stable.
            return np.zeros(len(product_titles))

        total = len(product_titles)
        scores = np.zeros(total)

        with torch.no_grad():
            start = 0
            while start < total:
                end = min(start + batch_size, total)
                titles = product_titles[start:end]

                # Tokenize (query, title) pairs for the cross-encoder.
                encoded = self.tokenizer(
                    [query] * len(titles),
                    titles,
                    padding=True,
                    truncation=True,
                    return_tensors="pt",
                ).to(self.device)

                batch_logits = self.model(**encoded).logits
                scores[start:end] = batch_logits.squeeze(-1).cpu().numpy()
                start = end

        return scores

    def normalize_scores(self, scores: np.ndarray) -> np.ndarray:
        """Normalize scores to [0, 1] range using min-max normalization."""
        if len(scores) == 0:
            return scores
        lo, hi = scores.min(), scores.max()
        spread = hi - lo
        if spread < 1e-8:
            return np.ones_like(scores)  # All same score → all 1.0
        return (scores - lo) / spread
| 83 |
+
|
| 84 |
+
|
| 85 |
+
# Global singleton instance
|
| 86 |
+
ranker_service = RankerService()
|
backend/models/slot_service.py
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Slot Extraction Service — runs the trained SlotExtractor (NER) model
|
| 3 |
+
for real-time inference on user queries.
|
| 4 |
+
|
| 5 |
+
Loaded once at startup, reused for all search requests.
|
| 6 |
+
Extracts: PRODUCT1, PRODUCT2, BRAND, COLOR, PRICE_MIN, PRICE_MAX,
|
| 7 |
+
PRICE_MOD, RATING_MIN, RATING_MOD, CONN, SIZE, etc.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
import json
|
| 12 |
+
import torch
|
| 13 |
+
import torch.nn as nn
|
| 14 |
+
import numpy as np
|
| 15 |
+
from transformers import BertModel, BertTokenizerFast
|
| 16 |
+
from config import SLOT_MODEL_PATH, BERT_MODEL_NAME, SLOT_MAX_LENGTH
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class SlotExtractor(nn.Module):
    """
    BERT + token-level classification head for NER/slot extraction.
    Must match the architecture used during training.
    """

    def __init__(self, bert_model_name="bert-base-multilingual-uncased", num_tags=20):
        super().__init__()
        self.bert = BertModel.from_pretrained(bert_model_name)
        self.dropout = nn.Dropout(0.3)
        # Size the head from the encoder config instead of hard-coding 768,
        # so non-base encoders (e.g. 1024-dim "large" variants) also work.
        # For bert-base models hidden_size == 768, so existing trained
        # checkpoints still load unchanged.
        self.classifier = nn.Linear(self.bert.config.hidden_size, num_tags)

    def forward(self, input_ids, attention_mask):
        """Return per-token tag logits of shape (batch, seq_len, num_tags)."""
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        sequence_output = self.dropout(outputs.last_hidden_state)
        return self.classifier(sequence_output)
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class SlotService:
    """Singleton service for slot/entity extraction at inference time."""

    def __init__(self):
        self.model = None       # SlotExtractor instance, set by load()
        self.tokenizer = None   # BertTokenizerFast matching the encoder
        self.device = None      # torch.device chosen at load time
        self.tag2id = {}        # BIO tag name -> integer id
        self.id2tag = {}        # integer id -> BIO tag name
        self._loaded = False

    def load(self):
        """Load the trained slot extractor. Call once at app startup."""
        if self._loaded:
            return

        model_dir = SLOT_MODEL_PATH

        model_path = os.path.join(model_dir, "model.pt")
        tag_map_path = os.path.join(model_dir, "tag_map.json")
        config_path = os.path.join(model_dir, "config.json")

        if not os.path.exists(model_path):
            # Missing weights are not fatal: extract() degrades to empty slots.
            print(f"[SlotService] WARNING: Model not found at {model_path}")
            print("[SlotService] Slot extraction will be unavailable.")
            return

        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # Load tag map — prefer the explicit tag_map.json, fall back to the
        # tag_names list inside config.json.
        if os.path.exists(tag_map_path):
            with open(tag_map_path, "r") as f:
                self.tag2id = json.load(f)
        elif os.path.exists(config_path):
            with open(config_path, "r") as f:
                config = json.load(f)
            tag_names = config.get("tag_names", [])
            self.tag2id = {tag: i for i, tag in enumerate(tag_names)}
        else:
            print("[SlotService] WARNING: No tag_map.json or config.json found")
            return

        self.id2tag = {v: k for k, v in self.tag2id.items()}
        num_tags = len(self.tag2id)

        print(f"[SlotService] Loading slot extractor ({num_tags} tags)...")
        self.model = SlotExtractor(
            bert_model_name=BERT_MODEL_NAME,
            num_tags=num_tags,
        )

        # NOTE(review): weights_only=False unpickles arbitrary objects; this is
        # only safe because the checkpoint is produced by our own training
        # pipeline. Do not point this at untrusted files.
        checkpoint = torch.load(model_path, map_location=self.device, weights_only=False)
        self.model.load_state_dict(checkpoint["model_state_dict"])
        self.model.to(self.device)
        self.model.eval()

        self.tokenizer = BertTokenizerFast.from_pretrained(BERT_MODEL_NAME)
        self._loaded = True
        print(f"[SlotService] Slot extractor loaded on {self.device}")
        print(f"[SlotService] Tags: {sorted(self.tag2id.keys())}")

    def extract(self, query: str) -> dict:
        """
        Extract slots/entities from a query using BIO tagging.

        Returns:
            {
                "slots": {
                    "PRODUCT1": "shoes",
                    "BRAND": "Nike",
                    "COLOR": "blue",
                    "PRICE_MAX": "3000"
                },
                "tagged_tokens": [
                    ("blue", "B-COLOR"),
                    ("Nike", "B-BRAND"),
                    ("shoes", "B-PRODUCT1"),
                    ("under", "B-PRICE_MOD"),
                    ("3000", "B-PRICE_MAX")
                ]
            }
        """
        if not self._loaded:
            return {"slots": {}, "tagged_tokens": []}

        # Tokenize on whitespace first so predictions map back to user words.
        words = query.split()
        encoding = self.tokenizer(
            words,
            is_split_into_words=True,
            padding="max_length",
            truncation=True,
            max_length=SLOT_MAX_LENGTH,
            return_tensors="pt",
        )

        input_ids = encoding["input_ids"].to(self.device)
        attention_mask = encoding["attention_mask"].to(self.device)
        word_ids = encoding.word_ids(batch_index=0)

        # Predict
        with torch.no_grad():
            logits = self.model(input_ids, attention_mask)
            preds = torch.argmax(logits, dim=-1).cpu().numpy()[0]

        # Decode: map subword predictions back to words.
        # Only the first subword's prediction counts for each word.
        word_tags = {}
        for token_idx, word_idx in enumerate(word_ids):
            if word_idx is None:
                continue  # [CLS], [SEP], [PAD]
            if word_idx not in word_tags:
                tag_id = int(preds[token_idx])
                word_tags[word_idx] = self.id2tag.get(tag_id, "O")

        # Build tagged tokens list; words truncated away default to "O".
        tagged_tokens = []
        for word_idx, word in enumerate(words):
            tag = word_tags.get(word_idx, "O")
            tagged_tokens.append((word, tag))

        # Merge BIO tags into slot dict
        slots = self._merge_bio_tags(words, tagged_tokens)

        return {
            "slots": slots,
            "tagged_tokens": tagged_tokens,
        }

    def _merge_bio_tags(self, words: list, tagged_tokens: list) -> dict:
        """
        Merge BIO-tagged tokens into a slot dictionary.

        `words` is unused; it is kept for signature compatibility with callers.

        Example:
            [("blue", "B-COLOR"), ("Nike", "B-BRAND"), ("running", "B-PRODUCT1"),
             ("shoes", "I-PRODUCT1")]
            -> {"COLOR": "blue", "BRAND": "Nike", "PRODUCT1": "running shoes"}

        Repeated entities of the same type are concatenated with a space.
        """
        slots = {}
        current_entity = None
        current_tokens = []

        def _flush():
            """Commit the in-progress entity span into `slots`, then reset."""
            nonlocal current_entity, current_tokens
            if current_entity and current_tokens:
                slot_value = " ".join(current_tokens)
                if current_entity in slots:
                    slots[current_entity] += " " + slot_value
                else:
                    slots[current_entity] = slot_value
            current_entity = None
            current_tokens = []

        for word, tag in tagged_tokens:
            if tag.startswith("B-"):
                # New entity begins: save the previous one first.
                _flush()
                current_entity = tag[2:]
                current_tokens = [word]
            elif tag.startswith("I-"):
                entity_type = tag[2:]
                if entity_type == current_entity:
                    current_tokens.append(word)
                else:
                    # Mismatched I-tag: save current, start a new span anyway.
                    _flush()
                    current_entity = entity_type
                    current_tokens = [word]
            else:
                # O tag ends any open entity span.
                _flush()

        # Save the last open entity, if any.
        _flush()

        return slots
| 233 |
+
|
| 234 |
+
|
| 235 |
+
# Global singleton — shared across the app; weights are loaded once at
# startup via slot_service.load().
slot_service = SlotService()
|
backend/requirements.txt
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SmartShop Backend - Python Dependencies
|
| 2 |
+
|
| 3 |
+
# Web framework
|
| 4 |
+
fastapi==0.115.0
|
| 5 |
+
uvicorn[standard]==0.30.6
|
| 6 |
+
python-dotenv==1.0.1
|
| 7 |
+
|
| 8 |
+
# Database
|
| 9 |
+
supabase==2.7.2
|
| 10 |
+
psycopg[binary]==3.3.2
|
| 11 |
+
|
| 12 |
+
# ML / AI
|
| 13 |
+
torch>=2.0.0
|
| 14 |
+
transformers>=4.30.0
|
| 15 |
+
numpy>=1.24.0
|
| 16 |
+
|
| 17 |
+
# Auth
|
| 18 |
+
PyJWT==2.9.0
|
| 19 |
+
bcrypt==4.2.0
|
| 20 |
+
python-multipart==0.0.9
|
| 21 |
+
|
| 22 |
+
# CORS & other
|
| 23 |
+
pydantic[email]==2.9.0
|
| 24 |
+
|
| 25 |
+
# HuggingFace Hub (for downloading model weights at build time)
|
| 26 |
+
huggingface_hub>=0.24.0
|
backend/routes/__init__.py
ADDED
|
File without changes
|
backend/routes/admin.py
ADDED
|
@@ -0,0 +1,2273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Admin routes — dashboard, user management, reports, product management.
|
| 3 |
+
Only accessible by admin users (role='admin').
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, Query
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from typing import Optional
|
| 9 |
+
from database import get_supabase
|
| 10 |
+
from routes.auth import get_current_user
|
| 11 |
+
from datetime import datetime, timedelta, timezone
|
| 12 |
+
|
| 13 |
+
# All routes below are mounted under /admin and grouped under the "Admin"
# tag in the OpenAPI docs.
router = APIRouter(prefix="/admin", tags=["Admin"])
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# --- Helpers ---
|
| 17 |
+
|
| 18 |
+
async def require_admin(current_user: dict = Depends(get_current_user)):
    """Dependency that ensures the current user is an admin."""
    sb = get_supabase()
    rows = sb.table("users").select("role").eq("id", current_user["sub"]).execute().data
    # The user must exist and have the admin role; anything else is rejected.
    is_admin = bool(rows) and rows[0].get("role") == "admin"
    if not is_admin:
        raise HTTPException(status_code=403, detail="Admin access required")
    return current_user
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# --- Request/Response Models ---
|
| 28 |
+
|
| 29 |
+
class AdminUserResponse(BaseModel):
    """One user row as returned to the admin user-management UI."""
    id: str
    email: str
    full_name: str
    role: str  # e.g. "admin" per require_admin; other values — confirm against users table
    is_banned: bool
    balance: float
    created_at: str  # presumably an ISO-8601 timestamp string — TODO confirm
    department_id: Optional[str] = None   # None when the user is not in a department
    department_name: Optional[str] = None  # denormalized for display; None when unassigned
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BanRequest(BaseModel):
    """Payload for setting a user's banned state."""
    is_banned: bool  # the desired banned state to apply
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class UpdateDepartmentRequest(BaseModel):
|
| 46 |
+
department_id: Optional[str] = None # None means remove from department
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class DashboardResponse(BaseModel):
|
| 50 |
+
total_users: int
|
| 51 |
+
total_products: int
|
| 52 |
+
total_orders: int
|
| 53 |
+
total_transaction_orders: int = 0
|
| 54 |
+
total_revenue: float
|
| 55 |
+
total_sales_volume: float
|
| 56 |
+
total_admin_earnings: float = 0
|
| 57 |
+
total_buyers: int = 0
|
| 58 |
+
total_departments: int = 0
|
| 59 |
+
total_managers: int = 0
|
| 60 |
+
total_staff: int = 0
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class TransactionDetail(BaseModel):
|
| 64 |
+
id: str
|
| 65 |
+
buyer_name: str
|
| 66 |
+
seller_name: str
|
| 67 |
+
product_title: str
|
| 68 |
+
quantity: int
|
| 69 |
+
amount: float
|
| 70 |
+
seller_amount: float
|
| 71 |
+
admin_commission: float
|
| 72 |
+
delivery_fee: float = 0
|
| 73 |
+
status: str
|
| 74 |
+
purchase_type: str = "delivery"
|
| 75 |
+
product_images: list = []
|
| 76 |
+
created_at: str
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class DailyIncome(BaseModel):
|
| 80 |
+
date: str
|
| 81 |
+
income: float
|
| 82 |
+
transactions: int
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class TopSeller(BaseModel):
|
| 86 |
+
seller_id: str
|
| 87 |
+
seller_name: str
|
| 88 |
+
total_sales: float
|
| 89 |
+
transaction_count: int
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class TopProduct(BaseModel):
|
| 93 |
+
product_id: str
|
| 94 |
+
product_title: str
|
| 95 |
+
times_sold: int
|
| 96 |
+
total_revenue: float
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class ReportsResponse(BaseModel):
|
| 100 |
+
total_revenue: float
|
| 101 |
+
total_sales_volume: float
|
| 102 |
+
total_orders: int
|
| 103 |
+
avg_transaction_value: float
|
| 104 |
+
daily_income: list[DailyIncome]
|
| 105 |
+
top_sellers: list[TopSeller]
|
| 106 |
+
top_products: list[TopProduct]
|
| 107 |
+
monthly_income: list[DailyIncome]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class AdminProductResponse(BaseModel):
|
| 111 |
+
id: str
|
| 112 |
+
seller_id: str
|
| 113 |
+
seller_name: str
|
| 114 |
+
title: str
|
| 115 |
+
description: str
|
| 116 |
+
price: float
|
| 117 |
+
stock: int
|
| 118 |
+
images: list[str]
|
| 119 |
+
is_active: bool
|
| 120 |
+
created_at: str
|
| 121 |
+
department_id: Optional[str] = None
|
| 122 |
+
department_name: Optional[str] = None
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class AdminUpdateProductRequest(BaseModel):
|
| 126 |
+
title: Optional[str] = None
|
| 127 |
+
price: Optional[float] = None
|
| 128 |
+
stock: Optional[int] = None
|
| 129 |
+
is_active: Optional[bool] = None
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# --- Routes ---
|
| 133 |
+
|
| 134 |
+
@router.get("/dashboard", response_model=DashboardResponse)
|
| 135 |
+
async def admin_dashboard(admin: dict = Depends(require_admin)):
|
| 136 |
+
"""Get admin dashboard stats."""
|
| 137 |
+
sb = get_supabase()
|
| 138 |
+
|
| 139 |
+
users = sb.table("users").select("id", count="exact").execute()
|
| 140 |
+
products = sb.table("products").select("id", count="exact").eq("is_active", True).execute()
|
| 141 |
+
txns = sb.table("product_transactions").select("amount").in_("status", ["completed", "delivered"]).execute()
|
| 142 |
+
all_txns = sb.table("product_transactions").select("id", count="exact").execute()
|
| 143 |
+
|
| 144 |
+
total_volume = sum(float(t.get("amount", 0)) for t in txns.data) if txns.data else 0
|
| 145 |
+
|
| 146 |
+
# Admin earnings = total credited to admin from successful transactions
|
| 147 |
+
earnings = sb.table("admin_earnings").select("amount").execute()
|
| 148 |
+
total_admin_earnings = sum(float(e["amount"]) for e in (earnings.data or []))
|
| 149 |
+
|
| 150 |
+
# Role counts
|
| 151 |
+
buyers_count = sb.table("users").select("id", count="exact").eq("role", "buyer").execute()
|
| 152 |
+
managers_count = sb.table("users").select("id", count="exact").eq("role", "manager").execute()
|
| 153 |
+
staff_count = sb.table("users").select("id", count="exact").eq("role", "seller").execute()
|
| 154 |
+
departments_count = sb.table("departments").select("id", count="exact").execute()
|
| 155 |
+
|
| 156 |
+
return DashboardResponse(
|
| 157 |
+
total_users=users.count or 0,
|
| 158 |
+
total_products=products.count or 0,
|
| 159 |
+
total_orders=len(txns.data) if txns.data else 0,
|
| 160 |
+
total_transaction_orders=all_txns.count or 0,
|
| 161 |
+
total_revenue=round(total_volume, 2),
|
| 162 |
+
total_sales_volume=round(total_volume, 2),
|
| 163 |
+
total_admin_earnings=round(total_admin_earnings, 2),
|
| 164 |
+
total_buyers=buyers_count.count or 0,
|
| 165 |
+
total_departments=departments_count.count or 0,
|
| 166 |
+
total_managers=managers_count.count or 0,
|
| 167 |
+
total_staff=staff_count.count or 0,
|
| 168 |
+
)
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
@router.get("/users", response_model=list[AdminUserResponse])
|
| 172 |
+
async def list_users(
|
| 173 |
+
search: str = Query("", description="Search by name or email"),
|
| 174 |
+
role: str = Query("", description="Filter by role (buyer, seller, manager, delivery)"),
|
| 175 |
+
department_id: str = Query("", description="Filter by department ID"),
|
| 176 |
+
admin: dict = Depends(require_admin),
|
| 177 |
+
):
|
| 178 |
+
"""List all users. Supports search, role filter, and department filter."""
|
| 179 |
+
sb = get_supabase()
|
| 180 |
+
|
| 181 |
+
query = sb.table("users").select("*")
|
| 182 |
+
|
| 183 |
+
if search:
|
| 184 |
+
query = query.or_(f"full_name.ilike.%{search}%,email.ilike.%{search}%")
|
| 185 |
+
if role:
|
| 186 |
+
query = query.eq("role", role)
|
| 187 |
+
if department_id:
|
| 188 |
+
query = query.eq("department_id", department_id)
|
| 189 |
+
|
| 190 |
+
users = query.order("created_at", desc=True).execute()
|
| 191 |
+
|
| 192 |
+
# Get department names for users with department_id
|
| 193 |
+
dept_ids = set(u.get("department_id") for u in (users.data or []) if u.get("department_id"))
|
| 194 |
+
dept_names = {}
|
| 195 |
+
if dept_ids:
|
| 196 |
+
depts = sb.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
|
| 197 |
+
dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
|
| 198 |
+
|
| 199 |
+
result = []
|
| 200 |
+
for u in users.data:
|
| 201 |
+
result.append(AdminUserResponse(
|
| 202 |
+
id=u["id"],
|
| 203 |
+
email=u["email"],
|
| 204 |
+
full_name=u["full_name"],
|
| 205 |
+
role=u["role"],
|
| 206 |
+
is_banned=u.get("is_banned", False),
|
| 207 |
+
balance=0.0,
|
| 208 |
+
created_at=u["created_at"],
|
| 209 |
+
department_id=u.get("department_id"),
|
| 210 |
+
department_name=dept_names.get(u.get("department_id", ""), None),
|
| 211 |
+
))
|
| 212 |
+
return result
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
@router.put("/users/{user_id}/ban")
|
| 216 |
+
async def ban_user(user_id: str, req: BanRequest, admin: dict = Depends(require_admin)):
|
| 217 |
+
"""Ban or unban a user. Admins cannot be banned."""
|
| 218 |
+
sb = get_supabase()
|
| 219 |
+
|
| 220 |
+
target = sb.table("users").select("role").eq("id", user_id).execute()
|
| 221 |
+
if not target.data:
|
| 222 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 223 |
+
if target.data[0].get("role") == "admin":
|
| 224 |
+
raise HTTPException(status_code=400, detail="Cannot ban an admin account")
|
| 225 |
+
|
| 226 |
+
sb.table("users").update({"is_banned": req.is_banned}).eq("id", user_id).execute()
|
| 227 |
+
return {"message": f"User {'banned' if req.is_banned else 'unbanned'} successfully"}
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
@router.put("/users/{user_id}/department")
|
| 231 |
+
async def update_user_department(user_id: str, req: UpdateDepartmentRequest, admin: dict = Depends(require_admin)):
|
| 232 |
+
"""Assign or remove a user from a department/store. Only for seller and manager roles."""
|
| 233 |
+
sb = get_supabase()
|
| 234 |
+
|
| 235 |
+
target = sb.table("users").select("id, role, department_id").eq("id", user_id).execute()
|
| 236 |
+
if not target.data:
|
| 237 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 238 |
+
|
| 239 |
+
user = target.data[0]
|
| 240 |
+
if user["role"] not in ("seller", "manager"):
|
| 241 |
+
raise HTTPException(status_code=400, detail="Only staff (seller) and manager users can be assigned to a store")
|
| 242 |
+
|
| 243 |
+
if req.department_id:
|
| 244 |
+
dept = sb.table("departments").select("id, manager_id").eq("id", req.department_id).execute()
|
| 245 |
+
if not dept.data:
|
| 246 |
+
raise HTTPException(status_code=404, detail="Store not found")
|
| 247 |
+
# If assigning a manager, ensure the department doesn't already have a different manager
|
| 248 |
+
if user["role"] == "manager" and dept.data[0].get("manager_id") and dept.data[0]["manager_id"] != user_id:
|
| 249 |
+
raise HTTPException(status_code=400, detail="This store already has a manager assigned")
|
| 250 |
+
|
| 251 |
+
# Remove from old department if was a manager there
|
| 252 |
+
if user["role"] == "manager" and user.get("department_id"):
|
| 253 |
+
sb.table("departments").update({"manager_id": None}).eq("manager_id", user_id).execute()
|
| 254 |
+
|
| 255 |
+
# Update user's department
|
| 256 |
+
sb.table("users").update({"department_id": req.department_id}).eq("id", user_id).execute()
|
| 257 |
+
|
| 258 |
+
# If assigning a manager to a new department, set them as the department's manager
|
| 259 |
+
if user["role"] == "manager" and req.department_id:
|
| 260 |
+
sb.table("departments").update({"manager_id": user_id}).eq("id", req.department_id).execute()
|
| 261 |
+
|
| 262 |
+
if req.department_id:
|
| 263 |
+
return {"message": "User assigned to store successfully"}
|
| 264 |
+
return {"message": "User removed from store successfully"}
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
@router.delete("/users/{user_id}")
|
| 268 |
+
async def delete_user(user_id: str, admin: dict = Depends(require_admin)):
|
| 269 |
+
"""Permanently delete a user and all their associated data."""
|
| 270 |
+
sb = get_supabase()
|
| 271 |
+
|
| 272 |
+
target = sb.table("users").select("role, id").eq("id", user_id).execute()
|
| 273 |
+
if not target.data:
|
| 274 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 275 |
+
if target.data[0].get("role") == "admin":
|
| 276 |
+
raise HTTPException(status_code=400, detail="Cannot delete an admin account")
|
| 277 |
+
if user_id == admin["sub"]:
|
| 278 |
+
raise HTTPException(status_code=400, detail="Cannot delete your own account")
|
| 279 |
+
|
| 280 |
+
# Delete only non-financial personal data.
|
| 281 |
+
# Financial records (product_transactions, delivery_earnings, salary_payments,
|
| 282 |
+
# admin_withdrawals, products) are preserved — their user FKs are set to NULL
|
| 283 |
+
# automatically via ON DELETE SET NULL (migration_v10).
|
| 284 |
+
sb.table("wishlist_items").delete().eq("buyer_id", user_id).execute()
|
| 285 |
+
sb.table("cart_items").delete().eq("buyer_id", user_id).execute()
|
| 286 |
+
sb.table("stored_value").delete().eq("user_id", user_id).execute()
|
| 287 |
+
sb.table("user_balances").delete().eq("user_id", user_id).execute()
|
| 288 |
+
sb.table("user_contacts").delete().eq("user_id", user_id).execute()
|
| 289 |
+
|
| 290 |
+
# Finally delete the user — DB cascades/nullifies all remaining FK references
|
| 291 |
+
sb.table("users").delete().eq("id", user_id).execute()
|
| 292 |
+
|
| 293 |
+
return {"message": "User permanently deleted"}
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
@router.get("/transactions", response_model=list[TransactionDetail])
|
| 297 |
+
async def list_transactions(
|
| 298 |
+
search: str = Query("", description="Search by buyer or seller name"),
|
| 299 |
+
txn_type: str = Query("", description="Filter by purchase_type (delivery/walk-in)"),
|
| 300 |
+
status: str = Query("", description="Filter by transaction status"),
|
| 301 |
+
date_range: str = Query("", description="day, week, month, or specific"),
|
| 302 |
+
specific_date: str = Query("", description="YYYY-MM-DD if date_range is specific"),
|
| 303 |
+
admin: dict = Depends(require_admin),
|
| 304 |
+
):
|
| 305 |
+
"""List all product transactions with search and filters support."""
|
| 306 |
+
sb = get_supabase()
|
| 307 |
+
|
| 308 |
+
q = sb.table("product_transactions").select(
|
| 309 |
+
"*, products(title, images)"
|
| 310 |
+
).order("created_at", desc=True)
|
| 311 |
+
|
| 312 |
+
if txn_type:
|
| 313 |
+
q = q.eq("purchase_type", txn_type)
|
| 314 |
+
if status:
|
| 315 |
+
q = q.eq("status", status)
|
| 316 |
+
|
| 317 |
+
# Basic date filters locally since we fetch all matching
|
| 318 |
+
txns = q.execute()
|
| 319 |
+
|
| 320 |
+
if not txns.data:
|
| 321 |
+
return []
|
| 322 |
+
|
| 323 |
+
# Local date filtering
|
| 324 |
+
filtered_data = []
|
| 325 |
+
from datetime import datetime, timedelta
|
| 326 |
+
|
| 327 |
+
today = datetime.now()
|
| 328 |
+
if date_range == "day":
|
| 329 |
+
start = today.replace(hour=0, minute=0, second=0, microsecond=0)
|
| 330 |
+
elif date_range == "week":
|
| 331 |
+
start = today - timedelta(days=today.weekday())
|
| 332 |
+
start = start.replace(hour=0, minute=0, second=0, microsecond=0)
|
| 333 |
+
elif date_range == "month":
|
| 334 |
+
start = today.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
| 335 |
+
elif date_range == "specific" and specific_date:
|
| 336 |
+
try:
|
| 337 |
+
start = datetime.strptime(specific_date, "%Y-%m-%d")
|
| 338 |
+
except:
|
| 339 |
+
start = None
|
| 340 |
+
else:
|
| 341 |
+
start = None
|
| 342 |
+
|
| 343 |
+
for t in txns.data:
|
| 344 |
+
try:
|
| 345 |
+
dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00")).replace(tzinfo=None)
|
| 346 |
+
if start:
|
| 347 |
+
if date_range == "specific":
|
| 348 |
+
# For specific date, must match exactly the day
|
| 349 |
+
if dt.date() != start.date():
|
| 350 |
+
continue
|
| 351 |
+
else:
|
| 352 |
+
if dt < start:
|
| 353 |
+
continue
|
| 354 |
+
filtered_data.append(t)
|
| 355 |
+
except:
|
| 356 |
+
filtered_data.append(t)
|
| 357 |
+
|
| 358 |
+
txns.data = filtered_data
|
| 359 |
+
|
| 360 |
+
# Get all user IDs we need names for
|
| 361 |
+
user_ids = set()
|
| 362 |
+
for t in txns.data:
|
| 363 |
+
user_ids.add(t["buyer_id"])
|
| 364 |
+
user_ids.add(t["seller_id"])
|
| 365 |
+
|
| 366 |
+
users_result = sb.table("users").select("id, full_name, department_id").in_("id", list(user_ids)).execute()
|
| 367 |
+
user_map = {u["id"]: u for u in users_result.data} if users_result.data else {}
|
| 368 |
+
|
| 369 |
+
# Batch-lookup department names for sellers with department_id
|
| 370 |
+
dept_ids = set()
|
| 371 |
+
for u in (users_result.data or []):
|
| 372 |
+
if u.get("department_id"):
|
| 373 |
+
dept_ids.add(u["department_id"])
|
| 374 |
+
|
| 375 |
+
dept_names = {}
|
| 376 |
+
if dept_ids:
|
| 377 |
+
depts = sb.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
|
| 378 |
+
dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
|
| 379 |
+
|
| 380 |
+
def get_display_name(user_id):
|
| 381 |
+
u = user_map.get(user_id)
|
| 382 |
+
if not u:
|
| 383 |
+
return "Unknown"
|
| 384 |
+
dept_id = u.get("department_id")
|
| 385 |
+
if dept_id and dept_id in dept_names:
|
| 386 |
+
return dept_names[dept_id]
|
| 387 |
+
return u.get("full_name", "Unknown")
|
| 388 |
+
|
| 389 |
+
results = []
|
| 390 |
+
for t in txns.data:
|
| 391 |
+
buyer_name = user_map.get(t["buyer_id"], {}).get("full_name", "Unknown")
|
| 392 |
+
seller_name = get_display_name(t["seller_id"])
|
| 393 |
+
product_title = ""
|
| 394 |
+
if t.get("products"):
|
| 395 |
+
product_title = t["products"].get("title", "") if isinstance(t["products"], dict) else ""
|
| 396 |
+
|
| 397 |
+
if search:
|
| 398 |
+
search_lower = search.lower()
|
| 399 |
+
if (search_lower not in buyer_name.lower() and
|
| 400 |
+
search_lower not in seller_name.lower() and
|
| 401 |
+
search_lower not in product_title.lower()):
|
| 402 |
+
continue
|
| 403 |
+
|
| 404 |
+
product_images = []
|
| 405 |
+
if t.get("products") and isinstance(t["products"], dict):
|
| 406 |
+
product_images = t["products"].get("images", []) or []
|
| 407 |
+
|
| 408 |
+
results.append(TransactionDetail(
|
| 409 |
+
id=t["id"],
|
| 410 |
+
buyer_name=buyer_name,
|
| 411 |
+
seller_name=seller_name,
|
| 412 |
+
product_title=product_title,
|
| 413 |
+
quantity=int(t.get("quantity", 1)),
|
| 414 |
+
amount=float(t["amount"]),
|
| 415 |
+
seller_amount=float(t.get("seller_amount", 0)),
|
| 416 |
+
admin_commission=float(t.get("admin_commission", 0)),
|
| 417 |
+
delivery_fee=float(t.get("delivery_fee", 0)),
|
| 418 |
+
status=t["status"],
|
| 419 |
+
purchase_type=t.get("purchase_type", "delivery"),
|
| 420 |
+
product_images=product_images,
|
| 421 |
+
created_at=t["created_at"],
|
| 422 |
+
))
|
| 423 |
+
|
| 424 |
+
return results
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
@router.get("/reports", response_model=ReportsResponse)
|
| 428 |
+
async def admin_reports(admin: dict = Depends(require_admin)):
|
| 429 |
+
"""Detailed admin reports with data for graphs."""
|
| 430 |
+
sb = get_supabase()
|
| 431 |
+
|
| 432 |
+
txns = sb.table("product_transactions").select(
|
| 433 |
+
"*, products(title)"
|
| 434 |
+
).in_("status", ["completed", "delivered"]).order("created_at", desc=True).execute()
|
| 435 |
+
|
| 436 |
+
if not txns.data:
|
| 437 |
+
return ReportsResponse(
|
| 438 |
+
total_revenue=0, total_sales_volume=0, total_orders=0,
|
| 439 |
+
avg_transaction_value=0, daily_income=[], top_sellers=[],
|
| 440 |
+
top_products=[], monthly_income=[],
|
| 441 |
+
)
|
| 442 |
+
|
| 443 |
+
seller_ids = set(t["seller_id"] for t in txns.data)
|
| 444 |
+
users_result = sb.table("users").select("id, full_name, department_id").in_("id", list(seller_ids)).execute()
|
| 445 |
+
user_map = {u["id"]: u for u in users_result.data} if users_result.data else {}
|
| 446 |
+
|
| 447 |
+
# Batch-lookup department names
|
| 448 |
+
dept_ids = set()
|
| 449 |
+
for u in (users_result.data or []):
|
| 450 |
+
if u.get("department_id"):
|
| 451 |
+
dept_ids.add(u["department_id"])
|
| 452 |
+
|
| 453 |
+
dept_names = {}
|
| 454 |
+
if dept_ids:
|
| 455 |
+
depts = sb.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
|
| 456 |
+
dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
|
| 457 |
+
|
| 458 |
+
def get_seller_display_name(seller_id):
|
| 459 |
+
u = user_map.get(seller_id)
|
| 460 |
+
if not u:
|
| 461 |
+
return "Unknown"
|
| 462 |
+
dept_id = u.get("department_id")
|
| 463 |
+
if dept_id and dept_id in dept_names:
|
| 464 |
+
return dept_names[dept_id]
|
| 465 |
+
return u.get("full_name", "Unknown")
|
| 466 |
+
|
| 467 |
+
# Admin income comes from admin_earnings (credited on successful transactions)
|
| 468 |
+
earnings_data = sb.table("admin_earnings").select("amount, created_at").order("created_at", desc=True).execute()
|
| 469 |
+
|
| 470 |
+
total_income = 0
|
| 471 |
+
daily_data = {}
|
| 472 |
+
monthly_data = {}
|
| 473 |
+
for e in (earnings_data.data or []):
|
| 474 |
+
e_amount = float(e["amount"])
|
| 475 |
+
total_income += e_amount
|
| 476 |
+
try:
|
| 477 |
+
dt = datetime.fromisoformat(e["created_at"].replace("Z", "+00:00"))
|
| 478 |
+
day_key = dt.strftime("%Y-%m-%d")
|
| 479 |
+
month_key = dt.strftime("%Y-%m")
|
| 480 |
+
except Exception:
|
| 481 |
+
day_key = e["created_at"][:10]
|
| 482 |
+
month_key = e["created_at"][:7]
|
| 483 |
+
|
| 484 |
+
if day_key not in daily_data:
|
| 485 |
+
daily_data[day_key] = {"income": 0, "count": 0}
|
| 486 |
+
daily_data[day_key]["income"] += e_amount
|
| 487 |
+
daily_data[day_key]["count"] += 1
|
| 488 |
+
|
| 489 |
+
if month_key not in monthly_data:
|
| 490 |
+
monthly_data[month_key] = {"income": 0, "count": 0}
|
| 491 |
+
monthly_data[month_key]["income"] += e_amount
|
| 492 |
+
monthly_data[month_key]["count"] += 1
|
| 493 |
+
|
| 494 |
+
# Sales volume and top sellers/products from transactions
|
| 495 |
+
total_volume = 0
|
| 496 |
+
seller_data = {}
|
| 497 |
+
product_data = {}
|
| 498 |
+
|
| 499 |
+
for t in txns.data:
|
| 500 |
+
amount = float(t["amount"])
|
| 501 |
+
total_volume += amount
|
| 502 |
+
|
| 503 |
+
sid = t["seller_id"]
|
| 504 |
+
if sid not in seller_data:
|
| 505 |
+
seller_data[sid] = {"name": get_seller_display_name(sid), "total": 0, "count": 0}
|
| 506 |
+
seller_data[sid]["total"] += amount
|
| 507 |
+
seller_data[sid]["count"] += 1
|
| 508 |
+
|
| 509 |
+
pid = t["product_id"]
|
| 510 |
+
ptitle = ""
|
| 511 |
+
if t.get("products"):
|
| 512 |
+
ptitle = t["products"].get("title", "") if isinstance(t["products"], dict) else ""
|
| 513 |
+
if pid not in product_data:
|
| 514 |
+
product_data[pid] = {"title": ptitle, "count": 0, "revenue": 0}
|
| 515 |
+
product_data[pid]["count"] += 1
|
| 516 |
+
product_data[pid]["revenue"] += amount
|
| 517 |
+
|
| 518 |
+
daily_income = sorted([
|
| 519 |
+
DailyIncome(date=k, income=round(v["income"], 2), transactions=v["count"])
|
| 520 |
+
for k, v in daily_data.items()
|
| 521 |
+
], key=lambda x: x.date, reverse=True)[:30]
|
| 522 |
+
|
| 523 |
+
monthly_income = sorted([
|
| 524 |
+
DailyIncome(date=k, income=round(v["income"], 2), transactions=v["count"])
|
| 525 |
+
for k, v in monthly_data.items()
|
| 526 |
+
], key=lambda x: x.date, reverse=True)[:12]
|
| 527 |
+
|
| 528 |
+
top_sellers = sorted([
|
| 529 |
+
TopSeller(seller_id=k, seller_name=v["name"], total_sales=round(v["total"], 2), transaction_count=v["count"])
|
| 530 |
+
for k, v in seller_data.items()
|
| 531 |
+
], key=lambda x: x.total_sales, reverse=True)[:10]
|
| 532 |
+
|
| 533 |
+
top_products = sorted([
|
| 534 |
+
TopProduct(product_id=k, product_title=v["title"], times_sold=v["count"], total_revenue=round(v["revenue"], 2))
|
| 535 |
+
for k, v in product_data.items()
|
| 536 |
+
], key=lambda x: x.total_revenue, reverse=True)[:10]
|
| 537 |
+
|
| 538 |
+
avg_val = total_volume / len(txns.data) if txns.data else 0
|
| 539 |
+
|
| 540 |
+
return ReportsResponse(
|
| 541 |
+
total_revenue=round(total_income, 2),
|
| 542 |
+
total_sales_volume=round(total_volume, 2),
|
| 543 |
+
total_orders=len(txns.data),
|
| 544 |
+
avg_transaction_value=round(avg_val, 2),
|
| 545 |
+
daily_income=daily_income,
|
| 546 |
+
top_sellers=top_sellers,
|
| 547 |
+
top_products=top_products,
|
| 548 |
+
monthly_income=monthly_income,
|
| 549 |
+
)
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
# --- Product Management ---
|
| 553 |
+
|
| 554 |
+
@router.get("/products", response_model=list[AdminProductResponse])
|
| 555 |
+
async def list_admin_products(
|
| 556 |
+
search: str = Query("", description="Search by product title"),
|
| 557 |
+
admin: dict = Depends(require_admin),
|
| 558 |
+
):
|
| 559 |
+
"""List all products for admin management."""
|
| 560 |
+
sb = get_supabase()
|
| 561 |
+
|
| 562 |
+
if search:
|
| 563 |
+
products = sb.table("products").select("*, users!products_seller_id_fkey(full_name, department_id)").eq(
|
| 564 |
+
"is_active", True
|
| 565 |
+
).ilike("title", f"%{search}%").order("created_at", desc=True).limit(200).execute()
|
| 566 |
+
else:
|
| 567 |
+
products = sb.table("products").select("*, users!products_seller_id_fkey(full_name, department_id)").eq(
|
| 568 |
+
"is_active", True
|
| 569 |
+
).order("created_at", desc=True).limit(200).execute()
|
| 570 |
+
|
| 571 |
+
# Batch-lookup department names
|
| 572 |
+
dept_ids = set()
|
| 573 |
+
for p in products.data:
|
| 574 |
+
user_info = p.get("users") or {}
|
| 575 |
+
dept_id = user_info.get("department_id")
|
| 576 |
+
if dept_id:
|
| 577 |
+
dept_ids.add(dept_id)
|
| 578 |
+
|
| 579 |
+
dept_names = {}
|
| 580 |
+
if dept_ids:
|
| 581 |
+
depts = sb.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
|
| 582 |
+
dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
|
| 583 |
+
|
| 584 |
+
results = []
|
| 585 |
+
for p in products.data:
|
| 586 |
+
user_info = p.get("users") or {}
|
| 587 |
+
dept_id = user_info.get("department_id")
|
| 588 |
+
if dept_id and dept_id in dept_names:
|
| 589 |
+
seller_name = dept_names[dept_id]
|
| 590 |
+
else:
|
| 591 |
+
seller_name = user_info.get("full_name", "")
|
| 592 |
+
|
| 593 |
+
results.append(AdminProductResponse(
|
| 594 |
+
id=p["id"],
|
| 595 |
+
seller_id=p["seller_id"],
|
| 596 |
+
seller_name=seller_name,
|
| 597 |
+
title=p["title"],
|
| 598 |
+
description=p.get("description", ""),
|
| 599 |
+
price=float(p["price"]),
|
| 600 |
+
stock=int(p.get("stock", 0)),
|
| 601 |
+
images=p.get("images") or [],
|
| 602 |
+
is_active=p["is_active"],
|
| 603 |
+
created_at=p["created_at"],
|
| 604 |
+
department_id=dept_id,
|
| 605 |
+
department_name=dept_names.get(dept_id) if dept_id else None,
|
| 606 |
+
))
|
| 607 |
+
return results
|
| 608 |
+
|
| 609 |
+
|
| 610 |
+
@router.put("/products/{product_id}", response_model=AdminProductResponse)
|
| 611 |
+
async def admin_update_product(
|
| 612 |
+
product_id: str,
|
| 613 |
+
req: AdminUpdateProductRequest,
|
| 614 |
+
admin: dict = Depends(require_admin),
|
| 615 |
+
):
|
| 616 |
+
"""Admin can update product title, price, stock, and active status."""
|
| 617 |
+
sb = get_supabase()
|
| 618 |
+
|
| 619 |
+
existing = sb.table("products").select("id").eq("id", product_id).execute()
|
| 620 |
+
if not existing.data:
|
| 621 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 622 |
+
|
| 623 |
+
update_data = {k: v for k, v in req.model_dump().items() if v is not None}
|
| 624 |
+
if not update_data:
|
| 625 |
+
raise HTTPException(status_code=400, detail="No fields to update")
|
| 626 |
+
|
| 627 |
+
result = sb.table("products").update(update_data).eq("id", product_id).execute()
|
| 628 |
+
if not result.data:
|
| 629 |
+
raise HTTPException(status_code=500, detail="Failed to update product")
|
| 630 |
+
|
| 631 |
+
# Re-fetch with seller name
|
| 632 |
+
p_result = sb.table("products").select("*, users!products_seller_id_fkey(full_name, department_id)").eq("id", product_id).execute()
|
| 633 |
+
p = p_result.data[0]
|
| 634 |
+
|
| 635 |
+
user_info = p.get("users") or {}
|
| 636 |
+
dept_id = user_info.get("department_id")
|
| 637 |
+
seller_name = user_info.get("full_name", "")
|
| 638 |
+
if dept_id:
|
| 639 |
+
dept_resp = sb.table("departments").select("name").eq("id", dept_id).execute()
|
| 640 |
+
if dept_resp.data:
|
| 641 |
+
seller_name = dept_resp.data[0]["name"]
|
| 642 |
+
|
| 643 |
+
return AdminProductResponse(
|
| 644 |
+
id=p["id"],
|
| 645 |
+
seller_id=p["seller_id"],
|
| 646 |
+
seller_name=seller_name,
|
| 647 |
+
title=p["title"],
|
| 648 |
+
description=p.get("description", ""),
|
| 649 |
+
price=float(p["price"]),
|
| 650 |
+
stock=int(p.get("stock", 0)),
|
| 651 |
+
images=p.get("images") or [],
|
| 652 |
+
is_active=p["is_active"],
|
| 653 |
+
created_at=p["created_at"],
|
| 654 |
+
department_id=dept_id,
|
| 655 |
+
department_name=seller_name if dept_id else None,
|
| 656 |
+
)
|
| 657 |
+
|
| 658 |
+
|
| 659 |
+
# --- User Detail (Clickable Panel) ---
|
| 660 |
+
|
| 661 |
+
@router.get("/users/{user_id}/detail")
|
| 662 |
+
async def get_user_detail(user_id: str, admin: dict = Depends(require_admin)):
|
| 663 |
+
"""Get full user detail for admin slide panel: report, history, transactions."""
|
| 664 |
+
sb = get_supabase()
|
| 665 |
+
|
| 666 |
+
# 1. User info
|
| 667 |
+
user_resp = sb.table("users").select("*, user_balances(balance), user_contacts(contact_number)").eq("id", user_id).execute()
|
| 668 |
+
if not user_resp.data:
|
| 669 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 670 |
+
|
| 671 |
+
u = user_resp.data[0]
|
| 672 |
+
bal = 0.0
|
| 673 |
+
if u.get("user_balances"):
|
| 674 |
+
if isinstance(u["user_balances"], list) and len(u["user_balances"]) > 0:
|
| 675 |
+
bal = float(u["user_balances"][0].get("balance", 0))
|
| 676 |
+
elif isinstance(u["user_balances"], dict):
|
| 677 |
+
bal = float(u["user_balances"].get("balance", 0))
|
| 678 |
+
|
| 679 |
+
contact = ""
|
| 680 |
+
if u.get("user_contacts"):
|
| 681 |
+
if isinstance(u["user_contacts"], list) and len(u["user_contacts"]) > 0:
|
| 682 |
+
contact = u["user_contacts"][0].get("contact_number", "")
|
| 683 |
+
elif isinstance(u["user_contacts"], dict):
|
| 684 |
+
contact = u["user_contacts"].get("contact_number", "")
|
| 685 |
+
|
| 686 |
+
# 2. Transactions — for sellers, also fetch by assigned_staff_id for full coverage
|
| 687 |
+
bought = sb.table("product_transactions").select("*, products(title, images)").eq("buyer_id", user_id).order("created_at", desc=True).limit(50).execute()
|
| 688 |
+
sold = sb.table("product_transactions").select("*, products(title, images)").eq("seller_id", user_id).order("created_at", desc=True).limit(50).execute()
|
| 689 |
+
delivered = sb.table("product_transactions").select("*, products(title, images)").eq("delivery_user_id", user_id).order("created_at", desc=True).limit(50).execute()
|
| 690 |
+
|
| 691 |
+
# For sellers, also fetch transactions assigned to them
|
| 692 |
+
assigned_txns_data = []
|
| 693 |
+
if u["role"] == "seller":
|
| 694 |
+
try:
|
| 695 |
+
assigned = sb.table("product_transactions").select("*, products(title, images)").eq(
|
| 696 |
+
"assigned_staff_id", user_id
|
| 697 |
+
).order("created_at", desc=True).limit(100).execute()
|
| 698 |
+
assigned_txns_data = assigned.data or []
|
| 699 |
+
except Exception:
|
| 700 |
+
assigned_txns_data = []
|
| 701 |
+
|
| 702 |
+
# Merge and deduplicate
|
| 703 |
+
all_txns = (bought.data or []) + (sold.data or []) + (delivered.data or []) + assigned_txns_data
|
| 704 |
+
seen = set()
|
| 705 |
+
transactions = []
|
| 706 |
+
raw_txns = [] # Keep raw data for seller infographics
|
| 707 |
+
for t in all_txns:
|
| 708 |
+
if t["id"] not in seen:
|
| 709 |
+
seen.add(t["id"])
|
| 710 |
+
raw_txns.append(t)
|
| 711 |
+
transactions.append({
|
| 712 |
+
"id": t["id"],
|
| 713 |
+
"product_title": (t.get("products") or {}).get("title", ""),
|
| 714 |
+
"amount": float(t["amount"]),
|
| 715 |
+
"quantity": int(t.get("quantity", 1)),
|
| 716 |
+
"status": t["status"],
|
| 717 |
+
"role_in_txn": "buyer" if t["buyer_id"] == user_id else ("seller" if t["seller_id"] == user_id else "delivery"),
|
| 718 |
+
"created_at": t["created_at"],
|
| 719 |
+
})
|
| 720 |
+
transactions.sort(key=lambda x: x["created_at"], reverse=True)
|
| 721 |
+
raw_txns.sort(key=lambda x: x["created_at"], reverse=True)
|
| 722 |
+
|
| 723 |
+
# 3. Report: daily/weekly/monthly breakdown
|
| 724 |
+
daily_data = {}
|
| 725 |
+
monthly_data = {}
|
| 726 |
+
today_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
|
| 727 |
+
completed_statuses = ("completed", "delivered")
|
| 728 |
+
|
| 729 |
+
# Seller-specific counters
|
| 730 |
+
completed_count = 0
|
| 731 |
+
total_items = 0
|
| 732 |
+
today_tasks = 0
|
| 733 |
+
delivery_items_today = 0
|
| 734 |
+
products_handled = {}
|
| 735 |
+
|
| 736 |
+
for t in raw_txns:
|
| 737 |
+
amt = float(t["amount"])
|
| 738 |
+
qty = int(t.get("quantity", 1))
|
| 739 |
+
status = t["status"]
|
| 740 |
+
purchase_type = t.get("purchase_type", "delivery")
|
| 741 |
+
is_completed = status in completed_statuses
|
| 742 |
+
|
| 743 |
+
try:
|
| 744 |
+
dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
|
| 745 |
+
day_key = dt.strftime("%Y-%m-%d")
|
| 746 |
+
month_key = dt.strftime("%Y-%m")
|
| 747 |
+
except Exception:
|
| 748 |
+
day_key = t["created_at"][:10]
|
| 749 |
+
month_key = t["created_at"][:7]
|
| 750 |
+
|
| 751 |
+
if day_key not in daily_data:
|
| 752 |
+
daily_data[day_key] = {"amount": 0, "count": 0, "delivery_items": 0}
|
| 753 |
+
daily_data[day_key]["amount"] += amt
|
| 754 |
+
daily_data[day_key]["count"] += 1
|
| 755 |
+
if is_completed:
|
| 756 |
+
daily_data[day_key]["delivery_items"] += qty
|
| 757 |
+
|
| 758 |
+
if month_key not in monthly_data:
|
| 759 |
+
monthly_data[month_key] = {"amount": 0, "count": 0}
|
| 760 |
+
monthly_data[month_key]["amount"] += amt
|
| 761 |
+
monthly_data[month_key]["count"] += 1
|
| 762 |
+
|
| 763 |
+
# Seller-specific metrics
|
| 764 |
+
if u["role"] == "seller" and is_completed:
|
| 765 |
+
completed_count += 1
|
| 766 |
+
total_items += qty
|
| 767 |
+
if day_key == today_str:
|
| 768 |
+
today_tasks += 1
|
| 769 |
+
delivery_items_today += qty
|
| 770 |
+
|
| 771 |
+
# Track recent products handled
|
| 772 |
+
pid = t["product_id"]
|
| 773 |
+
prod_info = t.get("products") or {}
|
| 774 |
+
if pid not in products_handled:
|
| 775 |
+
products_handled[pid] = {
|
| 776 |
+
"product_id": pid,
|
| 777 |
+
"product_title": prod_info.get("title", ""),
|
| 778 |
+
"product_image": ((prod_info.get("images") or [""])[0]) if prod_info.get("images") else "",
|
| 779 |
+
"quantity_processed": 0,
|
| 780 |
+
"last_handled": t["created_at"],
|
| 781 |
+
"purchase_type": purchase_type,
|
| 782 |
+
}
|
| 783 |
+
products_handled[pid]["quantity_processed"] += qty
|
| 784 |
+
|
| 785 |
+
daily = sorted(
|
| 786 |
+
[{"date": k, "amount": round(v["amount"], 2), "count": v["count"],
|
| 787 |
+
"delivery_items": v["delivery_items"]}
|
| 788 |
+
for k, v in daily_data.items()],
|
| 789 |
+
key=lambda x: x["date"], reverse=True
|
| 790 |
+
)[:30]
|
| 791 |
+
|
| 792 |
+
monthly = sorted(
|
| 793 |
+
[{"date": k, "amount": round(v["amount"], 2), "count": v["count"]} for k, v in monthly_data.items()],
|
| 794 |
+
key=lambda x: x["date"], reverse=True
|
| 795 |
+
)[:12]
|
| 796 |
+
|
| 797 |
+
recent_products_handled = sorted(
|
| 798 |
+
products_handled.values(), key=lambda x: x["last_handled"], reverse=True
|
| 799 |
+
)[:20] if u["role"] == "seller" else []
|
| 800 |
+
|
| 801 |
+
# 4. SVF history
|
| 802 |
+
svf = sb.table("stored_value").select("*").eq("user_id", user_id).order("created_at", desc=True).limit(50).execute()
|
| 803 |
+
|
| 804 |
+
# 5. Seller products (if user is a seller)
|
| 805 |
+
seller_products = []
|
| 806 |
+
if u["role"] == "seller":
|
| 807 |
+
prods = sb.table("products").select("id, title, price, stock, images, is_active, created_at").eq("seller_id", user_id).order("created_at", desc=True).limit(50).execute()
|
| 808 |
+
seller_products = [
|
| 809 |
+
{
|
| 810 |
+
"id": p["id"],
|
| 811 |
+
"title": p["title"],
|
| 812 |
+
"price": float(p["price"]),
|
| 813 |
+
"stock": int(p.get("stock", 0)),
|
| 814 |
+
"image_url": (p.get("images") or [""])[0] if p.get("images") else "",
|
| 815 |
+
"is_active": p["is_active"],
|
| 816 |
+
"created_at": p["created_at"],
|
| 817 |
+
}
|
| 818 |
+
for p in (prods.data or [])
|
| 819 |
+
]
|
| 820 |
+
|
| 821 |
+
return {
|
| 822 |
+
"user": {
|
| 823 |
+
"id": u["id"],
|
| 824 |
+
"email": u["email"],
|
| 825 |
+
"full_name": u["full_name"],
|
| 826 |
+
"role": u["role"],
|
| 827 |
+
"is_banned": u.get("is_banned", False),
|
| 828 |
+
"balance": bal,
|
| 829 |
+
"contact_number": contact,
|
| 830 |
+
"created_at": u["created_at"],
|
| 831 |
+
},
|
| 832 |
+
"report": {
|
| 833 |
+
"total_transactions": len(transactions),
|
| 834 |
+
"total_amount": round(sum(t["amount"] for t in transactions), 2),
|
| 835 |
+
"total_completed_tasks": completed_count,
|
| 836 |
+
"total_items_processed": total_items,
|
| 837 |
+
"tasks_completed_today": today_tasks,
|
| 838 |
+
"delivery_items_today": delivery_items_today,
|
| 839 |
+
"daily": daily,
|
| 840 |
+
"monthly": monthly,
|
| 841 |
+
},
|
| 842 |
+
"transactions": transactions[:50],
|
| 843 |
+
"seller_products": seller_products,
|
| 844 |
+
"recent_products_handled": recent_products_handled,
|
| 845 |
+
"svf_history": [
|
| 846 |
+
{
|
| 847 |
+
"id": s["id"],
|
| 848 |
+
"type": s["transaction_type"],
|
| 849 |
+
"amount": float(s["amount"]),
|
| 850 |
+
"created_at": s["created_at"],
|
| 851 |
+
}
|
| 852 |
+
for s in (svf.data or [])
|
| 853 |
+
],
|
| 854 |
+
}
|
| 855 |
+
|
| 856 |
+
|
| 857 |
+
# --- Admin: Product approval (pending / approved / unapproved) ---
|
| 858 |
+
|
| 859 |
+
@router.get("/pending-products")
|
| 860 |
+
async def admin_get_pending_products(admin: dict = Depends(require_admin)):
|
| 861 |
+
"""Get all products with status 'pending', with seller info."""
|
| 862 |
+
sb = get_supabase()
|
| 863 |
+
prods = sb.table("products").select("*").eq("status", "pending").order("created_at", desc=True).execute()
|
| 864 |
+
|
| 865 |
+
results = []
|
| 866 |
+
for p in (prods.data or []):
|
| 867 |
+
seller = sb.table("users").select("full_name, email, department_id").eq("id", p["seller_id"]).execute()
|
| 868 |
+
seller_info = seller.data[0] if seller.data else {}
|
| 869 |
+
|
| 870 |
+
seller_name = seller_info.get("full_name", "Unknown")
|
| 871 |
+
dept_id = seller_info.get("department_id")
|
| 872 |
+
if dept_id:
|
| 873 |
+
dept_resp = sb.table("departments").select("name").eq("id", dept_id).execute()
|
| 874 |
+
if dept_resp.data:
|
| 875 |
+
seller_name = dept_resp.data[0]["name"]
|
| 876 |
+
|
| 877 |
+
results.append({
|
| 878 |
+
"id": p["id"],
|
| 879 |
+
"title": p["title"],
|
| 880 |
+
"description": p.get("description", ""),
|
| 881 |
+
"price": float(p["price"]),
|
| 882 |
+
"stock": p["stock"],
|
| 883 |
+
"images": p.get("images", []),
|
| 884 |
+
"seller_id": p["seller_id"],
|
| 885 |
+
"seller_name": seller_name,
|
| 886 |
+
"seller_email": seller_info.get("email", ""),
|
| 887 |
+
"status": p["status"],
|
| 888 |
+
"created_at": p["created_at"],
|
| 889 |
+
})
|
| 890 |
+
|
| 891 |
+
return results
|
| 892 |
+
|
| 893 |
+
|
| 894 |
+
@router.put("/products/{product_id}/approve")
|
| 895 |
+
async def admin_approve_product(
|
| 896 |
+
product_id: str,
|
| 897 |
+
admin: dict = Depends(require_admin),
|
| 898 |
+
):
|
| 899 |
+
"""Approve a product (pending → approved) so it can be listed and sold."""
|
| 900 |
+
sb = get_supabase()
|
| 901 |
+
|
| 902 |
+
prod = sb.table("products").select("status").eq("id", product_id).execute()
|
| 903 |
+
if not prod.data:
|
| 904 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 905 |
+
|
| 906 |
+
if prod.data[0]["status"] != "pending":
|
| 907 |
+
raise HTTPException(status_code=400, detail=f"Can only approve products with status 'pending'. Current: {prod.data[0]['status']}")
|
| 908 |
+
|
| 909 |
+
sb.table("products").update({"status": "approved"}).eq("id", product_id).execute()
|
| 910 |
+
return {"message": "Product approved"}
|
| 911 |
+
|
| 912 |
+
|
| 913 |
+
@router.put("/products/{product_id}/unapprove")
|
| 914 |
+
async def admin_unapprove_product(
|
| 915 |
+
product_id: str,
|
| 916 |
+
admin: dict = Depends(require_admin),
|
| 917 |
+
):
|
| 918 |
+
"""Unapprove a product (pending → unapproved)."""
|
| 919 |
+
sb = get_supabase()
|
| 920 |
+
|
| 921 |
+
prod = sb.table("products").select("status").eq("id", product_id).execute()
|
| 922 |
+
if not prod.data:
|
| 923 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 924 |
+
|
| 925 |
+
if prod.data[0]["status"] != "pending":
|
| 926 |
+
raise HTTPException(status_code=400, detail=f"Can only unapprove products with status 'pending'. Current: {prod.data[0]['status']}")
|
| 927 |
+
|
| 928 |
+
sb.table("products").update({"status": "unapproved"}).eq("id", product_id).execute()
|
| 929 |
+
return {"message": "Product unapproved"}
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
# --- Delivery User Registration ---
|
| 933 |
+
|
| 934 |
+
class DeliveryRegisterRequest(BaseModel):
    """Payload for admin-initiated registration of a delivery user.

    All four fields are required; uniqueness of name, email and contact
    number is enforced by the registration handler, not here.
    """

    full_name: str
    email: str
    password: str
    contact_number: str
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
@router.post("/delivery/register")
|
| 942 |
+
async def admin_register_delivery(
|
| 943 |
+
req: DeliveryRegisterRequest,
|
| 944 |
+
admin: dict = Depends(require_admin),
|
| 945 |
+
):
|
| 946 |
+
"""Admin-only: register a new delivery user with unique name, email, and contact."""
|
| 947 |
+
import bcrypt
|
| 948 |
+
import traceback
|
| 949 |
+
|
| 950 |
+
try:
|
| 951 |
+
sb = get_supabase()
|
| 952 |
+
|
| 953 |
+
# Check unique email
|
| 954 |
+
existing_email = sb.table("users").select("id").eq("email", req.email).execute()
|
| 955 |
+
if existing_email.data:
|
| 956 |
+
raise HTTPException(status_code=400, detail="Email already registered")
|
| 957 |
+
|
| 958 |
+
# Check unique full_name
|
| 959 |
+
existing_name = sb.table("users").select("id").eq("full_name", req.full_name).execute()
|
| 960 |
+
if existing_name.data:
|
| 961 |
+
raise HTTPException(status_code=400, detail="Full name already taken")
|
| 962 |
+
|
| 963 |
+
# Check unique contact_number
|
| 964 |
+
existing_contact = sb.table("user_contacts").select("user_id").eq("contact_number", req.contact_number).execute()
|
| 965 |
+
if existing_contact.data:
|
| 966 |
+
raise HTTPException(status_code=400, detail="Contact number already registered")
|
| 967 |
+
|
| 968 |
+
# Hash password
|
| 969 |
+
password_hash = bcrypt.hashpw(req.password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
|
| 970 |
+
|
| 971 |
+
# Create user with delivery role
|
| 972 |
+
result = sb.table("users").insert({
|
| 973 |
+
"email": req.email,
|
| 974 |
+
"password_hash": password_hash,
|
| 975 |
+
"full_name": req.full_name,
|
| 976 |
+
"role": "delivery",
|
| 977 |
+
"is_banned": False,
|
| 978 |
+
}).execute()
|
| 979 |
+
|
| 980 |
+
if not result.data:
|
| 981 |
+
raise HTTPException(status_code=500, detail="Failed to create delivery user")
|
| 982 |
+
|
| 983 |
+
user = result.data[0]
|
| 984 |
+
|
| 985 |
+
# Create balance
|
| 986 |
+
sb.table("user_balances").insert({"user_id": user["id"], "balance": 0.00}).execute()
|
| 987 |
+
|
| 988 |
+
# Create contact
|
| 989 |
+
sb.table("user_contacts").insert({"user_id": user["id"], "contact_number": req.contact_number}).execute()
|
| 990 |
+
|
| 991 |
+
return {
|
| 992 |
+
"message": "Delivery user registered successfully",
|
| 993 |
+
"user": {
|
| 994 |
+
"id": user["id"],
|
| 995 |
+
"full_name": user["full_name"],
|
| 996 |
+
"email": user["email"],
|
| 997 |
+
"role": "delivery",
|
| 998 |
+
"contact_number": req.contact_number,
|
| 999 |
+
},
|
| 1000 |
+
}
|
| 1001 |
+
|
| 1002 |
+
except HTTPException:
|
| 1003 |
+
raise # Re-raise HTTP exceptions as-is
|
| 1004 |
+
except Exception as e:
|
| 1005 |
+
print(f"[DeliveryRegister] ERROR: {e}")
|
| 1006 |
+
traceback.print_exc()
|
| 1007 |
+
raise HTTPException(status_code=500, detail=f"Registration failed: {str(e)}")
|
| 1008 |
+
|
| 1009 |
+
|
| 1010 |
+
# --- Department CRUD ---
|
| 1011 |
+
|
| 1012 |
+
class DepartmentCreateRequest(BaseModel):
    """Payload for creating a department; name is required, description optional."""

    name: str
    description: str = ""
|
| 1015 |
+
|
| 1016 |
+
|
| 1017 |
+
class DepartmentUpdateRequest(BaseModel):
    """Partial-update payload for a department; None fields are left unchanged."""

    name: Optional[str] = None
    description: Optional[str] = None
|
| 1020 |
+
|
| 1021 |
+
|
| 1022 |
+
@router.post("/departments")
|
| 1023 |
+
async def create_department(req: DepartmentCreateRequest, admin: dict = Depends(require_admin)):
|
| 1024 |
+
"""Create a new department."""
|
| 1025 |
+
sb = get_supabase()
|
| 1026 |
+
|
| 1027 |
+
existing = sb.table("departments").select("id").eq("name", req.name).execute()
|
| 1028 |
+
if existing.data:
|
| 1029 |
+
raise HTTPException(status_code=400, detail="Department name already exists")
|
| 1030 |
+
|
| 1031 |
+
result = sb.table("departments").insert({
|
| 1032 |
+
"name": req.name,
|
| 1033 |
+
"description": req.description,
|
| 1034 |
+
}).execute()
|
| 1035 |
+
|
| 1036 |
+
if not result.data:
|
| 1037 |
+
raise HTTPException(status_code=500, detail="Failed to create department")
|
| 1038 |
+
|
| 1039 |
+
return {"message": "Department created", "department": result.data[0]}
|
| 1040 |
+
|
| 1041 |
+
|
| 1042 |
+
@router.get("/departments")
|
| 1043 |
+
async def list_departments(admin: dict = Depends(require_admin)):
|
| 1044 |
+
"""List all departments with manager info and staff count."""
|
| 1045 |
+
sb = get_supabase()
|
| 1046 |
+
|
| 1047 |
+
depts = sb.table("departments").select("*").order("created_at", desc=True).execute()
|
| 1048 |
+
|
| 1049 |
+
results = []
|
| 1050 |
+
for d in (depts.data or []):
|
| 1051 |
+
# Get manager name
|
| 1052 |
+
manager_name = ""
|
| 1053 |
+
if d.get("manager_id"):
|
| 1054 |
+
mgr = sb.table("users").select("full_name").eq("id", d["manager_id"]).execute()
|
| 1055 |
+
if mgr.data:
|
| 1056 |
+
manager_name = mgr.data[0]["full_name"]
|
| 1057 |
+
|
| 1058 |
+
# Staff count
|
| 1059 |
+
staff = sb.table("users").select("id", count="exact").eq("department_id", d["id"]).eq("role", "seller").execute()
|
| 1060 |
+
|
| 1061 |
+
# Products count (via staff + manager)
|
| 1062 |
+
staff_ids_result = sb.table("users").select("id").eq("department_id", d["id"]).eq("role", "seller").execute()
|
| 1063 |
+
staff_ids = [s["id"] for s in (staff_ids_result.data or [])]
|
| 1064 |
+
# Include manager's own products/transactions
|
| 1065 |
+
if d.get("manager_id") and d["manager_id"] not in staff_ids:
|
| 1066 |
+
staff_ids.append(d["manager_id"])
|
| 1067 |
+
|
| 1068 |
+
product_count = 0
|
| 1069 |
+
low_stock_count = 0
|
| 1070 |
+
if staff_ids:
|
| 1071 |
+
prods = sb.table("products").select("id, stock").in_("seller_id", staff_ids).eq("is_active", True).execute()
|
| 1072 |
+
product_count = len(prods.data) if prods.data else 0
|
| 1073 |
+
low_stock_count = sum(1 for p in (prods.data or []) if int(p.get("stock", 0)) < 5)
|
| 1074 |
+
|
| 1075 |
+
# Revenue and order counts from completed transactions
|
| 1076 |
+
total_revenue = 0
|
| 1077 |
+
total_orders = 0
|
| 1078 |
+
delivery_orders = 0
|
| 1079 |
+
if staff_ids:
|
| 1080 |
+
txns = sb.table("product_transactions").select(
|
| 1081 |
+
"seller_amount"
|
| 1082 |
+
).in_("seller_id", staff_ids).in_("status", ["delivered", "completed"]).execute()
|
| 1083 |
+
for t in (txns.data or []):
|
| 1084 |
+
total_revenue += float(t.get("seller_amount", 0))
|
| 1085 |
+
total_orders += 1
|
| 1086 |
+
delivery_orders += 1
|
| 1087 |
+
|
| 1088 |
+
results.append({
|
| 1089 |
+
"id": d["id"],
|
| 1090 |
+
"name": d["name"],
|
| 1091 |
+
"description": d.get("description", ""),
|
| 1092 |
+
"manager_id": d.get("manager_id"),
|
| 1093 |
+
"manager_name": manager_name,
|
| 1094 |
+
"staff_count": staff.count or 0,
|
| 1095 |
+
"product_count": product_count,
|
| 1096 |
+
"low_stock_count": low_stock_count,
|
| 1097 |
+
"total_revenue": round(total_revenue, 2),
|
| 1098 |
+
"total_orders": total_orders,
|
| 1099 |
+
"delivery_orders": delivery_orders,
|
| 1100 |
+
"created_at": d["created_at"],
|
| 1101 |
+
})
|
| 1102 |
+
|
| 1103 |
+
return results
|
| 1104 |
+
|
| 1105 |
+
|
| 1106 |
+
@router.get("/departments/{dept_id}")
|
| 1107 |
+
async def get_department_detail(dept_id: str, admin: dict = Depends(require_admin)):
|
| 1108 |
+
"""Get detailed department info with sales data for graphs."""
|
| 1109 |
+
sb = get_supabase()
|
| 1110 |
+
|
| 1111 |
+
dept = sb.table("departments").select("*").eq("id", dept_id).execute()
|
| 1112 |
+
if not dept.data:
|
| 1113 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 1114 |
+
|
| 1115 |
+
d = dept.data[0]
|
| 1116 |
+
|
| 1117 |
+
# Manager info
|
| 1118 |
+
manager_name = ""
|
| 1119 |
+
if d.get("manager_id"):
|
| 1120 |
+
mgr = sb.table("users").select("full_name, email").eq("id", d["manager_id"]).execute()
|
| 1121 |
+
if mgr.data:
|
| 1122 |
+
manager_name = mgr.data[0]["full_name"]
|
| 1123 |
+
|
| 1124 |
+
# Staff in department
|
| 1125 |
+
staff_result = sb.table("users").select("id, full_name, email, is_banned, created_at").eq(
|
| 1126 |
+
"department_id", dept_id
|
| 1127 |
+
).eq("role", "seller").order("created_at", desc=True).execute()
|
| 1128 |
+
staff_ids = [s["id"] for s in (staff_result.data or [])]
|
| 1129 |
+
# Include manager's own products/transactions
|
| 1130 |
+
if d.get("manager_id") and d["manager_id"] not in staff_ids:
|
| 1131 |
+
staff_ids.append(d["manager_id"])
|
| 1132 |
+
|
| 1133 |
+
# Products with revenue data
|
| 1134 |
+
product_count = 0
|
| 1135 |
+
low_stock_count = 0
|
| 1136 |
+
department_products = []
|
| 1137 |
+
if staff_ids:
|
| 1138 |
+
prods = sb.table("products").select("id, title, images, price, stock, is_active").in_("seller_id", staff_ids).eq("is_active", True).execute()
|
| 1139 |
+
product_count = len(prods.data) if prods.data else 0
|
| 1140 |
+
low_stock_count = sum(1 for p in (prods.data or []) if int(p.get("stock", 0)) < 5)
|
| 1141 |
+
|
| 1142 |
+
# Get revenue per product from completed transactions
|
| 1143 |
+
if prods.data:
|
| 1144 |
+
prod_ids = [p["id"] for p in prods.data]
|
| 1145 |
+
prod_txns = sb.table("product_transactions").select(
|
| 1146 |
+
"product_id, amount"
|
| 1147 |
+
).in_("product_id", prod_ids).in_("status", ["delivered", "completed"]).execute()
|
| 1148 |
+
prod_revenue = {}
|
| 1149 |
+
for pt in (prod_txns.data or []):
|
| 1150 |
+
pid = pt["product_id"]
|
| 1151 |
+
prod_revenue[pid] = prod_revenue.get(pid, 0) + float(pt["amount"])
|
| 1152 |
+
|
| 1153 |
+
for p in prods.data:
|
| 1154 |
+
department_products.append({
|
| 1155 |
+
"id": p["id"],
|
| 1156 |
+
"title": p["title"],
|
| 1157 |
+
"images": p.get("images", []),
|
| 1158 |
+
"price": float(p["price"]),
|
| 1159 |
+
"stock": int(p.get("stock", 0)),
|
| 1160 |
+
"total_revenue": round(prod_revenue.get(p["id"], 0), 2),
|
| 1161 |
+
})
|
| 1162 |
+
# Sort by revenue descending
|
| 1163 |
+
department_products.sort(key=lambda x: x["total_revenue"], reverse=True)
|
| 1164 |
+
|
| 1165 |
+
# Pending restock requests for this department (include in-delivery)
|
| 1166 |
+
pending_restocks = []
|
| 1167 |
+
restock_result = sb.table("restock_requests").select(
|
| 1168 |
+
"*, products(title, images, stock)"
|
| 1169 |
+
).eq("department_id", dept_id).in_("status", ["pending_manager", "approved_manager", "accepted_delivery", "in_transit"]).order("created_at", desc=True).limit(20).execute()
|
| 1170 |
+
if restock_result.data:
|
| 1171 |
+
rs_staff_ids = set(r["staff_id"] for r in restock_result.data)
|
| 1172 |
+
# Also collect delivery user IDs
|
| 1173 |
+
rs_delivery_ids = set(r["delivery_user_id"] for r in restock_result.data if r.get("delivery_user_id"))
|
| 1174 |
+
all_user_ids = rs_staff_ids | rs_delivery_ids
|
| 1175 |
+
rs_users = sb.table("users").select("id, full_name, role").in_("id", list(all_user_ids)).execute() if all_user_ids else None
|
| 1176 |
+
rs_user_map = {u["id"]: {"name": u["full_name"], "role": u.get("role", "")} for u in (rs_users.data or [])} if rs_users else {}
|
| 1177 |
+
for r in restock_result.data:
|
| 1178 |
+
prod_info = r.get("products") or {}
|
| 1179 |
+
requester = rs_user_map.get(r["staff_id"], {"name": "Unknown", "role": ""})
|
| 1180 |
+
delivery_info = rs_user_map.get(r.get("delivery_user_id", ""), {"name": "", "role": ""})
|
| 1181 |
+
pending_restocks.append({
|
| 1182 |
+
"id": r["id"],
|
| 1183 |
+
"product_title": prod_info.get("title", ""),
|
| 1184 |
+
"product_images": prod_info.get("images", []),
|
| 1185 |
+
"current_stock": int(prod_info.get("stock", 0)),
|
| 1186 |
+
"requested_quantity": r["requested_quantity"],
|
| 1187 |
+
"approved_quantity": r.get("approved_quantity"),
|
| 1188 |
+
"status": r["status"],
|
| 1189 |
+
"requested_by": requester["name"],
|
| 1190 |
+
"requested_by_role": requester["role"],
|
| 1191 |
+
"delivery_user_name": delivery_info["name"],
|
| 1192 |
+
"created_at": r["created_at"],
|
| 1193 |
+
})
|
| 1194 |
+
|
| 1195 |
+
# Transaction data for sales graphs
|
| 1196 |
+
daily_sales = {}
|
| 1197 |
+
weekly_sales = {}
|
| 1198 |
+
monthly_sales = {}
|
| 1199 |
+
delivery_earnings = {}
|
| 1200 |
+
total_revenue = 0
|
| 1201 |
+
total_orders = 0
|
| 1202 |
+
delivery_order_count = 0
|
| 1203 |
+
|
| 1204 |
+
if staff_ids:
|
| 1205 |
+
txns = sb.table("product_transactions").select(
|
| 1206 |
+
"amount, seller_amount, created_at"
|
| 1207 |
+
).in_("seller_id", staff_ids).in_("status", ["delivered", "completed"]).execute()
|
| 1208 |
+
|
| 1209 |
+
for t in (txns.data or []):
|
| 1210 |
+
amt = float(t.get("seller_amount", 0))
|
| 1211 |
+
total_revenue += amt
|
| 1212 |
+
total_orders += 1
|
| 1213 |
+
delivery_order_count += 1
|
| 1214 |
+
|
| 1215 |
+
try:
|
| 1216 |
+
dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
|
| 1217 |
+
day_key = dt.strftime("%Y-%m-%d")
|
| 1218 |
+
week_start = dt - timedelta(days=dt.weekday())
|
| 1219 |
+
week_key = week_start.strftime("%Y-%m-%d")
|
| 1220 |
+
month_key = dt.strftime("%Y-%m")
|
| 1221 |
+
except Exception:
|
| 1222 |
+
day_key = t["created_at"][:10]
|
| 1223 |
+
week_key = t["created_at"][:10]
|
| 1224 |
+
month_key = t["created_at"][:7]
|
| 1225 |
+
|
| 1226 |
+
for data, key in [(daily_sales, day_key), (weekly_sales, week_key), (monthly_sales, month_key)]:
|
| 1227 |
+
if key not in data:
|
| 1228 |
+
data[key] = {"amount": 0, "count": 0}
|
| 1229 |
+
data[key]["amount"] += amt
|
| 1230 |
+
data[key]["count"] += 1
|
| 1231 |
+
|
| 1232 |
+
# Delivery earnings breakdown by month
|
| 1233 |
+
if month_key not in delivery_earnings:
|
| 1234 |
+
delivery_earnings[month_key] = {"amount": 0, "count": 0}
|
| 1235 |
+
delivery_earnings[month_key]["amount"] += amt
|
| 1236 |
+
delivery_earnings[month_key]["count"] += 1
|
| 1237 |
+
|
| 1238 |
+
def to_list(data):
|
| 1239 |
+
return sorted(
|
| 1240 |
+
[{"date": k, "amount": round(v["amount"], 2), "count": v["count"]} for k, v in data.items()],
|
| 1241 |
+
key=lambda x: x["date"], reverse=True
|
| 1242 |
+
)[:30]
|
| 1243 |
+
|
| 1244 |
+
return {
|
| 1245 |
+
"department": {
|
| 1246 |
+
"id": d["id"],
|
| 1247 |
+
"name": d["name"],
|
| 1248 |
+
"description": d.get("description", ""),
|
| 1249 |
+
"manager_id": d.get("manager_id"),
|
| 1250 |
+
"manager_name": manager_name,
|
| 1251 |
+
"created_at": d["created_at"],
|
| 1252 |
+
},
|
| 1253 |
+
"staff": staff_result.data or [],
|
| 1254 |
+
"total_staff": len(staff_result.data or []),
|
| 1255 |
+
"total_products": product_count,
|
| 1256 |
+
"low_stock_count": low_stock_count,
|
| 1257 |
+
"products": department_products,
|
| 1258 |
+
"pending_restocks": pending_restocks,
|
| 1259 |
+
"total_revenue": round(total_revenue, 2),
|
| 1260 |
+
"total_orders": total_orders,
|
| 1261 |
+
"delivery_orders": delivery_order_count,
|
| 1262 |
+
"daily_sales": to_list(daily_sales),
|
| 1263 |
+
"weekly_sales": to_list(weekly_sales),
|
| 1264 |
+
"monthly_sales": to_list(monthly_sales),
|
| 1265 |
+
"delivery_earnings": to_list(delivery_earnings),
|
| 1266 |
+
}
|
| 1267 |
+
|
| 1268 |
+
|
| 1269 |
+
@router.put("/departments/{dept_id}")
|
| 1270 |
+
async def update_department(dept_id: str, req: DepartmentUpdateRequest, admin: dict = Depends(require_admin)):
|
| 1271 |
+
"""Update a department's name or description."""
|
| 1272 |
+
sb = get_supabase()
|
| 1273 |
+
|
| 1274 |
+
existing = sb.table("departments").select("id").eq("id", dept_id).execute()
|
| 1275 |
+
if not existing.data:
|
| 1276 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 1277 |
+
|
| 1278 |
+
update_data = {k: v for k, v in req.model_dump().items() if v is not None}
|
| 1279 |
+
if not update_data:
|
| 1280 |
+
raise HTTPException(status_code=400, detail="No fields to update")
|
| 1281 |
+
|
| 1282 |
+
sb.table("departments").update(update_data).eq("id", dept_id).execute()
|
| 1283 |
+
return {"message": "Department updated"}
|
| 1284 |
+
|
| 1285 |
+
|
| 1286 |
+
@router.delete("/departments/{dept_id}")
|
| 1287 |
+
async def delete_department(dept_id: str, admin: dict = Depends(require_admin)):
|
| 1288 |
+
"""Delete a department only if no staff or manager are still assigned."""
|
| 1289 |
+
sb = get_supabase()
|
| 1290 |
+
dept = sb.table("departments").select("id, name, manager_id").eq("id", dept_id).execute()
|
| 1291 |
+
if not dept.data:
|
| 1292 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 1293 |
+
|
| 1294 |
+
# Check if any staff (sellers) are still assigned
|
| 1295 |
+
staff = sb.table("users").select("id").eq("department_id", dept_id).execute()
|
| 1296 |
+
if staff.data:
|
| 1297 |
+
raise HTTPException(
|
| 1298 |
+
status_code=400,
|
| 1299 |
+
detail="Cannot delete this store while staff or managers are still assigned. Remove all members first."
|
| 1300 |
+
)
|
| 1301 |
+
|
| 1302 |
+
sb.table("departments").delete().eq("id", dept_id).execute()
|
| 1303 |
+
return {"message": f"Store '{dept.data[0]['name']}' deleted"}
|
| 1304 |
+
|
| 1305 |
+
|
| 1306 |
+
# --- Admin Delete Product ---
|
| 1307 |
+
|
| 1308 |
+
@router.delete("/products/{product_id}")
|
| 1309 |
+
async def admin_delete_product(product_id: str, admin: dict = Depends(require_admin)):
|
| 1310 |
+
"""Soft-delete a product (set is_active=False). Admin can delete any product regardless of ownership."""
|
| 1311 |
+
sb = get_supabase()
|
| 1312 |
+
existing = sb.table("products").select("id, title").eq("id", product_id).execute()
|
| 1313 |
+
if not existing.data:
|
| 1314 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 1315 |
+
sb.table("products").update({"is_active": False}).eq("id", product_id).execute()
|
| 1316 |
+
return {"message": "Product deleted successfully"}
|
| 1317 |
+
|
| 1318 |
+
|
| 1319 |
+
# --- Admin Create Product for Department ---
|
| 1320 |
+
|
| 1321 |
+
class AdminCreateProductRequest(BaseModel):
    """Payload for admin product creation; the handler validates price > 0 and 1–5 images."""

    title: str
    description: str = ""
    price: float
    images: list = []
|
| 1326 |
+
|
| 1327 |
+
|
| 1328 |
+
@router.post("/departments/{dept_id}/products")
|
| 1329 |
+
async def admin_create_product_for_dept(
|
| 1330 |
+
dept_id: str, req: AdminCreateProductRequest, admin: dict = Depends(require_admin)
|
| 1331 |
+
):
|
| 1332 |
+
"""Admin creates a product for a specific department/store with stock=0 and auto-approved."""
|
| 1333 |
+
from models.bert_service import bert_service
|
| 1334 |
+
from database import store_product_embedding
|
| 1335 |
+
|
| 1336 |
+
sb = get_supabase()
|
| 1337 |
+
|
| 1338 |
+
# Verify department exists
|
| 1339 |
+
dept = sb.table("departments").select("id, manager_id, name").eq("id", dept_id).execute()
|
| 1340 |
+
if not dept.data:
|
| 1341 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 1342 |
+
|
| 1343 |
+
# Validate fields
|
| 1344 |
+
if not req.title or not req.title.strip():
|
| 1345 |
+
raise HTTPException(status_code=400, detail="Product title is required")
|
| 1346 |
+
if req.price <= 0:
|
| 1347 |
+
raise HTTPException(status_code=400, detail="Price must be greater than 0")
|
| 1348 |
+
if not req.images or len(req.images) == 0:
|
| 1349 |
+
raise HTTPException(status_code=400, detail="At least one product image is required")
|
| 1350 |
+
if len(req.images) > 5:
|
| 1351 |
+
raise HTTPException(status_code=400, detail="Maximum 5 images allowed")
|
| 1352 |
+
|
| 1353 |
+
# Use the department's manager as the seller_id so the product belongs to the store.
|
| 1354 |
+
# Fall back to admin if the department has no manager assigned yet.
|
| 1355 |
+
manager_id = dept.data[0].get("manager_id")
|
| 1356 |
+
seller_id = manager_id if manager_id else admin["sub"]
|
| 1357 |
+
|
| 1358 |
+
result = sb.table("products").insert({
|
| 1359 |
+
"seller_id": seller_id,
|
| 1360 |
+
"title": req.title.strip(),
|
| 1361 |
+
"description": (req.description or "").strip(),
|
| 1362 |
+
"price": req.price,
|
| 1363 |
+
"stock": 0,
|
| 1364 |
+
"images": req.images,
|
| 1365 |
+
"status": "approved",
|
| 1366 |
+
}).execute()
|
| 1367 |
+
|
| 1368 |
+
if not result.data:
|
| 1369 |
+
raise HTTPException(status_code=500, detail="Failed to create product")
|
| 1370 |
+
|
| 1371 |
+
product = result.data[0]
|
| 1372 |
+
|
| 1373 |
+
# Compute BERT embedding
|
| 1374 |
+
try:
|
| 1375 |
+
if bert_service._loaded:
|
| 1376 |
+
embedding = bert_service.compute_embedding(req.title)
|
| 1377 |
+
store_product_embedding(product["id"], embedding)
|
| 1378 |
+
except Exception as e:
|
| 1379 |
+
print(f"[Admin] Warning: Failed to compute embedding: {e}")
|
| 1380 |
+
|
| 1381 |
+
return {
|
| 1382 |
+
"message": f"Product created for {dept.data[0]['name']}",
|
| 1383 |
+
"product": {
|
| 1384 |
+
"id": product["id"],
|
| 1385 |
+
"title": product["title"],
|
| 1386 |
+
"price": float(product["price"]),
|
| 1387 |
+
"stock": 0,
|
| 1388 |
+
"status": "approved",
|
| 1389 |
+
},
|
| 1390 |
+
}
|
| 1391 |
+
|
| 1392 |
+
|
| 1393 |
+
# --- Manager Registration ---
|
| 1394 |
+
|
| 1395 |
+
class ManagerRegisterRequest(BaseModel):
    """Payload for admin-initiated manager registration; contact number is optional."""

    full_name: str
    email: str
    password: str
    contact_number: str = ""
    department_id: str
|
| 1401 |
+
|
| 1402 |
+
|
| 1403 |
+
@router.post("/managers/register")
|
| 1404 |
+
async def admin_register_manager(req: ManagerRegisterRequest, admin: dict = Depends(require_admin)):
|
| 1405 |
+
"""Admin-only: register a new manager and assign to a department."""
|
| 1406 |
+
import bcrypt
|
| 1407 |
+
import traceback
|
| 1408 |
+
|
| 1409 |
+
try:
|
| 1410 |
+
sb = get_supabase()
|
| 1411 |
+
|
| 1412 |
+
# Verify department exists
|
| 1413 |
+
dept = sb.table("departments").select("id, manager_id").eq("id", req.department_id).execute()
|
| 1414 |
+
if not dept.data:
|
| 1415 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 1416 |
+
|
| 1417 |
+
if dept.data[0].get("manager_id"):
|
| 1418 |
+
raise HTTPException(status_code=400, detail="This department already has a manager assigned")
|
| 1419 |
+
|
| 1420 |
+
# Check unique email
|
| 1421 |
+
existing_email = sb.table("users").select("id").eq("email", req.email).execute()
|
| 1422 |
+
if existing_email.data:
|
| 1423 |
+
raise HTTPException(status_code=400, detail="Email already registered")
|
| 1424 |
+
|
| 1425 |
+
# Check unique full_name
|
| 1426 |
+
existing_name = sb.table("users").select("id").eq("full_name", req.full_name).execute()
|
| 1427 |
+
if existing_name.data:
|
| 1428 |
+
raise HTTPException(status_code=400, detail="Full name already taken")
|
| 1429 |
+
|
| 1430 |
+
# Hash password
|
| 1431 |
+
password_hash = bcrypt.hashpw(req.password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
|
| 1432 |
+
|
| 1433 |
+
# Create user with manager role
|
| 1434 |
+
result = sb.table("users").insert({
|
| 1435 |
+
"email": req.email,
|
| 1436 |
+
"password_hash": password_hash,
|
| 1437 |
+
"full_name": req.full_name,
|
| 1438 |
+
"role": "manager",
|
| 1439 |
+
"is_banned": False,
|
| 1440 |
+
"department_id": req.department_id,
|
| 1441 |
+
}).execute()
|
| 1442 |
+
|
| 1443 |
+
if not result.data:
|
| 1444 |
+
raise HTTPException(status_code=500, detail="Failed to create manager")
|
| 1445 |
+
|
| 1446 |
+
user = result.data[0]
|
| 1447 |
+
|
| 1448 |
+
# Create contact if provided
|
| 1449 |
+
if req.contact_number:
|
| 1450 |
+
sb.table("user_contacts").insert({"user_id": user["id"], "contact_number": req.contact_number}).execute()
|
| 1451 |
+
|
| 1452 |
+
# Assign manager to department
|
| 1453 |
+
sb.table("departments").update({"manager_id": user["id"]}).eq("id", req.department_id).execute()
|
| 1454 |
+
|
| 1455 |
+
return {
|
| 1456 |
+
"message": "Manager registered and assigned to department",
|
| 1457 |
+
"user": {
|
| 1458 |
+
"id": user["id"],
|
| 1459 |
+
"full_name": user["full_name"],
|
| 1460 |
+
"email": user["email"],
|
| 1461 |
+
"role": "manager",
|
| 1462 |
+
"department_id": req.department_id,
|
| 1463 |
+
},
|
| 1464 |
+
}
|
| 1465 |
+
|
| 1466 |
+
except HTTPException:
|
| 1467 |
+
raise
|
| 1468 |
+
except Exception as e:
|
| 1469 |
+
print(f"[ManagerRegister] ERROR: {e}")
|
| 1470 |
+
traceback.print_exc()
|
| 1471 |
+
raise HTTPException(status_code=500, detail=f"Registration failed: {str(e)}")
|
| 1472 |
+
|
| 1473 |
+
|
| 1474 |
+
# --- Product Removal Approval ---
|
| 1475 |
+
|
| 1476 |
+
@router.get("/pending-removals")
|
| 1477 |
+
async def admin_get_pending_removals(admin: dict = Depends(require_admin)):
|
| 1478 |
+
"""Get all products with status 'pending_removal'."""
|
| 1479 |
+
sb = get_supabase()
|
| 1480 |
+
prods = sb.table("products").select("*").eq("status", "pending_removal").order("removal_requested_at", desc=True).execute()
|
| 1481 |
+
|
| 1482 |
+
results = []
|
| 1483 |
+
for p in (prods.data or []):
|
| 1484 |
+
seller = sb.table("users").select("full_name, email, department_id").eq("id", p["seller_id"]).execute()
|
| 1485 |
+
seller_info = seller.data[0] if seller.data else {}
|
| 1486 |
+
|
| 1487 |
+
seller_name = seller_info.get("full_name", "Unknown")
|
| 1488 |
+
dept_id = seller_info.get("department_id")
|
| 1489 |
+
dept_name = ""
|
| 1490 |
+
if dept_id:
|
| 1491 |
+
dept_resp = sb.table("departments").select("name").eq("id", dept_id).execute()
|
| 1492 |
+
if dept_resp.data:
|
| 1493 |
+
dept_name = dept_resp.data[0]["name"]
|
| 1494 |
+
seller_name = dept_name
|
| 1495 |
+
|
| 1496 |
+
requester_name = ""
|
| 1497 |
+
if p.get("removal_requested_by"):
|
| 1498 |
+
req_user = sb.table("users").select("full_name").eq("id", p["removal_requested_by"]).execute()
|
| 1499 |
+
if req_user.data:
|
| 1500 |
+
requester_name = req_user.data[0]["full_name"]
|
| 1501 |
+
|
| 1502 |
+
results.append({
|
| 1503 |
+
"id": p["id"],
|
| 1504 |
+
"title": p["title"],
|
| 1505 |
+
"description": p.get("description", ""),
|
| 1506 |
+
"price": float(p["price"]),
|
| 1507 |
+
"stock": p["stock"],
|
| 1508 |
+
"images": p.get("images", []),
|
| 1509 |
+
"seller_id": p["seller_id"],
|
| 1510 |
+
"seller_name": seller_name,
|
| 1511 |
+
"department_name": dept_name,
|
| 1512 |
+
"status": p["status"],
|
| 1513 |
+
"removal_requested_by": p.get("removal_requested_by"),
|
| 1514 |
+
"requester_name": requester_name,
|
| 1515 |
+
"removal_requested_at": p.get("removal_requested_at"),
|
| 1516 |
+
"created_at": p["created_at"],
|
| 1517 |
+
})
|
| 1518 |
+
|
| 1519 |
+
return results
|
| 1520 |
+
|
| 1521 |
+
|
| 1522 |
+
@router.put("/products/{product_id}/approve-removal")
|
| 1523 |
+
async def admin_approve_removal(product_id: str, admin: dict = Depends(require_admin)):
|
| 1524 |
+
"""Approve a product removal request. Deactivates the product."""
|
| 1525 |
+
sb = get_supabase()
|
| 1526 |
+
|
| 1527 |
+
prod = sb.table("products").select("status").eq("id", product_id).execute()
|
| 1528 |
+
if not prod.data:
|
| 1529 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 1530 |
+
|
| 1531 |
+
if prod.data[0]["status"] != "pending_removal":
|
| 1532 |
+
raise HTTPException(status_code=400, detail="Product is not pending removal")
|
| 1533 |
+
|
| 1534 |
+
sb.table("products").update({
|
| 1535 |
+
"is_active": False,
|
| 1536 |
+
"status": "unapproved",
|
| 1537 |
+
}).eq("id", product_id).execute()
|
| 1538 |
+
|
| 1539 |
+
return {"message": "Product removal approved. Product has been deactivated."}
|
| 1540 |
+
|
| 1541 |
+
|
| 1542 |
+
@router.put("/products/{product_id}/reject-removal")
|
| 1543 |
+
async def admin_reject_removal(product_id: str, admin: dict = Depends(require_admin)):
|
| 1544 |
+
"""Reject a product removal request. Product returns to approved status."""
|
| 1545 |
+
sb = get_supabase()
|
| 1546 |
+
|
| 1547 |
+
prod = sb.table("products").select("status").eq("id", product_id).execute()
|
| 1548 |
+
if not prod.data:
|
| 1549 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 1550 |
+
|
| 1551 |
+
if prod.data[0]["status"] != "pending_removal":
|
| 1552 |
+
raise HTTPException(status_code=400, detail="Product is not pending removal")
|
| 1553 |
+
|
| 1554 |
+
sb.table("products").update({
|
| 1555 |
+
"status": "approved",
|
| 1556 |
+
"removal_requested_by": None,
|
| 1557 |
+
"removal_requested_at": None,
|
| 1558 |
+
}).eq("id", product_id).execute()
|
| 1559 |
+
|
| 1560 |
+
return {"message": "Product removal rejected. Product remains active."}
|
| 1561 |
+
|
| 1562 |
+
|
| 1563 |
+
# --- Deliveries Management ---
|
| 1564 |
+
|
| 1565 |
+
class DeliveryStatsDay(BaseModel):
    """One time bucket (day/week/month key) with its delivery count."""

    date: str
    count: int
|
| 1568 |
+
|
| 1569 |
+
class Deliveryman(BaseModel):
    """Per-deliveryman summary row used by the deliveries stats endpoint."""
    user_id: str
    full_name: str
    email: str
    # Phone number from user_contacts; empty string when none is on file.
    contact_number: str = ""
    # All transactions assigned to this deliveryman (any status).
    total_deliveries: int = 0
    # Average hours from transaction creation to its last update, over
    # delivered transactions only; None when no time could be computed.
    avg_delivery_time: Optional[float] = None
    # Transactions with status == "delivered".
    completed_count: int = 0
|
| 1578 |
+
class DeliveriesStatsResponse(BaseModel):
    """Response payload for GET /deliveries/stats."""
    total_deliveries: int
    # Average delivery time in hours across completed deliveries;
    # None when no delivery time could be computed.
    avg_delivery_time: Optional[float]
    deliveries_by_day: list[DeliveryStatsDay]
    # Week buckets are keyed "YYYY-W##" using strftime %U numbering.
    deliveries_by_week: list[DeliveryStatsDay]
    deliveries_by_month: list[DeliveryStatsDay]
    deliverymen: list[Deliveryman]
| 1586 |
+
|
| 1587 |
+
@router.get("/deliveries/stats", response_model=DeliveriesStatsResponse)
|
| 1588 |
+
async def get_deliveries_stats(admin: dict = Depends(require_admin)):
|
| 1589 |
+
"""Get all deliveries stats including avg delivery time and breakdown by deliveryman."""
|
| 1590 |
+
sb = get_supabase()
|
| 1591 |
+
|
| 1592 |
+
# Get all delivery transactions (filter out empty delivery_user_id locally)
|
| 1593 |
+
all_txns = sb.table("product_transactions").select(
|
| 1594 |
+
"*"
|
| 1595 |
+
).order("created_at", desc=False).execute()
|
| 1596 |
+
|
| 1597 |
+
if all_txns.data:
|
| 1598 |
+
txns_data = [t for t in all_txns.data if t.get("delivery_user_id")]
|
| 1599 |
+
else:
|
| 1600 |
+
txns_data = []
|
| 1601 |
+
|
| 1602 |
+
class AttrDict:
|
| 1603 |
+
def __init__(self, d):
|
| 1604 |
+
self.data = d
|
| 1605 |
+
txns = AttrDict(txns_data)
|
| 1606 |
+
|
| 1607 |
+
if not txns.data:
|
| 1608 |
+
return DeliveriesStatsResponse(
|
| 1609 |
+
total_deliveries=0,
|
| 1610 |
+
avg_delivery_time=None,
|
| 1611 |
+
deliveries_by_day=[],
|
| 1612 |
+
deliveries_by_week=[],
|
| 1613 |
+
deliveries_by_month=[],
|
| 1614 |
+
deliverymen=[]
|
| 1615 |
+
)
|
| 1616 |
+
|
| 1617 |
+
# Get deliveryman contact info
|
| 1618 |
+
deliveryman_ids = list(set(t["delivery_user_id"] for t in txns.data if t["delivery_user_id"]))
|
| 1619 |
+
contacts = {}
|
| 1620 |
+
if deliveryman_ids:
|
| 1621 |
+
contacts_result = sb.table("user_contacts").select("user_id, contact_number").in_("user_id", deliveryman_ids).execute()
|
| 1622 |
+
contacts = {c["user_id"]: c["contact_number"] for c in (contacts_result.data or [])}
|
| 1623 |
+
|
| 1624 |
+
# Get user details for deliverymen
|
| 1625 |
+
user_details = {}
|
| 1626 |
+
if deliveryman_ids:
|
| 1627 |
+
users_result = sb.table("users").select("id, full_name, email").in_("id", deliveryman_ids).execute()
|
| 1628 |
+
user_details = {u["id"]: u for u in (users_result.data or [])}
|
| 1629 |
+
|
| 1630 |
+
# Calculate stats
|
| 1631 |
+
delivery_times = []
|
| 1632 |
+
deliveries_by_date = {}
|
| 1633 |
+
deliveries_by_week = {}
|
| 1634 |
+
deliveries_by_month = {}
|
| 1635 |
+
deliverymen_map = {}
|
| 1636 |
+
|
| 1637 |
+
for t in txns.data:
|
| 1638 |
+
delivery_user_id = t.get("delivery_user_id")
|
| 1639 |
+
if not delivery_user_id:
|
| 1640 |
+
continue
|
| 1641 |
+
|
| 1642 |
+
# Initialize deliveryman entry
|
| 1643 |
+
if delivery_user_id not in deliverymen_map:
|
| 1644 |
+
user_info = user_details.get(delivery_user_id, {})
|
| 1645 |
+
deliverymen_map[delivery_user_id] = {
|
| 1646 |
+
"user_id": delivery_user_id,
|
| 1647 |
+
"full_name": user_info.get("full_name", "Unknown"),
|
| 1648 |
+
"email": user_info.get("email", ""),
|
| 1649 |
+
"contact_number": contacts.get(delivery_user_id, ""),
|
| 1650 |
+
"total_deliveries": 0,
|
| 1651 |
+
"completed_count": 0,
|
| 1652 |
+
"delivery_times": []
|
| 1653 |
+
}
|
| 1654 |
+
|
| 1655 |
+
deliverymen_map[delivery_user_id]["total_deliveries"] += 1
|
| 1656 |
+
|
| 1657 |
+
# Track delivery time if status is delivered
|
| 1658 |
+
if t.get("status") == "delivered":
|
| 1659 |
+
deliverymen_map[delivery_user_id]["completed_count"] += 1
|
| 1660 |
+
created = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
|
| 1661 |
+
if t.get("updated_at"):
|
| 1662 |
+
updated = datetime.fromisoformat(t["updated_at"].replace("Z", "+00:00"))
|
| 1663 |
+
time_diff = (updated - created).total_seconds() / 3600 # hours
|
| 1664 |
+
deliverymen_map[delivery_user_id]["delivery_times"].append(time_diff)
|
| 1665 |
+
delivery_times.append(time_diff)
|
| 1666 |
+
|
| 1667 |
+
# Count by date
|
| 1668 |
+
created_date = t["created_at"].split("T")[0]
|
| 1669 |
+
deliveries_by_date[created_date] = deliveries_by_date.get(created_date, 0) + 1
|
| 1670 |
+
|
| 1671 |
+
# Count by week (ISO week)
|
| 1672 |
+
created = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
|
| 1673 |
+
week_key = created.strftime("%Y-W%U")
|
| 1674 |
+
deliveries_by_week[week_key] = deliveries_by_week.get(week_key, 0) + 1
|
| 1675 |
+
|
| 1676 |
+
# Count by month
|
| 1677 |
+
month_key = created.strftime("%Y-%m")
|
| 1678 |
+
deliveries_by_month[month_key] = deliveries_by_month.get(month_key, 0) + 1
|
| 1679 |
+
|
| 1680 |
+
# Calculate average delivery time
|
| 1681 |
+
avg_delivery_time = sum(delivery_times) / len(delivery_times) if delivery_times else None
|
| 1682 |
+
|
| 1683 |
+
# Format deliveries breakdown
|
| 1684 |
+
days_list = [{"date": date, "count": count} for date, count in sorted(deliveries_by_date.items())]
|
| 1685 |
+
weeks_list = [{"date": week, "count": count} for week, count in sorted(deliveries_by_week.items())]
|
| 1686 |
+
months_list = [{"date": month, "count": count} for month, count in sorted(deliveries_by_month.items())]
|
| 1687 |
+
|
| 1688 |
+
# Build deliverymen list
|
| 1689 |
+
deliverymen_list = []
|
| 1690 |
+
for user_id, data in deliverymen_map.items():
|
| 1691 |
+
avg_time = sum(data["delivery_times"]) / len(data["delivery_times"]) if data["delivery_times"] else None
|
| 1692 |
+
deliverymen_list.append(Deliveryman(
|
| 1693 |
+
user_id=user_id,
|
| 1694 |
+
full_name=data["full_name"],
|
| 1695 |
+
email=data["email"],
|
| 1696 |
+
contact_number=data["contact_number"],
|
| 1697 |
+
total_deliveries=data["total_deliveries"],
|
| 1698 |
+
avg_delivery_time=avg_time,
|
| 1699 |
+
completed_count=data["completed_count"]
|
| 1700 |
+
))
|
| 1701 |
+
|
| 1702 |
+
return DeliveriesStatsResponse(
|
| 1703 |
+
total_deliveries=len(txns.data),
|
| 1704 |
+
avg_delivery_time=avg_delivery_time,
|
| 1705 |
+
deliveries_by_day=days_list,
|
| 1706 |
+
deliveries_by_week=weeks_list,
|
| 1707 |
+
deliveries_by_month=months_list,
|
| 1708 |
+
deliverymen=deliverymen_list
|
| 1709 |
+
)
|
| 1710 |
+
|
| 1711 |
+
|
| 1712 |
+
# --- Admin Restock Request ---
|
| 1713 |
+
|
| 1714 |
+
class AdminRestockRequestCreate(BaseModel):
    """Request body for POST /restock-request (admin-initiated restock)."""
    product_id: str
    # Must be >= 1; validated in the route handler.
    requested_quantity: int
    notes: str = ""
+
|
| 1720 |
+
@router.post("/restock-request")
|
| 1721 |
+
async def admin_create_restock_request(req: AdminRestockRequestCreate, admin: dict = Depends(require_admin)):
|
| 1722 |
+
"""
|
| 1723 |
+
Admin creates a restock request that bypasses manager approval.
|
| 1724 |
+
Status is set directly to 'approved_manager' so delivery can pick it up.
|
| 1725 |
+
"""
|
| 1726 |
+
sb = get_supabase()
|
| 1727 |
+
admin_id = admin["sub"]
|
| 1728 |
+
|
| 1729 |
+
if req.requested_quantity < 1:
|
| 1730 |
+
raise HTTPException(status_code=400, detail="Quantity must be at least 1")
|
| 1731 |
+
|
| 1732 |
+
# Get the product and its department
|
| 1733 |
+
product = sb.table("products").select("id, seller_id, title").eq("id", req.product_id).execute()
|
| 1734 |
+
if not product.data:
|
| 1735 |
+
raise HTTPException(status_code=404, detail="Product not found")
|
| 1736 |
+
|
| 1737 |
+
# Get seller's department_id
|
| 1738 |
+
seller_id = product.data[0]["seller_id"]
|
| 1739 |
+
seller = sb.table("users").select("department_id").eq("id", seller_id).execute()
|
| 1740 |
+
department_id = seller.data[0].get("department_id") if seller.data else None
|
| 1741 |
+
|
| 1742 |
+
if not department_id:
|
| 1743 |
+
raise HTTPException(status_code=400, detail="Product does not belong to a department")
|
| 1744 |
+
|
| 1745 |
+
# Get admin name
|
| 1746 |
+
admin_user = sb.table("users").select("full_name").eq("id", admin_id).execute()
|
| 1747 |
+
admin_name = admin_user.data[0]["full_name"] if admin_user.data else "Admin"
|
| 1748 |
+
|
| 1749 |
+
now = datetime.now(timezone.utc).isoformat()
|
| 1750 |
+
|
| 1751 |
+
# Create restock request with status 'approved_manager' (bypasses manager)
|
| 1752 |
+
result = sb.table("restock_requests").insert({
|
| 1753 |
+
"staff_id": admin_id,
|
| 1754 |
+
"department_id": department_id,
|
| 1755 |
+
"product_id": req.product_id,
|
| 1756 |
+
"requested_quantity": req.requested_quantity,
|
| 1757 |
+
"approved_quantity": req.requested_quantity,
|
| 1758 |
+
"notes": req.notes,
|
| 1759 |
+
"status": "approved_manager",
|
| 1760 |
+
"manager_approved_at": now,
|
| 1761 |
+
}).execute()
|
| 1762 |
+
|
| 1763 |
+
if not result.data:
|
| 1764 |
+
raise HTTPException(status_code=500, detail="Failed to create restock request")
|
| 1765 |
+
|
| 1766 |
+
return {
|
| 1767 |
+
"message": f"Restock request created by {admin_name} and marked as To Be Delivered",
|
| 1768 |
+
"request": result.data[0],
|
| 1769 |
+
"requested_by": admin_name,
|
| 1770 |
+
}
|
| 1771 |
+
|
| 1772 |
+
|
| 1773 |
+
@router.get("/restock-requests")
|
| 1774 |
+
async def admin_get_restock_requests(
|
| 1775 |
+
department_id: str = Query("", description="Filter by department ID"),
|
| 1776 |
+
status: str = Query("", description="Filter by status"),
|
| 1777 |
+
admin: dict = Depends(require_admin),
|
| 1778 |
+
):
|
| 1779 |
+
"""Get restock requests, optionally filtered by department and status."""
|
| 1780 |
+
sb = get_supabase()
|
| 1781 |
+
|
| 1782 |
+
query = sb.table("restock_requests").select(
|
| 1783 |
+
"*, products(title, price, stock, images)"
|
| 1784 |
+
).order("created_at", desc=True).limit(100)
|
| 1785 |
+
|
| 1786 |
+
if department_id:
|
| 1787 |
+
query = query.eq("department_id", department_id)
|
| 1788 |
+
if status:
|
| 1789 |
+
query = query.eq("status", status)
|
| 1790 |
+
|
| 1791 |
+
requests = query.execute()
|
| 1792 |
+
|
| 1793 |
+
# Get staff names
|
| 1794 |
+
staff_ids = set()
|
| 1795 |
+
for r in (requests.data or []):
|
| 1796 |
+
staff_ids.add(r["staff_id"])
|
| 1797 |
+
|
| 1798 |
+
staff_names = {}
|
| 1799 |
+
if staff_ids:
|
| 1800 |
+
users_result = sb.table("users").select("id, full_name, role").in_("id", list(staff_ids)).execute()
|
| 1801 |
+
staff_names = {u["id"]: {"name": u["full_name"], "role": u.get("role", "")} for u in (users_result.data or [])}
|
| 1802 |
+
|
| 1803 |
+
results = []
|
| 1804 |
+
for r in (requests.data or []):
|
| 1805 |
+
prod = r.get("products") or {}
|
| 1806 |
+
staff_info = staff_names.get(r["staff_id"], {"name": "Unknown", "role": ""})
|
| 1807 |
+
results.append({
|
| 1808 |
+
"id": r["id"],
|
| 1809 |
+
"product_id": r["product_id"],
|
| 1810 |
+
"product_title": prod.get("title", ""),
|
| 1811 |
+
"product_images": prod.get("images", []),
|
| 1812 |
+
"current_stock": int(prod.get("stock", 0)),
|
| 1813 |
+
"requested_quantity": r["requested_quantity"],
|
| 1814 |
+
"approved_quantity": r.get("approved_quantity"),
|
| 1815 |
+
"notes": r.get("notes", ""),
|
| 1816 |
+
"requested_by": staff_info["name"],
|
| 1817 |
+
"requested_by_role": staff_info["role"],
|
| 1818 |
+
"status": r["status"],
|
| 1819 |
+
"created_at": r["created_at"],
|
| 1820 |
+
})
|
| 1821 |
+
|
| 1822 |
+
return results
|
| 1823 |
+
|
| 1824 |
+
|
| 1825 |
+
# --- Salary Management ---
|
| 1826 |
+
|
| 1827 |
+
class SetSalaryRequest(BaseModel):
    """Body for PUT /salaries/set/{user_id}."""
    # New fixed salary; must be non-negative (validated in the handler).
    salary: float
| 1830 |
+
|
| 1831 |
+
class PayIndividualRequest(BaseModel):
    """Body for POST /salaries/pay-individual."""
    recipient_id: str
    # Amount to transfer; must be > 0 (validated in the handler).
    amount: float
| 1835 |
+
|
| 1836 |
+
class PayGroupRequest(BaseModel):
    """Body for group salary payments."""
    # NOTE(review): not referenced by any handler visible in this chunk —
    # confirm whether it is still in use elsewhere.
    amount_per_person: Optional[float] = None  # If None, uses each person's fixed salary
| 1839 |
+
|
| 1840 |
+
@router.get("/salaries")
|
| 1841 |
+
async def admin_get_salaries(admin: dict = Depends(require_admin)):
|
| 1842 |
+
"""Get salary overview: all departments with managers, staff, their salaries, and payment history."""
|
| 1843 |
+
sb = get_supabase()
|
| 1844 |
+
admin_id = admin["sub"]
|
| 1845 |
+
|
| 1846 |
+
# Admin balance
|
| 1847 |
+
admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
|
| 1848 |
+
admin_balance = float(admin_bal.data[0]["balance"]) if admin_bal.data else 0.0
|
| 1849 |
+
|
| 1850 |
+
# Get all departments
|
| 1851 |
+
depts = sb.table("departments").select("id, name, manager_id").execute()
|
| 1852 |
+
dept_list = depts.data or []
|
| 1853 |
+
|
| 1854 |
+
# Get all managers and staff (sellers)
|
| 1855 |
+
managers = sb.table("users").select("id, full_name, email, role, department_id, salary").eq("role", "manager").execute()
|
| 1856 |
+
staff = sb.table("users").select("id, full_name, email, role, department_id, salary").eq("role", "seller").execute()
|
| 1857 |
+
|
| 1858 |
+
manager_map = {}
|
| 1859 |
+
for m in (managers.data or []):
|
| 1860 |
+
manager_map[m["id"]] = m
|
| 1861 |
+
|
| 1862 |
+
# Build per-department data
|
| 1863 |
+
now = datetime.now(timezone.utc)
|
| 1864 |
+
current_month = now.strftime("%Y-%m")
|
| 1865 |
+
|
| 1866 |
+
# Get salary payments for current month
|
| 1867 |
+
payments_this_month = sb.table("salary_payments").select("*").eq("payment_month", current_month).order("created_at", desc=True).execute()
|
| 1868 |
+
payments_data = payments_this_month.data or []
|
| 1869 |
+
|
| 1870 |
+
# Build paid amounts per recipient this month
|
| 1871 |
+
paid_this_month = {}
|
| 1872 |
+
for p in payments_data:
|
| 1873 |
+
rid = p["recipient_id"]
|
| 1874 |
+
paid_this_month[rid] = paid_this_month.get(rid, 0) + float(p["amount"])
|
| 1875 |
+
|
| 1876 |
+
# Build department list
|
| 1877 |
+
departments_result = []
|
| 1878 |
+
total_salaries = 0.0
|
| 1879 |
+
|
| 1880 |
+
for dept in dept_list:
|
| 1881 |
+
dept_managers = []
|
| 1882 |
+
dept_staff = []
|
| 1883 |
+
|
| 1884 |
+
# Find manager for this dept
|
| 1885 |
+
if dept.get("manager_id") and dept["manager_id"] in manager_map:
|
| 1886 |
+
mgr = manager_map[dept["manager_id"]]
|
| 1887 |
+
salary_val = float(mgr.get("salary", 0))
|
| 1888 |
+
paid_val = paid_this_month.get(mgr["id"], 0)
|
| 1889 |
+
dept_managers.append({
|
| 1890 |
+
"id": mgr["id"],
|
| 1891 |
+
"full_name": mgr["full_name"],
|
| 1892 |
+
"email": mgr["email"],
|
| 1893 |
+
"role": "manager",
|
| 1894 |
+
"salary": salary_val,
|
| 1895 |
+
"paid_this_month": paid_val,
|
| 1896 |
+
"remaining": max(salary_val - paid_val, 0),
|
| 1897 |
+
})
|
| 1898 |
+
total_salaries += max(salary_val - paid_val, 0)
|
| 1899 |
+
|
| 1900 |
+
# Find staff for this dept
|
| 1901 |
+
for s in (staff.data or []):
|
| 1902 |
+
if s.get("department_id") == dept["id"]:
|
| 1903 |
+
salary_val = float(s.get("salary", 0))
|
| 1904 |
+
paid_val = paid_this_month.get(s["id"], 0)
|
| 1905 |
+
dept_staff.append({
|
| 1906 |
+
"id": s["id"],
|
| 1907 |
+
"full_name": s["full_name"],
|
| 1908 |
+
"email": s["email"],
|
| 1909 |
+
"role": "seller",
|
| 1910 |
+
"salary": salary_val,
|
| 1911 |
+
"paid_this_month": paid_val,
|
| 1912 |
+
"remaining": max(salary_val - paid_val, 0),
|
| 1913 |
+
})
|
| 1914 |
+
total_salaries += max(salary_val - paid_val, 0)
|
| 1915 |
+
|
| 1916 |
+
dept_total = sum(m["remaining"] for m in dept_managers) + sum(s["remaining"] for s in dept_staff)
|
| 1917 |
+
|
| 1918 |
+
departments_result.append({
|
| 1919 |
+
"id": dept["id"],
|
| 1920 |
+
"name": dept["name"],
|
| 1921 |
+
"managers": dept_managers,
|
| 1922 |
+
"staff": dept_staff,
|
| 1923 |
+
"total_remaining": dept_total,
|
| 1924 |
+
})
|
| 1925 |
+
|
| 1926 |
+
# Next pay date: 1st of next month
|
| 1927 |
+
if now.month == 12:
|
| 1928 |
+
next_pay = datetime(now.year + 1, 1, 1)
|
| 1929 |
+
else:
|
| 1930 |
+
next_pay = datetime(now.year, now.month + 1, 1)
|
| 1931 |
+
|
| 1932 |
+
# Recent payment history (last 50)
|
| 1933 |
+
recent_payments = sb.table("salary_payments").select("*").order("created_at", desc=True).limit(50).execute()
|
| 1934 |
+
# Get recipient names
|
| 1935 |
+
recipient_ids = set(p["recipient_id"] for p in (recent_payments.data or []))
|
| 1936 |
+
recipient_names = {}
|
| 1937 |
+
if recipient_ids:
|
| 1938 |
+
rn = sb.table("users").select("id, full_name").in_("id", list(recipient_ids)).execute()
|
| 1939 |
+
recipient_names = {u["id"]: u["full_name"] for u in (rn.data or [])}
|
| 1940 |
+
|
| 1941 |
+
payment_history = []
|
| 1942 |
+
for p in (recent_payments.data or []):
|
| 1943 |
+
payment_history.append({
|
| 1944 |
+
"id": p["id"],
|
| 1945 |
+
"recipient_id": p["recipient_id"],
|
| 1946 |
+
"recipient_name": recipient_names.get(p["recipient_id"], "Unknown"),
|
| 1947 |
+
"amount": float(p["amount"]),
|
| 1948 |
+
"payment_month": p["payment_month"],
|
| 1949 |
+
"notes": p.get("notes", ""),
|
| 1950 |
+
"created_at": p["created_at"],
|
| 1951 |
+
})
|
| 1952 |
+
|
| 1953 |
+
return {
|
| 1954 |
+
"admin_balance": admin_balance,
|
| 1955 |
+
"total_salaries_remaining": round(total_salaries, 2),
|
| 1956 |
+
"current_month": current_month,
|
| 1957 |
+
"next_pay_date": next_pay.strftime("%B %d, %Y"),
|
| 1958 |
+
"departments": departments_result,
|
| 1959 |
+
"payment_history": payment_history,
|
| 1960 |
+
}
|
| 1961 |
+
|
| 1962 |
+
|
| 1963 |
+
@router.put("/salaries/set/{user_id}")
|
| 1964 |
+
async def admin_set_salary(user_id: str, req: SetSalaryRequest, admin: dict = Depends(require_admin)):
|
| 1965 |
+
"""Set or update the fixed salary for a staff member or manager."""
|
| 1966 |
+
if req.salary < 0:
|
| 1967 |
+
raise HTTPException(status_code=400, detail="Salary cannot be negative")
|
| 1968 |
+
|
| 1969 |
+
sb = get_supabase()
|
| 1970 |
+
|
| 1971 |
+
# Verify user exists and is staff or manager
|
| 1972 |
+
user = sb.table("users").select("id, role").eq("id", user_id).execute()
|
| 1973 |
+
if not user.data:
|
| 1974 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 1975 |
+
if user.data[0]["role"] not in ("seller", "manager"):
|
| 1976 |
+
raise HTTPException(status_code=400, detail="Can only set salary for staff or managers")
|
| 1977 |
+
|
| 1978 |
+
sb.table("users").update({"salary": req.salary}).eq("id", user_id).execute()
|
| 1979 |
+
|
| 1980 |
+
return {"message": f"Salary updated to PHP {req.salary:.2f}"}
|
| 1981 |
+
|
| 1982 |
+
|
| 1983 |
+
@router.post("/salaries/pay-all")
|
| 1984 |
+
async def admin_pay_all_salaries(admin: dict = Depends(require_admin)):
|
| 1985 |
+
"""Pay all staff and managers their remaining salary for the current month."""
|
| 1986 |
+
sb = get_supabase()
|
| 1987 |
+
admin_id = admin["sub"]
|
| 1988 |
+
|
| 1989 |
+
now = datetime.now(timezone.utc)
|
| 1990 |
+
current_month = now.strftime("%Y-%m")
|
| 1991 |
+
|
| 1992 |
+
# Get admin balance
|
| 1993 |
+
admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
|
| 1994 |
+
if not admin_bal.data:
|
| 1995 |
+
raise HTTPException(status_code=400, detail="Admin balance not found")
|
| 1996 |
+
admin_balance = float(admin_bal.data[0]["balance"])
|
| 1997 |
+
|
| 1998 |
+
# Get all managers and staff with salary > 0
|
| 1999 |
+
recipients = sb.table("users").select("id, full_name, role, department_id, salary").in_(
|
| 2000 |
+
"role", ["seller", "manager"]
|
| 2001 |
+
).execute()
|
| 2002 |
+
|
| 2003 |
+
if not recipients.data:
|
| 2004 |
+
raise HTTPException(status_code=400, detail="No staff or managers found")
|
| 2005 |
+
|
| 2006 |
+
# Get payments already made this month
|
| 2007 |
+
payments = sb.table("salary_payments").select("recipient_id, amount").eq("payment_month", current_month).execute()
|
| 2008 |
+
paid_map = {}
|
| 2009 |
+
for p in (payments.data or []):
|
| 2010 |
+
rid = p["recipient_id"]
|
| 2011 |
+
paid_map[rid] = paid_map.get(rid, 0) + float(p["amount"])
|
| 2012 |
+
|
| 2013 |
+
# Calculate remaining for each
|
| 2014 |
+
to_pay = []
|
| 2015 |
+
total_needed = 0
|
| 2016 |
+
for r in recipients.data:
|
| 2017 |
+
salary = float(r.get("salary", 0))
|
| 2018 |
+
if salary <= 0:
|
| 2019 |
+
continue
|
| 2020 |
+
paid = paid_map.get(r["id"], 0)
|
| 2021 |
+
remaining = max(salary - paid, 0)
|
| 2022 |
+
if remaining > 0:
|
| 2023 |
+
to_pay.append({"user": r, "amount": remaining})
|
| 2024 |
+
total_needed += remaining
|
| 2025 |
+
|
| 2026 |
+
if not to_pay:
|
| 2027 |
+
raise HTTPException(status_code=400, detail="No remaining salaries to pay this month")
|
| 2028 |
+
|
| 2029 |
+
if admin_balance < total_needed:
|
| 2030 |
+
raise HTTPException(
|
| 2031 |
+
status_code=400,
|
| 2032 |
+
detail=f"Insufficient balance. Need PHP {total_needed:.2f}, have PHP {admin_balance:.2f}"
|
| 2033 |
+
)
|
| 2034 |
+
|
| 2035 |
+
# Deduct from admin
|
| 2036 |
+
new_admin_balance = admin_balance - total_needed
|
| 2037 |
+
sb.table("user_balances").update({"balance": new_admin_balance}).eq("user_id", admin_id).execute()
|
| 2038 |
+
|
| 2039 |
+
# Credit each recipient and record payments
|
| 2040 |
+
paid_count = 0
|
| 2041 |
+
for item in to_pay:
|
| 2042 |
+
user = item["user"]
|
| 2043 |
+
amount = item["amount"]
|
| 2044 |
+
|
| 2045 |
+
# Credit recipient balance
|
| 2046 |
+
rec_bal = sb.table("user_balances").select("balance").eq("user_id", user["id"]).execute()
|
| 2047 |
+
if rec_bal.data:
|
| 2048 |
+
new_bal = float(rec_bal.data[0]["balance"]) + amount
|
| 2049 |
+
sb.table("user_balances").update({"balance": new_bal}).eq("user_id", user["id"]).execute()
|
| 2050 |
+
else:
|
| 2051 |
+
sb.table("user_balances").insert({"user_id": user["id"], "balance": amount}).execute()
|
| 2052 |
+
|
| 2053 |
+
# Record salary payment
|
| 2054 |
+
sb.table("salary_payments").insert({
|
| 2055 |
+
"admin_id": admin_id,
|
| 2056 |
+
"recipient_id": user["id"],
|
| 2057 |
+
"department_id": user.get("department_id"),
|
| 2058 |
+
"amount": amount,
|
| 2059 |
+
"payment_month": current_month,
|
| 2060 |
+
"notes": "Bulk pay all",
|
| 2061 |
+
}).execute()
|
| 2062 |
+
|
| 2063 |
+
# Record in SVF history for recipient
|
| 2064 |
+
sb.table("stored_value").insert({
|
| 2065 |
+
"user_id": user["id"],
|
| 2066 |
+
"transaction_type": "deposit",
|
| 2067 |
+
"amount": amount,
|
| 2068 |
+
}).execute()
|
| 2069 |
+
|
| 2070 |
+
paid_count += 1
|
| 2071 |
+
|
| 2072 |
+
# Record admin withdrawal in SVF
|
| 2073 |
+
sb.table("stored_value").insert({
|
| 2074 |
+
"user_id": admin_id,
|
| 2075 |
+
"transaction_type": "withdrawal",
|
| 2076 |
+
"amount": total_needed,
|
| 2077 |
+
}).execute()
|
| 2078 |
+
|
| 2079 |
+
return {
|
| 2080 |
+
"message": f"Successfully paid {paid_count} people a total of PHP {total_needed:.2f}",
|
| 2081 |
+
"total_paid": total_needed,
|
| 2082 |
+
"recipients_count": paid_count,
|
| 2083 |
+
"new_admin_balance": new_admin_balance,
|
| 2084 |
+
}
|
| 2085 |
+
|
| 2086 |
+
|
| 2087 |
+
@router.post("/salaries/pay-store/{department_id}")
|
| 2088 |
+
async def admin_pay_store_salaries(department_id: str, admin: dict = Depends(require_admin)):
|
| 2089 |
+
"""Pay all remaining salaries for a specific store/department."""
|
| 2090 |
+
sb = get_supabase()
|
| 2091 |
+
admin_id = admin["sub"]
|
| 2092 |
+
|
| 2093 |
+
now = datetime.now(timezone.utc)
|
| 2094 |
+
current_month = now.strftime("%Y-%m")
|
| 2095 |
+
|
| 2096 |
+
# Verify department exists
|
| 2097 |
+
dept = sb.table("departments").select("id, name, manager_id").eq("id", department_id).execute()
|
| 2098 |
+
if not dept.data:
|
| 2099 |
+
raise HTTPException(status_code=404, detail="Department not found")
|
| 2100 |
+
|
| 2101 |
+
# Get admin balance
|
| 2102 |
+
admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
|
| 2103 |
+
if not admin_bal.data:
|
| 2104 |
+
raise HTTPException(status_code=400, detail="Admin balance not found")
|
| 2105 |
+
admin_balance = float(admin_bal.data[0]["balance"])
|
| 2106 |
+
|
| 2107 |
+
# Get staff in this department
|
| 2108 |
+
staff = sb.table("users").select("id, full_name, role, department_id, salary").eq(
|
| 2109 |
+
"department_id", department_id
|
| 2110 |
+
).in_("role", ["seller", "manager"]).execute()
|
| 2111 |
+
|
| 2112 |
+
# Also include manager via departments.manager_id
|
| 2113 |
+
manager_id = dept.data[0].get("manager_id")
|
| 2114 |
+
all_recipients = list(staff.data or [])
|
| 2115 |
+
if manager_id:
|
| 2116 |
+
mgr = sb.table("users").select("id, full_name, role, department_id, salary").eq("id", manager_id).execute()
|
| 2117 |
+
if mgr.data:
|
| 2118 |
+
existing_ids = {r["id"] for r in all_recipients}
|
| 2119 |
+
if mgr.data[0]["id"] not in existing_ids:
|
| 2120 |
+
all_recipients.append(mgr.data[0])
|
| 2121 |
+
|
| 2122 |
+
if not all_recipients:
|
| 2123 |
+
raise HTTPException(status_code=400, detail="No staff or managers in this department")
|
| 2124 |
+
|
| 2125 |
+
# Get existing payments this month
|
| 2126 |
+
recipient_ids = [r["id"] for r in all_recipients]
|
| 2127 |
+
payments = sb.table("salary_payments").select("recipient_id, amount").eq(
|
| 2128 |
+
"payment_month", current_month
|
| 2129 |
+
).in_("recipient_id", recipient_ids).execute()
|
| 2130 |
+
|
| 2131 |
+
paid_map = {}
|
| 2132 |
+
for p in (payments.data or []):
|
| 2133 |
+
rid = p["recipient_id"]
|
| 2134 |
+
paid_map[rid] = paid_map.get(rid, 0) + float(p["amount"])
|
| 2135 |
+
|
| 2136 |
+
to_pay = []
|
| 2137 |
+
total_needed = 0
|
| 2138 |
+
for r in all_recipients:
|
| 2139 |
+
salary = float(r.get("salary", 0))
|
| 2140 |
+
if salary <= 0:
|
| 2141 |
+
continue
|
| 2142 |
+
paid = paid_map.get(r["id"], 0)
|
| 2143 |
+
remaining = max(salary - paid, 0)
|
| 2144 |
+
if remaining > 0:
|
| 2145 |
+
to_pay.append({"user": r, "amount": remaining})
|
| 2146 |
+
total_needed += remaining
|
| 2147 |
+
|
| 2148 |
+
if not to_pay:
|
| 2149 |
+
raise HTTPException(status_code=400, detail="No remaining salaries to pay in this department")
|
| 2150 |
+
|
| 2151 |
+
if admin_balance < total_needed:
|
| 2152 |
+
raise HTTPException(
|
| 2153 |
+
status_code=400,
|
| 2154 |
+
detail=f"Insufficient balance. Need PHP {total_needed:.2f}, have PHP {admin_balance:.2f}"
|
| 2155 |
+
)
|
| 2156 |
+
|
| 2157 |
+
# Deduct from admin
|
| 2158 |
+
new_admin_balance = admin_balance - total_needed
|
| 2159 |
+
sb.table("user_balances").update({"balance": new_admin_balance}).eq("user_id", admin_id).execute()
|
| 2160 |
+
|
| 2161 |
+
paid_count = 0
|
| 2162 |
+
for item in to_pay:
|
| 2163 |
+
user = item["user"]
|
| 2164 |
+
amount = item["amount"]
|
| 2165 |
+
|
| 2166 |
+
rec_bal = sb.table("user_balances").select("balance").eq("user_id", user["id"]).execute()
|
| 2167 |
+
if rec_bal.data:
|
| 2168 |
+
new_bal = float(rec_bal.data[0]["balance"]) + amount
|
| 2169 |
+
sb.table("user_balances").update({"balance": new_bal}).eq("user_id", user["id"]).execute()
|
| 2170 |
+
else:
|
| 2171 |
+
sb.table("user_balances").insert({"user_id": user["id"], "balance": amount}).execute()
|
| 2172 |
+
|
| 2173 |
+
sb.table("salary_payments").insert({
|
| 2174 |
+
"admin_id": admin_id,
|
| 2175 |
+
"recipient_id": user["id"],
|
| 2176 |
+
"department_id": department_id,
|
| 2177 |
+
"amount": amount,
|
| 2178 |
+
"payment_month": current_month,
|
| 2179 |
+
"notes": f"Store pay: {dept.data[0]['name']}",
|
| 2180 |
+
}).execute()
|
| 2181 |
+
|
| 2182 |
+
sb.table("stored_value").insert({
|
| 2183 |
+
"user_id": user["id"],
|
| 2184 |
+
"transaction_type": "deposit",
|
| 2185 |
+
"amount": amount,
|
| 2186 |
+
}).execute()
|
| 2187 |
+
|
| 2188 |
+
paid_count += 1
|
| 2189 |
+
|
| 2190 |
+
sb.table("stored_value").insert({
|
| 2191 |
+
"user_id": admin_id,
|
| 2192 |
+
"transaction_type": "withdrawal",
|
| 2193 |
+
"amount": total_needed,
|
| 2194 |
+
}).execute()
|
| 2195 |
+
|
| 2196 |
+
return {
|
| 2197 |
+
"message": f"Paid {paid_count} people in {dept.data[0]['name']} — PHP {total_needed:.2f}",
|
| 2198 |
+
"total_paid": total_needed,
|
| 2199 |
+
"recipients_count": paid_count,
|
| 2200 |
+
"new_admin_balance": new_admin_balance,
|
| 2201 |
+
}
|
| 2202 |
+
|
| 2203 |
+
|
| 2204 |
+
@router.post("/salaries/pay-individual")
|
| 2205 |
+
async def admin_pay_individual(req: PayIndividualRequest, admin: dict = Depends(require_admin)):
|
| 2206 |
+
"""Pay a specific amount to a specific staff member or manager."""
|
| 2207 |
+
if req.amount <= 0:
|
| 2208 |
+
raise HTTPException(status_code=400, detail="Amount must be greater than 0")
|
| 2209 |
+
|
| 2210 |
+
sb = get_supabase()
|
| 2211 |
+
admin_id = admin["sub"]
|
| 2212 |
+
|
| 2213 |
+
now = datetime.now(timezone.utc)
|
| 2214 |
+
current_month = now.strftime("%Y-%m")
|
| 2215 |
+
|
| 2216 |
+
# Verify recipient
|
| 2217 |
+
recipient = sb.table("users").select("id, full_name, role, department_id, salary").eq("id", req.recipient_id).execute()
|
| 2218 |
+
if not recipient.data:
|
| 2219 |
+
raise HTTPException(status_code=404, detail="Recipient not found")
|
| 2220 |
+
if recipient.data[0]["role"] not in ("seller", "manager"):
|
| 2221 |
+
raise HTTPException(status_code=400, detail="Can only pay staff or managers")
|
| 2222 |
+
|
| 2223 |
+
# Get admin balance
|
| 2224 |
+
admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
|
| 2225 |
+
if not admin_bal.data:
|
| 2226 |
+
raise HTTPException(status_code=400, detail="Admin balance not found")
|
| 2227 |
+
admin_balance = float(admin_bal.data[0]["balance"])
|
| 2228 |
+
|
| 2229 |
+
if admin_balance < req.amount:
|
| 2230 |
+
raise HTTPException(
|
| 2231 |
+
status_code=400,
|
| 2232 |
+
detail=f"Insufficient balance. Need PHP {req.amount:.2f}, have PHP {admin_balance:.2f}"
|
| 2233 |
+
)
|
| 2234 |
+
|
| 2235 |
+
# Deduct from admin
|
| 2236 |
+
new_admin_balance = admin_balance - req.amount
|
| 2237 |
+
sb.table("user_balances").update({"balance": new_admin_balance}).eq("user_id", admin_id).execute()
|
| 2238 |
+
|
| 2239 |
+
# Credit recipient
|
| 2240 |
+
rec_bal = sb.table("user_balances").select("balance").eq("user_id", req.recipient_id).execute()
|
| 2241 |
+
if rec_bal.data:
|
| 2242 |
+
new_bal = float(rec_bal.data[0]["balance"]) + req.amount
|
| 2243 |
+
sb.table("user_balances").update({"balance": new_bal}).eq("user_id", req.recipient_id).execute()
|
| 2244 |
+
else:
|
| 2245 |
+
sb.table("user_balances").insert({"user_id": req.recipient_id, "balance": req.amount}).execute()
|
| 2246 |
+
|
| 2247 |
+
# Record payment
|
| 2248 |
+
sb.table("salary_payments").insert({
|
| 2249 |
+
"admin_id": admin_id,
|
| 2250 |
+
"recipient_id": req.recipient_id,
|
| 2251 |
+
"department_id": recipient.data[0].get("department_id"),
|
| 2252 |
+
"amount": req.amount,
|
| 2253 |
+
"payment_month": current_month,
|
| 2254 |
+
"notes": f"Individual payment to {recipient.data[0]['full_name']}",
|
| 2255 |
+
}).execute()
|
| 2256 |
+
|
| 2257 |
+
# SVF records
|
| 2258 |
+
sb.table("stored_value").insert({
|
| 2259 |
+
"user_id": req.recipient_id,
|
| 2260 |
+
"transaction_type": "deposit",
|
| 2261 |
+
"amount": req.amount,
|
| 2262 |
+
}).execute()
|
| 2263 |
+
|
| 2264 |
+
sb.table("stored_value").insert({
|
| 2265 |
+
"user_id": admin_id,
|
| 2266 |
+
"transaction_type": "withdrawal",
|
| 2267 |
+
"amount": req.amount,
|
| 2268 |
+
}).execute()
|
| 2269 |
+
|
| 2270 |
+
return {
|
| 2271 |
+
"message": f"Paid PHP {req.amount:.2f} to {recipient.data[0]['full_name']}",
|
| 2272 |
+
"new_admin_balance": new_admin_balance,
|
| 2273 |
+
}
|
backend/routes/auth.py
ADDED
|
@@ -0,0 +1,339 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication routes — register, login, get current user.
|
| 3 |
+
Uses JWT tokens and bcrypt password hashing.
|
| 4 |
+
Roles: buyer, seller, admin
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import bcrypt
import jwt
from datetime import datetime, timedelta, timezone
from typing import Optional

from fastapi import APIRouter, HTTPException, Depends
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from pydantic import BaseModel

from config import JWT_SECRET, JWT_ALGORITHM, JWT_EXPIRATION_HOURS
from database import get_supabase
|
| 15 |
+
|
| 16 |
+
# All endpoints below are mounted under /auth.
router = APIRouter(prefix="/auth", tags=["Authentication"])
# HTTPBearer extracts the "Authorization: Bearer <jwt>" header for verify_token.
security = HTTPBearer()
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# --- Request/Response Models ---
|
| 21 |
+
|
| 22 |
+
class RegisterRequest(BaseModel):
    """Payload for /auth/register (and /auth/admin/register, which ignores `role`)."""
    email: str
    password: str
    full_name: str
    role: str = "buyer"  # validated in register(): must be 'buyer', 'seller', or 'delivery'
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class LoginRequest(BaseModel):
    """Credentials payload shared by /auth/login and /auth/admin/login."""
    email: str
    password: str
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class UserResponse(BaseModel):
    """Public view of a user row returned by the auth endpoints."""
    id: str
    email: str
    full_name: str
    role: str
    is_banned: bool = False
    created_at: str
    # Empty string (not null) when the user has no department/manager assignment.
    department_id: str = ""
    manager_id: str = ""
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class TokenResponse(BaseModel):
    """JWT plus the authenticated user's profile, returned by login/register."""
    access_token: str
    token_type: str = "bearer"
    user: UserResponse
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# --- JWT Helpers ---
|
| 52 |
+
|
| 53 |
+
def create_token(user_id: str, email: str, role: str = "buyer") -> str:
    """Sign and return a JWT carrying the user's id (sub), email, and role.

    The token expires JWT_EXPIRATION_HOURS after issuance.
    """
    issued_at = datetime.now(timezone.utc)
    claims = {
        "sub": user_id,
        "email": email,
        "role": role,
        "exp": issued_at + timedelta(hours=JWT_EXPIRATION_HOURS),
        "iat": issued_at,
    }
    return jwt.encode(claims, JWT_SECRET, algorithm=JWT_ALGORITHM)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)) -> dict:
    """Dependency that verifies JWT token and returns the payload.

    Returns the decoded claims dict ("sub", "email", "role", "exp", "iat").
    Raises 401 for an expired or otherwise invalid token; signature and
    expiry are both checked by jwt.decode.
    """
    try:
        payload = jwt.decode(credentials.credentials, JWT_SECRET, algorithms=[JWT_ALGORITHM])
        return payload
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
# Dependency alias for use in other routes; yields the decoded JWT claims dict
# (the caller's user id is claims["sub"]).
get_current_user = verify_token
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
# --- Routes ---
|
| 80 |
+
|
| 81 |
+
@router.post("/register", response_model=TokenResponse)
async def register(req: RegisterRequest):
    """Create a new account and return a signed token plus the user profile.

    Accepted roles: buyer, seller, delivery. The email must be unused.
    A zero-balance wallet row is created alongside the user.
    """
    if req.role not in ("buyer", "seller", "delivery"):
        raise HTTPException(status_code=400, detail="Role must be 'buyer', 'seller', or 'delivery'")

    sb = get_supabase()

    # Reject duplicate emails up front.
    duplicate = sb.table("users").select("id").eq("email", req.email).execute()
    if duplicate.data:
        raise HTTPException(status_code=400, detail="Email already registered")

    # bcrypt handles salting; store the hash as text.
    hashed = bcrypt.hashpw(req.password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")

    created = sb.table("users").insert({
        "email": req.email,
        "password_hash": hashed,
        "full_name": req.full_name,
        "role": req.role,
        "is_banned": False,
    }).execute()
    if not created.data:
        raise HTTPException(status_code=500, detail="Failed to create user")
    new_user = created.data[0]

    # Every account starts with an empty wallet.
    sb.table("user_balances").insert({
        "user_id": new_user["id"],
        "balance": 0.00,
    }).execute()

    jwt_token = create_token(new_user["id"], new_user["email"], role=new_user["role"])

    profile = UserResponse(
        id=new_user["id"],
        email=new_user["email"],
        full_name=new_user["full_name"],
        role=new_user["role"],
        is_banned=False,
        created_at=new_user["created_at"],
    )
    return TokenResponse(access_token=jwt_token, user=profile)
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
@router.post("/login", response_model=TokenResponse)
async def login(req: LoginRequest):
    """Login with email and password.

    The password is verified BEFORE any account-status detail is disclosed,
    so an unauthenticated caller cannot probe whether an email belongs to a
    banned account or an admin. Admin accounts must use /auth/admin/login.
    """
    sb = get_supabase()

    result = sb.table("users").select("*").eq("email", req.email).execute()
    if not result.data:
        raise HTTPException(status_code=401, detail="Invalid email or password")

    user = result.data[0]

    # Verify password first — previously the banned/admin checks ran before
    # this, leaking account status to callers who never authenticated.
    if not bcrypt.checkpw(req.password.encode("utf-8"), user["password_hash"].encode("utf-8")):
        raise HTTPException(status_code=401, detail="Invalid email or password")

    # Block admin accounts from using the main login — they must use /auth/admin/login.
    # Deliberately indistinguishable from a bad password.
    if user.get("role") == "admin":
        raise HTTPException(status_code=401, detail="Invalid email or password")

    # Authenticated but banned users get an explicit 403.
    if user.get("is_banned"):
        raise HTTPException(status_code=403, detail="Your account has been banned. Contact admin for support.")

    token = create_token(user["id"], user["email"], role=user["role"])

    return TokenResponse(
        access_token=token,
        user=UserResponse(
            id=user["id"],
            email=user["email"],
            full_name=user["full_name"],
            role=user["role"],
            is_banned=user.get("is_banned", False),
            created_at=user["created_at"],
            department_id=user.get("department_id") or "",
            manager_id=user.get("manager_id") or "",
        ),
    )
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
# Admin login endpoint
|
| 175 |
+
@router.post("/admin/login", response_model=TokenResponse)
|
| 176 |
+
async def admin_login(req: LoginRequest):
|
| 177 |
+
"""Login as admin. Only users with role='admin' can login here."""
|
| 178 |
+
sb = get_supabase()
|
| 179 |
+
|
| 180 |
+
result = sb.table("users").select("*").eq("email", req.email).execute()
|
| 181 |
+
if not result.data:
|
| 182 |
+
raise HTTPException(status_code=401, detail="Invalid email or password")
|
| 183 |
+
|
| 184 |
+
user = result.data[0]
|
| 185 |
+
|
| 186 |
+
if user.get("role") != "admin":
|
| 187 |
+
raise HTTPException(status_code=403, detail="This account is not an admin")
|
| 188 |
+
|
| 189 |
+
if not bcrypt.checkpw(req.password.encode("utf-8"), user["password_hash"].encode("utf-8")):
|
| 190 |
+
raise HTTPException(status_code=401, detail="Invalid email or password")
|
| 191 |
+
|
| 192 |
+
token = create_token(user["id"], user["email"], role="admin")
|
| 193 |
+
|
| 194 |
+
return TokenResponse(
|
| 195 |
+
access_token=token,
|
| 196 |
+
user=UserResponse(
|
| 197 |
+
id=user["id"],
|
| 198 |
+
email=user["email"],
|
| 199 |
+
full_name=user["full_name"],
|
| 200 |
+
role="admin",
|
| 201 |
+
is_banned=user.get("is_banned", False),
|
| 202 |
+
created_at=user["created_at"],
|
| 203 |
+
),
|
| 204 |
+
)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
# Admin registration (only if no admin exists yet)
|
| 208 |
+
@router.post("/admin/register", response_model=TokenResponse)
|
| 209 |
+
async def admin_register(req: RegisterRequest):
|
| 210 |
+
"""Register as admin. Only works if no admin exists yet."""
|
| 211 |
+
sb = get_supabase()
|
| 212 |
+
|
| 213 |
+
# Check if an admin already exists
|
| 214 |
+
existing_admin = sb.table("users").select("id").eq("role", "admin").execute()
|
| 215 |
+
if existing_admin.data:
|
| 216 |
+
raise HTTPException(status_code=400, detail="Admin account already exists. Contact the existing admin.")
|
| 217 |
+
|
| 218 |
+
# Check if email already exists
|
| 219 |
+
existing = sb.table("users").select("id").eq("email", req.email).execute()
|
| 220 |
+
if existing.data:
|
| 221 |
+
raise HTTPException(status_code=400, detail="Email already registered")
|
| 222 |
+
|
| 223 |
+
password_hash = bcrypt.hashpw(req.password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
|
| 224 |
+
|
| 225 |
+
result = sb.table("users").insert({
|
| 226 |
+
"email": req.email,
|
| 227 |
+
"password_hash": password_hash,
|
| 228 |
+
"full_name": req.full_name,
|
| 229 |
+
"role": "admin",
|
| 230 |
+
"is_banned": False,
|
| 231 |
+
}).execute()
|
| 232 |
+
|
| 233 |
+
if not result.data:
|
| 234 |
+
raise HTTPException(status_code=500, detail="Failed to create admin account")
|
| 235 |
+
|
| 236 |
+
user = result.data[0]
|
| 237 |
+
|
| 238 |
+
sb.table("user_balances").insert({
|
| 239 |
+
"user_id": user["id"],
|
| 240 |
+
"balance": 0.00,
|
| 241 |
+
}).execute()
|
| 242 |
+
|
| 243 |
+
token = create_token(user["id"], user["email"], role="admin")
|
| 244 |
+
|
| 245 |
+
return TokenResponse(
|
| 246 |
+
access_token=token,
|
| 247 |
+
user=UserResponse(
|
| 248 |
+
id=user["id"],
|
| 249 |
+
email=user["email"],
|
| 250 |
+
full_name=user["full_name"],
|
| 251 |
+
role="admin",
|
| 252 |
+
is_banned=False,
|
| 253 |
+
created_at=user["created_at"],
|
| 254 |
+
),
|
| 255 |
+
)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
@router.get("/me", response_model=UserResponse)
async def get_me(current_user: dict = Depends(get_current_user)):
    """Return the profile of the user identified by the bearer token."""
    sb = get_supabase()
    rows = sb.table("users").select("*").eq("id", current_user["sub"]).execute()
    if not rows.data:
        raise HTTPException(status_code=404, detail="User not found")

    record = rows.data[0]
    return UserResponse(
        id=record["id"],
        email=record["email"],
        full_name=record["full_name"],
        role=record["role"],
        is_banned=record.get("is_banned", False),
        created_at=record["created_at"],
        # Normalize missing/null assignments to empty strings for the client.
        department_id=record.get("department_id") or "",
        manager_id=record.get("manager_id") or "",
    )
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
@router.get("/profile")
async def get_profile(current_user: dict = Depends(get_current_user)):
    """Return the full profile: account fields plus wallet balance and contact number."""
    sb = get_supabase()
    uid = current_user["sub"]

    rows = sb.table("users").select("*").eq("id", uid).execute()
    if not rows.data:
        raise HTTPException(status_code=404, detail="User not found")
    record = rows.data[0]

    # Wallet balance defaults to 0.0 when no balance row exists yet.
    bal_rows = sb.table("user_balances").select("balance").eq("user_id", uid).execute()
    wallet = float(bal_rows.data[0]["balance"]) if bal_rows.data else 0.0

    # Contact number is optional — empty string when unset.
    contact_rows = sb.table("user_contacts").select("contact_number").eq("user_id", uid).execute()
    phone = contact_rows.data[0]["contact_number"] if contact_rows.data else ""

    return {
        "id": record["id"],
        "email": record["email"],
        "full_name": record["full_name"],
        "role": record["role"],
        "balance": wallet,
        "contact_number": phone,
        "department_id": record.get("department_id") or "",
        "manager_id": record.get("manager_id") or "",
        "created_at": record["created_at"],
    }
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
class ProfileUpdateRequest(BaseModel):
    """Partial profile update; omitted fields are left unchanged.

    Fields were previously annotated `str = None`, which mistypes the
    default; Optional[str] declares the nullable contract correctly.
    """
    full_name: Optional[str] = None
    email: Optional[str] = None
    contact_number: Optional[str] = None
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
@router.put("/profile")
async def update_profile(req: ProfileUpdateRequest, current_user: dict = Depends(get_current_user)):
    """Update the current user's profile.

    Only the fields present (and non-empty, for name/email) are changed.
    Raises 400 if the requested email already belongs to another account —
    previously this was unchecked, allowing duplicate emails that break login.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    updates = {}
    if req.full_name:
        updates["full_name"] = req.full_name
    if req.email:
        # Email must stay unique across accounts (excluding the caller's own row).
        taken = sb.table("users").select("id").eq("email", req.email).neq("id", user_id).execute()
        if taken.data:
            raise HTTPException(status_code=400, detail="Email already registered")
        updates["email"] = req.email

    if updates:
        sb.table("users").update(updates).eq("id", user_id).execute()

    # Contact number: explicit None means "don't touch"; empty string clears it.
    if req.contact_number is not None:
        existing = sb.table("user_contacts").select("user_id").eq("user_id", user_id).execute()
        if existing.data:
            sb.table("user_contacts").update({"contact_number": req.contact_number}).eq("user_id", user_id).execute()
        else:
            sb.table("user_contacts").insert({"user_id": user_id, "contact_number": req.contact_number}).execute()

    return {"message": "Profile updated successfully"}
|
backend/routes/cart.py
ADDED
|
@@ -0,0 +1,391 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Cart routes — shopping cart for buyers.
|
| 3 |
+
One flat ₱90 delivery fee per unique department/store (per group).
|
| 4 |
+
Checkout creates one transaction per cart item, grouped by group_id per department.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from typing import Optional
|
| 10 |
+
import uuid
|
| 11 |
+
from database import get_supabase
|
| 12 |
+
from routes.auth import get_current_user
|
| 13 |
+
|
| 14 |
+
# All cart endpoints are mounted under /cart.
router = APIRouter(prefix="/cart", tags=["Cart"])

# Flat delivery fee charged once per unique department/store in an order (PHP).
DELIVERY_FEE_PER_DEPARTMENT = 90.00
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AddToCartRequest(BaseModel):
    """Payload for /cart/add; quantity is added to any existing cart line."""
    product_id: str
    quantity: int = 1
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class UpdateCartRequest(BaseModel):
    """Payload for /cart/update; quantity REPLACES the existing cart quantity."""
    product_id: str
    quantity: int
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class CartItemResponse(BaseModel):
    """One cart line joined with its product and seller display data."""
    id: str           # cart_items row id
    product_id: str
    title: str
    description: str
    price: float      # unit price
    quantity: int
    subtotal: float   # price * quantity, rounded to 2 decimals
    seller_id: str
    seller_name: str  # department name when the seller has one, else the seller's full name
    image_url: str    # first product image, or "" when none
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class CartResponse(BaseModel):
    """Full cart view with the delivery-fee breakdown used at checkout."""
    items: list[CartItemResponse]
    # Number of delivery units: unique departments plus independent sellers.
    departments_count: int
    delivery_fee_per_department: float
    total_delivery_fee: float
    products_total: float
    grand_total: float  # products_total + total_delivery_fee
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# --- Routes ---
|
| 52 |
+
|
| 53 |
+
@router.get("/", response_model=CartResponse)
async def get_cart(current_user: dict = Depends(get_current_user)):
    """Get the current buyer's cart with delivery fee breakdown.

    Joins each cart line with its product, resolves the seller's display
    name (department name when available, else the seller's full name),
    and charges one flat delivery fee per delivery unit — a "unit" being
    either a department or an individual seller with no department.
    NOTE(review): seller/department lookups are one query per distinct
    seller (N+1 pattern) — acceptable for small carts, worth batching if
    carts grow.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    # One round trip: cart lines with embedded product rows.
    cart_data = sb.table("cart_items").select(
        "*, products(id, title, description, price, seller_id, images, stock)"
    ).eq("buyer_id", user_id).order("created_at", desc=False).execute()

    items = []
    department_ids = set()       # departments seen in this cart
    independent_sellers = set()  # sellers with no department (each is its own delivery unit)
    products_total = 0.0
    seller_name_cache = {}       # seller_id -> display name (memoizes the per-seller query)
    seller_dept_cache = {}       # seller_id -> department_id or None

    for c in (cart_data.data or []):
        prod = c.get("products")
        # Skip orphaned cart lines whose product join came back empty.
        if not prod:
            continue

        price = float(prod["price"])
        qty = int(c["quantity"])
        subtotal = price * qty
        products_total += subtotal

        # Look up seller name and department (cached)
        sid = prod["seller_id"]
        if sid not in seller_name_cache:
            seller_resp = sb.table("users").select("full_name, department_id").eq("id", sid).execute()
            if seller_resp.data:
                full_name = seller_resp.data[0]["full_name"]
                dept_id_val = seller_resp.data[0].get("department_id")
                seller_dept_cache[sid] = dept_id_val
                if dept_id_val:
                    # Prefer the department (store) name when one exists.
                    dept_resp = sb.table("departments").select("name").eq("id", dept_id_val).execute()
                    if dept_resp.data:
                        seller_name_cache[sid] = dept_resp.data[0]["name"]
                    else:
                        seller_name_cache[sid] = full_name
                else:
                    seller_name_cache[sid] = full_name
            else:
                # Seller row missing — fall back to a generic label.
                seller_name_cache[sid] = "Seller"
                seller_dept_cache[sid] = None
        seller_name = seller_name_cache[sid]

        # Track unique departments (independent sellers each count as one unit)
        dept_id = seller_dept_cache.get(sid)
        if dept_id:
            department_ids.add(dept_id)
        else:
            independent_sellers.add(sid)

        images = prod.get("images") or []

        items.append(CartItemResponse(
            id=c["id"],
            product_id=prod["id"],
            title=prod["title"],
            description=prod.get("description", ""),
            price=price,
            quantity=qty,
            subtotal=round(subtotal, 2),
            seller_id=prod["seller_id"],
            seller_name=seller_name,
            image_url=images[0] if images else "",
        ))

    # Delivery fee per unique department (independent sellers each count as one unit)
    delivery_units = len(department_ids) + len(independent_sellers)
    total_delivery = delivery_units * DELIVERY_FEE_PER_DEPARTMENT

    return CartResponse(
        items=items,
        departments_count=delivery_units,
        delivery_fee_per_department=DELIVERY_FEE_PER_DEPARTMENT,
        total_delivery_fee=total_delivery,
        products_total=round(products_total, 2),
        grand_total=round(products_total + total_delivery, 2),
    )
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
@router.post("/add")
async def add_to_cart(req: AddToCartRequest, current_user: dict = Depends(get_current_user)):
    """Add a product to the buyer's cart, merging with an existing line if present.

    The requested quantity (or the merged total) must not exceed current stock,
    and sellers cannot cart their own products.
    """
    sb = get_supabase()
    buyer = current_user["sub"]

    if req.quantity < 1:
        raise HTTPException(status_code=400, detail="Quantity must be at least 1")

    # Only active, approved products can be carted.
    prod_rows = sb.table("products").select("id, stock, seller_id").eq("id", req.product_id).eq("is_active", True).eq("status", "approved").execute()
    if not prod_rows.data:
        raise HTTPException(status_code=404, detail="Product not found")
    product = prod_rows.data[0]

    if product["seller_id"] == buyer:
        raise HTTPException(status_code=400, detail="Cannot add your own product to cart")

    line = sb.table("cart_items").select("id, quantity").eq("buyer_id", buyer).eq("product_id", req.product_id).execute()

    if not line.data:
        # Fresh cart line.
        if req.quantity > product["stock"]:
            raise HTTPException(status_code=400, detail=f"Not enough stock. Available: {product['stock']}")
        sb.table("cart_items").insert({
            "buyer_id": buyer,
            "product_id": req.product_id,
            "quantity": req.quantity,
        }).execute()
        return {"message": "Added to cart"}

    # Existing line: bump the quantity, capped by stock.
    merged_qty = line.data[0]["quantity"] + req.quantity
    if merged_qty > product["stock"]:
        raise HTTPException(status_code=400, detail=f"Not enough stock. Available: {product['stock']}")
    sb.table("cart_items").update({"quantity": merged_qty}).eq("id", line.data[0]["id"]).execute()
    return {"message": f"Cart updated. Quantity: {merged_qty}"}
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
@router.put("/update")
async def update_cart_item(req: UpdateCartRequest, current_user: dict = Depends(get_current_user)):
    """Set the quantity of an existing cart line.

    Validates the new quantity against current product stock (consistent
    with /cart/add) — previously no stock check ran here, letting a buyer
    raise the quantity past availability only to fail later at checkout.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    if req.quantity < 1:
        raise HTTPException(status_code=400, detail="Quantity must be at least 1")

    existing = sb.table("cart_items").select("id").eq("buyer_id", user_id).eq("product_id", req.product_id).execute()
    if not existing.data:
        raise HTTPException(status_code=404, detail="Item not in cart")

    # Enforce the same stock cap as /cart/add.
    prod = sb.table("products").select("stock").eq("id", req.product_id).execute()
    if prod.data and req.quantity > int(prod.data[0]["stock"]):
        raise HTTPException(status_code=400, detail=f"Not enough stock. Available: {prod.data[0]['stock']}")

    sb.table("cart_items").update({"quantity": req.quantity}).eq("id", existing.data[0]["id"]).execute()
    return {"message": f"Quantity updated to {req.quantity}"}
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
@router.delete("/remove/{product_id}")
async def remove_from_cart(product_id: str, current_user: dict = Depends(get_current_user)):
    """Delete a single product line from the buyer's cart (no-op if absent)."""
    buyer = current_user["sub"]
    sb = get_supabase()
    sb.table("cart_items").delete().eq("buyer_id", buyer).eq("product_id", product_id).execute()
    return {"message": "Removed from cart"}
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@router.delete("/clear")
async def clear_cart(current_user: dict = Depends(get_current_user)):
    """Remove every line from the buyer's cart."""
    buyer = current_user["sub"]
    sb = get_supabase()
    sb.table("cart_items").delete().eq("buyer_id", buyer).execute()
    return {"message": "Cart cleared"}
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
class CheckoutRequest(BaseModel):
    """Empty request body kept for backwards-compat; all orders are delivery now."""
    pass  # Kept for backwards-compat; all orders are delivery now
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
@router.post("/checkout")
async def checkout_cart(req: CheckoutRequest = CheckoutRequest(), current_user: dict = Depends(get_current_user)):
    """
    Checkout all items in cart.
    - Delivery: ₱90 delivery fee per unique department (one group per department).
    - All items in the same department share a group_id.
    - The first item in each department group carries the delivery fee; rest are ₱0.

    Flow: validate buyer → require contact + delivery address → validate
    stock and totals → check wallet balance → create one pending
    product_transactions row per cart item → decrement stock → deduct the
    buyer's balance → log the wallet "purchase" entry → clear the cart.

    NOTE(review): the stock decrement and balance deduction are separate
    read-modify-write round trips with no DB transaction — concurrent
    checkouts can oversell or double-spend; confirm whether this needs an
    atomic RPC/row-locking on the Supabase side.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    # 1. Check user is buyer
    user_result = sb.table("users").select("role, is_banned").eq("id", user_id).execute()
    if not user_result.data:
        raise HTTPException(status_code=404, detail="User not found")
    if user_result.data[0].get("is_banned"):
        raise HTTPException(status_code=403, detail="Your account has been banned")
    if user_result.data[0]["role"] == "admin":
        raise HTTPException(status_code=403, detail="Admin accounts cannot purchase products")
    if user_result.data[0]["role"] != "buyer":
        raise HTTPException(status_code=403, detail="Only buyers can checkout")

    # 2. Check contact number and delivery address
    contact = sb.table("user_contacts").select("contact_number, delivery_address").eq("user_id", user_id).execute()
    if not contact.data:
        raise HTTPException(status_code=400, detail="Please add your contact number before placing an order")

    delivery_address = (contact.data[0].get("delivery_address") or "").strip()
    if not delivery_address:
        raise HTTPException(status_code=400, detail="Please set your delivery address before placing a delivery order")

    # 3. Get cart items with product info
    cart_data = sb.table("cart_items").select(
        "*, products(id, title, price, seller_id, stock)"
    ).eq("buyer_id", user_id).execute()

    if not cart_data.data or len(cart_data.data) == 0:
        raise HTTPException(status_code=400, detail="Your cart is empty")

    # 4. Validate stock and calculate totals
    items = []
    products_total = 0.0
    seller_dept_cache = {}  # seller_id -> department_id or None (one query per seller)

    for c in cart_data.data:
        prod = c.get("products")
        # Skip orphaned cart lines whose product join came back empty.
        if not prod:
            continue

        if c["quantity"] > prod["stock"]:
            raise HTTPException(
                status_code=400,
                detail=f"Not enough stock for '{prod['title']}'. Available: {prod['stock']}, in cart: {c['quantity']}"
            )

        price = float(prod["price"])
        subtotal = price * c["quantity"]
        products_total += subtotal

        # Look up seller's department (cached)
        sid = prod["seller_id"]
        if sid not in seller_dept_cache:
            seller_info = sb.table("users").select("department_id").eq("id", sid).execute()
            seller_dept_cache[sid] = seller_info.data[0].get("department_id") if seller_info.data else None

        dept_id = seller_dept_cache[sid]
        # Use department_id as delivery unit key, or seller_id for independent sellers
        delivery_unit = dept_id if dept_id else f"ind_{sid}"

        items.append({
            "cart_id": c["id"],
            "product_id": prod["id"],
            "seller_id": prod["seller_id"],
            "title": prod["title"],
            "quantity": c["quantity"],
            "price": price,
            "subtotal": subtotal,
            "delivery_unit": delivery_unit,
        })

    # One group_id per delivery unit — generated fresh for this checkout
    delivery_units = set(i["delivery_unit"] for i in items)
    group_id_map = {unit: str(uuid.uuid4()) for unit in delivery_units}

    total_delivery = len(delivery_units) * DELIVERY_FEE_PER_DEPARTMENT

    # Assign full ₱90 to first item per delivery unit; ₱0 to the rest
    # (keyed by product_id — cart lines are unique per (buyer, product), so safe).
    delivery_fee_per_item = {}
    seen_units: set = set()
    for item in items:
        unit = item["delivery_unit"]
        if unit not in seen_units:
            delivery_fee_per_item[item["product_id"]] = DELIVERY_FEE_PER_DEPARTMENT
            seen_units.add(unit)
        else:
            delivery_fee_per_item[item["product_id"]] = 0.0

    grand_total = products_total + total_delivery

    # 5. Check balance
    balance_result = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
    if not balance_result.data:
        raise HTTPException(status_code=400, detail="No balance found. Top up your wallet first.")

    buyer_balance = float(balance_result.data[0]["balance"])
    if buyer_balance < grand_total:
        raise HTTPException(
            status_code=400,
            detail=f"Insufficient balance. You have PHP {buyer_balance:.2f}, total is PHP {grand_total:.2f}"
        )

    # 6. Process each item — create transactions and decrement stock
    transaction_ids = []

    for item in items:
        amount = item["subtotal"]
        d_fee = delivery_fee_per_item.get(item["product_id"], 0)
        # Seller gets the full product amount; platform takes no commission here.
        seller_amount = amount
        admin_commission = 0.0
        group_id = group_id_map[item["delivery_unit"]]

        # Create transaction (status "pending" — funds released on completion elsewhere).
        txn = sb.table("product_transactions").insert({
            "buyer_id": user_id,
            "seller_id": item["seller_id"],
            "product_id": item["product_id"],
            "quantity": item["quantity"],
            "amount": amount,
            "seller_amount": seller_amount,
            "admin_commission": admin_commission,
            "delivery_fee": d_fee,
            "delivery_address": delivery_address,
            "purchase_type": "delivery",
            "status": "pending",
            "group_id": group_id,
        }).execute()

        if txn.data:
            transaction_ids.append(txn.data[0]["id"])

        # Decrement stock (read-then-write; not atomic — see NOTE in docstring).
        prod_result = sb.table("products").select("stock").eq("id", item["product_id"]).execute()
        if prod_result.data:
            new_stock = int(prod_result.data[0]["stock"]) - item["quantity"]
            sb.table("products").update({"stock": new_stock}).eq("id", item["product_id"]).execute()

    # 7. Deduct from buyer (money held until transaction completes)
    new_balance = buyer_balance - grand_total
    sb.table("user_balances").update({"balance": new_balance}).eq("user_id", user_id).execute()

    # Log the cart checkout as a single purchase entry in wallet history
    try:
        sb.table("stored_value").insert({
            "user_id": user_id,
            "transaction_type": "purchase",
            "amount": grand_total,
            "metadata": {
                "order_type": "cart_checkout",
                "item_count": len(items),
                "departments": len(delivery_units),
                "products_total": round(products_total, 2),
                "delivery_fee": total_delivery,
                "transaction_ids": [str(tid) for tid in transaction_ids],
                "group_ids": list(group_id_map.values()),
            },
        }).execute()
    except Exception:
        pass  # Log failure must not block cart clearing

    # 8. Clear cart
    sb.table("cart_items").delete().eq("buyer_id", user_id).execute()

    return {
        "message": "Order placed successfully!",
        "transaction_ids": transaction_ids,
        "group_ids": list(group_id_map.values()),
        "products_total": round(products_total, 2),
        "delivery_fee": total_delivery,
        "grand_total": round(grand_total, 2),
        "new_balance": round(new_balance, 2),
    }
|
backend/routes/contacts.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Contact routes — manage user contact numbers.
|
| 3 |
+
Required for buyers (before checkout) and delivery users (before accepting deliveries).
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from typing import Optional
|
| 9 |
+
from database import get_supabase
|
| 10 |
+
from routes.auth import get_current_user
|
| 11 |
+
|
| 12 |
+
router = APIRouter(prefix="/contacts", tags=["Contacts"])
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class ContactRequest(BaseModel):
    """Request body for PUT /contacts/me: phone number plus optional address."""

    contact_number: str
    # Optional free-form delivery address; normalized to "" by the route when absent.
    delivery_address: Optional[str] = None
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class ContactResponse(BaseModel):
    """Response shape for the contact endpoints."""

    user_id: str
    contact_number: str
    # Defaults to "" so callers always receive a string, never null.
    delivery_address: str = ""
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@router.get("/me", response_model=ContactResponse)
async def get_my_contact(current_user: dict = Depends(get_current_user)):
    """Return the stored contact number and delivery address for the caller.

    Raises:
        HTTPException 404: when no contact row exists for this user.
    """
    client = get_supabase()
    lookup = (
        client.table("user_contacts")
        .select("*")
        .eq("user_id", current_user["sub"])
        .execute()
    )
    if not lookup.data:
        raise HTTPException(status_code=404, detail="No contact number set")
    record = lookup.data[0]
    return ContactResponse(
        user_id=record["user_id"],
        contact_number=record["contact_number"],
        delivery_address=record.get("delivery_address", ""),
    )
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
@router.put("/me", response_model=ContactResponse)
async def set_my_contact(req: ContactRequest, current_user: dict = Depends(get_current_user)):
    """Set or update the current user's contact number and delivery address.

    Raises:
        HTTPException 400: when the contact number is missing or shorter than
            7 characters after trimming whitespace.
    """
    if not req.contact_number or len(req.contact_number.strip()) < 7:
        raise HTTPException(status_code=400, detail="Please enter a valid contact number")

    sb = get_supabase()
    user_id = current_user["sub"]
    clean_number = req.contact_number.strip()
    address = (req.delivery_address or "").strip()

    # Single atomic upsert keyed on user_id. The previous select-then-
    # update/insert pattern had a TOCTOU race: two concurrent requests could
    # both miss on the select and both attempt an insert.
    sb.table("user_contacts").upsert({
        "user_id": user_id,
        "contact_number": clean_number,
        "delivery_address": address,
    }, on_conflict="user_id").execute()

    return ContactResponse(user_id=user_id, contact_number=clean_number, delivery_address=address)
|
backend/routes/delivery.py
ADDED
|
@@ -0,0 +1,601 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Delivery routes — delivery user dashboard.
|
| 3 |
+
Available pickups, pick group, update group status, earnings, history.
|
| 4 |
+
Max 5 active delivery GROUPS enforced.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 8 |
+
from pydantic import BaseModel
|
| 9 |
+
from typing import Optional
|
| 10 |
+
from database import get_supabase
|
| 11 |
+
from routes.auth import get_current_user
|
| 12 |
+
from datetime import datetime, timedelta, timezone
|
| 13 |
+
|
| 14 |
+
router = APIRouter(prefix="/delivery", tags=["Delivery"])
|
| 15 |
+
|
| 16 |
+
MAX_ACTIVE_DELIVERIES = 5
|
| 17 |
+
DELIVERY_FEE = 90.00
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# --- Helpers ---
|
| 21 |
+
|
| 22 |
+
async def require_delivery(current_user: dict = Depends(get_current_user)):
    """Dependency that rejects any caller who is not a delivery user."""
    # Fast path: token already carries the delivery role claim.
    if current_user.get("role") == "delivery":
        return current_user
    # Fallback: the claim may be absent or stale, so consult the users table.
    rows = (
        get_supabase()
        .table("users")
        .select("role")
        .eq("id", current_user["sub"])
        .execute()
    )
    if rows.data and rows.data[0]["role"] == "delivery":
        return current_user
    raise HTTPException(status_code=403, detail="Delivery user access required")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# --- Response Models ---
|
| 33 |
+
|
| 34 |
+
class AvailableOrderResponse(BaseModel):
    """One pickable delivery group (all items of a single buyer/seller order)."""

    group_id: str
    buyer_name: str
    buyer_contact: str = ""
    seller_name: str
    delivery_address: str = ""
    delivery_fee: float
    total_amount: float
    status: str
    created_at: str
    # Per-transaction item dicts (product title/price/images, quantity, amount).
    items: list = []


class DeliveryHistoryItem(BaseModel):
    """A finished or in-flight group in the courier's history view."""

    group_id: str
    buyer_name: str
    buyer_contact: str
    seller_name: str
    delivery_address: str = ""
    delivery_fee: float
    total_amount: float
    status: str
    created_at: str
    items: list = []


class EarningsDay(BaseModel):
    """One bucket of the earnings time series (day, week start, or month)."""

    date: str
    amount: float
    count: int


class TransactionHistoryItem(BaseModel):
    """A single wallet history entry (delivery fee earned or withdrawal)."""

    type: str
    date: str
    amount: float


class EarningsResponse(BaseModel):
    """Aggregate earnings payload powering the delivery dashboard graphs."""

    total_earnings: float
    total_deliveries: int
    wallet_balance: float
    daily: list[EarningsDay]
    weekly: list[EarningsDay]
    monthly: list[EarningsDay]
    # NOTE: currently populated with the same EarningsDay rows as
    # daily/weekly/monthly; kept separate so counts can diverge later.
    daily_delivery_count: list[EarningsDay]
    weekly_delivery_count: list[EarningsDay]
    monthly_delivery_count: list[EarningsDay]
    history: list[TransactionHistoryItem] = []


class StatusUpdateRequest(BaseModel):
    status: str  # 'delivered' or 'undelivered'


class WithdrawRequest(BaseModel):
    # Amount in PHP to withdraw; validated positive and <= balance in the route.
    amount: float
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
# --- Helper: build grouped order response ---
|
| 94 |
+
|
| 95 |
+
def _build_groups(txns_data, user_names, buyer_contacts):
    """Collapse flat transaction rows into one delivery-box dict per group_id.

    Rows without a group_id (legacy single-item orders) use their own
    transaction id as the grouping key.
    """
    boxes = {}
    for txn in txns_data:
        key = txn.get("group_id") or txn["id"]
        product = txn.get("products") or {}
        box = boxes.get(key)
        if box is None:
            # Group-level fields come from the first row seen for this key.
            box = boxes[key] = {
                "group_id": key,
                "buyer_id": txn["buyer_id"],
                "buyer_name": user_names.get(txn["buyer_id"], "Unknown"),
                "buyer_contact": buyer_contacts.get(txn["buyer_id"], "N/A"),
                "seller_id": txn["seller_id"],
                "seller_name": user_names.get(txn["seller_id"], "Unknown"),
                "delivery_address": txn.get("delivery_address", ""),
                "delivery_fee": DELIVERY_FEE,
                "total_amount": 0.0,
                "status": txn["status"],
                "created_at": txn["created_at"],
                "items": [],
            }
        box["items"].append({
            "transaction_id": txn["id"],
            "product_id": txn["product_id"],
            "product_title": product.get("title", ""),
            "product_price": float(product.get("price", 0)),
            "product_images": product.get("images", []),
            "quantity": int(txn.get("quantity", 1)),
            "amount": float(txn["amount"]),
        })
        box["total_amount"] += float(txn["amount"])
    return list(boxes.values())
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
# --- Routes ---
|
| 130 |
+
|
| 131 |
+
@router.get("/available", response_model=list[AvailableOrderResponse])
async def get_available_orders(delivery_user: dict = Depends(require_delivery)):
    """Get order GROUPS with all items approved, ready for pickup.

    A group is shown only when no transaction sharing its group_id is still
    pending (e.g. the buyer just added another item awaiting approval).
    """
    sb = get_supabase()

    # Fetch all approved transactions not yet assigned to a delivery user
    txns = sb.table("product_transactions").select(
        "*, products(title, price, images)"
    ).eq("status", "approved").is_("delivery_user_id", "null").order("created_at", desc=False).limit(200).execute()

    if not txns.data:
        return []

    # Build user lookups
    user_ids = set()
    for t in txns.data:
        user_ids.add(t["buyer_id"])
        user_ids.add(t["seller_id"])

    users_result = sb.table("users").select("id, full_name").in_("id", list(user_ids)).execute()
    user_names = {u["id"]: u["full_name"] for u in (users_result.data or [])}

    buyer_ids = list(set(t["buyer_id"] for t in txns.data))
    contacts_result = sb.table("user_contacts").select("user_id, contact_number").in_("user_id", buyer_ids).execute()
    buyer_contacts = {c["user_id"]: c["contact_number"] for c in (contacts_result.data or [])}

    # Group by group_id, falling back to the transaction id for legacy rows.
    # (The old "all_approved" flag was dead state — set but never read.)
    raw_groups = {}
    for t in txns.data:
        gid = t.get("group_id") or t["id"]
        raw_groups.setdefault(gid, []).append(t)

    # Batched pending check: one query covering every real group_id instead
    # of one round-trip per group (the previous loop was an N+1 pattern).
    # Fallback gids (bare transaction ids) never match a group_id, so they
    # can never be blocked — same behavior as the old per-gid check.
    real_group_ids = [
        gid for gid, txn_rows in raw_groups.items()
        if any(t.get("group_id") for t in txn_rows)
    ]
    blocked_gids = set()
    if real_group_ids:
        pending = sb.table("product_transactions").select("group_id").in_(
            "group_id", real_group_ids
        ).eq("status", "pending").execute()
        blocked_gids = {row["group_id"] for row in (pending.data or [])}

    result_groups = []
    for gid, group_txns in raw_groups.items():
        if gid in blocked_gids:
            continue  # Skip — group still has pending items

        # Build item list
        buyer_id = group_txns[0]["buyer_id"]
        seller_id = group_txns[0]["seller_id"]
        items = []
        total_amount = 0.0
        for t in group_txns:
            prod = t.get("products") or {}
            items.append({
                "transaction_id": t["id"],
                "product_id": t["product_id"],
                "product_title": prod.get("title", ""),
                "product_price": float(prod.get("price", 0)),
                "product_images": prod.get("images", []),
                "quantity": int(t.get("quantity", 1)),
                "amount": float(t["amount"]),
            })
            total_amount += float(t["amount"])

        result_groups.append(AvailableOrderResponse(
            group_id=gid,
            buyer_name=user_names.get(buyer_id, "Unknown"),
            buyer_contact=buyer_contacts.get(buyer_id, "N/A"),
            seller_name=user_names.get(seller_id, "Unknown"),
            delivery_address=group_txns[0].get("delivery_address", ""),
            delivery_fee=DELIVERY_FEE,
            total_amount=round(total_amount, 2),
            status="approved",
            created_at=group_txns[0]["created_at"],
            items=items,
        ))

    return result_groups
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
@router.get("/active", response_model=list[AvailableOrderResponse])
async def get_active_deliveries(delivery_user: dict = Depends(require_delivery)):
    """List the caller's in-flight delivery groups (status='ondeliver')."""
    sb = get_supabase()
    courier_id = delivery_user["sub"]

    rows = sb.table("product_transactions").select(
        "*, products(title, price, images)"
    ).eq("delivery_user_id", courier_id).eq("status", "ondeliver").order("created_at", desc=False).execute()

    if not rows.data:
        return []

    # Resolve display names for every buyer and seller involved in one query.
    participant_ids = {t["buyer_id"] for t in rows.data} | {t["seller_id"] for t in rows.data}
    name_rows = sb.table("users").select("id, full_name").in_("id", list(participant_ids)).execute()
    user_names = {u["id"]: u["full_name"] for u in (name_rows.data or [])}

    buyer_ids = list({t["buyer_id"] for t in rows.data})
    contact_rows = sb.table("user_contacts").select("user_id, contact_number").in_("user_id", buyer_ids).execute()
    buyer_contacts = {c["user_id"]: c["contact_number"] for c in (contact_rows.data or [])}

    # Fold the flat rows into one box per group id (legacy rows fall back
    # to their transaction id as the key).
    boxes = {}
    for t in rows.data:
        gid = t.get("group_id") or t["id"]
        product = t.get("products") or {}
        if gid not in boxes:
            boxes[gid] = {
                "group_id": gid,
                "buyer_name": user_names.get(t["buyer_id"], "Unknown"),
                "buyer_contact": buyer_contacts.get(t["buyer_id"], "N/A"),
                "seller_name": user_names.get(t["seller_id"], "Unknown"),
                "delivery_address": t.get("delivery_address", ""),
                "delivery_fee": DELIVERY_FEE,
                "total_amount": 0.0,
                "status": "ondeliver",
                "created_at": t["created_at"],
                "items": [],
            }
        entry = boxes[gid]
        entry["items"].append({
            "transaction_id": t["id"],
            "product_id": t["product_id"],
            "product_title": product.get("title", ""),
            "product_price": float(product.get("price", 0)),
            "product_images": product.get("images", []),
            "quantity": int(t.get("quantity", 1)),
            "amount": float(t["amount"]),
        })
        entry["total_amount"] += float(t["amount"])

    return [AvailableOrderResponse(**box) for box in boxes.values()]
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@router.post("/pick/{group_id}")
async def pick_order(group_id: str, delivery_user: dict = Depends(require_delivery)):
    """Pick an entire group for delivery. Max 5 active groups.

    Raises:
        HTTPException 400: no contact number on file, courier already at the
            active-group cap, group not fully approved, or already assigned.
        HTTPException 404: no transactions match the given group/transaction id.
    """
    sb = get_supabase()
    user_id = delivery_user["sub"]

    # Couriers must be reachable before they can accept work.
    contact = sb.table("user_contacts").select("contact_number").eq("user_id", user_id).execute()
    if not contact.data:
        raise HTTPException(status_code=400, detail="Please add your contact number before accepting deliveries")

    # Count active delivery GROUPS (not individual transactions)
    active_groups_result = sb.table("product_transactions").select("id, group_id").eq(
        "delivery_user_id", user_id
    ).eq("status", "ondeliver").execute()
    active_group_ids = set(
        t.get("group_id") or t["id"]
        for t in (active_groups_result.data or [])
    )
    if len(active_group_ids) >= MAX_ACTIVE_DELIVERIES:
        raise HTTPException(
            status_code=400,
            detail=f"You already have {MAX_ACTIVE_DELIVERIES} active delivery groups. Complete some before picking more."
        )

    # Verify all transactions in the group are approved and unassigned
    used_fallback = False
    group_txns = sb.table("product_transactions").select("*").eq(
        "group_id", group_id
    ).execute()

    if not group_txns.data:
        # Fallback: legacy single-item orders have no group_id — treat the
        # path parameter as a transaction id instead.
        group_txns = sb.table("product_transactions").select("*").eq(
            "id", group_id
        ).execute()
        used_fallback = True

    if not group_txns.data:
        raise HTTPException(status_code=404, detail="Order group not found")

    for t in group_txns.data:
        if t["status"] != "approved":
            raise HTTPException(status_code=400, detail=f"Order group is not fully approved yet (status: {t['status']})")
        if t.get("delivery_user_id"):
            raise HTTPException(status_code=400, detail="This order group is already assigned to another delivery user")

    # Assign all transactions in the group to this courier. The status and
    # unassigned filters are repeated on the UPDATE itself so that a
    # concurrent pick by another courier between the check above and this
    # write cannot overwrite an assignment that already happened (the old
    # unconditional update had exactly that race).
    key_column = "id" if used_fallback else "group_id"
    sb.table("product_transactions").update({
        "delivery_user_id": user_id,
        "status": "ondeliver",
    }).eq(key_column, group_id).eq("status", "approved").is_("delivery_user_id", "null").execute()

    return {"message": "Order group picked up! Deliver all items to the buyer."}
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
@router.put("/status/{group_id}")
async def update_delivery_status(
    group_id: str,
    req: StatusUpdateRequest,
    delivery_user: dict = Depends(require_delivery),
):
    """Update delivery status to 'delivered' or 'undelivered' for an entire group.

    Side effects:
      * 'undelivered' — refunds the buyer (item amounts + per-row delivery
        fee), logs the refund as a 'deposit' wallet entry, and restores
        product stock for every item in the group.
      * 'delivered' — credits the flat DELIVERY_FEE to this courier and the
        full product total to the (first) admin user, recording rows in
        delivery_earnings and admin_earnings.

    NOTE(review): every balance change here is a read-modify-write without a
    database transaction; concurrent updates could lose money — confirm
    whether a DB-side atomic increment is available.
    """
    sb = get_supabase()
    user_id = delivery_user["sub"]

    if req.status not in ("delivered", "undelivered"):
        raise HTTPException(status_code=400, detail="Status must be 'delivered' or 'undelivered'")

    # Verify this group belongs to this delivery user
    used_fallback = False
    group_txns = sb.table("product_transactions").select("*").eq(
        "group_id", group_id
    ).eq("delivery_user_id", user_id).eq("status", "ondeliver").execute()

    # Fallback: treat as single transaction_id
    if not group_txns.data:
        group_txns = sb.table("product_transactions").select("*").eq(
            "id", group_id
        ).eq("delivery_user_id", user_id).eq("status", "ondeliver").execute()
        used_fallback = True

    if not group_txns.data:
        raise HTTPException(status_code=404, detail="Order group not found or not assigned to you")

    # Update all transactions in group
    if used_fallback:
        sb.table("product_transactions").update({"status": req.status}).eq("id", group_id).eq("delivery_user_id", user_id).execute()
    else:
        sb.table("product_transactions").update({"status": req.status}).eq("group_id", group_id).eq("delivery_user_id", user_id).execute()

    buyer_id = group_txns.data[0]["buyer_id"]
    # One transaction id stands in for the whole group in the earnings tables.
    representative_txn_id = group_txns.data[0]["id"]

    if req.status == "undelivered":
        # Refund buyer: sum of all amounts + delivery fee
        total_refund = sum(
            float(t.get("amount", 0)) + float(t.get("delivery_fee", 0))
            for t in group_txns.data
        )
        buyer_bal = sb.table("user_balances").select("balance").eq("user_id", buyer_id).execute()
        if buyer_bal.data:
            new_buyer_bal = float(buyer_bal.data[0]["balance"]) + total_refund
            sb.table("user_balances").update({"balance": new_buyer_bal}).eq("user_id", buyer_id).execute()

        # Log refund (recorded as a deposit into the buyer's wallet history)
        sb.table("stored_value").insert({
            "user_id": buyer_id,
            "transaction_type": "deposit",
            "amount": total_refund,
        }).execute()

        # Restore product stock for each item
        for t in group_txns.data:
            product = sb.table("products").select("stock").eq("id", t["product_id"]).execute()
            if product.data:
                current_stock = int(product.data[0].get("stock", 0))
                new_stock = current_stock + int(t.get("quantity", 1))
                sb.table("products").update({"stock": new_stock}).eq("id", t["product_id"]).execute()

    if req.status == "delivered":
        # Delivery fee: ₱90 flat per group goes to delivery user
        sb.table("delivery_earnings").insert({
            "delivery_user_id": user_id,
            "transaction_id": representative_txn_id,
            "amount": DELIVERY_FEE,
        }).execute()
        del_bal = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
        if del_bal.data:
            new_del_bal = float(del_bal.data[0]["balance"]) + DELIVERY_FEE
            sb.table("user_balances").update({"balance": new_del_bal}).eq("user_id", user_id).execute()

        # Admin gets the total product amount for all items in the group
        total_product_amount = sum(float(t.get("amount", 0)) for t in group_txns.data)
        sb.table("admin_earnings").insert({
            "transaction_id": representative_txn_id,
            "amount": total_product_amount,
        }).execute()
        admin_user = sb.table("users").select("id").eq("role", "admin").limit(1).execute()
        if admin_user.data:
            admin_id = admin_user.data[0]["id"]
            admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
            if admin_bal.data:
                new_admin_bal = float(admin_bal.data[0]["balance"]) + total_product_amount
                sb.table("user_balances").update({"balance": new_admin_bal}).eq("user_id", admin_id).execute()

    status_msg = "delivered" if req.status == "delivered" else "marked as undelivered"
    return {"message": f"Order group {status_msg} successfully!"}
|
| 423 |
+
|
| 424 |
+
|
| 425 |
+
@router.get("/earnings", response_model=EarningsResponse)
async def get_earnings(delivery_user: dict = Depends(require_delivery)):
    """Get delivery earnings with daily/weekly/monthly breakdowns for graphs."""
    sb = get_supabase()
    user_id = delivery_user["sub"]

    # All delivery-fee earnings, newest first.
    earnings = sb.table("delivery_earnings").select("*").eq(
        "delivery_user_id", user_id
    ).order("created_at", desc=True).execute()

    # Current wallet balance (0.0 if no balance row exists yet).
    bal = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
    wallet_balance = float(bal.data[0]["balance"]) if bal.data else 0.0

    rows = earnings.data or []
    total_earnings = sum(float(r["amount"]) for r in rows)
    total_deliveries = len(rows)

    # Bucket each earning into day / week-start / month keys.
    daily_data = {}
    weekly_data = {}
    monthly_data = {}

    for r in rows:
        try:
            dt = datetime.fromisoformat(r["created_at"].replace("Z", "+00:00"))
            day_key = dt.strftime("%Y-%m-%d")
            week_key = (dt - timedelta(days=dt.weekday())).strftime("%Y-%m-%d")
            month_key = dt.strftime("%Y-%m")
        except Exception:
            # Unparseable timestamp: fall back to plain string slicing.
            day_key = r["created_at"][:10]
            week_key = r["created_at"][:10]
            month_key = r["created_at"][:7]

        amt = float(r["amount"])
        for bucket, key in ((daily_data, day_key), (weekly_data, week_key), (monthly_data, month_key)):
            slot = bucket.setdefault(key, {"amount": 0, "count": 0})
            slot["amount"] += amt
            slot["count"] += 1

    def to_list(data):
        # Newest-first series, capped at 30 buckets for the charts.
        series = [
            EarningsDay(date=k, amount=round(v["amount"], 2), count=v["count"])
            for k, v in data.items()
        ]
        series.sort(key=lambda x: x.date, reverse=True)
        return series[:30]

    withdrawals = sb.table("stored_value").select("*").eq("user_id", user_id).execute()
    hist = [
        TransactionHistoryItem(type="Delivery Fee", date=r["created_at"], amount=float(r["amount"]))
        for r in rows
    ]
    for w in withdrawals.data or []:
        if w.get("amount"):
            # A withdrawal is a deduction from the delivery wallet
            hist.append(TransactionHistoryItem(
                type=w.get("transaction_type", "Withdrawal").capitalize(),
                date=w["created_at"],
                amount=abs(float(w["amount"])),
            ))
    hist.sort(key=lambda x: x.date, reverse=True)

    return EarningsResponse(
        total_earnings=round(total_earnings, 2),
        total_deliveries=total_deliveries,
        wallet_balance=round(wallet_balance, 2),
        daily=to_list(daily_data),
        weekly=to_list(weekly_data),
        monthly=to_list(monthly_data),
        daily_delivery_count=to_list(daily_data),
        weekly_delivery_count=to_list(weekly_data),
        monthly_delivery_count=to_list(monthly_data),
        history=hist,
    )
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
@router.get("/history", response_model=list[DeliveryHistoryItem])
async def get_delivery_history(delivery_user: dict = Depends(require_delivery)):
    """Get delivery history grouped by group_id.

    Includes in-flight ('ondeliver') groups alongside finished ones so the
    courier sees a single chronological list. Capped at the 200 most recent
    transactions before grouping.
    """
    sb = get_supabase()
    user_id = delivery_user["sub"]

    txns = sb.table("product_transactions").select(
        "*, products(title, price, images)"
    ).eq("delivery_user_id", user_id).in_(
        "status", ["delivered", "undelivered", "ondeliver"]
    ).order("created_at", desc=True).limit(200).execute()

    if not txns.data:
        return []

    # Collect every buyer/seller id so display names resolve in one query.
    user_ids = set()
    for t in txns.data:
        user_ids.add(t["buyer_id"])
        user_ids.add(t["seller_id"])

    users_result = sb.table("users").select("id, full_name").in_("id", list(user_ids)).execute()
    user_names = {u["id"]: u["full_name"] for u in (users_result.data or [])}

    contacts_result = sb.table("user_contacts").select("user_id, contact_number").in_(
        "user_id", list(user_ids)
    ).execute()
    user_contacts = {c["user_id"]: c["contact_number"] for c in (contacts_result.data or [])}

    # Fold the flat transaction rows into one history entry per group.
    groups = {}
    for t in txns.data:
        gid = t.get("group_id") or t["id"]  # legacy rows have no group_id
        prod = t.get("products") or {}
        if gid not in groups:
            # Group-level fields come from the first row seen for this gid.
            groups[gid] = {
                "group_id": gid,
                "buyer_name": user_names.get(t["buyer_id"], "Unknown"),
                "buyer_contact": user_contacts.get(t["buyer_id"], "N/A"),
                "seller_name": user_names.get(t["seller_id"], "Unknown"),
                "delivery_address": t.get("delivery_address", ""),
                "delivery_fee": DELIVERY_FEE,
                "total_amount": 0.0,
                "status": t["status"],
                "created_at": t["created_at"],
                "items": [],
            }
        groups[gid]["items"].append({
            "transaction_id": t["id"],
            "product_title": prod.get("title", ""),
            "product_price": float(prod.get("price", 0)),
            "product_images": prod.get("images", []),
            "quantity": int(t.get("quantity", 1)),
            "amount": float(t["amount"]),
        })
        groups[gid]["total_amount"] += float(t["amount"])

    return [DeliveryHistoryItem(**g) for g in groups.values()]
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
@router.post("/withdraw")
async def withdraw_earnings(req: WithdrawRequest, delivery_user: dict = Depends(require_delivery)):
    """Withdraw earnings from delivery wallet.

    Raises:
        HTTPException 400: non-positive amount or insufficient balance.
        HTTPException 404: no balance row exists for this user.
    """
    sb = get_supabase()
    user_id = delivery_user["sub"]

    if req.amount <= 0:
        raise HTTPException(status_code=400, detail="Amount must be positive")

    balance_row = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
    if not balance_row.data:
        raise HTTPException(status_code=404, detail="Balance not found")

    available = float(balance_row.data[0]["balance"])
    if available < req.amount:
        raise HTTPException(status_code=400, detail="Insufficient balance")

    # NOTE(review): read-then-write is not atomic; two concurrent withdrawals
    # could both pass the balance check — confirm a DB-side guard exists.
    remaining = available - req.amount
    sb.table("user_balances").update({"balance": remaining}).eq("user_id", user_id).execute()

    # Record the withdrawal in the wallet history ledger.
    sb.table("stored_value").insert({
        "user_id": user_id,
        "transaction_type": "withdrawal",
        "amount": req.amount,
    }).execute()

    return {"message": f"Withdrew PHP {req.amount:.2f}", "balance": round(remaining, 2)}
|
backend/routes/insights.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Insights routes — dynamic AI insights built from transactions and search prompts.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from fastapi import APIRouter, Depends, HTTPException
|
| 6 |
+
from pydantic import BaseModel
|
| 7 |
+
from collections import Counter
|
| 8 |
+
import re
|
| 9 |
+
from database import get_supabase
|
| 10 |
+
from routes.auth import get_current_user
|
| 11 |
+
|
| 12 |
+
router = APIRouter(prefix="/insights", tags=["Insights"])
|
| 13 |
+
|
| 14 |
+
class PromptRequest(BaseModel):
    """Request body for logging a user's search prompt."""

    # Free-text search query as typed by the user.
    prompt: str
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@router.post("/prompts")
async def log_prompt(req: PromptRequest, current_user: dict = Depends(get_current_user)):
    """Logs a search prompt for the user."""
    record = {
        "user_id": current_user["sub"],
        "prompt_text": req.prompt,
    }
    get_supabase().table("user_prompts").insert(record).execute()
    return {"status": "ok"}
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@router.get("/seller")
async def get_seller_insights(current_user: dict = Depends(get_current_user)):
    """Dynamic NLP Insights for Seller Dashboard using global search prompts.

    Returns three sections:
      - zero_result_queries: top repeated prompts framed as market opportunities
      - keyword_heatmap: most frequent tokens across all logged prompts
      - sentiment_data: static placeholder values until reviews exist
    """
    # NOTE(review): the original contained a no-op role check (dead branch
    # ending in `pass`), so any authenticated user can read these aggregates.
    # The dead code is removed here; confirm whether this endpoint should be
    # restricted to sellers/admins and enforce it if so.
    sb = get_supabase()

    # Fetch all prompts globally (to see market trends).
    prompts_resp = sb.table("user_prompts").select("prompt_text").execute()
    prompts = [p["prompt_text"] for p in prompts_resp.data] if prompts_resp.data else []

    # 1. Market-gap analytics: since we can't know whether a query was
    # zero-result at the time, label the most frequent queries as opportunities.
    query_counts = Counter(prompts)
    top_queries = query_counts.most_common(5)

    zero_result_queries = []
    for q, count in top_queries:
        insight = "High demand — consider sourcing inventory" if count > 5 else "Niche market opportunity"
        trend = "rising" if count > 2 else "stable"
        zero_result_queries.append({
            "query": q,
            "count": count,
            "insight": insight,
            "trend": trend,
        })

    # 2. Keyword heatmap: tokenize every prompt, keep words longer than 3 chars.
    words = []
    for p in prompts:
        tokens = re.findall(r'\b\w+\b', p.lower())
        words.extend(t for t in tokens if len(t) > 3)

    word_counts = Counter(words)
    top_words = word_counts.most_common(12)

    keyword_heatmap = []
    colors = ['#10b981', '#6366f1', '#f59e0b', '#ef4444', '#8b5cf6', '#14b8a6', '#f97316', '#ec4899', '#06b6d4', '#84cc16']
    # Loop-invariant hoisted out of the loop (was recomputed per iteration):
    # weights are scaled relative to the single highest count.
    max_c = top_words[0][1] if top_words else 1
    for i, (word, count) in enumerate(top_words):
        # Map relative frequency onto a 40–95 display weight.
        weight = 40 + (count / max_c) * 55
        keyword_heatmap.append({
            "word": word.capitalize(),
            "weight": int(weight),
            "color": colors[i % len(colors)],
        })

    # 3. Sentiment analytics — static demo values; a real sentiment model would
    # run over reviews/messages once those exist.
    sentiment_data = {
        "positive": 75,
        "neutral": 15,
        "negative": 10,
        "topPositive": ['Great quality overall', 'Fast shipping noted', 'Exactly as described'],
        "topNegative": ['Waiting on tracking', 'Size mismatch reported'],
    }

    return {
        "zero_result_queries": zero_result_queries,
        "keyword_heatmap": keyword_heatmap,
        "sentiment_data": sentiment_data,
    }
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
@router.get("/buyer/recommendations")
async def get_buyer_recommendations(current_user: dict = Depends(get_current_user)):
    """Dynamic Recommendations based on the buyer's past search history.

    Falls back to the newest in-stock products for users without any logged
    prompts; otherwise matches active, in-stock products against the user's
    3 most recent unique search queries (title or description ILIKE match).
    """
    sb = get_supabase()

    def _attach_seller_names(products):
        # Flatten the joined users row into a simple seller_name field.
        for p in products:
            user_info = p.pop("users", None)
            p["seller_name"] = user_info["full_name"] if user_info else "Seller"
        return products

    def _popular_fallback():
        # Newest active, in-stock products for users without usable history.
        fallback_resp = (
            sb.table("products")
            .select("*, users!products_seller_id_fkey(full_name)")
            .eq("is_active", True)
            .gt("stock", 0)
            .order("created_at", desc=True)
            .limit(6)
            .execute()
        )
        products = _attach_seller_names(fallback_resp.data or [])
        return {"recommendations": products, "based_on": "popular"}

    # 1. Fetch the user's recent prompts (newest first).
    prompts_resp = (
        sb.table("user_prompts")
        .select("prompt_text")
        .eq("user_id", current_user["sub"])
        .order("created_at", desc=True)
        .limit(10)
        .execute()
    )

    if not prompts_resp.data:
        return _popular_fallback()

    # 2. Deduplicate (case-insensitively) and take the 3 most recent unique queries.
    seen = set()
    unique_prompts = []
    for p in prompts_resp.data:
        text = p["prompt_text"].strip().lower()
        if text not in seen:
            seen.add(text)
            unique_prompts.append(p["prompt_text"].strip())
        if len(unique_prompts) >= 3:
            break

    # 3. Build OR filter across all unique queries.
    or_parts = []
    for q in unique_prompts:
        # Bug fix: also strip commas/parentheses — they delimit PostgREST
        # or_() groups and a query containing them would corrupt the whole
        # filter string. % and _ are ILIKE wildcards and are stripped as before.
        safe_q = re.sub(r"[%_,()]", "", q)
        if not safe_q:
            continue
        or_parts.append(f"title.ilike.%{safe_q}%")
        or_parts.append(f"description.ilike.%{safe_q}%")

    if not or_parts:
        # Every query was reduced to nothing by sanitization — avoid issuing
        # an empty (invalid) or_() filter.
        return _popular_fallback()

    recs_resp = (
        sb.table("products")
        .select("*, users!products_seller_id_fkey(full_name)")
        .or_(",".join(or_parts))
        .eq("is_active", True)
        .gt("stock", 0)
        .limit(6)
        .execute()
    )

    products = _attach_seller_names(recs_resp.data or [])

    return {
        "recommendations": products,
        "based_on": unique_prompts[0] if unique_prompts else "popular",
    }
|
backend/routes/manager.py
ADDED
|
@@ -0,0 +1,745 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Manager routes — department management, staff CRUD, restock approval.
|
| 3 |
+
Only accessible by manager users (role='manager').
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, Query
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from typing import Optional
|
| 9 |
+
from database import get_supabase
|
| 10 |
+
from routes.auth import get_current_user
|
| 11 |
+
from datetime import datetime, timedelta, timezone
|
| 12 |
+
|
| 13 |
+
router = APIRouter(prefix="/manager", tags=["Manager"])
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# --- Helpers ---
|
| 17 |
+
|
| 18 |
+
async def require_manager(current_user: dict = Depends(get_current_user)):
    """Dependency that ensures the current user is a manager."""
    rows = (
        get_supabase()
        .table("users")
        .select("role, department_id")
        .eq("id", current_user["sub"])
        .execute()
    )
    record = rows.data[0] if rows.data else None
    if record is None or record.get("role") != "manager":
        raise HTTPException(status_code=403, detail="Manager access required")
    # Attach the manager's department so downstream routes can scope queries.
    current_user["department_id"] = record.get("department_id")
    return current_user
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# --- Request/Response Models ---
|
| 29 |
+
|
| 30 |
+
class StaffRegisterRequest(BaseModel):
    """Payload for a manager creating a new staff (seller) account."""

    full_name: str
    email: str
    # Plaintext password; hashed with bcrypt before storage.
    password: str
    # Optional phone number; stored in user_contacts when non-empty.
    contact_number: str = ""
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class RestockApproveRequest(BaseModel):
    """Payload for approving a restock request."""

    # Quantity the manager approves; None presumably means "as requested" —
    # confirm against the approval route's handling.
    approved_quantity: Optional[int] = None
    manager_notes: str = ""
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class RestockRejectRequest(BaseModel):
    """Payload for rejecting a restock request."""

    # Optional explanation shown to the requesting staff member.
    manager_notes: str = ""
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
# --- Routes ---
|
| 47 |
+
|
| 48 |
+
@router.get("/dashboard")
async def manager_dashboard(manager: dict = Depends(require_manager)):
    """Get manager dashboard stats for their department.

    Aggregates staff headcount, product count, realized revenue
    (seller_amount on delivered/completed transactions) and
    daily/weekly/monthly sales series across everyone in the manager's
    department (staff plus the manager themselves).
    """
    sb = get_supabase()
    dept_id = manager.get("department_id")

    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Department info
    dept = sb.table("departments").select("*").eq("id", dept_id).execute()
    dept_info = dept.data[0] if dept.data else {}

    # Staff count
    staff = sb.table("users").select("id", count="exact").eq("department_id", dept_id).eq("role", "seller").execute()

    # Products in department (via staff + the manager themselves)
    staff_ids_result = sb.table("users").select("id").eq("department_id", dept_id).eq("role", "seller").execute()
    staff_ids = [s["id"] for s in (staff_ids_result.data or [])]
    manager_id = manager["sub"]
    if manager_id not in staff_ids:
        staff_ids.append(manager_id)

    total_products = 0
    total_revenue = 0
    daily_sales = {}
    weekly_sales = {}
    monthly_sales = {}

    if staff_ids:
        products = sb.table("products").select("id", count="exact").in_("seller_id", staff_ids).execute()
        total_products = products.count or 0

        # Revenue from delivered/completed transactions
        txns = sb.table("product_transactions").select("amount, seller_amount, created_at, purchase_type").in_(
            "seller_id", staff_ids
        ).in_("status", ["delivered", "completed"]).execute()

        for t in (txns.data or []):
            # Bug fix: seller_amount may be present but None; `or 0` prevents
            # float(None) from crashing the whole dashboard.
            amt = float(t.get("seller_amount") or 0)
            total_revenue += amt

            try:
                dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
                day_key = dt.strftime("%Y-%m-%d")
                # Week bucket keyed by that week's Monday.
                week_start = dt - timedelta(days=dt.weekday())
                week_key = week_start.strftime("%Y-%m-%d")
                month_key = dt.strftime("%Y-%m")
            except Exception:
                # Fall back to string slicing when the timestamp can't be parsed.
                day_key = t["created_at"][:10]
                week_key = t["created_at"][:10]
                month_key = t["created_at"][:7]

            for data, key in [(daily_sales, day_key), (weekly_sales, week_key), (monthly_sales, month_key)]:
                if key not in data:
                    data[key] = {"amount": 0, "count": 0}
                data[key]["amount"] += amt
                data[key]["count"] += 1

    def to_list(data):
        # Newest-first time series, capped at 30 buckets.
        return sorted(
            [{"date": k, "amount": round(v["amount"], 2), "count": v["count"]} for k, v in data.items()],
            key=lambda x: x["date"], reverse=True
        )[:30]

    # Pending restock requests awaiting this manager's action
    pending_restocks = sb.table("restock_requests").select("id", count="exact").eq(
        "department_id", dept_id
    ).eq("status", "pending_manager").execute()

    return {
        "department": dept_info,
        "total_staff": staff.count or 0,
        "total_products": total_products,
        "total_revenue": round(total_revenue, 2),
        "pending_restocks": pending_restocks.count or 0,
        "daily_sales": to_list(daily_sales),
        "weekly_sales": to_list(weekly_sales),
        "monthly_sales": to_list(monthly_sales),
    }
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
@router.get("/staff")
async def list_staff(
    search: str = Query("", description="Search by name or email"),
    manager: dict = Depends(require_manager),
):
    """List all staff in the manager's department.

    Each entry is enriched with completed-task counters derived from
    product_transactions, attributed via assigned_staff_id with a fallback
    to the legacy seller_id column.
    """
    sb = get_supabase()
    dept_id = manager.get("department_id")

    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    query = sb.table("users").select("*").eq(
        "department_id", dept_id
    ).eq("role", "seller")

    if search:
        # Bug fix: strip characters with special meaning in PostgREST filter
        # strings (commas split or_() groups; % and _ are ILIKE wildcards) so
        # a user-typed search term cannot corrupt the query.
        safe = search.translate(str.maketrans("", "", ",%_()"))
        if safe:
            query = query.or_(f"full_name.ilike.%{safe}%,email.ilike.%{safe}%")

    result = query.order("created_at", desc=True).execute()

    all_staff_ids = [u["id"] for u in (result.data or [])]
    # Perf fix: O(1) membership tests instead of scanning a list per transaction.
    staff_id_set = set(all_staff_ids)

    # Batch-fetch completed transaction stats for all staff
    staff_stats = {}
    if all_staff_ids:
        today_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
        completed_statuses = ["completed", "delivered"]

        # Fetch completed transactions (assigned or legacy seller_id attribution)
        txns = sb.table("product_transactions").select(
            "assigned_staff_id, seller_id, status, quantity, purchase_type, created_at"
        ).in_("status", completed_statuses).execute()

        for t in (txns.data or []):
            # Determine which staff member this transaction belongs to.
            staff_id = t.get("assigned_staff_id") or t.get("seller_id")
            if staff_id not in staff_id_set:
                continue

            if staff_id not in staff_stats:
                staff_stats[staff_id] = {
                    "total_completed_tasks": 0,
                    "tasks_completed_today": 0,
                    "delivery_items_today": 0,
                }
            stats = staff_stats[staff_id]
            qty = int(t.get("quantity", 1))
            stats["total_completed_tasks"] += 1

            try:
                dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
                day_key = dt.strftime("%Y-%m-%d")
            except Exception:
                # Unparseable timestamp — fall back to the date prefix.
                day_key = t["created_at"][:10]

            if day_key == today_str:
                stats["tasks_completed_today"] += 1
                stats["delivery_items_today"] += qty

    staff_list = []
    for u in (result.data or []):
        uid = u["id"]
        s = staff_stats.get(uid, {})
        staff_list.append({
            "id": uid,
            "email": u["email"],
            "full_name": u["full_name"],
            "role": u["role"],
            "is_banned": u.get("is_banned", False),
            "created_at": u["created_at"],
            "total_completed_tasks": s.get("total_completed_tasks", 0),
            "tasks_completed_today": s.get("tasks_completed_today", 0),
            "delivery_items_today": s.get("delivery_items_today", 0),
        })

    return staff_list
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
@router.post("/staff/register")
async def register_staff(req: StaffRegisterRequest, manager: dict = Depends(require_manager)):
    """Manager creates a new staff (seller) account in their department.

    Enforces unique email, unique full_name and (if supplied) unique contact
    number before inserting the user with role='seller', the manager's
    department_id and manager_id. Returns the created user summary.
    """
    # Imported lazily so the route module loads even if bcrypt is missing.
    import bcrypt
    import traceback

    try:
        sb = get_supabase()
        dept_id = manager.get("department_id")
        manager_id = manager["sub"]

        if not dept_id:
            raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

        # Check unique email
        # NOTE(review): these check-then-insert uniqueness tests are not
        # atomic; concurrent registrations could race. A DB unique constraint
        # would close the gap — confirm against the schema.
        existing_email = sb.table("users").select("id").eq("email", req.email).execute()
        if existing_email.data:
            raise HTTPException(status_code=400, detail="Email already registered")

        # Check unique full_name
        existing_name = sb.table("users").select("id").eq("full_name", req.full_name).execute()
        if existing_name.data:
            raise HTTPException(status_code=400, detail="Full name already taken")

        # Check unique contact_number if provided
        if req.contact_number:
            existing_contact = sb.table("user_contacts").select("user_id").eq("contact_number", req.contact_number).execute()
            if existing_contact.data:
                raise HTTPException(status_code=400, detail="Contact number already registered")

        # Hash password (bcrypt with a fresh salt; stored as utf-8 text)
        password_hash = bcrypt.hashpw(req.password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")

        # Create user with seller role and department assignment
        result = sb.table("users").insert({
            "email": req.email,
            "password_hash": password_hash,
            "full_name": req.full_name,
            "role": "seller",
            "is_banned": False,
            "department_id": dept_id,
            "manager_id": manager_id,
        }).execute()

        if not result.data:
            raise HTTPException(status_code=500, detail="Failed to create staff user")

        user = result.data[0]

        # Create contact if provided
        if req.contact_number:
            sb.table("user_contacts").insert({"user_id": user["id"], "contact_number": req.contact_number}).execute()

        return {
            "message": "Staff registered successfully",
            "user": {
                "id": user["id"],
                "full_name": user["full_name"],
                "email": user["email"],
                "role": "seller",
                "department_id": dept_id,
                "contact_number": req.contact_number,
            },
        }

    # Re-raise our own HTTP errors untouched; wrap anything unexpected as 500.
    except HTTPException:
        raise
    except Exception as e:
        print(f"[StaffRegister] ERROR: {e}")
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Registration failed: {str(e)}")
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
@router.get("/staff/{user_id}/detail")
async def get_staff_detail(user_id: str, manager: dict = Depends(require_manager)):
    """Get detailed info about a staff member in the manager's department.

    Returns the user's profile, a performance report aggregated from their
    transactions (daily/monthly series, completion counters), their product
    listings, and the products they most recently handled.
    """
    sb = get_supabase()
    dept_id = manager.get("department_id")

    # Verify staff belongs to manager's department
    user_resp = sb.table("users").select("*, user_contacts(contact_number)").eq("id", user_id).execute()
    if not user_resp.data:
        raise HTTPException(status_code=404, detail="User not found")

    u = user_resp.data[0]
    if u.get("department_id") != dept_id:
        raise HTTPException(status_code=403, detail="This user is not in your department")

    # The joined user_contacts may come back as a list or a single dict
    # depending on the relationship cardinality — handle both shapes.
    contact = ""
    if u.get("user_contacts"):
        if isinstance(u["user_contacts"], list) and len(u["user_contacts"]) > 0:
            contact = u["user_contacts"][0].get("contact_number", "")
        elif isinstance(u["user_contacts"], dict):
            contact = u["user_contacts"].get("contact_number", "")

    # Transactions — use assigned_staff_id for accuracy, fallback to seller_id for legacy
    assigned_txns = sb.table("product_transactions").select("*, products(title, images)").eq(
        "assigned_staff_id", user_id
    ).order("created_at", desc=True).limit(100).execute()

    legacy_txns = sb.table("product_transactions").select("*, products(title, images)").eq(
        "seller_id", user_id
    ).is_("assigned_staff_id", "null").order("created_at", desc=True).limit(100).execute()

    # Merge and deduplicate (a txn could appear in both queries)
    seen_ids = set()
    all_txns = []
    for t in (assigned_txns.data or []) + (legacy_txns.data or []):
        if t["id"] not in seen_ids:
            seen_ids.add(t["id"])
            all_txns.append(t)
    all_txns.sort(key=lambda x: x["created_at"], reverse=True)

    today_str = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    completed_statuses = ("completed", "delivered")

    # Aggregation accumulators for the performance report.
    daily_data = {}
    monthly_data = {}
    completed_count = 0
    total_items = 0
    today_tasks = 0
    delivery_items_today = 0
    products_handled = {}

    for t in all_txns:
        amt = float(t["amount"])
        qty = int(t.get("quantity", 1))
        status = t["status"]
        purchase_type = t.get("purchase_type", "delivery")
        is_completed = status in completed_statuses

        try:
            dt = datetime.fromisoformat(t["created_at"].replace("Z", "+00:00"))
            day_key = dt.strftime("%Y-%m-%d")
            month_key = dt.strftime("%Y-%m")
        except Exception:
            # Unparseable timestamp — fall back to string prefixes.
            day_key = t["created_at"][:10]
            month_key = t["created_at"][:7]

        # Daily data with delivery breakdown
        if day_key not in daily_data:
            daily_data[day_key] = {"amount": 0, "count": 0, "delivery_items": 0}
        daily_data[day_key]["amount"] += amt
        daily_data[day_key]["count"] += 1
        if is_completed:
            daily_data[day_key]["delivery_items"] += qty

        # Monthly data
        if month_key not in monthly_data:
            monthly_data[month_key] = {"amount": 0, "count": 0}
        monthly_data[month_key]["amount"] += amt
        monthly_data[month_key]["count"] += 1

        # Completion metrics (only delivered/completed txns count)
        if is_completed:
            completed_count += 1
            total_items += qty
            if day_key == today_str:
                today_tasks += 1
                delivery_items_today += qty

        # Track recent products handled (first-seen txn sets last_handled;
        # txns are newest-first, so that is the most recent handling)
        pid = t["product_id"]
        prod_info = t.get("products") or {}
        if pid not in products_handled:
            products_handled[pid] = {
                "product_id": pid,
                "product_title": prod_info.get("title", ""),
                "product_image": ((prod_info.get("images") or [""])[0]) if prod_info.get("images") else "",
                "quantity_processed": 0,
                "last_handled": t["created_at"],
                "purchase_type": purchase_type,
            }
        products_handled[pid]["quantity_processed"] += qty

    # Newest-first series capped at 30 days / 12 months.
    daily = sorted(
        [{"date": k, "amount": round(v["amount"], 2), "count": v["count"],
          "delivery_items": v["delivery_items"]}
         for k, v in daily_data.items()],
        key=lambda x: x["date"], reverse=True
    )[:30]

    monthly = sorted(
        [{"date": k, "amount": round(v["amount"], 2), "count": v["count"]} for k, v in monthly_data.items()],
        key=lambda x: x["date"], reverse=True
    )[:12]

    recent_products_handled = sorted(
        products_handled.values(), key=lambda x: x["last_handled"], reverse=True
    )[:20]

    # Products listed by this staff member
    prods = sb.table("products").select("id, title, price, stock, images, is_active, created_at").eq("seller_id", user_id).order("created_at", desc=True).limit(50).execute()
    products = [
        {
            "id": p["id"],
            "title": p["title"],
            "price": float(p["price"]),
            "stock": int(p.get("stock", 0)),
            "image_url": (p.get("images") or [""])[0] if p.get("images") else "",
            "is_active": p["is_active"],
            "created_at": p["created_at"],
        }
        for p in (prods.data or [])
    ]

    return {
        "user": {
            "id": u["id"],
            "email": u["email"],
            "full_name": u["full_name"],
            "role": u["role"],
            "is_banned": u.get("is_banned", False),
            "contact_number": contact,
            "created_at": u["created_at"],
        },
        "report": {
            "total_transactions": len(all_txns),
            "total_amount": round(sum(float(t["amount"]) for t in all_txns), 2),
            "total_completed_tasks": completed_count,
            "total_items_processed": total_items,
            "tasks_completed_today": today_tasks,
            "delivery_items_today": delivery_items_today,
            "daily": daily,
            "monthly": monthly,
        },
        "products": products,
        "recent_products_handled": recent_products_handled,
    }
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
# --- Restock Approval ---
|
| 441 |
+
|
| 442 |
+
@router.get("/restock-requests")
async def get_restock_requests(
    status: str = Query("pending_manager", description="Filter by status"),
    manager: dict = Depends(require_manager),
):
    """Return the restock requests raised within the calling manager's department."""
    db = get_supabase()
    dept_id = manager.get("department_id")
    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Base query: requests for this department, joined with product details.
    q = db.table("restock_requests").select(
        "*, products(title, price, stock, images)"
    ).eq("department_id", dept_id)
    if status:
        q = q.eq("status", status)
    rows = q.order("created_at", desc=True).execute().data or []

    # Resolve requester display names with a single batched lookup.
    requester_ids = {row["staff_id"] for row in rows}
    name_by_id = {}
    if requester_ids:
        users = db.table("users").select("id, full_name").in_("id", list(requester_ids)).execute()
        name_by_id = {u["id"]: u["full_name"] for u in (users.data or [])}

    payload = []
    for row in rows:
        product = row.get("products") or {}
        payload.append({
            "id": row["id"],
            "staff_id": row["staff_id"],
            "staff_name": name_by_id.get(row["staff_id"], "Unknown"),
            "product_id": row["product_id"],
            "product_title": product.get("title", ""),
            "product_images": product.get("images", []),
            "product_price": float(product.get("price", 0)),
            "current_stock": int(product.get("stock", 0)),
            "requested_quantity": row["requested_quantity"],
            "approved_quantity": row.get("approved_quantity"),
            "notes": row.get("notes", ""),
            "manager_notes": row.get("manager_notes", ""),
            "status": row["status"],
            "created_at": row["created_at"],
        })
    return payload
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
@router.put("/restock-requests/{request_id}/approve")
async def approve_restock(
    request_id: str,
    req: RestockApproveRequest,
    manager: dict = Depends(require_manager),
):
    """Approve a restock request. Moves to deliveryman queue."""
    db = get_supabase()
    dept_id = manager.get("department_id")

    # The request must belong to this department and still be awaiting a decision.
    pending = (
        db.table("restock_requests")
        .select("*")
        .eq("id", request_id)
        .eq("department_id", dept_id)
        .eq("status", "pending_manager")
        .execute()
    )
    if not pending.data:
        raise HTTPException(status_code=404, detail="Restock request not found or already processed")

    # The approved quantity defaults to what was originally requested.
    quantity = req.approved_quantity
    if quantity is None:
        quantity = pending.data[0]["requested_quantity"]

    db.table("restock_requests").update({
        "status": "approved_manager",
        "manager_approved_at": datetime.now(timezone.utc).isoformat(),
        "manager_notes": req.manager_notes,
        "approved_quantity": quantity,
    }).eq("id", request_id).execute()

    return {"message": "Restock request approved and moved to delivery queue"}
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
@router.put("/restock-requests/{request_id}/reject")
async def reject_restock(
    request_id: str,
    req: RestockRejectRequest,
    manager: dict = Depends(require_manager),
):
    """Reject a restock request."""
    db = get_supabase()
    dept_id = manager.get("department_id")

    # Only a still-pending request inside this manager's department can be rejected.
    pending = (
        db.table("restock_requests")
        .select("*")
        .eq("id", request_id)
        .eq("department_id", dept_id)
        .eq("status", "pending_manager")
        .execute()
    )
    if not pending.data:
        raise HTTPException(status_code=404, detail="Restock request not found or already processed")

    db.table("restock_requests").update({
        "status": "rejected_manager",
        "manager_notes": req.manager_notes,
    }).eq("id", request_id).execute()

    return {"message": "Restock request rejected"}
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
# --- Department Products & Transactions ---
|
| 553 |
+
|
| 554 |
+
@router.get("/products")
async def list_department_products(
    search: str = Query("", description="Search by product title"),
    manager: dict = Depends(require_manager),
):
    """List all products from staff in the manager's department."""
    db = get_supabase()
    dept_id = manager.get("department_id")
    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Sellers assigned to this department...
    staff = db.table("users").select("id, full_name").eq("department_id", dept_id).eq("role", "seller").execute()
    seller_ids = [s["id"] for s in (staff.data or [])]
    display_names = {s["id"]: s["full_name"] for s in (staff.data or [])}

    # ...plus the manager, who also lists products under their own ID.
    manager_id = manager["sub"]
    if manager_id not in seller_ids:
        seller_ids.append(manager_id)
        me = db.table("users").select("full_name").eq("id", manager_id).execute()
        if me.data:
            display_names[manager_id] = me.data[0]["full_name"]

    if not seller_ids:
        return []

    q = db.table("products").select("*").in_("seller_id", seller_ids).order("created_at", desc=True)
    if search:
        q = q.ilike("title", f"%{search}%")
    rows = q.execute().data or []

    listing = []
    for p in rows:
        listing.append({
            "id": p["id"],
            "title": p["title"],
            "description": p.get("description", ""),
            "price": float(p["price"]),
            "stock": int(p.get("stock", 0)),
            "images": p.get("images") or [],
            "is_active": p.get("is_active", True),
            "status": p.get("status", "pending"),
            "seller_id": p["seller_id"],
            "seller_name": display_names.get(p["seller_id"], "Unknown"),
            "created_at": p["created_at"],
        })
    return listing
|
| 606 |
+
|
| 607 |
+
|
| 608 |
+
@router.get("/transactions")
async def list_department_transactions(
    search: str = Query("", description="Search by buyer or product"),
    manager: dict = Depends(require_manager),
):
    """List all transactions from staff in the manager's department."""
    db = get_supabase()
    dept_id = manager.get("department_id")
    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Sellers of the department plus the manager themselves can appear as sellers.
    staff = db.table("users").select("id, full_name").eq("department_id", dept_id).eq("role", "seller").execute()
    seller_ids = [s["id"] for s in (staff.data or [])]
    manager_id = manager["sub"]
    if manager_id not in seller_ids:
        seller_ids.append(manager_id)
    if not seller_ids:
        return []

    txn_rows = (
        db.table("product_transactions")
        .select("*, products(title)")
        .in_("seller_id", seller_ids)
        .order("created_at", desc=True)
        .limit(100)
        .execute()
        .data
    ) or []

    # Batched name lookup covering buyers and sellers alike.
    buyer_ids = {t.get("buyer_id") for t in txn_rows if t.get("buyer_id")}
    lookup_ids = buyer_ids | set(seller_ids)
    names = {}
    if lookup_ids:
        users = db.table("users").select("id, full_name").in_("id", list(lookup_ids)).execute()
        names = {u["id"]: u["full_name"] for u in (users.data or [])}

    needle = search.lower()
    out = []
    for txn in txn_rows:
        title = (txn.get("products") or {}).get("title", "Unknown")
        # Case-insensitive filter on either the product title or the buyer's name.
        if search and needle not in title.lower() and needle not in names.get(txn.get("buyer_id"), "").lower():
            continue
        out.append({
            "id": txn["id"],
            "buyer_name": names.get(txn.get("buyer_id"), "Unknown"),
            "seller_name": names.get(txn.get("seller_id"), "Unknown"),
            "product_title": title,
            "quantity": int(txn.get("quantity", 1)),
            "amount": float(txn.get("amount", 0)),
            "seller_amount": float(txn.get("seller_amount", 0)),
            "delivery_fee": float(txn.get("delivery_fee", 0)),
            "purchase_type": txn.get("purchase_type", "delivery"),
            "status": txn.get("status", ""),
            "created_at": txn["created_at"],
        })
    return out
|
| 664 |
+
|
| 665 |
+
|
| 666 |
+
# --- Staff Removal ---
|
| 667 |
+
|
| 668 |
+
@router.delete("/staff/{user_id}/remove")
async def remove_staff(user_id: str, manager: dict = Depends(require_manager)):
    """Permanently delete a staff (seller) member of the manager's department.

    Despite the route name, this is a hard delete: the user's row is removed,
    after their non-financial personal data is cleared. Financial records are
    left in place (per the inline note, their user FKs null out via
    ON DELETE SET NULL — TODO confirm against migration_v10).

    Raises 400 if the manager has no department, targets themselves, or the
    target is not a seller; 404 if the user does not exist; 403 if the user
    belongs to a different department.
    """
    sb = get_supabase()
    dept_id = manager.get("department_id")
    manager_id = manager["sub"]

    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Prevent manager from removing themselves
    if user_id == manager_id:
        raise HTTPException(status_code=400, detail="You cannot remove yourself from the department")

    # Verify user exists and belongs to this manager's department
    user_resp = sb.table("users").select("id, role, department_id, full_name").eq("id", user_id).execute()
    if not user_resp.data:
        raise HTTPException(status_code=404, detail="User not found")

    user = user_resp.data[0]
    if user.get("department_id") != dept_id:
        raise HTTPException(status_code=403, detail="This user is not in your department")

    if user.get("role") != "seller":
        raise HTTPException(status_code=400, detail="Can only remove staff (seller) members")

    # Delete only non-financial personal data.
    # Financial records are preserved — their user FKs are set to NULL
    # automatically via ON DELETE SET NULL (migration_v10).
    sb.table("wishlist_items").delete().eq("buyer_id", user_id).execute()
    sb.table("cart_items").delete().eq("buyer_id", user_id).execute()
    sb.table("stored_value").delete().eq("user_id", user_id).execute()
    sb.table("user_balances").delete().eq("user_id", user_id).execute()
    sb.table("user_contacts").delete().eq("user_id", user_id).execute()

    # Permanently delete the user — DB cascades/nullifies all remaining FK references
    sb.table("users").delete().eq("id", user_id).execute()

    return {"message": f"Staff member '{user['full_name']}' has been permanently deleted"}
|
| 709 |
+
|
| 710 |
+
|
| 711 |
+
# --- Product Removal Request ---
|
| 712 |
+
|
| 713 |
+
@router.post("/products/{product_id}/request-removal")
async def request_product_removal(product_id: str, manager: dict = Depends(require_manager)):
    """Manager requests removal of a product. Sets status to 'pending_removal' for admin approval."""
    db = get_supabase()
    dept_id = manager.get("department_id")
    manager_id = manager["sub"]
    if not dept_id:
        raise HTTPException(status_code=400, detail="Manager is not assigned to a department")

    # Legitimate owners: the department's sellers plus the manager themselves.
    staff = db.table("users").select("id").eq("department_id", dept_id).eq("role", "seller").execute()
    owner_ids = [s["id"] for s in (staff.data or [])]
    if manager_id not in owner_ids:
        owner_ids.append(manager_id)

    found = db.table("products").select("id, status, seller_id").eq("id", product_id).execute()
    if not found.data:
        raise HTTPException(status_code=404, detail="Product not found")

    product = found.data[0]
    if product["seller_id"] not in owner_ids:
        raise HTTPException(status_code=403, detail="Product does not belong to your department")
    if product["status"] != "approved":
        raise HTTPException(status_code=400, detail="Only approved products can be requested for removal")

    db.table("products").update({
        "status": "pending_removal",
        "removal_requested_by": manager_id,
        "removal_requested_at": datetime.now(timezone.utc).isoformat(),
    }).eq("id", product_id).execute()

    return {"message": "Product removal requested. Awaiting admin approval."}
|
backend/routes/products.py
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Product routes — create, list, update, delete products.
|
| 3 |
+
When a product is created, its BERT embedding is computed and stored.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from typing import Optional
|
| 9 |
+
import uuid
|
| 10 |
+
import base64
|
| 11 |
+
|
| 12 |
+
from database import get_supabase, store_product_embedding
|
| 13 |
+
from models.bert_service import bert_service
|
| 14 |
+
from routes.auth import get_current_user
|
| 15 |
+
from config import SUPABASE_URL
|
| 16 |
+
|
| 17 |
+
router = APIRouter(prefix="/products", tags=["Products"])
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# --- Request/Response Models ---
|
| 21 |
+
|
| 22 |
+
class CreateProductRequest(BaseModel):
    """Request body for POST /products/ — a new product listing."""
    title: str  # required; validated non-empty (after strip) by the route
    description: str = ""
    price: float  # must be > 0 (enforced in create_product)
    stock: int = 1  # must be >= 1 (enforced in create_product)
    images: list[str] = []  # Array of image URLs or data URIs; 1–5 required
    tracking_number: Optional[str] = None  # Parcel tracking number
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class UpdateProductRequest(BaseModel):
    """Request body for PUT /products/{id} — all fields optional (partial update);
    fields left as None are not written to the database."""
    title: Optional[str] = None
    description: Optional[str] = None
    price: Optional[float] = None
    stock: Optional[int] = None
    images: Optional[list[str]] = None  # max 5 (enforced in update_product)
    is_active: Optional[bool] = None
    tracking_number: Optional[str] = None
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class ProductResponse(BaseModel):
    """Public product representation returned by the /products routes."""
    id: str
    seller_id: str
    title: str
    description: str
    price: float
    stock: int = 0
    images: list[str] = []
    tracking_number: Optional[str] = None
    is_active: bool
    status: str = "pending"  # approval workflow status (e.g. pending/approved)
    created_at: str  # ISO timestamp string as stored in the DB
    seller_name: Optional[str] = None  # department name when available, else seller's full name
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def build_product_response(p, seller_name=""):
    """Map a raw `products` DB row onto the ProductResponse schema."""
    fields = {
        "id": p["id"],
        "seller_id": p["seller_id"],
        "title": p["title"],
        "description": p["description"] or "",
        "price": float(p["price"]),
        "stock": int(p.get("stock", 0)),
        "images": p.get("images") or [],
        "tracking_number": p.get("tracking_number"),
        "is_active": p["is_active"],
        "status": p.get("status", "pending"),
        "created_at": p["created_at"],
        "seller_name": seller_name,
    }
    return ProductResponse(**fields)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
# --- Image Upload Endpoint ---
|
| 75 |
+
|
| 76 |
+
@router.post("/upload-image")
async def upload_image(
    file: UploadFile = File(...),
    current_user: dict = Depends(get_current_user),
):
    """Upload a product image to Supabase Storage. Returns the public URL.

    Validates content type (JPEG/PNG/WebP/GIF) and size (<= 5MB), stores the
    object under `<user_id>/<random>.<ext>` in the `product-images` bucket,
    and returns `{"url": ..., "filename": ...}`.
    """
    sb = get_supabase()

    # Validate file type
    allowed = {"image/jpeg", "image/png", "image/webp", "image/gif"}
    if file.content_type not in allowed:
        raise HTTPException(status_code=400, detail=f"File type {file.content_type} not allowed. Use JPEG, PNG, WebP, or GIF.")

    # Validate file size (max 5MB)
    contents = await file.read()
    if len(contents) > 5 * 1024 * 1024:
        raise HTTPException(status_code=400, detail="File too large. Max 5MB.")

    # Generate a unique, per-user object path to avoid collisions
    ext = file.filename.split(".")[-1] if "." in file.filename else "jpg"
    filename = f"{current_user['sub']}/{uuid.uuid4().hex}.{ext}"

    # Upload to Supabase Storage
    try:
        sb.storage.from_("product-images").upload(
            filename,
            contents,
            file_options={"content-type": file.content_type},
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to upload image: {str(e)}")

    # FIX: the URL previously ended in a literal "(unknown)" instead of the
    # uploaded object's path, so every returned URL pointed at nothing.
    public_url = f"{SUPABASE_URL}/storage/v1/object/public/product-images/{filename}"

    return {"url": public_url, "filename": filename}
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# --- Routes ---
|
| 115 |
+
|
| 116 |
+
@router.post("/", response_model=ProductResponse)
async def create_product(req: CreateProductRequest, current_user: dict = Depends(get_current_user)):
    """
    Create a new product listing.
    Automatically computes BERT embedding if the model is loaded.
    """
    db = get_supabase()
    uid = current_user["sub"]

    # Product creation is restricted to managers.
    who = db.table("users").select("role, department_id").eq("id", uid).execute()
    if not who.data:
        raise HTTPException(status_code=404, detail="User not found")
    if who.data[0]["role"] != "manager":
        raise HTTPException(status_code=403, detail="Only managers can create products")

    # Input validation (guard clauses).
    if not (req.title and req.title.strip()):
        raise HTTPException(status_code=400, detail="Product title is required")
    if req.price <= 0:
        raise HTTPException(status_code=400, detail="Price must be greater than 0")
    if req.stock < 1:
        raise HTTPException(status_code=400, detail="Stock must be at least 1")
    if not req.images:
        raise HTTPException(status_code=400, detail="At least one product image is required")
    if len(req.images) > 5:
        raise HTTPException(status_code=400, detail="Maximum 5 images allowed")

    # Insert the listing.
    inserted = db.table("products").insert({
        "seller_id": uid,
        "title": req.title.strip(),
        "description": req.description.strip(),
        "price": req.price,
        "stock": req.stock,
        "images": req.images,
        "tracking_number": req.tracking_number,
    }).execute()
    if not inserted.data:
        raise HTTPException(status_code=500, detail="Failed to create product")
    product = inserted.data[0]

    # Best-effort: store a title embedding for semantic search; never block creation.
    try:
        if bert_service._loaded:
            store_product_embedding(product["id"], bert_service.compute_embedding(req.title))
            print(f"[Products] Embedding computed for product: {product['id']}")
    except Exception as e:
        print(f"[Products] Warning: Failed to compute embedding: {e}")

    return build_product_response(product)
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
@router.post("/backfill-embeddings")
async def backfill_embeddings():
    """
    Compute and store BERT embeddings for all products that are missing them.
    Use this after adding products directly to the database.
    """
    if not bert_service._loaded:
        raise HTTPException(status_code=503, detail="BERT model not loaded. Cannot compute embeddings.")

    db = get_supabase()
    # Only rows whose embedding column is still NULL need work.
    missing = db.table("products").select("id, title").is_("embedding", "null").execute()
    rows = missing.data
    if not rows:
        return {"message": "All products already have embeddings.", "updated": 0}

    ok_count = 0
    failures = []
    for row in rows:
        try:
            store_product_embedding(row["id"], bert_service.compute_embedding(row["title"]))
            ok_count += 1
            print(f"[Backfill] Embedded: {row['id']} — {row['title']}")
        except Exception as e:
            failures.append({"id": row["id"], "title": row["title"], "error": str(e)})
            print(f"[Backfill] Failed: {row['id']} — {e}")

    return {
        "message": f"Backfill complete. {ok_count}/{len(rows)} products updated.",
        "updated": ok_count,
        "total": len(rows),
        "errors": failures,
    }
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
@router.get("/", response_model=list[ProductResponse])
async def list_products(limit: int = 50, offset: int = 0):
    """List all active products with stock > 0 (public, no auth required)."""
    db = get_supabase()
    page = (
        db.table("products")
        .select("*, users!products_seller_id_fkey(full_name, department_id)")
        .eq("is_active", True)
        .eq("status", "approved")
        .gt("stock", 0)
        .order("created_at", desc=True)
        .range(offset, offset + limit - 1)
        .execute()
    )
    rows = page.data

    # Batch-resolve department names so sellers can be labelled by department.
    wanted_depts = {
        (r.get("users") or {}).get("department_id")
        for r in rows
        if (r.get("users") or {}).get("department_id")
    }
    dept_name_by_id = {}
    if wanted_depts:
        depts = db.table("departments").select("id, name").in_("id", list(wanted_depts)).execute()
        dept_name_by_id = {d["id"]: d["name"] for d in (depts.data or [])}

    out = []
    for r in rows:
        seller = r.get("users") or {}
        dept = seller.get("department_id")
        # Prefer the department name; fall back to the seller's own name.
        if dept and dept in dept_name_by_id:
            label = dept_name_by_id[dept]
        else:
            label = seller.get("full_name", "")
        out.append(build_product_response(r, label))
    return out
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
@router.get("/my", response_model=list[ProductResponse])
async def list_my_products(current_user: dict = Depends(get_current_user)):
    """List products owned by the current user or their department (includes all, even out of stock)."""
    db = get_supabase()
    uid = current_user["sub"]
    owner_ids = [uid]

    # Department members also see products the department's manager listed.
    me = db.table("users").select("role, department_id, manager_id").eq("id", uid).execute()
    if me.data and me.data[0].get("department_id"):
        dept = db.table("departments").select("manager_id").eq("id", me.data[0]["department_id"]).execute()
        if dept.data and dept.data[0].get("manager_id"):
            boss = dept.data[0]["manager_id"]
            if boss not in owner_ids:
                owner_ids.append(boss)

    rows = db.table("products").select("*").in_("seller_id", owner_ids).order("created_at", desc=True).execute()
    return [build_product_response(r) for r in rows.data]
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
@router.get("/{product_id}", response_model=ProductResponse)
async def get_product(product_id: str):
    """Get a single product by ID (public)."""
    db = get_supabase()
    found = db.table("products").select("*, users!products_seller_id_fkey(full_name, department_id)").eq("id", product_id).execute()
    if not found.data:
        raise HTTPException(status_code=404, detail="Product not found")

    row = found.data[0]
    seller = row.get("users") or {}
    label = seller.get("full_name", "")
    # Prefer the department name over the individual seller's name when present.
    dept = seller.get("department_id")
    if dept:
        dept_row = db.table("departments").select("name").eq("id", dept).execute()
        if dept_row.data:
            label = dept_row.data[0]["name"]
    return build_product_response(row, label)
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
@router.put("/{product_id}", response_model=ProductResponse)
async def update_product(product_id: str, req: UpdateProductRequest, current_user: dict = Depends(get_current_user)):
    """Update a product. Only the owner can update.

    Raises 404 if the product does not exist, 403 if the caller is not the
    owner, 400 for an empty update or more than 5 images, and 500 if the
    update unexpectedly returns no row.
    """
    sb = get_supabase()

    # Verify ownership
    existing = sb.table("products").select("seller_id").eq("id", product_id).execute()
    if not existing.data:
        raise HTTPException(status_code=404, detail="Product not found")
    if existing.data[0]["seller_id"] != current_user["sub"]:
        raise HTTPException(status_code=403, detail="Not your product")

    # Build update dict (only non-None fields)
    update_data = {k: v for k, v in req.model_dump().items() if v is not None}
    if not update_data:
        raise HTTPException(status_code=400, detail="No fields to update")

    # Validate images limit
    if req.images is not None and len(req.images) > 5:
        raise HTTPException(status_code=400, detail="Maximum 5 images allowed")

    result = sb.table("products").update(update_data).eq("id", product_id).execute()
    # FIX: guard against an empty result (e.g. RLS filtering or a concurrent
    # delete) instead of crashing with an IndexError on result.data[0].
    if not result.data:
        raise HTTPException(status_code=500, detail="Failed to update product")
    p = result.data[0]

    # Re-compute embedding if title changed (best-effort; never fail the update)
    if req.title:
        try:
            if bert_service._loaded:
                embedding = bert_service.compute_embedding(req.title)
                store_product_embedding(product_id, embedding)
        except Exception as e:
            print(f"[Products] Warning: Failed to recompute embedding: {e}")

    return build_product_response(p)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
@router.delete("/{product_id}")
async def delete_product(product_id: str, current_user: dict = Depends(get_current_user)):
    """Soft-delete a product (set is_active=False). Only the owner can delete."""
    db = get_supabase()

    owner = db.table("products").select("seller_id").eq("id", product_id).execute()
    if not owner.data:
        raise HTTPException(status_code=404, detail="Product not found")
    if owner.data[0]["seller_id"] != current_user["sub"]:
        raise HTTPException(status_code=403, detail="Not your product")

    # Soft delete: the row stays for historical transactions, it just stops listing.
    db.table("products").update({"is_active": False}).eq("id", product_id).execute()
    return {"message": "Product deleted successfully"}
|
backend/routes/restock.py
ADDED
|
@@ -0,0 +1,402 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Restock routes — staff requests, delivery queue, fulfillment.
|
| 3 |
+
Workflow: Staff Request → Manager Approval → Deliveryman Queue → Delivery → Stock Update
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends, Query
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from typing import Optional
|
| 9 |
+
from database import get_supabase
|
| 10 |
+
from routes.auth import get_current_user
|
| 11 |
+
from datetime import datetime, timezone
|
| 12 |
+
|
| 13 |
+
router = APIRouter(prefix="/restock", tags=["Restock"])
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# --- Helpers ---
|
| 17 |
+
|
| 18 |
+
async def require_seller(current_user: dict = Depends(get_current_user)):
    """Dependency: allow only users whose role is 'seller' (staff)."""
    db = get_supabase()
    rows = db.table("users").select("role, department_id").eq("id", current_user["sub"]).execute().data
    record = rows[0] if rows else {}
    if record.get("role") != "seller":
        raise HTTPException(status_code=403, detail="Staff/seller access required")
    # Attach the staff member's department to the token payload for downstream handlers.
    current_user["department_id"] = record.get("department_id")
    return current_user
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
async def require_delivery(current_user: dict = Depends(get_current_user)):
    """Dependency: allow only users whose role is 'delivery'."""
    db = get_supabase()
    rows = db.table("users").select("role").eq("id", current_user["sub"]).execute().data
    record = rows[0] if rows else {}
    if record.get("role") != "delivery":
        raise HTTPException(status_code=403, detail="Delivery user access required")
    return current_user
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# --- Request Models ---
|
| 38 |
+
|
| 39 |
+
class RestockRequestCreate(BaseModel):
    """Payload for a staff member's restock request.

    Quantity is range-checked in the route handler (must be >= 1), not here,
    so invalid values produce a 400 rather than a 422.
    """
    product_id: str  # UUID of the product to restock
    requested_quantity: int  # units requested; validated >= 1 in the route
    notes: str = ""  # optional free-text note for the manager
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class DeliveryModifyRequest(BaseModel):
    """Payload for a deliveryman updating the delivery notes on a restock request."""
    delivery_notes: str = ""  # free-text note; overwrites any existing value
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# --- Staff Routes ---
|
| 50 |
+
|
| 51 |
+
@router.post("/request")
async def create_restock_request(req: RestockRequestCreate, seller: dict = Depends(require_seller)):
    """Staff creates a restock request for one of their products."""
    db = get_supabase()
    staff_id = seller["sub"]
    department_id = seller.get("department_id")

    # Guard clauses: department membership, then quantity sanity.
    if not department_id:
        raise HTTPException(status_code=400, detail="You are not assigned to a department. Only department staff can request restocks.")
    if req.requested_quantity < 1:
        raise HTTPException(status_code=400, detail="Quantity must be at least 1")

    # The product must exist and be owned by this staff member, or by the
    # manager of the staff member's department.
    prod_rows = db.table("products").select("id, seller_id").eq("id", req.product_id).execute()
    if not prod_rows.data:
        raise HTTPException(status_code=404, detail="Product not found")
    owner_id = prod_rows.data[0]["seller_id"]
    if owner_id != staff_id:
        dept_rows = db.table("departments").select("manager_id").eq("id", department_id).execute()
        manager_owns = bool(dept_rows.data) and dept_rows.data[0].get("manager_id") == owner_id
        if not manager_owns:
            raise HTTPException(status_code=403, detail="You can only request restock for products in your department")

    # Insert the request in the initial workflow state awaiting manager review.
    inserted = db.table("restock_requests").insert({
        "staff_id": staff_id,
        "department_id": department_id,
        "product_id": req.product_id,
        "requested_quantity": req.requested_quantity,
        "notes": req.notes,
        "status": "pending_manager",
    }).execute()
    if not inserted.data:
        raise HTTPException(status_code=500, detail="Failed to create restock request")

    return {"message": "Restock request submitted for manager approval", "request": inserted.data[0]}
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
@router.get("/my-requests")
async def get_my_requests(seller: dict = Depends(require_seller)):
    """List all restock requests created by this staff member."""
    db = get_supabase()

    rows = db.table("restock_requests").select(
        "*, products(title, price, stock, images)"
    ).eq("staff_id", seller["sub"]).order("created_at", desc=True).limit(50).execute()

    def _shape(row: dict) -> dict:
        # Flatten the joined product columns onto the request record.
        product = row.get("products") or {}
        return {
            "id": row["id"],
            "product_id": row["product_id"],
            "product_title": product.get("title", ""),
            "product_images": product.get("images", []),
            "current_stock": int(product.get("stock", 0)),
            "requested_quantity": row["requested_quantity"],
            "approved_quantity": row.get("approved_quantity"),
            "notes": row.get("notes", ""),
            "manager_notes": row.get("manager_notes", ""),
            "delivery_notes": row.get("delivery_notes", ""),
            "status": row["status"],
            "created_at": row["created_at"],
        }

    return [_shape(row) for row in (rows.data or [])]
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
# --- Delivery Routes ---
|
| 127 |
+
|
| 128 |
+
@router.get("/delivery-queue")
async def get_delivery_queue(delivery_user: dict = Depends(require_delivery)):
    """Get restock requests approved by managers, ready for delivery pickup."""
    db = get_supabase()

    # Unassigned, manager-approved requests, oldest approval first.
    rows = db.table("restock_requests").select(
        "*, products(title, price, stock, images)"
    ).eq("status", "approved_manager").is_("delivery_user_id", "null").order("manager_approved_at", desc=False).limit(50).execute()
    pending = rows.data or []

    # Batch-resolve staff and department display names (one query each).
    staff_names = {}
    staff_ids = {r["staff_id"] for r in pending}
    if staff_ids:
        users = db.table("users").select("id, full_name").in_("id", list(staff_ids)).execute()
        staff_names = {u["id"]: u["full_name"] for u in (users.data or [])}

    dept_names = {}
    dept_ids = {r["department_id"] for r in pending}
    if dept_ids:
        depts = db.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
        dept_names = {d["id"]: d["name"] for d in (depts.data or [])}

    queue = []
    for r in pending:
        product = r.get("products") or {}
        queue.append({
            "id": r["id"],
            "staff_id": r["staff_id"],
            "staff_name": staff_names.get(r["staff_id"], "Unknown"),
            "department_id": r["department_id"],
            "department_name": dept_names.get(r["department_id"], "Unknown"),
            "product_id": r["product_id"],
            "product_title": product.get("title", ""),
            "product_images": product.get("images", []),
            # Manager may have adjusted the quantity; fall back to the original ask.
            "quantity": r.get("approved_quantity") or r["requested_quantity"],
            "notes": r.get("notes", ""),
            "manager_notes": r.get("manager_notes", ""),
            "status": r["status"],
            "manager_approved_at": r.get("manager_approved_at", ""),
            "created_at": r["created_at"],
        })
    return queue
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
@router.post("/{request_id}/accept")
async def accept_restock_delivery(request_id: str, delivery_user: dict = Depends(require_delivery)):
    """Deliveryman accepts a restock request."""
    db = get_supabase()
    courier_id = delivery_user["sub"]

    # Confirm the request is still in the manager-approved state.
    # NOTE(review): check-then-update is not atomic; two couriers racing here
    # could both pass — confirm whether a DB-level guard exists.
    rows = db.table("restock_requests").select("*").eq("id", request_id).eq("status", "approved_manager").execute()
    if not rows.data:
        raise HTTPException(status_code=404, detail="Restock request not found or not available")
    if rows.data[0].get("delivery_user_id"):
        raise HTTPException(status_code=400, detail="This request is already assigned to another delivery user")

    db.table("restock_requests").update({
        "status": "accepted_delivery",
        "delivery_user_id": courier_id,
        "delivery_accepted_at": datetime.now(timezone.utc).isoformat(),
    }).eq("id", request_id).execute()

    return {"message": "Restock delivery accepted"}
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@router.put("/{request_id}/modify")
async def modify_restock_delivery(
    request_id: str,
    req: DeliveryModifyRequest,
    delivery_user: dict = Depends(require_delivery),
):
    """Deliveryman modifies delivery notes on a restock request."""
    db = get_supabase()
    courier_id = delivery_user["sub"]

    # Only the assigned courier may edit notes, and only while in flight.
    rows = db.table("restock_requests").select("*").eq("id", request_id).eq(
        "delivery_user_id", courier_id
    ).in_("status", ["accepted_delivery", "in_transit"]).execute()
    if not rows.data:
        raise HTTPException(status_code=404, detail="Restock request not found or not assigned to you")

    db.table("restock_requests").update({
        "delivery_notes": req.delivery_notes,
    }).eq("id", request_id).execute()

    return {"message": "Delivery notes updated"}
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
@router.put("/{request_id}/deliver")
async def complete_restock_delivery(request_id: str, delivery_user: dict = Depends(require_delivery)):
    """Mark restock as delivered and increment product stock. Deduct ₱90 delivery fee from admin."""
    sb = get_supabase()
    user_id = delivery_user["sub"]

    # Flat fee per restock delivery: debited from admin, credited to courier.
    RESTOCK_DELIVERY_FEE = 90.00

    # Only the assigned courier can complete, and only from an in-flight state.
    restock = sb.table("restock_requests").select("*").eq("id", request_id).eq(
        "delivery_user_id", user_id
    ).in_("status", ["accepted_delivery", "in_transit"]).execute()

    if not restock.data:
        raise HTTPException(status_code=404, detail="Restock request not found or not assigned to you")

    r = restock.data[0]
    # Manager may have approved a different quantity than was requested.
    qty = r.get("approved_quantity") or r["requested_quantity"]

    # Get product info for metadata
    product = sb.table("products").select("stock, title").eq("id", r["product_id"]).execute()
    product_title = product.data[0].get("title", "Product") if product.data else "Product"

    # Increment product stock
    # NOTE(review): read-modify-write is not atomic; a concurrent stock change
    # between the select above and this update would be lost. Confirm whether
    # an atomic DB increment (RPC) should be used instead.
    if product.data:
        new_stock = int(product.data[0]["stock"]) + qty
        sb.table("products").update({"stock": new_stock}).eq("id", r["product_id"]).execute()

    # Update restock request
    sb.table("restock_requests").update({
        "status": "delivered",
        "delivered_at": datetime.now(timezone.utc).isoformat(),
    }).eq("id", request_id).execute()

    # Get delivery person name
    delivery_user_info = sb.table("users").select("full_name").eq("id", user_id).execute()
    delivery_name = delivery_user_info.data[0]["full_name"] if delivery_user_info.data else "Deliveryman"

    # Get admin user
    # NOTE(review): assumes a single admin account; the fee is debited from the
    # first user with role "admin". Skipped silently if no admin exists.
    admin_user = sb.table("users").select("id").eq("role", "admin").limit(1).execute()
    if admin_user.data:
        admin_id = admin_user.data[0]["id"]

        # Deduct ₱90 from admin balance
        # (balance update is skipped if the admin has no balance row, but the
        # ledger entry below is still written — presumably intentional; verify)
        admin_bal = sb.table("user_balances").select("balance").eq("user_id", admin_id).execute()
        if admin_bal.data:
            new_admin_bal = float(admin_bal.data[0]["balance"]) - RESTOCK_DELIVERY_FEE
            sb.table("user_balances").update({"balance": new_admin_bal}).eq("user_id", admin_id).execute()

        # Record admin SVF deduction with metadata
        sb.table("stored_value").insert({
            "user_id": admin_id,
            "transaction_type": "restock_payment",
            "amount": RESTOCK_DELIVERY_FEE,
            "metadata": {
                "restock_request_id": request_id,
                "product_title": product_title,
                "quantity": qty,
                "delivery_user_name": delivery_name,
                "delivery_user_id": user_id,
            },
        }).execute()

    # Credit ₱90 to delivery person's balance
    # (both the credit and its ledger entry require an existing balance row)
    del_bal = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
    if del_bal.data:
        new_del_bal = float(del_bal.data[0]["balance"]) + RESTOCK_DELIVERY_FEE
        sb.table("user_balances").update({"balance": new_del_bal}).eq("user_id", user_id).execute()

        # Record delivery person SVF deposit
        sb.table("stored_value").insert({
            "user_id": user_id,
            "transaction_type": "restock_earning",
            "amount": RESTOCK_DELIVERY_FEE,
            "metadata": {
                "restock_request_id": request_id,
                "product_title": product_title,
                "quantity": qty,
            },
        }).execute()

    return {"message": f"Restock delivered. {qty} units added to product stock. ₱{RESTOCK_DELIVERY_FEE:.2f} delivery fee processed."}
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
@router.get("/delivery-history")
async def get_restock_delivery_history(delivery_user: dict = Depends(require_delivery)):
    """Get deliveryman's completed restock delivery history."""
    db = get_supabase()

    rows = db.table("restock_requests").select(
        "*, products(title, price, stock, images)"
    ).eq("delivery_user_id", delivery_user["sub"]).in_(
        "status", ["delivered", "accepted_delivery", "in_transit"]
    ).order("delivered_at", desc=True).limit(100).execute()
    records = rows.data or []

    # Batch-resolve staff and department display names.
    staff_names = {}
    staff_ids = {r["staff_id"] for r in records}
    if staff_ids:
        users = db.table("users").select("id, full_name").in_("id", list(staff_ids)).execute()
        staff_names = {u["id"]: u["full_name"] for u in (users.data or [])}

    dept_names = {}
    dept_ids = {r["department_id"] for r in records}
    if dept_ids:
        depts = db.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
        dept_names = {d["id"]: d["name"] for d in (depts.data or [])}

    history = []
    for r in records:
        product = r.get("products") or {}
        history.append({
            "id": r["id"],
            "staff_name": staff_names.get(r["staff_id"], "Unknown"),
            "department_name": dept_names.get(r["department_id"], "Unknown"),
            "product_title": product.get("title", ""),
            "product_images": product.get("images", []),
            # Effective quantity: manager-approved value, else the original ask.
            "quantity": r.get("approved_quantity") or r["requested_quantity"],
            "notes": r.get("notes", ""),
            "delivery_notes": r.get("delivery_notes", ""),
            "status": r["status"],
            "delivered_at": r.get("delivered_at", ""),
            "created_at": r["created_at"],
        })
    return history
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
@router.get("/active-deliveries")
async def get_active_restock_deliveries(delivery_user: dict = Depends(require_delivery)):
    """Get deliveryman's active restock deliveries."""
    db = get_supabase()

    # In-flight deliveries for this courier, oldest acceptance first.
    rows = db.table("restock_requests").select(
        "*, products(title, price, stock, images)"
    ).eq("delivery_user_id", delivery_user["sub"]).in_(
        "status", ["accepted_delivery", "in_transit"]
    ).order("delivery_accepted_at", desc=False).execute()
    active = rows.data or []

    # Batch-resolve staff and department display names.
    staff_names = {}
    staff_ids = {r["staff_id"] for r in active}
    if staff_ids:
        users = db.table("users").select("id, full_name").in_("id", list(staff_ids)).execute()
        staff_names = {u["id"]: u["full_name"] for u in (users.data or [])}

    dept_names = {}
    dept_ids = {r["department_id"] for r in active}
    if dept_ids:
        depts = db.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
        dept_names = {d["id"]: d["name"] for d in (depts.data or [])}

    results = []
    for r in active:
        product = r.get("products") or {}
        results.append({
            "id": r["id"],
            "staff_name": staff_names.get(r["staff_id"], "Unknown"),
            "department_name": dept_names.get(r["department_id"], "Unknown"),
            "product_title": product.get("title", ""),
            "product_images": product.get("images", []),
            # Effective quantity: manager-approved value, else the original ask.
            "quantity": r.get("approved_quantity") or r["requested_quantity"],
            "delivery_notes": r.get("delivery_notes", ""),
            "status": r["status"],
            "delivery_accepted_at": r.get("delivery_accepted_at", ""),
        })
    return results
|
backend/routes/search.py
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Search route — the CORE of the thesis.
|
| 3 |
+
|
| 4 |
+
Architecture (from README.md):
|
| 5 |
+
User Query: "I want an affordable dress"
|
| 6 |
+
|
|
| 7 |
+
================|================
|
| 8 |
+
| | |
|
| 9 |
+
[Intent Class.] [Slot/Entity [BERT Embedding]
|
| 10 |
+
| Extraction] |
|
| 11 |
+
v | v
|
| 12 |
+
intent:purchase v 768-dim vector
|
| 13 |
+
{ |
|
| 14 |
+
category: "dress" |
|
| 15 |
+
price: "affordable" |
|
| 16 |
+
} |
|
| 17 |
+
================|===============|
|
| 18 |
+
| |
|
| 19 |
+
[Query Rewriting] |
|
| 20 |
+
"dress" + filters |
|
| 21 |
+
| |
|
| 22 |
+
===============|================
|
| 23 |
+
| | |
|
| 24 |
+
[Supabase Filter] [pgvector [CrossEncoder
|
| 25 |
+
price <= budget Similarity] Ranker]
|
| 26 |
+
| | |
|
| 27 |
+
===============|================
|
| 28 |
+
|
|
| 29 |
+
[ESCI Classifier]
|
| 30 |
+
E / S / C / I
|
| 31 |
+
|
|
| 32 |
+
[Score Blending]
|
| 33 |
+
0.5*R + 0.3*C + 0.2*S
|
| 34 |
+
|
|
| 35 |
+
Final Ranked Results
|
| 36 |
+
|
| 37 |
+
Pipeline Stages:
|
| 38 |
+
1. BERT Embedding: Query → 768-dimensional vector
|
| 39 |
+
2. pgvector Similarity: Top-50 candidates via cosine similarity
|
| 40 |
+
3. CrossEncoder Re-Ranking: Pairwise relevance scoring
|
| 41 |
+
4. ESCI Classifier: E/S/C/I classification with softmax probabilities
|
| 42 |
+
5. Score Blending: 0.5×Ranker + 0.3×Classifier + 0.2×Similarity
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
from fastapi import APIRouter, Query, HTTPException, UploadFile, File
|
| 46 |
+
from pydantic import BaseModel
|
| 47 |
+
from typing import Optional
|
| 48 |
+
import traceback
|
| 49 |
+
import tempfile
|
| 50 |
+
import os
|
| 51 |
+
import numpy as np
|
| 52 |
+
|
| 53 |
+
from models.bert_service import bert_service
|
| 54 |
+
from models.classifier import classifier_service, LABEL_PRIORITY
|
| 55 |
+
from models.ranker import ranker_service
|
| 56 |
+
from models.query_rewriter import query_rewriter
|
| 57 |
+
from database import search_similar_products, search_similar_products_filtered, get_supabase
|
| 58 |
+
from config import (
|
| 59 |
+
SEARCH_TOP_K_CANDIDATES,
|
| 60 |
+
SEARCH_MAX_RESULTS,
|
| 61 |
+
RANKER_WEIGHT,
|
| 62 |
+
CLASSIFIER_WEIGHT,
|
| 63 |
+
SIMILARITY_WEIGHT,
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
router = APIRouter(prefix="/search", tags=["Search"])
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# =============================================================================
|
| 70 |
+
# Response Models
|
| 71 |
+
# =============================================================================
|
| 72 |
+
|
| 73 |
+
class SearchResultItem(BaseModel):
    """One ranked product in a search response, with all scoring components
    exposed so the frontend (and thesis evaluation) can inspect the pipeline.
    """
    id: str
    title: str
    description: str
    price: float
    stock: int = 0
    image_url: str  # first image URL, or "" when the product has none
    seller_id: str
    # Scoring components
    similarity: float = 0.0       # pgvector cosine similarity (0-1)
    ranker_score: float = 0.0     # CrossEncoder relevance score (0-1, normalized)
    relevance_score: float = 0.0  # final blended score of the three signals
    # ESCI classification (Exact / Substitute / Complement / Irrelevant)
    relevance_label: str = "Exact"     # E/S/C/I classification
    relevance_confidence: float = 1.0  # softmax confidence of the chosen label
    exact_prob: float = 0.0
    substitute_prob: float = 0.0
    complement_prob: float = 0.0
    irrelevant_prob: float = 0.0
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class SearchResponse(BaseModel):
    """Top-level search response: ranked results plus query-understanding
    metadata (intents, slots, rewritten query, per-group filters).
    """
    query: str           # the original user query, echoed back
    total_results: int
    results: list[SearchResultItem]
    message: str = ""    # optional human-readable status note
    # Query Rewriting metadata (from Intent + Slot extraction)
    rewritten_query: str = ""
    detected_intents: list[str] = []
    extracted_slots: dict = {}
    applied_filters: dict = {}
    search_groups: list[dict] = []  # [{search_text, filters}, ...] for compound queries
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
# =============================================================================
|
| 108 |
+
# Helper Functions
|
| 109 |
+
# =============================================================================
|
| 110 |
+
|
| 111 |
+
def _first_image(images_field) -> str:
|
| 112 |
+
"""Extract the first image URL from a product's images field."""
|
| 113 |
+
images = images_field or []
|
| 114 |
+
if isinstance(images, str):
|
| 115 |
+
return images
|
| 116 |
+
if isinstance(images, list) and len(images) > 0:
|
| 117 |
+
return images[0]
|
| 118 |
+
return ""
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _compute_blended_score(
    ranker_score: float,
    classifier_priority: float,
    similarity: float,
    w_ranker: float = RANKER_WEIGHT,
    w_classifier: float = CLASSIFIER_WEIGHT,
    w_similarity: float = SIMILARITY_WEIGHT,
) -> float:
    """
    Score Blending: weighted sum of the three ranking signals,

        relevance_score = w_ranker*R + w_classifier*C + w_similarity*S

    Where:
    - R = Ranker score (CrossEncoder, normalized 0-1)
    - C = Classifier priority (E=1.0, S=0.67, C=0.33, I=0.0)
    - S = Similarity (pgvector cosine similarity, 0-1)

    Default weights come from config (RANKER_WEIGHT / CLASSIFIER_WEIGHT /
    SIMILARITY_WEIGHT); callers may override them per call, and the search
    pipeline does so depending on which models are loaded.
    """
    return (
        w_ranker * ranker_score +
        w_classifier * classifier_priority +
        w_similarity * similarity
    )
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _label_to_priority_weight(label: str) -> float:
    """Map an ESCI label onto a blending weight: E=1.0, S=0.67, C=0.33, I=0.0.

    Unknown labels fall back to Irrelevant (priority 3, weight 0.0).
    """
    rank = LABEL_PRIORITY.get(label, 3)
    # Invert the 0..3 priority scale into a 1.0..0.0 weight.
    return (3 - rank) / 3
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _run_search_pipeline(
    search_text: str,
    filters: dict,
    original_query: str,
    max_candidates: int,
    include_complements: bool,
    include_substitutes: bool,
    show_all: bool,
) -> list[SearchResultItem]:
    """
    Core search pipeline for a single search group.
    Stages: BERT Embedding -> pgvector -> CrossEncoder -> ESCI -> Score Blending

    Args:
        search_text: rewritten group text used for the embedding lookup.
        filters: optional {price_min, price_max, brand, color} constraints.
        original_query: the user's raw query, fed to the CrossEncoder ranker.
        max_candidates: top-k candidate count requested from pgvector.
        include_complements / include_substitutes: whether those ESCI labels
            survive filtering (ignored when show_all is True).
        show_all: admin mode — skip similarity/relevance thresholds entirely.

    Returns:
        Scored SearchResultItem list (unsorted; caller handles ordering).
    """
    query_embedding = bert_service.compute_embedding(search_text)

    # Candidate retrieval: filtered variant only when slot extraction produced filters.
    if filters:
        raw_candidates = search_similar_products_filtered(
            query_embedding, top_k=max_candidates,
            price_min=filters.get("price_min"), price_max=filters.get("price_max"),
            brand=filters.get("brand"), color=filters.get("color"),
        )
    else:
        raw_candidates = search_similar_products(query_embedding, top_k=max_candidates)

    print(f"[Search] '{search_text}': {len(raw_candidates)} raw candidates")

    # Hard post-filter: enforce price constraints even if pgvector missed them
    # (safety net for cases where the DB filter didn't apply, e.g. no-filter branch)
    if filters.get("price_max") is not None:
        raw_candidates = [c for c in raw_candidates if float(c["price"]) <= filters["price_max"]]
    if filters.get("price_min") is not None:
        raw_candidates = [c for c in raw_candidates if float(c["price"]) >= filters["price_min"]]

    # Drop candidates with very low cosine similarity unless in admin mode.
    MIN_SIMILARITY_THRESHOLD = 0.20
    candidates = raw_candidates if show_all else [c for c in raw_candidates if c["similarity"] >= MIN_SIMILARITY_THRESHOLD]
    if not candidates:
        return []

    # Stage 3: CrossEncoder re-ranking against the ORIGINAL query text.
    # Falls back to similarity scores when the ranker model isn't loaded.
    product_titles = [c["title"] for c in candidates]
    if ranker_service._loaded:
        raw_ranker_scores = ranker_service.rank(original_query, product_titles)
        ranker_scores = ranker_service.normalize_scores(raw_ranker_scores)
    else:
        ranker_scores = [c["similarity"] for c in candidates]

    # Stage 4: ESCI classification; default to "Exact" when the model isn't loaded.
    product_embeddings = np.array([c["embedding"] for c in candidates])
    if classifier_service._loaded:
        classifications = classifier_service.classify_batch(query_embedding, product_embeddings)
    else:
        classifications = [{"label": "Exact", "confidence": 1.0, "class_id": 0,
                            "exact_prob": 1.0, "substitute_prob": 0.0, "complement_prob": 0.0, "irrelevant_prob": 0.0}
                           for _ in candidates]

    # Stage 5: blend weights chosen per loaded-model configuration.
    # NOTE: these override the config-level defaults of _compute_blended_score.
    if ranker_service._loaded:
        w_r, w_c, w_s = 0.55, 0.05, 0.40
    else:
        w_r, w_c, w_s = 0.0, 0.05, 0.95

    MIN_RELEVANCE_SCORE = 0.75
    scored = []
    for idx, (cand, cls) in enumerate(zip(candidates, classifications)):
        label = cls["label"]
        r_score = float(ranker_scores[idx])
        sim = float(cand["similarity"])
        rel = _compute_blended_score(r_score, _label_to_priority_weight(label), sim, w_r, w_c, w_s)
        # All labels (including Exact) must meet the minimum relevance threshold.
        # Irrelevant products are shown only if they score >= 0.75; otherwise dropped.
        if not show_all and rel < MIN_RELEVANCE_SCORE:
            continue
        if not show_all and label == "Substitute" and not include_substitutes:
            continue
        if not show_all and label == "Complement" and not include_complements:
            continue
        scored.append(SearchResultItem(
            id=str(cand["id"]), title=cand["title"],
            description=cand.get("description") or "",
            price=float(cand["price"]),
            stock=int(cand.get("stock", 0)),
            image_url=_first_image(cand.get("images")),
            seller_id=str(cand["seller_id"]),
            similarity=round(sim, 4), ranker_score=round(r_score, 4),
            relevance_score=round(rel, 4), relevance_label=label,
            relevance_confidence=round(cls["confidence"], 4),
            exact_prob=round(cls.get("exact_prob", 0.0), 4),
            substitute_prob=round(cls.get("substitute_prob", 0.0), 4),
            complement_prob=round(cls.get("complement_prob", 0.0), 4),
            irrelevant_prob=round(cls.get("irrelevant_prob", 0.0), 4),
        ))
    return scored
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
# =============================================================================
|
| 246 |
+
# Main Search Route
|
| 247 |
+
# =============================================================================
|
| 248 |
+
|
| 249 |
+
@router.get("/", response_model=SearchResponse)
async def search_products(
    q: str = Query(..., min_length=1, max_length=500, description="Search query text"),
    max_results: int = Query(default=SEARCH_MAX_RESULTS, ge=1, le=100),
    include_complements: bool = Query(default=True, description="Include Complement results"),
    include_substitutes: bool = Query(default=True, description="Include Substitute results"),
    show_all: bool = Query(default=False, description="Show all products without threshold filtering (admin mode)"),
):
    """
    Product Search with compound query support.

    Splits compound queries (e.g. 'shoes under 300 and bags under 500')
    into independent search groups, each with its own filters, runs the
    ML search pipeline once per group, then merges, deduplicates and
    ranks the combined results by blended relevance score.

    Falls back to a plain text (ILIKE) search when the BERT embedding
    service has not finished loading.

    Raises:
        HTTPException 500: any unexpected pipeline failure (logged with
            a full traceback before re-raising).
    """
    try:
        # Rewrite the raw query: detect intents, extract slots, and split
        # compound queries into independent search groups with filters.
        rewritten = query_rewriter.process(q)

        print(f"[Search] Original query: '{q}'")
        print(f"[Search] Intents: {rewritten.intents}")
        print(f"[Search] Slots: {rewritten.slots}")
        print(f"[Search] Search groups: {len(rewritten.search_groups)}")
        for i, g in enumerate(rewritten.search_groups):
            print(f"[Search] Group {i+1}: '{g.search_text}' | Filters: {g.filters}")

        # ML models not ready yet — degrade gracefully to text search.
        if not bert_service._loaded:
            return await _fallback_text_search(q, rewritten, max_results)

        # Run the full pipeline (retrieval -> ranking -> classification)
        # independently for each search group.
        all_results = []
        for group in rewritten.search_groups:
            group_results = _run_search_pipeline(
                search_text=group.search_text,
                filters=group.filters,
                original_query=q,
                max_candidates=SEARCH_TOP_K_CANDIDATES,
                include_complements=include_complements,
                include_substitutes=include_substitutes,
                show_all=show_all,
            )
            all_results.extend(group_results)

        # Deduplicate by product id (keep highest relevance score) —
        # the same product can surface from more than one group.
        seen = {}
        for r in all_results:
            if r.id not in seen or r.relevance_score > seen[r.id].relevance_score:
                seen[r.id] = r

        final_results = sorted(seen.values(), key=lambda r: r.relevance_score, reverse=True)[:max_results]
        print(f"[Search] Final results: {len(final_results)} (from {len(rewritten.search_groups)} group(s))")

        return SearchResponse(
            query=q,
            total_results=len(final_results),
            results=final_results,
            message="" if final_results else "No products found matching your query.",
            rewritten_query=rewritten.search_text,
            detected_intents=rewritten.intents,
            extracted_slots=rewritten.slots,
            # Per-group filters only make sense as a single top-level dict
            # when there is exactly one group; otherwise see search_groups.
            applied_filters=rewritten.search_groups[0].filters if len(rewritten.search_groups) == 1 else {},
            search_groups=[{"search_text": g.search_text, "filters": g.filters} for g in rewritten.search_groups],
        )

    except HTTPException:
        # Let intentional HTTP errors pass through untouched.
        raise
    except Exception as e:
        print(f"[Search] ERROR: {e}")
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Search failed: {str(e)}")
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
# =============================================================================
|
| 319 |
+
# Fallback: Text-only search when ML models are not loaded
|
| 320 |
+
# =============================================================================
|
| 321 |
+
|
| 322 |
+
async def _fallback_text_search(
    original_query: str,
    rewritten,
    max_results: int,
) -> SearchResponse:
    """Simple ILIKE text search fallback when ML models are not available.

    Runs one title/description ILIKE query per search group (with price
    filters applied), deduplicates by product id, and returns up to
    `max_results` items. Results are marked "Exact" with full confidence
    since no classifier is available.

    Fixes vs. previous version:
    - Sanitizes the user-controlled search text before embedding it in a
      PostgREST `or_` filter. That filter syntax is comma/paren-delimited,
      so a query like "bags, red (large)" would previously break the
      filter expression (filter injection / malformed request).
    - Populates the `stock` field like the main pipeline does, instead of
      silently leaving it at its default.
    """
    sb = get_supabase()
    all_results = []

    for group in rewritten.search_groups:
        # Strip the PostgREST filter delimiters from user text so it
        # cannot alter the structure of the or_() expression.
        safe_text = (
            group.search_text.replace(",", " ").replace("(", " ").replace(")", " ").strip()
        )
        qb = sb.table("products").select("*").eq("is_active", True).eq("status", "approved").gt("stock", 0)
        qb = qb.or_(f"title.ilike.%{safe_text}%,description.ilike.%{safe_text}%")
        if "price_max" in group.filters:
            qb = qb.lte("price", group.filters["price_max"])
        if "price_min" in group.filters:
            qb = qb.gte("price", group.filters["price_min"])
        response = qb.limit(max_results).execute()

        for p in response.data:
            all_results.append(SearchResultItem(
                id=str(p["id"]), title=p["title"],
                description=p.get("description") or "",
                price=float(p["price"]),
                stock=int(p.get("stock", 0)),
                image_url=_first_image(p.get("images")),
                seller_id=str(p["seller_id"]),
                # No ranker/classifier available: treat text matches as Exact.
                similarity=1.0, ranker_score=0.0, relevance_score=1.0,
                relevance_label="Exact", relevance_confidence=1.0,
                exact_prob=1.0, substitute_prob=0.0, complement_prob=0.0, irrelevant_prob=0.0,
            ))

    # Deduplicate by product id, keeping the highest relevance score.
    seen = {}
    for r in all_results:
        if r.id not in seen or r.relevance_score > seen[r.id].relevance_score:
            seen[r.id] = r
    results = sorted(seen.values(), key=lambda r: r.relevance_score, reverse=True)[:max_results]

    return SearchResponse(
        query=original_query,
        total_results=len(results),
        results=results,
        message="" if results else "No products found.",
        rewritten_query=rewritten.search_text,
        detected_intents=rewritten.intents,
        extracted_slots=rewritten.slots,
        applied_filters=rewritten.search_groups[0].filters if len(rewritten.search_groups) == 1 else {},
        search_groups=[{"search_text": g.search_text, "filters": g.filters} for g in rewritten.search_groups],
    )
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
# =============================================================================
|
| 373 |
+
# Voice Transcription Endpoint
|
| 374 |
+
# =============================================================================
|
| 375 |
+
|
| 376 |
+
@router.post("/transcribe")
async def transcribe_audio(audio: UploadFile = File(...)):
    """
    Accept an audio file (webm from MediaRecorder) and return transcribed text.
    Uses SpeechRecognition + pydub for cross-browser voice search support.

    Returns:
        {"transcript": str} on success, or {"error": str} on any failure
        (missing libraries, unintelligible audio, conversion errors).

    Fixes vs. previous version:
    - Derives the .wav path with os.path.splitext instead of
      str.replace(".webm", ".wav"), which would corrupt the path if
      ".webm" appeared anywhere else in the temp file name.
    - Distinguishes "speech not understood" (sr.UnknownValueError) from
      genuine transcription failures.
    """
    tmp_webm = None
    tmp_wav = None
    try:
        import speech_recognition as sr
        from pydub import AudioSegment

        # Save uploaded audio to a temp file (delete=False so pydub can
        # reopen it by path; cleanup happens in the finally block).
        tmp_webm = tempfile.NamedTemporaryFile(delete=False, suffix=".webm")
        content = await audio.read()
        tmp_webm.write(content)
        tmp_webm.close()

        # Convert webm -> wav. splitext swaps only the final extension.
        tmp_wav_path = os.path.splitext(tmp_webm.name)[0] + ".wav"
        audio_segment = AudioSegment.from_file(tmp_webm.name, format="webm")
        audio_segment.export(tmp_wav_path, format="wav")
        tmp_wav = tmp_wav_path

        # Transcribe using Google Speech Recognition (requires network).
        recognizer = sr.Recognizer()
        with sr.AudioFile(tmp_wav) as source:
            audio_data = recognizer.record(source)

        try:
            transcript = recognizer.recognize_google(audio_data)
        except sr.UnknownValueError:
            return {"error": "Could not understand the audio. Please try again."}
        return {"transcript": transcript}

    except ImportError:
        return {"error": "Speech recognition libraries not installed. Run: pip install SpeechRecognition pydub"}
    except Exception as e:
        print(f"[Transcribe] Error: {e}")
        return {"error": f"Transcription failed: {str(e)}"}
    finally:
        # Best-effort cleanup of both temp files.
        if tmp_webm and os.path.exists(tmp_webm.name):
            os.unlink(tmp_webm.name)
        if tmp_wav and os.path.exists(tmp_wav):
            os.unlink(tmp_wav)
|
backend/routes/transactions.py
ADDED
|
@@ -0,0 +1,955 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Transaction routes — buy products, view transaction history, manage balance.
|
| 3 |
+
Buyer's money is held on purchase (pending). On successful delivery/completion,
|
| 4 |
+
the product amount is credited to the admin. Buyer can cancel with conditions.
|
| 5 |
+
Supports quantity (buyer selects how many to buy).
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 9 |
+
from pydantic import BaseModel
|
| 10 |
+
from typing import Optional
|
| 11 |
+
import uuid
|
| 12 |
+
from datetime import datetime, timezone, timedelta
|
| 13 |
+
from database import get_supabase
|
| 14 |
+
from routes.auth import get_current_user
|
| 15 |
+
|
| 16 |
+
router = APIRouter(prefix="/transactions", tags=["Transactions"])
|
| 17 |
+
|
| 18 |
+
DELIVERY_FEE = 90.00
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# --- Request/Response Models ---
|
| 22 |
+
|
| 23 |
+
class BuyRequest(BaseModel):
    """Request body for POST /transactions/buy."""
    product_id: str
    # How many items to buy; must be >= 1 (validated in the route).
    quantity: int = 1
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TopUpRequest(BaseModel):
    """Request body for POST /transactions/topup; amount must be positive."""
    amount: float
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class WithdrawRequest(BaseModel):
    """Request body for POST /transactions/withdraw; amount must be positive."""
    amount: float
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TransactionResponse(BaseModel):
    """Serialized view of a product_transactions row, enriched with display
    names (seller/buyer/delivery user/staff) resolved by the routes."""
    id: str
    buyer_id: str
    seller_id: str
    product_id: str
    product_title: str
    # Total product amount (unit price * quantity), excluding delivery fee.
    amount: float
    quantity: int = 1
    seller_amount: float
    admin_commission: float
    delivery_fee: float = 0
    # Lifecycle state, e.g. pending / approved / ondeliver / delivered /
    # undelivered / cancelled (as used elsewhere in this module).
    status: str
    purchase_type: str = "delivery"
    delivery_user_id: str = ""
    delivery_user_name: str = ""
    delivery_user_contact: str = ""
    seller_name: str = ""
    buyer_name: str = ""
    assigned_staff_id: str = ""
    assigned_staff_name: str = ""
    delivery_address: str = ""
    product_images: list = []
    # Transactions placed together share one group_id (one delivery fee).
    group_id: str = ""
    created_at: str
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class BalanceResponse(BaseModel):
    """Wallet balance for one user (user_balances row)."""
    user_id: str
    balance: float
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class SVFEntry(BaseModel):
    """One stored_value ledger entry (deposit, purchase, etc.)."""
    id: str
    user_id: str
    # e.g. "deposit" or "purchase" as written by the routes in this module.
    transaction_type: str
    amount: float
    # Free-form context (product info, group_id, fees) attached on insert.
    metadata: Optional[dict] = None
    created_at: str
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# --- Routes ---
|
| 77 |
+
|
| 78 |
+
@router.post("/buy", response_model=TransactionResponse)
async def buy_product(req: BuyRequest, current_user: dict = Depends(get_current_user)):
    """
    Buy a product. 100% of product revenue goes to the department balance.
    Buyer can select quantity. Stock is decremented.

    Flow: validate user/product/stock/balance -> find or create a delivery
    group (joining an open group within 1 hour waives the delivery fee) ->
    deduct wallet, log the stored_value entry, decrement stock, insert the
    pending product_transactions row.

    Fix vs. previous version: uses the module-level DELIVERY_FEE constant
    instead of a duplicated hardcoded 90.00.

    NOTE(review): balance and stock updates are read-modify-write without a
    DB transaction — concurrent purchases could race; consider an atomic
    RPC. Behavior preserved here.

    Raises:
        HTTPException 400/403/404/500 for validation, authorization,
        missing-resource and insert failures respectively.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    if req.quantity < 1:
        raise HTTPException(status_code=400, detail="Quantity must be at least 1")

    # 1. Check user isn't banned
    user_result = sb.table("users").select("role, is_banned").eq("id", user_id).execute()
    if not user_result.data:
        raise HTTPException(status_code=404, detail="User not found")

    user_data = user_result.data[0]
    if user_data.get("is_banned"):
        raise HTTPException(status_code=403, detail="Your account has been banned")

    user_role = user_data["role"]
    if user_role == "admin":
        raise HTTPException(status_code=403, detail="Admin accounts cannot purchase products")
    if user_role != "buyer":
        raise HTTPException(status_code=403, detail="Only buyers can purchase products")

    # Always delivery orders

    # For delivery orders, get buyer's delivery address
    delivery_address = ""
    contact = sb.table("user_contacts").select("contact_number, delivery_address").eq("user_id", user_id).execute()
    if not contact.data:
        raise HTTPException(status_code=400, detail="Please add your contact number and delivery address before placing a delivery order")
    delivery_address = (contact.data[0].get("delivery_address") or "").strip()
    if not delivery_address:
        raise HTTPException(status_code=400, detail="Please set your delivery address before placing a delivery order")

    # 2. Get product (must be active and approved)
    product_result = sb.table("products").select("*").eq("id", req.product_id).eq("is_active", True).eq("status", "approved").execute()
    if not product_result.data:
        raise HTTPException(status_code=404, detail="Product not found or not available")

    product = product_result.data[0]

    # 3. Can't buy your own product
    if product["seller_id"] == user_id:
        raise HTTPException(status_code=400, detail="Cannot buy your own product")

    # 4. Check stock
    current_stock = int(product.get("stock", 0))
    if current_stock <= 0:
        raise HTTPException(status_code=400, detail="Product is out of stock")
    if req.quantity > current_stock:
        raise HTTPException(
            status_code=400,
            detail=f"Not enough stock. Available: {current_stock}, requested: {req.quantity}",
        )

    # 5. Check buyer balance
    balance_result = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
    if not balance_result.data:
        raise HTTPException(status_code=400, detail="No balance found. Top up your wallet first.")

    buyer_balance = float(balance_result.data[0]["balance"])
    unit_price = float(product["price"])
    total_price = unit_price * req.quantity

    # --- Group detection: find an open group for this buyer+store within 1 hour ---
    seller_id = product["seller_id"]
    # Determine the delivery unit (department or seller)
    seller_info = sb.table("users").select("department_id").eq("id", seller_id).execute()
    seller_dept_id = seller_info.data[0].get("department_id") if seller_info.data else None

    existing_group_id = None
    delivery_fee = DELIVERY_FEE  # Default for a new group (was hardcoded 90.00)

    # Look for an active group for this buyer within the same store/department
    # in the last hour. An open group has NO transaction in
    # ondeliver/delivered/undelivered/cancelled state.
    one_hour_ago = (datetime.now(timezone.utc) - timedelta(hours=1)).isoformat()

    if seller_dept_id:
        # Get all sellers in the same department
        dept_sellers = sb.table("users").select("id").eq("department_id", seller_dept_id).execute()
        dept_seller_ids = [s["id"] for s in (dept_sellers.data or [])]
        # Find recent pending/approved transactions for this buyer from department sellers
        recent = sb.table("product_transactions").select("group_id, status").eq(
            "buyer_id", user_id
        ).in_("seller_id", dept_seller_ids).in_(
            "status", ["pending", "approved"]
        ).gte("created_at", one_hour_ago).not_.is_("group_id", "null").execute()
    else:
        recent = sb.table("product_transactions").select("group_id, status").eq(
            "buyer_id", user_id
        ).eq("seller_id", seller_id).in_(
            "status", ["pending", "approved"]
        ).gte("created_at", one_hour_ago).not_.is_("group_id", "null").execute()

    if recent.data:
        # Collect candidate group ids
        candidate_groups = set(r["group_id"] for r in recent.data if r.get("group_id"))
        # Filter out any group that has a picked-up/done transaction
        for gid in candidate_groups:
            bad = sb.table("product_transactions").select("id", count="exact").eq(
                "group_id", gid
            ).in_("status", ["ondeliver", "delivered", "undelivered", "cancelled"]).execute()
            if (bad.count or 0) == 0:
                existing_group_id = gid
                delivery_fee = 0.0  # Joining existing group — no extra delivery fee
                break

    group_id = existing_group_id if existing_group_id else str(uuid.uuid4())
    grand_total = total_price + delivery_fee

    if buyer_balance < grand_total:
        raise HTTPException(
            status_code=400,
            detail=f"Insufficient balance. You have PHP {buyer_balance:.2f}, total cost is PHP {grand_total:.2f}",
        )

    # 6. Amounts — the full product amount goes to the seller/department.
    seller_amount = total_price
    admin_commission = 0.0

    # 7. Deduct from buyer (non-atomic read-modify-write; see NOTE above)
    new_buyer_balance = buyer_balance - grand_total
    sb.table("user_balances").update({"balance": new_buyer_balance}).eq("user_id", user_id).execute()

    # Log the purchase deduction in stored_value so it appears in wallet history
    sb.table("stored_value").insert({
        "user_id": user_id,
        "transaction_type": "purchase",
        "amount": grand_total,
        "metadata": {
            "product_id": req.product_id,
            "product_title": product["title"],
            "quantity": req.quantity,
            "product_amount": total_price,
            "delivery_fee": delivery_fee,
            "group_id": group_id,
            "joined_existing_group": existing_group_id is not None,
        },
    }).execute()

    # 8. Decrement stock
    new_stock = current_stock - req.quantity
    sb.table("products").update({"stock": new_stock}).eq("id", req.product_id).execute()

    # 9. Create product_transaction record (held in "pending" state)
    txn_result = sb.table("product_transactions").insert({
        "buyer_id": user_id,
        "seller_id": product["seller_id"],
        "product_id": req.product_id,
        "quantity": req.quantity,
        "amount": total_price,
        "seller_amount": seller_amount,
        "admin_commission": admin_commission,
        "delivery_fee": delivery_fee,
        "delivery_address": delivery_address,
        "purchase_type": "delivery",
        "status": "pending",
        "group_id": group_id,
    }).execute()

    if not txn_result.data:
        raise HTTPException(status_code=500, detail="Failed to create transaction")

    txn = txn_result.data[0]
    return TransactionResponse(
        id=txn["id"],
        buyer_id=txn["buyer_id"],
        seller_id=txn["seller_id"],
        product_id=txn["product_id"],
        product_title=product["title"],
        amount=float(txn["amount"]),
        quantity=int(txn.get("quantity", 1)),
        seller_amount=float(txn.get("seller_amount", 0)),
        admin_commission=float(txn.get("admin_commission", 0)),
        delivery_fee=float(txn.get("delivery_fee", 0)),
        status=txn["status"],
        purchase_type=txn.get("purchase_type", "delivery"),
        delivery_user_id=txn.get("delivery_user_id") or "",
        group_id=txn.get("group_id") or "",
        created_at=txn["created_at"],
    )
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
@router.get("/history", response_model=list[TransactionResponse])
async def get_transaction_history(current_user: dict = Depends(get_current_user)):
    """Get all transactions for the current user (as buyer or seller).

    For sellers/managers in a department, includes all department
    transactions (sold by any department seller or the manager).
    Results are deduplicated by transaction id, sorted newest-first,
    and enriched with display names/contacts for the delivery user,
    seller (department name takes precedence), buyer, and assigned staff.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    # Transactions where the user is the buyer or the direct seller.
    bought = sb.table("product_transactions").select("*, products(title, images)").eq("buyer_id", user_id).order("created_at", desc=True).execute()
    sold = sb.table("product_transactions").select("*, products(title, images)").eq("seller_id", user_id).order("created_at", desc=True).execute()

    all_txns = (bought.data or []) + (sold.data or [])

    # For sellers/managers in a department, also include department-wide transactions
    user_info = sb.table("users").select("role, department_id").eq("id", user_id).execute()
    if user_info.data and user_info.data[0].get("role") in ("seller", "manager") and user_info.data[0].get("department_id"):
        dept_id = user_info.data[0]["department_id"]
        dept_staff = sb.table("users").select("id").eq("department_id", dept_id).execute()
        dept_ids = [s["id"] for s in (dept_staff.data or [])]
        # Also include the department manager (may not carry department_id)
        dept_info = sb.table("departments").select("manager_id").eq("id", dept_id).execute()
        if dept_info.data and dept_info.data[0].get("manager_id"):
            mgr_id = dept_info.data[0]["manager_id"]
            if mgr_id not in dept_ids:
                dept_ids.append(mgr_id)
        if dept_ids:
            dept_txns = sb.table("product_transactions").select("*, products(title, images)").in_(
                "seller_id", dept_ids
            ).order("created_at", desc=True).execute()
            all_txns += (dept_txns.data or [])

    # Deduplicate by transaction id (buyer/seller/department lists overlap).
    seen = set()
    unique_txns = []
    for t in all_txns:
        if t["id"] not in seen:
            seen.add(t["id"])
            unique_txns.append(t)

    # Newest first (created_at is an ISO-8601 string, so lexicographic
    # ordering matches chronological ordering).
    unique_txns.sort(key=lambda t: t["created_at"], reverse=True)

    # Get delivery user info (names + contact numbers, batch lookups)
    delivery_ids = set(t.get("delivery_user_id") for t in unique_txns if t.get("delivery_user_id"))
    delivery_names = {}
    delivery_contacts = {}
    if delivery_ids:
        d_users = sb.table("users").select("id, full_name").in_("id", list(delivery_ids)).execute()
        delivery_names = {u["id"]: u["full_name"] for u in (d_users.data or [])}
        d_contacts = sb.table("user_contacts").select("user_id, contact_number").in_("user_id", list(delivery_ids)).execute()
        delivery_contacts = {c["user_id"]: c["contact_number"] for c in (d_contacts.data or [])}

    # Get seller names (use department name if seller belongs to a department)
    seller_ids = set(t.get("seller_id") for t in unique_txns if t.get("seller_id"))
    seller_names = {}
    if seller_ids:
        s_users = sb.table("users").select("id, full_name, department_id").in_("id", list(seller_ids)).execute()
        # Batch-lookup department names
        dept_ids = set(u.get("department_id") for u in (s_users.data or []) if u.get("department_id"))
        dept_names = {}
        if dept_ids:
            depts = sb.table("departments").select("id, name").in_("id", list(dept_ids)).execute()
            dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
        for u in (s_users.data or []):
            dept_id = u.get("department_id")
            if dept_id and dept_id in dept_names:
                seller_names[u["id"]] = dept_names[dept_id]
            else:
                seller_names[u["id"]] = u["full_name"]

    # Get buyer names
    buyer_ids = set(t.get("buyer_id") for t in unique_txns if t.get("buyer_id"))
    buyer_names = {}
    if buyer_ids:
        b_users = sb.table("users").select("id, full_name").in_("id", list(buyer_ids)).execute()
        buyer_names = {u["id"]: u["full_name"] for u in (b_users.data or [])}

    # Get assigned staff names
    assigned_ids = set(t.get("assigned_staff_id") for t in unique_txns if t.get("assigned_staff_id"))
    assigned_names = {}
    if assigned_ids:
        a_users = sb.table("users").select("id, full_name").in_("id", list(assigned_ids)).execute()
        assigned_names = {u["id"]: u["full_name"] for u in (a_users.data or [])}

    # Assemble the enriched response; missing lookups fall back to "".
    return [
        TransactionResponse(
            id=t["id"],
            buyer_id=t["buyer_id"],
            seller_id=t["seller_id"],
            product_id=t["product_id"],
            product_title=t.get("products", {}).get("title", "") if t.get("products") else "",
            product_images=t.get("products", {}).get("images", []) if t.get("products") else [],
            amount=float(t["amount"]),
            quantity=int(t.get("quantity", 1)),
            seller_amount=float(t.get("seller_amount", 0)),
            admin_commission=float(t.get("admin_commission", 0)),
            delivery_fee=float(t.get("delivery_fee", 0)),
            status=t["status"],
            purchase_type=t.get("purchase_type", "delivery"),
            delivery_user_id=t.get("delivery_user_id") or "",
            delivery_user_name=delivery_names.get(t.get("delivery_user_id", ""), ""),
            delivery_user_contact=delivery_contacts.get(t.get("delivery_user_id", ""), ""),
            seller_name=seller_names.get(t.get("seller_id", ""), ""),
            buyer_name=buyer_names.get(t.get("buyer_id", ""), ""),
            assigned_staff_id=t.get("assigned_staff_id") or "",
            assigned_staff_name=assigned_names.get(t.get("assigned_staff_id", ""), ""),
            delivery_address=t.get("delivery_address", ""),
            group_id=t.get("group_id") or "",
            created_at=t["created_at"],
        )
        for t in unique_txns
    ]
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
@router.get("/balance", response_model=BalanceResponse)
async def get_balance(current_user: dict = Depends(get_current_user)):
    """Return the wallet balance for the authenticated user."""
    supabase = get_supabase()
    rows = (
        supabase.table("user_balances")
        .select("*")
        .eq("user_id", current_user["sub"])
        .execute()
    )

    # No user_balances row means the wallet was never provisioned.
    if not rows.data:
        raise HTTPException(status_code=404, detail="Balance not found")

    record = rows.data[0]
    return BalanceResponse(user_id=record["user_id"], balance=float(record["balance"]))
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
@router.post("/topup", response_model=BalanceResponse)
|
| 391 |
+
async def topup_balance(req: TopUpRequest, current_user: dict = Depends(get_current_user)):
|
| 392 |
+
"""Add funds to the current user's balance."""
|
| 393 |
+
if req.amount <= 0:
|
| 394 |
+
raise HTTPException(status_code=400, detail="Amount must be positive")
|
| 395 |
+
|
| 396 |
+
sb = get_supabase()
|
| 397 |
+
user_id = current_user["sub"]
|
| 398 |
+
|
| 399 |
+
result = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
|
| 400 |
+
if not result.data:
|
| 401 |
+
raise HTTPException(status_code=404, detail="Balance not found")
|
| 402 |
+
|
| 403 |
+
new_balance = float(result.data[0]["balance"]) + req.amount
|
| 404 |
+
sb.table("user_balances").update({"balance": new_balance}).eq("user_id", user_id).execute()
|
| 405 |
+
|
| 406 |
+
# Record SVF deposit
|
| 407 |
+
sb.table("stored_value").insert({
|
| 408 |
+
"user_id": user_id,
|
| 409 |
+
"transaction_type": "deposit",
|
| 410 |
+
"amount": req.amount,
|
| 411 |
+
}).execute()
|
| 412 |
+
|
| 413 |
+
return BalanceResponse(user_id=user_id, balance=new_balance)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
@router.post("/withdraw", response_model=BalanceResponse)
|
| 417 |
+
async def withdraw_balance(req: WithdrawRequest, current_user: dict = Depends(get_current_user)):
|
| 418 |
+
"""Withdraw funds from the current user's balance."""
|
| 419 |
+
if req.amount <= 0:
|
| 420 |
+
raise HTTPException(status_code=400, detail="Amount must be positive")
|
| 421 |
+
|
| 422 |
+
sb = get_supabase()
|
| 423 |
+
user_id = current_user["sub"]
|
| 424 |
+
|
| 425 |
+
result = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
|
| 426 |
+
if not result.data:
|
| 427 |
+
raise HTTPException(status_code=404, detail="Balance not found")
|
| 428 |
+
|
| 429 |
+
current_balance = float(result.data[0]["balance"])
|
| 430 |
+
if current_balance < req.amount:
|
| 431 |
+
raise HTTPException(status_code=400, detail="Insufficient balance")
|
| 432 |
+
|
| 433 |
+
new_balance = current_balance - req.amount
|
| 434 |
+
sb.table("user_balances").update({"balance": new_balance}).eq("user_id", user_id).execute()
|
| 435 |
+
|
| 436 |
+
# Record SVF withdrawal
|
| 437 |
+
sb.table("stored_value").insert({
|
| 438 |
+
"user_id": user_id,
|
| 439 |
+
"transaction_type": "withdrawal",
|
| 440 |
+
"amount": req.amount,
|
| 441 |
+
}).execute()
|
| 442 |
+
|
| 443 |
+
return BalanceResponse(user_id=user_id, balance=new_balance)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
@router.get("/svf-history", response_model=list[SVFEntry])
|
| 447 |
+
async def get_svf_history(current_user: dict = Depends(get_current_user)):
|
| 448 |
+
"""Get stored value facility history for the current user."""
|
| 449 |
+
sb = get_supabase()
|
| 450 |
+
user_id = current_user["sub"]
|
| 451 |
+
|
| 452 |
+
result = sb.table("stored_value").select("*").eq("user_id", user_id).order("created_at", desc=True).limit(100).execute()
|
| 453 |
+
|
| 454 |
+
return [
|
| 455 |
+
SVFEntry(
|
| 456 |
+
id=row["id"],
|
| 457 |
+
user_id=row["user_id"],
|
| 458 |
+
transaction_type=row["transaction_type"],
|
| 459 |
+
amount=float(row["amount"]),
|
| 460 |
+
metadata=row.get("metadata"),
|
| 461 |
+
created_at=row["created_at"],
|
| 462 |
+
)
|
| 463 |
+
for row in (result.data or [])
|
| 464 |
+
]
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
# --- Delivery Order Management (Staff/Manager) ---


class DeliveryOrderStatusUpdate(BaseModel):
    """Request body for approving a grouped delivery order.

    Both the staff and manager status endpoints accept only the value
    'approved' (group becomes ready for delivery pickup) and reject
    anything else with HTTP 400.
    """

    status: str  # 'approved' (ready for pickup)
| 473 |
+
|
| 474 |
+
@router.get("/staff/delivery-orders")
|
| 475 |
+
async def get_staff_delivery_orders(current_user: dict = Depends(get_current_user)):
|
| 476 |
+
"""Get delivery orders grouped by group_id for all staff in the same department/store."""
|
| 477 |
+
sb = get_supabase()
|
| 478 |
+
user_id = current_user["sub"]
|
| 479 |
+
|
| 480 |
+
# Verify seller role and get department
|
| 481 |
+
user = sb.table("users").select("role, department_id").eq("id", user_id).execute()
|
| 482 |
+
if not user.data or user.data[0]["role"] not in ("seller", "manager"):
|
| 483 |
+
raise HTTPException(status_code=403, detail="Only sellers/managers can view delivery orders")
|
| 484 |
+
|
| 485 |
+
dept_id = user.data[0].get("department_id")
|
| 486 |
+
|
| 487 |
+
# Get all staff in the same department so orders are visible to all store staff
|
| 488 |
+
if dept_id:
|
| 489 |
+
staff = sb.table("users").select("id").eq("department_id", dept_id).execute()
|
| 490 |
+
seller_ids = [s["id"] for s in (staff.data or [])]
|
| 491 |
+
if user_id not in seller_ids:
|
| 492 |
+
seller_ids.append(user_id)
|
| 493 |
+
else:
|
| 494 |
+
seller_ids = [user_id]
|
| 495 |
+
|
| 496 |
+
txns = sb.table("product_transactions").select(
|
| 497 |
+
"*, products(title, price, images)"
|
| 498 |
+
).in_("seller_id", seller_ids).in_(
|
| 499 |
+
"status", ["pending", "approved", "ondeliver"]
|
| 500 |
+
).order("created_at", desc=False).execute()
|
| 501 |
+
|
| 502 |
+
if not txns.data:
|
| 503 |
+
return []
|
| 504 |
+
|
| 505 |
+
buyer_ids = set(t["buyer_id"] for t in txns.data)
|
| 506 |
+
assigned_ids = set(t["assigned_staff_id"] for t in txns.data if t.get("assigned_staff_id"))
|
| 507 |
+
all_user_ids = buyer_ids | assigned_ids
|
| 508 |
+
users_lookup = sb.table("users").select("id, full_name").in_("id", list(all_user_ids)).execute()
|
| 509 |
+
user_names = {u["id"]: u["full_name"] for u in (users_lookup.data or [])}
|
| 510 |
+
|
| 511 |
+
# Group transactions by group_id
|
| 512 |
+
groups = {}
|
| 513 |
+
for t in txns.data:
|
| 514 |
+
gid = t.get("group_id") or t["id"] # fallback: ungrouped orders use own id
|
| 515 |
+
prod = t.get("products") or {}
|
| 516 |
+
if gid not in groups:
|
| 517 |
+
groups[gid] = {
|
| 518 |
+
"group_id": gid,
|
| 519 |
+
"buyer_id": t["buyer_id"],
|
| 520 |
+
"buyer_name": user_names.get(t["buyer_id"], "Unknown"),
|
| 521 |
+
"delivery_address": t.get("delivery_address", ""),
|
| 522 |
+
"status": t["status"],
|
| 523 |
+
"assigned_staff_id": t.get("assigned_staff_id"),
|
| 524 |
+
"assigned_staff_name": user_names.get(t.get("assigned_staff_id", ""), ""),
|
| 525 |
+
"created_at": t["created_at"],
|
| 526 |
+
"delivery_fee": 90.0, # flat per group
|
| 527 |
+
"items": [],
|
| 528 |
+
"total_amount": 0.0,
|
| 529 |
+
}
|
| 530 |
+
groups[gid]["items"].append({
|
| 531 |
+
"id": t["id"],
|
| 532 |
+
"product_id": t["product_id"],
|
| 533 |
+
"product_title": prod.get("title", ""),
|
| 534 |
+
"product_price": float(prod.get("price", 0)),
|
| 535 |
+
"product_images": prod.get("images", []),
|
| 536 |
+
"quantity": int(t.get("quantity", 1)),
|
| 537 |
+
"amount": float(t["amount"]),
|
| 538 |
+
"status": t["status"],
|
| 539 |
+
})
|
| 540 |
+
groups[gid]["total_amount"] += float(t["amount"])
|
| 541 |
+
# Group status: escalate to worst (ondeliver > approved > pending)
|
| 542 |
+
current_group_status = groups[gid]["status"]
|
| 543 |
+
t_status = t["status"]
|
| 544 |
+
status_priority = {"pending": 0, "approved": 1, "ondeliver": 2}
|
| 545 |
+
if status_priority.get(t_status, 0) > status_priority.get(current_group_status, 0):
|
| 546 |
+
groups[gid]["status"] = t_status
|
| 547 |
+
|
| 548 |
+
return list(groups.values())
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
@router.put("/staff/delivery-orders/{group_id}/status")
|
| 552 |
+
async def update_delivery_order_status(
|
| 553 |
+
group_id: str,
|
| 554 |
+
req: DeliveryOrderStatusUpdate,
|
| 555 |
+
current_user: dict = Depends(get_current_user),
|
| 556 |
+
):
|
| 557 |
+
"""Staff approves all pending transactions in a group (delivery box)."""
|
| 558 |
+
sb = get_supabase()
|
| 559 |
+
user_id = current_user["sub"]
|
| 560 |
+
|
| 561 |
+
if req.status != "approved":
|
| 562 |
+
raise HTTPException(status_code=400, detail="Status must be 'approved'")
|
| 563 |
+
|
| 564 |
+
# Verify staff role and get department
|
| 565 |
+
user = sb.table("users").select("role, department_id").eq("id", user_id).execute()
|
| 566 |
+
if not user.data or user.data[0]["role"] not in ("seller", "manager"):
|
| 567 |
+
raise HTTPException(status_code=403, detail="Access denied")
|
| 568 |
+
|
| 569 |
+
dept_id = user.data[0].get("department_id")
|
| 570 |
+
user_role = user.data[0]["role"]
|
| 571 |
+
|
| 572 |
+
# Get all pending transactions in this group
|
| 573 |
+
group_txns = sb.table("product_transactions").select("*").eq(
|
| 574 |
+
"group_id", group_id
|
| 575 |
+
).eq("status", "pending").execute()
|
| 576 |
+
|
| 577 |
+
if not group_txns.data:
|
| 578 |
+
raise HTTPException(status_code=404, detail="No pending orders found in this group")
|
| 579 |
+
|
| 580 |
+
# Verify at least one transaction belongs to this department/seller
|
| 581 |
+
seller_id = group_txns.data[0]["seller_id"]
|
| 582 |
+
if dept_id:
|
| 583 |
+
seller_info = sb.table("users").select("department_id").eq("id", seller_id).execute()
|
| 584 |
+
if not seller_info.data or seller_info.data[0].get("department_id") != dept_id:
|
| 585 |
+
raise HTTPException(status_code=403, detail="Order does not belong to your department")
|
| 586 |
+
else:
|
| 587 |
+
if seller_id != user_id:
|
| 588 |
+
raise HTTPException(status_code=403, detail="Order does not belong to you")
|
| 589 |
+
|
| 590 |
+
# Approve all pending transactions in the group
|
| 591 |
+
update_data = {"status": "approved", "assigned_staff_id": user_id}
|
| 592 |
+
sb.table("product_transactions").update(update_data).eq("group_id", group_id).eq("status", "pending").execute()
|
| 593 |
+
|
| 594 |
+
return {"message": f"All orders in group approved and ready for delivery pickup"}
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
@router.get("/manager/delivery-orders")
|
| 598 |
+
async def get_manager_delivery_orders(current_user: dict = Depends(get_current_user)):
|
| 599 |
+
"""Get grouped delivery orders for all products in the manager's department."""
|
| 600 |
+
sb = get_supabase()
|
| 601 |
+
user_id = current_user["sub"]
|
| 602 |
+
|
| 603 |
+
# Get manager's department
|
| 604 |
+
user = sb.table("users").select("role, department_id").eq("id", user_id).execute()
|
| 605 |
+
if not user.data:
|
| 606 |
+
raise HTTPException(status_code=404, detail="User not found")
|
| 607 |
+
|
| 608 |
+
user_data = user.data[0]
|
| 609 |
+
if user_data["role"] not in ("seller", "manager"):
|
| 610 |
+
raise HTTPException(status_code=403, detail="Access denied")
|
| 611 |
+
|
| 612 |
+
dept_id = user_data.get("department_id")
|
| 613 |
+
|
| 614 |
+
# Fallback: look up department via departments.manager_id
|
| 615 |
+
if not dept_id:
|
| 616 |
+
dept_lookup = sb.table("departments").select("id").eq("manager_id", user_id).limit(1).execute()
|
| 617 |
+
if dept_lookup.data:
|
| 618 |
+
dept_id = dept_lookup.data[0]["id"]
|
| 619 |
+
|
| 620 |
+
# Get all sellers in this department (or just this seller if no department)
|
| 621 |
+
if dept_id:
|
| 622 |
+
staff = sb.table("users").select("id").eq("department_id", dept_id).execute()
|
| 623 |
+
seller_ids = [s["id"] for s in (staff.data or [])]
|
| 624 |
+
if user_id not in seller_ids:
|
| 625 |
+
seller_ids.append(user_id)
|
| 626 |
+
else:
|
| 627 |
+
seller_ids = [user_id]
|
| 628 |
+
|
| 629 |
+
if not seller_ids:
|
| 630 |
+
return []
|
| 631 |
+
|
| 632 |
+
txns = sb.table("product_transactions").select(
|
| 633 |
+
"*, products(title, price, images)"
|
| 634 |
+
).in_("seller_id", seller_ids).in_(
|
| 635 |
+
"status", ["pending", "approved", "ondeliver"]
|
| 636 |
+
).order("created_at", desc=False).execute()
|
| 637 |
+
|
| 638 |
+
if not txns.data:
|
| 639 |
+
return []
|
| 640 |
+
|
| 641 |
+
buyer_ids = set(t["buyer_id"] for t in txns.data)
|
| 642 |
+
assigned_ids = set(t["assigned_staff_id"] for t in txns.data if t.get("assigned_staff_id"))
|
| 643 |
+
all_user_ids = buyer_ids | set(t["seller_id"] for t in txns.data) | assigned_ids
|
| 644 |
+
users_result = sb.table("users").select("id, full_name").in_("id", list(all_user_ids)).execute()
|
| 645 |
+
user_names = {u["id"]: u["full_name"] for u in (users_result.data or [])}
|
| 646 |
+
|
| 647 |
+
# Group transactions by group_id
|
| 648 |
+
groups = {}
|
| 649 |
+
for t in txns.data:
|
| 650 |
+
gid = t.get("group_id") or t["id"]
|
| 651 |
+
prod = t.get("products") or {}
|
| 652 |
+
if gid not in groups:
|
| 653 |
+
groups[gid] = {
|
| 654 |
+
"group_id": gid,
|
| 655 |
+
"buyer_id": t["buyer_id"],
|
| 656 |
+
"buyer_name": user_names.get(t["buyer_id"], "Unknown"),
|
| 657 |
+
"seller_name": user_names.get(t["seller_id"], "Unknown"),
|
| 658 |
+
"delivery_address": t.get("delivery_address", ""),
|
| 659 |
+
"status": t["status"],
|
| 660 |
+
"assigned_staff_id": t.get("assigned_staff_id"),
|
| 661 |
+
"assigned_staff_name": user_names.get(t.get("assigned_staff_id", ""), ""),
|
| 662 |
+
"created_at": t["created_at"],
|
| 663 |
+
"delivery_fee": 90.0,
|
| 664 |
+
"items": [],
|
| 665 |
+
"total_amount": 0.0,
|
| 666 |
+
}
|
| 667 |
+
groups[gid]["items"].append({
|
| 668 |
+
"id": t["id"],
|
| 669 |
+
"product_id": t["product_id"],
|
| 670 |
+
"product_title": prod.get("title", ""),
|
| 671 |
+
"product_price": float(prod.get("price", 0)),
|
| 672 |
+
"product_images": prod.get("images", []),
|
| 673 |
+
"quantity": int(t.get("quantity", 1)),
|
| 674 |
+
"amount": float(t["amount"]),
|
| 675 |
+
"status": t["status"],
|
| 676 |
+
})
|
| 677 |
+
groups[gid]["total_amount"] += float(t["amount"])
|
| 678 |
+
status_priority = {"pending": 0, "approved": 1, "ondeliver": 2}
|
| 679 |
+
if status_priority.get(t["status"], 0) > status_priority.get(groups[gid]["status"], 0):
|
| 680 |
+
groups[gid]["status"] = t["status"]
|
| 681 |
+
|
| 682 |
+
return list(groups.values())
|
| 683 |
+
|
| 684 |
+
|
| 685 |
+
@router.put("/manager/delivery-orders/{group_id}/status")
|
| 686 |
+
async def manager_update_delivery_order_status(
|
| 687 |
+
group_id: str,
|
| 688 |
+
req: DeliveryOrderStatusUpdate,
|
| 689 |
+
current_user: dict = Depends(get_current_user),
|
| 690 |
+
):
|
| 691 |
+
"""Manager approves all pending transactions in a group."""
|
| 692 |
+
sb = get_supabase()
|
| 693 |
+
user_id = current_user["sub"]
|
| 694 |
+
|
| 695 |
+
if req.status != "approved":
|
| 696 |
+
raise HTTPException(status_code=400, detail="Status must be 'approved'")
|
| 697 |
+
|
| 698 |
+
# Get manager's department
|
| 699 |
+
user = sb.table("users").select("role, department_id").eq("id", user_id).execute()
|
| 700 |
+
if not user.data or user.data[0]["role"] not in ("seller", "manager"):
|
| 701 |
+
raise HTTPException(status_code=403, detail="Access denied")
|
| 702 |
+
|
| 703 |
+
dept_id = user.data[0].get("department_id")
|
| 704 |
+
|
| 705 |
+
# Fallback: look up department via departments.manager_id
|
| 706 |
+
if not dept_id:
|
| 707 |
+
dept_lookup = sb.table("departments").select("id").eq("manager_id", user_id).limit(1).execute()
|
| 708 |
+
if dept_lookup.data:
|
| 709 |
+
dept_id = dept_lookup.data[0]["id"]
|
| 710 |
+
|
| 711 |
+
# Get all pending transactions in this group
|
| 712 |
+
group_txns = sb.table("product_transactions").select("*").eq(
|
| 713 |
+
"group_id", group_id
|
| 714 |
+
).eq("status", "pending").execute()
|
| 715 |
+
|
| 716 |
+
if not group_txns.data:
|
| 717 |
+
raise HTTPException(status_code=404, detail="No pending orders found in this group")
|
| 718 |
+
|
| 719 |
+
seller_id = group_txns.data[0]["seller_id"]
|
| 720 |
+
if dept_id:
|
| 721 |
+
seller_info = sb.table("users").select("department_id").eq("id", seller_id).execute()
|
| 722 |
+
if not seller_info.data or seller_info.data[0].get("department_id") != dept_id:
|
| 723 |
+
raise HTTPException(status_code=403, detail="Order does not belong to your department")
|
| 724 |
+
else:
|
| 725 |
+
if seller_id != user_id:
|
| 726 |
+
raise HTTPException(status_code=403, detail="Order does not belong to you")
|
| 727 |
+
|
| 728 |
+
# Approve all pending transactions in the group
|
| 729 |
+
update_data = {"status": "approved", "assigned_staff_id": user_id}
|
| 730 |
+
sb.table("product_transactions").update(update_data).eq("group_id", group_id).eq("status", "pending").execute()
|
| 731 |
+
|
| 732 |
+
return {"message": "All orders in group approved for delivery pickup"}
|
| 733 |
+
|
| 734 |
+
|
| 735 |
+
# --- Manager Reassign Order ---


class ReassignOrderRequest(BaseModel):
    """Request body for reassigning a delivery order to another staff member.

    The reassignment endpoint verifies the target staff member exists and
    belongs to the requesting manager's department before updating.
    """

    staff_id: str  # id of the staff member the order is reassigned to
| 740 |
+
|
| 741 |
+
@router.put("/manager/reassign/{transaction_id}")
|
| 742 |
+
async def manager_reassign_order(
|
| 743 |
+
transaction_id: str,
|
| 744 |
+
req: ReassignOrderRequest,
|
| 745 |
+
current_user: dict = Depends(get_current_user),
|
| 746 |
+
):
|
| 747 |
+
"""Manager reassigns an order to a specific staff member."""
|
| 748 |
+
sb = get_supabase()
|
| 749 |
+
user_id = current_user["sub"]
|
| 750 |
+
|
| 751 |
+
# Verify manager role
|
| 752 |
+
user = sb.table("users").select("role, department_id").eq("id", user_id).execute()
|
| 753 |
+
if not user.data or user.data[0]["role"] != "manager":
|
| 754 |
+
raise HTTPException(status_code=403, detail="Only managers can reassign orders")
|
| 755 |
+
|
| 756 |
+
dept_id = user.data[0].get("department_id")
|
| 757 |
+
if not dept_id:
|
| 758 |
+
dept_lookup = sb.table("departments").select("id").eq("manager_id", user_id).limit(1).execute()
|
| 759 |
+
if dept_lookup.data:
|
| 760 |
+
dept_id = dept_lookup.data[0]["id"]
|
| 761 |
+
|
| 762 |
+
if not dept_id:
|
| 763 |
+
raise HTTPException(status_code=400, detail="Manager is not assigned to a department")
|
| 764 |
+
|
| 765 |
+
# Verify target staff belongs to same department
|
| 766 |
+
target_staff = sb.table("users").select("id, department_id, full_name").eq("id", req.staff_id).execute()
|
| 767 |
+
if not target_staff.data:
|
| 768 |
+
raise HTTPException(status_code=404, detail="Staff member not found")
|
| 769 |
+
if target_staff.data[0].get("department_id") != dept_id:
|
| 770 |
+
raise HTTPException(status_code=403, detail="Staff member is not in your department")
|
| 771 |
+
|
| 772 |
+
# Verify transaction exists and belongs to department
|
| 773 |
+
txn = sb.table("product_transactions").select("*").eq("id", transaction_id).execute()
|
| 774 |
+
if not txn.data:
|
| 775 |
+
raise HTTPException(status_code=404, detail="Transaction not found")
|
| 776 |
+
|
| 777 |
+
seller_id = txn.data[0]["seller_id"]
|
| 778 |
+
seller_info = sb.table("users").select("department_id").eq("id", seller_id).execute()
|
| 779 |
+
if not seller_info.data or seller_info.data[0].get("department_id") != dept_id:
|
| 780 |
+
raise HTTPException(status_code=403, detail="Order does not belong to your department")
|
| 781 |
+
|
| 782 |
+
sb.table("product_transactions").update({
|
| 783 |
+
"assigned_staff_id": req.staff_id,
|
| 784 |
+
}).eq("id", transaction_id).execute()
|
| 785 |
+
|
| 786 |
+
staff_name = target_staff.data[0]["full_name"]
|
| 787 |
+
return {"message": f"Order reassigned to {staff_name}"}
|
| 788 |
+
|
| 789 |
+
|
| 790 |
+
# --- Buyer Order Cancellation ---

# Flat fee (PHP) charged when a buyer cancels while the order is already out
# for delivery ('ondeliver'); the fee is credited to the delivery user.
CANCELLATION_FEE = 50.00
| 794 |
+
|
| 795 |
+
@router.put("/buyer/cancel/{group_id}")
|
| 796 |
+
async def buyer_cancel_order(
|
| 797 |
+
group_id: str,
|
| 798 |
+
current_user: dict = Depends(get_current_user),
|
| 799 |
+
):
|
| 800 |
+
"""
|
| 801 |
+
Buyer cancels all orders in a group (delivery box).
|
| 802 |
+
- Free cancel if pending/approved (not yet picked up).
|
| 803 |
+
- ₱50 cancellation fee if ondeliver; fee goes to delivery user.
|
| 804 |
+
"""
|
| 805 |
+
sb = get_supabase()
|
| 806 |
+
user_id = current_user["sub"]
|
| 807 |
+
|
| 808 |
+
# Get all transactions in this group that belong to this buyer
|
| 809 |
+
group_txns = sb.table("product_transactions").select("*").eq(
|
| 810 |
+
"group_id", group_id
|
| 811 |
+
).eq("buyer_id", user_id).execute()
|
| 812 |
+
|
| 813 |
+
# Fallback: try treating group_id as a single transaction_id for backwards compat
|
| 814 |
+
if not group_txns.data:
|
| 815 |
+
group_txns = sb.table("product_transactions").select("*").eq(
|
| 816 |
+
"id", group_id
|
| 817 |
+
).eq("buyer_id", user_id).execute()
|
| 818 |
+
|
| 819 |
+
if not group_txns.data:
|
| 820 |
+
raise HTTPException(status_code=404, detail="Order not found")
|
| 821 |
+
|
| 822 |
+
# Determine group status (worst-case: ondeliver beats approved/pending)
|
| 823 |
+
status_priority = {"pending": 0, "approved": 1, "ondeliver": 2}
|
| 824 |
+
group_status = max((t["status"] for t in group_txns.data if t["status"] in status_priority), key=lambda s: status_priority.get(s, 0))
|
| 825 |
+
|
| 826 |
+
# Cancellable only if pending, approved, or ondeliver
|
| 827 |
+
if group_status not in ("pending", "approved", "ondeliver"):
|
| 828 |
+
raise HTTPException(status_code=400, detail=f"Cannot cancel order group with status '{group_status}'.")
|
| 829 |
+
|
| 830 |
+
# Total refundable amount = sum of all item amounts + the delivery fee (from primary txn)
|
| 831 |
+
total_product_amount = sum(float(t.get("amount", 0)) for t in group_txns.data)
|
| 832 |
+
total_delivery_fee = sum(float(t.get("delivery_fee", 0)) for t in group_txns.data)
|
| 833 |
+
grand_total = total_product_amount + total_delivery_fee
|
| 834 |
+
|
| 835 |
+
if group_status in ("pending", "approved"):
|
| 836 |
+
refund_amount = grand_total
|
| 837 |
+
fee_to_delivery = 0.0
|
| 838 |
+
else: # ondeliver
|
| 839 |
+
fee_to_delivery = CANCELLATION_FEE
|
| 840 |
+
refund_amount = grand_total - fee_to_delivery
|
| 841 |
+
|
| 842 |
+
# 1. Cancel all transactions in the group
|
| 843 |
+
sb.table("product_transactions").update({"status": "cancelled"}).eq("group_id", group_id).eq("buyer_id", user_id).execute()
|
| 844 |
+
|
| 845 |
+
# 2. Refund buyer
|
| 846 |
+
if refund_amount > 0:
|
| 847 |
+
buyer_bal = sb.table("user_balances").select("balance").eq("user_id", user_id).execute()
|
| 848 |
+
if buyer_bal.data:
|
| 849 |
+
new_bal = float(buyer_bal.data[0]["balance"]) + refund_amount
|
| 850 |
+
sb.table("user_balances").update({"balance": new_bal}).eq("user_id", user_id).execute()
|
| 851 |
+
|
| 852 |
+
# 3. Pay cancellation fee to delivery user (if mid-delivery cancel)
|
| 853 |
+
delivery_user_id = next((t.get("delivery_user_id") for t in group_txns.data if t.get("delivery_user_id")), None)
|
| 854 |
+
representative_txn_id = group_txns.data[0]["id"]
|
| 855 |
+
|
| 856 |
+
if fee_to_delivery > 0 and delivery_user_id:
|
| 857 |
+
del_bal = sb.table("user_balances").select("balance").eq("user_id", delivery_user_id).execute()
|
| 858 |
+
if del_bal.data:
|
| 859 |
+
new_del_bal = float(del_bal.data[0]["balance"]) + fee_to_delivery
|
| 860 |
+
sb.table("user_balances").update({"balance": new_del_bal}).eq("user_id", delivery_user_id).execute()
|
| 861 |
+
|
| 862 |
+
# Log in delivery_earnings for earnings history
|
| 863 |
+
sb.table("delivery_earnings").insert({
|
| 864 |
+
"delivery_user_id": delivery_user_id,
|
| 865 |
+
"transaction_id": representative_txn_id,
|
| 866 |
+
"amount": fee_to_delivery,
|
| 867 |
+
}).execute()
|
| 868 |
+
|
| 869 |
+
# 4. Restore product stock for all cancelled items
|
| 870 |
+
for t in group_txns.data:
|
| 871 |
+
prod = sb.table("products").select("stock").eq("id", t["product_id"]).execute()
|
| 872 |
+
if prod.data:
|
| 873 |
+
new_stock = int(prod.data[0]["stock"]) + int(t.get("quantity", 1))
|
| 874 |
+
sb.table("products").update({"stock": new_stock}).eq("id", t["product_id"]).execute()
|
| 875 |
+
|
| 876 |
+
fee_msg = f" A cancellation fee of PHP {fee_to_delivery:.2f} was deducted." if fee_to_delivery > 0 else ""
|
| 877 |
+
return {
|
| 878 |
+
"message": f"All orders in group cancelled. PHP {refund_amount:.2f} refunded to your wallet.{fee_msg}",
|
| 879 |
+
"refund_amount": refund_amount,
|
| 880 |
+
"cancellation_fee": fee_to_delivery,
|
| 881 |
+
}
|
| 882 |
+
|
| 883 |
+
|
| 884 |
+
# --- Salary History (for staff/managers) ---

@router.get("/salary-history")
async def get_salary_history(current_user: dict = Depends(get_current_user)):
    """Get salary payment history for the current user (staff or manager)."""
    from datetime import datetime, timezone

    sb = get_supabase()
    user_id = current_user["sub"]

    # Only sellers and managers have salaries.
    me = sb.table("users").select("role, salary").eq("id", user_id).execute()
    if not me.data:
        raise HTTPException(status_code=404, detail="User not found")
    if me.data[0]["role"] not in ("seller", "manager"):
        raise HTTPException(status_code=403, detail="Only staff and managers can view salary history")

    fixed_salary = float(me.data[0].get("salary", 0))

    # All salary payments made to this user (newest first, capped at 100).
    payments = (
        sb.table("salary_payments")
        .select("*")
        .eq("recipient_id", user_id)
        .order("created_at", desc=True)
        .limit(100)
        .execute()
    )
    payment_rows = payments.data or []

    current_month = datetime.now(timezone.utc).strftime("%Y-%m")
    paid_this_month = sum(
        float(p["amount"]) for p in payment_rows if p["payment_month"] == current_month
    )
    total_all_time = sum(float(p["amount"]) for p in payment_rows)

    history = [
        {
            "id": p["id"],
            "type": "salary_deposit",
            "amount": float(p["amount"]),
            "payment_month": p.get("payment_month", ""),
            "notes": p.get("notes", "Salary payment"),
            "created_at": p["created_at"],
        }
        for p in payment_rows
    ]

    # SVF withdrawals are shown alongside salary deposits.
    withdrawals = (
        sb.table("stored_value")
        .select("*")
        .eq("user_id", user_id)
        .eq("transaction_type", "withdrawal")
        .order("created_at", desc=True)
        .limit(100)
        .execute()
    )
    history += [
        {
            "id": w["id"],
            "type": "withdrawal",
            "amount": float(w["amount"]),
            "payment_month": "",
            "notes": "Salary withdrawal",
            "created_at": w["created_at"],
        }
        for w in (withdrawals.data or [])
    ]

    # Newest entries first, deposits and withdrawals interleaved.
    history.sort(key=lambda entry: entry["created_at"], reverse=True)

    return {
        "fixed_salary": fixed_salary,
        "paid_this_month": round(paid_this_month, 2),
        "remaining_this_month": round(max(fixed_salary - paid_this_month, 0), 2),
        "total_all_time": round(total_all_time, 2),
        "current_month": current_month,
        "history": history,
    }
backend/routes/wishlist.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Wishlist routes — buyers can save products to their wishlist.
|
| 3 |
+
Also includes seller-facing and admin-facing report endpoints for wishlist analytics.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from fastapi import APIRouter, HTTPException, Depends
|
| 7 |
+
from pydantic import BaseModel
|
| 8 |
+
from database import get_supabase
|
| 9 |
+
from routes.auth import get_current_user
|
| 10 |
+
|
| 11 |
+
router = APIRouter(prefix="/wishlist", tags=["Wishlist"])
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class AddWishlistRequest(BaseModel):
    """Request body for adding a product to the buyer's wishlist."""

    product_id: str  # id of the product to save
+
|
| 17 |
+
|
| 18 |
+
class WishlistItemResponse(BaseModel):
    """A wishlist entry joined with its product and seller display details."""

    id: str  # wishlist_items row id
    product_id: str
    title: str
    description: str
    price: float
    stock: int
    seller_id: str
    seller_name: str  # seller's department name when set, else their full name
    image_url: str  # first product image, or "" when the product has none
    created_at: str
+
|
| 30 |
+
|
| 31 |
+
# --- Routes ---
|
| 32 |
+
|
@router.get("/", response_model=list[WishlistItemResponse])
async def get_wishlist(current_user: dict = Depends(get_current_user)):
    """Get the current buyer's wishlist with product details.

    Returns newest-first wishlist entries joined with their product rows.
    Each entry's ``seller_name`` is the seller's department name when the
    seller belongs to a department, otherwise the seller's full name, and
    falls back to "Seller" when the user row is missing.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    wishlist_data = sb.table("wishlist_items").select(
        "*, products(id, title, description, price, seller_id, images, stock)"
    ).eq("buyer_id", user_id).order("created_at", desc=True).execute()

    # Drop entries whose joined product row is missing (e.g. deleted product).
    rows = [w for w in (wishlist_data.data or []) if w.get("products")]

    # Batch-resolve seller display names in at most two queries, instead of
    # issuing up to two queries per distinct seller inside the loop (the
    # original N+1 pattern).
    seller_ids = list({w["products"]["seller_id"] for w in rows})
    seller_name_cache = {}
    if seller_ids:
        sellers = sb.table("users").select("id, full_name, department_id").in_("id", seller_ids).execute()
        seller_rows = sellers.data or []
        dept_ids = list({s["department_id"] for s in seller_rows if s.get("department_id")})
        dept_names = {}
        if dept_ids:
            depts = sb.table("departments").select("id, name").in_("id", dept_ids).execute()
            dept_names = {d["id"]: d["name"] for d in (depts.data or [])}
        for s in seller_rows:
            # Prefer the department name; fall back to the seller's own name.
            seller_name_cache[s["id"]] = dept_names.get(s.get("department_id")) or s["full_name"]

    items = []
    for w in rows:
        prod = w["products"]
        images = prod.get("images") or []
        items.append(WishlistItemResponse(
            id=w["id"],
            product_id=prod["id"],
            title=prod["title"],
            description=prod.get("description", ""),
            price=float(prod["price"]),
            stock=int(prod.get("stock", 0)),
            seller_id=prod["seller_id"],
            # "Seller" placeholder when the user row was not found.
            seller_name=seller_name_cache.get(prod["seller_id"], "Seller"),
            image_url=images[0] if images else "",
            created_at=w["created_at"],
        ))

    return items
| 84 |
+
|
| 85 |
+
|
@router.post("/add")
async def add_to_wishlist(req: AddWishlistRequest, current_user: dict = Depends(get_current_user)):
    """Add a product to the buyer's wishlist.

    Guards: the product must exist, be active and approved; a seller cannot
    wishlist their own product; duplicate entries are rejected.
    """
    sb = get_supabase()
    buyer_id = current_user["sub"]

    # The product must be visible to buyers (active + approved) to be saved.
    product_query = (
        sb.table("products")
        .select("id, seller_id")
        .eq("id", req.product_id)
        .eq("is_active", True)
        .eq("status", "approved")
        .execute()
    )
    if not product_query.data:
        raise HTTPException(status_code=404, detail="Product not found")

    product = product_query.data[0]
    if product["seller_id"] == buyer_id:
        raise HTTPException(status_code=400, detail="Cannot add your own product to wishlist")

    # Reject duplicates explicitly rather than silently ignoring them.
    duplicate = (
        sb.table("wishlist_items")
        .select("id")
        .eq("buyer_id", buyer_id)
        .eq("product_id", req.product_id)
        .execute()
    )
    if duplicate.data:
        raise HTTPException(status_code=400, detail="Product already in wishlist")

    sb.table("wishlist_items").insert({
        "buyer_id": buyer_id,
        "product_id": req.product_id,
    }).execute()

    return {"message": "Added to wishlist"}
| 111 |
+
|
| 112 |
+
|
@router.delete("/remove/{product_id}")
async def remove_from_wishlist(product_id: str, current_user: dict = Depends(get_current_user)):
    """Remove a product from the buyer's wishlist (no-op when it is absent)."""
    sb = get_supabase()
    delete_query = sb.table("wishlist_items").delete()
    delete_query.eq("buyer_id", current_user["sub"]).eq("product_id", product_id).execute()
    return {"message": "Removed from wishlist"}
| 119 |
+
|
| 120 |
+
|
@router.get("/check/{product_id}")
async def check_wishlist(product_id: str, current_user: dict = Depends(get_current_user)):
    """Report whether a product is already in the buyer's wishlist."""
    sb = get_supabase()
    match = (
        sb.table("wishlist_items")
        .select("id")
        .eq("buyer_id", current_user["sub"])
        .eq("product_id", product_id)
        .execute()
    )
    return {"in_wishlist": bool(match.data)}
| 127 |
+
|
| 128 |
+
|
@router.get("/seller-report")
async def get_seller_wishlist_report(current_user: dict = Depends(get_current_user)):
    """
    Seller-facing wishlist report.

    Aggregates wishlist activity over every product the caller can manage:
    their own products, their department manager's products, and — when the
    caller is a manager — the products of every seller in their department.
    Returns per-product and per-buyer counts plus a wishlist-to-product ratio.
    """
    sb = get_supabase()
    user_id = current_user["sub"]

    # Resolve the full set of seller IDs whose products belong in this report
    # (mirrors the ownership logic of /products/my).
    seller_ids = [user_id]
    user_rows = sb.table("users").select("role, department_id, manager_id").eq("id", user_id).execute()
    me = user_rows.data[0] if user_rows.data else {}

    dept_id = me.get("department_id")
    if dept_id:
        dept_rows = sb.table("departments").select("manager_id").eq("id", dept_id).execute()
        manager_id = dept_rows.data[0].get("manager_id") if dept_rows.data else None
        if manager_id and manager_id not in seller_ids:
            seller_ids.append(manager_id)

    # Managers also see every seller in their department.
    if me.get("role") == "manager" and dept_id:
        dept_sellers = sb.table("users").select("id").eq("department_id", dept_id).eq("role", "seller").execute()
        for row in (dept_sellers.data or []):
            if row["id"] not in seller_ids:
                seller_ids.append(row["id"])

    products = sb.table("products").select("id, title, images, stock").in_("seller_id", seller_ids).execute()
    product_rows = products.data or []
    if not product_rows:
        return {
            "total_products": 0,
            "total_wishlists": 0,
            "unique_buyers": 0,
            "wishlist_per_product": 0,
            "products": [],
            "buyers": [],
        }

    product_ids = [p["id"] for p in product_rows]

    wishlist_rows = sb.table("wishlist_items").select("product_id, buyer_id, created_at").in_("product_id", product_ids).execute()

    # Tally wishlists per product and per buyer in a single pass.
    per_product = {}
    per_buyer = {}
    for row in (wishlist_rows.data or []):
        per_product[row["product_id"]] = per_product.get(row["product_id"], 0) + 1
        per_buyer[row["buyer_id"]] = per_buyer.get(row["buyer_id"], 0) + 1

    total_wishlists = sum(per_product.values())
    total_products = len(product_rows)

    # Attach names/emails to the buyers that appear in the tallies.
    buyers_list = []
    if per_buyer:
        buyer_rows = sb.table("users").select("id, full_name, email").in_("id", list(per_buyer)).execute()
        profiles = {b["id"]: b for b in (buyer_rows.data or [])}
        buyers_list = [
            {
                "buyer_id": bid,
                "buyer_name": profiles.get(bid, {}).get("full_name", "Unknown"),
                "buyer_email": profiles.get(bid, {}).get("email", ""),
                "wishlist_count": count,
            }
            for bid, count in per_buyer.items()
        ]
        buyers_list.sort(key=lambda entry: entry["wishlist_count"], reverse=True)

    # Per-product breakdown, most-wishlisted first.
    product_details = []
    for p in product_rows:
        imgs = p.get("images") or []
        product_details.append({
            "product_id": p["id"],
            "title": p["title"],
            "image_url": imgs[0] if imgs else "",
            "wishlist_count": per_product.get(p["id"], 0),
            "stock": int(p.get("stock") or 0),
        })
    product_details.sort(key=lambda entry: entry["wishlist_count"], reverse=True)

    return {
        "total_products": total_products,
        "total_wishlists": total_wishlists,
        "unique_buyers": len(per_buyer),
        "wishlist_per_product": round(total_wishlists / total_products, 2) if total_products > 0 else 0,
        "products": product_details,
        "buyers": buyers_list,
    }
| 228 |
+
|
| 229 |
+
|
@router.get("/admin-report")
async def get_admin_wishlist_report(current_user: dict = Depends(get_current_user)):
    """
    Admin-facing platform-wide wishlist report.

    Aggregates every wishlist item on the platform: top products, top buyers,
    per-store totals, and the 20 most recent additions. Admin-only.
    """
    if current_user.get("role") != "admin":
        raise HTTPException(status_code=403, detail="Admin access required")

    sb = get_supabase()

    # All wishlist items, newest first (so a prefix gives recent activity).
    wishlist_rows = (
        sb.table("wishlist_items")
        .select("id, product_id, buyer_id, created_at")
        .order("created_at", desc=True)
        .execute()
    )
    all_items = wishlist_rows.data or []

    if not all_items:
        return {
            "total_wishlists": 0,
            "unique_buyers": 0,
            "total_products_wishlisted": 0,
            "wishlists_per_product": 0,
            "top_products": [],
            "top_buyers": [],
            "by_store": [],
            "recent_activity": [],
        }

    product_ids = list({w["product_id"] for w in all_items})
    buyer_ids = list({w["buyer_id"] for w in all_items})

    # Resolve product, buyer, seller and department rows up front.
    products_resp = sb.table("products").select("id, title, images, seller_id").in_("id", product_ids).execute()
    product_map = {p["id"]: p for p in (products_resp.data or [])}

    buyers_resp = sb.table("users").select("id, full_name, email").in_("id", buyer_ids).execute()
    buyer_map = {b["id"]: b for b in (buyers_resp.data or [])}

    seller_ids = list({p["seller_id"] for p in (products_resp.data or [])})
    sellers_resp = sb.table("users").select("id, full_name, department_id").in_("id", seller_ids).execute()
    seller_map = {s["id"]: s for s in (sellers_resp.data or [])}

    dept_ids = list({s.get("department_id") for s in (sellers_resp.data or []) if s.get("department_id")})
    dept_map = {}
    if dept_ids:
        depts_resp = sb.table("departments").select("id, name").in_("id", dept_ids).execute()
        dept_map = {d["id"]: d["name"] for d in (depts_resp.data or [])}

    def get_store_name(seller_id):
        # A seller's "store" is their department name when one exists,
        # otherwise their personal full name.
        seller = seller_map.get(seller_id, {})
        dept_id = seller.get("department_id")
        if dept_id in dept_map:
            return dept_map[dept_id]
        return seller.get("full_name", "Unknown")

    # One pass over all items: counts per product, per buyer, per store.
    product_counts = {}
    buyer_counts = {}
    store_counts = {}
    for item in all_items:
        product_counts[item["product_id"]] = product_counts.get(item["product_id"], 0) + 1
        buyer_counts[item["buyer_id"]] = buyer_counts.get(item["buyer_id"], 0) + 1
        prod = product_map.get(item["product_id"])
        if prod:
            store = get_store_name(prod["seller_id"])
            store_counts[store] = store_counts.get(store, 0) + 1

    def top_n(counts, n=10):
        # Highest counts first, truncated to n entries.
        return sorted(counts.items(), key=lambda kv: kv[1], reverse=True)[:n]

    top_products = []
    for pid, count in top_n(product_counts):
        prod = product_map.get(pid, {})
        imgs = prod.get("images") or []
        top_products.append({
            "product_id": pid,
            "title": prod.get("title", "Unknown"),
            "image_url": imgs[0] if imgs else "",
            "store": get_store_name(prod.get("seller_id", "")),
            "wishlist_count": count,
        })

    top_buyers = []
    for bid, count in top_n(buyer_counts):
        profile = buyer_map.get(bid, {})
        top_buyers.append({
            "buyer_id": bid,
            "buyer_name": profile.get("full_name", "Unknown"),
            "buyer_email": profile.get("email", ""),
            "wishlist_count": count,
        })

    by_store = [
        {"store": store, "wishlist_count": count}
        for store, count in sorted(store_counts.items(), key=lambda kv: kv[1], reverse=True)
    ]

    # The query is ordered newest-first, so the first 20 items are the latest.
    recent_activity = []
    for item in all_items[:20]:
        prod = product_map.get(item["product_id"], {})
        buyer = buyer_map.get(item["buyer_id"], {})
        imgs = prod.get("images") or []
        recent_activity.append({
            "buyer_name": buyer.get("full_name", "Unknown"),
            "product_title": prod.get("title", "Unknown"),
            "product_image": imgs[0] if imgs else "",
            "store": get_store_name(prod.get("seller_id", "")),
            "created_at": item["created_at"],
        })

    total_wishlists = len(all_items)
    distinct_products = len(product_counts)

    return {
        "total_wishlists": total_wishlists,
        "unique_buyers": len(buyer_counts),
        "total_products_wishlisted": distinct_products,
        "wishlists_per_product": round(total_wishlists / distinct_products, 2) if distinct_products > 0 else 0,
        "top_products": top_products,
        "top_buyers": top_buyers,
        "by_store": by_store,
        "recent_activity": recent_activity,
    }
backend/trained_model/intent_classifier/config.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bert_model": "bert-base-multilingual-uncased",
|
| 3 |
+
"max_length": 128,
|
| 4 |
+
"num_intents": 4,
|
| 5 |
+
"label_names": [
|
| 6 |
+
"single_search",
|
| 7 |
+
"multi_search",
|
| 8 |
+
"filtered_search",
|
| 9 |
+
"free_form"
|
| 10 |
+
],
|
| 11 |
+
"best_f1": 0.9704016456031906,
|
| 12 |
+
"training_time_seconds": 28256.4,
|
| 13 |
+
"dataset": "c:\\Moi\\Thesis\\Code\\RetailTalkFolder\\shopping_queries_dataset\\IntentDataset_cleaned.xlsx",
|
| 14 |
+
"dataset_size": 9819,
|
| 15 |
+
"train_size": 8346,
|
| 16 |
+
"val_size": 1473,
|
| 17 |
+
"final_metrics": {
|
| 18 |
+
"exact_match_accuracy": 0.9111,
|
| 19 |
+
"hamming_loss": 0.0246,
|
| 20 |
+
"bleu_score": 0.0657,
|
| 21 |
+
"micro_precision": 0.9758,
|
| 22 |
+
"micro_recall": 0.9761,
|
| 23 |
+
"micro_f1": 0.9759,
|
| 24 |
+
"macro_precision": 0.9696,
|
| 25 |
+
"macro_recall": 0.9712,
|
| 26 |
+
"macro_f1": 0.9704,
|
| 27 |
+
"weighted_f1": 0.9758,
|
| 28 |
+
"samples_f1": 0.9668
|
| 29 |
+
}
|
| 30 |
+
}
|
backend/trained_model/intent_classifier/label_map.json
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"single_search": 0,
|
| 3 |
+
"multi_search": 1,
|
| 4 |
+
"filtered_search": 2,
|
| 5 |
+
"free_form": 3
|
| 6 |
+
}
|
backend/trained_model/ranker/config.json
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"BertForSequenceClassification"
|
| 4 |
+
],
|
| 5 |
+
"attention_probs_dropout_prob": 0.1,
|
| 6 |
+
"classifier_dropout": null,
|
| 7 |
+
"dtype": "float32",
|
| 8 |
+
"gradient_checkpointing": false,
|
| 9 |
+
"hidden_act": "gelu",
|
| 10 |
+
"hidden_dropout_prob": 0.1,
|
| 11 |
+
"hidden_size": 384,
|
| 12 |
+
"id2label": {
|
| 13 |
+
"0": "LABEL_0"
|
| 14 |
+
},
|
| 15 |
+
"initializer_range": 0.02,
|
| 16 |
+
"intermediate_size": 1536,
|
| 17 |
+
"label2id": {
|
| 18 |
+
"LABEL_0": 0
|
| 19 |
+
},
|
| 20 |
+
"layer_norm_eps": 1e-12,
|
| 21 |
+
"max_position_embeddings": 512,
|
| 22 |
+
"model_type": "bert",
|
| 23 |
+
"num_attention_heads": 12,
|
| 24 |
+
"num_hidden_layers": 12,
|
| 25 |
+
"pad_token_id": 0,
|
| 26 |
+
"position_embedding_type": "absolute",
|
| 27 |
+
"sbert_ce_default_activation_function": "torch.nn.modules.linear.Identity",
|
| 28 |
+
"transformers_version": "4.57.6",
|
| 29 |
+
"type_vocab_size": 2,
|
| 30 |
+
"use_cache": true,
|
| 31 |
+
"vocab_size": 30522
|
| 32 |
+
}
|
backend/trained_model/ranker/special_tokens_map.json
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cls_token": {
|
| 3 |
+
"content": "[CLS]",
|
| 4 |
+
"lstrip": false,
|
| 5 |
+
"normalized": false,
|
| 6 |
+
"rstrip": false,
|
| 7 |
+
"single_word": false
|
| 8 |
+
},
|
| 9 |
+
"mask_token": {
|
| 10 |
+
"content": "[MASK]",
|
| 11 |
+
"lstrip": false,
|
| 12 |
+
"normalized": false,
|
| 13 |
+
"rstrip": false,
|
| 14 |
+
"single_word": false
|
| 15 |
+
},
|
| 16 |
+
"pad_token": {
|
| 17 |
+
"content": "[PAD]",
|
| 18 |
+
"lstrip": false,
|
| 19 |
+
"normalized": false,
|
| 20 |
+
"rstrip": false,
|
| 21 |
+
"single_word": false
|
| 22 |
+
},
|
| 23 |
+
"sep_token": {
|
| 24 |
+
"content": "[SEP]",
|
| 25 |
+
"lstrip": false,
|
| 26 |
+
"normalized": false,
|
| 27 |
+
"rstrip": false,
|
| 28 |
+
"single_word": false
|
| 29 |
+
},
|
| 30 |
+
"unk_token": {
|
| 31 |
+
"content": "[UNK]",
|
| 32 |
+
"lstrip": false,
|
| 33 |
+
"normalized": false,
|
| 34 |
+
"rstrip": false,
|
| 35 |
+
"single_word": false
|
| 36 |
+
}
|
| 37 |
+
}
|
backend/trained_model/ranker/tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
backend/trained_model/ranker/tokenizer_config.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"added_tokens_decoder": {
|
| 3 |
+
"0": {
|
| 4 |
+
"content": "[PAD]",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": false,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false,
|
| 9 |
+
"special": true
|
| 10 |
+
},
|
| 11 |
+
"100": {
|
| 12 |
+
"content": "[UNK]",
|
| 13 |
+
"lstrip": false,
|
| 14 |
+
"normalized": false,
|
| 15 |
+
"rstrip": false,
|
| 16 |
+
"single_word": false,
|
| 17 |
+
"special": true
|
| 18 |
+
},
|
| 19 |
+
"101": {
|
| 20 |
+
"content": "[CLS]",
|
| 21 |
+
"lstrip": false,
|
| 22 |
+
"normalized": false,
|
| 23 |
+
"rstrip": false,
|
| 24 |
+
"single_word": false,
|
| 25 |
+
"special": true
|
| 26 |
+
},
|
| 27 |
+
"102": {
|
| 28 |
+
"content": "[SEP]",
|
| 29 |
+
"lstrip": false,
|
| 30 |
+
"normalized": false,
|
| 31 |
+
"rstrip": false,
|
| 32 |
+
"single_word": false,
|
| 33 |
+
"special": true
|
| 34 |
+
},
|
| 35 |
+
"103": {
|
| 36 |
+
"content": "[MASK]",
|
| 37 |
+
"lstrip": false,
|
| 38 |
+
"normalized": false,
|
| 39 |
+
"rstrip": false,
|
| 40 |
+
"single_word": false,
|
| 41 |
+
"special": true
|
| 42 |
+
}
|
| 43 |
+
},
|
| 44 |
+
"clean_up_tokenization_spaces": true,
|
| 45 |
+
"cls_token": "[CLS]",
|
| 46 |
+
"do_basic_tokenize": true,
|
| 47 |
+
"do_lower_case": true,
|
| 48 |
+
"extra_special_tokens": {},
|
| 49 |
+
"mask_token": "[MASK]",
|
| 50 |
+
"model_max_length": 512,
|
| 51 |
+
"never_split": null,
|
| 52 |
+
"pad_token": "[PAD]",
|
| 53 |
+
"sep_token": "[SEP]",
|
| 54 |
+
"strip_accents": null,
|
| 55 |
+
"tokenize_chinese_chars": true,
|
| 56 |
+
"tokenizer_class": "BertTokenizer",
|
| 57 |
+
"unk_token": "[UNK]"
|
| 58 |
+
}
|
backend/trained_model/ranker/vocab.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
backend/trained_model/slot_extractor/config.json
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"bert_model": "bert-base-multilingual-uncased",
|
| 3 |
+
"max_length": 64,
|
| 4 |
+
"num_tags": 21,
|
| 5 |
+
"tag_names": [
|
| 6 |
+
"B-BRAND",
|
| 7 |
+
"B-COLOR",
|
| 8 |
+
"B-CONN",
|
| 9 |
+
"B-MATERIAL",
|
| 10 |
+
"B-PRICE_MAX",
|
| 11 |
+
"B-PRICE_MIN",
|
| 12 |
+
"B-PRICE_MOD",
|
| 13 |
+
"B-PRODUCT1",
|
| 14 |
+
"B-PRODUCT2",
|
| 15 |
+
"B-RATING_MIN",
|
| 16 |
+
"B-RATING_MOD",
|
| 17 |
+
"B-SIZE",
|
| 18 |
+
"I-BRAND",
|
| 19 |
+
"I-COLOR",
|
| 20 |
+
"I-MATERIAL",
|
| 21 |
+
"I-PRICE_MOD",
|
| 22 |
+
"I-PRODUCT1",
|
| 23 |
+
"I-PRODUCT2",
|
| 24 |
+
"I-RATING_MOD",
|
| 25 |
+
"I-SIZE",
|
| 26 |
+
"O"
|
| 27 |
+
],
|
| 28 |
+
"best_micro_f1": 0.9360687022900763,
|
| 29 |
+
"training_time_seconds": 13681.3,
|
| 30 |
+
"dataset": "shopping_queries_dataset/slotannotationdataset_cleaned.xlsx",
|
| 31 |
+
"dataset_size": 5379,
|
| 32 |
+
"train_size": 4572,
|
| 33 |
+
"val_size": 807,
|
| 34 |
+
"final_token_metrics": {
|
| 35 |
+
"accuracy": 0.9389,
|
| 36 |
+
"bleu_score": 0.8819,
|
| 37 |
+
"micro_precision": 0.9264,
|
| 38 |
+
"micro_recall": 0.9385,
|
| 39 |
+
"micro_f1": 0.9324,
|
| 40 |
+
"macro_precision": 0.8037,
|
| 41 |
+
"macro_recall": 0.8294,
|
| 42 |
+
"macro_f1": 0.8134,
|
| 43 |
+
"weighted_f1": 0.9329
|
| 44 |
+
},
|
| 45 |
+
"final_entity_metrics": {
|
| 46 |
+
"precision": 0.9193,
|
| 47 |
+
"recall": 0.9363,
|
| 48 |
+
"f1": 0.9277
|
| 49 |
+
}
|
| 50 |
+
}
|
backend/trained_model/slot_extractor/tag_map.json
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"B-BRAND": 0,
|
| 3 |
+
"B-COLOR": 1,
|
| 4 |
+
"B-CONN": 2,
|
| 5 |
+
"B-MATERIAL": 3,
|
| 6 |
+
"B-PRICE_MAX": 4,
|
| 7 |
+
"B-PRICE_MIN": 5,
|
| 8 |
+
"B-PRICE_MOD": 6,
|
| 9 |
+
"B-PRODUCT1": 7,
|
| 10 |
+
"B-PRODUCT2": 8,
|
| 11 |
+
"B-RATING_MIN": 9,
|
| 12 |
+
"B-RATING_MOD": 10,
|
| 13 |
+
"B-SIZE": 11,
|
| 14 |
+
"I-BRAND": 12,
|
| 15 |
+
"I-COLOR": 13,
|
| 16 |
+
"I-MATERIAL": 14,
|
| 17 |
+
"I-PRICE_MOD": 15,
|
| 18 |
+
"I-PRODUCT1": 16,
|
| 19 |
+
"I-PRODUCT2": 17,
|
| 20 |
+
"I-RATING_MOD": 18,
|
| 21 |
+
"I-SIZE": 19,
|
| 22 |
+
"O": 20
|
| 23 |
+
}
|