# syscred_duplicate / requirements-distilled.txt
# Dominique Loyer
# fix: Add missing requirements-distilled.txt
# d228524
# SysCRED - Optimized Requirements with Distilled Models
# Système Hybride de Vérification de Crédibilité
# (c) Dominique S. Loyer
#
# This version uses DISTILLED models for faster loading and lower memory:
# - DistilBERT instead of BERT (~60% smaller, 40% faster)
# - MiniLM for sentence embeddings (~5x smaller than all-mpnet)
# - Optimized for HuggingFace Spaces (16GB RAM limit)
# === Core Dependencies ===
requests>=2.28.0
beautifulsoup4>=4.11.0
python-whois>=0.8.0
# === RDF/Ontology ===
rdflib>=6.0.0
# === Machine Learning (Distilled/Optimized) ===
# Using CPU-only torch for smaller footprint
--extra-index-url https://download.pytorch.org/whl/cpu
torch>=2.0.0
# Transformers with minimal dependencies
transformers>=4.30.0
# Distilled sentence transformer (5x smaller than full models)
sentence-transformers>=2.2.0
# Data processing
numpy>=1.24.0
pandas>=2.0.0
# === Explainability ===
lime>=0.2.0
# === NLP for NER (French + English) ===
spacy>=3.5.0
# Note: Download models in Dockerfile with:
# python -m spacy download fr_core_news_sm
# python -m spacy download en_core_web_sm
# === Web Backend ===
flask>=2.3.0
flask-cors>=4.0.0
python-dotenv>=1.0.0
# === Production ===
gunicorn>=20.1.0
# === Development/Testing ===
pytest>=7.0.0