Spaces:
Sleeping
Sleeping
Shreya Pal committed on
Commit ·
885d181
1
Parent(s): eb29ff9
Fix hf_moderate error handling & loading fallback
Browse files- .gitignore +1 -0
- server/app.py +19 -8
.gitignore
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
|
|
| 1 |
venv/
|
| 2 |
__pycache__/
|
| 3 |
*.pyc
|
|
|
|
| 1 |
+
venv2/
|
| 2 |
venv/
|
| 3 |
__pycache__/
|
| 4 |
*.pyc
|
server/app.py
CHANGED
|
@@ -134,15 +134,26 @@ Respond with exactly this format:
|
|
| 134 |
timeout=30
|
| 135 |
)
|
| 136 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 137 |
raw = response.json()
|
| 138 |
-
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
"
|
| 145 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 146 |
|
| 147 |
if isinstance(raw, list):
|
| 148 |
text_out = raw[0].get("generated_text", "")
|
|
|
|
| 134 |
timeout=30
|
| 135 |
)
|
| 136 |
|
| 137 |
+
# Check for empty response (model loading or auth error)
|
| 138 |
+
if not response.text or response.status_code != 200:
|
| 139 |
+
raise ValueError(f"HF API returned status {response.status_code}: {response.text}")
|
| 140 |
+
|
| 141 |
raw = response.json()
|
| 142 |
+
|
| 143 |
+
# Handle model loading state
|
| 144 |
+
if isinstance(raw, dict) and raw.get("error"):
|
| 145 |
+
error = raw["error"]
|
| 146 |
+
if "loading" in str(error).lower():
|
| 147 |
+
# Model is warming up, return a fallback based on HF scores alone
|
| 148 |
+
top_score = max(hf_scores.get("toxicity", 0), hf_scores.get("threat", 0), hf_scores.get("insult", 0))
|
| 149 |
+
if top_score > 0.7:
|
| 150 |
+
decision = "remove"
|
| 151 |
+
elif top_score > 0.4:
|
| 152 |
+
decision = "flag"
|
| 153 |
+
else:
|
| 154 |
+
decision = "allow"
|
| 155 |
+
return {"decision": decision, "confidence": round(top_score, 2), "explanation": "Scored using toxicity model (LLM warming up)."}
|
| 156 |
+
raise ValueError(f"HF API error: {error}")
|
| 157 |
|
| 158 |
if isinstance(raw, list):
|
| 159 |
text_out = raw[0].get("generated_text", "")
|