Spaces:
Sleeping
Sleeping
Commit ·
cb0d682
1
Parent(s): 1659364
handle LLM network exceptions gracefully; fall back to "classify"
Browse files — inference.py +12 -10
inference.py
CHANGED
|
@@ -51,16 +51,18 @@ def llm_decide_action(email):
|
|
| 51 |
Output only the action name.
|
| 52 |
"""
|
| 53 |
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
|
|
|
|
|
|
| 64 |
|
| 65 |
|
| 66 |
# ---------------- POLICY ----------------
|
|
|
|
| 51 |
Output only the action name.
|
| 52 |
"""
|
| 53 |
|
| 54 |
+
try:
|
| 55 |
+
completion = client.chat.completions.create(
|
| 56 |
+
model=MODEL_NAME,
|
| 57 |
+
messages=[{"role": "user", "content": prompt}],
|
| 58 |
+
max_tokens=10,
|
| 59 |
+
temperature=0.0,
|
| 60 |
+
)
|
| 61 |
+
action_text = completion.choices[0].message.content.strip().lower()
|
| 62 |
+
return action_text
|
| 63 |
+
except Exception as e:
|
| 64 |
+
print(f"LLM API error: {e}")
|
| 65 |
+
return "classify"
|
| 66 |
|
| 67 |
|
| 68 |
# ---------------- POLICY ----------------
|