Imsachin010 committed on
Commit
cb0d682
·
1 Parent(s): 1659364

Handle LLM network exceptions gracefully; fall back to "classify"

Browse files
Files changed (1) hide show
  1. inference.py +12 -10
inference.py CHANGED
@@ -51,16 +51,18 @@ def llm_decide_action(email):
51
  Output only the action name.
52
  """
53
 
54
- completion = client.chat.completions.create(
55
- model=MODEL_NAME,
56
- messages=[{"role": "user", "content": prompt}],
57
- max_tokens=10,
58
- temperature=0.0,
59
- )
60
-
61
- action_text = completion.choices[0].message.content.strip().lower()
62
-
63
- return action_text
 
 
64
 
65
 
66
  # ---------------- POLICY ----------------
 
51
  Output only the action name.
52
  """
53
 
54
+ try:
55
+ completion = client.chat.completions.create(
56
+ model=MODEL_NAME,
57
+ messages=[{"role": "user", "content": prompt}],
58
+ max_tokens=10,
59
+ temperature=0.0,
60
+ )
61
+ action_text = completion.choices[0].message.content.strip().lower()
62
+ return action_text
63
+ except Exception as e:
64
+ print(f"LLM API error: {e}")
65
+ return "classify"
66
 
67
 
68
  # ---------------- POLICY ----------------