Álvaro Valenzuela Valdes committed on
Commit
da1e56d
·
1 Parent(s): 583ea7d

debug: add LLM logging and model fallbacks

Browse files
backend/app/config.py CHANGED
@@ -17,3 +17,10 @@ class Settings(BaseSettings):
17
 
18
 
19
  settings = Settings()
 
 
 
 
 
 
 
 
17
 
18
 
19
# Single shared settings instance, created once at import time.
settings = Settings()

# Debug: Verify keys are loaded (Masked) — only presence is reported, never the
# key values themselves, so this is safe to leave in container logs.
print("--- ENVIRONMENT CONFIG CHECK ---")
for _label, _key in (
    ("GEMINI_API_KEY", settings.gemini_api_key),
    ("GROQ_API_KEY", settings.groq_api_key),
    ("FEATHERLESS_API_KEY", settings.featherless_api_key),
):
    print(f"{_label}: {'LOADED' if _key else 'MISSING'}")
print("--------------------------------")
backend/app/services/llm.py CHANGED
@@ -110,16 +110,31 @@ async def call_gemini_with_model(prompt: str, model_name: str | None = None, is_
110
  }
111
 
112
  model_id = model_map.get(model_name, "gemini")
 
 
 
 
 
 
113
 
114
  if model_id == "gemini":
115
  res = await call_gemini(prompt, is_json=is_json)
116
  if not res and settings.groq_api_key:
 
117
  return await call_groq(prompt, "llama-3.3-70b-versatile")
118
  return res
119
  elif model_id.startswith("groq:"):
120
- return await call_groq(prompt, model=model_id[5:])
 
 
 
 
121
  else:
122
- return await call_featherless(prompt, model=model_id)
 
 
 
 
123
 
124
  def _parse_gemini_response(output: str) -> dict | None:
125
  if not output:
 
110
  }
111
 
112
  model_id = model_map.get(model_name, "gemini")
113
+ print(f"DEBUG: Calling LLM with model_name='{model_name}' -> model_id='{model_id}'")
114
+
115
+ # Check keys
116
+ if model_id.startswith("groq:") and not settings.groq_api_key:
117
+ print("DEBUG WARNING: GROQ_API_KEY is missing! Falling back to Gemini.")
118
+ model_id = "gemini"
119
 
120
  if model_id == "gemini":
121
  res = await call_gemini(prompt, is_json=is_json)
122
  if not res and settings.groq_api_key:
123
+ print("DEBUG: Gemini failed or returned empty. Trying Groq fallback.")
124
  return await call_groq(prompt, "llama-3.3-70b-versatile")
125
  return res
126
  elif model_id.startswith("groq:"):
127
+ res = await call_groq(prompt, model=model_id[5:])
128
+ if not res and settings.gemini_api_key:
129
+ print("DEBUG: Groq failed or returned empty. Trying Gemini fallback.")
130
+ return await call_gemini(prompt, is_json=is_json)
131
+ return res
132
  else:
133
+ res = await call_featherless(prompt, model=model_id)
134
+ if not res and settings.groq_api_key:
135
+ print("DEBUG: Featherless failed. Trying Groq fallback.")
136
+ return await call_groq(prompt, "llama-3.3-70b-versatile")
137
+ return res
138
 
139
  def _parse_gemini_response(output: str) -> dict | None:
140
  if not output: