Álvaro Valenzuela Valdes committed on
Commit ·
110158e
1
Parent(s): 2e21925
Fix: Enable Featherless fallback for agents when Gemini unavailable
Browse files
backend/app/services/agents.py
CHANGED
|
@@ -82,8 +82,9 @@ async def run_full_analysis(tender: Tender, company_profile: CompanyProfile, doc
|
|
| 82 |
if parse_result:
|
| 83 |
try:
|
| 84 |
# Add individual thoughts to the audit log for visual impact
|
| 85 |
-
audit_log.append(f"Resumen Legal: {legal_resp[:
|
| 86 |
-
audit_log.append(f"Resumen Técnico: {tech_resp[:
|
|
|
|
| 87 |
|
| 88 |
if not parse_result.get("report_markdown"):
|
| 89 |
parse_result["report_markdown"] = generate_markdown_report(parse_result)
|
|
|
|
| 82 |
if parse_result:
|
| 83 |
try:
|
| 84 |
# Add individual thoughts to the audit log for visual impact
|
| 85 |
+
audit_log.append(f"✅ Resumen Legal: {legal_resp[:100]}...")
|
| 86 |
+
audit_log.append(f"✅ Resumen Técnico: {tech_resp[:100]}...")
|
| 87 |
+
audit_log.append(f"✅ Síntesis completada")
|
| 88 |
|
| 89 |
if not parse_result.get("report_markdown"):
|
| 90 |
parse_result["report_markdown"] = generate_markdown_report(parse_result)
|
backend/app/services/llm.py
CHANGED
|
@@ -29,15 +29,16 @@ def get_gemini_model():
|
|
| 29 |
|
| 30 |
def call_gemini(prompt: str) -> str:
|
| 31 |
if not settings.gemini_api_key:
|
| 32 |
-
|
|
|
|
| 33 |
|
| 34 |
try:
|
| 35 |
model = get_gemini_model()
|
| 36 |
response = model.generate_content(prompt)
|
| 37 |
return response.text
|
| 38 |
except Exception as e:
|
| 39 |
-
print(f"Error calling Gemini: {e}")
|
| 40 |
-
return ""
|
| 41 |
|
| 42 |
def call_featherless(prompt: str, model: str = "deepseek-ai/DeepSeek-V3.2") -> str:
|
| 43 |
if not settings.featherless_api_key:
|
|
|
|
| 29 |
|
| 30 |
def call_gemini(prompt: str) -> str:
    """Return Gemini's text response to *prompt*, degrading to Featherless.

    Behavior:
      * If no Gemini API key is configured, the call is routed straight to
        Featherless with a Llama fallback model.
      * If the Gemini call raises, the error is printed and the same
        Featherless fallback is attempted instead of propagating.

    Returns:
        The model's text response (from Gemini or the Featherless fallback).
    """
    # Single source of truth for the fallback model — previously this
    # literal was duplicated in both fallback branches, so changing the
    # fallback model required two edits.
    fallback_model = "meta-llama/Llama-3.3-70B-Instruct"

    if not settings.gemini_api_key:
        # Fallback to Featherless if Gemini not available
        return call_featherless(prompt, fallback_model)

    try:
        model = get_gemini_model()
        response = model.generate_content(prompt)
        # NOTE(review): response.text can itself raise (e.g. blocked/empty
        # candidates) — that case is covered by the except below.
        return response.text
    except Exception as e:
        # Best-effort degradation: report the failure and retry on
        # Featherless so callers always get a string back.
        print(f"Error calling Gemini: {e}, trying Featherless fallback...")
        return call_featherless(prompt, fallback_model)
|
| 42 |
|
| 43 |
def call_featherless(prompt: str, model: str = "deepseek-ai/DeepSeek-V3.2") -> str:
|
| 44 |
if not settings.featherless_api_key:
|