from datetime import datetime
from typing import List
from fastapi import APIRouter
from app.schemas.analysis import AnalysisRecord, AnalysisRequest, AnalysisResult, ChatRequest, SearchRecord
from app.services.agents import run_full_analysis
from app.services.llm import call_gemini_with_model
from app.services.persistence import save_to_json, load_from_json

# NOTE: `settings` is referenced in the /chat endpoint below but was not imported.
# This import path is an assumption about the project's layout; adjust it if the
# shared settings object lives in a different module.
from app.core.config import settings

router = APIRouter()
# Load initial history from disk
analysis_history: List[AnalysisRecord] = load_from_json(AnalysisRecord, "analysis_history.json")
search_history: List[SearchRecord] = load_from_json(SearchRecord, "search_history.json")
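
# The persistence helpers above are assumed to round-trip lists of Pydantic models
# through JSON: load_from_json(Model, path) returning a List[Model] (empty when the
# file is missing) and save_to_json(items, path) writing their serialized form.
# Their actual behavior is defined in app.services.persistence; this only summarizes
# how this module uses them.
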
@router.post("/analyze", response_model=AnalysisResult)
async def analyze_opportunity(request: AnalysisRequest):
result = await run_full_analysis(request.tender, request.company_profile, request.document_text, request.models, request.tender_details, request.amd_settings)
record = AnalysisRecord(
tender_code=request.tender.code,
tender_name=request.tender.name,
analyzed_at=datetime.utcnow(),
analysis=result,
)
analysis_history.insert(0, record)
if len(analysis_history) > 20:
analysis_history.pop()
# Persist to disk
save_to_json(analysis_history, "analysis_history.json")
return result
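
# Example request shape for the /analyze endpoint above (illustrative only; the
# exact schema is AnalysisRequest in app.schemas.analysis and the values shown
# here are hypothetical):
#   {
#     "tender": {"code": "...", "name": "...", "description": "..."},
#     "company_profile": {"experience": "..."},
#     "document_text": "...",
#     "models": {...},
#     "tender_details": {...},
#     "amd_settings": {"url": "...", "key": "..."}
#   }
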
@router.get("/analysis-history", response_model=List[AnalysisRecord])
def get_analysis_history():
return analysis_history
@router.post("/chat")
async def agent_chat(request: ChatRequest):
# Construct context
history_str = "\n".join([f"{m.role.upper()}{f' ({m.agent_name})' if m.agent_name else ''}: {m.content}" for m in request.history])
prompt = (
f"Eres {request.agent} en AndesOps AI, un consultor experto de élite. "
f"Actualmente estás operando bajo el motor de IA: {request.model}.\n\n"
f"CONTEXTO DE LA LICITACIÓN:\n{request.tender.model_dump_json()}\n\n"
f"DATOS DE MI EMPRESA:\n{request.company_profile.model_dump_json()}\n\n"
f"HISTORIAL DE CHAT:\n{history_str}\n\n"
f"PREGUNTA DEL USUARIO: {request.message}\n\n"
f"INSTRUCCIONES CRÍTICAS:\n"
f"1. Responde con la personalidad de {request.agent}. Sé agudo, profesional y estratégico.\n"
f"2. IDENTIDAD: Si el usuario pregunta qué modelo eres o quién te potencia, menciona que eres {request.agent} de AndesOps, funcionando sobre {request.model}.\n"
f"3. ANALIZA LAS BASES: Revisa el campo 'description' para responder.\n"
f"4. CITA EL DOCUMENTO: Menciona montos, multas o plazos explícitos si están disponibles.\n"
f"5. CONSEJO ESTRATÉGICO: Sugiere mejoras basadas en la experiencia de la empresa ({request.company_profile.experience}).\n"
f"RESPONDE EN ESPAÑOL."
)
if request.amd_settings:
settings.amd_inference_url = request.amd_settings.get("url")
settings.amd_api_key = request.amd_settings.get("key")
print(f"!!! AMD NODE ACTIVATED FOR CHAT: {settings.amd_inference_url} !!!")
response = await call_gemini_with_model(prompt, request.model)
if not response:
response = "Lo siento, tuve un problema procesando tu solicitud. ¿Podrías intentar de nuevo?"
return {"response": response}
@router.post("/search-history")
def save_search_history(record: SearchRecord):
search_history.insert(0, record)
if len(search_history) > 50:
search_history.pop()
save_to_json(search_history, "search_history.json")
return {"status": "ok"}
@router.get("/search-history", response_model=List[SearchRecord])
def get_search_history():
return search_history