# AndesOps-AI / backend/app/routers/analysis.py
# Author: Álvaro Valenzuela Valdes
# deploy: v10 AMD hardware monitor integration (commit 2da34a9)
from datetime import datetime, timezone
from typing import List

from fastapi import APIRouter

from app.schemas.analysis import AnalysisRecord, AnalysisRequest, AnalysisResult, ChatRequest, SearchRecord
from app.services.agents import run_full_analysis
from app.services.llm import call_gemini_with_model
from app.services.persistence import save_to_json, load_from_json
router = APIRouter()
# Load initial history from disk
analysis_history: List[AnalysisRecord] = load_from_json(AnalysisRecord, "analysis_history.json")
search_history: List[SearchRecord] = load_from_json(SearchRecord, "search_history.json")
@router.post("/analyze", response_model=AnalysisResult)
async def analyze_opportunity(request: AnalysisRequest):
    """Run the full multi-agent analysis for a tender and record the result.

    Delegates to ``run_full_analysis``, prepends an ``AnalysisRecord`` to the
    in-memory history (capped at 20 entries, newest first), persists the
    history to disk, and returns the raw ``AnalysisResult`` to the caller.
    """
    result = await run_full_analysis(
        request.tender,
        request.company_profile,
        request.document_text,
        request.models,
        request.tender_details,
    )
    record = AnalysisRecord(
        tender_code=request.tender.code,
        tender_name=request.tender.name,
        # Timezone-aware UTC timestamp; datetime.utcnow() is deprecated
        # since Python 3.12 and returns a naive datetime.
        analyzed_at=datetime.now(timezone.utc),
        analysis=result,
    )
    analysis_history.insert(0, record)
    # Cap the history at 20 entries. Slice-delete also trims an oversized
    # history loaded from disk, which a single pop() would not.
    del analysis_history[20:]
    # Persist to disk
    save_to_json(analysis_history, "analysis_history.json")
    return result
@router.get("/analysis-history", response_model=List[AnalysisRecord])
def get_analysis_history():
    """Return the in-memory analysis history, newest entry first."""
    return analysis_history
def _build_chat_prompt(request: ChatRequest) -> str:
    """Assemble the Spanish system prompt for the agent chat.

    Embeds the tender data, company profile, serialized chat history and the
    user's question, plus persona/grounding instructions for the agent.
    """
    # Flatten prior turns as "ROLE (agent): content" lines.
    history_str = "\n".join(
        f"{m.role.upper()}{f' ({m.agent_name})' if m.agent_name else ''}: {m.content}"
        for m in request.history
    )
    return (
        f"Eres {request.agent} en AndesOps AI, un consultor experto de élite. "
        f"Actualmente estás operando bajo el motor de IA: {request.model}.\n\n"
        f"CONTEXTO DE LA LICITACIÓN:\n{request.tender.model_dump_json()}\n\n"
        f"DATOS DE MI EMPRESA:\n{request.company_profile.model_dump_json()}\n\n"
        f"HISTORIAL DE CHAT:\n{history_str}\n\n"
        f"PREGUNTA DEL USUARIO: {request.message}\n\n"
        f"INSTRUCCIONES CRÍTICAS:\n"
        f"1. Responde con la personalidad de {request.agent}. Sé agudo, profesional y estratégico.\n"
        f"2. IDENTIDAD: Si el usuario pregunta qué modelo eres o quién te potencia, menciona que eres {request.agent} de AndesOps, funcionando sobre {request.model}.\n"
        f"3. ANALIZA LAS BASES: Revisa el campo 'description' para responder.\n"
        f"4. CITA EL DOCUMENTO: Menciona montos, multas o plazos explícitos si están disponibles.\n"
        f"5. CONSEJO ESTRATÉGICO: Sugiere mejoras basadas en la experiencia de la empresa ({request.company_profile.experience}).\n"
        f"RESPONDE EN ESPAÑOL."
    )


@router.post("/chat")
async def agent_chat(request: ChatRequest):
    """Answer a chat message in the persona of the requested agent.

    Builds the grounding prompt, calls the configured LLM, and returns
    ``{"response": ...}``; falls back to a Spanish apology when the LLM
    call yields an empty/None response.
    """
    prompt = _build_chat_prompt(request)
    response = await call_gemini_with_model(prompt, request.model)
    if not response:
        # Best-effort fallback — keep the endpoint from returning an empty body.
        response = "Lo siento, tuve un problema procesando tu solicitud. ¿Podrías intentar de nuevo?"
    return {"response": response}
@router.post("/search-history")
def save_search_history(record: SearchRecord):
    """Prepend a search record to the history, cap it at 50 entries, persist.

    Returns a simple ``{"status": "ok"}`` acknowledgement.
    """
    search_history.insert(0, record)
    # Cap the history at 50 entries. Slice-delete also trims an oversized
    # history loaded from disk, which a single pop() would not.
    del search_history[50:]
    save_to_json(search_history, "search_history.json")
    return {"status": "ok"}
@router.get("/search-history", response_model=List[SearchRecord])
def get_search_history():
    """Return the in-memory search history, newest entry first."""
    return search_history