Umer797's picture
Update llm_node.py
e3c0d0c verified
raw
history blame
623 Bytes
from langchain_community.chat_models import ChatOpenAI
def llm_node(question, model="gpt-3.5-turbo", temperature=0):
    """Answer a GAIA benchmark question with a bare, format-exact reply.

    Args:
        question: The GAIA question text to forward to the model.
        model: OpenAI chat model name (default ``"gpt-3.5-turbo"``,
            matching the original hard-coded value).
        temperature: Sampling temperature; 0 keeps answers deterministic,
            which matters for exact-match benchmark scoring.

    Returns:
        The model's reply text with surrounding whitespace stripped.
    """
    # NOTE(review): a fresh client is built per call, as in the original;
    # callers doing many requests may want to hoist this — confirm intent.
    llm = ChatOpenAI(model=model, temperature=temperature)
    # GAIA scoring is exact-match, so the prompt forbids any extra text
    # (no explanations, prefixes, or intros around the answer).
    prompt = f"""You are solving a GAIA benchmark evaluation question.
⚠️ VERY IMPORTANT:
- ONLY return the final answer, exactly as required.
- DO NOT include explanations, prefixes, or notes.
- Format the answer exactly as asked (e.g., comma-separated, plural, in requested order).
- If the question asks for a list, give only the list, no intro.
Here’s the question:
{question}
Your direct answer:"""
    response = llm.invoke(prompt)
    # Strip stray whitespace/newlines the model may add around the answer.
    return response.content.strip()