from llama_index.llms.google_genai import GoogleGenAI
from llama_index.tools.arxiv import ArxivToolSpec
from llama_index.tools.wikipedia import WikipediaToolSpec
from llama_index.tools.duckduckgo import DuckDuckGoSearchResultsToolSpec
from llama_index.core.tools import FunctionTool
from llama_index.core.agent.workflow import AgentWorkflow
from tools import interpret_python_math_code
from gaia_system_prompt import GAIA_SYSTEM_PROMPT
import os
# Gemini backend configuration.
# The API key is read from the GEMINI_TOKEN environment variable; it may be
# None when the variable is unset.
GEMINI_API_KEY = os.environ.get("GEMINI_TOKEN")
# Model identifier handed to the GoogleGenAI client.
GEMINI_MODEL_NAME = "gemini-2.5-flash-preview-04-17"
class FinalAgent:
    """Question-answering agent backed by a Gemini LLM and search/reference tools."""

    def __init__(self):
        """Build the LLM client, the tool list, and the agent workflow.

        Raises:
            ValueError: if the GEMINI_TOKEN environment variable is not set.
        """
        if not GEMINI_API_KEY:
            # Fail fast with a clear message instead of an opaque auth error
            # on the first model call.
            raise ValueError("GEMINI_TOKEN environment variable is not set.")

        # LLM Initialization
        self.llm = GoogleGenAI(model=GEMINI_MODEL_NAME, api_key=GEMINI_API_KEY)

        # Tool Initialization. ToolSpec objects bundle several tools each; they
        # must be expanded with to_tool_list() so the workflow receives BaseTool
        # instances rather than raw spec objects.
        self.tools = [
            FunctionTool.from_defaults(
                func=interpret_python_math_code,
                name="InterpretPythonMathCode",
                description="Interprets Python code for mathematical expressions."
            ),
        ]
        for spec in (
            DuckDuckGoSearchResultsToolSpec(),
            WikipediaToolSpec(),
            ArxivToolSpec(),
        ):
            self.tools.extend(spec.to_tool_list())

        # Agent Workflow Initialization. AgentWorkflow's constructor takes
        # agents, not tools; the tools-based convenience entry point is
        # from_tools_or_functions().
        self.agent = AgentWorkflow.from_tools_or_functions(
            self.tools,
            llm=self.llm,
            system_prompt=GAIA_SYSTEM_PROMPT,
        )
        print("FinalAgent initialized.")

    def __call__(self, question: str) -> str:
        """Run the agent on *question* and return its answer as a string.

        AgentWorkflow.run() is asynchronous; it is driven to completion with
        asyncio.run() here so callers keep a plain synchronous interface.
        """
        import asyncio  # local import: keeps the file-level import block untouched

        print(f"Agent received question (first 50 chars): {question[:50]}...")

        async def _arun():
            # NOTE(review): assumes AgentWorkflow.run accepts the prompt as
            # user_msg — confirm against the installed llama_index version.
            return await self.agent.run(user_msg=question)

        response = asyncio.run(_arun())
        answer = str(response)
        print(f"Agent returning answer (first 50 chars): {answer[:50]}...")
        return answer