Umer797 committed on
Commit 94bbd69 · verified · 1 Parent(s): 3a66e59

Update graph_builder.py

Files changed (1): graph_builder.py (+37 -9)
graph_builder.py CHANGED
@@ -1,28 +1,56 @@
 from langgraph.graph import StateGraph
+from langchain_openai import ChatOpenAI
+from langchain_community.utilities.tavily_search import TavilySearchAPIWrapper
 
 def build_graph():
     graph = StateGraph(dict)
 
-    def llm_step(state):
+    # Set up the search tool. TavilySearchAPIWrapper reads TAVILY_API_KEY
+    # from the environment, so no secret is hard-coded in source.
+    search = TavilySearchAPIWrapper()
+
+    def search_step(state):
         question = state.get("question")
         if not question:
-            raise ValueError("State is missing 'question'")
-        # This is where you integrate your LLM call.
-        llm_output = f"Dummy answer to: {question}"
-        state["llm_output"] = llm_output
+            raise ValueError("Missing 'question' in state")
+        # results() returns a list of result dicts from the Tavily API.
+        result = search.results(question)
+        state["search_result"] = result
+        return state
+
+    def llm_step(state):
+        llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
+        question = state.get("question")
+        search_info = state.get("search_result", "")
+        prompt = f"""
+You are solving a GAIA benchmark question.
+Here's the question:
+{question}
+
+Here's retrieved web info:
+{search_info}
+
+ONLY return the final exact answer (no explanation, no prefix).
+"""
+        response = llm.invoke(prompt)
+        state["llm_output"] = response.content.strip()
         return state
 
     def formatter_step(state):
         llm_output = state.get("llm_output")
         if not llm_output:
-            raise ValueError("State is missing 'llm_output'")
-        # For now, we just pass it through.
-        state["final_answer"] = llm_output.strip()
+            raise ValueError("Missing 'llm_output' in state")
+        cleaned = llm_output.strip()
+        state["final_answer"] = cleaned
         return state
 
+    # Add nodes
+    graph.add_node("search", search_step)
     graph.add_node("llm", llm_step)
     graph.add_node("formatter", formatter_step)
-    graph.set_entry_point("llm")
+
+    # Define flow
+    graph.set_entry_point("search")
+    graph.add_edge("search", "llm")
     graph.add_edge("llm", "formatter")
     graph.set_finish_point("formatter")
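
For reference, a minimal usage sketch of the new three-node pipeline (search → llm → formatter). It rests on two assumptions not shown in the diff: that build_graph() ends with `return graph.compile()` (the file's final line is cut off in this view), and that OPENAI_API_KEY and TAVILY_API_KEY are exported in the environment. The question string is only a placeholder.

# Sketch only, under the assumptions stated above.
from graph_builder import build_graph

app = build_graph()

# The nodes run in order, each filling one key of the shared dict state:
# search_step -> "search_result", llm_step -> "llm_output",
# formatter_step -> "final_answer".
final_state = app.invoke({"question": "Who wrote 'The Selfish Gene'?"})
print(final_state["final_answer"])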
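
Because the nodes communicate through bare dict keys, one possible follow-up (a hypothetical hardening step, not part of this commit) is to declare the state as a TypedDict, which documents the channels the graph passes between nodes:

from typing import TypedDict
from langgraph.graph import StateGraph

# Hypothetical schema; the field names mirror the keys the three nodes set.
class GraphState(TypedDict, total=False):
    question: str
    search_result: list
    llm_output: str
    final_answer: str

# This would replace StateGraph(dict) inside build_graph().
graph = StateGraph(GraphState)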