hanshan1988 committed on
Commit
f2f195a
·
1 Parent(s): af6fdc2

improved Langfuse tracing for every question

Browse files
Files changed (3) hide show
  1. .gitignore +1 -0
  2. agent.py +29 -10
  3. app.py +1 -1
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .env
agent.py CHANGED
@@ -13,6 +13,7 @@ from langgraph.graph.message import add_messages
13
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
14
 
15
  from langfuse.langchain import CallbackHandler
 
16
  from langfuse import get_client
17
 
18
  os.environ["LANGFUSE_PUBLIC_KEY"] = os.getenv("LANGFUSE_PUBLIC_KEY", "pk-lf-***") # Public key is safe to expose in client-side code
@@ -193,19 +194,28 @@ class BasicAgent:
193
  print("Agent graph built successfully.")
194
  return agent_graph
195
 
196
- async def __call__(self, question: str) -> str:
 
197
  print(f"Agent received question (first 100 chars): {question[:100]}...")
198
- # fixed_answer = "This is a default answer."
199
- # print(f"Agent returning fixed answer: {fixed_answer}")
200
- # Example query agent might receive
201
- # fixed_answer = await agent.run(question)
 
 
 
 
 
 
 
 
 
202
  messages = [
203
  HumanMessage(
204
- # content="Who is Barack Obama?"
205
- # content="Divide 6790 by 5"
206
- content=question # + '/nothink'
207
  )
208
  ]
 
209
  response = await self.agent_graph.ainvoke(
210
  {"messages": messages},
211
  config={
@@ -213,6 +223,15 @@ class BasicAgent:
213
  "callbacks": [langfuse_handler],
214
  }
215
  )
 
216
  response_text = response['messages'][-1].content
217
- # return response_text.split('</think>')[-1]
218
- return extract_answer(response_text)
 
 
 
 
 
 
 
 
 
13
  from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
14
 
15
  from langfuse.langchain import CallbackHandler
16
+ from langfuse.decorators import observe, langfuse_context
17
  from langfuse import get_client
18
 
19
  os.environ["LANGFUSE_PUBLIC_KEY"] = os.getenv("LANGFUSE_PUBLIC_KEY", "pk-lf-***") # Public key is safe to expose in client-side code
 
194
  print("Agent graph built successfully.")
195
  return agent_graph
196
 
197
+ @observe(as_type="generation")
198
+ async def __call__(self, question: str, task_id: str = None) -> str:
199
  print(f"Agent received question (first 100 chars): {question[:100]}...")
200
+
201
+ # Update trace context with unique identifier and metadata
202
+ langfuse_context.update_current_trace(
203
+ name=f"agent_question_{task_id or 'unknown'}",
204
+ user_id="agent_user",
205
+ session_id=task_id,
206
+ metadata={
207
+ "task_id": task_id,
208
+ "question_preview": question[:200]
209
+ },
210
+ tags=["agent", "question_answering"]
211
+ )
212
+
213
  messages = [
214
  HumanMessage(
215
+ content=question
 
 
216
  )
217
  ]
218
+
219
  response = await self.agent_graph.ainvoke(
220
  {"messages": messages},
221
  config={
 
223
  "callbacks": [langfuse_handler],
224
  }
225
  )
226
+
227
  response_text = response['messages'][-1].content
228
+ answer = extract_answer(response_text)
229
+
230
+ # Update the current observation with input and output
231
+ langfuse_context.update_current_observation(
232
+ input=question,
233
+ output=answer,
234
+ metadata={"full_response_length": len(response_text)}
235
+ )
236
+
237
+ return answer
app.py CHANGED
@@ -93,7 +93,7 @@ async def run_and_submit_all( profile: gr.OAuthProfile | None):
93
  print(f"Skipping item with missing task_id or question: {item}")
94
  continue
95
  try:
96
- submitted_answer = await agent(question_text)
97
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
98
  results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
99
  except Exception as e:
 
93
  print(f"Skipping item with missing task_id or question: {item}")
94
  continue
95
  try:
96
+ submitted_answer = await agent(question_text, task_id=task_id)
97
  answers_payload.append({"task_id": task_id, "submitted_answer": submitted_answer})
98
  results_log.append({"Task ID": task_id, "Question": question_text, "Submitted Answer": submitted_answer})
99
  except Exception as e: