Files changed (1) hide show
  1. app.py +31 -13
app.py CHANGED
@@ -1,33 +1,51 @@
1
  import gradio as gr
2
-
3
- from mcp.client.stdio import StdioServerParameters
4
- from smolagents import ToolCollection, CodeAgent
5
  from smolagents import CodeAgent, InferenceClientModel
6
  from smolagents.mcp_client import MCPClient
7
 
8
- model = InferenceClientModel()
 
 
 
 
9
 
10
  try:
 
11
  mcp_client = MCPClient(
12
  {"url": "https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse"}
13
  )
14
 
 
15
  tools = mcp_client.get_tools()
16
- agent = CodeAgent(tools=[*tools], model=model)
 
 
17
 
18
  def call_agent(message, history):
19
- return str(agent.run(message))
 
 
 
 
 
20
 
 
21
  demo = gr.ChatInterface(
22
  fn=call_agent,
23
  type="messages",
24
- examples=["Prime factorization of 68"],
25
- title="Agent with MCP Tools",
26
- description="This is a simple agent that uses MCP tools to answer questions.",
27
  )
28
 
29
- demo.launch()
30
  except Exception as e:
31
- raise e
32
- finally:
33
- mcp_client.stop()
 
 
 
 
 
 
 
 
import gradio as gr
import os

from smolagents import CodeAgent, InferenceClientModel
from smolagents.mcp_client import MCPClient

# Initialize the model using Hugging Face's serverless inference.
# Ensure you have an HF_TOKEN set in your Space's Secrets.
# The model id is configurable via the MODEL_ID environment variable and
# defaults to the previous hard-coded value — this also gives the `os`
# import (previously unused) a purpose.
model = InferenceClientModel(
    os.getenv("MODEL_ID", "meta-llama/Llama-3.2-3B-Instruct")
)

# Sentinel so the shutdown path can tell whether the MCP client was ever
# successfully created before trying to stop it.
mcp_client = None
 
12
  try:
13
+ # Initialize the MCP Client pointing to the Gradio SSE endpoint
14
  mcp_client = MCPClient(
15
  {"url": "https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse"}
16
  )
17
 
18
+ # Retrieve tools from the MCP server
19
  tools = mcp_client.get_tools()
20
+
21
+ # Create the agent with the retrieved tools
22
+ agent = CodeAgent(tools=[*tools], model=model, add_base_tools=True)
23
 
24
  def call_agent(message, history):
25
+ try:
26
+ # Run the agent and return the final answer as a string
27
+ response = agent.run(message)
28
+ return str(response)
29
+ except Exception as error:
30
+ return f"Agent Error: {str(error)}"
31
 
32
+ # Set up the Gradio Chat Interface
33
  demo = gr.ChatInterface(
34
  fn=call_agent,
35
  type="messages",
36
+ examples=["What is the prime factorization of 2026?"],
37
+ title="SKT AI: Agent with MCP Tools",
38
+ description="This agent uses the Model Context Protocol (MCP) to access external tools.",
39
  )
40
 
 
41
  except Exception as e:
42
+ print(f"Initialization Error: {e}")
43
+ # Fallback demo in case of connection issues
44
+ demo = gr.Interface(fn=lambda x: f"Initialization failed: {e}", inputs="text", outputs="text")
if __name__ == "__main__":
    # Serve the UI; whatever happens during launch, always tear down the
    # MCP connection afterwards — but only if initialization got far
    # enough to create it (the sentinel stays None on early failure).
    try:
        demo.launch()
    finally:
        client = mcp_client
        if client is not None:
            client.stop()