# Gradio Space: smolagents CodeAgent wired to remote MCP tools over SSE.
import gradio as gr
import os
from smolagents import CodeAgent, InferenceClientModel
from smolagents.mcp_client import MCPClient
# Initialize the model using Hugging Face's serverless inference
# Ensure you have an HF_TOKEN set in your Space's Secrets
model = InferenceClientModel("meta-llama/Llama-3.2-3B-Instruct")
# Sentinel so the __main__ cleanup can tell whether the MCP connection
# was ever opened before calling stop() on it.
mcp_client = None
try:
    # Connect to the remote Gradio app's MCP endpoint over SSE.
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse"}
    )
    # Discover the tools the MCP server exposes.
    tools = mcp_client.get_tools()

    # Agent combining the remote MCP tools with smolagents' base tools.
    agent = CodeAgent(tools=[*tools], model=model, add_base_tools=True)

    def call_agent(message, history):
        """Run the agent on one chat turn and return its answer as text.

        Parameters
        ----------
        message : str
            The user's latest chat message.
        history : list
            Prior chat turns supplied by gr.ChatInterface (unused here).

        Returns
        -------
        str
            The agent's final answer, or an error string on failure.
        """
        try:
            response = agent.run(message)
            return str(response)
        except Exception as error:
            # Surface agent failures in the chat UI instead of crashing it.
            return f"Agent Error: {str(error)}"

    # Set up the Gradio Chat Interface backed by the agent.
    demo = gr.ChatInterface(
        fn=call_agent,
        type="messages",
        examples=["What is the prime factorization of 2026?"],
        title="SKT AI: Agent with MCP Tools",
        description="This agent uses the Model Context Protocol (MCP) to access external tools.",
    )
except Exception as e:
    print(f"Initialization Error: {e}")
    # BUG FIX: Python 3 unbinds the exception name `e` when the except
    # block exits, so the original `lambda x: f"Initialization failed: {e}"`
    # raised NameError whenever the fallback UI was actually used.
    # Capture the message now and bind it as a default argument so the
    # lambda is self-contained.
    failure_message = f"Initialization failed: {e}"
    # Fallback demo in case of connection issues.
    demo = gr.Interface(
        fn=lambda x, msg=failure_message: msg,
        inputs="text",
        outputs="text",
    )
def _main():
    """Serve the demo, guaranteeing the MCP connection is closed on exit."""
    try:
        demo.launch()
    finally:
        # Only opened connections get a stop(); mcp_client stays None when
        # initialization failed before the MCPClient was constructed.
        if mcp_client is not None:
            mcp_client.stop()


if __name__ == "__main__":
    _main()