import os | |
import gradio as gr | |
from mcp import StdioServerParameters | |
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient | |
if __name__ == "__main__":
    # Connect to a remote MCP (Model Context Protocol) server over SSE,
    # expose its tools to a smolagents CodeAgent, and (optionally) serve
    # the agent behind a Gradio chat UI.
    mcp_client = None  # pre-bind so the finally clause can't hit a NameError
    try:
        mcp_client = MCPClient(
            {"url": "https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse", "transport": "sse"})  # TODO: Change this to my HF space
        tools = mcp_client.get_tools()
        # Print the discovered tools so a misconfigured server is obvious at startup.
        print("\n".join(f"{t.name}: {t.description}" for t in tools))

        # Define the model backing the agent. HF_TOKEN must be set in the
        # environment; os.getenv returns None otherwise and the client falls
        # back to unauthenticated access.
        model = InferenceClientModel(
            model_id="Qwen/Qwen2.5-32B-Instruct",
            token=os.getenv("HF_TOKEN")
        )
        agent = CodeAgent(tools=[*tools], model=model)

        run_inference = False  # TEMP: flip to True to actually serve the chat UI
        if run_inference:
            chat_interface = gr.ChatInterface(
                fn=lambda message, history: str(agent.run(message)),
                type="messages",
                examples=["Prime factorization of 68"],
                title="Agent with MCP Tools",
                description="This is a simple agent that uses MCP tools to answer questions."
            )
            chat_interface.launch()
    finally:
        # Close only if construction succeeded. The original unconditionally
        # referenced mcp_client here, raising NameError (and masking the real
        # exception) whenever MCPClient(...) itself failed.
        if mcp_client is not None:
            mcp_client.close()