import os

import gradio as gr
from mcp import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient

if __name__ == "__main__":
    mcp_client = None
    try:
        # Connect to the MCP server exposed by the Gradio Space over SSE.
        mcp_client = MCPClient(
            {"url": "https://abidlabs-mcp-tools2.hf.space/gradio_api/mcp/sse", "transport": "sse"}
        )  # TODO: Change this to my HF space
        tools = mcp_client.get_tools()
        print("\n".join(f"{t.name}: {t.description}" for t in tools))

        # Define the model served via the Hugging Face Inference API.
        model = InferenceClientModel(
            model_id="Qwen/Qwen2.5-32B-Instruct",
            token=os.getenv("HF_TOKEN"),
        )
        # Give the agent access to every tool discovered on the MCP server.
        agent = CodeAgent(tools=[*tools], model=model)

        run_inference = False  # TEMP: flip to True to launch the chat UI
        if run_inference:
            chat_interface = gr.ChatInterface(
                fn=lambda message, history: str(agent.run(message)),
                type="messages",
                examples=["Prime factorization of 68"],
                title="Agent with MCP Tools",
                description="This is a simple agent that uses MCP tools to answer questions.",
            )
            chat_interface.launch()
    finally:
        # Close the MCP connection even if setup or inference fails.
        if mcp_client is not None:
            mcp_client.close()