"""
A Gradio MCP client that connects to my MCP server (mcp-rag-workflow).

This script connects to the MCP server, retrieves the available tools, and
sets up a Gradio chat interface where users can interact with an agent that
uses those tools.
"""

import os

from dotenv import load_dotenv
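
# Load environment variables (e.g. HUGGINGFACE_API_TOKEN) from a local .env file.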
load_dotenv()

import gradio as gr
from smolagents import CodeAgent, InferenceClientModel, MCPClient

# Start with no client so the finally block can disconnect safely even if the
# connection attempt below fails.
mcp_client = None

try:
    # Connect to the remote MCP server over SSE.
    mcp_client = MCPClient(
        {
            "url": "https://agents-mcp-hackathon-mcp-rag-workflow.hf.space/gradio_api/mcp/sse",
            "transport": "sse",
        }
    )
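
    # Retrieve the tools exposed by the MCP server.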
    tools = mcp_client.get_tools()
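
    # Build a code agent backed by a Hugging Face inference model, with the MCP tools attached.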
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    mcp_description = """
**Example Queries**:

- "What are the main features of the fuel system of the SU-35?"
- "What is the combat potential of the SU-35?"
- "Write me a report on the origin of the universe."
- "Write me a report on the impact of climate change on polar bears."
"""
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        chatbot=gr.Chatbot(
            height=450,
            placeholder="Ask me about the Sukhoi SU-35 or ask me to write a report on any topic.",
        ),
        type="messages",
        title="A Gradio MCP client that uses tools from my Hackathon MCP server",
        examples=[
            "What are the main features of the fuel system of the SU-35?",
            "What is the combat potential of the SU-35?",
            "Write me a report on the origin of the universe.",
        ],
        description=mcp_description,
    )
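
    # Launch the Gradio app.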
    demo.launch()
finally:
    # Always close the connection to the MCP server, if one was established.
    if mcp_client is not None:
        mcp_client.disconnect()