'''RSS MCP server demonstration client app.'''
import os
import json
import logging
from pathlib import Path
from logging.handlers import RotatingFileHandler
import gradio as gr
from gradio.components.chatbot import ChatMessage
from client.mcp_client import MCPClientWrapper
from client.anthropic_bridge import AnthropicBridge
# Make sure the log directory exists
Path('logs').mkdir(parents=True, exist_ok=True)

# Set up logging to a rotating file
logging.basicConfig(
    handlers=[RotatingFileHandler(
        'logs/rss_client.log',
        maxBytes=100000,
        backupCount=10,
        mode='w'
    )],
    level=logging.INFO,
    format='%(levelname)s - %(name)s - %(message)s'
)

logger = logging.getLogger(__name__)

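# MCP client pointed at the public RSS MCP server Space via its SSE endpoint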
client = MCPClientWrapper('https://agents-mcp-hackathon-rss-mcp-server.hf.space/gradio_api/mcp/sse')
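# The Anthropic API key comes from the ANTHROPIC_API_KEY environment variable;
# startup fails with KeyError if it is not set.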
bridge = AnthropicBridge(
    client,
    api_key=os.environ['ANTHROPIC_API_KEY']
)

async def submit_input(message: str, chat_history: list) -> tuple[str, list]:
    '''Submits the user message to the agent and returns the cleared input box
    plus the updated chat history.'''

    function_logger = logging.getLogger(__name__ + '.submit_input')

    chat_history.append({"role": "user", "content": message})
    input_messages = format_chat_history(chat_history)
    function_logger.info(input_messages)

    result = await bridge.process_query(input_messages)
    function_logger.info(result)
    function_logger.info(result.keys())

    try:
        chat_history.append({
            "role": "assistant",
            "content": result['llm_response'].content[0].text
        })

    except AttributeError:
        function_logger.info('Model called the tool, but did not talk about it')

        if result['tool_result']:
            articles = json.loads(result['tool_result'].content)['text']
            function_logger.info(articles)

            # Re-prompt the model with the tool output so it can summarize
            # the feed entries for the user.
            tmp_chat_history = chat_history.copy()
            tmp_chat_history.append({
                "role": "assistant",
                "content": ('Here are the three most recent entries from the RSS ' +
                            f'feed in JSON format. Tell the user what you have found: {json.dumps(articles)}')
            })

            tmp_input_messages = format_chat_history(tmp_chat_history)
            function_logger.info(tmp_input_messages)

            result = await bridge.process_query(tmp_input_messages)
            chat_history.append({
                "role": "assistant",
                "content": result['llm_response'].content[0].text
            })

    return '', chat_history

def format_chat_history(history) -> list[dict]:
    '''Formats Gradio chat history for submission to Anthropic.'''

    messages = []

    for chat_message in history:
        if isinstance(chat_message, ChatMessage):
            role, content = chat_message.role, chat_message.content
        else:
            role, content = chat_message.get("role"), chat_message.get("content")

        if role in ["user", "assistant", "system"]:
            messages.append({"role": role, "content": content})

    return messages

with gr.Blocks(title='MCP RSS client') as demo:
    gr.Markdown('# MCP RSS reader')
    gr.Markdown(
        'Connect to the MCP RSS server: ' +
        'https://huggingface.co/spaces/Agents-MCP-Hackathon/rss-mcp-server'
    )

    connect_btn = gr.Button('Connect')
    status = gr.Textbox(label='Connection Status', interactive=False, lines=10)

    chatbot = gr.Chatbot(
        value=[],
        height=500,
        type='messages',
        show_copy_button=True,
        avatar_images=('👤', '🤖')
    )

    msg = gr.Textbox(
        label='Your Question',
        placeholder='Ask about an RSS feed',
        scale=4
    )

    connect_btn.click(client.list_tools, outputs=status)  # pylint: disable=no-member
    msg.submit(submit_input, [msg, chatbot], [msg, chatbot])  # pylint: disable=no-member

if __name__ == '__main__':
    demo.launch(debug=True)