gperdrizet committed (unverified)
Commit cbd458d · 2 parents: ecd30d6 390e1b0

Merge pull request #6 from gperdrizet/dev

client/gradio_functions.py CHANGED
@@ -3,17 +3,75 @@
 import os
 import re
 
-def update_log(n: int = 10):
-    '''Gets updated logging output from disk to display to user.
+import logging
+from pathlib import Path
+from logging.handlers import RotatingFileHandler
+
+
+def configure_root_logger() -> None:
+    '''Configures root logger for project-wide logging.'''
+
+    # Make sure log directory exists
+    Path('logs').mkdir(parents=True, exist_ok=True)
+
+    # Clear old logs if present
+    delete_old_logs('logs', 'rss_client')
+
+    # Set up the root logger so we catch logs from the whole project
+    logging.basicConfig(
+        handlers=[RotatingFileHandler(
+            'logs/rss_client.log',
+            maxBytes=100000,
+            backupCount=10,
+            mode='w'
+        )],
+        level=logging.INFO,
+        format='%(levelname)s - %(name)s - %(message)s'
+    )
+
+
+def get_dialog_logger(name: str = 'dialog', clear: bool = True) -> logging.Logger:
+    '''Sets up logger for the model's internal dialog.'''
+
+    # Make sure log directory exists
+    Path('logs').mkdir(parents=True, exist_ok=True)
+
+    # Clear old logs if desired
+    if clear:
+        delete_old_logs('logs', 'dialog')
+
+    # Create logger
+    new_dialog_logger = logging.getLogger(name)
+
+    # Create handler
+    handler = RotatingFileHandler(
+        'logs/dialog.log',
+        maxBytes=100000,
+        backupCount=10,
+        mode='w'
+    )
+
+    # Add format to handler
+    formatter = logging.Formatter('%(message)s')
+    handler.setFormatter(formatter)
+    new_dialog_logger.addHandler(handler)
+
+    # Set logging level
+    new_dialog_logger.setLevel(logging.INFO)
+
+    return new_dialog_logger
+
+def update_dialog(n: int = 10):
+    '''Gets updated internal dialog logging output from disk to display to user.
 
     Args:
-        n: number of most recent lines of logging output to display
+        n: number of most recent lines of internal dialog output to display
 
     Returns:
-        Logging output as string
+        Internal dialog logging output as string
     '''
 
-    with open('logs/rss_client.log', 'r', encoding='utf-8') as log_file:
+    with open('logs/dialog.log', 'r', encoding='utf-8') as log_file:
         lines = log_file.readlines()
 
     return ''.join(lines[-n:])
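
Note on the new logging split: configure_root_logger() sends everything that propagates to the root logger into logs/rss_client.log, while get_dialog_logger() writes a bare-message dialog transcript to logs/dialog.log for the UI to poll. A minimal usage sketch under that assumption (illustrative only; delete_old_logs() is an existing helper in this module):

import logging
import client.gradio_functions as gradio_funcs

# One-time setup: root logger -> logs/rss_client.log
gradio_funcs.configure_root_logger()

# Ordinary module logger inherits the root handler
logger = logging.getLogger(__name__)
logger.info('Application event')                   # -> logs/rss_client.log

# Message-only dialog logger for the chat transcript
dialog = gradio_funcs.get_dialog_logger(clear=True)
dialog.info('User: any news from Hacker News?')    # -> logs/dialog.log

Because get_dialog_logger() leaves propagate at its default, dialog records will also bubble up to the root handler; setting dialog.propagate = False would keep the two logs disjoint.
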
client/interface.py CHANGED
@@ -2,23 +2,36 @@
 
 import json
 import logging
+import queue
 from anthropic.types import text_block
 from gradio.components.chatbot import ChatMessage
 
 from client import prompts
 from client.anthropic_bridge import AnthropicBridge
+import client.gradio_functions as gradio_funcs
+
+# Create dialog logger
+dialog = gradio_funcs.get_dialog_logger(clear = True)
+
 
 async def agent_input(
     bridge: AnthropicBridge,
+    output_queue: queue.Queue,
     chat_history: list
 ) -> list:
 
     '''Handles model interactions.'''
 
-    function_logger = logging.getLogger(__name__ + '.agent_input')
+    logger = logging.getLogger(__name__ + '.agent_input')
+
+    user_query = chat_history[-1]['content']
+    dialog.info('User: %s', user_query)
 
     input_messages = format_chat_history(chat_history)
-    result = await bridge.process_query(prompts.DEFAULT_SYSTEM_PROMPT, input_messages)
+    result = await bridge.process_query(
+        prompts.DEFAULT_SYSTEM_PROMPT,
+        input_messages
+    )
 
     if result['tool_result']:
         tool_call = result['tool_call']
@@ -28,7 +41,6 @@ async def agent_input(
 
         tool_parameters = tool_call['parameters']
         website = tool_parameters['website']
-        user_query = input_messages[-1]['content']
         response_content = result['llm_response'].content[0]
 
         if isinstance(response_content, text_block.TextBlock):
@@ -36,9 +48,9 @@ async def agent_input(
         else:
             intermediate_reply = f'I Will check the {website} RSS feed for you'
 
-        function_logger.info('User query: %s', user_query)
-        function_logger.info('Model intermediate reply: %s', intermediate_reply)
-        function_logger.info('LLM called %s on %s', tool_name, website)
+        output_queue.put(intermediate_reply)
+        dialog.info('LLM: %s', intermediate_reply)
+        dialog.info('LLM: called %s on %s', tool_name, website)
 
         articles = json.loads(result['tool_result'].content)['text']
 
@@ -54,27 +66,23 @@ async def agent_input(
             'content': prompt
         }]
 
-        function_logger.info('Re-prompting input %s', input_message)
-        result = await bridge.process_query(prompts.GET_FEED_SYSTEM_PROMPT, input_message)
+        dialog.info('System: re-prompting LLM with return from %s call', tool_name)
+        dialog.info('New prompt: %s ...', prompt[:150])
+
+        logger.info('Re-prompting input %s', input_message)
+        result = await bridge.process_query(
+            prompts.GET_FEED_SYSTEM_PROMPT,
+            input_message
+        )
 
         try:
-            final_reply = result['llm_response'].content[0].text
+            reply = result['llm_response'].content[0].text
 
         except (IndexError, AttributeError):
-            final_reply = 'No final reply from model'
-
-        function_logger.info('LLM final reply: %s', final_reply)
+            reply = 'No final reply from model'
 
-        chat_history.append({
-            "role": "assistant",
-            "content": intermediate_reply
-        })
-
-        chat_history.append({
-            "role": "assistant",
-            "content": final_reply
-        })
+        logger.info('LLM final reply: %s', reply)
 
     else:
         try:
@@ -83,14 +91,11 @@ async def agent_input(
         except AttributeError:
             reply = 'Bad reply - could not parse'
 
-        function_logger.info('Direct, no-tool reply: %s', reply)
-
-        chat_history.append({
-            "role": "assistant",
-            "content": reply
-        })
+        logger.info('Direct, no-tool reply: %s', reply)
 
-    return chat_history
+    dialog.info('LLM: %s ...', reply[:100])
+    output_queue.put(reply)
+    output_queue.put('bot-finished')
 
 
 def format_chat_history(history) -> list[dict]:
@@ -102,9 +107,9 @@ def format_chat_history(history) -> list[dict]:
         if isinstance(chat_message, ChatMessage):
            role, content = chat_message.role, chat_message.content
         else:
-            role, content = chat_message.get("role"), chat_message.get("content")
+            role, content = chat_message.get('role'), chat_message.get('content')
 
-        if role in ["user", "assistant", "system"]:
-            messages.append({"role": role, "content": content})
+        if role in ['user', 'assistant', 'system']:
+            messages.append({'role': role, 'content': content})
 
     return messages
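
The rewritten agent_input() no longer returns the chat history; it streams each reply into the caller's queue.Queue and marks the end of the turn with a 'bot-finished' sentinel (consumed by send_message() in rss_client.py below). A self-contained sketch of the same producer/consumer handoff, with hypothetical reply strings:

import queue

def consume_replies(output_queue: queue.Queue) -> list[str]:
    '''Drains queued replies until the sentinel arrives.'''
    replies = []
    while True:
        response = output_queue.get()     # blocks until the producer puts something
        if response == 'bot-finished':    # sentinel used by agent_input()
            break
        replies.append(response)
    return replies

output_queue: queue.Queue = queue.Queue()
output_queue.put('I will check the feed for you')    # intermediate reply
output_queue.put('Here are the latest articles...')  # final reply
output_queue.put('bot-finished')
print(consume_replies(output_queue))
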
client/prompts.py CHANGED
@@ -5,19 +5,10 @@ from string import Template
 
 DEFAULT_SYSTEM_PROMPT = 'You are a helpful tool-using assistant.'
 
-GET_FEED_SYSTEM_PROMPT = '''
-You are a helpful assistant. Your job is to facilitate interactions between
-Human users and LLM agents.
-'''
+GET_FEED_SYSTEM_PROMPT = '''You are a helpful assistant. Your job is to facilitate interactions between Human users and LLM agents.'''
 
 GET_FEED_PROMPT = Template(
-    '''
-    Below is an exchange between a user and an agent. The user has asked
-    the agent to get new content from the $website RSS feed. In order to
-    complete the request, the agent has called a function which returned
-    the RSS feed content from $website in JSON format. Your job is to
-    complete the exchange by using the returned JSON RSS feed data to write
-    a human readable reply to the user.
+    '''Below is an exchange between a user and an agent. The user has asked the agent to get new content from the $website RSS feed. In order to complete the request, the agent has called a function which returned the RSS feed content from $website in JSON format. Your job is to complete the exchange by using the returned JSON RSS feed data to write a human readable reply to the user.
 
 user: $user_query
 
@@ -27,6 +18,5 @@ function call: get_feed_content($website)
 
 function return: $articles
 
-assistant:
-'''
-)
+assistant:'''
+)
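
Since GET_FEED_PROMPT is a string.Template, interface.py can fill the $website, $user_query and $articles fields with substitute(). A quick sketch with made-up values:

from client import prompts

# Hypothetical values standing in for the real query and tool return
prompt = prompts.GET_FEED_PROMPT.substitute(
    website='hackernews',
    user_query='Anything new on Hacker News?',
    articles='[{"title": "Example story", "link": "https://example.com"}]'
)
print(prompt[:150])  # interface.py logs the same truncated preview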
 
rss_client.py CHANGED
@@ -1,10 +1,13 @@
 '''RSS MCP server demonstration client app.'''
 
 import os
+import asyncio
 import logging
+import time
+import queue
+from typing import Tuple
 from pathlib import Path
 from logging.handlers import RotatingFileHandler
-
 import gradio as gr
 import assets.html as html
 import client.gradio_functions as gradio_funcs
@@ -12,15 +15,15 @@ import client.interface as interface
 from client.mcp_client import MCPClientWrapper
 from client.anthropic_bridge import AnthropicBridge
 
+# Set-up root logger so we send logs from the MCP client,
+# Gradio and the rest of the project to the same file.
 # Make sure log directory exists
 Path('logs').mkdir(parents=True, exist_ok=True)
 
 # Clear old logs if present
 gradio_funcs.delete_old_logs('logs', 'rss_client')
 
-# Set-up logger
-logger = logging.getLogger()
-
+# Configure
 logging.basicConfig(
     handlers=[RotatingFileHandler(
         'logs/rss_client.log',
@@ -32,12 +35,14 @@ logging.basicConfig(
     format='%(levelname)s - %(name)s - %(message)s'
 )
 
+# Get a logger
 logger = logging.getLogger(__name__)
 
 # Handle MCP server connection and interactions
 RSS_CLIENT = MCPClientWrapper(
     'https://agents-mcp-hackathon-rss-mcp-server.hf.space/gradio_api/mcp/sse'
 )
+logger.info('Started MCP client')
 
 # Handles Anthropic API I/O
 BRIDGE = AnthropicBridge(
@@ -45,25 +50,53 @@ BRIDGE = AnthropicBridge(
     api_key=os.environ['ANTHROPIC_API_KEY']
 )
 
-async def send_message(message: str, chat_history: list) -> str:
-    '''Submits user message to agent.
-
+logger.info('Started Anthropic API bridge')
+
+# Queue to return responses to user
+OUTPUT_QUEUE = queue.Queue()
+logger.info('Created response queue')
+
+def user_message(message: str, history: list) -> Tuple[str, list]:
+    '''Adds user message to conversation and returns for immediate posting.
+
     Args:
         message: the new message from the user as a string
        chat_history: list containing conversation history where each element is
            a dictionary with keys 'role' and 'content'
 
+    Returns
+        New chat history with user's message added.
+    '''
+
+    return '', history + [{'role': 'user', 'content': message}]
+
+
+def send_message(chat_history: list):
+    '''Submits chat history to agent, streams reply, one character at a time.
+
+    Args:
+        chat_history: list containing conversation history where each element is
+            a dictionary with keys 'role' and 'content'
+
     Returns
        New chat history with model's response to user added.
     '''
 
-    function_logger = logging.getLogger(__name__ + '.submit_input')
-    function_logger.info('Submitting user message: %s', message)
+    asyncio.run(interface.agent_input(BRIDGE, OUTPUT_QUEUE, chat_history))
 
-    chat_history.append({"role": "user", "content": message})
-    chat_history = await interface.agent_input(BRIDGE, chat_history)
+    while True:
+        response = OUTPUT_QUEUE.get()
 
-    return '', chat_history
+        if response == 'bot-finished':
+            break
+
+        chat_history.append({'role': 'assistant', 'content': ''})
+
+        for character in response:
+            chat_history[-1]['content'] += character
+            time.sleep(0.005)
+
+            yield chat_history
 
 
 with gr.Blocks(title='MCP RSS client') as demo:
@@ -75,15 +108,18 @@ with gr.Blocks(title='MCP RSS client') as demo:
     # MCP connection/tool dump
     connect_btn = gr.Button('Connect to MCP server')
     status = gr.Textbox(label='MCP server tool dump', interactive=False, lines=4)
-    connect_btn.click(RSS_CLIENT.list_tools, outputs=status) # pylint: disable=no-member
+    connect_btn.click( # pylint: disable=no-member
+        RSS_CLIENT.list_tools,
+        outputs=status
+    )
 
-    # Log output
-    logs = gr.Textbox(label='Client logs', lines=10, max_lines=10)
-    timer = gr.Timer(1, active=True)
+    # Dialog log output
+    dialog_output = gr.Textbox(label='Internal dialog', lines=10, max_lines=100)
+    timer = gr.Timer(0.5, active=True)
 
     timer.tick( # pylint: disable=no-member
-        lambda: gradio_funcs.update_log(), # pylint: disable=unnecessary-lambda
-        outputs=logs
+        lambda: gradio_funcs.update_dialog(), # pylint: disable=unnecessary-lambda
+        outputs=dialog_output
     )
 
     # Chat interface
@@ -102,17 +138,20 @@ with gr.Blocks(title='MCP RSS client') as demo:
     )
 
     msg.submit( # pylint: disable=no-member
-        send_message,
-        [msg, chatbot],
-        [msg, chatbot]
+        user_message, [msg, chatbot], [msg, chatbot], queue=False
+    ).then(
+        send_message, chatbot, chatbot
    )
 
+
 if __name__ == '__main__':
 
     current_directory = os.getcwd()
 
     if 'pyrite' in current_directory:
+        logger.info('Starting RASS on LAN')
         demo.launch(server_name='0.0.0.0', server_port=7860)
 
     else:
+        logger.info('Starting RASS')
         demo.launch()
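
The msg.submit(...).then(...) chain is the standard Gradio recipe for posting the user's message immediately and then streaming the assistant reply from a generator. A stripped-down sketch of the same wiring, with the agent replaced by a canned string:

import time
import gradio as gr

def add_user_message(message: str, history: list):
    # Clear the textbox and echo the user message right away
    return '', history + [{'role': 'user', 'content': message}]

def stream_reply(history: list):
    # Fake agent: in the app, send_message() drains OUTPUT_QUEUE instead
    reply = 'This is a streamed reply.'
    history.append({'role': 'assistant', 'content': ''})
    for character in reply:
        history[-1]['content'] += character
        time.sleep(0.005)
        yield history

with gr.Blocks() as sketch:
    chatbot = gr.Chatbot(type='messages')
    msg = gr.Textbox()
    msg.submit(
        add_user_message, [msg, chatbot], [msg, chatbot], queue=False
    ).then(
        stream_reply, chatbot, chatbot
    )

sketch.launch()

One caveat worth noting: send_message() wraps the async agent call in asyncio.run(), which only works because Gradio runs the sync generator in a worker thread that has no event loop of its own.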