gperdrizet committed on
Commit
7cc32e5
·
verified ·
1 Parent(s): 06ffc6c

Added a UI element that shows the LLM's internal dialog to the user.

Browse files
Files changed (3) hide show
  1. client/gradio_functions.py +63 -5
  2. client/interface.py +22 -10
  3. rss_client.py +20 -13
client/gradio_functions.py CHANGED
@@ -3,17 +3,75 @@
3
  import os
4
  import re
5
 
6
def update_log(n: int = 10):
    '''Gets updated logging output from disk to display to user.

    Args:
        n: number of most recent lines of logging output to display

    Returns:
        Logging output as string; empty string if the log file does
        not exist yet.
    '''

    try:
        with open('logs/rss_client.log', 'r', encoding='utf-8') as log_file:
            lines = log_file.readlines()
    except FileNotFoundError:
        # The UI timer can poll before logging has created the file;
        # show nothing rather than crash the refresh callback.
        return ''

    return ''.join(lines[-n:])
 
3
  import os
4
  import re
5
 
6
+ import logging
7
+ from pathlib import Path
8
+ from logging.handlers import RotatingFileHandler
9
+
10
+
11
def configure_root_logger() -> None:
    '''Configures the root logger for project-wide logging.

    Ensures the log directory exists, removes stale log files, then
    routes all root-logger output to a rotating file under logs/.
    '''

    # The log directory must exist before the file handler opens it
    Path('logs').mkdir(parents=True, exist_ok=True)

    # Remove stale logs left over from previous runs
    delete_old_logs('logs', 'rss_client')

    # Rotating handler keeps disk usage bounded: up to 10 backups
    # of ~100 kB each
    log_handler = RotatingFileHandler(
        'logs/rss_client.log',
        maxBytes=100000,
        backupCount=10,
        mode='w'
    )

    # Root-level config so logs from the whole project land in one file
    logging.basicConfig(
        handlers=[log_handler],
        level=logging.INFO,
        format='%(levelname)s - %(name)s - %(message)s'
    )
31
+
32
+
33
def get_dialog_logger(name: str = 'dialog', clear: bool = True) -> logging.Logger:
    '''Sets up logger for model's internal dialog.

    Args:
        name: name of the logger to create or retrieve
        clear: if True, delete old dialog log files first

    Returns:
        Logger that writes bare messages to logs/dialog.log
    '''

    # Make sure log directory exists
    Path('logs').mkdir(parents=True, exist_ok=True)

    # Clear old logs if desired
    if clear:
        delete_old_logs('logs', 'dialog')

    # Create logger
    new_dialog_logger = logging.getLogger(name)

    # logging.getLogger returns the same object for the same name, so
    # guard against attaching a second handler on repeated calls —
    # otherwise every dialog line would be written multiple times.
    if not new_dialog_logger.handlers:

        handler = RotatingFileHandler(
            'logs/dialog.log',
            maxBytes=100000,
            backupCount=10,
            mode='w'
        )

        # Bare-message format: the dialog pane shows the text only
        formatter = logging.Formatter('%(message)s')
        handler.setFormatter(formatter)
        new_dialog_logger.addHandler(handler)

    # Set logging level
    new_dialog_logger.setLevel(logging.INFO)

    return new_dialog_logger
63
+
64
def update_dialog(n: int = 10):
    '''Gets updated internal dialog logging output from disk to display to user.

    Args:
        n: number of most recent lines of internal dialog output to display

    Returns:
        Internal dialog logging output as string; empty string if the
        dialog log file does not exist yet.
    '''

    try:
        with open('logs/dialog.log', 'r', encoding='utf-8') as log_file:
            lines = log_file.readlines()
    except FileNotFoundError:
        # The Gradio timer fires every second and can race the dialog
        # logger's file creation; show nothing rather than crash the UI.
        return ''

    return ''.join(lines[-n:])
client/interface.py CHANGED
@@ -7,6 +7,11 @@ from gradio.components.chatbot import ChatMessage
7
 
8
  from client import prompts
9
  from client.anthropic_bridge import AnthropicBridge
 
 
 
 
 
10
 
11
  async def agent_input(
12
  bridge: AnthropicBridge,
@@ -15,10 +20,16 @@ async def agent_input(
15
 
16
  '''Handles model interactions.'''
17
 
18
- function_logger = logging.getLogger(__name__ + '.agent_input')
 
 
 
19
 
20
  input_messages = format_chat_history(chat_history)
21
- result = await bridge.process_query(prompts.DEFAULT_SYSTEM_PROMPT, input_messages)
 
 
 
22
 
23
  if result['tool_result']:
24
  tool_call = result['tool_call']
@@ -28,7 +39,6 @@ async def agent_input(
28
 
29
  tool_parameters = tool_call['parameters']
30
  website = tool_parameters['website']
31
- user_query = input_messages[-1]['content']
32
  response_content = result['llm_response'].content[0]
33
 
34
  if isinstance(response_content, text_block.TextBlock):
@@ -36,9 +46,8 @@ async def agent_input(
36
  else:
37
  intermediate_reply = f'I Will check the {website} RSS feed for you'
38
 
39
- function_logger.info('User query: %s', user_query)
40
- function_logger.info('Model intermediate reply: %s', intermediate_reply)
41
- function_logger.info('LLM called %s on %s', tool_name, website)
42
 
43
  articles = json.loads(result['tool_result'].content)['text']
44
 
@@ -54,8 +63,11 @@ async def agent_input(
54
  'content': prompt
55
  }]
56
 
57
- function_logger.info('Re-prompting input %s', input_message)
58
- result = await bridge.process_query(prompts.GET_FEED_SYSTEM_PROMPT, input_message)
 
 
 
59
 
60
  try:
61
 
@@ -64,7 +76,7 @@ async def agent_input(
64
  except (IndexError, AttributeError):
65
  final_reply = 'No final reply from model'
66
 
67
- function_logger.info('LLM final reply: %s', final_reply)
68
 
69
  chat_history.append({
70
  "role": "assistant",
@@ -83,7 +95,7 @@ async def agent_input(
83
  except AttributeError:
84
  reply = 'Bad reply - could not parse'
85
 
86
- function_logger.info('Direct, no-tool reply: %s', reply)
87
 
88
  chat_history.append({
89
  "role": "assistant",
 
7
 
8
  from client import prompts
9
  from client.anthropic_bridge import AnthropicBridge
10
+ import client.gradio_functions as gradio_funcs
11
+
12
+ # Create dialog logger
13
+ dialog = gradio_funcs.get_dialog_logger(clear = True)
14
+
15
 
16
  async def agent_input(
17
  bridge: AnthropicBridge,
 
20
 
21
  '''Handles model interactions.'''
22
 
23
+ logger = logging.getLogger(__name__ + '.agent_input')
24
+
25
+ user_query = chat_history[-1]['content']
26
+ dialog.info('User: %s', user_query)
27
 
28
  input_messages = format_chat_history(chat_history)
29
+ result = await bridge.process_query(
30
+ prompts.DEFAULT_SYSTEM_PROMPT,
31
+ input_messages
32
+ )
33
 
34
  if result['tool_result']:
35
  tool_call = result['tool_call']
 
39
 
40
  tool_parameters = tool_call['parameters']
41
  website = tool_parameters['website']
 
42
  response_content = result['llm_response'].content[0]
43
 
44
  if isinstance(response_content, text_block.TextBlock):
 
46
  else:
47
  intermediate_reply = f'I Will check the {website} RSS feed for you'
48
 
49
+ dialog.info('LLM: %s', intermediate_reply)
50
+ dialog.info('LLM: called %s on %s', tool_name, website)
 
51
 
52
  articles = json.loads(result['tool_result'].content)['text']
53
 
 
63
  'content': prompt
64
  }]
65
 
66
+ logger.info('Re-prompting input %s', input_message)
67
+ result = await bridge.process_query(
68
+ prompts.GET_FEED_SYSTEM_PROMPT,
69
+ input_message
70
+ )
71
 
72
  try:
73
 
 
76
  except (IndexError, AttributeError):
77
  final_reply = 'No final reply from model'
78
 
79
+ logger.info('LLM final reply: %s', final_reply)
80
 
81
  chat_history.append({
82
  "role": "assistant",
 
95
  except AttributeError:
96
  reply = 'Bad reply - could not parse'
97
 
98
+ logger.info('Direct, no-tool reply: %s', reply)
99
 
100
  chat_history.append({
101
  "role": "assistant",
rss_client.py CHANGED
@@ -4,7 +4,6 @@ import os
4
  import logging
5
  from pathlib import Path
6
  from logging.handlers import RotatingFileHandler
7
-
8
  import gradio as gr
9
  import assets.html as html
10
  import client.gradio_functions as gradio_funcs
@@ -12,15 +11,15 @@ import client.interface as interface
12
  from client.mcp_client import MCPClientWrapper
13
  from client.anthropic_bridge import AnthropicBridge
14
 
 
 
15
  # Make sure log directory exists
16
  Path('logs').mkdir(parents=True, exist_ok=True)
17
 
18
  # Clear old logs if present
19
  gradio_funcs.delete_old_logs('logs', 'rss_client')
20
 
21
- # Set-up logger
22
- logger = logging.getLogger()
23
-
24
  logging.basicConfig(
25
  handlers=[RotatingFileHandler(
26
  'logs/rss_client.log',
@@ -32,12 +31,14 @@ logging.basicConfig(
32
  format='%(levelname)s - %(name)s - %(message)s'
33
  )
34
 
 
35
  logger = logging.getLogger(__name__)
36
 
37
  # Handle MCP server connection and interactions
38
  RSS_CLIENT = MCPClientWrapper(
39
  'https://agents-mcp-hackathon-rss-mcp-server.hf.space/gradio_api/mcp/sse'
40
  )
 
41
 
42
  # Handles Anthropic API I/O
43
  BRIDGE = AnthropicBridge(
@@ -45,6 +46,9 @@ BRIDGE = AnthropicBridge(
45
  api_key=os.environ['ANTHROPIC_API_KEY']
46
  )
47
 
 
 
 
48
  async def send_message(message: str, chat_history: list) -> str:
49
  '''Submits user message to agent.
50
 
@@ -57,10 +61,7 @@ async def send_message(message: str, chat_history: list) -> str:
57
  New chat history with model's response to user added.
58
  '''
59
 
60
- function_logger = logging.getLogger(__name__ + '.submit_input')
61
- function_logger.info('Submitting user message: %s', message)
62
-
63
- chat_history.append({"role": "user", "content": message})
64
  chat_history = await interface.agent_input(BRIDGE, chat_history)
65
 
66
  return '', chat_history
@@ -75,15 +76,18 @@ with gr.Blocks(title='MCP RSS client') as demo:
75
  # MCP connection/tool dump
76
  connect_btn = gr.Button('Connect to MCP server')
77
  status = gr.Textbox(label='MCP server tool dump', interactive=False, lines=4)
78
- connect_btn.click(RSS_CLIENT.list_tools, outputs=status) # pylint: disable=no-member
 
 
 
79
 
80
- # Log output
81
- logs = gr.Textbox(label='Client logs', lines=10, max_lines=10)
82
  timer = gr.Timer(1, active=True)
83
 
84
  timer.tick( # pylint: disable=no-member
85
- lambda: gradio_funcs.update_log(), # pylint: disable=unnecessary-lambda
86
- outputs=logs
87
  )
88
 
89
  # Chat interface
@@ -107,12 +111,15 @@ with gr.Blocks(title='MCP RSS client') as demo:
107
  [msg, chatbot]
108
  )
109
 
 
110
  if __name__ == '__main__':
111
 
112
  current_directory = os.getcwd()
113
 
114
  if 'pyrite' in current_directory:
 
115
  demo.launch(server_name='0.0.0.0', server_port=7860)
116
 
117
  else:
 
118
  demo.launch()
 
4
  import logging
5
  from pathlib import Path
6
  from logging.handlers import RotatingFileHandler
 
7
  import gradio as gr
8
  import assets.html as html
9
  import client.gradio_functions as gradio_funcs
 
11
  from client.mcp_client import MCPClientWrapper
12
  from client.anthropic_bridge import AnthropicBridge
13
 
14
+ # Set-up root logger so we send logs from the MCP client,
15
+ # Gradio and the rest of the project to the same file.
16
  # Make sure log directory exists
17
  Path('logs').mkdir(parents=True, exist_ok=True)
18
 
19
  # Clear old logs if present
20
  gradio_funcs.delete_old_logs('logs', 'rss_client')
21
 
22
+ # Configure
 
 
23
  logging.basicConfig(
24
  handlers=[RotatingFileHandler(
25
  'logs/rss_client.log',
 
31
  format='%(levelname)s - %(name)s - %(message)s'
32
  )
33
 
34
+ # Get a logger
35
  logger = logging.getLogger(__name__)
36
 
37
  # Handle MCP server connection and interactions
38
  RSS_CLIENT = MCPClientWrapper(
39
  'https://agents-mcp-hackathon-rss-mcp-server.hf.space/gradio_api/mcp/sse'
40
  )
41
+ logger.info('Started MCP client')
42
 
43
  # Handles Anthropic API I/O
44
  BRIDGE = AnthropicBridge(
 
46
  api_key=os.environ['ANTHROPIC_API_KEY']
47
  )
48
 
49
+ logger.info('Started Anthropic API bridge')
50
+
51
+
52
  async def send_message(message: str, chat_history: list) -> str:
53
  '''Submits user message to agent.
54
 
 
61
  New chat history with model's response to user added.
62
  '''
63
 
64
+ chat_history.append({'role': 'user', 'content': message})
 
 
 
65
  chat_history = await interface.agent_input(BRIDGE, chat_history)
66
 
67
  return '', chat_history
 
76
  # MCP connection/tool dump
77
  connect_btn = gr.Button('Connect to MCP server')
78
  status = gr.Textbox(label='MCP server tool dump', interactive=False, lines=4)
79
+ connect_btn.click(# pylint: disable=no-member
80
+ RSS_CLIENT.list_tools,
81
+ outputs=status
82
+ )
83
 
84
+ # Dialog log output
85
+ dialog_output = gr.Textbox(label='Internal dialog', lines=10, max_lines=10)
86
  timer = gr.Timer(1, active=True)
87
 
88
  timer.tick( # pylint: disable=no-member
89
+ lambda: gradio_funcs.update_dialog(), # pylint: disable=unnecessary-lambda
90
+ outputs=dialog_output
91
  )
92
 
93
  # Chat interface
 
111
  [msg, chatbot]
112
  )
113
 
114
+
115
  if __name__ == '__main__':
116
 
117
  current_directory = os.getcwd()
118
 
119
  if 'pyrite' in current_directory:
120
+ logger.info('Starting RASS on LAN')
121
  demo.launch(server_name='0.0.0.0', server_port=7860)
122
 
123
  else:
124
+ logger.info('Starting RASS')
125
  demo.launch()