helloparthshah committed
Commit 8875451 · Parent: 980918c

Streaming works perfectly!

Files changed (2):
  1. mainV2.py +3 -3
  2. src/manager.py +6 -6
mainV2.py CHANGED

@@ -38,7 +38,7 @@ if __name__ == "__main__":
             editable="user",
             scale=1
         )
-        input_box = gr.Textbox(submit_btn=True, stop_btn=True, max_lines=5, label="Chat Message", scale=0)
+        input_box = gr.Textbox(max_lines=5, label="Chat Message", scale=0)
 
         chatbot.undo(handle_undo, chatbot, [chatbot, input_box])
         chatbot.retry(handle_retry, chatbot, chatbot)
@@ -52,10 +52,10 @@ if __name__ == "__main__":
         ).then(
             model_manager.run, # Generate and stream response
             inputs=chatbot,
-            outputs=chatbot,
+            outputs=[chatbot, input_box],
+            queue=True,
             show_progress="full",
             trigger_mode="always_last"
         )
-        input_box.submit(lambda: "", None, [input_box])
 
     demo.launch(share=True)
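Taken together, these hunks let model_manager.run drive both the chatbot and the textbox: the generator streams chat history into the Chatbot and yields updates that lock the input box while tokens arrive. The following is a minimal, self-contained sketch of that wiring, not the project's actual mainV2.py; fake_run and add_user_message are placeholder names, and it assumes Gradio 4+ with the messages-format Chatbot.

import time
import gradio as gr

def add_user_message(text, history):
    # Append the user's turn and clear the textbox.
    return "", history + [{"role": "user", "content": text}]

def fake_run(history):
    # Stand-in for model_manager.run: a generator that yields
    # (chat history, textbox update) pairs, matching outputs=[chatbot, input_box].
    history = history + [{"role": "assistant", "content": ""}]
    for chunk in ["Hello", ", ", "world!"]:
        history[-1]["content"] += chunk
        yield history, gr.update(interactive=False)  # lock the box while streaming
        time.sleep(0.2)
    yield history, gr.update(interactive=True)       # unlock once generation ends

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    input_box = gr.Textbox(max_lines=5, label="Chat Message")

    input_box.submit(
        add_user_message, [input_box, chatbot], [input_box, chatbot]
    ).then(
        fake_run, inputs=chatbot, outputs=[chatbot, input_box]
    )

if __name__ == "__main__":
    demo.launch()

Yielding gr.update(interactive=...) as the second output is what lets the backend generator lock and unlock the textbox without a separate event handler on the frontend.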
src/manager.py CHANGED
@@ -7,9 +7,7 @@ import sys
 from src.tool_loader import ToolLoader
 from src.utils.suppress_outputs import suppress_output
 import logging
-from gradio import ChatMessage
-
-from src.utils.streamlit_interface import get_user_message, output_assistant_response
+import gradio as gr
 
 logger = logging.getLogger(__name__)
 handler = logging.StreamHandler(sys.stdout)
@@ -120,7 +118,8 @@ class GeminiManager:
                     "content": f"Error generating response: {e}"
                 })
                 logger.error(f"Error generating response: {e}")
-                return messages
+                yield messages, gr.update(interactive=True)
+                return
             logger.debug(f"Response: {response}")
             print("Response: ", response)
 
@@ -137,7 +136,7 @@
                 "role": "assistant",
                 "content": response.text
             })
-            yield messages
+            yield messages, gr.update(interactive=False,)
 
             # Attach the function call response to the messages
             if response.candidates[0].content and response.candidates[0].content.parts:
@@ -152,5 +151,6 @@
                 calls = self.handle_tool_calls(response)
                 messages.append(calls)
                 yield from self.run(messages)
+                return
             print("Final messages: ", messages)
-            return messages
+            yield messages, gr.update(interactive=True)
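The net effect on GeminiManager.run is a new generator contract: every yield is a (messages, textbox update) pair, the textbox is re-enabled on error or when the final answer lands, and the method returns right after the recursive yield from so the final unlock isn't emitted twice. Below is a standalone sketch of that control flow only; generate_reply and the depth counter are stand-ins for the real Gemini call and tool-call loop, not the project's code.

import gradio as gr

def generate_reply(messages, depth):
    # Placeholder for the real Gemini call; could raise on API errors.
    return {"role": "assistant", "content": f"(streamed step {depth})"}

def run(messages, depth=0):
    """Generator mirroring the new contract: each yield is (messages, textbox update)."""
    try:
        reply = generate_reply(messages, depth)
    except Exception as e:
        # On failure, surface the error and hand the textbox back to the user.
        messages.append({"role": "assistant", "content": f"Error generating response: {e}"})
        yield messages, gr.update(interactive=True)
        return

    messages.append(reply)
    yield messages, gr.update(interactive=False)   # keep input locked while work remains

    if depth < 2:                                  # stand-in for "the model requested another tool call"
        yield from run(messages, depth + 1)        # nested yields stream straight to the UI
        return                                     # don't emit a second "final" state after recursion

    yield messages, gr.update(interactive=True)    # done: unlock the input box

# Driving the generator by hand shows the lock/unlock sequence the UI sees:
for msgs, update in run([{"role": "user", "content": "hi"}]):
    print(len(msgs), update)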