helloparthshah committed on
Commit
4af653a
·
1 Parent(s): 6900003

Pushing UI updates

Browse files
Files changed (2) hide show
  1. main.py +54 -45
  2. src/manager.py +4 -4
main.py CHANGED
@@ -51,49 +51,58 @@ if __name__ == "__main__":
51
  #title-row { background: #2c2c2c; border-radius: 8px; padding: 8px; }
52
  """
53
  with gr.Blocks(css=css, fill_width=True, fill_height=True) as demo:
54
- with gr.Row():
55
- gr.HTML(_header_html)
56
- model_dropdown = gr.Dropdown(
57
- choices=[
58
- "HASHIRU",
59
- "Static-HASHIRU",
60
- "Cloud-Only HASHIRU",
61
- "Local-Only HASHIRU",
62
- "No-Economy HASHIRU",
63
- ],
64
- value="HASHIRU",
65
- # label="HASHIRU",
66
- scale=2,
67
- interactive=True,
68
- )
 
69
 
70
- model_dropdown.change(fn=update_model, inputs=model_dropdown, outputs=[])
71
-
72
- chatbot = gr.Chatbot(
73
- avatar_images=("HASHIRU_2.png", "HASHIRU.png"),
74
- type="messages",
75
- show_copy_button=True,
76
- editable="user",
77
- scale=1
78
- )
79
- input_box = gr.Textbox(label="Chat Message", scale=0, interactive=True, submit_btn=True)
80
-
81
- chatbot.undo(handle_undo, chatbot, [chatbot, input_box])
82
- chatbot.retry(handle_retry, chatbot, [chatbot, input_box])
83
- chatbot.edit(handle_edit, chatbot, [chatbot, input_box])
84
-
85
- input_box.submit(
86
- user_message, # Add user message to chat
87
- inputs=[input_box, chatbot],
88
- outputs=[input_box, chatbot],
89
- queue=False,
90
- ).then(
91
- model_manager.run, # Generate and stream response
92
- inputs=chatbot,
93
- outputs=[chatbot, input_box],
94
- queue=True,
95
- show_progress="full",
96
- trigger_mode="always_last"
97
- )
98
-
99
- demo.launch(share=True)
 
 
 
 
 
 
 
 
 
51
  #title-row { background: #2c2c2c; border-radius: 8px; padding: 8px; }
52
  """
53
  with gr.Blocks(css=css, fill_width=True, fill_height=True) as demo:
54
+ local_storage = gr.BrowserState(["", ""])
55
+ with gr.Column(scale=1):
56
+ with gr.Row(scale=0):
57
+ gr.HTML(_header_html)
58
+ model_dropdown = gr.Dropdown(
59
+ choices=[
60
+ "HASHIRU",
61
+ "Static-HASHIRU",
62
+ "Cloud-Only HASHIRU",
63
+ "Local-Only HASHIRU",
64
+ "No-Economy HASHIRU",
65
+ ],
66
+ value="HASHIRU",
67
+ # label="HASHIRU",
68
+ interactive=True,
69
+ )
70
 
71
+ model_dropdown.change(fn=update_model, inputs=model_dropdown, outputs=[])
72
+ with gr.Row(scale=1):
73
+ with gr.Sidebar(position="left"):
74
+ buttons = []
75
+ for i in range(1, 6):
76
+ button = gr.Button(f"Button {i}", elem_id=f"button-{i}")
77
+ button.click(fn=lambda x=i: print(f"Button {x} clicked"), inputs=[], outputs=[])
78
+ buttons.append(button)
79
+ with gr.Column(scale=1):
80
+ chatbot = gr.Chatbot(
81
+ avatar_images=("HASHIRU_2.png", "HASHIRU.png"),
82
+ type="messages",
83
+ show_copy_button=True,
84
+ editable="user",
85
+ scale=1,
86
+ render_markdown=True,
87
+ )
88
+ input_box = gr.Textbox(label="Chat Message", scale=0, interactive=True, submit_btn=True)
89
+
90
+ chatbot.undo(handle_undo, chatbot, [chatbot, input_box])
91
+ chatbot.retry(handle_retry, chatbot, [chatbot, input_box])
92
+ chatbot.edit(handle_edit, chatbot, [chatbot, input_box])
93
+
94
+ input_box.submit(
95
+ user_message, # Add user message to chat
96
+ inputs=[input_box, chatbot],
97
+ outputs=[input_box, chatbot],
98
+ queue=False,
99
+ ).then(
100
+ model_manager.ask_llm, # Generate and stream response
101
+ inputs=chatbot,
102
+ outputs=[chatbot, input_box],
103
+ queue=True,
104
+ show_progress="full",
105
+ trigger_mode="always_last"
106
+ )
107
+
108
+ demo.launch()
src/manager.py CHANGED
@@ -116,9 +116,11 @@ class GeminiManager:
116
  parts=parts
117
  ))
118
  return formatted_history
119
-
 
 
 
120
  def run(self, messages):
121
- print("Messages: ", messages)
122
  chat_history = self.format_chat_history(messages)
123
  logger.debug(f"Chat history: {chat_history}")
124
  try:
@@ -133,7 +135,6 @@ class GeminiManager:
133
  yield messages, gr.update(interactive=True)
134
  return
135
  logger.debug(f"Response: {response}")
136
- print("Response: ", response)
137
 
138
  if (not response.text and not response.function_calls):
139
  messages.append({
@@ -164,5 +165,4 @@ class GeminiManager:
164
  messages.append(calls)
165
  yield from self.run(messages)
166
  return
167
- print("Final messages: ", messages)
168
  yield messages, gr.update(interactive=True)
 
116
  parts=parts
117
  ))
118
  return formatted_history
119
+
120
+ def ask_llm(self, messages):
121
+ yield from self.run(messages)
122
+
123
  def run(self, messages):
 
124
  chat_history = self.format_chat_history(messages)
125
  logger.debug(f"Chat history: {chat_history}")
126
  try:
 
135
  yield messages, gr.update(interactive=True)
136
  return
137
  logger.debug(f"Response: {response}")
 
138
 
139
  if (not response.text and not response.function_calls):
140
  messages.append({
 
165
  messages.append(calls)
166
  yield from self.run(messages)
167
  return
 
168
  yield messages, gr.update(interactive=True)