ybelkada committed
Commit 5d625d1
1 Parent(s): 1e67058

Update app.py

Files changed (1)
  1. app.py +0 -21
app.py CHANGED
@@ -49,7 +49,6 @@ header = "A chat between a curious human and an artificial intelligence assistan
 prompt_template = "### Human: {query} ### Assistant:{response}"
 
 def generate(
-    system_message,
     user_message,
     chatbot,
     history,
@@ -58,7 +57,6 @@ def generate(
     top_p,
     max_new_tokens,
     repetition_penalty,
-    do_save=True,
 ):
     # Don't return meaningless message when the input is empty
     if not user_message:
@@ -115,7 +113,6 @@ def generate(
 
     output = ""
     for idx, response in enumerate(stream):
-        print(f'step {idx} - {response.token.text}')
         if response.token.text == '':
             break
 
@@ -173,18 +170,6 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
     """
     )
 
-    with gr.Row():
-        do_save = gr.Checkbox(
-            value=True,
-            label="Store data",
-            info="You agree to the storage of your prompt and generated text for research and development purposes:",
-        )
-    with gr.Accordion(label="System Prompt", open=False, elem_id="parameters-accordion"):
-        system_message = gr.Textbox(
-            elem_id="system-message",
-            placeholder="Below is a conversation between a human user and a helpful AI coding assistant.",
-            show_label=False,
-        )
     with gr.Row():
         with gr.Box():
             output = gr.Markdown()
@@ -254,13 +239,11 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
     )
 
     history = gr.State([])
-    # To clear out "message" input textbox and use this to regenerate message
     last_user_message = gr.State("")
 
     user_message.submit(
         generate,
         inputs=[
-            system_message,
             user_message,
             chatbot,
             history,
@@ -269,7 +252,6 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
             top_p,
             max_new_tokens,
             repetition_penalty,
-            do_save,
         ],
         outputs=[chatbot, history, last_user_message, user_message],
     )
@@ -277,7 +259,6 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
     send_button.click(
         generate,
         inputs=[
-            system_message,
             user_message,
             chatbot,
             history,
@@ -286,12 +267,10 @@ with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
             top_p,
             max_new_tokens,
             repetition_penalty,
-            do_save,
         ],
         outputs=[chatbot, history, last_user_message, user_message],
     )
 
     clear_chat_button.click(clear_chat, outputs=[chatbot, history])
-    # share_button.click(None, [], [], _js=share_js)
 
     demo.queue(concurrency_count=16).launch(debug=True)
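
For context, the sketch below shows roughly how generate and the Gradio event wiring look after this commit, i.e. without system_message, do_save, the "Store data" checkbox, and the "System Prompt" accordion. It is a minimal, self-contained approximation rather than the actual app.py: the widget definitions (the Chatbot, sliders, buttons), the temperature input (hidden by the diff context), the clear_chat helper, and the body of generate are assumed for illustration; only the signature and the submit/click wiring follow the diff.

# Minimal sketch of the post-commit wiring (assumed widget definitions; only
# the generate signature and the submit/click hookup mirror the diff above).
import gradio as gr

prompt_template = "### Human: {query} ### Assistant:{response}"

def generate(
    user_message,
    chatbot,
    history,
    temperature,  # assumed: not visible in the diff context
    top_p,
    max_new_tokens,
    repetition_penalty,
):
    # Don't return meaningless message when the input is empty
    if not user_message:
        return chatbot, history, "", ""
    # The real app streams tokens from an inference endpoint; this stub just
    # echoes the prompt template so the sketch stays runnable.
    response = prompt_template.format(query=user_message, response=" (stubbed reply)")
    chatbot = chatbot + [(user_message, response)]
    history = history + [user_message, response]
    return chatbot, history, user_message, ""

def clear_chat():
    # assumed helper: reset the chatbot display and the history state
    return [], []

with gr.Blocks(analytics_enabled=False) as demo:
    with gr.Row():
        with gr.Box():  # gr.Box matches the Gradio 3.x API used by the original file
            output = gr.Markdown()
            chatbot = gr.Chatbot(label="Chat")
    user_message = gr.Textbox(placeholder="Ask me anything!", label="Your message")
    send_button = gr.Button("Send")
    clear_chat_button = gr.Button("Clear chat")
    temperature = gr.Slider(0.0, 2.0, value=0.2, label="Temperature")
    top_p = gr.Slider(0.0, 1.0, value=0.95, label="Top-p")
    max_new_tokens = gr.Slider(1, 1024, value=256, step=1, label="Max new tokens")
    repetition_penalty = gr.Slider(1.0, 2.0, value=1.2, label="Repetition penalty")

    history = gr.State([])
    last_user_message = gr.State("")

    # After this commit, submit and click take the same reduced input list
    # (no system_message, no do_save).
    shared_inputs = [
        user_message,
        chatbot,
        history,
        temperature,
        top_p,
        max_new_tokens,
        repetition_penalty,
    ]
    user_message.submit(
        generate,
        inputs=shared_inputs,
        outputs=[chatbot, history, last_user_message, user_message],
    )
    send_button.click(
        generate,
        inputs=shared_inputs,
        outputs=[chatbot, history, last_user_message, user_message],
    )
    clear_chat_button.click(clear_chat, outputs=[chatbot, history])

demo.queue(concurrency_count=16).launch(debug=True)

Since both user_message.submit and send_button.click now receive the same reduced input list, the sketch factors it into a single shared_inputs variable; the actual file simply repeats the list inline, as the diff shows.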
 