ianeksdi committed
Commit 0c08de9 · verified · 1 Parent(s): 987b836

Update app.py

Files changed (1):
  app.py +9 -2
app.py CHANGED
@@ -18,6 +18,7 @@ def respond(message, history: list[tuple[str, str]]):
     messages.append({"role": "user", "content": message})
 
     response = ""
+    previous_response = ""
     for token_message in client.chat_completion(
         messages,
         max_tokens=max_tokens,
@@ -25,12 +26,18 @@ def respond(message, history: list[tuple[str, str]]):
         temperature=temperature,
         top_p=top_p,
     ):
-        # Retrieve token content; if empty, break the loop
         token = token_message.choices[0].delta.get("content", "")
         if not token:
             break
         response += token
-        yield response
+        # Only yield if new content was added
+        if response != previous_response:
+            yield response
+            previous_response = response
+
+        # Optional: break out if the response is too long to avoid infinite loops
+        if len(response) > 3000:  # adjust threshold as needed
+            break
 
 demo = gr.ChatInterface(respond)