FractalAIR committed on
Commit
b10c582
·
verified ·
1 Parent(s): 23dc432

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -1
app.py CHANGED
@@ -72,7 +72,7 @@ def generate_response(user_message, max_tokens, temperature, top_p, history_stat
72
  {"role": "user", "content": user_message},
73
  {"role": "assistant", "content": ""}
74
  ]
75
-
76
  try:
77
  for new_token in streamer:
78
  if "<|end" in new_token:
@@ -84,6 +84,23 @@ def generate_response(user_message, max_tokens, temperature, top_p, history_stat
84
  except Exception:
85
  pass
86
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  yield new_history, new_history
88
 
89
  example_messages = {
 
72
  {"role": "user", "content": user_message},
73
  {"role": "assistant", "content": ""}
74
  ]
75
+ '''
76
  try:
77
  for new_token in streamer:
78
  if "<|end" in new_token:
 
84
  except Exception:
85
  pass
86
 
87
+ yield new_history, new_history'''
88
+
89
+ try:
90
+ for new_token in streamer:
91
+ if "<|end" in new_token:
92
+ continue
93
+ cleaned_token = new_token.replace("<|im_start|>", "").replace("<|im_sep|>", "").replace("<|im_end|>", "")
94
+ assistant_response += cleaned_token
95
+ new_history[-1]["content"] = assistant_response.strip()
96
+ yield new_history, new_history
97
+ except Exception:
98
+ new_history[-1]["content"] += "\n⚠️ Incomplete response due to generation limit or stream error."
99
+ yield new_history, new_history
100
+ return
101
+
102
+ # Final yield to clean up in case of clean stop
103
+ new_history[-1]["content"] = assistant_response.strip()
104
  yield new_history, new_history
105
 
106
  example_messages = {