Update app.py
app.py (CHANGED)
@@ -22,8 +22,7 @@ def log(msg: str):
         with open(LOG_FILE, "a") as f:
             f.write(line + "\n")
     except Exception:
-        #
-        pass
+        pass  # tolerate logging failures

 # ---------------------------------------------------------------------------
 # 1. Configuration constants
@@ -122,25 +121,22 @@ def build_prompt(raw_history: list[dict]) -> str:
 # ---------------------------------------------------------------------------
 def chat_fn(user_msg: str, display_history: list, state: dict):
     user_msg = strip(user_msg or "")
-    # Yield nothing if empty input
     if not user_msg:
         yield display_history, state
         return

-    # Input length check
     if len(user_msg) > MAX_INPUT_CH:
         display_history.append((user_msg, f"Input >{MAX_INPUT_CH} chars."))
         yield display_history, state
         return

-    # Model load error
     if MODEL_ERR:
         display_history.append((user_msg, MODEL_ERR))
         yield display_history, state
         return

     try:
-        #
+        # record user
         state["raw"].append({"role": "user", "content": user_msg})
         display_history.append((user_msg, ""))

@@ -148,28 +144,25 @@ def chat_fn(user_msg: str, display_history: list, state: dict):
         start = time.time()
         partial = ""

-        #
+        # stream chunks
         for chunk in generator(prompt):
             try:
                 new_text = strip(chunk.get("generated_text", ""))
-                # Truncate any hallucinated next-turn
                 if "User:" in new_text:
                     new_text = new_text.split("User:", 1)[0].strip()
                 partial += new_text
                 display_history[-1] = (user_msg, partial)
                 yield display_history, state
             except Exception:
-                # Skip malformed chunk but keep streaming
                 log("Malformed chunk:\n" + traceback.format_exc())
                 continue

-        #
+        # finalize
         full_reply = display_history[-1][1]
         state["raw"].append({"role": "assistant", "content": full_reply})
-        log(f"Reply in {time.time()-start:.2f}s ({len(full_reply)} chars)")
+        log(f"Reply in {time.time() - start:.2f}s ({len(full_reply)} chars)")

     except Exception:
-        # Catch-all for any unexpected errors in chat flow
         log("Unexpected chat_fn error:\n" + traceback.format_exc())
         err = "Apologies—an internal error occurred. Please try again."
         display_history[-1] = (user_msg, err)
@@ -183,9 +176,10 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
     gr.Markdown("### SchoolSpirit AI Chat")

     chatbot = gr.Chatbot(
-        value=[
+        value=[{"role":"assistant","content":WELCOME_MSG}],
         height=480,
         label="SchoolSpirit AI",
+        type="messages",  # use the new messages format
     )

     state = gr.State(
@@ -199,12 +193,12 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
     txt = gr.Textbox(placeholder="Type your question here…", show_label=False, scale=4, lines=1)
     send_btn = gr.Button("Send", variant="primary")

-    #
-    send_btn.click(chat_fn, inputs=[txt, chatbot, state], outputs=[chatbot, state],
-    txt.submit(chat_fn, inputs=[txt, chatbot, state], outputs=[chatbot, state],
+    # Use streaming=True (not stream) per Gradio API
+    send_btn.click(chat_fn, inputs=[txt, chatbot, state], outputs=[chatbot, state], streaming=True)
+    txt.submit(chat_fn, inputs=[txt, chatbot, state], outputs=[chatbot, state], streaming=True)

 if __name__ == "__main__":
     try:
         demo.launch()
-    except Exception
-        log(
+    except Exception:
+        log("UI launch error:\n" + traceback.format_exc())