Change chatbot
app.py CHANGED
@@ -518,11 +518,15 @@ def create_interface():
         with gr.Row():
             with gr.Column(scale=3):
                 gr.Markdown("**Note**: You are chatting with the currently loaded model. If you've just completed processing, you're testing the modified model. To test the original model, reload it in the Model Processing tab.")
-
-
-
-
+
+                # Use Textbox instead of Chatbot for better compatibility
+                chat_display = gr.Textbox(
+                    label="Chat History",
+                    lines=20,
+                    interactive=False,
+                    value="Chat history will appear here..."
                 )
+
                 msg = gr.Textbox(
                     label="Input Message",
                     placeholder="Enter your question...",
@@ -577,30 +581,42 @@ def create_interface():
                 outputs=[process_output, process_image]
             )

-            # Chat functionality with
-            def user(user_message,
-
+            # Chat functionality with simple text display
+            def user(user_message, chat_history):
+                if chat_history == "Chat history will appear here...":
+                    chat_history = ""
+                new_history = chat_history + f"\n\n👤 User: {user_message}"
+                return "", new_history

-            def bot(
-
-
-
+            def bot(chat_history, max_new_tokens, temperature):
+                # Extract the last user message
+                lines = chat_history.split('\n')
+                user_message = None
+                for line in reversed(lines):
+                    if line.startswith('👤 User: '):
+                        user_message = line[9:]  # Remove "👤 User: " prefix
+                        break
+
+                if user_message:
+                    # Get complete response
+                    response, _ = processor.chat(user_message, [], max_new_tokens, temperature)
                     print(f"DEBUG: Bot function received response: {response[:200]}...")
                     print(f"DEBUG: Bot function full response: {response}")
-
-
-
-
+
+                    # Add assistant response to chat history
+                    new_history = chat_history + f"\n\n🤖 Assistant: {response}"
+                    return new_history
+                return chat_history

-            msg.submit(user, [msg,
-            bot, [
+            msg.submit(user, [msg, chat_display], [msg, chat_display], queue=False).then(
+                bot, [chat_display, max_new_tokens, temperature], chat_display
             )

-            send_btn.click(user, [msg,
-            bot, [
+            send_btn.click(user, [msg, chat_display], [msg, chat_display], queue=False).then(
+                bot, [chat_display, max_new_tokens, temperature], chat_display
             )

-            clear.click(lambda:
+            clear.click(lambda: "Chat history will appear here...", None, chat_display, queue=False)

             # Bind organization selection event
             export_to_org.change(
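
For reference, below is a minimal, self-contained sketch of the pattern this commit adopts: the whole conversation lives in a read-only gr.Textbox as one plain string, a `user` callback appends the user turn and clears the input, and a chained `.then()` call runs a `bot` callback that parses the last user turn back out and appends the reply. The `fake_chat` stub, slider ranges, and button labels are illustrative assumptions standing in for the Space's own `processor.chat`, `max_new_tokens`, `temperature`, `send_btn`, and `clear`; only the wiring mirrors the added lines above.

import gradio as gr

PLACEHOLDER = "Chat history will appear here..."

def fake_chat(message, max_new_tokens, temperature):
    # Stand-in for the Space's processor.chat(); just echoes the input.
    return f"(echo, max_new_tokens={max_new_tokens}, temperature={temperature}) {message}"

def user(user_message, chat_history):
    # Append the user turn to the plain-text transcript and clear the input box.
    if chat_history == PLACEHOLDER:
        chat_history = ""
    return "", chat_history + f"\n\n👤 User: {user_message}"

def bot(chat_history, max_new_tokens, temperature):
    # Recover the most recent user turn from the transcript, then append the reply.
    user_message = None
    for line in reversed(chat_history.split("\n")):
        if line.startswith("👤 User: "):
            user_message = line.removeprefix("👤 User: ")
            break
    if not user_message:
        return chat_history
    response = fake_chat(user_message, max_new_tokens, temperature)
    return chat_history + f"\n\n🤖 Assistant: {response}"

with gr.Blocks() as demo:
    chat_display = gr.Textbox(label="Chat History", lines=20,
                              interactive=False, value=PLACEHOLDER)
    msg = gr.Textbox(label="Input Message", placeholder="Enter your question...")
    send_btn = gr.Button("Send")
    clear = gr.Button("Clear")
    max_new_tokens = gr.Slider(1, 1024, value=256, step=1, label="Max New Tokens")
    temperature = gr.Slider(0.1, 2.0, value=0.7, label="Temperature")

    # Two-step wiring, as in the diff: `user` updates the transcript immediately
    # (queue=False), then `bot` generates and appends the assistant turn.
    msg.submit(user, [msg, chat_display], [msg, chat_display], queue=False).then(
        bot, [chat_display, max_new_tokens, temperature], chat_display
    )
    send_btn.click(user, [msg, chat_display], [msg, chat_display], queue=False).then(
        bot, [chat_display, max_new_tokens, temperature], chat_display
    )
    clear.click(lambda: PLACEHOLDER, None, chat_display, queue=False)

if __name__ == "__main__":
    demo.launch()

Keeping the transcript as a plain string sidesteps version-specific gr.Chatbot message formats (the stated motivation, "better compatibility"), at the cost of re-parsing the last user turn by its "👤 User: " prefix before calling the model.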