Update app.py
app.py CHANGED
@@ -412,160 +412,342 @@ llm_response_agent = LLMResponseAgent(message_bus)
coordinator_agent = CoordinatorAgent(message_bus)

def create_interface():
-    """Create Gradio interface"""
    with gr.Blocks(
-        theme=gr.themes.
        css="""
        .gradio-container {
        }
        }
        }
        }
        .chat-container {
            border-radius: 8px;
-            padding:
        }
        }
        }
        """,
-        title="Agentic RAG
    ) as iface:

        # Header
-        gr.
-        file_upload = gr.File(
-            file_count="multiple",
-            file_types=[".pdf", ".pptx", ".csv", ".docx", ".txt", ".md"],
-            label="Upload Documents (PDF, PPTX, CSV, DOCX, TXT, MD)",
-            elem_classes=["upload-section"]
-        )
-        upload_status = gr.Textbox(
-            label="Upload Status",
-            interactive=False,
-            max_lines=3
-        )
-        process_btn = gr.Button(
-            "Process Documents",
-            variant="primary",
-            size="lg"
-        )
-        gr.Markdown("## Architecture Info")
-        gr.Markdown("""
-        **Agents:**
-        - IngestionAgent: Document parsing
-        - RetrievalAgent: Semantic search
-        - LLMResponseAgent: Response generation
-        - CoordinatorAgent: Workflow orchestration
-        **MCP Communication:** Structured message passing between agents
        """)

        chatbot = gr.Chatbot(
            height=500,
-            elem_classes=["chat-container"],
            show_copy_button=True,
        )
        )
-        submit_btn = gr.Button("Send", scale=1, variant="primary")
-        gr.Examples(
-            examples=[
-                "What are the main topics discussed in the documents?",
-                "Can you summarize the key findings?",
-                "What metrics or KPIs are mentioned?",
-                "What recommendations are provided?",
-                "Are there any trends or patterns identified?"
-            ],
-            inputs=msg
-        )

        # Event handlers
-        def

            inputs=[file_upload],
-            outputs=[
        )

            respond,
-            inputs=[
-            outputs=[chatbot,
            show_progress=True
        )

            respond,
-            inputs=[
-            outputs=[chatbot,
            show_progress=True
        )

        return iface

# Launch the application
if __name__ == "__main__":
    demo = create_interface()
coordinator_agent = CoordinatorAgent(message_bus)

def create_interface():
+    """Create ChatGPT-style Gradio interface"""
+
    with gr.Blocks(
+        theme=gr.themes.Base(
+            primary_hue="yellow",
+            secondary_hue="gray",
+            neutral_hue="slate"
+        ),
        css="""
+        /* Dark theme styling */
        .gradio-container {
+            background-color: #1a1a1a !important;
+            color: #ffffff !important;
+            height: 100vh !important;
+            max-width: none !important;
+            padding: 0 !important;
        }
+
+        /* Main container */
+        .main-container {
+            display: flex;
+            flex-direction: column;
+            height: 100vh;
+            background: linear-gradient(135deg, #1a1a1a 0%, #2d2d2d 100%);
        }
+
+        /* Header */
+        .header {
+            background: rgba(255, 193, 7, 0.1);
+            border-bottom: 1px solid rgba(255, 193, 7, 0.2);
+            padding: 1rem 2rem;
+            backdrop-filter: blur(10px);
        }
+
+        .header h1 {
+            color: #ffc107;
+            margin: 0;
+            font-size: 1.5rem;
+            font-weight: 600;
        }
+
+        .header p {
+            color: #cccccc;
+            margin: 0.25rem 0 0 0;
+            font-size: 0.9rem;
+        }
+
+        /* Chat area */
        .chat-container {
+            flex: 1;
+            display: flex;
+            flex-direction: column;
+            max-width: 800px;
+            margin: 0 auto;
+            width: 100%;
+            padding: 1rem;
+        }
+
+        /* Chatbot styling */
+        .gradio-chatbot {
+            flex: 1 !important;
+            background: transparent !important;
+            border: none !important;
+            margin-bottom: 1rem;
+        }
+
+        .gradio-chatbot .message {
+            background: rgba(45, 45, 45, 0.8) !important;
+            border: 1px solid rgba(255, 193, 7, 0.1) !important;
+            border-radius: 12px !important;
+            margin: 0.5rem 0 !important;
+        }
+
+        .gradio-chatbot .message.user {
+            background: rgba(255, 193, 7, 0.1) !important;
+            border-color: rgba(255, 193, 7, 0.3) !important;
+            margin-left: 20% !important;
+        }
+
+        .gradio-chatbot .message.bot {
+            background: rgba(45, 45, 45, 0.9) !important;
+            margin-right: 20% !important;
+        }
+
+        /* Input area */
+        .input-area {
+            background: rgba(45, 45, 45, 0.6);
+            border-radius: 16px;
+            padding: 1rem;
+            border: 1px solid rgba(255, 193, 7, 0.2);
+            backdrop-filter: blur(10px);
+        }
+
+        /* File upload */
+        .upload-area {
+            background: rgba(255, 193, 7, 0.05);
+            border: 2px dashed rgba(255, 193, 7, 0.3);
+            border-radius: 12px;
+            padding: 1rem;
+            margin-bottom: 1rem;
+            transition: all 0.3s ease;
+        }
+
+        .upload-area:hover {
+            background: rgba(255, 193, 7, 0.1);
+            border-color: rgba(255, 193, 7, 0.5);
+        }
+
+        /* Buttons */
+        .primary-btn {
+            background: linear-gradient(135deg, #ffc107 0%, #ff8f00 100%) !important;
+            color: #000000 !important;
+            border: none !important;
+            border-radius: 8px !important;
+            font-weight: 600 !important;
+            transition: all 0.3s ease !important;
+        }
+
+        .primary-btn:hover {
+            background: linear-gradient(135deg, #ffcd38 0%, #ffa726 100%) !important;
+            transform: translateY(-1px);
+            box-shadow: 0 4px 12px rgba(255, 193, 7, 0.3);
+        }
+
+        /* Text inputs */
+        .gradio-textbox input, .gradio-textbox textarea {
+            background: rgba(45, 45, 45, 0.8) !important;
+            color: #ffffff !important;
+            border: 1px solid rgba(255, 193, 7, 0.2) !important;
+            border-radius: 8px !important;
+        }
+
+        .gradio-textbox input:focus, .gradio-textbox textarea:focus {
+            border-color: #ffc107 !important;
+            box-shadow: 0 0 0 2px rgba(255, 193, 7, 0.2) !important;
+        }
+
+        /* File component */
+        .gradio-file {
+            background: transparent !important;
+            border: none !important;
+        }
+
+        /* Processing indicator */
+        .processing-indicator {
+            background: rgba(255, 193, 7, 0.1);
+            border: 1px solid rgba(255, 193, 7, 0.3);
            border-radius: 8px;
+            padding: 0.75rem;
+            margin: 0.5rem 0;
+            color: #ffc107;
+            text-align: center;
+            animation: pulse 2s infinite;
+        }
+
+        @keyframes pulse {
+            0%, 100% { opacity: 1; }
+            50% { opacity: 0.7; }
        }
+
+        /* Hide labels for cleaner look */
+        .gradio-textbox label,
+        .gradio-file label {
+            display: none !important;
+        }
+
+        /* Scrollbar styling */
+        ::-webkit-scrollbar {
+            width: 8px;
+        }
+
+        ::-webkit-scrollbar-track {
+            background: rgba(45, 45, 45, 0.3);
+            border-radius: 4px;
        }
+
+        ::-webkit-scrollbar-thumb {
+            background: rgba(255, 193, 7, 0.5);
+            border-radius: 4px;
+        }
+
+        ::-webkit-scrollbar-thumb:hover {
+            background: rgba(255, 193, 7, 0.7);
        }
        """,
+        title="Agentic RAG Assistant"
    ) as iface:

        # Header
+        with gr.Row(elem_classes=["header"]):
+            with gr.Column():
+                gr.HTML("""
+                <div style="display: flex; align-items: center; gap: 1rem;">
+                    <div>
+                        <h1>🤖 Agentic RAG Assistant</h1>
+                        <p>Upload documents and ask questions - powered by Multi-Agent Architecture</p>
+                    </div>
+                </div>
                """)

+        # Main chat container
+        with gr.Row(elem_classes=["chat-container"]):
+            with gr.Column():
+
+                # Chatbot
                chatbot = gr.Chatbot(
+                    value=[],
                    height=500,
                    show_copy_button=True,
+                    bubble_full_width=False,
+                    elem_classes=["gradio-chatbot"]
                )
+
+                # Input area
+                with gr.Column(elem_classes=["input-area"]):
+
+                    # File upload (initially hidden, shows when needed)
+                    file_upload = gr.File(
+                        file_count="multiple",
+                        file_types=[".pdf", ".pptx", ".csv", ".docx", ".txt", ".md"],
+                        elem_classes=["upload-area"],
+                        visible=True
+                    )
+
+                    # Processing status
+                    processing_status = gr.HTML(visible=False)
+
+                    # Message input row
+                    with gr.Row():
+                        msg_input = gr.Textbox(
+                            placeholder="Upload documents above, then ask your questions here...",
+                            scale=5,
+                            max_lines=3,
+                            autofocus=True
+                        )
+                        send_btn = gr.Button(
+                            "Send",
+                            scale=1,
+                            elem_classes=["primary-btn"]
+                        )
+
+                    # Quick examples
+                    gr.Examples(
+                        examples=[
+                            "What are the main topics in the documents?",
+                            "Summarize the key findings",
+                            "What metrics are mentioned?",
+                            "What are the recommendations?"
+                        ],
+                        inputs=msg_input,
+                        elem_classes=["examples"]
                    )

+        # State to track document processing
+        doc_processed = gr.State(False)
+
        # Event handlers
+        def handle_file_upload(files):
+            if not files:
+                return gr.update(visible=False), False
+
+            # Show processing indicator
+            processing_html = f"""
+            <div class="processing-indicator">
+                📄 Processing {len(files)} documents... Please wait.
+            </div>
+            """
+
+            # Process files
+            try:
+                result = coordinator_agent.process_files(files)
+
+                # Wait a moment for processing to complete
+                import time
+                time.sleep(2)
+
+                success_html = f"""
+                <div style="background: rgba(76, 175, 80, 0.1); border: 1px solid rgba(76, 175, 80, 0.3);
+                            border-radius: 8px; padding: 0.75rem; color: #4caf50; text-align: center;">
+                    ✅ Documents processed successfully! You can now ask questions.
+                </div>
+                """
+                return gr.update(value=success_html, visible=True), True
+
+            except Exception as e:
+                error_html = f"""
+                <div style="background: rgba(244, 67, 54, 0.1); border: 1px solid rgba(244, 67, 54, 0.3);
+                            border-radius: 8px; padding: 0.75rem; color: #f44336; text-align: center;">
+                    ❌ Error processing documents: {str(e)}
+                </div>
+                """
+                return gr.update(value=error_html, visible=True), False
+
+        def respond(message, history, doc_ready):
+            if not doc_ready:
+                return history + [["Please upload and process documents first.", None]], ""
+
+            if not message.strip():
+                return history, message
+
+            # Add user message
+            history.append([message, None])
+
+            # Generate response
+            response = ""
+            for token in coordinator_agent.handle_query(message, history):
+                response += token
+                history[-1][1] = response
+                yield history, ""
+
+            return history, ""

+        # File upload triggers processing
+        file_upload.change(
+            handle_file_upload,
            inputs=[file_upload],
+            outputs=[processing_status, doc_processed]
        )

+        # Send message
+        send_btn.click(
            respond,
+            inputs=[msg_input, chatbot, doc_processed],
+            outputs=[chatbot, msg_input],
            show_progress=True
        )

+        msg_input.submit(
            respond,
+            inputs=[msg_input, chatbot, doc_processed],
+            outputs=[chatbot, msg_input],
            show_progress=True
        )

        return iface

# Launch the application
if __name__ == "__main__":
    demo = create_interface()
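A note on the handle_file_upload() handler added above: processing_html is built but never returned, so the .processing-indicator rule and pulse animation added to the CSS are never shown; a plain (non-generator) handler delivers its outputs only once, when it returns. A minimal sketch of the same handler written as a generator, which would let an intermediate status reach the processing_status HTML component before the final result, assuming coordinator_agent.process_files() behaves as in the diff:

def handle_file_upload(files):
    # Generator variant: the first yield shows the pulsing indicator,
    # the later yields replace it with the final status.
    if not files:
        yield gr.update(visible=False), False
        return
    yield gr.update(
        value=f'<div class="processing-indicator">📄 Processing {len(files)} documents... Please wait.</div>',
        visible=True,
    ), False
    try:
        coordinator_agent.process_files(files)  # assumed from the diff
        yield gr.update(value="✅ Documents processed successfully! You can now ask questions.", visible=True), True
    except Exception as e:
        yield gr.update(value=f"❌ Error processing documents: {e}", visible=True), False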
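Similarly for respond(): because its body contains a yield, Python treats it as a generator function, and Gradio streams only the values it yields; a value handed to a plain return inside a generator is not delivered as an update. As written, the two early-exit branches (no documents processed, empty message) therefore produce no visible change in the UI. A minimal sketch of those guards rewritten to yield before returning, keeping the commit's (chatbot, textbox) output shape and its coordinator_agent.handle_query() streaming call:

def respond(message, history, doc_ready):
    # Generator handler: Gradio streams each yielded (chatbot, textbox) pair.
    if not doc_ready:
        # Yield the hint so it actually reaches the chatbot, then stop.
        yield history + [["Please upload and process documents first.", None]], ""
        return

    if not message.strip():
        yield history, message
        return

    # Add the user message, then stream the agent's tokens into the last turn.
    history.append([message, None])
    response = ""
    for token in coordinator_agent.handle_query(message, history):  # streaming generator from the diff
        response += token
        history[-1][1] = response
        yield history, ""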