File size: 5,736 Bytes
a7c9a13 7ca8f3d a7c9a13 7ca8f3d c1c0be5 a7c9a13 7ca8f3d a7c9a13 a7dea26 7ca8f3d a7dea26 396dc0c 7ca8f3d a7dea26 0672ab4 396dc0c 7ca8f3d a7c9a13 a7dea26 7ca8f3d a7dea26 396dc0c 7ca8f3d a7dea26 0672ab4 396dc0c c7258a9 7ca8f3d a7c9a13 baf5524 a7c9a13 baf5524 c1c0be5 a7c9a13 7ca8f3d a7c9a13 c1c0be5 7ca8f3d d415c32 a7c9a13 7ca8f3d a7c9a13 7ca8f3d a7c9a13 7ca8f3d a7c9a13 7ca8f3d a7c9a13 7ca8f3d a7c9a13 a7dea26 a7c9a13 8b8246a a7c9a13 a7dea26 a7c9a13 a7dea26 a7c9a13 833b23c a7c9a13 baf5524 a7c9a13 c99d8d7 a7c9a13 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 |
"""
This file is the main file for the hackathon.
It contains the Gradio interface for the hackathon as an MCP server.
It exposes the following tools:
- search_knowledge_base_for_context
- research_write_review_topic
"""
import os
import requests
import gradio as gr
def search_knowledge_base_for_context(query: str) -> str:
    """
    Searches and retrieves relevant context from a knowledge base on Sukhoi SU-35,
    based on the user's query.

    The retrieval itself runs remotely: the query is POSTed to a Modal Labs
    endpoint configured via the MODAL_LABS_HACKATHON_RAG_TOOLS_URL env var.

    Example queries:
    - "What are the main features of fuel system of SU-35."
    - "What is the combat potential of SU-35."

    Args:
        query: The search query supplied by the user.

    Returns:
        str: Relevant text content that can be used by the LLM to answer the
        query, or a fallback message when the endpoint is unreachable,
        unconfigured, or returns an error.
    """
    print(f"Searching knowledge base for RAG context via modal labs: {query}")
    modal_url = os.getenv("MODAL_LABS_HACKATHON_RAG_TOOLS_URL")
    # Fail gracefully when the endpoint is not configured: requests.post(None)
    # would otherwise raise MissingSchema and crash the tool call.
    if not modal_url:
        print("MODAL_LABS_HACKATHON_RAG_TOOLS_URL is not set.")
        return "No relevant information found from the knowledge base."
    data = {"query": query}
    try:
        response = requests.post(modal_url, json=data, timeout=600.0)
    except requests.RequestException as exc:
        # Timeouts / connection failures surface as the standard fallback
        # message instead of an unhandled exception in the MCP server.
        print(f"Error calling modal RAG endpoint: {exc}")
        return "No relevant information found from the knowledge base."
    if response.status_code != 200:
        print(f"Error in modal RAG response: {response.status_code} - {response.text}")
        return "No relevant information found from the knowledge base."
    print(f"modal RAG Response: {response.text}")
    # Guard against an empty 200 body as well.
    return response.text or "No relevant information found"
def research_write_review_topic(query: str) -> str:
    """
    Helps with writing a report with research, writing, and review on any topic.
    Returns a reviewed topic.

    The work runs remotely: the query is POSTed to a Modal Labs endpoint
    configured via the MODAL_LABS_HACKATHON_RESEARCH_TOOLS_URL env var.

    The query is a string that contains the topic to be researched and reviewed.
    Example queries:
    - "Write me a report on the history of the internet."
    - "Write me a report on origin of the universe."
    - "Write me a report on the impact of climate change on polar bears."
    - "Write me a report on the benefits of meditation."
    - "Write me a report on the future of artificial intelligence."
    - "Write me a report on the effects of social media on mental health."

    Args:
        query (str): The query to research, write and review.

    Returns:
        str: A nicely formatted string, or a fallback message when the endpoint
        is unreachable, unconfigured, or returns an error.
    """
    print(f"Researching the topic via modal labs: {query}")
    modal_url = os.getenv("MODAL_LABS_HACKATHON_RESEARCH_TOOLS_URL")
    # Fail gracefully when the endpoint is not configured: requests.post(None)
    # would otherwise raise MissingSchema and crash the tool call.
    if not modal_url:
        print("MODAL_LABS_HACKATHON_RESEARCH_TOOLS_URL is not set.")
        return "Research completed, but no content was generated."
    data = {"query": query}
    try:
        response = requests.post(modal_url, json=data, timeout=600.0)
    except requests.RequestException as exc:
        # Timeouts / connection failures surface as the standard fallback
        # message instead of an unhandled exception in the MCP server.
        print(f"Error calling modal RESEARCH endpoint: {exc}")
        return "Research completed, but no content was generated."
    if response.status_code != 200:
        print(f"Error in modal RESEARCH response: {response.status_code} - {response.text}")
        return "Research completed, but no content was generated."
    print(f"modal RESEARCH Response: {response.text}")
    # Guard against an empty 200 body as well.
    return response.text or "Research completed, but no content was generated."
# Static "Project Information" page: a Markdown-only Gradio Blocks layout that
# documents the server, its two tools, example queries, and the demo video.
# It is mounted as the first tab of the TabbedInterface below.
with gr.Blocks() as server_info:
    gr.Markdown("""
# π MCP Powered RAG and Research Topic π
I present to you an π MCP-powered RAG and Research Topic π.
The Tools are hosted and executed on **Modal Labs** platform.
RAG Tool uses the **GroundX** storage by **eyelevel.ai** to fetch the knowledge base. The knowledge base is a document that contains information about the SU-35 aircraft, including its features, capabilities, and specifications. Please check [this PDF](https://airgroup2000.com/gallery/albums/userpics/32438/SU-35_TM_eng.pdf) to formulate queries on Sukhoi.
The Research Tool is implemented using a Multi-Agent Workflow using **LlamaIndex**.
<br>
The Agents use **Nebius** provided LLM (meta-llama/Meta-Llama-3.1-8B-Instruct).
## Available Tools
### search_knowledge_base_for_context
- **Description**: Searches and retrieves relevant context from a knowledge base based on the user's query.
- **Example Queries**:
    - "What are the main features of fuel system of SU-35?"
    - "What is the combat potential of SU-35?"
### research_write_review_topic
- **Description**: Helps with writing a report with research, writing, and review on any topic.
- **Example Queries**:
    - "Write me a report on the history of the internet."
    - "Write me a report on origin of the universe."
    - "Write me a report on the impact of climate change on polar bears."
## How to Use
- Use the MCP RAG Tool tab above to query the knowledge base.
- Use the Research Tool tab above to write a report on any topic.
## Watch the Demo Video here
[Link to Demo on Youtube](https://youtu.be/wvHBqW2ABGg)
""")
# UI wrapper for the RAG tool: a simple text-in / text-out Interface.
# Gradio also derives the MCP tool schema from the wrapped function.
mcp_rag_tool = gr.Interface(
    fn=search_knowledge_base_for_context,
    inputs=["text"],
    outputs=[gr.Textbox(label="Knowledge Base", max_lines=15)],
    title="MCP RAG Tool",
    description="Searches and retrieves relevant context from a knowledge base",
    concurrency_limit=1  # serialize calls to the remote Modal backend
)
# UI wrapper for the research/write/review tool, same text-in / text-out shape.
research_tool = gr.Interface(
    fn=research_write_review_topic,
    inputs=["text"],
    outputs=[gr.Textbox(label="Reviewed Topic", max_lines=15)],
    title="Research Tool",
    description="Helps with report writing with research, writing, and review agents on any topic. ",
    concurrency_limit=1  # serialize calls to the remote Modal backend
)
# Tab label -> interface mapping; dict insertion order fixes the tab order.
named_interfaces = {
    "Project Information": server_info,
    "Search Knowledge Base": mcp_rag_tool,
    "Research a Topic": research_tool,
}

# Assemble the tabbed app straight from the mapping — the values become the
# tab contents and the keys become the tab labels, in the same order.
mcp_server = gr.TabbedInterface(
    list(named_interfaces.values()),
    tab_names=list(named_interfaces.keys()),
    title="π MCP-Powered RAG and Research Topic π"
)
# Launch the MCP Server
if __name__ == "__main__":
    # Queue with a concurrency limit of 1 so the Modal-backed tools are never
    # invoked in parallel.
    mcp_server.queue(default_concurrency_limit=1)
    mcp_server.launch(
        server_name="0.0.0.0",  # bind all interfaces (container/Spaces friendly)
        server_port=7860,       # standard Gradio port
        share=False,
        debug=False,
        mcp_server=True         # expose the wrapped functions as MCP tools
    )