File size: 5,299 Bytes
a7c9a13
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
"""
This file is the main file for the hackathon.
It contains the Gradio interface for the hackathon as a MCP server.
It exposes the following tools:
- search_knowledge_base_for_context
- research_write_review_topic

"""
from dotenv import load_dotenv
load_dotenv()  # Load environment variables from .env file

import logging

# Configure logging to write to a file instead of stdout/stderr
# This avoids interference with the MCP communication channel
logging.basicConfig(
    filename='hackathon-mcp.log',  # Log to a file instead of stdout/stderr
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

import gradio as gr
from tools.rag_tools import search_groundx_for_rag_context
from tools.multi_agent_workflow_for_research import run_research_workflow

def search_knowledge_base_for_context(query: str) -> str:
    """
    Searches and retrieves relevant context from a knowledge base (GroundX),
    based on the user's query.

     Example queries:
     - "What are the main features of fuel system of SU-35."
     - "What are the combat potential of SU-35."

    Args:
        query: The search query supplied by the user.

    Returns:
        str: Relevant text content that can be used by the LLM to answer the
        query, or an "Error: ..." string if the lookup fails.
    """
    try:
        # Lazy %-style args defer formatting until the record is emitted.
        logger.info("Searching document for RAG context %s", query)
        response = search_groundx_for_rag_context(query)
        logger.info("RAG Response: %s", response)
        return response
    except Exception as e:
        # Mirror research_write_review_topic: log the traceback and return a
        # readable error string instead of letting a raw exception propagate
        # to the MCP client.
        logger.exception("RAG lookup failed for query: %s", query)
        return f"Error: {e}"

def research_write_review_topic(query: str) -> str:
    """
    Helps with writing a report with research, writing, and review on any topic.
    Returns a reviewed topic.

    The query is a string that contains the topic to be researched and reviewed.

    Example queries:
    - "Write me a report on the history of the internet."
    - "Write me a report on origin of the universe."
    - "Write me a report on the impact of climate change on polar bears."
    - "Write me a report on the benefits of meditation."
    - "Write me a report on the future of artificial intelligence."
    - "Write me a report on the effects of social media on mental health."

    Args:
        query (str): The query to research, write and review.

    Returns:
        str: A nicely formatted string, or an "Error: ..." string on failure.
    """
    try:
        # Lazy %-style args defer formatting until the record is emitted.
        logger.info("Researching the topic: %s", query)
        result = run_research_workflow(query)
        # The workflow may legitimately return an empty/None result.
        return result or "Research completed, but no content was generated."
    except Exception as e:
        # Log the full traceback for debugging; return a readable error
        # string so the MCP client is not handed a raw exception.
        logger.exception("Research workflow failed for query: %s", query)
        return f"Error: {e}"

# Static "Project Information" tab: a Markdown page describing the server,
# its tools, example queries, and a demo link.
with gr.Blocks() as server_info:
    gr.Markdown("""
    # MCP powered RAG and Research

    I present to you a MCP powered RAG and Research.

    RAG Tool uses GroundX service to fetch the knowledge base. The knowledge base is a document that contains information about the SU-35 aircraft, including its features, capabilities, and specifications.
    Please check [this PDF](https://airgroup2000.com/gallery/albums/userpics/32438/SU-35_TM_eng.pdf) to formulate queries on Sukhoi.

    The Research Tool is implemented using multi-agent workflow using LlamaIndex (ResearchAgent, WriteAgent, and ReviewAgent).

    ## Available Tools

    ### search_knowledge_base_for_context
    - **Description**: Searches and retrieves relevant context from a knowledge base based on the user's query.
    - **Example Queries**:
        - "What are the main features of fuel system of SU-35."
        - "What are the combat potential of SU-35."

    ### research_write_review_topic
    - **Description**: Helps with writing a report with research, writing, and review on any topic.
    - **Example Queries**:
        - "Write me a report on the history of the internet."
        - "Write me a report on origin of the universe."
        - "Write me a report on the impact of climate change on polar bears."

    ## How to Use
    - Use the MCP RAG Tool tab above to query the knowledge base.
    - Use the Research Tool tab above to write report on any topic.

    ## Demo Link
    [Link to Demo on Youtube](https://www.youtube.com/mcp-rag-research)
    """)

# Interactive tab for the RAG tool; the wrapped function is also exposed
# as an MCP tool when the app is launched with mcp_server=True.
mcp_rag_tool = gr.Interface(
    fn=search_knowledge_base_for_context,
    inputs=["text"],
    outputs=[gr.Textbox(label="Knowledge Base", max_lines=10)],
    title="MCP RAG Tool",
    description="Searches and retrieves relevant context from a knowledge base"
)

# Interactive tab for the multi-agent research/write/review workflow.
research_tool = gr.Interface(
    fn=research_write_review_topic,
    inputs=["text"],
    outputs=[gr.Textbox(label="Reviewed Topic", max_lines=10)],
    title="Research Tool",
    description="Helps with report writing with research, writing, and review agents on any topic. ",
    concurrency_limit=10  # allow up to 10 concurrent research runs
)

# Tab label -> interface mapping; dict insertion order fixes the tab order.
named_interfaces = {
    "Project Information": server_info,
    "RAG - Tool": mcp_rag_tool,
    "Research a Topic - Tool": research_tool
}

# Derive the parallel label/interface lists from the mapping above.
tab_names = [*named_interfaces]
interface_list = [named_interfaces[label] for label in tab_names]

# Bundle all tabs into the single app that is served as the MCP server.
mcp_server = gr.TabbedInterface(
    interface_list,
    tab_names=tab_names,
    title="πŸ‘ MCP powered RAG and Research 🌍"
)

# Launch the MCP Server
if __name__ == "__main__":
    # Enable request queuing so multiple clients can be served concurrently.
    mcp_server.queue(default_concurrency_limit=10)
    mcp_server.launch(
        server_name="0.0.0.0",  # listen on all network interfaces
        server_port=7860,
        share=False,  # no public Gradio share link
        debug=False,
        mcp_server=True  # expose the wrapped functions as MCP tools
    )