import gradio as gr
import os
import json
import uuid
import time
import asyncio
from datetime import datetime
from typing import List, Dict, Any, Optional, Generator
import logging

# Import required libraries
from huggingface_hub import InferenceClient
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.docstore.document import Document

# Import document parsers
import PyPDF2
from pptx import Presentation
import pandas as pd
from docx import Document as DocxDocument
import io

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Get HuggingFace token from environment.
# NOTE(review): the env var name is lowercase "hf_token" — confirm this matches
# the deployment environment (HF Spaces secrets are commonly upper-case).
HF_TOKEN = os.getenv("hf_token")
if not HF_TOKEN:
    raise ValueError("HuggingFace token not found in environment variables")

# Initialize HuggingFace Inference Client
client = InferenceClient(model="meta-llama/Llama-3.1-8B-Instruct", token=HF_TOKEN)

# Initialize embeddings
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")


class MCPMessage:
    """Model Context Protocol Message Structure.

    A simple envelope routed between agents over the in-process MessageBus.
    """

    def __init__(self, sender: str, receiver: str, msg_type: str,
                 trace_id: str = None, payload: Dict = None):
        self.sender = sender
        self.receiver = receiver
        self.type = msg_type
        # Mint a fresh trace id when the caller supplies none, so every
        # message chain for one user action can be correlated in logs.
        self.trace_id = trace_id or str(uuid.uuid4())
        self.payload = payload or {}
        self.timestamp = datetime.now().isoformat()

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the message to a plain dict (for logging/inspection)."""
        return {
            "sender": self.sender,
            "receiver": self.receiver,
            "type": self.type,
            "trace_id": self.trace_id,
            "payload": self.payload,
            "timestamp": self.timestamp,
        }


class MessageBus:
    """In-memory message bus for MCP communication.

    Delivery is synchronous: publish() invokes every subscriber callback for
    the receiver inline, on the publisher's thread.
    """

    def __init__(self):
        self.messages = []      # full history of published messages
        self.subscribers = {}   # agent name -> list of callbacks

    def publish(self, message: MCPMessage):
        """Publish message to the bus and deliver it to subscribers."""
        self.messages.append(message)
        logger.info(f"Message published: {message.sender} -> {message.receiver} [{message.type}]")
        # Notify subscribers registered under the receiver's name
        if message.receiver in self.subscribers:
            for callback in self.subscribers[message.receiver]:
                callback(message)

    def subscribe(self, agent_name: str, callback):
        """Subscribe agent to receive messages addressed to agent_name."""
        if agent_name not in self.subscribers:
            self.subscribers[agent_name] = []
        self.subscribers[agent_name].append(callback)


# Global message bus shared by all agents
message_bus = MessageBus()


class IngestionAgent:
    """Agent responsible for document parsing and preprocessing.

    Listens for INGESTION_REQUEST messages, parses each file into plain text
    (by extension), splits the text into overlapping chunks, and forwards the
    resulting Documents to the RetrievalAgent.
    """

    def __init__(self, message_bus: MessageBus):
        self.name = "IngestionAgent"
        self.message_bus = message_bus
        self.message_bus.subscribe(self.name, self.handle_message)
        # 200-char overlap preserves context across chunk boundaries
        self.text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=1000, chunk_overlap=200
        )

    def handle_message(self, message: MCPMessage):
        """Handle incoming MCP messages."""
        if message.type == "INGESTION_REQUEST":
            self.process_documents(message)

    def parse_pdf(self, file_path: str) -> str:
        """Parse PDF document; returns "" on any parse error."""
        try:
            with open(file_path, 'rb') as file:
                pdf_reader = PyPDF2.PdfReader(file)
                text = ""
                for page in pdf_reader.pages:
                    text += page.extract_text()
                return text
        except Exception as e:
            logger.error(f"Error parsing PDF: {e}")
            return ""

    def parse_pptx(self, file_path: str) -> str:
        """Parse PPTX document; collects text from every shape on every slide."""
        try:
            prs = Presentation(file_path)
            text = ""
            for slide in prs.slides:
                for shape in slide.shapes:
                    if hasattr(shape, "text"):
                        text += shape.text + "\n"
            return text
        except Exception as e:
            logger.error(f"Error parsing PPTX: {e}")
            return ""

    def parse_csv(self, file_path: str) -> str:
        """Parse CSV document into its table-as-string representation."""
        try:
            df = pd.read_csv(file_path)
            return df.to_string()
        except Exception as e:
            logger.error(f"Error parsing CSV: {e}")
            return ""

    def parse_docx(self, file_path: str) -> str:
        """Parse DOCX document paragraph by paragraph."""
        try:
            doc = DocxDocument(file_path)
            text = ""
            for paragraph in doc.paragraphs:
                text += paragraph.text + "\n"
            return text
        except Exception as e:
            logger.error(f"Error parsing DOCX: {e}")
            return ""

    def parse_txt(self, file_path: str) -> str:
        """Parse TXT/Markdown document."""
        try:
            with open(file_path, 'r', encoding='utf-8') as file:
                return file.read()
        except Exception as e:
            logger.error(f"Error parsing TXT: {e}")
            return ""

    def process_documents(self, message: MCPMessage):
        """Process uploaded documents and forward chunks to the RetrievalAgent."""
        files = message.payload.get("files", [])
        processed_docs = []
        for file_path in files:
            file_ext = os.path.splitext(file_path)[1].lower()
            # Parse document based on file type
            if file_ext == '.pdf':
                text = self.parse_pdf(file_path)
            elif file_ext == '.pptx':
                text = self.parse_pptx(file_path)
            elif file_ext == '.csv':
                text = self.parse_csv(file_path)
            elif file_ext == '.docx':
                text = self.parse_docx(file_path)
            elif file_ext in ['.txt', '.md']:
                text = self.parse_txt(file_path)
            else:
                logger.warning(f"Unsupported file type: {file_ext}")
                continue
            if text:
                # Split text into chunks, tagging each with its source file
                chunks = self.text_splitter.split_text(text)
                docs = [Document(page_content=chunk, metadata={"source": file_path})
                        for chunk in chunks]
                processed_docs.extend(docs)
        # Send processed documents to RetrievalAgent
        response = MCPMessage(
            sender=self.name,
            receiver="RetrievalAgent",
            msg_type="INGESTION_COMPLETE",
            trace_id=message.trace_id,
            payload={"documents": processed_docs}
        )
        self.message_bus.publish(response)


class RetrievalAgent:
    """Agent responsible for embedding and semantic retrieval.

    Builds a FAISS vector store from ingested documents and answers
    RETRIEVAL_REQUEST messages with the top-k similar chunks.
    """

    def __init__(self, message_bus: MessageBus):
        self.name = "RetrievalAgent"
        self.message_bus = message_bus
        self.message_bus.subscribe(self.name, self.handle_message)
        self.vector_store = None

    def handle_message(self, message: MCPMessage):
        """Handle incoming MCP messages."""
        if message.type == "INGESTION_COMPLETE":
            self.create_vector_store(message)
        elif message.type == "RETRIEVAL_REQUEST":
            self.retrieve_context(message)

    def create_vector_store(self, message: MCPMessage):
        """Create vector store from processed documents and notify the coordinator."""
        documents = message.payload.get("documents", [])
        if documents:
            try:
                self.vector_store = FAISS.from_documents(documents, embeddings)
                logger.info(f"Vector store created with {len(documents)} documents")
                # Notify completion
                response = MCPMessage(
                    sender=self.name,
                    receiver="CoordinatorAgent",
                    msg_type="VECTORSTORE_READY",
                    trace_id=message.trace_id,
                    payload={"status": "ready"}
                )
                self.message_bus.publish(response)
            except Exception as e:
                logger.error(f"Error creating vector store: {e}")

    def retrieve_context(self, message: MCPMessage):
        """Retrieve relevant context for a query and forward it to the LLM agent."""
        query = message.payload.get("query", "")
        k = message.payload.get("k", 3)
        if self.vector_store and query:
            try:
                docs = self.vector_store.similarity_search(query, k=k)
                context = [{"content": doc.page_content,
                            "source": doc.metadata.get("source", "")}
                           for doc in docs]
                response = MCPMessage(
                    sender=self.name,
                    receiver="LLMResponseAgent",
                    msg_type="CONTEXT_RESPONSE",
                    trace_id=message.trace_id,
                    payload={
                        "query": query,
                        "retrieved_context": context,
                        "top_chunks": [doc.page_content for doc in docs]
                    }
                )
                self.message_bus.publish(response)
            except Exception as e:
                logger.error(f"Error retrieving context: {e}")


class LLMResponseAgent:
    """Agent responsible for generating LLM responses.

    Formats the retrieved context plus the user question into a chat prompt
    and forwards the resulting token stream to the CoordinatorAgent.
    """

    def __init__(self, message_bus: MessageBus):
        self.name = "LLMResponseAgent"
        self.message_bus = message_bus
        self.message_bus.subscribe(self.name, self.handle_message)

    def handle_message(self, message: MCPMessage):
        """Handle incoming MCP messages."""
        if message.type == "CONTEXT_RESPONSE":
            self.generate_response(message)

    def generate_response(self, message: MCPMessage):
        """Generate response using retrieved context."""
        query = message.payload.get("query", "")
        context = message.payload.get("retrieved_context", [])
        # Build context string
        context_text = "\n\n".join([f"Source: {ctx['source']}\nContent: {ctx['content']}"
                                    for ctx in context])
        # Create messages for conversational format
        messages = [
            {
                "role": "system",
                "content": "You are a helpful assistant. Based on the provided context below, answer the user's question accurately and comprehensively. Cite the sources if possible.",
            },
            {
                "role": "user",
                "content": f"Context:\n\n{context_text}\n\nQuestion: {query}"
            }
        ]
        try:
            # Use client.chat_completion for conversational models
            response_stream = client.chat_completion(
                messages=messages,
                max_tokens=512,
                temperature=0.7,
                stream=True
            )
            # Send streaming response (the payload carries the live iterator;
            # the CoordinatorAgent consumes it)
            response = MCPMessage(
                sender=self.name,
                receiver="CoordinatorAgent",
                msg_type="LLM_RESPONSE_STREAM",
                trace_id=message.trace_id,
                payload={
                    "query": query,
                    "response_stream": response_stream,
                    "context": context
                }
            )
            self.message_bus.publish(response)
        except Exception as e:
            logger.error(f"Error generating response: {e}")
            # Send an error stream back so the UI still receives something
            error_msg = f"Error from LLM: {e}"

            def error_generator():
                yield error_msg

            response = MCPMessage(
                sender=self.name,
                receiver="CoordinatorAgent",
                msg_type="LLM_RESPONSE_STREAM",
                trace_id=message.trace_id,
                payload={"response_stream": error_generator()}
            )
            self.message_bus.publish(response)


class CoordinatorAgent:
    """Coordinator agent that orchestrates the entire workflow.

    Entry point for the UI: kicks off ingestion for uploaded files, fires
    retrieval requests for queries, and streams LLM tokens back to Gradio.
    """

    def __init__(self, message_bus: MessageBus):
        self.name = "CoordinatorAgent"
        self.message_bus = message_bus
        self.message_bus.subscribe(self.name, self.handle_message)
        self.current_response_stream = None
        self.vector_store_ready = False

    def handle_message(self, message: MCPMessage):
        """Handle incoming MCP messages."""
        if message.type == "VECTORSTORE_READY":
            self.vector_store_ready = True
        elif message.type == "LLM_RESPONSE_STREAM":
            self.current_response_stream = message.payload.get("response_stream")

    def process_files(self, files):
        """Process uploaded files by publishing an INGESTION_REQUEST."""
        if not files:
            return "No files uploaded."
        file_paths = [file.name for file in files]
        # Send ingestion request
        message = MCPMessage(
            sender=self.name,
            receiver="IngestionAgent",
            msg_type="INGESTION_REQUEST",
            payload={"files": file_paths}
        )
        self.message_bus.publish(message)
        return f"Processing {len(files)} files: {', '.join([os.path.basename(fp) for fp in file_paths])}"

    def handle_query(self, query: str, history: List) -> Generator[str, None, None]:
        """Handle user query and return streaming response."""
        if not self.vector_store_ready:
            yield "Please upload and process documents first."
            return
        # Send retrieval request
        message = MCPMessage(
            sender=self.name,
            receiver="RetrievalAgent",
            msg_type="RETRIEVAL_REQUEST",
            payload={"query": query}
        )
        self.message_bus.publish(message)
        # Wait for response and stream. On this in-process bus delivery is
        # synchronous, but we poll defensively with a timeout anyway.
        timeout = 20  # seconds
        start_time = time.time()
        while not self.current_response_stream and (time.time() - start_time) < timeout:
            time.sleep(0.1)
        if self.current_response_stream:
            try:
                # Stream tokens directly
                for chunk in self.current_response_stream:
                    # The token is in chunk.choices[0].delta.content for chat_completion
                    if hasattr(chunk, 'choices') and chunk.choices:
                        token = chunk.choices[0].delta.content
                        if token:
                            yield token
                    else:
                        # Fallback for different response format
                        if hasattr(chunk, 'token'):
                            yield chunk.token
                        elif isinstance(chunk, str):
                            yield chunk
            except Exception as e:
                yield f"Error streaming response: {e}"
            finally:
                self.current_response_stream = None  # Reset for next query
        else:
            yield "Timeout: No response received from LLM agent."
# Initialize agents ingestion_agent = IngestionAgent(message_bus) retrieval_agent = RetrievalAgent(message_bus) llm_response_agent = LLMResponseAgent(message_bus) coordinator_agent = CoordinatorAgent(message_bus) def create_interface(): """Create enhanced ChatGPT-style Gradio interface with glowing effects""" with gr.Blocks( theme=gr.themes.Base(), css=""" /* Import Google Fonts for better typography */ [cite_start]@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap'); [cite: 52] /* Dark theme styling with enhanced visuals */ .gradio-container { [cite_start]background: linear-gradient(135deg, #0a0a0a 0%, #1a1a2e 50%, #16213e 100%) !important; [cite: 52] color: #ffffff !important; [cite_start]/* Ensure base text color is bright */ [cite: 53] [cite_start]height: 100vh !important; [cite: 53] [cite_start]max-width: none !important; [cite: 54] [cite_start]padding: 0 !important; [cite: 54] [cite_start]font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif !important; [cite: 55] } /* Main container with animated background */ .main-container { [cite_start]display: flex; [cite: 56] [cite_start]flex-direction: column; [cite: 56] [cite_start]height: 100vh; [cite: 56] background: radial-gradient(circle at 20% 50%, rgba(255, 193, 7, 0.05) 0%, transparent 50%), radial-gradient(circle at 80% 20%, rgba(0, 123, 255, 0.05) 0%, transparent 50%), radial-gradient(circle at 40% 80%, rgba(255, 87, 34, 0.03) 0%, transparent 50%), [cite_start]linear-gradient(135deg, #0a0a0a 0%, #1a1a2e 50%, #16213e 100%); [cite: 56] [cite_start]animation: backgroundShift 15s ease-in-out infinite alternate; [cite: 57] } @keyframes backgroundShift { [cite_start]0% { filter: hue-rotate(0deg); [cite: 58] } [cite_start]100% { filter: hue-rotate(10deg); [cite: 59] } } /* Enhanced Header with glow */ .header { [cite_start]background: rgba(255, 193, 7, 0.08); [cite: 60] [cite_start]border-bottom: 2px solid transparent; [cite: 60] [cite_start]border-image: 
linear-gradient(90deg, rgba(255, 193, 7, 0.5), rgba(0, 123, 255, 0.3)) 1; [cite: 60] [cite_start]padding: 1.5rem 2rem; [cite: 60] [cite_start]backdrop-filter: blur(20px); [cite: 61] box-shadow: 0 4px 20px rgba(255, 193, 7, 0.1), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.1); [cite: 61] [cite_start]position: relative; [cite: 62] [cite_start]overflow: hidden; [cite: 62] } .header::before { [cite_start]content: ''; [cite: 63] [cite_start]position: absolute; [cite: 63] [cite_start]top: 0; [cite: 63] [cite_start]left: -100%; [cite: 63] [cite_start]width: 100%; [cite: 63] [cite_start]height: 100%; [cite: 63] [cite_start]background: linear-gradient(90deg, transparent, rgba(255, 193, 7, 0.1), transparent); [cite: 63] [cite_start]animation: shimmer 3s ease-in-out infinite; [cite: 64] } @keyframes shimmer { [cite_start]0% { left: -100%; [cite: 65] } [cite_start]100% { left: 100%; [cite: 66] } } .header h1 { [cite_start]color: #ffc107; [cite: 67] [cite_start]margin: 0; [cite: 67] [cite_start]font-size: 2rem; [cite: 67] [cite_start]font-weight: 700; [cite: 67] text-shadow: 0 0 10px rgba(255, 193, 7, 0.3), [cite_start]0 0 20px rgba(255, 193, 7, 0.2); [cite: 67] [cite_start]letter-spacing: -0.02em; [cite: 68] } .header p { color: #ffffff; [cite_start]/* UPDATED from #e0e0e0 */ [cite: 69] [cite_start]margin: 0.5rem 0 0 0; [cite: 69] [cite_start]font-size: 1rem; [cite: 69] [cite_start]font-weight: 400; [cite: 69] [cite_start]opacity: 0.9; [cite: 70] } /* Enhanced Chat container */ .chat-container { [cite_start]flex: 1; [cite: 71] [cite_start]display: flex; [cite: 71] [cite_start]flex-direction: column; [cite: 71] [cite_start]max-width: 1200px; [cite: 71] [cite_start]margin: 0 auto; [cite: 71] [cite_start]width: 100%; [cite: 71] [cite_start]padding: 2rem; [cite: 71] [cite_start]height: calc(100vh - 200px) !important; [cite: 71] [cite_start]gap: 1.5rem; [cite: 72] } /* Enhanced Chatbot with glow effect */ .gradio-chatbot { [cite_start]height: 400px !important; [cite: 73] 
[cite_start]max-height: 400px !important; [cite: 73] [cite_start]background: rgba(45, 45, 45, 0.4) !important; [cite: 73] [cite_start]border: 2px solid rgba(255, 193, 7, 0.2) !important; [cite: 73] [cite_start]border-radius: 20px !important; [cite: 74] [cite_start]margin-bottom: 1rem; [cite: 74] [cite_start]overflow-y: auto !important; [cite: 74] box-shadow: 0 0 30px rgba(255, 193, 7, 0.15), 0 8px 32px rgba(0, 0, 0, 0.3), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05) !important; [cite: 74] [cite_start]backdrop-filter: blur(20px) !important; [cite: 75] [cite_start]position: relative; [cite: 75] [cite_start]animation: chatGlow 4s ease-in-out infinite alternate; [cite: 76] } @keyframes chatGlow { 0% { box-shadow: 0 0 30px rgba(255, 193, 7, 0.15), 0 8px 32px rgba(0, 0, 0, 0.3), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05); [cite: 77] } 100% { box-shadow: 0 0 40px rgba(255, 193, 7, 0.25), 0 12px 40px rgba(0, 0, 0, 0.4), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.08); [cite: 78] } } /* Enhanced chat messages */ .message { [cite_start]background: rgba(255, 255, 255, 0.05) !important; [cite: 79] [cite_start]border-radius: 16px !important; [cite: 79] [cite_start]padding: 1rem 1.5rem !important; [cite: 79] [cite_start]margin: 0.75rem 0 !important; [cite: 79] [cite_start]border: 1px solid rgba(255, 255, 255, 0.1) !important; [cite: 80] [cite_start]backdrop-filter: blur(10px) !important; [cite: 80] [cite_start]font-size: 0.95rem !important; [cite: 80] [cite_start]line-height: 1.6 !important; [cite: 80] [cite_start]box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1) !important; [cite: 81] [cite_start]transition: all 0.3s ease !important; [cite: 81] color: #ffffff !important; [cite_start]/* Made brighter for all messages */ [cite: 81] } .message:hover { [cite_start]transform: translateY(-1px) !important; [cite: 82] [cite_start]box-shadow: 0 4px 15px rgba(0, 0, 0, 0.15) !important; [cite: 82] } /* User message styling */ .message.user { [cite_start]background: 
linear-gradient(135deg, rgba(255, 193, 7, 0.1), rgba(255, 193, 7, 0.05)) !important; [cite: 83] [cite_start]border-color: rgba(255, 193, 7, 0.2) !important; [cite: 83] [cite_start]margin-left: 15% !important; [cite: 83] box-shadow: 0 2px 10px rgba(255, 193, 7, 0.1), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05) !important; [cite: 84] } /* Assistant message styling - important for streaming text */ .message.assistant { [cite_start]background: linear-gradient(135deg, rgba(0, 123, 255, 0.08), rgba(0, 123, 255, 0.04)) !important; [cite: 85] [cite_start]border-color: rgba(0, 123, 255, 0.2) !important; [cite: 85] [cite_start]margin-right: 15% !important; [cite: 85] box-shadow: 0 2px 10px rgba(0, 123, 255, 0.1), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05) !important; [cite: 86] color: #ffffff !important; [cite_start]/* Ensures generated text is bright white */ [cite: 86] } /* Enhanced Input area with glow */ .input-area { [cite_start]background: rgba(45, 45, 45, 0.6); [cite: 87] [cite_start]border-radius: 20px; [cite: 87] [cite_start]padding: 1.5rem; [cite: 87] [cite_start]border: 2px solid rgba(255, 193, 7, 0.2); [cite: 87] [cite_start]backdrop-filter: blur(20px); [cite: 87] [cite_start]position: sticky; [cite: 87] [cite_start]bottom: 0; [cite: 88] box-shadow: 0 0 25px rgba(255, 193, 7, 0.1), 0 8px 32px rgba(0, 0, 0, 0.2), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05); [cite: 89] [cite_start]animation: inputGlow 3s ease-in-out infinite alternate; [cite: 89] } @keyframes inputGlow { 0% { box-shadow: 0 0 25px rgba(255, 193, 7, 0.1), 0 8px 32px rgba(0, 0, 0, 0.2), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05); [cite: 90] } 100% { box-shadow: 0 0 35px rgba(255, 193, 7, 0.2), 0 12px 40px rgba(0, 0, 0, 0.3), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.08); [cite: 91] } } /* Enhanced File upload area */ .upload-area { [cite_start]background: linear-gradient(135deg, rgba(255, 193, 7, 0.08), rgba(255, 193, 7, 0.04)) !important; [cite: 92] 
[cite_start]border: 2px dashed rgba(255, 193, 7, 0.4) !important; [cite: 92] [cite_start]border-radius: 16px !important; [cite: 92] [cite_start]padding: 1.5rem !important; [cite: 92] [cite_start]margin-bottom: 1.5rem !important; [cite: 93] [cite_start]transition: all 0.4s cubic-bezier(0.4, 0, 0.2, 1) !important; [cite: 93] [cite_start]backdrop-filter: blur(10px) !important; [cite: 94] box-shadow: 0 0 20px rgba(255, 193, 7, 0.05), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.05) !important; [cite: 95] } .upload-area:hover { [cite_start]background: linear-gradient(135deg, rgba(255, 193, 7, 0.12), rgba(255, 193, 7, 0.06)) !important; [cite: 96] [cite_start]border-color: rgba(255, 193, 7, 0.6) !important; [cite: 96] box-shadow: 0 0 30px rgba(255, 193, 7, 0.15), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.08) !important; [cite: 97] [cite_start]transform: translateY(-2px) !important; [cite: 97] } /* Sidebar styling */ .sidebar { [cite_start]background: rgba(30, 30, 30, 0.6) !important; [cite: 98] [cite_start]border-right: 2px solid rgba(255, 193, 7, 0.1) !important; [cite: 98] [cite_start]backdrop-filter: blur(15px) !important; [cite: 99] box-shadow: inset -1px 0 0 rgba(255, 255, 255, 0.05), [cite_start]4px 0 20px rgba(0, 0, 0, 0.1) !important; [cite: 100] } /* Enhanced buttons with glow effects */ .send-btn { [cite_start]background: linear-gradient(135deg, #ffc107 0%, #ff8f00 100%) !important; [cite: 101] [cite_start]color: #000000 !important; [cite: 101] [cite_start]border: none !important; [cite: 101] [cite_start]border-radius: 12px !important; [cite: 101] [cite_start]font-weight: 600 !important; [cite: 101] [cite_start]min-height: 48px !important; [cite: 101] [cite_start]font-size: 0.95rem !important; [cite: 102] [cite_start]transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important; [cite: 102] box-shadow: 0 4px 15px rgba(255, 193, 7, 0.3), [cite_start]0 0 20px rgba(255, 193, 7, 0.2) !important; [cite: 103] [cite_start]position: relative; [cite: 103] 
[cite_start]overflow: hidden; [cite: 103] } .send-btn::before { [cite_start]content: ''; [cite: 104] [cite_start]position: absolute; [cite: 104] [cite_start]top: 0; [cite: 104] [cite_start]left: -100%; [cite: 104] [cite_start]width: 100%; [cite: 104] [cite_start]height: 100%; [cite: 104] [cite_start]background: linear-gradient(90deg, transparent, rgba(255, 255, 255, 0.2), transparent); [cite: 105] [cite_start]transition: left 0.5s; [cite: 105] } .send-btn:hover::before { [cite_start]left: 100%; [cite: 106] } .send-btn:hover { [cite_start]transform: translateY(-2px) !important; [cite: 107] box-shadow: 0 8px 25px rgba(255, 193, 7, 0.4), [cite_start]0 0 30px rgba(255, 193, 7, 0.3) !important; [cite: 108] } .primary-btn { [cite_start]background: linear-gradient(135deg, #ffc107 0%, #ff8f00 100%) !important; [cite: 109] [cite_start]color: #000000 !important; [cite: 109] [cite_start]border: none !important; [cite: 109] [cite_start]border-radius: 12px !important; [cite: 109] [cite_start]font-weight: 600 !important; [cite: 109] [cite_start]padding: 0.75rem 1.5rem !important; [cite: 109] [cite_start]font-size: 0.95rem !important; [cite: 110] [cite_start]transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important; [cite: 110] box-shadow: 0 4px 15px rgba(255, 193, 7, 0.3), [cite_start]0 0 20px rgba(255, 193, 7, 0.2) !important; [cite: 111] } .primary-btn:hover { [cite_start]transform: translateY(-2px) !important; [cite: 112] box-shadow: 0 8px 25px rgba(255, 193, 7, 0.4), [cite_start]0 0 30px rgba(255, 193, 7, 0.3) !important; [cite: 113] } /* Enhanced Text inputs with glow */ .gradio-textbox input, .gradio-textbox textarea { [cite_start]background: rgba(45, 45, 45, 0.8) !important; [cite: 114] [cite_start]color: #ffffff !important; [cite: 114] [cite_start]border: 2px solid rgba(255, 193, 7, 0.2) !important; [cite: 114] [cite_start]border-radius: 12px !important; [cite: 114] [cite_start]font-size: 0.95rem !important; [cite: 115] [cite_start]padding: 0.75rem 1rem !important; 
[cite: 115] [cite_start]transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1) !important; [cite: 115] [cite_start]backdrop-filter: blur(10px) !important; [cite: 116] [cite_start]box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1) !important; [cite: 117] } .gradio-textbox input:focus, .gradio-textbox textarea:focus { [cite_start]border-color: rgba(255, 193, 7, 0.5) !important; [cite: 118] box-shadow: 0 0 20px rgba(255, 193, 7, 0.2), [cite_start]inset 0 2px 4px rgba(0, 0, 0, 0.1) !important; [cite: 119] [cite_start]outline: none !important; [cite: 119] } /* Enhanced Processing indicator */ .processing-indicator { [cite_start]background: linear-gradient(135deg, rgba(255, 193, 7, 0.15), rgba(255, 193, 7, 0.08)); [cite: 120] [cite_start]border: 2px solid rgba(255, 193, 7, 0.3); [cite: 120] [cite_start]border-radius: 12px; [cite: 120] [cite_start]padding: 1rem 1.5rem; [cite: 120] [cite_start]margin: 1rem 0; [cite: 120] [cite_start]color: #ffc107; [cite: 120] [cite_start]text-align: center; [cite: 121] [cite_start]font-weight: 500; [cite: 121] [cite_start]backdrop-filter: blur(10px); [cite: 121] box-shadow: 0 0 25px rgba(255, 193, 7, 0.1), [cite_start]inset 0 1px 0 rgba(255, 255, 255, 0.1); [cite: 122] [cite_start]animation: processingPulse 2s ease-in-out infinite; [cite: 122] } @keyframes processingPulse { [cite_start]0%, 100% { opacity: 1; [cite: 123] } [cite_start]50% { opacity: 0.8; [cite: 124] } } /* Enhanced Input row styling */ .input-row { [cite_start]display: flex !important; [cite: 125] [cite_start]gap: 12px !important; [cite: 125] [cite_start]align-items: end !important; [cite: 125] } /* Message input */ .message-input { [cite_start]flex: 1 !important; [cite: 126] [cite_start]min-height: 48px !important; [cite: 126] } /* Markdown content styling - applies to text within gr.Markdown components */ .markdown-content { color: #ffffff !important; [cite_start]/* UPDATED from #e0e0e0 */ [cite: 127] [cite_start]line-height: 1.6 !important; [cite: 128] [cite_start]font-size: 
0.95rem !important; [cite: 128] } .markdown-content h1, .markdown-content h2, .markdown-content h3 { [cite_start]color: #ffc107 !important; [cite: 129] [cite_start]margin-top: 1.5rem !important; [cite: 129] [cite_start]margin-bottom: 0.5rem !important; [cite: 129] } .markdown-content code { [cite_start]background: rgba(255, 193, 7, 0.1) !important; [cite: 130] [cite_start]color: #ffc107 !important; [cite: 130] [cite_start]padding: 0.2rem 0.4rem !important; [cite: 130] [cite_start]border-radius: 4px !important; [cite: 131] } .markdown-content pre { [cite_start]background: rgba(0, 0, 0, 0.3) !important; [cite: 132] [cite_start]border: 1px solid rgba(255, 193, 7, 0.2) !important; [cite: 132] [cite_start]border-radius: 8px !important; [cite: 132] [cite_start]padding: 1rem !important; [cite: 132] [cite_start]margin: 1rem 0 !important; [cite: 133] } /* Examples styling */ .examples { [cite_start]background: rgba(45, 45, 45, 0.3) !important; [cite: 134] [cite_start]border-radius: 12px !important; [cite: 134] [cite_start]padding: 1rem !important; [cite: 134] [cite_start]border: 1px solid rgba(255, 193, 7, 0.1) !important; [cite: 134] [cite_start]backdrop-filter: blur(10px) !important; [cite: 134] color: #ffffff !important; [cite_start]/* UPDATED from #e0e0e0 */ [cite: 135] } /* General paragraph text and Gradio HTML components (for architecture text) */ .gradio-container p, .gradio-container .gr-html { color: #ffffff !important; [cite_start]/* UPDATED from #e0e0e0 */ [cite: 136] } /* Loading animation */ @keyframes loading { [cite_start]0% { transform: rotate(0deg); [cite: 137] } [cite_start]100% { transform: rotate(360deg); [cite: 138] } } .loading { [cite_start]animation: loading 2s linear infinite; [cite: 139] } /* Responsive design */ """, title="Agentic RAG Assistant" ) as iface: # Header with gr.Row(): with gr.Column(): gr.HTML("""
Upload documents and ask questions - powered by Multi-Agent Architecture with streaming responses