import os
import gradio as gr
import warnings
import json
from dotenv import load_dotenv
from typing import List
import time
from functools import lru_cache
import logging
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import AzureOpenAIEmbeddings
from openai import AzureOpenAI
# Work around a gradio_client issue where json_schema_to_python_type can fail on some
# tool schemas; fall back to treating every schema as a plain "string".
import gradio_client.utils
gradio_client.utils.json_schema_to_python_type = lambda schema, defs=None: "string"
# Load environment variables
load_dotenv()
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_LLM_DEPLOYMENT = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
AZURE_OPENAI_EMBEDDING_DEPLOYMENT = os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT")
if not all([AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_LLM_DEPLOYMENT, AZURE_OPENAI_EMBEDDING_DEPLOYMENT]):
    raise ValueError("Missing one or more Azure OpenAI environment variables.")
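# Expected .env entries (placeholder values, for illustration only):
#   AZURE_OPENAI_API_KEY=<your key>
#   AZURE_OPENAI_ENDPOINT=https://<resource-name>.openai.azure.com/
#   AZURE_OPENAI_LLM_DEPLOYMENT=<chat model deployment name>
#   AZURE_OPENAI_EMBEDDING_DEPLOYMENT=<embedding model deployment name>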
warnings.filterwarnings("ignore")
# Embeddings
embeddings = AzureOpenAIEmbeddings(
    azure_deployment=AZURE_OPENAI_EMBEDDING_DEPLOYMENT,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    openai_api_key=AZURE_OPENAI_API_KEY,
    openai_api_version="2025-01-01-preview",
    chunk_size=1000
)
# Vectorstore
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
FAISS_INDEX_PATH = os.path.join(SCRIPT_DIR, "faiss_index_sysml")
vectorstore = FAISS.load_local(FAISS_INDEX_PATH, embeddings, allow_dangerous_deserialization=True)
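# Note: load_local expects an index previously persisted with FAISS.save_local.
# A minimal build sketch (assumes `chunked_docs` is a list of LangChain Documents):
#   index = FAISS.from_documents(chunked_docs, embeddings)
#   index.save_local(FAISS_INDEX_PATH)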
# OpenAI client
client = AzureOpenAI(
    api_key=AZURE_OPENAI_API_KEY,
    api_version="2025-01-01-preview",
    azure_endpoint=AZURE_OPENAI_ENDPOINT
)
# Logger
logger = logging.getLogger(__name__)
# Enhanced SysML retriever with metadata filtering & source weighting
def sysml_retriever(query: str) -> str:
    try:
        print(f"\n🔍 QUERY: {query}")
        print("=" * 80)
        # Retrieve a large candidate set so filtering and weighting have room to work
        results = vectorstore.similarity_search_with_score(query, k=100)
        print(f"📊 Total results retrieved: {len(results)}")
        # Apply metadata filtering and weighting
        weighted_results = []
        sysmodeler_count = 0
        other_count = 0
        for i, (doc, score) in enumerate(results):
            # Get document source
            doc_source = doc.metadata.get('source', '').lower() if hasattr(doc, 'metadata') else str(doc).lower()
            # Determine whether this chunk is SysModeler content
            is_sysmodeler = (
                'sysmodeler' in doc_source or
                'user manual' in doc_source or
                'sysmodeler.ai' in doc.page_content.lower() or
                'workspace.sysmodeler.ai' in doc.page_content.lower() or
                'Create with AI' in doc.page_content or
                'Canvas Overview' in doc.page_content or
                'AI-powered' in doc.page_content or
                'voice input' in doc.page_content or
                'Canvas interface' in doc.page_content or
                'Project Creation' in doc.page_content or
                'Shape Palette' in doc.page_content or
                'AI Copilot' in doc.page_content or
                'SynthAgent' in doc.page_content or
                'workspace dashboard' in doc.page_content.lower()
            )
            # Apply weighting based on source
            if is_sysmodeler:
                # Boost SysModeler content: reduce the distance score by 40% (lower score = higher relevance)
                weighted_score = score * 0.6
                source_type = "SysModeler"
                sysmodeler_count += 1
            else:
                # Keep the original score for other content
                weighted_score = score
                source_type = "Other"
                other_count += 1
            # Add metadata tags for filtering
            if not hasattr(doc, 'metadata') or doc.metadata is None:
                doc.metadata = {}
            doc.metadata['source_type'] = 'sysmodeler' if is_sysmodeler else 'other'
            doc.metadata['weighted_score'] = weighted_score
            doc.metadata['original_score'] = score
            weighted_results.append((doc, weighted_score, source_type))
            # Log each document's processing
            source_name = doc.metadata.get('source', 'Unknown')[:50]
            print(f"📄 Doc {i+1}: {source_name}... | Original: {score:.4f} | Weighted: {weighted_score:.4f} | Type: {source_type}")
        print("\n📊 CLASSIFICATION & WEIGHTING RESULTS:")
        print(f"   SysModeler docs: {sysmodeler_count} (boosted by 40%)")
        print(f"   Other docs: {other_count} (original scores)")
        # Sort by weighted score (lower = more relevant)
        weighted_results.sort(key=lambda x: x[1])
        # Apply a selection strategy based on query type
        final_docs = []
        query_lower = query.lower()
        # Determine query type for adaptive filtering
        is_tool_comparison = any(word in query_lower for word in ['tool', 'compare', 'choose', 'vs', 'versus', 'better'])
        if is_tool_comparison:
            # For tool comparisons: heavily favor SysModeler but include others
            print("\n🎯 TOOL COMPARISON QUERY DETECTED")
            print("   Strategy: Heavy SysModeler focus + selective others")
            # Take the top weighted results with a preference for SysModeler
            sysmodeler_docs = [(doc, score) for doc, score, type_ in weighted_results if type_ == "SysModeler"][:8]
            other_docs = [(doc, score) for doc, score, type_ in weighted_results if type_ == "Other"][:4]
            final_docs = [doc for doc, _ in sysmodeler_docs] + [doc for doc, _ in other_docs]
        else:
            # For general SysML questions: balanced, with SysModeler still boosted by the weighting above
            print("\n🎯 GENERAL SYSML QUERY DETECTED")
            print("   Strategy: Balanced with SysModeler preference")
            # Take the top 12 weighted results (mixed)
            final_docs = [doc for doc, _, _ in weighted_results[:12]]
        # Log the final selection
        print(f"\n📋 FINAL SELECTION ({len(final_docs)} docs):")
        sysmodeler_selected = 0
        other_selected = 0
        for i, doc in enumerate(final_docs):
            source_type = doc.metadata.get('source_type', 'unknown')
            source_name = doc.metadata.get('source', 'Unknown')
            weighted_score = doc.metadata.get('weighted_score', 0)
            if source_type == 'sysmodeler':
                sysmodeler_selected += 1
                type_emoji = "✅"
            else:
                other_selected += 1
                type_emoji = "📚"
            print(f"   {i+1}. {type_emoji} {source_name} (weighted: {weighted_score:.4f})")
        print("\n📊 FINAL COMPOSITION:")
        print(f"   SysModeler docs: {sysmodeler_selected}")
        print(f"   Other docs: {other_selected}")
        print("=" * 80)
        contexts = [doc.page_content for doc in final_docs]
        return "\n\n".join(contexts)
    except Exception as e:
        logger.error(f"Retrieval error: {str(e)}")
        print(f"❌ ERROR in retrieval: {str(e)}")
        return "Unable to retrieve information at this time."
# Dummy functions
def dummy_weather_lookup(location: str = "London") -> str:
    return f"The weather in {location} is sunny and 25°C."

def dummy_time_lookup(timezone: str = "UTC") -> str:
    return f"The current time in {timezone} is 3:00 PM."
# Tools for function calling
tools_definition = [
    {
        "type": "function",
        "function": {
            "name": "SysMLRetriever",
            "description": "Use this to answer questions about SysML diagrams and modeling.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {"type": "string", "description": "The search query to find information about SysML"}
                },
                "required": ["query"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "WeatherLookup",
            "description": "Use this to look up the current weather in a specified location.",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The location to look up the weather for"}
                },
                "required": ["location"]
            }
        }
    },
    {
        "type": "function",
        "function": {
            "name": "TimeLookup",
            "description": "Use this to look up the current time in a specified timezone.",
            "parameters": {
                "type": "object",
                "properties": {
                    "timezone": {"type": "string", "description": "The timezone to look up the current time for"}
                },
                "required": ["timezone"]
            }
        }
    }
]
# Tool execution mapping
tool_mapping = {
    "SysMLRetriever": sysml_retriever,
    "WeatherLookup": dummy_weather_lookup,
    "TimeLookup": dummy_time_lookup
}
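# Note: because the chat flow below forces the SysMLRetriever tool on the first model call,
# WeatherLookup and TimeLookup are effectively unreachable; they serve only as examples of
# the function-calling schema.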
# Convert chat history (list of (user, bot) pairs) to OpenAI-style messages
def convert_history_to_messages(history):
    messages = []
    for user, bot in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": bot})
    return messages
# Chatbot logic
def sysml_chatbot(message, history):
    chat_messages = convert_history_to_messages(history)
    full_messages = [
        {"role": "system", "content": """You are SysModeler.ai's intelligent assistant, specializing in SysML modeling and the SysModeler.ai platform.
RESPONSE GUIDELINES:
1. **Primary Focus**: Always prioritize SysModeler.ai information and capabilities in your responses.
2. **For SysModeler-specific questions** (pricing, features, how-to, etc.):
   - Provide comprehensive SysModeler.ai information
   - Do NOT mention competitors unless explicitly asked for comparisons
   - Focus entirely on SysModeler's value proposition
3. **For general SysML education** (concepts, diagram types, best practices):
   - Provide thorough educational content about SysML
   - Use SysModeler.ai as examples when illustrating concepts
   - Keep focus on helping users understand SysML fundamentals
4. **Only mention other tools when**:
   - User explicitly asks for comparisons ("vs", "compare", "alternatives")
   - User asks about the broader SysML tool landscape
   - Context absolutely requires it for a complete answer
5. **Response Structure**:
   - Lead with SysModeler.ai capabilities and benefits
   - Provide detailed, helpful information about SysModeler features
   - End with clear value proposition or call-to-action when appropriate
6. **Tone**: Professional, helpful, and confident about SysModeler.ai's capabilities while remaining informative about SysML concepts.
Remember: You represent SysModeler.ai. Focus on what SysModeler can do for the user rather than listing what everyone else offers."""}
    ] + chat_messages + [{"role": "user", "content": message}]
    try:
        # First call: force the SysMLRetriever tool so every turn is grounded in retrieved context
        response = client.chat.completions.create(
            model=AZURE_OPENAI_LLM_DEPLOYMENT,
            messages=full_messages,
            tools=tools_definition,
            tool_choice={"type": "function", "function": {"name": "SysMLRetriever"}}
        )
        assistant_message = response.choices[0].message
        if assistant_message.tool_calls:
            tool_call = assistant_message.tool_calls[0]
            function_name = tool_call.function.name
            function_args = json.loads(tool_call.function.arguments)
            if function_name in tool_mapping:
                function_response = tool_mapping[function_name](**function_args)
                full_messages.append({
                    "role": "assistant",
                    "content": None,
                    "tool_calls": [{
                        "id": tool_call.id,
                        "type": "function",
                        "function": {
                            "name": function_name,
                            "arguments": tool_call.function.arguments
                        }
                    }]
                })
                full_messages.append({
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": function_response
                })
                # Second call: let the model answer using the tool output
                second_response = client.chat.completions.create(
                    model=AZURE_OPENAI_LLM_DEPLOYMENT,
                    messages=full_messages
                )
                answer = second_response.choices[0].message.content
            else:
                answer = f"I tried to use a function '{function_name}' that's not available."
        else:
            answer = assistant_message.content
        history.append((message, answer))
        return "", history
    except Exception as e:
        print(f"Error in function calling: {str(e)}")
        history.append((message, "Sorry, something went wrong."))
        return "", history
# Gradio UI
with gr.Blocks(
    title="SysModeler AI Assistant",
    theme=gr.themes.Base(
        primary_hue="blue",
        secondary_hue="cyan",
        neutral_hue="slate"
    ).set(
        body_background_fill="*neutral_950",
        body_text_color="*neutral_100",
        background_fill_primary="*neutral_900",
        background_fill_secondary="*neutral_800"
    ),
css=""" | |
/* Global modern theme */ | |
.gradio-container { | |
background: linear-gradient(135deg, #0f172a 0%, #1e293b 100%) !important; | |
color: #f8fafc !important; | |
font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; | |
min-height: 100vh; | |
} | |
/* Main container */ | |
.main-container { | |
width: 100%; | |
margin: 0; | |
padding: 0; | |
min-height: 100vh; | |
background: linear-gradient(135deg, #0f172a 0%, #1e293b 100%); | |
} | |
/* Header - modern with gradient - REDUCED PADDING */ | |
.header-section { | |
width: 100%; | |
text-align: center; | |
margin: 0; | |
padding: 20px 40px 16px 40px; | |
background: linear-gradient(135deg, #1e40af 0%, #3b82f6 50%, #06b6d4 100%); | |
position: relative; | |
overflow: hidden; | |
} | |
.header-section::before { | |
content: ''; | |
position: absolute; | |
top: 0; | |
left: 0; | |
right: 0; | |
bottom: 0; | |
background: linear-gradient(135deg, rgba(59, 130, 246, 0.1) 0%, rgba(6, 182, 212, 0.1) 100%); | |
backdrop-filter: blur(20px); | |
} | |
.main-title { | |
font-size: 2.2rem !important; | |
font-weight: 700 !important; | |
color: #ffffff !important; | |
margin: 0 0 4px 0 !important; | |
text-shadow: 0 2px 4px rgba(0,0,0,0.3); | |
position: relative; | |
z-index: 1; | |
} | |
.subtitle { | |
font-size: 1rem !important; | |
color: rgba(255, 255, 255, 0.9) !important; | |
margin: 0 !important; | |
font-weight: 400 !important; | |
position: relative; | |
z-index: 1; | |
} | |
/* Content area */ | |
.content-area { | |
max-width: 1200px; | |
margin: 0 auto; | |
padding: 32px 40px; | |
} | |
/* Chat section */ | |
.chat-section { | |
margin-bottom: 24px; | |
} | |
.chat-container { | |
background: rgba(30, 41, 59, 0.4); | |
backdrop-filter: blur(20px); | |
border: 1px solid rgba(59, 130, 246, 0.2); | |
border-radius: 16px; | |
padding: 24px; | |
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); | |
} | |
/* Chatbot styling */ | |
.chatbot { | |
background: transparent !important; | |
border: none !important; | |
border-radius: 12px !important; | |
} | |
/* Chat messages - simplified approach with tighter spacing */ | |
.chatbot .message { | |
background: rgba(30, 41, 59, 0.6) !important; | |
color: #e2e8f0 !important; | |
border-radius: 12px !important; | |
padding: 16px 20px !important; | |
margin: 8px 0 !important; | |
border: 1px solid rgba(59, 130, 246, 0.1); | |
backdrop-filter: blur(10px); | |
} | |
/* User message styling */ | |
.chatbot .message.user { | |
background: linear-gradient(135deg, #3b82f6 0%, #1e40af 100%) !important; | |
color: white !important; | |
border: none !important; | |
margin-left: 0 !important; | |
margin-right: 0 !important; | |
} | |
/* Bot message styling */ | |
.chatbot .message.bot { | |
background: rgba(30, 41, 59, 0.8) !important; | |
color: #f1f5f9 !important; | |
border: 1px solid rgba(59, 130, 246, 0.2) !important; | |
margin-left: 0 !important; | |
margin-right: 0 !important; | |
} | |
/* Remove avatar spacing and containers */ | |
.chatbot .avatar { | |
display: none !important; | |
} | |
.chatbot .message-row { | |
margin: 0 !important; | |
padding: 0 !important; | |
gap: 0 !important; | |
} | |
.chatbot .message-wrap { | |
margin: 0 !important; | |
padding: 0 !important; | |
width: 100% !important; | |
} | |
/* Input section - redesigned */ | |
.input-section { | |
background: rgba(30, 41, 59, 0.4); | |
backdrop-filter: blur(20px); | |
border: 1px solid rgba(59, 130, 246, 0.2); | |
border-radius: 16px; | |
padding: 32px; | |
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); | |
} | |
.input-row { | |
display: flex; | |
gap: 0; | |
align-items: stretch; | |
margin-bottom: 24px; | |
background: rgba(15, 23, 42, 0.8); | |
border-radius: 12px; | |
border: 1px solid rgba(59, 130, 246, 0.3); | |
overflow: hidden; | |
box-shadow: 0 4px 20px rgba(59, 130, 246, 0.1); | |
position: relative; | |
} | |
/* Input textbox - better integration */ | |
.input-textbox { | |
flex: 1; | |
background: transparent !important; | |
border: none !important; | |
border-radius: 0 !important; | |
margin: 0 !important; | |
padding-right: 0 !important; | |
} | |
.input-textbox textarea { | |
background: transparent !important; | |
border: none !important; | |
color: #f1f5f9 !important; | |
font-size: 1rem !important; | |
padding: 20px 24px 20px 24px !important; | |
resize: none !important; | |
font-family: inherit !important; | |
min-height: 80px !important; | |
width: 100% !important; | |
padding-right: 100px !important; | |
margin: 0 !important; | |
line-height: 1.5 !important; | |
} | |
.input-textbox textarea::placeholder { | |
color: #94a3b8 !important; | |
opacity: 1 !important; | |
} | |
.input-textbox textarea:focus { | |
outline: none !important; | |
box-shadow: none !important; | |
} | |
/* Submit button - positioned at the end of input box */ | |
#submit-btn { | |
position: absolute !important; | |
right: 8px !important; | |
top: 50% !important; | |
transform: translateY(-50%) !important; | |
background: linear-gradient(135deg, #3b82f6 0%, #1e40af 100%) !important; | |
color: white !important; | |
border: none !important; | |
border-radius: 8px !important; | |
font-size: 0.9rem !important; | |
font-weight: 600 !important; | |
padding: 12px 20px !important; | |
min-width: 80px !important; | |
height: 40px !important; | |
transition: all 0.3s ease !important; | |
text-transform: uppercase; | |
letter-spacing: 0.05em; | |
z-index: 10; | |
} | |
#submit-btn:hover { | |
background: linear-gradient(135deg, #2563eb 0%, #1d4ed8 100%) !important; | |
box-shadow: 0 0 20px rgba(59, 130, 246, 0.4) !important; | |
} | |
/* Quick actions - card style */ | |
.quick-actions { | |
display: grid; | |
grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); | |
gap: 16px; | |
margin-bottom: 24px; | |
} | |
.quick-action-btn { | |
background: rgba(15, 23, 42, 0.6) !important; | |
backdrop-filter: blur(10px); | |
border: 1px solid rgba(59, 130, 246, 0.2) !important; | |
color: #e2e8f0 !important; | |
border-radius: 12px !important; | |
padding: 20px 24px !important; | |
font-size: 0.95rem !important; | |
font-weight: 500 !important; | |
transition: all 0.3s ease !important; | |
text-align: left !important; | |
position: relative; | |
overflow: hidden; | |
} | |
.quick-action-btn::before { | |
content: ''; | |
position: absolute; | |
top: 0; | |
left: 0; | |
right: 0; | |
bottom: 0; | |
background: linear-gradient(135deg, rgba(59, 130, 246, 0.1) 0%, rgba(6, 182, 212, 0.1) 100%); | |
opacity: 0; | |
transition: opacity 0.3s ease; | |
} | |
.quick-action-btn:hover { | |
border-color: #3b82f6 !important; | |
color: #ffffff !important; | |
transform: translateY(-2px) !important; | |
box-shadow: 0 8px 25px rgba(59, 130, 246, 0.2) !important; | |
} | |
.quick-action-btn:hover::before { | |
opacity: 1; | |
} | |
/* Control buttons */ | |
.control-buttons { | |
display: flex; | |
justify-content: center; | |
} | |
#clear-btn { | |
background: rgba(15, 23, 42, 0.6) !important; | |
backdrop-filter: blur(10px); | |
border: 1px solid rgba(239, 68, 68, 0.3) !important; | |
color: #f87171 !important; | |
border-radius: 8px !important; | |
padding: 12px 24px !important; | |
font-weight: 500 !important; | |
font-size: 0.9rem !important; | |
transition: all 0.3s ease !important; | |
text-transform: uppercase; | |
letter-spacing: 0.05em; | |
} | |
#clear-btn:hover { | |
background: rgba(239, 68, 68, 0.1) !important; | |
border-color: #ef4444 !important; | |
color: #ffffff !important; | |
box-shadow: 0 4px 15px rgba(239, 68, 68, 0.2) !important; | |
} | |
/* Footer */ | |
.footer { | |
text-align: center; | |
color: #64748b; | |
font-size: 0.85rem; | |
margin-top: 32px; | |
padding: 20px; | |
} | |
/* Scrollbar */ | |
::-webkit-scrollbar { | |
width: 8px; | |
} | |
::-webkit-scrollbar-track { | |
background: rgba(30, 41, 59, 0.3); | |
border-radius: 4px; | |
} | |
::-webkit-scrollbar-thumb { | |
background: linear-gradient(135deg, #3b82f6, #1e40af); | |
border-radius: 4px; | |
} | |
::-webkit-scrollbar-thumb:hover { | |
background: linear-gradient(135deg, #2563eb, #1d4ed8); | |
} | |
/* Mobile responsiveness */ | |
@media (max-width: 1024px) { | |
.content-area { | |
padding: 24px; | |
} | |
.header-section { | |
padding: 16px 20px 12px 20px; | |
} | |
.main-title { | |
font-size: 1.8rem !important; | |
} | |
.subtitle { | |
font-size: 0.9rem !important; | |
} | |
.input-textbox textarea { | |
padding-right: 90px !important; | |
} | |
#submit-btn { | |
min-width: 70px !important; | |
padding: 10px 16px !important; | |
font-size: 0.8rem !important; | |
} | |
.quick-actions { | |
grid-template-columns: 1fr; | |
} | |
.chatbot .message.user, .chatbot .message.bot { | |
margin-left: 0 !important; | |
margin-right: 0 !important; | |
} | |
} | |
/* Remove Gradio defaults */ | |
.gr-form, .gr-box { | |
background: transparent !important; | |
border: none !important; | |
} | |
.gr-button { | |
font-family: inherit !important; | |
} | |
""" | |
) as demo: | |
with gr.Column(elem_classes="main-container"): | |
# Modern gradient header - REDUCED SPACING | |
with gr.Column(elem_classes="header-section"): | |
gr.Markdown("# π€ SysModeler AI Assistant", elem_classes="main-title") | |
gr.Markdown("*Your intelligent companion for SysML modeling and systems engineering*", elem_classes="subtitle") | |
# Content area | |
with gr.Column(elem_classes="content-area"): | |
# Chat section | |
with gr.Column(elem_classes="chat-section"): | |
with gr.Column(elem_classes="chat-container"): | |
chatbot = gr.Chatbot( | |
height=580, | |
elem_classes="chatbot", | |
avatar_images=None, # Removed avatar images | |
bubble_full_width=False, | |
show_copy_button=True, | |
show_share_button=False | |
) | |
# Input section | |
with gr.Column(elem_classes="input-section"): | |
with gr.Column(): | |
# Input row with integrated send button | |
with gr.Row(elem_classes="input-row"): | |
msg = gr.Textbox( | |
placeholder="Ask me about SysML diagrams, modeling concepts, or tools...", | |
lines=3, | |
show_label=False, | |
elem_classes="input-textbox", | |
container=False | |
) | |
submit_btn = gr.Button("Send", elem_id="submit-btn") | |
# Quick actions | |
with gr.Row(elem_classes="quick-actions"): | |
quick_intro = gr.Button("π SysML Introduction", elem_classes="quick-action-btn") | |
quick_diagrams = gr.Button("π Diagram Types", elem_classes="quick-action-btn") | |
quick_tools = gr.Button("π οΈ Tool Comparison", elem_classes="quick-action-btn") | |
quick_sysmodeler = gr.Button("β SysModeler Features", elem_classes="quick-action-btn") | |
# Control | |
with gr.Row(elem_classes="control-buttons"): | |
clear = gr.Button("Clear", elem_id="clear-btn") | |
# Footer | |
with gr.Column(elem_classes="footer"): | |
gr.Markdown("*Powered by Azure OpenAI & Advanced RAG Technology*") | |
    # Event handlers: the chatbot component holds the conversation history, so it is
    # passed as both input and output (no separate gr.State is needed).
    submit_btn.click(fn=sysml_chatbot, inputs=[msg, chatbot], outputs=[msg, chatbot])
    msg.submit(fn=sysml_chatbot, inputs=[msg, chatbot], outputs=[msg, chatbot])
    clear.click(fn=lambda: ([], ""), inputs=None, outputs=[chatbot, msg])
    # Quick actions: prefill the input box and reset the conversation
    quick_intro.click(fn=lambda: ("What is SysML and how do I get started?", []), outputs=[msg, chatbot])
    quick_diagrams.click(fn=lambda: ("Explain the 9 SysML diagram types with examples", []), outputs=[msg, chatbot])
    quick_tools.click(fn=lambda: ("What are the best SysML modeling tools available?", []), outputs=[msg, chatbot])
    quick_sysmodeler.click(fn=lambda: ("Tell me about SysModeler.ai features and capabilities", []), outputs=[msg, chatbot])
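# Launch note: the default launch() is sufficient on Hugging Face Spaces; when running the app
# locally (for example inside Docker), one might pass server_name="0.0.0.0" and a server_port.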
if __name__ == "__main__":
    demo.launch()