import os
import warnings
import gradio as gr
from dotenv import load_dotenv
from langchain.chains import ConversationalRetrievalChain
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import AzureOpenAIEmbeddings
from langchain_community.chat_models import AzureChatOpenAI
# Work around a gradio_client schema-parsing bug: stub json_schema_to_python_type so every API parameter type is reported as "string"
import gradio_client.utils
gradio_client.utils.json_schema_to_python_type = lambda schema, defs=None: "string"
# Load environment variables
load_dotenv()
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_LLM_DEPLOYMENT = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
AZURE_OPENAI_EMBEDDING_DEPLOYMENT = os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT")
if not all([AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_LLM_DEPLOYMENT, AZURE_OPENAI_EMBEDDING_DEPLOYMENT]):
    raise ValueError("Missing one or more Azure OpenAI environment variables.")
# Suppress warnings
warnings.filterwarnings("ignore")
# Initialize Azure embedding model
embeddings = AzureOpenAIEmbeddings(
    azure_deployment=AZURE_OPENAI_EMBEDDING_DEPLOYMENT,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    openai_api_key=AZURE_OPENAI_API_KEY,
    openai_api_version="2025-01-01-preview",  # latest recommended API version
    chunk_size=1000,
)
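# Note: chunk_size here is the embedding batch size (texts sent per request to Azure),
# not the document chunk length that was used when the FAISS index was built.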
# Load FAISS vector store
vectorstore = FAISS.load_local(
    "faiss_index_sysml", embeddings, allow_dangerous_deserialization=True
)
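# load_local unpickles the saved docstore, hence the explicit
# allow_dangerous_deserialization flag; only enable it for index files you built
# yourself, such as this bundled faiss_index_sysml folder.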
# Initialize Azure chat model
llm = AzureChatOpenAI(
    deployment_name=AZURE_OPENAI_LLM_DEPLOYMENT,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    openai_api_key=AZURE_OPENAI_API_KEY,
    openai_api_version="2025-01-01-preview",  # latest recommended API version
    temperature=0.5,
)
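# temperature=0.5 allows some variation in wording; lower values make answers more
# deterministic, which can help when responses should stick closely to retrieved text.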
# Build conversational RAG chain
qa = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=vectorstore.as_retriever(),
    return_source_documents=False,
)
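# ConversationalRetrievalChain condenses the new question plus chat history into a
# standalone query, retrieves relevant chunks from the FAISS index, and has the chat
# model answer from those chunks. Set return_source_documents=True to also get the
# retrieved chunks back for citation.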
history = []
# Chatbot logic
def sysml_chatbot(message, history):
    result = qa({"question": message, "chat_history": history})
    answer = result["answer"]
    history.append((message, answer))
    return "", history
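# Note: `history` holds (user, assistant) tuples in the format gr.Chatbot displays,
# e.g. [("What is a block?", "A block is the basic unit of structure in SysML...")].
# Returning ("", history) clears the textbox and re-renders the chat window.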
# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## SysModeler Chatbot")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask me about SysML diagrams or concepts...")
    clear = gr.Button("Clear")
    state = gr.State(history)
    msg.submit(sysml_chatbot, [msg, state], [msg, chatbot])
    # Clear also resets the per-session history so old turns don't influence new answers
    clear.click(lambda: ([], "", []), None, [chatbot, msg, state])
if __name__ == "__main__":
    demo.launch()
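    # launch() starts the Gradio server; locally it defaults to http://127.0.0.1:7860.
    # Pass share=True if you need a temporary public link while testing.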