import os
import warnings

import gradio as gr
from dotenv import load_dotenv
from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import AzureOpenAIEmbeddings
from openai import AzureOpenAI

# Patch a Gradio bug (schema parsing issue in gradio_client)
import gradio_client.utils
gradio_client.utils.json_schema_to_python_type = lambda schema, defs=None: "string"
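# Note: this override is a blunt workaround (assumed to target the known gradio_client
# crash when a component's JSON schema contains boolean values); mapping every schema to
# "string" only affects the auto-generated API type hints, not the UI itself.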

# Load environment variables
load_dotenv()
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_LLM_DEPLOYMENT = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
OPENAI_EMBEDDING = os.getenv("OPENAI_EMBEDDING")  # assumed env var holding the Azure embedding deployment name

if not all([AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_LLM_DEPLOYMENT, OPENAI_EMBEDDING]):
    raise ValueError("Azure OpenAI environment variables are missing.")

# Suppress warnings
warnings.filterwarnings("ignore")

# Initialize the embedding model (it must match the model used to build the FAISS index).
# A plain OpenAIEmbeddings client cannot authenticate against Azure with an Azure key,
# so the Azure variant is used here.
embeddings = AzureOpenAIEmbeddings(
    azure_deployment=OPENAI_EMBEDDING,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    openai_api_key=AZURE_OPENAI_API_KEY,
    openai_api_version="2024-08-01-preview",
    chunk_size=1000,
)
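
# Example .env entries expected by this app (placeholder values, not real credentials):
#   AZURE_OPENAI_API_KEY=<your-azure-openai-key>
#   AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com/
#   AZURE_OPENAI_LLM_DEPLOYMENT=<chat-model-deployment-name>
#   OPENAI_EMBEDDING=<embedding-model-deployment-name>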

# Load the FAISS vector store
vectorstore = FAISS.load_local(
    "faiss_index_sysml", embeddings, allow_dangerous_deserialization=True
)

# Initialize the Azure OpenAI client directly
client = AzureOpenAI(
    api_key=AZURE_OPENAI_API_KEY,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_version="2024-08-01-preview",
)

# Chatbot logic: retrieve context from the FAISS index, then call Azure OpenAI directly
def sysml_chatbot(message, history):
    # Retrieve the most relevant chunks for the user's question
    retriever = vectorstore.as_retriever()
    docs = retriever.get_relevant_documents(message)
    context = "\n\n".join(doc.page_content for doc in docs[:4])

    # Compose the system prompt with the retrieved context
    system_prompt = (
        "You are a helpful assistant knowledgeable in SysML. "
        "Use the context below to answer the user's question.\n\nContext:\n" + context
    )
    response = client.chat.completions.create(
        model=AZURE_OPENAI_LLM_DEPLOYMENT,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": message},
        ],
    )
    answer = response.choices[0].message.content

    # Record the turn and clear the textbox
    history.append((message, answer))
    return "", history

# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## SysML Chatbot")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask me about SysML diagrams or concepts...")
    clear = gr.Button("Clear")
    state = gr.State([])  # per-session conversation history

    msg.submit(sysml_chatbot, [msg, state], [msg, chatbot])
    # Reset the chat display, the textbox, and the stored history
    clear.click(lambda: ([], "", []), None, [chatbot, msg, state])

if __name__ == "__main__":
    demo.launch()