File size: 2,742 Bytes
211c6f5
 
 
 
 
 
38e58eb
eaf3ce7
 
211c6f5
 
 
 
 
 
 
38e58eb
 
b0bb368
eaf3ce7
211c6f5
eaf3ce7
7e38020
60ffbab
211c6f5
 
 
eaf3ce7
 
 
 
 
 
 
 
211c6f5
 
 
 
 
 
eaf3ce7
 
 
32b4566
eaf3ce7
211c6f5
 
 
 
eaf3ce7
211c6f5
eaf3ce7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
211c6f5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
import os
import warnings
import gradio as gr
from dotenv import load_dotenv

from langchain_community.vectorstores import FAISS
from langchain_community.embeddings import AzureOpenAIEmbeddings
from langchain.embeddings.openai import OpenAIEmbeddings
from openai import AzureOpenAI

# Patch Gradio bug (schema parsing issue)
# HACK: gradio_client can crash while converting some JSON schemas to Python
# type strings. Overriding the converter to always report "string" sidesteps
# the crash; it only affects generated API typing info, not app behavior.
# NOTE(review): this must run before the Gradio app is built — keep it here,
# right after the imports.
import gradio_client.utils
gradio_client.utils.json_schema_to_python_type = lambda schema, defs=None: "string"

# Load environment variables from a .env file (if present) into os.environ.
load_dotenv()
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_LLM_DEPLOYMENT = os.getenv("AZURE_OPENAI_LLM_DEPLOYMENT")
# BUG FIX: OPENAI_EMBEDDING was used in the check below without ever being
# defined, so a missing variable raised NameError instead of the intended
# ValueError. Read it from the environment like the other settings.
OPENAI_EMBEDDING = os.getenv("OPENAI_EMBEDDING")

# Fail fast with a clear message if any required setting is absent.
if not all([AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_LLM_DEPLOYMENT, OPENAI_EMBEDDING]):
    raise ValueError("Azure OpenAI environment variables are missing.")

# Build the embedding model only after configuration is validated.
# NOTE(review): OpenAIEmbeddings reads OPENAI_API_KEY from the environment —
# confirm that variable is also set, or switch to the AzureOpenAIEmbeddings
# variant commented out below.
embeddings = OpenAIEmbeddings()

# Suppress warnings (e.g. LangChain deprecation notices) to keep the console clean.
warnings.filterwarnings("ignore")

# Initialize embedding model
# Alternative: Azure-hosted embeddings, kept for reference. The active
# `embeddings` object is created above with OpenAIEmbeddings().
# embeddings = AzureOpenAIEmbeddings(
#     azure_deployment=OPENAI_EMBEDDING,
#     azure_endpoint=AZURE_OPENAI_ENDPOINT,
#     openai_api_key=AZURE_OPENAI_API_KEY,
#     openai_api_version="2024-08-01-preview",
#     chunk_size=1000
# )

# Load FAISS vector store
# NOTE(review): the index must have been built with the same embedding model
# used here, or retrieval quality silently degrades — confirm how
# "faiss_index_sysml" was created.
# allow_dangerous_deserialization=True is needed because FAISS indexes are
# pickled; only load indexes you created yourself (pickle can execute code).
vectorstore = FAISS.load_local(
    "faiss_index_sysml", embeddings, allow_dangerous_deserialization=True
)

# Initialize Azure OpenAI client directly
# (bypasses LangChain's chat wrapper; the raw SDK client is used for
# chat.completions calls against the configured deployment)
client = AzureOpenAI(
    api_key=AZURE_OPENAI_API_KEY,
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_version="2024-08-01-preview"
)

# Conversation history as (user_message, assistant_answer) tuples;
# used as the initial value of the gr.State in the UI below.
history = []

# Chatbot logic using AzureOpenAI directly
def sysml_chatbot(message, history):
    """Answer a SysML question with retrieval-augmented generation.

    Args:
        message: The user's question (plain text from the textbox).
        history: Running list of (user, assistant) pairs; mutated in place
            so the shared gr.State stays current between turns.

    Returns:
        A ("", history) tuple: an empty string to clear the input textbox,
        and the updated history for the Chatbot display.
    """
    # Skip blank submissions so we don't burn an API call on an empty query.
    if not message or not message.strip():
        return "", history

    # Retrieve the top-4 most similar chunks directly from the vector store.
    # similarity_search(k=4) replaces the deprecated
    # retriever.get_relevant_documents() call; as_retriever() with default
    # settings performed the same top-k similarity search.
    docs = vectorstore.similarity_search(message, k=4)
    context = "\n\n".join(doc.page_content for doc in docs)

    # Ground the model in the retrieved context via the system prompt.
    system_prompt = "You are a helpful assistant knowledgeable in SysML. Use the context below to answer the user's question.\n\nContext:\n" + context

    response = client.chat.completions.create(
        model=AZURE_OPENAI_LLM_DEPLOYMENT,
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": message}
        ]
    )

    # content can be None (e.g. content-filtered responses); show "" instead
    # of crashing the UI or rendering "None".
    answer = response.choices[0].message.content or ""
    history.append((message, answer))
    return "", history

# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## SysML Chatbot")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Ask me about SysML diagrams or concepts...")
    clear = gr.Button("Clear")

    # Per-session conversation history, seeded from the module-level list.
    state = gr.State(history)

    # Enter in the textbox runs RAG; returns ("", history) which clears the
    # input box and refreshes the chat display.
    msg.submit(sysml_chatbot, [msg, state], [msg, chatbot])

    # BUG FIX: the old handler only cleared the visible chatbot and textbox,
    # leaving the history inside `state` intact — the next message would
    # resurface the entire "cleared" conversation. Reset the state too.
    clear.click(lambda: ([], "", []), None, [chatbot, msg, state])

if __name__ == "__main__":
    demo.launch()