Update app.py
app.py
CHANGED
@@ -5,38 +5,45 @@ from dotenv import load_dotenv
 
 from langchain.chains import ConversationalRetrievalChain
 from langchain_community.vectorstores import FAISS
-from
-from langchain_community.embeddings import
+from langchain_community.chat_models import AzureChatOpenAI
+from langchain_community.embeddings import AzureOpenAIEmbeddings
 
 # Patch Gradio bug (schema parsing issue)
 import gradio_client.utils
 gradio_client.utils.json_schema_to_python_type = lambda schema, defs=None: "string"
 
-
-
 # Load environment variables
 load_dotenv()
-
-
-
+AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
+AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
+AZURE_OPENAI_DEPLOYMENT_NAME = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")
 
+if not all([AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_DEPLOYMENT_NAME]):
+    raise ValueError("Azure OpenAI environment variables missing.")
 
 # Suppress warnings
 warnings.filterwarnings("ignore")
 
 # Initialize embedding model
-embeddings =
+embeddings = AzureOpenAIEmbeddings(
+    azure_deployment=AZURE_OPENAI_DEPLOYMENT_NAME,
+    openai_api_key=AZURE_OPENAI_API_KEY,
+    openai_api_base=AZURE_OPENAI_ENDPOINT,
+    openai_api_version="2024-08-01-preview"
+)
 
 # Load FAISS vector store
 vectorstore = FAISS.load_local(
     "faiss_index_sysml", embeddings, allow_dangerous_deserialization=True
 )
 
-#
-llm =
-
-
-
+# Initialize LLM
+llm = AzureChatOpenAI(
+    deployment_name=AZURE_OPENAI_DEPLOYMENT_NAME,
+    openai_api_key=AZURE_OPENAI_API_KEY,
+    openai_api_base=AZURE_OPENAI_ENDPOINT,
+    openai_api_version="2024-08-01-preview",
+    temperature=0.5
 )
 
 # Build conversational chain with history
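The hunk stops at the comment introducing the conversational chain, so the chain construction itself is not part of this diff. A minimal sketch of how it could be assembled from the `llm` and `vectorstore` defined above, assuming a ConversationBufferMemory is used to carry the chat history between turns:

# Sketch only (assumed, not part of this commit): wire the Azure LLM and the
# FAISS retriever into a retrieval chain that keeps conversation history.
from langchain.memory import ConversationBufferMemory

memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
qa_chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=vectorstore.as_retriever(),
    memory=memory,
)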
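Because the file patches `gradio_client` at import time, the Space presumably exposes the chain through a Gradio UI further down in app.py. A hedged sketch of that wiring, reusing the hypothetical `qa_chain` name from the sketch above:

# Assumed Gradio front end (not shown in this diff): a simple chat UI that
# forwards each user message to the chain and returns the generated answer.
import gradio as gr

def respond(message, history):
    result = qa_chain.invoke({"question": message})
    return result["answer"]

gr.ChatInterface(respond).launch()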