Update app.py
app.py CHANGED
@@ -12,6 +12,10 @@ from langchain.chains.question_answering import load_qa_chain
 from langchain.callbacks import get_openai_callback
 import os
 
+# Initialize conversation memory as an empty list
+conversation_memory = []
+
+
 # Step 1: Clone the Dataset Repository
 repo = Repository(
     local_dir="Private_Book",  # Local directory to clone the repository
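Note on the new module-level conversation_memory list: Streamlit re-executes app.py from top to bottom on every interaction, so a plain module-level list is reset on each rerun. If the memory is meant to persist across turns, a session-scoped list is the usual pattern. A minimal sketch, assuming st.session_state is acceptable here (the remember helper is hypothetical, not part of this commit):

import streamlit as st

# Keep conversation memory in st.session_state so it survives reruns within a
# single browser session; a module-level list would be re-created on each run.
if "conversation_memory" not in st.session_state:
    st.session_state["conversation_memory"] = []

def remember(sender, message):
    # Append a (sender, message) pair to the session-scoped memory.
    st.session_state["conversation_memory"].append((sender, message))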
@@ -94,7 +98,7 @@ def load_chatbot():
 
 
 def main():
-
+    # Hide Streamlit style
     hide_streamlit_style = """
         <style>
         #MainMenu {visibility: hidden;}
@@ -105,7 +109,7 @@ def main():
 
     # Main content
     st.title("Welcome to BinDocs ChatBot! 🤖")
-
+
     # Directly specifying the path to the PDF file
     pdf_path = pdf_file_path
     if not os.path.exists(pdf_path):
@@ -139,7 +143,6 @@ def main():
     if st.button("Ich habe mein Meta Password vergessen, wie kann ich es zurücksetzen?"):
         query = "Ich habe mein Meta Password vergessen, wie kann ich es zurücksetzen?"
 
-
     if st.button("Ask") or (not st.session_state['chat_history'] and query) or (st.session_state['chat_history'] and query != st.session_state['chat_history'][-1][1]):
         st.session_state['chat_history'].append(("User", query, "new"))
 
@@ -149,9 +152,16 @@ def main():
         VectorStore = load_pdf(pdf_path)
         chain = load_chatbot()
         docs = VectorStore.similarity_search(query=query, k=3)
+
+        # Add user's query to conversation memory
+        conversation_memory.append(("User", query))
+
         with get_openai_callback() as cb:
             response = chain.run(input_documents=docs, question=query)
 
+        # Add bot's response to conversation memory
+        conversation_memory.append(("Bot", response))
+
         st.session_state['chat_history'].append(("Bot", response, "new"))
 
     # Display new messages at the bottom
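The get_openai_callback() context manager wrapping chain.run above yields a handler that tracks token usage for the OpenAI calls made inside the with block. A small sketch of surfacing that information, assuming the same chain, docs, and query objects as in the diff (the run_with_usage wrapper is hypothetical):

from langchain.callbacks import get_openai_callback

def run_with_usage(chain, docs, query):
    # Run the QA chain and report the token usage tracked by the callback handler.
    with get_openai_callback() as cb:
        response = chain.run(input_documents=docs, question=query)
    print(f"Tokens used: {cb.total_tokens} "
          f"(prompt: {cb.prompt_tokens}, completion: {cb.completion_tokens})")
    print(f"Estimated cost (USD): {cb.total_cost}")
    return response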
@@ -170,6 +180,24 @@ def main():
 
     # Mark all messages as old after displaying
     st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]
+
+
+    # Display new messages at the bottom
+    new_messages = st.session_state['chat_history'][-2:]
+    for chat in new_messages:
+        background_color = "#FFA07A" if chat[2] == "new" else "#D1D1E0" if chat[0] == "User" else "#D1D1E0"  # Use a very light purple-grey color
+        new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
+
+
+
+    # Scroll to the latest response using JavaScript
+    st.write("<script>document.getElementById('response').scrollIntoView();</script>", unsafe_allow_html=True)
+
+    loading_message.empty()
+
+    # Clear the input field by setting the query variable to an empty string
+    query = ""
+
 
 
 def display_chat_history(chat_history):
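Two notes on the display block added above: the chained conditional assigns "#D1D1E0" in both else branches, so the sender check has no effect on the colour, and script tags passed through st.write(..., unsafe_allow_html=True) are injected as HTML but typically do not execute, so the scroll snippet may be a no-op. A sketch of an explicit colour lookup that makes the intent easier to adjust later (the bubble_color helper is hypothetical, not part of this commit):

def bubble_color(sender, status):
    # Highlight messages flagged "new"; everything else gets the light
    # purple-grey used in the diff (both sender branches were identical).
    if status == "new":
        return "#FFA07A"
    return "#D1D1E0"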