import streamlit as st
from dotenv import load_dotenv
import pickle
from huggingface_hub import Repository
from PyPDF2 import PdfReader
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback
import os
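# Load variables from a local .env file, if present (assumed to supply HUB_TOKEN and
# OPENAI_API_KEY); python-dotenv is imported above for this purpose.
load_dotenv()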
st.markdown("""
<style>
.cloud-button {
position: relative;
background: #E0E0E0;
border: none;
padding: 20px 40px;
cursor: pointer;
overflow: hidden;
outline: none;
box-shadow: 2px 2px 12px rgba(0,0,0,0.1);
}
/* Main cloud shape */
.cloud-button::before {
content: '';
position: absolute;
background: #E0E0E0;
border-radius: 50%;
width: 150px;
height: 150px;
top: -50px;
left: 50%;
transform: translateX(-50%);
}
/* Additional cloud bubbles */
.cloud-button::after {
content: '';
position: absolute;
background: #E0E0E0;
border-radius: 50%;
width: 120px;
height: 120px;
top: 20px;
left: 15%;
}
.cloud-button span {
position: relative;
z-index: 1;
}
/* Hover effect */
.cloud-button:hover {
box-shadow: 2px 2px 18px rgba(0,0,0,0.2);
}
/* Override some default styles for the button to ensure cloud shape */
.cloud-button, .cloud-button::before, .cloud-button::after {
border: none;
outline: none;
text-decoration: none;
}
</style>
""", unsafe_allow_html=True)
# Read (and clear) a query triggered by the custom cloud button, if any
is_cloud_button_pressed = None
if hasattr(st.session_state, "cloud_button_pressed"):
    is_cloud_button_pressed = st.session_state.cloud_button_pressed
    del st.session_state.cloud_button_pressed  # remove the attribute after using it
# Step 1: Clone the Dataset Repository
repo = Repository(
    local_dir="Private_Book",             # Local directory to clone the repository
    repo_type="dataset",                  # Specify that this is a dataset repository
    clone_from="Anne31415/Private_Book",  # Replace with your repository URL
    token=os.environ["HUB_TOKEN"]         # Use the secret token to authenticate
)
repo.git_pull()  # Pull the latest changes (if any)
# Step 2: Load the PDF File
pdf_file_path = "Private_Book/KOMBI_all2.pdf" # Replace with your PDF file path
with st.sidebar:
    st.title('BinDoc GmbH')
    st.markdown("Experience revolutionary interaction with BinDocs Chat App, leveraging state-of-the-art AI technology.")
    add_vertical_space(1)  # Adjust as per the desired spacing
    st.markdown("""
    Hello! I’m here to assist you with:<br><br>
    📘 **Glossary Inquiries:**<br>
    I can clarify terms like "DiGA", "AOP", or "BfArM", providing clear and concise explanations to help you understand our content better.<br><br>
    🆘 **Help Page Navigation:**<br>
    Ask me if you forgot your password or want to know more about topics related to the platform.<br><br>
    📰 **Latest Whitepapers Insights:**<br>
    Curious about our recent publications? Feel free to ask about our latest whitepapers!<br><br>
    """, unsafe_allow_html=True)
    add_vertical_space(1)  # Adjust as per the desired spacing
    st.write('Made with ❤️ by BinDoc GmbH')
# Retrieve the OpenAI API key from the environment
# (langchain's OpenAI and OpenAIEmbeddings also read OPENAI_API_KEY from the environment by default)
api_key = os.getenv("OPENAI_API_KEY")
def load_pdf(file_path):
    # Extract raw text from every page of the PDF
    pdf_reader = PdfReader(file_path)
    text = ""
    for page in pdf_reader.pages:
        text += page.extract_text()

    # Split the text into overlapping chunks for embedding
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len
    )
    chunks = text_splitter.split_text(text=text)

    # Reuse a cached FAISS vector store if one exists; otherwise build and cache it
    store_name, _ = os.path.splitext(os.path.basename(file_path))
    if os.path.exists(f"{store_name}.pkl"):
        with open(f"{store_name}.pkl", "rb") as f:
            VectorStore = pickle.load(f)
    else:
        embeddings = OpenAIEmbeddings()
        VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
        with open(f"{store_name}.pkl", "wb") as f:
            pickle.dump(VectorStore, f)

    return VectorStore
# Load the PDF file once per session (st.session_state persists across reruns)
if "pdf_data" not in st.session_state:
    st.session_state.pdf_data = load_pdf(pdf_file_path)
def load_chatbot():
    # "stuff" chain: all retrieved documents are placed into a single prompt
    return load_qa_chain(llm=OpenAI(), chain_type="stuff")

# Load the chatbot once per session
if "chatbot_instance" not in st.session_state:
    st.session_state.chatbot_instance = load_chatbot()
def main():
    # Hide Streamlit's default menu and footer
    hide_streamlit_style = """
    <style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    </style>
    """
    st.markdown(hide_streamlit_style, unsafe_allow_html=True)
    # Main content
    st.title("Welcome to BinDocs ChatBot! 🤖")

    # Directly specifying the path to the PDF file
    pdf_path = pdf_file_path
    if not os.path.exists(pdf_path):
        st.error("File not found. Please check the file path.")
        return

    if "chat_history" not in st.session_state:
        st.session_state['chat_history'] = []

    display_chat_history(st.session_state['chat_history'])

    st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
    st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
    st.write("<!-- End Spacer -->", unsafe_allow_html=True)

    new_messages_placeholder = st.empty()
    if pdf_path is not None:
        query = st.text_input("Ask questions about your PDF file (in any preferred language):")

        st.markdown("""
        <button class="cloud-button" onclick="document.dispatchEvent(new CustomEvent('cloud_button_event', {detail: 'Was genau ist ein Belegarzt?'}));">
            <span>Was genau ist ein Belegarzt?</span>
        </button>
        <script>
        document.addEventListener('cloud_button_event', function(e) {
            window.streamlitSetComponentValue(e.detail);
        });
        </script>
        """, unsafe_allow_html=True)
if st.button("Wofür wird die Alpha-ID verwendet?"):
query = "Wofür wird die Alpha-ID verwendet?"
if st.button("Ask") or is_cloud_button_pressed or (not st.session_state['chat_history'] and query) or (st.session_state['chat_history'] and query != st.session_state['chat_history'][-1][1]):
if is_cloud_button_pressed:
query = is_cloud_button_pressed
loading_message = st.empty()
loading_message.text('Bot is thinking...')
docs = st.session_state.pdf_data.similarity_search(query=query, k=3)
with get_openai_callback() as cb:
response = st.session_state.chatbot_instance.run(input_documents=docs, question=query)
st.session_state['chat_history'].append(("Bot", response, "new"))
# Display new messages at the bottom
new_messages = st.session_state['chat_history'][-2:]
for chat in new_messages:
background_color = "#FFA07A" if chat[2] == "new" else "#acf" if chat[0] == "User" else "#caf"
new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
# Scroll to the latest response using JavaScript
st.write("<script>document.getElementById('response').scrollIntoView();</script>", unsafe_allow_html=True)
loading_message.empty()
# Clear the input field by setting the query variable to an empty string
query = ""
# Mark all messages as old after displaying
st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]
def display_chat_history(chat_history):
    for chat in chat_history:
        background_color = "#FFA07A" if chat[2] == "new" else "#acf" if chat[0] == "User" else "#caf"
        st.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
if __name__ == "__main__":
    main()