Anne31415's picture
Update app.py
5d91cf0
raw
history blame
7.64 kB
import streamlit as st
from dotenv import load_dotenv
import pinecone
import pickle
from huggingface_hub import Repository
from PyPDF2 import PdfReader
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback
import os
# Load all necessary environment variables at the beginning of the script.
# BUG FIX: load_dotenv was imported but never invoked, so values from a local
# .env file were never visible to os.getenv.
load_dotenv()

PINECONE_API_KEY = os.getenv("PINECONE_API_KEY")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# BUG FIX: the original called pinecone.init(PINECONE_API_KEY="PINECONE_API_KEY"),
# passing the literal string under an invalid keyword. The client expects
# api_key=<actual key value>.
pinecone.init(api_key=PINECONE_API_KEY)

INDEX_NAME = "pdfbot1"

# Create the index once. `dimension` is required by the Pinecone client;
# 1536 matches OpenAI's text-embedding-ada-002 vectors used by
# OpenAIEmbeddings below.
if INDEX_NAME not in pinecone.list_indexes():
    pinecone.create_index(name=INDEX_NAME, dimension=1536, metric="cosine")
# Step 1: clone (or refresh) the private dataset repository holding the PDF.
# Requires a HUB_TOKEN environment variable with read access to the dataset.
dataset_repo = Repository(
    local_dir="Private_Book",
    repo_type="dataset",
    clone_from="Anne31415/Private_Book",
    token=os.environ["HUB_TOKEN"],
)
dataset_repo.git_pull()  # ensure the local checkout is up to date

# Step 2: path of the PDF inside the cloned repository.
pdf_file_path = "Private_Book/Glossar_HELP_DESK_combi.pdf"  # Replace with your PDF file path
# Sidebar: branding plus a short overview of what the bot can help with.
with st.sidebar:
    st.title('BinDoc GmbH')
    st.markdown("Experience revolutionary interaction with BinDocs Chat App, leveraging state-of-the-art AI technology.")
    add_vertical_space(1)  # spacing between the intro and the capability list
    st.markdown("""
Hello! I’m here to assist you with:<br><br>
📘 **Glossary Inquiries:**<br>
I can clarify terms like "DiGA", "AOP", or "BfArM", providing clear and concise explanations to help you understand our content better.<br><br>
🆘 **Help Page Navigation:**<br>
Ask me if you forgot your password or want to know more about topics related to the platform.<br><br>
📰 **Latest Whitepapers Insights:**<br>
Curious about our recent publications? Feel free to ask about our latest whitepapers!<br><br>
""", unsafe_allow_html=True)
    add_vertical_space(1)  # spacing above the footer line
    st.write('Made with ❤️ by BinDoc GmbH')
def load_pdf(file_path):
    """Extract text from ``file_path``, chunk it, and return a FAISS vector store.

    The store is cached on disk as ``<basename>.pkl`` so OpenAI embeddings are
    computed only once per PDF. Freshly computed vectors are additionally
    mirrored to the Pinecone index on a best-effort basis.

    Args:
        file_path: Path to the PDF file to index.

    Returns:
        A LangChain FAISS vector store over the PDF's text chunks.
    """
    pdf_reader = PdfReader(file_path)
    text = ""
    for page in pdf_reader.pages:
        # BUG FIX: extract_text() may return None for pages without a text
        # layer; the original `text += page.extract_text()` would then raise
        # TypeError.
        text += page.extract_text() or ""

    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=1000,
        chunk_overlap=200,
        length_function=len,
    )
    chunks = text_splitter.split_text(text=text)

    store_name, _ = os.path.splitext(os.path.basename(file_path))
    pickle_path = f"{store_name}.pkl"

    if os.path.exists(pickle_path):
        # Reuse the cached store instead of re-embedding (saves OpenAI calls).
        with open(pickle_path, "rb") as f:
            VectorStore = pickle.load(f)
    else:
        embeddings = OpenAIEmbeddings()
        VectorStore = FAISS.from_texts(chunks, embedding=embeddings)
        with open(pickle_path, "wb") as f:
            pickle.dump(VectorStore, f)

        # BUG FIX: the original accessed the non-existent attribute
        # ``VectorStore.vectors`` and the non-existent module function
        # ``pinecone.upsert`` — an AttributeError on every first-time embed.
        # Reconstruct vectors from the underlying FAISS index and upsert via
        # the Index API. Mirroring is best-effort: a Pinecone failure must not
        # break the local FAISS flow, so it is only reported in the UI.
        try:
            raw_index = VectorStore.index
            vectors = raw_index.reconstruct_n(0, raw_index.ntotal)
            items = [(str(i), vec.tolist()) for i, vec in enumerate(vectors)]
            pinecone.Index(INDEX_NAME).upsert(vectors=items)
        except Exception as exc:
            st.warning(f"Pinecone upsert skipped: {exc}")

    return VectorStore
def load_chatbot():
    """Return a question-answering chain ("stuff" strategy) backed by OpenAI."""
    llm = OpenAI()
    return load_qa_chain(llm=llm, chain_type="stuff")
def main():
    """Streamlit entry point: render the chat UI and answer PDF questions."""
    # Hide Streamlit's default hamburger menu and footer.
    hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
"""
    st.markdown(hide_streamlit_style, unsafe_allow_html=True)

    # Main content
    st.title("Welcome to BinDocs ChatBot! 🤖")

    # Directly specifying the path to the PDF file.
    pdf_path = pdf_file_path
    if not os.path.exists(pdf_path):
        st.error("File not found. Please check the file path.")
        return

    if "chat_history" not in st.session_state:
        st.session_state['chat_history'] = []

    display_chat_history(st.session_state['chat_history'])

    # Spacer so freshly generated messages render at the bottom of the page.
    st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
    st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
    st.write("<!-- End Spacer -->", unsafe_allow_html=True)

    new_messages_placeholder = st.empty()

    query = st.text_input("Ask questions about your PDF file (in any preferred language):")

    # Canned questions: clicking a button overrides the free-text query.
    # (Collapses the original six copy-pasted if/button pairs into one loop.)
    canned_questions = (
        "Was genau ist ein Belegarzt?",
        "Wofür wird die Alpha-ID verwendet?",
        "Was sind die Vorteile des ambulanten operierens?",
        "Was kann ich mit dem Prognose-Analyse Toll machen?",
        "Was sagt mir die Farbe der Balken der Bevölkerungsentwicklung?",
        "Ich habe mein Meta Password vergessen, wie kann ich es zurücksetzen?",
    )
    for question in canned_questions:
        if st.button(question):
            query = question

    history = st.session_state['chat_history']
    asked = (
        st.button("Ask")
        or (not history and query)
        or (history and query != history[-1][1])
    )

    # BUG FIX: require a non-empty query — the original ran the chain (and
    # recorded a blank user turn) when "Ask" was clicked with no input.
    if asked and query:
        st.session_state['chat_history'].append(("User", query, "new"))

        loading_message = st.empty()
        loading_message.text('Bot is thinking...')

        VectorStore = load_pdf(pdf_path)
        chain = load_chatbot()

        # Retrieve the 3 most similar chunks from the local FAISS store.
        # BUG FIX: the original then overwrote `docs` with a Pinecone lookup
        # that referenced the undefined names `embeddings` and `texts`,
        # raising NameError on every question; that dead code is removed.
        docs = VectorStore.similarity_search(query=query, k=3)

        with get_openai_callback() as cb:
            response = chain.run(input_documents=docs, question=query)

        st.session_state['chat_history'].append(("Bot", response, "new"))

        # Display the newest user/bot exchange at the bottom.
        for sender, message, status in st.session_state['chat_history'][-2:]:
            background_color = "#FFA07A" if status == "new" else "#acf" if sender == "User" else "#caf"
            new_messages_placeholder.markdown(
                f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{sender}: {message}</div>",
                unsafe_allow_html=True,
            )

        # Scroll to the latest response using JavaScript.
        st.write("<script>document.getElementById('response').scrollIntoView();</script>", unsafe_allow_html=True)

        loading_message.empty()

        # Clear the input field by setting the query variable to an empty string.
        query = ""

    # Mark all messages as old after displaying.
    st.session_state['chat_history'] = [
        (sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']
    ]
def display_chat_history(chat_history):
    """Render each (sender, message, status) triple as a coloured chat bubble."""
    for sender, message, status in chat_history:
        # Unread ("new") messages get a salmon highlight; otherwise the
        # colour depends on who sent the message.
        if status == "new":
            background_color = "#FFA07A"
        elif sender == "User":
            background_color = "#acf"
        else:
            background_color = "#caf"
        st.markdown(
            f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{sender}: {message}</div>",
            unsafe_allow_html=True,
        )


if __name__ == "__main__":
    main()