File size: 9,336 Bytes
12d891e
8f2ad06
5a28160
d3a0859
f6250a9
 
 
 
 
 
 
 
 
 
 
ae1d88c
73d7e50
c393edc
 
f6250a9
 
 
 
 
 
 
 
73d7e50
f6250a9
d3a0859
f6250a9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d3a0859
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f6250a9
 
 
 
d3a0859
 
 
f6250a9
 
 
d3a0859
 
 
 
 
 
 
f6250a9
 
 
 
 
b06fd67
 
 
 
 
 
 
 
 
c7ab214
 
7350525
 
c7ab214
6b3cf69
c7ab214
 
 
 
 
 
 
 
 
 
6b3cf69
7350525
 
c7ab214
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6b3cf69
7350525
 
b06fd67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5a28160
b06fd67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f6250a9
b06fd67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f6250a9
b06fd67
 
 
 
 
 
 
 
 
f26adcb
b06fd67
 
 
 
 
 
 
 
f6250a9
b06fd67
 
 
f6250a9
 
 
 
f26adcb
f6250a9
 
b06fd67
f6250a9
d3a0859
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
import streamlit as st
from PIL import Image
import time
import streamlit_analytics
from dotenv import load_dotenv
import pickle
from huggingface_hub import Repository
from PyPDF2 import PdfReader
from streamlit_extras.add_vertical_space import add_vertical_space
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.llms import OpenAI
from langchain.chains.question_answering import load_qa_chain
from langchain.callbacks import get_openai_callback
import os

st.set_page_config(layout="wide")

# Step 1: Clone the Dataset Repository
# Module-level side effect: clones/pulls the private HF dataset holding the PDF.
# Raises KeyError at import time if HUB_TOKEN is not set in the environment.
repo = Repository(
    local_dir="Private_Book",  # Local directory to clone the repository
    repo_type="dataset",  # Specify that this is a dataset repository
    clone_from="Anne31415/Private_Book",  # Replace with your repository URL
    token=os.environ["HUB_TOKEN"]  # Use the secret token to authenticate
)
repo.git_pull()  # Pull the latest changes (if any)

# Step 2: Load the PDF File
# Path into the freshly cloned repo; existence is checked later in main().
pdf_path = "Private_Book/KOMBI_all2.pdf"  # Replace with your PDF file path

# Sidebar: static branding and usage hints, plus API-key retrieval.
with st.sidebar:
    st.title('BinDoc GmbH')
    st.markdown("Experience revolutionary interaction with BinDocs Chat App, leveraging state-of-the-art AI technology.")
    
    add_vertical_space(1)  # Adjust as per the desired spacing
    
    st.markdown("""
    Hello! I’m here to assist you with:<br><br>
    📘 **Glossary Inquiries:**<br>
    I can clarify terms like "DiGA", "AOP", or "BfArM", providing clear and concise explanations to help you understand our content better.<br><br>
    🆘 **Help Page Navigation:**<br>
    Ask me if you forgot your password or want to know more about topics related to the platform.<br><br>
    📰 **Latest Whitepapers Insights:**<br>
    Curious about our recent publications? Feel free to ask about our latest whitepapers!<br><br>
    """, unsafe_allow_html=True)
    
    add_vertical_space(1)  # Adjust as per the desired spacing

    st.write('Made with ❤️ by BinDoc GmbH')

    # Read the OpenAI key from the environment (populated via load_dotenv /
    # Streamlit secrets); OpenAIEmbeddings/OpenAI pick it up implicitly later.
    api_key = os.getenv("OPENAI_API_KEY")
    # Retrieve the API key from st.secrets
# Cache results across sessions (persist="disk") so the FAISS index is only
# rebuilt when the arguments change. NOTE: the decorator must sit directly on
# the def — the original had blank lines in between, which obscured that
# load_vector_store was the decorated function.
@st.cache_data(persist="disk")
def load_vector_store(file_path, store_name, force_reload=False):
    """Load a FAISS vector store for the PDF at *file_path*, building it if needed.

    The store is pickled to ``{store_name}.pkl`` next to the app. On later calls
    the pickle is loaded instead of re-embedding, unless *force_reload* is True
    or the pickle is missing.

    Args:
        file_path: Path to the source PDF.
        store_name: Basename (without extension) for the on-disk pickle.
        force_reload: Rebuild even if a pickle already exists (e.g. PDF changed).

    Returns:
        A FAISS vector store over ~1000-character chunks of the PDF text.
    """
    if force_reload or not os.path.exists(f"{store_name}.pkl"):
        # Overlapping chunks preserve context that straddles chunk borders.
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=1000,
            chunk_overlap=200,
            length_function=len
        )

        text = load_pdf_text(file_path)
        chunks = text_splitter.split_text(text=text)

        embeddings = OpenAIEmbeddings()  # requires OPENAI_API_KEY in the env
        vector_store = FAISS.from_texts(chunks, embedding=embeddings)
        with open(f"{store_name}.pkl", "wb") as f:
            pickle.dump(vector_store, f)
    else:
        # Unpickling is acceptable here only because the file was written by
        # this same app; never load pickles from untrusted sources.
        with open(f"{store_name}.pkl", "rb") as f:
            vector_store = pickle.load(f)

    return vector_store

# Utility function to load text from a PDF
def load_pdf_text(file_path):
    """Return the concatenated text of every page in the PDF at *file_path*.

    Pages where extraction fails (``extract_text()`` returns None, e.g.
    image-only pages) contribute an empty string instead of raising.
    """
    pdf_reader = PdfReader(file_path)
    # "".join avoids the quadratic cost of repeated `text += ...` in a loop.
    return "".join(page.extract_text() or "" for page in pdf_reader.pages)

def load_chatbot():
    """Build the QA chain that answers a question from retrieved PDF chunks."""
    llm = OpenAI()
    # "stuff" simply concatenates all retrieved documents into one prompt.
    return load_qa_chain(llm=llm, chain_type="stuff")

# All chat bubbles share one background; the original conditional branched on
# status/sender but every branch yielded this same value.
_BUBBLE_COLOR = "#ffeecf"

# Canned questions rendered as one-click shortcut buttons below the text input.
# Pressing one overrides whatever is typed in the free-text field for that run.
_SUGGESTED_QUERIES = (
    "Was genau ist ein Belegarzt?",
    "Wofür wird die Alpha-ID verwendet?",
    "Was sind die Vorteile des ambulanten Operierens?",
    "Was kann ich mit dem Prognose-Analyse-Tool machen?",
    "Was sagt mir die Farbe der Balken der Bevölkerungsentwicklung?",
    "Ich habe mein Meta-Password vergessen, wie kann ich es zurücksetzen?",
)


def _render_header():
    """Hide default Streamlit chrome and draw the title/logo header row."""
    hide_streamlit_style = """
                <style>
                #MainMenu {visibility: hidden;}
                footer {visibility: hidden;}
                </style>
                """
    st.markdown(hide_streamlit_style, unsafe_allow_html=True)

    # Two-column layout: wide title column, narrow logo column.
    col1, col2 = st.columns([0.8, 0.2])

    with col1:
        # Use custom CSS to align the text vertically
        st.markdown("""
                <style>
                .title {
                    display: flex;
                    align-items: center;  /* This will vertically center the title in the column */
                    height: 100%;
                }
                </style>
                <div class="title">
                    <h1>Welcome to BinDocs ChatBot!</h1>
                </div>
                """, unsafe_allow_html=True)

    with col2:
        # Align the image to the center of the column
        st.markdown("""
                <style>
                .logo {
                    display: flex;
                    align-items: center;  /* This will vertically center the logo in the column */
                    height: 100%;
                }
                img {
                    vertical-align: middle;  /* Align the image vertically */
                }
                </style>
                <div class="logo">
                    <img src="BinDoc Logo (Quadratisch).png" alt="BinDocs Logo" width="100%">  /* Adjust the path and size as needed */
                </div>
                """, unsafe_allow_html=True)


def main():
    """Entry point: render the header and run the PDF question-answering chat UI."""
    try:
        _render_header()

        # Start tracking user interactions
        with streamlit_analytics.track():
            if not os.path.exists(pdf_path):
                st.error("File not found. Please check the file path.")
                return

            VectorStore = load_vector_store(pdf_path, "my_vector_store", force_reload=False)

            if "chat_history" not in st.session_state:
                st.session_state['chat_history'] = []

            display_chat_history(st.session_state['chat_history'])

            # Flexible spacer pushes the newest messages toward the bottom.
            st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
            st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
            st.write("<!-- End Spacer -->", unsafe_allow_html=True)

            new_messages_placeholder = st.empty()

            query = st.text_input("Ask questions about your PDF file (in any preferred language):")

            # Data-driven shortcut buttons replace six copy-pasted if-blocks;
            # rendering order and override semantics are unchanged.
            for suggestion in _SUGGESTED_QUERIES:
                if st.button(suggestion):
                    query = suggestion

            if query:
                st.session_state['chat_history'].append(("User", query, "new"))

                # Time the full retrieval + generation round trip.
                start_time = time.time()

                with st.spinner('Bot is thinking...'):
                    chain = load_chatbot()
                    # Retrieve the 3 most similar chunks as LLM context.
                    docs = VectorStore.similarity_search(query=query, k=3)
                    with get_openai_callback() as cb:
                        response = chain.run(input_documents=docs, question=query)

                duration = time.time() - start_time
                st.text(f"Response time: {duration:.2f} seconds")

                st.session_state['chat_history'].append(("Bot", response, "new"))

                # Display the newest user/bot exchange in the placeholder.
                new_messages = st.session_state['chat_history'][-2:]
                for chat in new_messages:
                    new_messages_placeholder.markdown(f"<div style='background-color: {_BUBBLE_COLOR}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)

                # NOTE(review): this only rebinds the local; Streamlit's
                # text_input keeps its widget state until the next rerun.
                query = ""

            # Mark all messages as old after displaying
            st.session_state['chat_history'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history']]

    except Exception as e:
        # Top-level boundary: surface the error in the UI instead of crashing.
        st.error(f"Upsi, an unexpected error occurred: {e}")
        # Optionally log the exception details to a file or error tracking service


def display_chat_history(chat_history):
    """Render each (sender, message, status) tuple in *chat_history* as a bubble.

    Args:
        chat_history: Iterable of 3-tuples; chat[0] is the sender label,
            chat[1] the message text, chat[2] a "new"/"old" status flag.
    """
    # The original ternary branched on status and sender but every branch
    # produced "#ffeecf", so a single hoisted constant preserves behavior.
    background_color = "#ffeecf"
    for chat in chat_history:
        st.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)


# Script entry point: run the Streamlit app when executed directly.
if __name__ == "__main__":
    main()