Anne31415 committed · verified
Commit 197dc26 · 1 Parent(s): 696f4d3

Update app.py

Files changed (1)
  1. app.py +351 -228
app.py CHANGED
@@ -2,7 +2,6 @@ import streamlit as st
 from PIL import Image
 import random
 import time
-import streamlit_analytics
 from dotenv import load_dotenv
 import pickle
 from huggingface_hub import Repository
@@ -15,6 +14,9 @@ from langchain.llms import OpenAI
 from langchain.chains.question_answering import load_qa_chain
 from langchain.callbacks import get_openai_callback
 import os
+import uuid
+import json
+
 
 import pandas as pd
 import pydeck as pdk
@@ -30,6 +32,10 @@ if 'chat_history_page2' not in st.session_state:
 if 'chat_history_page3' not in st.session_state:
     st.session_state['chat_history_page3'] = []
 
+# This session ID will be unique per user session and consistent across all pages.
+if 'session_id' not in st.session_state:
+    st.session_state['session_id'] = str(uuid.uuid4())
+
 
 
 # Step 1: Clone the Dataset Repository
@@ -41,6 +47,17 @@ repo = Repository(
 )
 repo.git_pull() # Pull the latest changes (if any)
 
+
+# Step 1: Clone the ChatSet Repository - save all the chats anonymously
+repo2 = Repository(
+    local_dir="Chat_Store", # Local directory to clone the repository
+    repo_type="dataset", # Specify that this is a dataset repository
+    clone_from="Anne31415/Chat_Store", # Replace with your repository URL
+    token=os.environ["HUB_TOKEN"] # Use the secret token to authenticate
+)
+repo2.git_pull() # Pull the latest changes (if any)
+
+
 # Step 2: Load the PDF File
 pdf_path = "Private_Book/KH_Reform230124.pdf" # Replace with your PDF file path
 
@@ -90,7 +107,6 @@ def load_vector_store(file_path, store_name, force_reload=False):
         repo.git_add(f"{store_name}.pkl") # Use just the file name
         repo.git_commit(f"Update vector store: {store_name}")
         repo.git_push()
-        #st.text("Committed and pushed vector store to repository.")
     except Exception as e:
         st.error(f"Error during Git operations: {e}")
     finally:
@@ -171,11 +187,55 @@ def handle_no_answer(response):
 
 def ask_bot(query):
     # Definiere den standardmäßigen Prompt
-    standard_prompt = "Schreibe immer höflich und auf antworte immer in der Sprache in der der User auch schreibt. Formuliere immer ganze freundliche ganze Sätze und biete wenn möglich auch mehr Informationen (aber nicht mehr als 1 Satz mehr). Wenn der User sehr vage schreibt frage nach."
+    standard_prompt = "Schreibe immer höflich und auf antworte immer in der Sprache in der der User auch schreibt. Formuliere immer ganze freundliche ganze Sätze und biete wenn möglich auch mehr Informationen (aber nicht mehr als 1 Satz mehr). Wenn der User sehr vage schreibt frage nach. Wenn du zu einer bestimmten Frage Daten aus mehreren Jahren hast, frage den User für welche Jahre er sich interessiert und nenne ihm natürlich Möglichkeiten über die Jahre die du hast. "
     # Kombiniere den standardmäßigen Prompt mit der Benutzeranfrage
     full_query = standard_prompt + query
     return full_query
+
+def save_conversation(chat_histories, session_id):
+    base_path = "Chat_Store/conversation_logs"
+    if not os.path.exists(base_path):
+        os.makedirs(base_path)
+
+    filename = f"{base_path}/{session_id}.json"
+
+    # Check if the log file already exists
+    existing_data = {"page1": [], "page2": [], "page3": []}
+    if os.path.exists(filename):
+        with open(filename, 'r', encoding='utf-8') as file:
+            existing_data = json.load(file)
+
+    # Append the new chat history to the existing data for each page
+    for page_number, chat_history in enumerate(chat_histories, start=1):
+        existing_data[f"page{page_number}"] += chat_history
+
+    with open(filename, 'w', encoding='utf-8') as file:
+        json.dump(existing_data, file, indent=4, ensure_ascii=False)
+
+    # Git operations
+    try:
+        # Change directory to Chat_Store for Git operations
+        original_dir = os.getcwd()
+        os.chdir('Chat_Store')
+
+        # Correct file path relative to the Git repository's root
+        git_file_path = f"conversation_logs/{session_id}.json"
+
+        repo2.git_add(git_file_path)
+        repo2.git_commit(f"Add/update conversation log for session {session_id}")
+        repo2.git_push()
+
+        # Change back to the original directory
+        os.chdir(original_dir)
+    except Exception as e:
+        st.error(f"Error during Git operations: {e}")
+
+
+def display_session_id():
+    session_id = st.session_state['session_id']
+    st.sidebar.markdown(f"**Your Session ID:** `{session_id}`")
+    st.sidebar.markdown("Use this ID for reference in communications or feedback.")
+
 
 def page1():
     try:
@@ -199,88 +259,97 @@ def page1():
         st.image(image, use_column_width='always')
 
 
-        # Start tracking user interactions
-        with streamlit_analytics.track():
-            if not os.path.exists(pdf_path):
-                st.error("File not found. Please check the file path.")
-                return
-
-            VectorStore = load_vector_store(pdf_path, "KH_Reform_2301", force_reload=False)
-
-            display_chat_history(st.session_state['chat_history_page1'])
-
-            st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
-            st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
-            st.write("<!-- End Spacer -->", unsafe_allow_html=True)
-
-            new_messages_placeholder = st.empty()
-
-            query = st.text_input("Geben Sie hier Ihre Frage ein / Enter your question here:")
-
-            add_vertical_space(2) # Adjust as per the desired spacing
-
-            # Create two columns for the buttons
-            col1, col2 = st.columns(2)
-
-            with col1:
-                if st.button("Wie viele Ärzte benötigt eine Klinik in der Leistungsgruppe Stammzell-transplantation?"):
-                    query = "Wie viele Ärzte benötigt eine Klinik in der Leistungsgruppe Stammzell-transplantation?"
-                if st.button("Wie viele Leistungsgruppen soll es durch die neue Krankenhaus-Reform geben?"):
-                    query = ("Wie viele Leistungsgruppen soll es durch die neue Krankenhaus-Reform geben?")
-                if st.button("Was sind die hauptsächlichen Änderungsvorhaben der Krankenhausreform?"):
-                    query = "Was sind die hauptsächlichen Änderungsvorhaben der Krankenhausreform?"
-
-            with col2:
-                if st.button("Welche und wieviele Fachärzte benötige ich für die Leistungsgruppe Pädiatrie? "):
-                    query = "Welche und wieviele Fachärzte benötige ich für die Leistungsgruppe Pädiatrie"
-                if st.button("Was soll die Reform der Notfallversorgung beinhalten?"):
-                    query = "Was soll die Reform der Notfallversorgung beinhalten?"
-                if st.button("Was bedeutet die Vorhaltefinanzierung?"):
-                    query = "Was bedeutet die Vorhaltefinanzierung?"
-
-            if query:
-                full_query = ask_bot(query)
-                st.session_state['chat_history_page1'].append(("User", query, "new"))
-
-                # Start timing
-                start_time = time.time()
-
-                with st.spinner('Bot is thinking...'):
-                    chain = load_chatbot()
-                    docs = VectorStore.similarity_search(query=query, k=5)
-                    with get_openai_callback() as cb:
-                        response = chain.run(input_documents=docs, question=full_query)
-                        response = handle_no_answer(response) # Process the response through the new function
-
-                # Stop timing
-                end_time = time.time()
-
-                # Calculate duration
-                duration = end_time - start_time
-
-                # You can use Streamlit's text function to display the timing
-                st.text(f"Response time: {duration:.2f} seconds")
-
-                st.session_state['chat_history_page1'].append(("Bot", response, "new"))
-
-                # Display new messages at the bottom
-                new_messages = st.session_state['chat_history_page1'][-2:]
-                for chat in new_messages:
-                    background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
-                    new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
-
-                # Clear the input field after the query is made
-                query = ""
-
-            # Mark all messages as old after displaying
-            st.session_state['chat_history_page1'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page1']]
+        if not os.path.exists(pdf_path):
+            st.error("File not found. Please check the file path.")
+            return
+
+        VectorStore = load_vector_store(pdf_path, "KH_Reform_2301", force_reload=False)
+
+        display_chat_history(st.session_state['chat_history_page1'])
+
+        st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
+        st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
+        st.write("<!-- End Spacer -->", unsafe_allow_html=True)
+
+        new_messages_placeholder = st.empty()
+
+        query = st.text_input("Geben Sie hier Ihre Frage ein / Enter your question here:")
+
+        add_vertical_space(2) # Adjust as per the desired spacing
+
+        # Create two columns for the buttons
+        col1, col2 = st.columns(2)
+
+        with col1:
+            if st.button("Wie viele Ärzte benötigt eine Klinik in der Leistungsgruppe Stammzell-transplantation?"):
+                query = "Wie viele Ärzte benötigt eine Klinik in der Leistungsgruppe Stammzell-transplantation?"
+            if st.button("Wie viele Leistungsgruppen gibt es?"):
+                query = ("Wie viele Leistungsgruppen gibt es?")
+            if st.button("Was sind die hauptsächlichen Änderungsvorhaben der Krankenhausreform?"):
+                query = "Was sind die hauptsächlichen Änderungsvorhaben der Krankenhausreform?"
+
+        with col2:
+            if st.button("Welche und wieviele Fachärzte benötige ich für die Leistungsgruppe Pädiatrie? "):
+                query = "Welche und wieviele Fachärzte benötige ich für die Leistungsgruppe Pädiatrie"
+            if st.button("Was soll die Reform der Notfallversorgung beinhalten?"):
+                query = "Was soll die Reform der Notfallversorgung beinhalten?"
+            if st.button("Was bedeutet die Vorhaltefinanzierung?"):
+                query = "Was bedeutet die Vorhaltefinanzierung?"
+
+        if query:
+            full_query = ask_bot(query)
+            st.session_state['chat_history_page1'].append(("User", query, "new"))
+
+            # Start timing
+            start_time = time.time()
+
+            with st.spinner('Bot is thinking...'):
+                chain = load_chatbot()
+                docs = VectorStore.similarity_search(query=query, k=5)
+                with get_openai_callback() as cb:
+                    response = chain.run(input_documents=docs, question=full_query)
+                    response = handle_no_answer(response) # Process the response through the new function
+
+            # Stop timing
+            end_time = time.time()
+
+            # Calculate duration
+            duration = end_time - start_time
+            st.text(f"Response time: {duration:.2f} seconds")
+
+            st.session_state['chat_history_page1'].append(("Bot", response, "new"))
+
+            # Combine chat histories from all pages
+            all_chat_histories = [
+                st.session_state['chat_history_page1'],
+                st.session_state['chat_history_page2'],
+                st.session_state['chat_history_page3']
+            ]
+
+            # Save the combined chat histories
+            save_conversation(all_chat_histories, st.session_state['session_id'])
+
+            # Display new messages at the bottom
+            new_messages = st.session_state['chat_history_page1'][-2:]
+            for chat in new_messages:
+                background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
+                new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
+
+            # Clear the input field after the query is made
+            query = ""
+
+        # Mark all messages as old after displaying
+        st.session_state['chat_history_page1'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page1']]
 
     except Exception as e:
         st.error(f"Upsi, an unexpected error occurred: {e}")
@@ -309,90 +378,96 @@ def page2():
         image = Image.open('BinDoc Logo (Quadratisch).png')
         st.image(image, use_column_width='always')
 
-
-        # Start tracking user interactions
-        with streamlit_analytics.track():
-
-            if not os.path.exists(pdf_path2):
-                st.error("File not found. Please check the file path.")
-                return
-
-            VectorStore = load_vector_store(pdf_path2, "Buch_2301", force_reload=False)
-
-            display_chat_history(st.session_state['chat_history_page2'])
-
-            st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
-            st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
-            st.write("<!-- End Spacer -->", unsafe_allow_html=True)
-
-            new_messages_placeholder = st.empty()
-
-            query = st.text_input("Ask questions about your PDF file (in any preferred language):")
-
-            add_vertical_space(2) # Adjust as per the desired spacing
-
-            # Create two columns for the buttons
-            col1, col2 = st.columns(2)
-
-            with col1:
-                if st.button("Nenne mir 5 wichtige Personalkennzahlen im Krankenhaus."):
-                    query = "Nenne mir 5 wichtige Personalkennzahlen im Krankenhaus."
-                if st.button("Wie ist die durchschnittliche Bettenauslastung eines Krankenhauses?"):
-                    query = ("Wie ist die durchschnittliche Bettenauslastung eines Krankenhauses?")
-                if st.button("Welches sind die häufigsten DRGs, die von den Krankenhäusern abgerechnet werden?"):
-                    query = "Welches sind die häufigsten DRGs, die von den Krankenhäusern abgerechnet werden? "
-
-            with col2:
-                if st.button("Wie viel Casemixpunkte werden im Median von einer ärztlichen Vollkraft erbracht?"):
-                    query = "Wie viel Casemixpunkte werden im Median von einer ärztlichen Vollkraft erbracht?"
-                if st.button("Bitte erstelle mir einer Übersicht der wichtiger Strukturkennzahlen eines Krankenhauses der Grund- und Regelversorgung."):
-                    query = "Bitte erstelle mir einer Übersicht der wichtiger Strukturkennzahlen eines Krankenhauses der Grund- und Regelversorgung."
-                if st.button("Wie viele Patienten eines Grund- und Regelversorgers kommen aus welcher Fahrzeitzone?"):
-                    query = "Wie viele Patienten eines Grund- und Regelversorgers kommen aus welcher Fahrzeitzone?"
-
-            if query:
-                full_query = ask_bot(query)
-                st.session_state['chat_history_page2'].append(("User", query, "new"))
-
-                # Start timing
-                start_time = time.time()
-
-                with st.spinner('Bot is thinking...'):
-                    chain = load_chatbot()
-                    docs = VectorStore.similarity_search(query=query, k=5)
-                    with get_openai_callback() as cb:
-                        response = chain.run(input_documents=docs, question=full_query)
-                        response = handle_no_answer(response) # Process the response through the new function
-
-                # Stop timing
-                end_time = time.time()
-
-                # Calculate duration
-                duration = end_time - start_time
-
-                # You can use Streamlit's text function to display the timing
-                st.text(f"Response time: {duration:.2f} seconds")
-
-                st.session_state['chat_history_page2'].append(("Bot", response, "new"))
-
-                # Display new messages at the bottom
-                new_messages = st.session_state['chat_history_page2'][-2:]
-                for chat in new_messages:
-                    background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
-                    new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
-
-                # Clear the input field after the query is made
-                query = ""
-
-            # Mark all messages as old after displaying
-            st.session_state['chat_history_page2'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page2']]
+        if not os.path.exists(pdf_path2):
+            st.error("File not found. Please check the file path.")
+            return
+
+        VectorStore = load_vector_store(pdf_path2, "Buch_2301", force_reload=False)
+
+        display_chat_history(st.session_state['chat_history_page2'])
+
+        st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
+        st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
+        st.write("<!-- End Spacer -->", unsafe_allow_html=True)
+
+        new_messages_placeholder = st.empty()
+
+        query = st.text_input("Ask questions about your PDF file (in any preferred language):")
+
+        add_vertical_space(2) # Adjust as per the desired spacing
+
+        # Create two columns for the buttons
+        col1, col2 = st.columns(2)
+
+        with col1:
+            if st.button("Nenne mir 5 wichtige Personalkennzahlen im Krankenhaus."):
+                query = "Nenne mir 5 wichtige Personalkennzahlen im Krankenhaus."
+            if st.button("Wie ist die durchschnittliche Bettenauslastung eines Krankenhauses?"):
+                query = ("Wie ist die durchschnittliche Bettenauslastung eines Krankenhauses?")
+            if st.button("Welches sind die häufigsten DRGs, die von den Krankenhäusern abgerechnet werden?"):
+                query = "Welches sind die häufigsten DRGs, die von den Krankenhäusern abgerechnet werden? "
+
+        with col2:
+            if st.button("Wie viel Casemixpunkte werden im Median von einer ärztlichen Vollkraft erbracht?"):
+                query = "Wie viel Casemixpunkte werden im Median von einer ärztlichen Vollkraft erbracht?"
+            if st.button("Bitte erstelle mir einer Übersicht der wichtiger Strukturkennzahlen eines Krankenhauses der Grund- und Regelversorgung."):
+                query = "Bitte erstelle mir einer Übersicht der wichtiger Strukturkennzahlen eines Krankenhauses der Grund- und Regelversorgung."
+            if st.button("Wie viele Patienten eines Grund- und Regelversorgers kommen aus welcher Fahrzeitzone?"):
+                query = "Wie viele Patienten eines Grund- und Regelversorgers kommen aus welcher Fahrzeitzone?"
+
+        if query:
+            full_query = ask_bot(query)
+            st.session_state['chat_history_page2'].append(("User", query, "new"))
+
+            # Start timing
+            start_time = time.time()
+
+            with st.spinner('Bot is thinking...'):
+                chain = load_chatbot()
+                docs = VectorStore.similarity_search(query=query, k=5)
+                with get_openai_callback() as cb:
+                    response = chain.run(input_documents=docs, question=full_query)
+                    response = handle_no_answer(response) # Process the response through the new function
+
+            # Stop timing
+            end_time = time.time()
+
+            # Calculate duration
+            duration = end_time - start_time
+            st.text(f"Response time: {duration:.2f} seconds")
+
+            st.session_state['chat_history_page2'].append(("Bot", response, "new"))
+
+            # Combine chat histories from all pages
+            all_chat_histories = [
+                st.session_state['chat_history_page1'],
+                st.session_state['chat_history_page2'],
+                st.session_state['chat_history_page3']
+            ]
+
+            # Save the combined chat histories
+            save_conversation(all_chat_histories, st.session_state['session_id'])
+
+            # Display new messages at the bottom
+            new_messages = st.session_state['chat_history_page2'][-2:]
+            for chat in new_messages:
+                background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
+                new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
+
+            # Clear the input field after the query is made
+            query = ""
+
+        # Mark all messages as old after displaying
+        st.session_state['chat_history_page2'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page2']]
 
     except Exception as e:
         st.error(f"Upsi, an unexpected error occurred: {e}")
@@ -421,107 +496,153 @@ def page3():
         image = Image.open('BinDoc Logo (Quadratisch).png')
         st.image(image, use_column_width='always')
 
-
-        # Start tracking user interactions
-        with streamlit_analytics.track():
-
-            if not os.path.exists(pdf_path2):
-                st.error("File not found. Please check the file path.")
-                return
-
-            VectorStore = load_vector_store(pdf_path3, "Kosten_Str_2301", force_reload=False)
-
-            display_chat_history(st.session_state['chat_history_page3'])
-
-            st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
-            st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
-            st.write("<!-- End Spacer -->", unsafe_allow_html=True)
-
-            new_messages_placeholder = st.empty()
-
-            query = st.text_input("Ask questions about your PDF file (in any preferred language):")
-
-            add_vertical_space(2) # Adjust as per the desired spacing
-
-            # Create two columns for the buttons
-            col1, col2 = st.columns(2)
-
-            with col1:
-                if st.button("Wie hat sich die Bettenanzahl in den letzten 10 Jahren entwickelt?"):
-                    query = "Wie hat sich die Bettenanzahl in den letzten 10 Jahren entwickelt?"
-                if st.button("Wie viele Patienten werden pro Jahr vollstationär behandelt?"):
-                    query = ("Wie viele Patienten werden pro Jahr vollstationär behandelt?")
-                if st.button("Wie viele Vollkräfte arbeiten in Summe in deutschen Krankenhäusern?"):
-                    query = "Wie viele Vollkräfte arbeiten in Summe in deutschen Krankenhäusern? "
-
-            with col2:
-                if st.button("Welche unterschiedlichen Personalkosten gibt es im Krankenhaus?"):
-                    query = "Welche unterschiedlichen Personalkosten gibt es im Krankenhaus?"
-                if st.button("Welche Sachkosten werden in Krankenhäusern unterschieden?"):
-                    query = "Welche Sachkosten werden in Krankenhäusern unterschieden? "
-                if st.button("Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"):
-                    query = "Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"
-
-            if query:
-                full_query = ask_bot(query)
-                st.session_state['chat_history_page3'].append(("User", query, "new"))
-
-                # Start timing
-                start_time = time.time()
-
-                with st.spinner('Bot is thinking...'):
-                    chain = load_chatbot()
-                    docs = VectorStore.similarity_search(query=query, k=5)
-                    with get_openai_callback() as cb:
-                        response = chain.run(input_documents=docs, question=full_query)
-                        response = handle_no_answer(response) # Process the response through the new function
-
-                # Stop timing
-                end_time = time.time()
-
-                # Calculate duration
-                duration = end_time - start_time
-
-                # You can use Streamlit's text function to display the timing
-                st.text(f"Response time: {duration:.2f} seconds")
-
-                st.session_state['chat_history_page3'].append(("Bot", response, "new"))
-
-                # Display new messages at the bottom
-                new_messages = st.session_state['chat_history_page3'][-2:]
-                for chat in new_messages:
-                    background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
-                    new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
-
-                # Clear the input field after the query is made
-                query = ""
-
-            # Mark all messages as old after displaying
-            st.session_state['chat_history_page3'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page3']]
+        if not os.path.exists(pdf_path2):
+            st.error("File not found. Please check the file path.")
+            return
+
+        VectorStore = load_vector_store(pdf_path3, "Kosten_Str_2301", force_reload=False)
+
+        display_chat_history(st.session_state['chat_history_page3'])
+
+        st.write("<!-- Start Spacer -->", unsafe_allow_html=True)
+        st.write("<div style='flex: 1;'></div>", unsafe_allow_html=True)
+        st.write("<!-- End Spacer -->", unsafe_allow_html=True)
+
+        new_messages_placeholder = st.empty()
+
+        query = st.text_input("Ask questions about your PDF file (in any preferred language):")
+
+        add_vertical_space(2) # Adjust as per the desired spacing
+
+        # Create two columns for the buttons
+        col1, col2 = st.columns(2)
+
+        with col1:
+            if st.button("Wie hat sich die Bettenanzahl in den letzten 10 Jahren entwickelt?"):
+                query = "Wie hat sich die Bettenanzahl in den letzten 10 Jahren entwickelt?"
+            if st.button("Wie viele Patienten werden pro Jahr vollstationär behandelt?"):
+                query = ("Wie viele Patienten werden pro Jahr vollstationär behandelt?")
+            if st.button("Wie viele Vollkräfte arbeiten in Summe in deutschen Krankenhäusern?"):
+                query = "Wie viele Vollkräfte arbeiten in Summe in deutschen Krankenhäusern? "
+
+        with col2:
+            if st.button("Welche unterschiedlichen Personalkosten gibt es im Krankenhaus?"):
+                query = "Welche unterschiedlichen Personalkosten gibt es im Krankenhaus?"
+            if st.button("Welche Sachkosten werden in Krankenhäusern unterschieden?"):
+                query = "Welche Sachkosten werden in Krankenhäusern unterschieden? "
+            if st.button("Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"):
+                query = "Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"
+
+        if query:
+            full_query = ask_bot(query)
+            st.session_state['chat_history_page3'].append(("User", query, "new"))
+
+            # Start timing
+            start_time = time.time()
+
+            with st.spinner('Bot is thinking...'):
+                chain = load_chatbot()
+                docs = VectorStore.similarity_search(query=query, k=5)
+                with get_openai_callback() as cb:
+                    response = chain.run(input_documents=docs, question=full_query)
+                    response = handle_no_answer(response) # Process the response through the new function
+
+            # Stop timing
+            end_time = time.time()
+
+            # Calculate duration
+            duration = end_time - start_time
+            st.text(f"Response time: {duration:.2f} seconds")
+
+            st.session_state['chat_history_page3'].append(("Bot", response, "new"))
+
+            # Combine chat histories from all pages
+            all_chat_histories = [
+                st.session_state['chat_history_page1'],
+                st.session_state['chat_history_page2'],
+                st.session_state['chat_history_page3']
+            ]
+
+            # Save the combined chat histories
+            save_conversation(all_chat_histories, st.session_state['session_id'])
+
+            # Display new messages at the bottom
+            new_messages = st.session_state['chat_history_page3'][-2:]
+            for chat in new_messages:
+                background_color = "#ffeecf" if chat[2] == "new" else "#ffeecf" if chat[0] == "User" else "#ffeecf"
+                new_messages_placeholder.markdown(f"<div style='background-color: {background_color}; padding: 10px; border-radius: 10px; margin: 10px;'>{chat[0]}: {chat[1]}</div>", unsafe_allow_html=True)
+
+            # Clear the input field after the query is made
+            query = ""
+
+        # Mark all messages as old after displaying
+        st.session_state['chat_history_page3'] = [(sender, msg, "old") for sender, msg, _ in st.session_state['chat_history_page3']]
 
     except Exception as e:
         st.error(f"Upsi, an unexpected error occurred: {e}")
         # Optionally log the exception details to a file or error tracking service
 
+def page4():
+    try:
+        st.header(":mailbox: Get In Touch With Me!")
+        st.markdown("Ihre Session-ID finden Sie auf der linken Seite!")
+
+        contact_form = """
+        <form action="https://formsubmit.co/anne.demond@googlemail.com" method="POST">
+            <input type="hidden" name="_captcha" value="false">
+            <input type="text" name="Session-ID" placeholder="Your Session-ID goes here" required>
+            <input type="email" name="email" placeholder="Your email" required>
+            <textarea name="message" placeholder="Your message here"></textarea>
+            <button type="submit">Send</button>
+        </form>
+        """
+
+        st.markdown(contact_form, unsafe_allow_html=True)
+
+        # Use Local CSS File
+        def local_css(file_name):
+            with open(file_name) as f:
+                st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)
+
+        local_css("style.css")
+
+    except Exception as e:
+        st.error(f"Upsi, an unexpected error occurred: {e}")
+        # Optionally log the exception details to a file or error tracking service
+
 
+def display_session_id():
+    session_id = st.session_state['session_id']
+    st.sidebar.markdown(f"**Your Session ID:** `{session_id}`")
+    st.sidebar.markdown("Use this ID for reference in communications or feedback.")
 
+# Main function
 def main():
     # Sidebar content
     with st.sidebar:
         st.title('BinDoc GmbH')
         st.markdown("Experience revolutionary interaction with BinDocs Chat App, leveraging state-of-the-art AI technology.")
         add_vertical_space(1)
-        page = st.sidebar.selectbox("Choose a page", ["KH_Reform", "Kennzahlenbuch 100 Kennzahlen", "Kosten- und Strukturdaten der Krankenhäuser"])
-        add_vertical_space(1)
+        page = st.sidebar.selectbox("Choose a page", ["KH_Reform", "Kennzahlenbuch 100 Kennzahlen", "Kosten- und Strukturdaten der Krankenhäuser", "Form"])
+        add_vertical_space(4)
+        display_session_id() # Display the session ID in the sidebar
         st.write('Made with ❤️ by BinDoc GmbH')
 
+
     # Main area content based on page selection
     if page == "KH_Reform":
         page1()
@@ -529,6 +650,8 @@ def main():
         page2()
     elif page == "Kosten- und Strukturdaten der Krankenhäuser":
         page3()
+    elif page == "Form":
+        page4()
 
 
 if __name__ == "__main__":
 