Update app.py
app.py
CHANGED
@@ -169,8 +169,13 @@ def handle_no_answer(response):
         return random.choice(alternative_responses)  # Randomly select a response
     return response
 
-
-
+def ask_bot(query):
+    # Define the default prompt
+    standard_prompt = "Schreibe immer höflich und auf antworte immer in der Sprache in der der User auch schreibt. Formuliere immer ganze freundliche ganze Sätze und biete wenn möglich auch mehr Informationen (aber nicht mehr als 1 Satz mehr). Wenn der User sehr vage schreibt frage nach - gerade wenn es um Jahre geht."
+    # Combine the default prompt with the user query
+    full_query = standard_prompt + query
+    return full_query
+
 
 def page1():
     try:
@@ -237,6 +242,7 @@ def page1():
 
 
         if query:
+            full_query = ask_bot(query)
             st.session_state['chat_history_page1'].append(("User", query, "new"))
 
             # Start timing
@@ -246,7 +252,7 @@ def page1():
             chain = load_chatbot()
             docs = VectorStore.similarity_search(query=query, k=5)
             with get_openai_callback() as cb:
-                response = chain.run(input_documents=docs, question=query)
+                response = chain.run(input_documents=docs, question=full_query)
                 response = handle_no_answer(response)  # Process the response through the new function
 
 
@@ -282,7 +288,6 @@ def page1():
 
 
 
-
 def page2():
     try:
         hide_streamlit_style = """
@@ -349,7 +354,9 @@ def page2():
         query = "Wie viele Patienten eines Grund- und Regelversorgers kommen aus welcher Fahrzeitzone?"
 
 
+
         if query:
+            full_query = ask_bot(query)
             st.session_state['chat_history_page2'].append(("User", query, "new"))
 
             # Start timing
@@ -359,10 +366,8 @@ def page2():
             chain = load_chatbot()
             docs = VectorStore.similarity_search(query=query, k=5)
             with get_openai_callback() as cb:
-                response = chain.run(input_documents=docs, question=query)
+                response = chain.run(input_documents=docs, question=full_query)
                 response = handle_no_answer(response)  # Process the response through the new function
-
-
 
             # Stop timing
             end_time = time.time()
@@ -461,7 +466,9 @@ def page3():
         query = "Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"
 
 
+
         if query:
+            full_query = ask_bot(query)
             st.session_state['chat_history_page3'].append(("User", query, "new"))
 
             # Start timing
@@ -471,10 +478,8 @@ def page3():
             chain = load_chatbot()
            docs = VectorStore.similarity_search(query=query, k=5)
             with get_openai_callback() as cb:
-                response = chain.run(input_documents=docs, question=query)
+                response = chain.run(input_documents=docs, question=full_query)
                 response = handle_no_answer(response)  # Process the response through the new function
-
-
 
             # Stop timing
             end_time = time.time()
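For reference, a minimal sketch of the pattern this commit introduces on each page: ask_bot prepends a fixed instruction prompt to the user question, and only the chain.run call receives the combined full_query, while the similarity search and the chat history keep the raw query. StubChain and StubStore below are hypothetical stand-ins for the app's load_chatbot() chain and VectorStore, not the real LangChain objects, and the instruction prompt is abridged.

# Minimal sketch of the prompt-prefixing added in this commit.
# StubChain and StubStore are hypothetical stand-ins for the app's
# load_chatbot() chain and VectorStore; they only echo their inputs.

def ask_bot(query):
    # Fixed instruction prompt (abridged here; full text as in the diff above).
    standard_prompt = ("Schreibe immer höflich und auf antworte immer in der Sprache "
                       "in der der User auch schreibt. ")
    return standard_prompt + query

class StubChain:
    def run(self, input_documents, question):
        # A real chain would call the LLM; here we just show what it would receive.
        return f"[{len(input_documents)} docs] {question}"

class StubStore:
    def similarity_search(self, query, k=5):
        return [f"doc{i}" for i in range(k)]

chain = StubChain()
VectorStore = StubStore()

query = "Wie hoch sind die Gesamtkosten der Krankenhäuser pro Jahr?"
full_query = ask_bot(query)                             # the LLM prompt gets the prefix
docs = VectorStore.similarity_search(query=query, k=5)  # retrieval still uses the raw query
response = chain.run(input_documents=docs, question=full_query)
print(response)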