abhivsh committed on
Commit
a16969c
·
verified ·
1 Parent(s): 39785e9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -4
app.py CHANGED
@@ -56,7 +56,7 @@ def get_file(source_documents):
56
 
57
 
58
  def chat_query_doc(question, history):
59
- history = []
60
  query_old = f"""Provide an elaborate, precise and pointwise reply to the question: {question}.
61
  Also, Please consider the provided chat history: {history}.
62
  Ensure that your current response is detailed, accurate, and addresses each aspect of the question thoroughly.
@@ -85,6 +85,9 @@ def chat_query_doc(question, history):
85
  # Replace input() with question variable for Gradio
86
  result = qa({"question": query, "chat_history" : history})
87
 
 
 
 
88
  source_docs = result["source_documents"]
89
  file_names = get_file(source_docs)
90
  #file_name = os.path.basename(source_docs[0].metadata['source'])
@@ -93,7 +96,7 @@ def chat_query_doc(question, history):
93
  # print("History : ", history)
94
  # print("\n Chat_his : ", chat_history)
95
 
96
- return result["answer"] + "\n\nSources : " + file_name
97
 
98
 
99
  def chat_query_IS(question, history):
@@ -109,7 +112,12 @@ def chat_query_IS(question, history):
109
 
110
 
111
  result = llm.invoke(system_old)
112
- return result.content
 
 
 
 
 
113
 
114
 
115
  iface_doc = gr.ChatInterface(
@@ -161,7 +169,10 @@ This model is trained on **Model Technical Specifications** of the SS-Engg. Dept
161
 
162
  with gr.Blocks(css="CSS/style.css", fill_height=True) as demo:
163
 
164
- with gr.Column():
 
 
 
165
 
166
  with gr.Row():
167
  with gr.Column(scale=1):
 
56
 
57
 
58
  def chat_query_doc(question, history):
59
+ #history = [] ###
60
  query_old = f"""Provide an elaborate, precise and pointwise reply to the question: {question}.
61
  Also, Please consider the provided chat history: {history}.
62
  Ensure that your current response is detailed, accurate, and addresses each aspect of the question thoroughly.
 
85
  # Replace input() with question variable for Gradio
86
  result = qa({"question": query, "chat_history" : history})
87
 
88
+ # Update the history with the latest question and response
89
+ history.append({"user": question, "bot": result["answer"]})
90
+
91
  source_docs = result["source_documents"]
92
  file_names = get_file(source_docs)
93
  #file_name = os.path.basename(source_docs[0].metadata['source'])
 
96
  # print("History : ", history)
97
  # print("\n Chat_his : ", chat_history)
98
 
99
+ return result["answer"] + "\n\nSources : " + file_name, history
100
 
101
 
102
  def chat_query_IS(question, history):
 
112
 
113
 
114
  result = llm.invoke(system_old)
115
+
116
+ # Update the history with the latest question and response
117
+ history.append({"user": question, "bot": result.content})
118
+
119
+
120
+ return result.content, history
121
 
122
 
123
  iface_doc = gr.ChatInterface(
 
169
 
170
  with gr.Blocks(css="CSS/style.css", fill_height=True) as demo:
171
 
172
+ history = gr.State([]) # Initialize the state component
173
+
174
+
175
+ with gr.Column():
176
 
177
  with gr.Row():
178
  with gr.Column(scale=1):