zakiyahfarooque committed on
Commit
ba063d9
·
verified ·
1 Parent(s): 31d5975

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -5
app.py CHANGED
@@ -7,12 +7,20 @@ tokenizer = BlenderbotTokenizer.from_pretrained(MODEL_NAME)
7
  model = BlenderbotForConditionalGeneration.from_pretrained(MODEL_NAME)
8
 
9
  def chatbot_response(user_input, chat_history=[]):
10
- """Generates a response from Blenderbot based on user input."""
11
- inputs = tokenizer(user_input, return_tensors="pt")
12
- reply_ids = model.generate(**inputs)
 
 
 
 
 
13
  response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
14
- chat_history.append((user_input, response))
15
- return response, chat_history
 
 
 
16
 
17
  # Set up Gradio interface
18
  with gr.Blocks() as demo:
 
7
  model = BlenderbotForConditionalGeneration.from_pretrained(MODEL_NAME)
8
 
9
def chatbot_response(user_input, chat_history=None):
    """Generate a Blenderbot reply that conditions on prior conversation turns.

    Args:
        user_input: The latest user message as a plain string.
        chat_history: List of (user_message, bot_response) tuples from
            earlier turns. Defaults to a fresh empty list on each call.

    Returns:
        The updated chat history, ending with the new
        (user_input, response) tuple.
    """
    # Fix the shared-mutable-default bug: with `chat_history=[]` the same
    # list object persists across calls, leaking turns between unrelated
    # conversations. A None sentinel gives every call its own list.
    if chat_history is None:
        chat_history = []

    # Flatten previous turns into one prompt string so the model sees the
    # conversation context, then append the new user message.
    history_text = " ".join(
        f"User: {user} Assistant: {bot}" for user, bot in chat_history
    )
    formatted_input = f"{history_text} User: {user_input} Assistant:"

    # `tokenizer` and `model` are module-level globals loaded at import time.
    inputs = tokenizer(formatted_input, return_tensors="pt")
    reply_ids = model.generate(**inputs, max_length=100)
    response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)

    # Record the new turn as a (user message, bot response) tuple — the
    # shape the Gradio chat component expects.
    chat_history.append((user_input, response))
    return chat_history
24
 
25
  # Set up Gradio interface
26
  with gr.Blocks() as demo: