fix
app.py CHANGED
@@ -87,7 +87,7 @@ def chat_with_model(messages):
 
     print(f'Step 1: {messages}')
 
-    prompt_text = current_tokenizer.decode(inputs["input_ids"][0], skip_special_tokens=
+    prompt_text = current_tokenizer.decode(inputs["input_ids"][0], skip_special_tokens=False)
 
     for token_info in streamer:
         token_str = token_info["token"]
@@ -142,6 +142,7 @@ def chat_with_model(messages):
     current_model.to("cpu")
     torch.cuda.empty_cache()
 
+    messages[-1]["content"] = output_text
     print(f'Step 3: {messages}')
 
     return messages
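For context, a minimal sketch of what the patched tail of chat_with_model plausibly does after this fix. Only messages, the streamer loop with token_info["token"], torch.cuda.empty_cache(), and the new messages[-1]["content"] = output_text assignment come from the diff; the finalize_chat name, the output_text accumulation, and the stand-in streamer are assumptions for illustration, not the Space's actual code.

import torch

def finalize_chat(messages, streamer):
    """Sketch of the patched tail of chat_with_model (names assumed from the diff)."""
    output_text = ""
    for token_info in streamer:              # streamer is assumed to yield dicts like {"token": "..."}
        output_text += token_info["token"]   # assumed accumulation of the streamed reply

    # Release cached GPU memory once generation is done (as in the diff).
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    # The fix: write the generated reply back into the last (assistant) message,
    # so the caller receives the model output instead of a stale placeholder.
    messages[-1]["content"] = output_text
    print(f'Step 3: {messages}')

    return messages

# Usage with a stand-in streamer:
messages = [{"role": "user", "content": "hi"}, {"role": "assistant", "content": ""}]
fake_streamer = iter([{"token": "Hello"}, {"token": " there"}])
finalize_chat(messages, fake_streamer)

Without the added assignment, the function returned messages with the assistant turn still empty, which matches the one-line nature of this "fix" commit.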