Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -3,19 +3,15 @@ from huggingface_hub import InferenceClient
|
|
3 |
|
4 |
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
|
5 |
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
{user_input}
|
12 |
-
|
13 |
-
### Response:
|
14 |
-
"""
|
15 |
return prompt
|
16 |
|
17 |
def respond(message, history, system_message, max_tokens, temperature, top_p):
|
18 |
-
formatted_prompt = format_alpaca_prompt(message, system_message)
|
19 |
|
20 |
response = client.text_generation(
|
21 |
formatted_prompt,
|
@@ -25,8 +21,9 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
|
|
25 |
)
|
26 |
|
27 |
# ✅ Extract only the response
|
28 |
-
cleaned_response = response.
|
29 |
|
|
|
30 |
yield cleaned_response # ✅ Output only the answer
|
31 |
|
32 |
demo = gr.ChatInterface(
|
|
|
3 |
|
4 |
client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
|
5 |
|
6 |
# Store chat history
def format_alpaca_prompt(user_input, history, system_prompt):
    """Format the model prompt in Alpaca/LLaMA chat style, including history.

    Args:
        user_input: The latest user message to answer.
        history: Prior conversation turns as (user_message, assistant_reply)
            pairs; an empty list means a fresh conversation.
        system_prompt: System instruction placed at the top of the prompt.

    Returns:
        A single prompt string of the form
        "<system>\n<history>\nUser: <user_input>\nAssistant:" — it ends with
        "Assistant:" so the model completes the assistant turn.
    """
    # NOTE(review): with empty history this yields a blank line between the
    # system prompt and the first "User:" turn — kept to match the original.
    history_text = "\n".join([f"User: {h[0]}\nAssistant: {h[1]}" for h in history])
    prompt = f"""{system_prompt}\n{history_text}\nUser: {user_input}\nAssistant:"""
    return prompt
|
12 |
|
13 |
def respond(message, history, system_message, max_tokens, temperature, top_p):
|
14 |
+
formatted_prompt = format_alpaca_prompt(message, history, system_message)
|
15 |
|
16 |
response = client.text_generation(
|
17 |
formatted_prompt,
|
|
|
21 |
)
|
22 |
|
23 |
# ✅ Extract only the response
|
24 |
+
cleaned_response = response.strip()
|
25 |
|
26 |
+
history.append((message, cleaned_response)) # ✅ Store conversation history
|
27 |
yield cleaned_response # ✅ Output only the answer
|
28 |
|
29 |
demo = gr.ChatInterface(
|