Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -7,7 +7,7 @@ client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
|
|
7 |
def format_alpaca_prompt(history, user_input, system_prompt):
    """Format a prompt in Alpaca/LLaMA chat style, including conversation history.

    Args:
        history: Iterable of (user_message, assistant_message) pairs from
            previous turns.
        user_input: The current user message.
        system_prompt: System instruction placed at the top of the prompt.

    Returns:
        A single string: system prompt, transcript of prior turns, the new
        user message, and a trailing "Assistant:" cue for the model.
    """
    formatted_history = "\n".join(
        f"User: {h[0]}\nAssistant: {h[1]}" for h in history
    )
    # BUG FIX: the original f-string was truncated after `{user_input}\` —
    # unterminated literal (SyntaxError) and missing the "Assistant:" cue
    # that prompts the model to generate the next reply.
    prompt = f"""{system_prompt}\n{formatted_history}\nUser: {user_input}\nAssistant:"""
    return prompt
|
12 |
|
13 |
def respond(message, history, system_message, max_tokens, temperature, top_p):
|
@@ -21,7 +21,7 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
|
|
21 |
)
|
22 |
|
23 |
# Extract only the response
|
24 |
-
cleaned_response = response.strip()
|
25 |
|
26 |
# Update history
|
27 |
history.append((message, cleaned_response))
|
|
|
7 |
def format_alpaca_prompt(history, user_input, system_prompt):
    """Build an Alpaca/LLaMA-style prompt string from the conversation so far.

    The result is: system prompt, then each prior (user, assistant) turn,
    then the new user message, ending with an "Assistant:" generation cue.
    """
    turns = []
    for user_msg, assistant_msg in history:
        turns.append(f"User: {user_msg}\nAssistant: {assistant_msg}")
    transcript = "\n".join(turns)
    # Join the sections with newlines; the bare "Assistant:" tail cues the model.
    return "\n".join([system_prompt, transcript, f"User: {user_input}", "Assistant:"])
|
12 |
|
13 |
def respond(message, history, system_message, max_tokens, temperature, top_p):
|
|
|
21 |
)
|
22 |
|
23 |
# Extract only the response
|
24 |
+
cleaned_response = response.strip().split("Assistant:")[-1].strip()
|
25 |
|
26 |
# Update history
|
27 |
history.append((message, cleaned_response))
|