Update app.py
app.py CHANGED
@@ -45,18 +45,17 @@ model = HfApiModel(
 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
-def run_agent(user_input):
+def run_agent(user_input, stream=False, reset=False, additional_args=None):
     """
     Constructs a prompt from the system prompt and the user input,
     then directly calls the model to generate an answer.
     After generation, it removes any chain-of-thought, debug logs, or code snippets.
+    Accepts extra keyword arguments to be compatible with Gradio's expectations.
     """
     prompt = f"{system_prompt}\nUser Input: {user_input}\nFinal Answer:"
     try:
-        # Try using the model's run method if available.
         raw_response = model.run(prompt)
     except AttributeError:
-        # Otherwise, assume the model is callable.
         raw_response = model(prompt)
 
     print("Raw Model Response:", raw_response)  # Debug output (optional)
@@ -70,12 +69,13 @@ def run_agent(user_input):
         return "I'm unable to generate a meaningful response. Please refine your query."
     return clean_response
 
-# Create a simple agent wrapper with a run method.
 class SimpleAgent:
     def __init__(self, run_function):
-        self.
+        self.run_function = run_function
+
+    def run(self, *args, **kwargs):
+        return self.run_function(*args, **kwargs)
 
 agent = SimpleAgent(run_agent)
 
-# Launch the Gradio UI using the simple agent.
 GradioUI(agent).launch()
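The two hunks skip the response-cleaning step (old lines 63-69), so the docstring's claim about stripping chain-of-thought, debug logs, and code snippets is not visible in this diff. Below is a minimal sketch of what such a step might look like; the helper name clean_model_response and the exact patterns are assumptions for illustration, not the Space's actual code.

import re

def clean_model_response(raw_response) -> str:
    # Hypothetical cleanup helper (an assumption; the real code between the
    # two hunks is not shown in the diff).
    text = str(raw_response)
    # Keep only what follows the last "Final Answer:" marker, if present.
    if "Final Answer:" in text:
        text = text.rsplit("Final Answer:", 1)[1]
    # Strip fenced code snippets such as ```python ... ```.
    text = re.sub(r"```.*?```", "", text, flags=re.DOTALL)
    # Drop lines that look like debug output.
    lines = [ln for ln in text.splitlines() if not ln.strip().lower().startswith("debug")]
    return "\n".join(lines).strip()

With a helper like this, run_agent would return the cleaned text when it is non-empty and otherwise fall back to the "I'm unable to generate a meaningful response" message, matching the two returns shown in the second hunk. The extra keyword arguments added to run_agent (stream, reset, additional_args) presumably mirror what the Gradio UI passes through to agent.run, which is also why SimpleAgent.run forwards *args and **kwargs unchanged.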