Aharneish committed on
Commit
7dda9e1
·
verified ·
1 Parent(s): 9d1d9bd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -7
app.py CHANGED
@@ -2,7 +2,7 @@ import os
2
  import gradio as gr
3
  import requests
4
  import pandas as pd
5
- from smolagents import CodeAgent, DuckDuckGoSearchTool,TransformersModel
6
 
7
  # --- Constants ---
8
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
@@ -11,8 +11,26 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
11
  # --- Define Agent ---
12
  class SmolAgentWrapper:
13
  def __init__(self):
14
- self.model = TransformersModel(model_id="google/flan-t5-base", use_chat_template=False)
15
- self.tools = [DuckDuckGoSearchTool()] # You can add more tools here
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
  self.agent = CodeAgent(model=self.model, tools=self.tools)
17
 
18
  def __call__(self, question: str) -> str:
@@ -98,15 +116,14 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
98
 
99
  # --- Gradio Interface ---
100
  with gr.Blocks() as demo:
101
- gr.Markdown("# SmolAgent Evaluation Runner (Flan-T5 + DuckDuckGo Tool)")
102
  gr.Markdown(
103
  """
104
  **Instructions:**
105
  1. Log in to Hugging Face with the button below.
106
  2. Click the button to run all GAIA questions through the SmolAgent.
107
  3. Results will be submitted automatically and your score will be shown.
108
-
109
- **Note:** Model runs on Hugging Face Inference API using `flan-t5-base`, optimized for CPU.
110
  """
111
  )
112
 
@@ -124,4 +141,4 @@ if __name__ == "__main__":
124
  print("-" * 60)
125
  print("Launching SmolAgent Space...")
126
  print("-" * 60)
127
- demo.launch(debug=True, share=False)
 
2
  import gradio as gr
3
  import requests
4
  import pandas as pd
5
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, TransformersModel
6
 
7
  # --- Constants ---
8
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
11
  # --- Define Agent ---
12
class SmolAgentWrapper:
    def __init__(self):
        """Build the agent: a small local HF model plus a DuckDuckGo search tool.

        Uses ``gpt2`` via smolagents' ``TransformersModel``. GPT-2 ships
        without a chat template, so a minimal one is installed on the
        tokenizer after the model is constructed.
        """
        # NOTE(review): a chat template is a *tokenizer* attribute, not a
        # generation parameter. The previous code passed "chat_template"
        # inside generation_kwargs, but those kwargs are forwarded to
        # model.generate(), which does not accept it — so the template was
        # never applied. Keep only real sampling parameters here and set
        # the template on the tokenizer below.
        self.model = TransformersModel(
            model_id="gpt2",
            generation_kwargs={
                "do_sample": True,
                "max_new_tokens": 256,
                "temperature": 0.7,
            },
        )
        # Minimal User/Assistant/System chat template for GPT-2.
        # Presumably TransformersModel exposes its tokenizer as
        # ``self.model.tokenizer`` — TODO confirm against the installed
        # smolagents version.
        self.model.tokenizer.chat_template = "{% for message in messages %}\n{% if message['role'] == 'user' %}\nUser: {{ message['content'] }}\n{% elif message['role'] == 'assistant' %}\nAssistant: {{ message['content'] }}\n{% elif message['role'] == 'system' %}\nSystem: {{ message['content'] }}\n{% endif %}\n{% endfor %}\n{% if add_generation_prompt %}\nAssistant: {% endif %}"

        # Alternative options if the above doesn't work:
        # Option 1: Using a different GPT model that might handle chat better
        # self.model = TransformersModel(model_id="facebook/opt-350m")

        # Option 2: Using a model with better instruction following
        # self.model = TransformersModel(model_id="databricks/dolly-v2-3b")

        self.tools = [DuckDuckGoSearchTool()]
        self.agent = CodeAgent(model=self.model, tools=self.tools)
35
 
36
  def __call__(self, question: str) -> str:
 
116
 
117
  # --- Gradio Interface ---
118
  with gr.Blocks() as demo:
119
+ gr.Markdown("# SmolAgent Evaluation Runner")
120
  gr.Markdown(
121
  """
122
  **Instructions:**
123
  1. Log in to Hugging Face with the button below.
124
  2. Click the button to run all GAIA questions through the SmolAgent.
125
  3. Results will be submitted automatically and your score will be shown.
126
+ **Note:** Model runs on Hugging Face Inference API.
 
127
  """
128
  )
129
 
 
141
  print("-" * 60)
142
  print("Launching SmolAgent Space...")
143
  print("-" * 60)
144
+ demo.launch(debug=True, share=False)