Update app.py
app.py CHANGED
```diff
@@ -4,6 +4,47 @@ import requests
 import inspect
 import pandas as pd
 
+
+# (Keep Constants as is)
+# --- Constants ---
+DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
+
+# --- Basic Agent Definition ---
+# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
+class BasicAgent:
+    def __init__(self):
+        print("Initializing LlamaIndex-based agent...")
+
+        # Set up the LLM (other models can also be used via HuggingFace or OpenRouter)
+        self.llm = HfApiModel()
+        # OpenAI(model="gpt-3.5-turbo", temperature=0)
+
+        # Create a ServiceContext with the chosen LLM
+        self.service_context = ServiceContext.from_defaults(llm=self.llm)
+
+        # Load the documents from the "data/" directory
+        self.documents = SimpleDirectoryReader("data").load_data()
+
+        # Build a vector index over the documents
+        self.index = VectorStoreIndex.from_documents(
+            self.documents, service_context=self.service_context
+        )
+
+        # Create the query engine
+        self.query_engine = self.index.as_query_engine()
+
+    def __call__(self, question: str) -> str:
+        print(f"Agent received question (first 50 chars): {question[:50]}...")
+        response = self.query_engine.query(question)
+        print(f"Agent returning response: {response}")
+        return str(response)
+
+
 ## Evaluation stuff
 
 def run_and_submit_all( profile: gr.OAuthProfile | None):
@@ -183,36 +224,3 @@ if __name__ == "__main__":
 
 
 
-# (Keep Constants as is)
-# --- Constants ---
-DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
-
-# --- Basic Agent Definition ---
-# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
-class BasicAgent:
-    def __init__(self):
-        print("Initializing LlamaIndex-based agent...")
-
-        # Set up the LLM (other models can also be used via HuggingFace or OpenRouter)
-        self.llm = HfApiModel()
-        # OpenAI(model="gpt-3.5-turbo", temperature=0)
-
-        # Create a ServiceContext with the chosen LLM
-        self.service_context = ServiceContext.from_defaults(llm=self.llm)
-
-        # Load the documents from the "data/" directory
-        self.documents = SimpleDirectoryReader("data").load_data()
-
-        # Build a vector index over the documents
-        self.index = VectorStoreIndex.from_documents(
-            self.documents, service_context=self.service_context
-        )
-
-        # Create the query engine
-        self.query_engine = self.index.as_query_engine()
-
-    def __call__(self, question: str) -> str:
-        print(f"Agent received question (first 50 chars): {question[:50]}...")
-        response = self.query_engine.query(question)
-        print(f"Agent returning response: {response}")
-        return str(response)
```
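As committed, `BasicAgent` uses LlamaIndex components (`ServiceContext`, `SimpleDirectoryReader`, `VectorStoreIndex`) that are not imported in the visible hunks, and `HfApiModel` is the smolagents model wrapper rather than a LlamaIndex `LLM`, so it cannot be handed to `ServiceContext.from_defaults(llm=...)` as-is. A minimal self-contained sketch of the same retrieval agent, assuming a legacy `llama_index` release that still ships `ServiceContext` plus its `HuggingFaceInferenceAPI` wrapper (the import paths and the example model name are assumptions, not part of this commit):

```python
# Sketch only: assumes llama_index 0.9.x-style imports; adjust paths for newer releases.
from llama_index import ServiceContext, SimpleDirectoryReader, VectorStoreIndex
from llama_index.llms import HuggingFaceInferenceAPI


class BasicAgent:
    def __init__(self, data_dir: str = "data"):
        print("Initializing LlamaIndex-based agent...")

        # LLM served through the Hugging Face Inference API (model name is an example).
        self.llm = HuggingFaceInferenceAPI(model_name="HuggingFaceH4/zephyr-7b-beta")

        # Wire the LLM into LlamaIndex; note the default embed_model may need
        # its own credentials or a local embedder, depending on the installation.
        self.service_context = ServiceContext.from_defaults(llm=self.llm)

        # Load the reference documents and build a vector index over them.
        self.documents = SimpleDirectoryReader(data_dir).load_data()
        self.index = VectorStoreIndex.from_documents(
            self.documents, service_context=self.service_context
        )
        self.query_engine = self.index.as_query_engine()

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        response = self.query_engine.query(question)
        print(f"Agent returning response: {response}")
        return str(response)
```

Keeping this definition above `run_and_submit_all`, which is what the commit does, means the class already exists when the evaluation callback (which presumably instantiates `BasicAgent`) runs.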
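For a quick check outside the Space (not part of app.py), the agent can be exercised directly; the question below is only a placeholder and assumes a `data/` folder containing at least one readable document:

```python
if __name__ == "__main__":
    # Ad-hoc smoke test: build the index once, then ask a throwaway question.
    agent = BasicAgent()
    print(agent("Summarize the documents available under data/."))
```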