Update app.py
app.py CHANGED
@@ -192,24 +192,24 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 class BasicAgent:
         print("Initializing LlamaIndex-based agent...")

+        # Set up the LLM (you can also use other models via HuggingFace or OpenRouter)
+        self.llm = HfApiModel()

+        #OpenAI(model="gpt-3.5-turbo", temperature=0)

+        # Create a ServiceContext with your LLM
+        self.service_context = ServiceContext.from_defaults(llm=self.llm)

+        # Load the documents from the "data/" directory
+        self.documents = SimpleDirectoryReader("data").load_data()

+        # Create a vector index
+        self.index = VectorStoreIndex.from_documents(
+            self.documents, service_context=self.service_context
+        )

+        # Create the query engine
+        self.query_engine = self.index.as_query_engine()

     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
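The hunk ends right after the first line of __call__, so the part of the method that actually uses the new query engine is not shown. Below is a minimal sketch of how the same class could look end to end, written against the current llama_index.core API (where ServiceContext is deprecated and the library defaults are used instead); the __call__ body, the default LLM, and the omission of the HfApiModel assignment are assumptions for illustration, not part of this commit.

# Sketch only -- not the committed code. Assumes llama-index >= 0.10
# ("llama_index.core", no ServiceContext) and the library's default LLM.
from llama_index.core import SimpleDirectoryReader, VectorStoreIndex


class BasicAgent:
    def __init__(self):
        print("Initializing LlamaIndex-based agent...")
        # Load every file under data/ and build an in-memory vector index.
        documents = SimpleDirectoryReader("data").load_data()
        index = VectorStoreIndex.from_documents(documents)
        # The query engine wraps retrieval over the index plus answer
        # synthesis with the configured LLM.
        self.query_engine = index.as_query_engine()

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")
        # Retrieve relevant chunks and synthesize an answer; str() extracts
        # the answer text from the Response object.
        response = self.query_engine.query(question)
        return str(response)

Whichever API version is used, the answer path in __call__ comes down to self.query_engine.query(question) on the engine built in __init__.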