Spaces:
Running
Running
Commit
·
d8709c0
1
Parent(s):
25fe98a
Cleaning debug logs
Browse files

Changed files:
- src/manager/manager.py (+0 −3)

src/manager/manager.py — CHANGED
@@ -188,7 +188,6 @@ class GeminiManager:
 188             device = 'mps'
 189         else:
 190             device = 'cpu'
 191 -       print(f"Using device: {device}")
 192         model = SentenceTransformer('all-MiniLM-L6-v2', device=device)
 193         doc_embeddings = model.encode(memories, convert_to_tensor=True, device=device)
 194         query_embedding = model.encode(query, convert_to_tensor=True, device=device)
@@ -196,7 +195,6 @@ class GeminiManager:
 196         scores, indices = torch.topk(similarity_scores, k=top_k)
 197         results = []
 198         for score, idx in zip(scores, indices):
 199 -           print(memories[idx], f"(Score: {score:.4f})")
 200             if score >= threshold:
 201                 results.append(memories[idx])
 202         return results
@@ -222,7 +220,6 @@ class GeminiManager:
 222
 223     def invoke_manager(self, messages):
 224         chat_history = self.format_chat_history(messages)
 225 -       print(f"Chat history: {chat_history}")
 226         logger.debug(f"Chat history: {chat_history}")
 227         try:
 228             response = suppress_output(self.generate_response)(chat_history)
|
|
|
Resulting file (after change):

 188             device = 'mps'
 189         else:
 190             device = 'cpu'
 191         model = SentenceTransformer('all-MiniLM-L6-v2', device=device)
 192         doc_embeddings = model.encode(memories, convert_to_tensor=True, device=device)
 193         query_embedding = model.encode(query, convert_to_tensor=True, device=device)
 ...
 195         scores, indices = torch.topk(similarity_scores, k=top_k)
 196         results = []
 197         for score, idx in zip(scores, indices):
 198             if score >= threshold:
 199                 results.append(memories[idx])
 200         return results
 ...
 220
 221     def invoke_manager(self, messages):
 222         chat_history = self.format_chat_history(messages)
 223         logger.debug(f"Chat history: {chat_history}")
 224         try:
 225             response = suppress_output(self.generate_response)(chat_history)