Kunal Pai committed
Commit 82fe65b · 1 Parent(s): ab59793

Enable conditional memory usage in GeminiManager


Memories will still be stored and deleted, since those operations are part of the system prompt.

Files changed (1)
  1. src/manager/manager.py +19 -14
src/manager/manager.py CHANGED
@@ -23,7 +23,8 @@ class GeminiManager:
                  system_prompt_file="./src/models/system4.prompt",
                  gemini_model="gemini-2.5-pro-exp-03-25",
                  local_only=False, allow_tool_creation=True,
-                 cloud_only=False, use_economy=True):
+                 cloud_only=False, use_economy=True,
+                 use_memory=True):
         load_dotenv()
         self.toolsLoader: ToolManager = toolsLoader
         if not toolsLoader:
@@ -33,12 +34,13 @@ class GeminiManager:
         self.allow_tool_creation = allow_tool_creation
         self.cloud_only = cloud_only
         self.use_economy = use_economy
+        self.use_memory = use_memory
 
         self.API_KEY = os.getenv("GEMINI_KEY")
         self.client = genai.Client(api_key=self.API_KEY)
         self.toolsLoader.load_tools()
         self.model_name = gemini_model
-        self.memory_manager = MemoryManager()
+        self.memory_manager = MemoryManager() if use_memory else None
         with open(system_prompt_file, 'r', encoding="utf8") as f:
             self.system_prompt = f.read()
         self.messages = []
@@ -157,6 +159,8 @@ class GeminiManager:
         return formatted_history
 
     def get_k_memories(self, query, k=5, threshold=0.0):
+        if not self.use_memory:
+            return []
         memories = MemoryManager().get_memories()
         for i in range(len(memories)):
             memories[i] = memories[i]['memory']
@@ -184,18 +188,19 @@ class GeminiManager:
         return results
 
     def run(self, messages):
-        memories = self.get_k_memories(messages[-1]['content'], k=5, threshold=0.1)
-        if len(memories) > 0:
-            messages.append({
-                "role": "memories",
-                "content": f"{memories}",
-            })
-            messages.append({
-                "role": "assistant",
-                "content": f"Memories: {memories}",
-                "metadata": {"title": "Memories"}
-            })
-        yield messages
+        if self.use_memory:
+            memories = self.get_k_memories(messages[-1]['content'], k=5, threshold=0.1)
+            if len(memories) > 0:
+                messages.append({
+                    "role": "memories",
+                    "content": f"{memories}",
+                })
+                messages.append({
+                    "role": "assistant",
+                    "content": f"Memories: {memories}",
+                    "metadata": {"title": "Memories"}
+                })
+            yield messages
         yield from self.invoke_manager(messages)
 
     def invoke_manager(self, messages):
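
For context, here is a minimal usage sketch of the new flag. It is an assumption, not part of this commit: it presumes GeminiManager can be constructed with keyword arguments only (i.e. the parameters before system_prompt_file have defaults), that the module is importable as src.manager.manager, and that GEMINI_KEY is available via the environment or a .env file (see the load_dotenv()/os.getenv calls above). The chat content and the shape of what invoke_manager yields are illustrative.

# Usage sketch (hypothetical): with use_memory=False, run() skips memory
# retrieval entirely and self.memory_manager is None, so any other code path
# that touches self.memory_manager must guard for that.
from src.manager.manager import GeminiManager  # assumed import path based on the file location

manager = GeminiManager(use_memory=False)  # other keyword defaults left as in the diff

chat = [{"role": "user", "content": "Summarize the project status."}]
for history in manager.run(chat):      # run() is a generator; with use_memory=False it
    latest = history[-1]               # delegates straight to invoke_manager(messages)
    print(latest.get("role"), latest.get("content"))

Because the flag only gates retrieval in get_k_memories() and run(), storing and deleting memories still happens through the system prompt, as the commit message notes.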