helloparthshah Kunal Pai committed
Commit 231c049 · 1 Parent(s): 545222f

Implemented mode selection

Co-authored-by: Kunal Pai <kunpai@users.noreply.github.com>
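
This commit threads a new Mode enum (defined in src/manager/manager.py below) through GeminiManager, AgentManager, BudgetManager, and ToolManager, and exposes the selection as a multiselect dropdown in the Gradio UI. A minimal sketch of how the pieces fit together, based only on the diff below; it assumes the usual environment (GEMINI_KEY, prompt files, models.json) is already set up:

    from src.manager.manager import GeminiManager, Mode

    # Start with every capability enabled, as main.py does.
    manager = GeminiManager(gemini_model="gemini-2.0-flash",
                            modes=[mode for mode in Mode])

    # Later, narrow it down to a cloud-only, no-economy configuration.
    manager.set_modes([
        Mode.ENABLE_AGENT_CREATION,
        Mode.ENABLE_CLOUD_AGENTS,
        Mode.ENABLE_TOOL_INVOCATION,
        Mode.ENABLE_MEMORY,
    ])
    print(manager.get_current_modes())  # ['ENABLE_AGENT_CREATION', 'ENABLE_CLOUD_AGENTS', ...]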

main.py CHANGED
@@ -1,7 +1,9 @@
+from typing import List
 import gradio as gr
 
 import base64
-from src.manager.manager import GeminiManager
+from src.manager.manager import GeminiManager, Mode
+from enum import Enum
 
 _logo_bytes = open("HASHIRU_LOGO.png", "rb").read()
 _logo_b64 = base64.b64encode(_logo_bytes).decode()
@@ -25,8 +27,6 @@ css = """
 
 
 def run_model(message, history):
-    print(f"User: {message}")
-    print(f"History: {history}")
     if 'text' in message:
         history.append({
             "role": "user",
@@ -38,33 +38,30 @@ def run_model(message, history):
             "role": "user",
             "content": (file,)
         })
-    print(f"History: {history}")
     yield "", history
     for messages in model_manager.run(history):
         yield "", messages
 
 
-def update_model(model_name):
-    print(f"Model changed to: {model_name}")
-    pass
+with gr.Blocks(css=css, fill_width=True, fill_height=True) as demo:
+    model_manager = GeminiManager(
+        gemini_model="gemini-2.0-flash", modes=[mode for mode in Mode])
 
+    def update_model(modeIndexes: List[int]):
+        modes = [Mode(i+1) for i in modeIndexes]
+        print(f"Selected modes: {modes}")
+        model_manager.set_modes(modes)
 
-with gr.Blocks(css=css, fill_width=True, fill_height=True) as demo:
-    model_manager = GeminiManager(gemini_model="gemini-2.0-flash")
-
     with gr.Column(scale=1):
         with gr.Row(scale=0):
             gr.Markdown(_header_html)
             model_dropdown = gr.Dropdown(
-                choices=[
-                    "HASHIRU",
-                    "Static-HASHIRU",
-                    "Cloud-Only HASHIRU",
-                    "Local-Only HASHIRU",
-                    "No-Economy HASHIRU",
-                ],
-                value="HASHIRU",
+                choices=[mode.name for mode in Mode],
+                value=model_manager.get_current_modes,
                 interactive=True,
+                type="index",
+                multiselect=True,
+                label="Select Modes",
             )
 
     model_dropdown.change(
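
A note on the index mapping above: with type="index" the multiselect dropdown returns zero-based positions, while Enum members created with auto() start at 1, hence Mode(i+1) in update_model. A small sketch of that mapping (the helper name is illustrative), assuming the Mode ordering from src/manager/manager.py:

    from src.manager.manager import Mode

    def indexes_to_modes(mode_indexes):
        # The dropdown choices are [mode.name for mode in Mode], so position i
        # corresponds to the member with value i + 1 (auto() numbers from 1).
        return [Mode(i + 1) for i in mode_indexes]

    # Selecting the first and third entries in the UI:
    print(indexes_to_modes([0, 2]))
    # [<Mode.ENABLE_AGENT_CREATION: 1>, <Mode.ENABLE_CLOUD_AGENTS: 3>]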
src/manager/agent_manager.py CHANGED
@@ -16,12 +16,13 @@ from src.manager.budget_manager import BudgetManager
 MODEL_PATH = "./src/models/"
 MODEL_FILE_PATH = "./src/models/models.json"
 
+
 class Agent(ABC):
-
-    def __init__(self, agent_name: str,
-                 base_model: str,
-                 system_prompt: str,
-                 create_resource_cost: int,
+
+    def __init__(self, agent_name: str,
+                 base_model: str,
+                 system_prompt: str,
+                 create_resource_cost: int,
                  invoke_resource_cost: int,
                  create_expense_cost: int = 0,
                  invoke_expense_cost: int = 0,):
@@ -33,22 +34,27 @@ class Agent(ABC):
         self.create_expense_cost = create_expense_cost
         self.invoke_expense_cost = invoke_expense_cost
         self.create_model()
-
+
     @abstractmethod
     def create_model(self) -> None:
         """Create and Initialize agent"""
         pass
-
+
     @abstractmethod
     def ask_agent(self, prompt: str) -> str:
         """ask agent a question"""
         pass
-
+
     @abstractmethod
-    def delete_agent(self) ->None:
+    def delete_agent(self) -> None:
         """delete agent"""
         pass
-
+
+    @abstractmethod
+    def get_type(self) -> None:
+        """get agent type"""
+        pass
+
     def get_costs(self):
         return {
             "create_resource_cost": self.create_resource_cost,
@@ -56,58 +62,68 @@ class Agent(ABC):
             "create_expense_cost": self.create_expense_cost,
             "invoke_expense_cost": self.invoke_expense_cost
         }
-
+
+
 class OllamaAgent(Agent):
-
+    type = "local"
+
     def create_model(self):
         ollama_response = ollama.create(
-            model = self.agent_name,
-            from_ = self.base_model,
-            system = self.system_prompt,
-            stream = False
+            model=self.agent_name,
+            from_=self.base_model,
+            system=self.system_prompt,
+            stream=False
         )
-
+
     def ask_agent(self, prompt):
         output_assistant_response(f"Asked Agent {self.agent_name} a question")
         agent_response = ollama.chat(
             model=self.agent_name,
             messages=[{"role": "user", "content": prompt}],
         )
-        output_assistant_response(f"Agent {self.agent_name} answered with {agent_response.message.content}")
+        output_assistant_response(
+            f"Agent {self.agent_name} answered with {agent_response.message.content}")
         return agent_response.message.content
-
+
     def delete_agent(self):
         ollama.delete(self.agent_name)
-
+
+    def get_type(self):
+        return self.type
+
+
 class GeminiAgent(Agent):
-    def __init__(self,
-                 agent_name: str,
-                 base_model: str,
-                 system_prompt: str,
-                 create_resource_cost: int,
+    type = "cloud"
+
+    def __init__(self,
+                 agent_name: str,
+                 base_model: str,
+                 system_prompt: str,
+                 create_resource_cost: int,
                  invoke_resource_cost: int,
                  create_expense_cost: int = 0,
                  invoke_expense_cost: int = 0,):
         load_dotenv()
         self.api_key = os.getenv("GEMINI_KEY")
         if not self.api_key:
-            raise ValueError("Google API key is required for Gemini models. Set GOOGLE_API_KEY environment variable or pass api_key parameter.")
-
+            raise ValueError(
+                "Google API key is required for Gemini models. Set GOOGLE_API_KEY environment variable or pass api_key parameter.")
+
         # Initialize the Gemini API
         self.client = genai.Client(api_key=self.api_key)
-
+
         # Call parent constructor after API setup
-        super().__init__(agent_name,
-                         base_model,
-                         system_prompt,
-                         create_resource_cost,
+        super().__init__(agent_name,
+                         base_model,
+                         system_prompt,
+                         create_resource_cost,
                          invoke_resource_cost,
                          create_expense_cost,
                          invoke_expense_cost)
 
     def create_model(self):
         self.messages = []
-
+
     def ask_agent(self, prompt):
         response = self.client.models.generate_content(
             model=self.base_model,
@@ -117,11 +133,17 @@ class GeminiAgent(Agent):
             )
         )
         return response.text
-
+
     def delete_agent(self):
         self.messages = []
 
+    def get_type(self):
+        return self.type
+
+
 class GroqAgent(Agent):
+    type = "cloud"
+
     def __init__(
         self,
         agent_name: str,
@@ -158,34 +180,65 @@ class GroqAgent(Agent):
         # No delete support for Groq
         pass
 
+    def get_type(self):
+        return self.type
+
+
 @singleton
 class AgentManager():
     budget_manager: BudgetManager = BudgetManager()
+    is_creation_enabled: bool = True
+    is_cloud_invocation_enabled: bool = True
+    is_local_invocation_enabled: bool = True
+
     def __init__(self):
         self._agents: Dict[str, Agent] = {}
-        self._agent_types ={
+        self._agent_types = {
            "ollama": OllamaAgent,
            "gemini": GeminiAgent,
            "groq": GroqAgent,
        }
-
+
         self._load_agents()
-
-    def create_agent(self, agent_name: str,
-                     base_model: str, system_prompt: str,
-                     description: str = "", create_resource_cost: float = 0,
-                     invoke_resource_cost: float = 0,
+
+    def set_creation_mode(self, status: bool):
+        self.is_creation_enabled = status
+        if status:
+            output_assistant_response("Agent creation mode is enabled.")
+        else:
+            output_assistant_response("Agent creation mode is disabled.")
+
+    def set_cloud_invocation_mode(self, status: bool):
+        self.is_cloud_invocation_enabled = status
+        if status:
+            output_assistant_response("Cloud invocation mode is enabled.")
+        else:
+            output_assistant_response("Cloud invocation mode is disabled.")
+
+    def set_local_invocation_mode(self, status: bool):
+        self.is_local_invocation_enabled = status
+        if status:
+            output_assistant_response("Local invocation mode is enabled.")
+        else:
+            output_assistant_response("Local invocation mode is disabled.")
+
+    def create_agent(self, agent_name: str,
+                     base_model: str, system_prompt: str,
+                     description: str = "", create_resource_cost: float = 0,
+                     invoke_resource_cost: float = 0,
                      create_expense_cost: float = 0,
                      invoke_expense_cost: float = 0,
-                     **additional_params) -> Tuple[Agent, int]:
-
+                     **additional_params) -> Tuple[Agent, int]:
+        if not self.is_creation_enabled:
+            raise ValueError("Agent creation mode is disabled.")
+
         if agent_name in self._agents:
             raise ValueError(f"Agent {agent_name} already exists")
-
+
         self._agents[agent_name] = self.create_agent_class(
-            agent_name,
-            base_model,
-            system_prompt,
+            agent_name,
+            base_model,
+            system_prompt,
             description=description,
             create_resource_cost=create_resource_cost,
             invoke_resource_cost=invoke_resource_cost,
@@ -193,12 +246,12 @@ class AgentManager():
             invoke_expense_cost=invoke_expense_cost,
             **additional_params # For any future parameters we might want to add
         )
-
-        #save agent to file
+
+        # save agent to file
         self._save_agent(
-            agent_name,
-            base_model,
-            system_prompt,
+            agent_name,
+            base_model,
+            system_prompt,
             description=description,
             create_resource_cost=create_resource_cost,
             invoke_resource_cost=invoke_resource_cost,
@@ -206,47 +259,47 @@ class AgentManager():
             invoke_expense_cost=invoke_expense_cost,
             **additional_params # For any future parameters we might want to add
         )
-        return (self._agents[agent_name],
-                self.budget_manager.get_current_remaining_resource_budget(),
+        return (self._agents[agent_name],
+                self.budget_manager.get_current_remaining_resource_budget(),
                 self.budget_manager.get_current_remaining_expense_budget())
-
-    def validate_budget(self,
-                        resource_cost: float=0,
-                        expense_cost: float=0) -> None:
+
+    def validate_budget(self,
+                        resource_cost: float = 0,
+                        expense_cost: float = 0) -> None:
         if not self.budget_manager.can_spend_resource(resource_cost):
             raise ValueError(f"Do not have enough resource budget to create/use the agent. "
-                             +f"Creating/Using the agent costs {resource_cost} but only {self.budget_manager.get_current_remaining_resource_budget()} is remaining")
+                             + f"Creating/Using the agent costs {resource_cost} but only {self.budget_manager.get_current_remaining_resource_budget()} is remaining")
         if not self.budget_manager.can_spend_expense(expense_cost):
             raise ValueError(f"Do not have enough expense budget to create/use the agent. "
-                             +f"Creating/Using the agent costs {expense_cost} but only {self.budget_manager.get_current_remaining_expense_budget()} is remaining")
-
-    def create_agent_class(self,
+                             + f"Creating/Using the agent costs {expense_cost} but only {self.budget_manager.get_current_remaining_expense_budget()} is remaining")
+
+    def create_agent_class(self,
                            agent_name: str,
-                           base_model: str,
-                           system_prompt: str,
-                           description: str = "",
-                           create_resource_cost: float = 0,
+                           base_model: str,
+                           system_prompt: str,
+                           description: str = "",
+                           create_resource_cost: float = 0,
                            invoke_resource_cost: float = 0,
                            create_expense_cost: float = 0,
                            invoke_expense_cost: float = 0,
-                           **additional_params) -> Agent:
+                           **additional_params) -> Agent:
         agent_type = self._get_agent_type(base_model)
         agent_class = self._agent_types.get(agent_type)
-
+
         if not agent_class:
             raise ValueError(f"Unsupported base model {base_model}")
-
-        created_agent = agent_class(agent_name,
-                                    base_model,
-                                    system_prompt,
+
+        created_agent = agent_class(agent_name,
+                                    base_model,
+                                    system_prompt,
                                     create_resource_cost,
                                     invoke_resource_cost,
                                     create_expense_cost,
                                     invoke_expense_cost,)
-
-        self.validate_budget(create_resource_cost,
+
+        self.validate_budget(create_resource_cost,
                              create_expense_cost)
-
+
         self.budget_manager.add_to_resource_budget(create_resource_cost)
         self.budget_manager.add_to_expense_budget(create_expense_cost)
         # create agent
@@ -257,14 +310,14 @@ class AgentManager():
         if agent_name not in self._agents:
             raise ValueError(f"Agent {agent_name} does not exists")
         return self._agents[agent_name]
-
+
     def list_agents(self) -> dict:
         """Return agent information (name, description, costs)"""
         try:
             if os.path.exists(MODEL_FILE_PATH):
                 with open(MODEL_FILE_PATH, "r", encoding="utf8") as f:
                     full_models = json.loads(f.read())
-
+
                 # Create a simplified version with only the description and costs
                 simplified_agents = {}
                 for name, data in full_models.items():
@@ -282,47 +335,56 @@ class AgentManager():
         except Exception as e:
             output_assistant_response(f"Error listing agents: {e}")
             return {}
-
+
     def delete_agent(self, agent_name: str) -> int:
-        agent = self.get_agent(agent_name)
-
-        self.budget_manager.remove_from_resource_expense(agent.create_resource_cost)
+        agent: Agent = self.get_agent(agent_name)
+
+        self.budget_manager.remove_from_resource_expense(
+            agent.create_resource_cost)
         agent.delete_agent()
-
+
         del self._agents[agent_name]
         try:
             if os.path.exists(MODEL_FILE_PATH):
                 with open(MODEL_FILE_PATH, "r", encoding="utf8") as f:
                     models = json.loads(f.read())
-
+
                 del models[agent_name]
                 with open(MODEL_FILE_PATH, "w", encoding="utf8") as f:
                     f.write(json.dumps(models, indent=4))
         except Exception as e:
             output_assistant_response(f"Error deleting agent: {e}")
-        return (self.budget_manager.get_current_remaining_resource_budget(),
+        return (self.budget_manager.get_current_remaining_resource_budget(),
                 self.budget_manager.get_current_remaining_expense_budget())
-
-    def ask_agent(self, agent_name: str, prompt: str) -> Tuple[str,int]:
-        agent = self.get_agent(agent_name)
-
-        self.validate_budget(agent.invoke_resource_cost,
+
+    def ask_agent(self, agent_name: str, prompt: str) -> Tuple[str, int]:
+        agent: Agent = self.get_agent(agent_name)
+        print(agent.get_type())
+        print(agent_name)
+        print(self.is_local_invocation_enabled, self.is_cloud_invocation_enabled)
+        if not self.is_local_invocation_enabled and agent.get_type() == "local":
+            raise ValueError("Local invocation mode is disabled.")
+
+        if not self.is_cloud_invocation_enabled and agent.get_type() == "cloud":
+            raise ValueError("Cloud invocation mode is disabled.")
+
+        self.validate_budget(agent.invoke_resource_cost,
                              agent.invoke_expense_cost)
-
+
         self.budget_manager.add_to_expense_budget(agent.invoke_expense_cost)
 
-        response = agent.ask_agent(prompt)
-        return (response,
-                self.budget_manager.get_current_remaining_resource_budget(),
+        response = agent.ask_agent(prompt)
+        return (response,
+                self.budget_manager.get_current_remaining_resource_budget(),
                 self.budget_manager.get_current_remaining_expense_budget())
-
-    def _save_agent(self,
-                    agent_name: str,
-                    base_model: str,
-                    system_prompt: str,
-                    description: str = "",
-                    create_resource_cost: float = 0,
-                    invoke_resource_cost: float = 0,
+
+    def _save_agent(self,
+                    agent_name: str,
+                    base_model: str,
+                    system_prompt: str,
+                    description: str = "",
+                    create_resource_cost: float = 0,
+                    invoke_resource_cost: float = 0,
                     create_expense_cost: float = 0,
                     invoke_expense_cost: float = 0,
                     **additional_params) -> None:
@@ -330,14 +392,14 @@ class AgentManager():
         try:
             # Ensure the directory exists
             os.makedirs(MODEL_PATH, exist_ok=True)
-
+
             # Read existing models file or create empty dict if it doesn't exist
             try:
                 with open(MODEL_FILE_PATH, "r", encoding="utf8") as f:
                     models = json.loads(f.read())
             except (FileNotFoundError, json.JSONDecodeError):
                 models = {}
-
+
             # Update the models dict with the new agent
             models[agent_name] = {
                 "base_model": base_model,
@@ -348,19 +410,19 @@ class AgentManager():
                 "create_expense_cost": create_expense_cost,
                 "invoke_expense_cost": invoke_expense_cost,
             }
-
+
             # Add any additional parameters that were passed
             for key, value in additional_params.items():
                 models[agent_name][key] = value
-
+
             # Write the updated models back to the file
             with open(MODEL_FILE_PATH, "w", encoding="utf8") as f:
                 f.write(json.dumps(models, indent=4))
-
+
         except Exception as e:
             output_assistant_response(f"Error saving agent {agent_name}: {e}")
 
-    def _get_agent_type(self, base_model)->str:
+    def _get_agent_type(self, base_model) -> str:
 
         if base_model == "llama3.2":
             return "ollama"
@@ -374,16 +436,16 @@ class AgentManager():
             return "groq"
         else:
             return "unknown"
-
+
     def _load_agents(self) -> None:
         """Load agent configurations from disk"""
         try:
             if not os.path.exists(MODEL_FILE_PATH):
                 return
-
+
             with open(MODEL_FILE_PATH, "r", encoding="utf8") as f:
                 models = json.loads(f.read())
-
+
             for name, data in models.items():
                 if name in self._agents:
                     continue
@@ -395,13 +457,13 @@ class AgentManager():
                 invoke_expense_cost = data.get("invoke_expense_cost", 0)
                 model_type = self._get_agent_type(base_model)
                 manager_class = self._agent_types.get(model_type)
-
+
                 if manager_class:
                     # Create the agent with the appropriate manager class
                     self._agents[name] = self.create_agent_class(
-                        name,
-                        base_model,
-                        system_prompt,
+                        name,
+                        base_model,
+                        system_prompt,
                         description=data.get("description", ""),
                         create_resource_cost=create_resource_cost,
                         invoke_resource_cost=invoke_resource_cost,
@@ -410,7 +472,7 @@ class AgentManager():
                         **data.get("additional_params", {})
                     )
                     self._agents[name] = manager_class(
-                        name,
+                        name,
                         base_model,
                         system_prompt,
                         create_resource_cost,
@@ -419,4 +481,4 @@ class AgentManager():
                         invoke_expense_cost,
                     )
         except Exception as e:
-            output_assistant_response(f"Error loading agents: {e}")
+            output_assistant_response(f"Error loading agents: {e}")
src/manager/budget_manager.py CHANGED
@@ -9,11 +9,27 @@ class BudgetManager():
     total_expense_budget = 10
     current_expense = 0
     is_budget_initialized = False
+    is_resource_budget_enabled = True
+    is_expense_budget_enabled = True
 
     def __init__(self):
         if not self.is_budget_initialized:
             self.total_resource_budget = self.calculate_total_budget()
             self.is_budget_initialized = True
+
+    def set_resource_budget_status(self, status: bool):
+        self.is_enabled = status
+        if status:
+            print("Budget manager is enabled.")
+        else:
+            print("Budget manager is disabled.")
+
+    def set_expense_budget_status(self, status: bool):
+        self.is_expense_budget_enabled = status
+        if status:
+            print("Expense budget manager is enabled.")
+        else:
+            print("Expense budget manager is disabled.")
 
     def calculate_total_budget(self)-> int:
         total_mem = 0
@@ -43,14 +59,20 @@ class BudgetManager():
         return self.total_resource_budget - self.current_resource_usage
 
     def can_spend_resource(self, cost):
+        if not self.is_resource_budget_enabled:
+            return True
         return True if self.current_resource_usage + cost <= self.total_resource_budget else False
 
     def add_to_resource_budget(self, cost):
+        if not self.is_resource_budget_enabled:
+            return
         if not self.can_spend_resource(cost):
             raise Exception("No resource budget remaining")
         self.current_resource_usage += cost
 
     def remove_from_resource_expense(self, cost):
+        if not self.is_resource_budget_enabled:
+            return
         if self.current_resource_usage - cost < 0:
             raise Exception("Not enough resource budget to remove")
         self.current_resource_usage -= cost
@@ -65,9 +87,13 @@ class BudgetManager():
         return self.total_expense_budget - self.current_expense
 
     def can_spend_expense(self, cost):
+        if not self.is_expense_budget_enabled:
+            return True
        return True if self.current_expense + cost <= self.total_expense_budget else False
 
     def add_to_expense_budget(self, cost):
+        if not self.is_expense_budget_enabled:
+            return
        if not self.can_spend_expense(cost):
            raise Exception("No expense budget remaining")
        self.current_expense += cost
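
The budget checks now short-circuit whenever the corresponding flag is off. One detail worth a second look: set_resource_budget_status assigns self.is_enabled, while can_spend_resource, add_to_resource_budget, and remove_from_resource_expense read is_resource_budget_enabled, so the resource toggle may not take effect as written. A minimal sketch of the intended toggle behaviour, shown with the expense flag:

    from src.manager.budget_manager import BudgetManager

    budget = BudgetManager()                  # flags default to True
    budget.set_expense_budget_status(False)   # expense checks now always pass
    print(budget.can_spend_expense(10**6))    # True, regardless of the budget

    budget.set_expense_budget_status(True)
    print(budget.can_spend_expense(10**6))    # False: exceeds total_expense_budget (10)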
src/manager/manager.py CHANGED
@@ -1,9 +1,13 @@
+from enum import Enum, auto
+from typing import List
 from google import genai
 from google.genai import types
 from google.genai.types import *
 import os
 from dotenv import load_dotenv
 import sys
+from src.manager.agent_manager import AgentManager
+from src.manager.budget_manager import BudgetManager
 from src.manager.tool_manager import ToolManager
 from src.manager.utils.suppress_outputs import suppress_output
 import logging
@@ -19,32 +23,59 @@ handler = logging.StreamHandler(sys.stdout)
 logger.addHandler(handler)
 
 
+class Mode(Enum):
+    ENABLE_AGENT_CREATION = auto()
+    ENABLE_LOCAL_AGENTS = auto()
+    ENABLE_CLOUD_AGENTS = auto()
+    ENABLE_TOOL_CREATION = auto()
+    ENABLE_TOOL_INVOCATION = auto()
+    ENABLE_RESOURCE_BUDGET = auto()
+    ENABLE_ECONOMY_BUDGET = auto()
+    ENABLE_MEMORY = auto()
+
+
 class GeminiManager:
-    def __init__(self, toolsLoader: ToolManager = None,
-                 system_prompt_file="./src/models/system4.prompt",
+    def __init__(self, system_prompt_file="./src/models/system4.prompt",
                  gemini_model="gemini-2.5-pro-exp-03-25",
-                 local_only=False, allow_tool_creation=True,
-                 cloud_only=False, use_economy=True,
-                 use_memory=True):
+                 modes: List[Mode] = []):
         load_dotenv()
-        self.toolsLoader: ToolManager = toolsLoader
-        if not toolsLoader:
-            self.toolsLoader: ToolManager = ToolManager()
+        self.budget_manager = BudgetManager()
+
+        self.toolsLoader: ToolManager = ToolManager()
 
-        self.local_only = local_only
-        self.allow_tool_creation = allow_tool_creation
-        self.cloud_only = cloud_only
-        self.use_economy = use_economy
-        self.use_memory = use_memory
+        self.agentManager: AgentManager = AgentManager()
 
         self.API_KEY = os.getenv("GEMINI_KEY")
         self.client = genai.Client(api_key=self.API_KEY)
-        self.toolsLoader.load_tools()
         self.model_name = gemini_model
-        self.memory_manager = MemoryManager() if use_memory else None
+        self.memory_manager = MemoryManager()
         with open(system_prompt_file, 'r', encoding="utf8") as f:
             self.system_prompt = f.read()
         self.messages = []
+        self.set_modes(modes)
+
+    def get_current_modes(self):
+        return [mode.name for mode in self.modes]
+
+    def set_modes(self, modes: List[Mode]):
+        self.modes = modes
+        self.budget_manager.set_resource_budget_status(
+            self.check_mode(Mode.ENABLE_RESOURCE_BUDGET))
+        self.budget_manager.set_expense_budget_status(
+            self.check_mode(Mode.ENABLE_ECONOMY_BUDGET))
+        self.toolsLoader.set_creation_mode(
+            self.check_mode(Mode.ENABLE_TOOL_CREATION))
+        self.toolsLoader.set_invocation_mode(
+            self.check_mode(Mode.ENABLE_TOOL_INVOCATION))
+        self.agentManager.set_creation_mode(
+            self.check_mode(Mode.ENABLE_AGENT_CREATION))
+        self.agentManager.set_local_invocation_mode(
+            self.check_mode(Mode.ENABLE_LOCAL_AGENTS))
+        self.agentManager.set_cloud_invocation_mode(
+            self.check_mode(Mode.ENABLE_CLOUD_AGENTS))
+
+    def check_mode(self, mode: Mode):
+        return mode in self.modes
 
     def generate_response(self, messages):
         tools = self.toolsLoader.getTools()
@@ -149,10 +180,12 @@ class GeminiManager:
                             types.Part.from_text(text="Error uploading file: "+str(e)))
                         continue
                 else:
-                    parts = [types.Part.from_text(text=message.get("content", ""))]
+                    parts = [types.Part.from_text(
+                        text=message.get("content", ""))]
             case "memories":
                 role = "user"
-                parts = [types.Part.from_text(text="Relevant memories: "+message.get("content", ""))]
+                parts = [types.Part.from_text(
+                    text="Relevant memories: "+message.get("content", ""))]
             case "tool":
                 role = "tool"
                 formatted_history.append(
@@ -160,12 +193,14 @@
                 continue
             case "function_call":
                 role = "model"
+                print(message)
                 formatted_history.append(
                     eval(message.get("content", "")))
                 continue
             case _:
                 role = "model"
-                parts = [types.Part.from_text(text=message.get("content", ""))]
+                parts = [types.Part.from_text(
+                    text=message.get("content", ""))]
         formatted_history.append(types.Content(
             role=role,
             parts=parts
@@ -173,8 +208,6 @@
         return formatted_history
 
     def get_k_memories(self, query, k=5, threshold=0.0):
-        if not self.use_memory:
-            return []
         memories = MemoryManager().get_memories()
         for i in range(len(memories)):
             memories[i] = memories[i]['memory']
@@ -189,20 +222,24 @@
         else:
             device = 'cpu'
         model = SentenceTransformer('all-MiniLM-L6-v2', device=device)
-        doc_embeddings = model.encode(memories, convert_to_tensor=True, device=device)
-        query_embedding = model.encode(query, convert_to_tensor=True, device=device)
-        similarity_scores = model.similarity(query_embedding, doc_embeddings)[0]
+        doc_embeddings = model.encode(
+            memories, convert_to_tensor=True, device=device)
+        query_embedding = model.encode(
+            query, convert_to_tensor=True, device=device)
+        similarity_scores = model.similarity(
+            query_embedding, doc_embeddings)[0]
         scores, indices = torch.topk(similarity_scores, k=top_k)
         results = []
         for score, idx in zip(scores, indices):
             if score >= threshold:
                 results.append(memories[idx])
         return results
-
+
     def run(self, messages):
         try:
-            if self.use_memory:
-                memories = self.get_k_memories(messages[-1]['content'], k=5, threshold=0.1)
+            if self.check_mode(Mode.ENABLE_MEMORY) and len(messages) > 0:
+                memories = self.get_k_memories(
+                    messages[-1]['content'], k=5, threshold=0.1)
                 if len(memories) > 0:
                     messages.append({
                         "role": "memories",
@@ -217,7 +254,7 @@
         except Exception as e:
             pass
         yield from self.invoke_manager(messages)
-
+
     def invoke_manager(self, messages):
         chat_history = self.format_chat_history(messages)
         logger.debug(f"Chat history: {chat_history}")
@@ -261,8 +298,8 @@
         if response.function_calls:
             for call in self.handle_tool_calls(response):
                 yield messages + [call]
-                if (call.get("role") == "tool"
-                    or (call.get("role") == "assistant" and call.get("metadata", {}).get("status") == "done")):
+                if (call.get("role") == "tool"
+                        or (call.get("role") == "assistant" and call.get("metadata", {}).get("status") == "done")):
                     messages.append(call)
                     yield from self.invoke_manager(messages)
         yield messages
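
GeminiManager now receives the enabled modes at construction time and pushes each flag down to the budget, tool, and agent managers in set_modes; run() only fetches memories when ENABLE_MEMORY is selected. A short sketch of querying the mode state at runtime, assuming GEMINI_KEY and the prompt file are in place:

    from src.manager.manager import GeminiManager, Mode

    manager = GeminiManager(
        gemini_model="gemini-2.0-flash",
        modes=[Mode.ENABLE_CLOUD_AGENTS, Mode.ENABLE_TOOL_INVOCATION])

    print(manager.check_mode(Mode.ENABLE_MEMORY))  # False: run() skips memory retrieval
    print(manager.get_current_modes())             # ['ENABLE_CLOUD_AGENTS', 'ENABLE_TOOL_INVOCATION']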
src/manager/tool_manager.py CHANGED
@@ -55,14 +55,30 @@ class Tool:
 class ToolManager:
     toolsImported: List[Tool] = []
     budget_manager: BudgetManager = BudgetManager()
+    is_creation_enabled: bool = True
+    is_invocation_enabled: bool = True
 
     def __init__(self):
         self.load_tools()
         self._output_budgets()
 
+    def set_creation_mode(self, status: bool):
+        self.is_creation_enabled = status
+        if status:
+            output_assistant_response("Tool creation mode is enabled.")
+        else:
+            output_assistant_response("Tool creation mode is disabled.")
+
+    def set_invocation_mode(self, status: bool):
+        self.is_invocation_enabled = status
+        if status:
+            output_assistant_response("Tool invocation mode is enabled.")
+        else:
+            output_assistant_response("Tool invocation mode is disabled.")
+
     def _output_budgets(self):
         output_assistant_response(f"Resource budget Remaining: {self.budget_manager.get_current_remaining_resource_budget()}")
-        output_assistant_response(f"Expense budget Remaining: {self.budget_manager.get_current_remaining_resource_budget()}")
+        output_assistant_response(f"Expense budget Remaining: {self.budget_manager.get_current_remaining_expense_budget()}")
 
     def load_tools(self):
         newToolsImported = []
@@ -84,6 +100,11 @@ class ToolManager:
         self.toolsImported = newToolsImported
 
     def runTool(self, toolName, query):
+        if not self.is_invocation_enabled:
+            raise Exception("Tool invocation mode is disabled")
+        if toolName == "ToolCreator":
+            if not self.is_creation_enabled:
+                raise Exception("Tool creation mode is disabled")
         self._output_budgets()
         for tool in self.toolsImported:
             if tool.name == toolName:
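
runTool now gates every call on is_invocation_enabled and additionally blocks the ToolCreator tool while creation is disabled. A rough sketch of the guard, assuming the default tools are loaded (the query payload is illustrative):

    from src.manager.tool_manager import ToolManager

    tools = ToolManager()
    tools.set_invocation_mode(True)
    tools.set_creation_mode(False)

    try:
        # ToolCreator is blocked while creation mode is off; other tools still run.
        tools.runTool("ToolCreator", {"name": "my_new_tool"})
    except Exception as e:
        print(e)    # Tool creation mode is disabled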