Kunal Pai committed
Commit 618af48 · Parent(s): 3618e59

refactor: remove unused AgentCreator class and related code

src/tools/default_tools/test_cost/agent_creator_tool.py DELETED
@@ -1,168 +0,0 @@
- from src.manager.agent_manager import AgentManager
- from src.manager.config.model_selector import choose_best_model
- from src.manager.utils.runtime_selector import detect_runtime_environment
-
- __all__ = ['AgentCreator']
-
- class AgentCreator():
-     dependencies = ["ollama==0.4.7",
-                     "pydantic==2.11.1",
-                     "pydantic_core==2.33.0"]
-
-     inputSchema = {
-         "name": "AgentCreator",
-         "description": "Creates an AI agent for you. Please make sure to invoke the created agent using the AskAgent tool.",
-         "parameters": {
-             "type": "object",
-             "properties":{
-                 "agent_name": {
-                     "type": "string",
-                     "description": "Name of the AI agent that is to be created. This name cannot have spaces or special characters. It should be a single word.",
-                 },
-                 "base_model": {
-                     "type": "string",
-                     "description": "A base model from which the new agent mode is to be created. Available models are: llama3.2, mistral, gemini-2.5-flash-preview-04-17, gemini-2.5-pro-preview-03-25, gemini-2.0-flash, gemini-2.0-flash-lite, gemini-1.5-flash, gemini-1.5-flash-8b, gemini-1.5-pro, and gemini-2.0-flash-live-001"
-                 },
-                 "system_prompt": {
-                     "type": "string",
-                     "description": "This is the system prompt that will be used to create the agent. It should be a string that describes the role of the agent and its capabilities."
-                 },
-                 "description": {
-                     "type": "string",
-                     "description": "Description of the agent. This is a string that describes the agent and its capabilities. It should be a single line description.",
-                 },
-             },
-             "required": ["agent_name", "system_prompt", "description"],
-             #"required": ["agent_name", "base_model", "system_prompt", "description"],
-         },
-         "creates": {
-             "selector": "base_model",
-             "types": {
-                 "llama3.2":{
-                     "description": "3 Billion parameter model",
-                     "create_cost": 10,
-                     "invoke_cost": 20,
-                 },
-                 "mistral":{
-                     "description": "7 Billion parameter model",
-                     "create_cost": 20,
-                     "invoke_cost": 50,
-                 },
-                 "gemini-2.5-flash-preview-04-17": {
-                     "description": "Adaptive thinking, cost efficiency",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-2.5-pro-preview-03-25": {
-                     "description": "Enhanced thinking and reasoning, multimodal understanding, advanced coding, and more",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-2.0-flash": {
-                     "description": "Next generation features, speed, thinking, realtime streaming, and multimodal generation",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-2.0-flash-lite": {
-                     "description": "Cost efficiency and low latency",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-1.5-flash": {
-                     "description": "Fast and versatile performance across a diverse variety of tasks",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-1.5-flash-8b": {
-                     "description": "High volume and lower intelligence tasks",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 "gemini-1.5-pro": {
-                     "description": "Complex reasoning tasks requiring more intelligence",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 },
-                 # "gemini-embedding-exp": {
-                 #     "description": "Measuring the relatedness of text strings",
-                 #     "create_cost": 20,
-                 #     "invoke_cost": 50
-                 # },
-                 # "imagen-3.0-generate-002": {
-                 #     "description": "Our most advanced image generation model",
-                 #     "create_cost": 20,
-                 #     "invoke_cost": 50
-                 # },
-                 # "veo-2.0-generate-001": {
-                 #     "description": "High quality video generation",
-                 #     "create_cost": 20,
-                 #     "invoke_cost": 50
-                 # },
-                 "gemini-2.0-flash-live-001": {
-                     "description": "Low-latency bidirectional voice and video interactions",
-                     "create_cost": 20,
-                     "invoke_cost": 50
-                 }
-             }
-         }
-     }
-
-     def run(self, **kwargs):
-         print("Running Agent Creator")
-
-         agent_name = kwargs.get("agent_name")
-         base_model = kwargs.get("base_model")
-
-         # NEW: read flags from kwargs
-         use_local_only = kwargs.get("use_local_only", False)
-         use_api_only = kwargs.get("use_api_only", False)
-
-         if not base_model:
-             env = detect_runtime_environment()
-             print(f"\n[DEBUG] Detected Runtime Environment: {env}")
-
-             from src.cost_benefit import get_best_model
-             model_meta = get_best_model(
-                 runtime_env=env,
-                 use_local_only=use_local_only,
-                 use_api_only=use_api_only
-             )
-             base_model = model_meta["model"]
-         else:
-             model_meta = {"model": base_model}
-
-         print(f"[DEBUG] Selected Model: {base_model}")
-
-         if base_model not in self.inputSchema["creates"]["types"]:
-             print(f"[WARN] Auto-selected model '{base_model}' not in schema. Falling back to gemini-2.0-flash")
-             base_model = "gemini-2.0-flash"
-
-         system_prompt = kwargs.get("system_prompt")
-         description = kwargs.get("description")
-
-         create_cost = self.inputSchema["creates"]["types"][base_model]["create_cost"]
-         invoke_cost = self.inputSchema["creates"]["types"][base_model]["invoke_cost"]
-
-         agent_manager = AgentManager()
-         try:
-             _, remaining_budget = agent_manager.create_agent(
-                 agent_name=agent_name,
-                 base_model=base_model,
-                 system_prompt=system_prompt,
-                 description=description,
-                 create_resource_cost=create_cost,
-                 invoke_resource_cost=invoke_cost
-             )
-         except ValueError as e:
-             return {
-                 "status": "error",
-                 "message": f"Error occurred: {str(e)}",
-                 "output": None
-             }
-
-         return {
-             "status": "success",
-             "message": f"Agent '{agent_name}' created using model '{base_model}'",
-             "model_info": model_meta,
-             "remaining_budget": remaining_budget,
-         }
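
For reference, a minimal usage sketch of the tool removed in this commit, driven through its run() entry point. The parameter names and the shape of the returned dict come from the deleted file; the agent name, prompt text, and surrounding handling below are hypothetical illustration only.

# Hypothetical sketch: invoking the now-removed AgentCreator tool.
# Assumes the pre-deletion module path is still importable; all argument
# values are illustrative and not taken from the repository.
from src.tools.default_tools.test_cost.agent_creator_tool import AgentCreator

creator = AgentCreator()
result = creator.run(
    agent_name="ResearchAgent",       # single word, no spaces or special characters
    base_model="gemini-2.0-flash",    # optional; auto-selected from the runtime environment if omitted
    system_prompt="You are a research assistant that summarizes papers.",
    description="Summarizes research papers into short overviews.",
)
if result["status"] == "success":
    # Success responses carry the message, model_info, and remaining_budget keys.
    print(result["message"])
    print("Remaining budget:", result["remaining_budget"])
else:
    # Error responses carry status, message, and a None output.
    print("Agent creation failed:", result["message"])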