Harshil Patel committed
Commit 64a2e26 · Parent(s): 40afc88

Add tool to create AI agent
For now the tool can only create an Ollama agent (a sketch of the underlying call follows the file list below).
- .gitignore +1 -1
- CEO/toolLoader.py +3 -3
- tools/agent_creater_tool.py +74 -0
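
Under the hood the new tool just calls ollama.create with a model name, a base model, and a system prompt. A minimal sketch of that call, assuming the ollama Python client pinned by the tool (0.4.7), a running Ollama server, and a base model that has already been pulled; the values are illustrative and taken from the example usage in this commit:

    import ollama

    # Create a named "agent" model from an existing base model with a custom system prompt,
    # mirroring the call AgentCreator makes in its run() method.
    response = ollama.create(
        model="Kunla",       # name of the new agent model
        from_="llama3.2",    # base model; must already exist locally
        system="You love making the indian dish called Kulcha.",
        stream=False,
    )
    print(response["status"])  # the tool checks this value for the substring "success"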
.gitignore
CHANGED
@@ -1,4 +1,4 @@
-venv/
+*venv/
 __pycache__/
 *.pyc
 .env
CEO/toolLoader.py
CHANGED
@@ -5,7 +5,7 @@ import pip
 
 toolsImported = []
 
-TOOLS_DIRECTORY =
+TOOLS_DIRECTORY = os.path.abspath("../tools")
 
 class Tool:
     def __init__(self, toolClass):
@@ -62,5 +62,5 @@ class ToolLoader:
 toolLoader = ToolLoader()
 
 # Example usage
-
-
+print(toolLoader.getTools())
+print(toolLoader.runTool("AgentCreator", {"agent_name": "Kunla","base_model":"llama3.2","system_prompt": "You love making the indian dish called Kulcha. You declare that in every conversation you have in a witty way." }))
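
If TOOLS_DIRECTORY resolves correctly, the first example line should print the discovered tools and the second should print the dict returned by AgentCreator.run, which (per the new file below) is expected to look roughly like

    {"status": "success", "message": "Agent successfully created", "output": None}

on the first run, and an "Agent already exists" error on later runs.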
tools/agent_creater_tool.py
ADDED
@@ -0,0 +1,74 @@
+import importlib
+
+__all__ = ['AgentCreator']
+
+class AgentCreator():
+    dependencies = ["ollama==0.4.7",
+                    "pydantic==2.11.1",
+                    "pydantic_core==2.33.0"]
+
+    inputSchema = {
+        "name": "AgentCreator",
+        "description": "Creates an AI agent for a given AI model with a given system prompt",
+        "parameters": {
+            "type": "object",
+            "properties": {
+                "agent_name": {
+                    "type": "string",
+                    "description": "Name of the AI agent that is to be created"
+                },
+                "base_model": {
+                    "type": "string",
+                    "description": "A base model from which the new agent model is to be created"
+                },
+                "system_prompt": {
+                    "type": "string",
+                    "description": "A string containing the system prompt for the AI agent"
+                }
+            }
+        }
+    }
+
+    def __init__(self):
+        pass
+
+    def does_agent_exist(self, agent_name):
+        ollama = importlib.import_module("ollama")
+        all_agents = [a.model for a in ollama.list().models]
+        if agent_name in all_agents or f'{agent_name}:latest' in all_agents:
+            return True
+
+        return False
+
+    def run(self, **kwargs):
+        print("Running Agent Creator")
+        agent_name = kwargs.get("agent_name")
+        base_model = kwargs.get("base_model")
+        system_prompt = kwargs.get("system_prompt")
+        ollama = importlib.import_module("ollama")
+
+        if self.does_agent_exist(agent_name):
+            return {
+                "status": "error",
+                "message": "Agent already exists",
+                "output": None
+            }
+        ollama_response = ollama.create(
+            model=agent_name,
+            from_=base_model,
+            system=system_prompt,
+            stream=False
+        )
+
+        if "success" in ollama_response["status"]:
+            return {
+                "status": "success",
+                "message": "Agent successfully created",
+                "output": None
+            }
+        else:
+            return {
+                "status": "error",
+                "message": "Agent creation failed",
+                "output": None
+            }
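
For a quick standalone check outside the ToolLoader, the tool can also be exercised directly. A hypothetical snippet (names taken from the diff above; assumes tools/ is on the import path and an Ollama server with the base model is available):

    from tools.agent_creater_tool import AgentCreator

    creator = AgentCreator()
    result = creator.run(
        agent_name="Kunla",
        base_model="llama3.2",
        system_prompt="You love making the indian dish called Kulcha.",
    )
    print(result)  # expected: status "success" the first time, "Agent already exists" afterwards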