from src.agent_manager import AgentManager
__all__ = ['AgentCreator']

class AgentCreator:
    """Tool that registers a new AI agent with AgentManager from a chosen base model."""
    dependencies = ["ollama==0.4.7",
                    "pydantic==2.11.1",
                    "pydantic_core==2.33.0"]

    inputSchema = {
        "name": "AgentCreator",
        "description": "Creates an AI agent for you. Please make sure to invoke the created agent using the AskAgent tool.",
        "parameters": {
            "type": "object",
            "properties":{
                "agent_name": {
                    "type": "string",
                    "description": "Name of the AI agent that is to be created. This name cannot have spaces or special characters. It should be a single word.",
                },
                "base_model": {
                    "type": "string",
                    "description": "A base model from which the new agent mode is to be created. Available models are: llama3.2, mistral, gemini-2.0-flash"
                },
                "system_prompt": {
                    "type": "string",
                    "description": "This is the system prompt that will be used to create the agent. It should be a string that describes the role of the agent and its capabilities."
                },
                "description": {
                    "type": "string",
                    "description": "Description of the agent. This is a string that describes the agent and its capabilities. It should be a single line description.",
                },
            },
            "required": ["agent_name", "base_model", "system_prompt", "description"],
        },
        "creates": {
            "selector": "base_model",
            "types": {
                "llama3.2":{
                    "description": "3 Billion parameter model",
                    "create_cost": 10,
                    "invoke_cost": 20,
                },
                "mistral":{
                    "description": "7 Billion parameter model",
                    "create_cost": 20,
                    "invoke_cost": 50,
                },
                "gemini-2.0-flash": {
                    "description": "40 Billion parameter model",
                    "create_cost": 30,
                    "invoke_cost": 60,
                }
            }
        }
    }


    def run(self, **kwargs):
        """Create a new agent and return a status dict including the creation cost."""
        print("Running Agent Creator")
        agent_name = kwargs.get("agent_name")
        base_model = kwargs.get("base_model")
        system_prompt = kwargs.get("system_prompt")
        description = kwargs.get("description")

        # Look up the per-model costs defined in the schema; guard against an
        # unknown base model instead of raising an unhandled KeyError.
        model_info = self.inputSchema["creates"]["types"].get(base_model)
        if model_info is None:
            return {"status": "error", "message": f"Unknown base model: {base_model}", "output": None}
        create_cost = model_info["create_cost"]
        invoke_cost = model_info["invoke_cost"]

        agent_manager = AgentManager()
        try:
            agent_manager.create_agent(
                agent_name=agent_name,
                base_model=base_model,
                system_prompt=system_prompt,
                description=description,
                create_cost=create_cost,
                invoke_cost=invoke_cost
            )
        except ValueError as e:
            return {
                "status": "error",
                "message": f"Error occurred: {str(e)}",
                "output": None
            }
        
        return {
            "status": "success",
            "message": "Agent successfully created",
            "cost": create_cost,
        }
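

# --- Example usage (illustrative sketch only, not part of the tool) ---
# Shows how an orchestrator might invoke this tool directly. The argument
# values below are hypothetical; only the keys mirror inputSchema above, and
# the call assumes src.agent_manager.AgentManager is importable in this project.
if __name__ == "__main__":
    creator = AgentCreator()
    result = creator.run(
        agent_name="ResearchAssistant",
        base_model="llama3.2",
        system_prompt="You are a helpful research assistant.",
        description="Summarizes papers and answers research questions.",
    )
    print(result)  # e.g. {"status": "success", "message": "Agent successfully created", "cost": 10}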