File size: 2,917 Bytes
98b8ff7
 
 
 
 
 
 
 
ff0624e
98b8ff7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff0624e
91afd29
 
98b8ff7
 
 
 
 
 
 
 
 
 
91afd29
98b8ff7
 
91afd29
98b8ff7
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
from langchain_core.prompts import ChatPromptTemplate
from langgraph.graph import StateGraph, END, MessagesState
import datetime
from src.tools.langgraphtool import book_appointment, get_next_available_appointment, cancel_appointment
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import ToolNode
from langchain_core.messages import HumanMessage
from src.LLMS.groqllm import GroqLLM
from src.tools.langgraphtool import APPOINTMENTS

# Module-level conversation transcript, mutated in place by
# Caller_Agent.receive_message_from_caller.
# NOTE(review): global state means every Caller_Agent instance shares one
# transcript — confirm this is intended (it breaks multi-session use).
CONVERSATION = []

class Caller_Agent:
    """Appointment-booking assistant built as a LangGraph agent/tool loop.

    The compiled graph alternates between an LLM node ("agent") and a tool
    execution node ("action") until the model stops emitting tool calls.
    The graph is built once (lazily) and reused across messages.
    """

    def __init__(self, model):
        """Store the chat model.

        Args:
            model: a LangChain chat model supporting ``.bind_tools()``.
        """
        self.llm = model
        # Populated by call_tool(); cached so the graph is compiled only once.
        self.caller_model = None
        self.caller_app = None

    # Nodes
    def call_caller_model(self, state: MessagesState):
        """LLM node: invoke the prompted model on the current messages.

        Returns a partial state update containing only the new AI message,
        which LangGraph merges into ``messages``.
        """
        # Build a fresh payload rather than mutating the graph-managed state
        # dict; the prompt template consumes {current_time} and {messages}.
        payload = {
            **state,
            "current_time": datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),
        }
        response = self.caller_model.invoke(payload)
        return {"messages": [response]}

    # Edges
    def should_continue_caller(self, state: MessagesState):
        """Route to the tool node while the last AI message requests tools."""
        last_message = state["messages"][-1]
        return "end" if not last_message.tool_calls else "continue"

    def call_tool(self):
        """Build the prompt/model chain and compile the agent graph.

        Side effects: sets ``self.caller_model`` (prompt | tool-bound LLM)
        and ``self.caller_app`` (the compiled LangGraph application).
        """
        caller_tools = [book_appointment, get_next_available_appointment, cancel_appointment]
        tool_node = ToolNode(caller_tools)

        caller_pa_prompt = """You are a personal assistant, and need to help the user to book or cancel appointments, you should check the available appointments before booking anything. Be extremely polite, so much so that it is almost rude.
        Current time: {current_time}
        """

        caller_chat_template = ChatPromptTemplate.from_messages([
            ("system", caller_pa_prompt),
            ("placeholder", "{messages}"),
        ])

        self.caller_model = caller_chat_template | self.llm.bind_tools(caller_tools)

        # Graph: agent -> (tools? -> action -> agent ...) -> END
        caller_workflow = StateGraph(MessagesState)

        # Nodes
        caller_workflow.add_node("agent", self.call_caller_model)
        caller_workflow.add_node("action", tool_node)

        # Edges: loop through the tool node while the model keeps calling tools.
        caller_workflow.add_conditional_edges(
            "agent",
            self.should_continue_caller,
            {
                "continue": "action",
                "end": END,
            },
        )
        caller_workflow.add_edge("action", "agent")

        # Entry point and compilation.
        caller_workflow.set_entry_point("agent")
        self.caller_app = caller_workflow.compile()

    # Invoke model
    def receive_message_from_caller(self, message):
        """Append a user message, run the agent, and return the results.

        Args:
            message: raw user text for this turn.

        Returns:
            (CONVERSATION, APPOINTMENTS, compiled_graph) — the shared
            transcript, the shared appointment store, and the graph app.
        """
        CONVERSATION.append(HumanMessage(content=message, type="human"))
        # Compile the graph once and reuse it; previously it was rebuilt
        # (and recompiled) on every incoming message.
        if self.caller_app is None:
            self.call_tool()
        state = {
            "messages": CONVERSATION,
        }
        new_state = self.caller_app.invoke(state)
        # Append only the messages produced by this turn.
        CONVERSATION.extend(new_state["messages"][len(CONVERSATION):])
        return CONVERSATION, APPOINTMENTS, self.caller_app