# src/langgraphagent/caller_agent.py
from langchain_core.prompts import ChatPromptTemplate
from langgraph.graph import StateGraph, END, MessagesState
import datetime
from src.tools.langgraphtool import book_appointment, get_next_available_appointment, cancel_appointment
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import ToolNode
from langchain_core.messages import HumanMessage
from src.LLMS.groqllm import GroqLLM
from src.tools.langgraphtool import APPOINTMENTS
# Module-level conversation history shared across calls to the agent.
CONVERSATION = []
class Caller_Agent:
    def __init__(self, model):
        self.llm = model

    # Nodes
    def call_caller_model(self, state: MessagesState):
        # Stamp the state with the current time so the prompt's {current_time}
        # placeholder is filled on every turn.
        state["current_time"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        response = self.caller_model.invoke(state)
        return {"messages": [response]}
    # Edges
    def should_continue_caller(self, state: MessagesState):
        # Keep looping into the tool node while the last AI message requests
        # tool calls; otherwise end the graph run.
        messages = state["messages"]
        last_message = messages[-1]
        if not last_message.tool_calls:
            return "end"
        else:
            return "continue"
    def call_tool(self):
        caller_tools = [book_appointment, get_next_available_appointment, cancel_appointment]
        tool_node = ToolNode(caller_tools)

        caller_pa_prompt = """You are a personal assistant who helps the user book or cancel appointments. Always check the available appointments before booking anything. Be extremely polite, so much so that it is almost rude.
        Current time: {current_time}
        """
        caller_chat_template = ChatPromptTemplate.from_messages([
            ("system", caller_pa_prompt),
            ("placeholder", "{messages}"),
        ])
        # Chain the prompt into the LLM with the appointment tools bound to it.
        self.caller_model = caller_chat_template | self.llm.bind_tools(caller_tools)
        # Graph
        caller_workflow = StateGraph(MessagesState)

        # Add Nodes
        caller_workflow.add_node("agent", self.call_caller_model)
        caller_workflow.add_node("action", tool_node)

        # Add Edges
        caller_workflow.add_conditional_edges(
            "agent",
            self.should_continue_caller,
            {
                "continue": "action",
                "end": END,
            },
        )
        caller_workflow.add_edge("action", "agent")

        # Set Entry Point and build the graph
        caller_workflow.set_entry_point("agent")
        self.caller_app = caller_workflow.compile()
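        # Optional: the compiled state graph can be rendered for display, e.g.
        # self.caller_app.get_graph().draw_mermaid_png(); this assumes the
        # optional mermaid rendering dependencies are available.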
    # Invoke model
    def receive_message_from_caller(self, message):
        CONVERSATION.append(HumanMessage(content=message))
        state = {
            "messages": CONVERSATION,
        }
        print(state)  # debug: show the state handed to the graph
        # Build the workflow and compile self.caller_app before invoking it.
        self.call_tool()
        new_state = self.caller_app.invoke(state)
        # Append only the messages generated on this turn to the shared history.
        CONVERSATION.extend(new_state["messages"][len(CONVERSATION):])
        return CONVERSATION, APPOINTMENTS, self.caller_app
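

if __name__ == "__main__":
    # Usage sketch: any LangChain chat model that supports .bind_tools() can be
    # passed to Caller_Agent (e.g. a Groq model via GroqLLM); ChatOpenAI is used
    # here purely for illustration and assumes OPENAI_API_KEY is set.
    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
    agent = Caller_Agent(llm)
    conversation, appointments, app = agent.receive_message_from_caller(
        "When is the next available appointment?"
    )
    for msg in conversation:
        print(f"{msg.type}: {msg.content}")
    print("Appointments:", appointments)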