"""LangGraph Agent"""
from dotenv import load_dotenv  # python-dotenv, used to read the .env file referenced below
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI
from langgraph.graph import START, MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

from tools import level1_tools
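# NOTE: level1_tools is assumed to be a list of LangChain tools (e.g. @tool-decorated
# functions) defined in tools.py; its exact contents are not shown in this file.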


# Build graph function
def build_agent_graph():
    """Build the agent graph."""
    # Load environment variables (e.g. OPENAI_API_KEY) from a .env file
    load_dotenv()

    llm = ChatOpenAI(model="gpt-4o-mini")

    # Bind the tools to the LLM so it can emit tool calls
    llm_with_tools = llm.bind_tools(level1_tools)

    # Assistant node: call the tool-aware LLM on the accumulated messages
    def assistant(state: MessagesState):
        """Assistant node"""
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    # Assemble the graph: an assistant node plus a tool-execution node that loops
    # back to the assistant until no more tool calls are made
    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(level1_tools))
    builder.add_edge(START, "assistant")
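    # Route to the "tools" node when the assistant's last message contains tool calls,
    # otherwise end the run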
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
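    # Feed tool results back to the assistant for the next reasoning step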
    builder.add_edge("tools", "assistant")

    # Compile graph
    return builder.compile()


# Quick manual test: build the graph and run it on a sample question
if __name__ == "__main__":
    question1 = "How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)?"
    question2 = "Convert 10 miles to kilometers."  # alternate test question (not used below)
    # Build the graph
    graph = build_agent_graph()
    # Run the graph and print the full message trace
    messages = [HumanMessage(content=question1)]
    result = graph.invoke({"messages": messages})
    for m in result["messages"]:
        m.pretty_print()