Update app.py
Browse files
app.py
CHANGED
@@ -5,12 +5,12 @@ import inspect
|
|
5 |
import pandas as pd
|
6 |
from langchain_google_genai import ChatGoogleGenerativeAI
|
7 |
from langgraph.graph import StateGraph, MessagesState, START
|
|
|
|
|
|
|
8 |
from langchain_core.messages import SystemMessage, HumanMessage
|
9 |
from langchain_community.document_loaders import WikipediaLoader
|
10 |
from langchain_community.tools import TavilySearchResults
|
11 |
-
import operator
|
12 |
-
from typing import Annotated
|
13 |
-
from typing_extensions import TypedDict
|
14 |
|
15 |
# (Keep Constants as is)
|
16 |
# --- Constants ---
|
@@ -33,20 +33,13 @@ class BasicAgent:
|
|
33 |
print("BasicAgent initialized.")
|
34 |
def __call__(self, question: str) -> str:
|
35 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
36 |
-
|
37 |
-
"
|
38 |
-
|
39 |
-
}
|
40 |
-
final_state = self.graph.invoke(initial_state)
|
41 |
-
answer = final_state["answer"]
|
42 |
print(f"Agent returning fixed answer: {answer}")
|
43 |
return answer
|
44 |
|
45 |
-
|
46 |
-
question: str
|
47 |
-
answer: str
|
48 |
-
context: Annotated[list, operator.add]
|
49 |
-
|
50 |
def search_tavily(state):
|
51 |
|
52 |
""" Retrieve docs from web search """
|
@@ -66,6 +59,7 @@ def search_tavily(state):
|
|
66 |
|
67 |
return {"context": [formatted_search_docs]}
|
68 |
|
|
|
69 |
def search_wikipedia(state):
|
70 |
|
71 |
""" Retrieve docs from wikipedia """
|
@@ -84,34 +78,19 @@ def search_wikipedia(state):
|
|
84 |
|
85 |
return {"context": [formatted_search_docs]}
|
86 |
|
87 |
-
|
88 |
-
|
89 |
-
"""Node to give answer to the question"""
|
90 |
-
|
91 |
-
context = state["context"]
|
92 |
-
question = state["question"]
|
93 |
-
|
94 |
-
additional_context_template = """Here are some contexts about the question you can use if you find it helpful: {context}"""
|
95 |
-
additional_context = additional_context_template.format(context=context)
|
96 |
-
final_instruction = SYSTEM_MESSAGE + additional_context
|
97 |
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
# Append it to state
|
102 |
-
return {"answer": answer}
|
103 |
|
104 |
|
105 |
builder = StateGraph(State)
|
106 |
-
|
107 |
-
builder.add_node("
|
108 |
-
builder.
|
109 |
-
builder.
|
110 |
-
|
111 |
-
builder.add_edge(START, "search_wikipedia")
|
112 |
-
builder.add_edge(START, "search_tavily")
|
113 |
-
builder.add_edge("search_wikipedia", "generate_answer")
|
114 |
-
builder.add_edge("search_tavily", "generate_answer")
|
115 |
graph = builder.compile()
|
116 |
|
117 |
|
|
|
5 |
import pandas as pd
|
6 |
from langchain_google_genai import ChatGoogleGenerativeAI
|
7 |
from langgraph.graph import StateGraph, MessagesState, START
|
8 |
+
from langgraph.prebuilt import ToolNode
|
9 |
+
from langgraph.prebuilt import tools_condition
|
10 |
+
from langchain_core.tools import tool
|
11 |
from langchain_core.messages import SystemMessage, HumanMessage
|
12 |
from langchain_community.document_loaders import WikipediaLoader
|
13 |
from langchain_community.tools import TavilySearchResults
|
|
|
|
|
|
|
14 |
|
15 |
# (Keep Constants as is)
|
16 |
# --- Constants ---
|
|
|
33 |
print("BasicAgent initialized.")
|
34 |
def __call__(self, question: str) -> str:
|
35 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
36 |
+
messages = [SystemMessage(content=SYSTEM_MESSAGE)] + [HumanMessage(content=f"Answer the question: {question}")]
|
37 |
+
messages = self.graph.invoke({"messages": messages})
|
38 |
+
answer = messages['messages'][-1].content
|
|
|
|
|
|
|
39 |
print(f"Agent returning fixed answer: {answer}")
|
40 |
return answer
|
41 |
|
42 |
+
@tool
|
|
|
|
|
|
|
|
|
43 |
def search_tavily(state):
|
44 |
|
45 |
""" Retrieve docs from web search """
|
|
|
59 |
|
60 |
return {"context": [formatted_search_docs]}
|
61 |
|
62 |
+
@tool
|
63 |
def search_wikipedia(state):
|
64 |
|
65 |
""" Retrieve docs from wikipedia """
|
|
|
78 |
|
79 |
return {"context": [formatted_search_docs]}
|
80 |
|
81 |
+
# Expose the retrieval functions to the model as callable tools so the
# router node's LLM replies can request searches via tool calls.
_agent_tools = [search_tavily, search_wikipedia]
llm_with_tools = llm.bind_tools(_agent_tools)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
82 |
|
83 |
+
def router(state: MessagesState):
    """Router node of the graph: ask the tool-bound LLM to continue.

    Returns a state delta appending the model's reply; if that reply
    carries tool calls, the conditional edge (`tools_condition`) routes
    execution to the ToolNode next.
    """
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}
|
|
|
|
|
86 |
|
87 |
|
88 |
# Assemble the agent graph: a "router" node that calls the tool-bound LLM,
# and a "tools" node that executes any tool calls the model emits.
# FIX: the old `State` TypedDict was deleted in this change, so
# `StateGraph(State)` would raise NameError at import time. The new graph
# flows MessagesState (see `router`'s annotation and `__call__`'s payload).
builder = StateGraph(MessagesState)
builder.add_node("router", router)
builder.add_node("tools", ToolNode([search_tavily, search_wikipedia]))
# Every run starts at the router.
builder.add_edge(START, "router")
# If the router's reply contains tool calls, go to "tools"; otherwise END.
builder.add_conditional_edges("router", tools_condition)
# Tool results are handed back to the router for the next LLM turn.
builder.add_edge("tools", "router")
graph = builder.compile()
|
95 |
|
96 |
|