wishwakankanamg committed
Commit ad73b35 · 1 Parent(s): 8b7605f
Files changed (4)
  1. __pycache__/agent.cpython-310.pyc +0 -0
  2. agent.py +14 -3
  3. app.py +2 -2
  4. requirements.txt +1 -0
__pycache__/agent.cpython-310.pyc CHANGED
Binary files a/__pycache__/agent.cpython-310.pyc and b/__pycache__/agent.cpython-310.pyc differ
 
agent.py CHANGED
@@ -147,10 +147,21 @@ def build_graph(provider: str = "huggingface"):
     print("\n--- Assistant Node ---")
     print("Incoming messages to assistant:")
     for msg in state["messages"]:
-        msg.pretty_print() #
+        msg.pretty_print()
+
+    # Defensive check
+    if not state["messages"]:
+        raise ValueError("No messages provided to the assistant.")
+
+    # Must contain at least one HumanMessage (Claude requirement)
+    if not any(isinstance(msg, HumanMessage) for msg in state["messages"]):
+        raise ValueError("Claude requires at least one HumanMessage in the input.")
+
+    response = llm_with_tools.invoke(state["messages"])
+    return {"messages": state["messages"] + [response]}
 
-    """Assistant node"""
-    return {"messages": [llm_with_tools.invoke(state["messages"])]}
+    # """Assistant node"""
+    # return {"messages": [llm_with_tools.invoke(state["messages"])]}
 
 # def retriever(state: MessagesState):
 #     """Retriever node"""
app.py CHANGED
@@ -37,8 +37,8 @@ except Exception as e:
 
 def run_langgraph_agent(user_input: str):
     graph = build_graph()
-    result = graph.invoke({"input": user_input})
-    return result["output"] if "output" in result else result
+    result = graph.invoke({"messages": [HumanMessage(content=user_input)]})
+    return result["messages"][-1].content if "messages" in result else result
 
 
 demo = gr.Interface(
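The updated helper wraps the raw Gradio string in a HumanMessage list, which is exactly what the new check in agent.py expects. This hunk does not show HumanMessage being imported in app.py, so the sketch below assumes the usual langchain-core import (and that build_graph comes from agent.py, matching this repo's layout).

from langchain_core.messages import HumanMessage  # assumed import, not shown in this hunk
from agent import build_graph                     # assumed, matching this repo's layout

def run_langgraph_agent(user_input: str):
    graph = build_graph()
    # Wrap the raw user string so the graph state carries a proper HumanMessage.
    result = graph.invoke({"messages": [HumanMessage(content=user_input)]})
    return result["messages"][-1].content if "messages" in result else result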
requirements.txt CHANGED
@@ -3,6 +3,7 @@ gradio
 langchain
 langchain-community
 langchain-core
+langchain-anthropic
 langchain-google-genai
 langchain-huggingface
 langchain-groq
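langchain-anthropic is the integration package that provides the ChatAnthropic chat model, which the "Claude requirement" comment in agent.py refers to. The commit does not show how the provider is wired into build_graph, so the following is only a sketch of how a Claude-backed llm_with_tools might be constructed; the model name and tool list are placeholders, and ANTHROPIC_API_KEY must be set in the environment.

from langchain_anthropic import ChatAnthropic

def make_llm_with_tools(tools):
    # Hypothetical wiring; not part of this commit.
    llm = ChatAnthropic(model="claude-3-5-sonnet-latest")  # placeholder model name
    return llm.bind_tools(tools)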