wishwakankanamg committed on
Commit
0e69073
·
1 Parent(s): 45e9de4
Files changed (3) hide show
  1. __pycache__/graph.cpython-310.pyc +0 -0
  2. app.log +55 -0
  3. graph.py +21 -2
__pycache__/graph.cpython-310.pyc CHANGED
Binary files a/__pycache__/graph.cpython-310.pyc and b/__pycache__/graph.cpython-310.pyc differ
 
app.log CHANGED
@@ -49744,3 +49744,58 @@ Traceback (most recent call last):
49744
  raise GraphRecursionError(msg)
49745
  langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49746
  For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49744
  raise GraphRecursionError(msg)
49745
  langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49746
  For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49747
+ 2025-06-06 09:16:45:__main__:INFO: Starting the interface
49748
+ 2025-06-06 09:16:56:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49749
+ 2025-06-06 09:17:02:__main__:INFO: Prompt: You are a helpful assistant.
49750
+ 2025-06-06 09:17:30:__main__:ERROR: Exception occurred
49751
+ Traceback (most recent call last):
49752
+ File "/home/user/app/app.py", line 85, in chat_fn
49753
+ async for stream_mode, chunk in graph.astream(
49754
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49755
+ raise GraphRecursionError(msg)
49756
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49757
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49758
+ 2025-06-06 09:23:02:__main__:INFO: Starting the interface
49759
+ 2025-06-06 09:23:14:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49760
+ 2025-06-06 09:23:17:__main__:INFO: Prompt: You are a helpful assistant.
49761
+ 2025-06-06 09:23:20:__main__:ERROR: Exception occurred
49762
+ Traceback (most recent call last):
49763
+ File "/home/user/app/app.py", line 85, in chat_fn
49764
+ async for stream_mode, chunk in graph.astream(
49765
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
49766
+ async for _ in runner.atick(
49767
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
49768
+ _panic_or_proceed(
49769
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
49770
+ raise exc
49771
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
49772
+ return await task.proc.ainvoke(task.input, config)
49773
+ File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
49774
+ input = await step.ainvoke(input, config, **kwargs)
49775
+ File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
49776
+ ret = await self.afunc(*args, **kwargs)
49777
+ File "/home/user/app/graph.py", line 194, in assistant_node
49778
+ if isinstance(msg, HumanMessage):
49779
+ NameError: name 'HumanMessage' is not defined
49780
+ 2025-06-06 09:24:10:__main__:INFO: Starting the interface
49781
+ 2025-06-06 09:24:17:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49782
+ 2025-06-06 09:24:26:__main__:INFO: Prompt: You are a helpful assistant.
49783
+ 2025-06-06 09:24:59:__main__:ERROR: Exception occurred
49784
+ Traceback (most recent call last):
49785
+ File "/home/user/app/app.py", line 85, in chat_fn
49786
+ async for stream_mode, chunk in graph.astream(
49787
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49788
+ raise GraphRecursionError(msg)
49789
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49790
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49791
+ 2025-06-06 09:26:06:__main__:INFO: Starting the interface
49792
+ 2025-06-06 09:26:11:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49793
+ 2025-06-06 09:26:25:__main__:INFO: Prompt: You are a helpful assistant.
49794
+ 2025-06-06 09:26:59:__main__:ERROR: Exception occurred
49795
+ Traceback (most recent call last):
49796
+ File "/home/user/app/app.py", line 85, in chat_fn
49797
+ async for stream_mode, chunk in graph.astream(
49798
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49799
+ raise GraphRecursionError(msg)
49800
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49801
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
graph.py CHANGED
@@ -6,7 +6,8 @@ from typing import Annotated
6
  from typing_extensions import TypedDict
7
 
8
  import aiohttp
9
- from langchain_core.messages import AnyMessage
 
10
  from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
11
  from langchain_core.tools import tool
12
  from langgraph.graph.state import CompiledStateGraph
@@ -190,7 +191,15 @@ async def assistant_node(state: GraphProcessingState, config=None):
190
  chain = prompt | assistant_model
191
  response = await chain.ainvoke({"messages": state.messages}, config=config)
192
 
193
- print(response.content)
 
 
 
 
 
 
 
 
194
 
195
  idea_complete = evaluate_idea_completion(response)
196
 
@@ -279,6 +288,16 @@ async def guidance_node(state: GraphProcessingState, config=None):
279
  # Get response from assistant
280
  response = await chain.ainvoke({"messages": state.messages}, config=config)
281
 
 
 
 
 
 
 
 
 
 
 
282
  return {
283
  "messages": response,
284
  "next_stage": incomplete[0] if incomplete else None
 
6
  from typing_extensions import TypedDict
7
 
8
  import aiohttp
9
+ from langchain_core.messages import AIMessage, HumanMessage, AnyMessage
10
+
11
  from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
12
  from langchain_core.tools import tool
13
  from langgraph.graph.state import CompiledStateGraph
 
191
  chain = prompt | assistant_model
192
  response = await chain.ainvoke({"messages": state.messages}, config=config)
193
 
194
+ for msg in response:
195
+ if isinstance(msg, HumanMessage):
196
+ print("Human:", msg.content)
197
+ elif isinstance(msg, AIMessage):
198
+ if isinstance(msg.content, list):
199
+ ai_texts = [part.get("text", "") for part in msg.content if isinstance(part, dict)]
200
+ print("AI:", " ".join(ai_texts))
201
+ else:
202
+ print("AI:", msg.content)
203
 
204
  idea_complete = evaluate_idea_completion(response)
205
 
 
288
  # Get response from assistant
289
  response = await chain.ainvoke({"messages": state.messages}, config=config)
290
 
291
+ for msg in response:
292
+ if isinstance(msg, HumanMessage):
293
+ print("Human:", msg.content)
294
+ elif isinstance(msg, AIMessage):
295
+ if isinstance(msg.content, list):
296
+ ai_texts = [part.get("text", "") for part in msg.content if isinstance(part, dict)]
297
+ print("AI:", " ".join(ai_texts))
298
+ else:
299
+ print("AI:", msg.content)
300
+
301
  return {
302
  "messages": response,
303
  "next_stage": incomplete[0] if incomplete else None