wishwakankanamg committed on
Commit
f21c09d
·
1 Parent(s): 203dbd3

logging is good now

Browse files
Files changed (4) hide show
  1. __pycache__/graph.cpython-310.pyc +0 -0
  2. app.log +61 -0
  3. graph.png +0 -0
  4. graph.py +24 -5
__pycache__/graph.cpython-310.pyc CHANGED
Binary files a/__pycache__/graph.cpython-310.pyc and b/__pycache__/graph.cpython-310.pyc differ
 
app.log CHANGED
@@ -49821,3 +49821,64 @@ Traceback (most recent call last):
49821
  raise GraphRecursionError(msg)
49822
  langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49823
  For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49821
  raise GraphRecursionError(msg)
49822
  langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49823
  For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49824
+ 2025-06-06 13:57:07:__main__:INFO: Starting the interface
49825
+ 2025-06-06 13:57:13:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49826
+ 2025-06-06 13:59:23:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49827
+ 2025-06-06 13:59:43:__main__:INFO: Prompt: You are a helpful assistant.
49828
+ 2025-06-06 14:00:23:__main__:ERROR: Exception occurred
49829
+ Traceback (most recent call last):
49830
+ File "/home/user/app/app.py", line 85, in chat_fn
49831
+ async for stream_mode, chunk in graph.astream(
49832
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49833
+ raise GraphRecursionError(msg)
49834
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49835
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49836
+ 2025-06-06 14:17:27:__main__:INFO: Prompt: You are a helpful assistant.
49837
+ 2025-06-06 14:18:29:__main__:ERROR: Exception occurred
49838
+ Traceback (most recent call last):
49839
+ File "/home/user/app/app.py", line 85, in chat_fn
49840
+ async for stream_mode, chunk in graph.astream(
49841
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49842
+ raise GraphRecursionError(msg)
49843
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49844
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49845
+ 2025-06-06 14:22:46:__main__:INFO: Starting the interface
49846
+ 2025-06-06 14:23:56:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49847
+ 2025-06-06 14:25:57:__main__:INFO: Starting the interface
49848
+ 2025-06-06 14:26:35:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49849
+ 2025-06-06 14:26:44:__main__:INFO: Prompt: You are a helpful assistant.
49850
+ 2025-06-06 14:26:44:__main__:ERROR: Exception occurred
49851
+ Traceback (most recent call last):
49852
+ File "/home/user/app/app.py", line 85, in chat_fn
49853
+ async for stream_mode, chunk in graph.astream(
49854
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
49855
+ async for _ in runner.atick(
49856
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
49857
+ _panic_or_proceed(
49858
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
49859
+ raise exc
49860
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
49861
+ return await task.proc.ainvoke(task.input, config)
49862
+ File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
49863
+ input = await step.ainvoke(input, config, **kwargs)
49864
+ File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
49865
+ ret = await self.afunc(*args, **kwargs)
49866
+ File "/home/user/app/graph.py", line 245, in guidance_node
49867
+ n = n+1
49868
+ UnboundLocalError: local variable 'n' referenced before assignment
49869
+ 2025-06-06 14:30:04:__main__:INFO: Starting the interface
49870
+ 2025-06-06 14:30:12:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49871
+ 2025-06-06 14:30:21:__main__:INFO: Prompt: You are a helpful assistant.
49872
+ 2025-06-06 14:31:00:__main__:ERROR: Exception occurred
49873
+ Traceback (most recent call last):
49874
+ File "/home/user/app/app.py", line 85, in chat_fn
49875
+ async for stream_mode, chunk in graph.astream(
49876
+ File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2677, in astream
49877
+ raise GraphRecursionError(msg)
49878
+ langgraph.errors.GraphRecursionError: Recursion limit of 20 reached without hitting a stop condition. You can increase the limit by setting the `recursion_limit` config key.
49879
+ For troubleshooting, visit: https://python.langchain.com/docs/troubleshooting/errors/GRAPH_RECURSION_LIMIT
49880
+ 2025-06-06 14:31:42:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49881
+ 2025-06-06 14:34:39:__main__:INFO: Starting the interface
49882
+ 2025-06-06 14:35:30:__main__:INFO: Prompt: You are a helpful assistant.
49883
+ 2025-06-06 14:35:39:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
49884
+ 2025-06-06 14:36:20:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
graph.png CHANGED
graph.py CHANGED
@@ -223,6 +223,23 @@ async def guidance_node(state: GraphProcessingState, config=None):
223
 
224
 
225
  print(f"Prompt: {state.prompt}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
226
 
227
  print(f"Tools Enabled: {state.tools_enabled}")
228
  print(f"Search Enabled: {state.search_enabled}")
@@ -236,9 +253,11 @@ async def guidance_node(state: GraphProcessingState, config=None):
236
  print(f"Drawing Complete: {state.drawing_complete}")
237
  print(f"Product Searching Complete: {state.product_searching_complete}")
238
  print(f"Purchasing Complete: {state.purchasing_complete}")
239
- print("--- End Guidance Node Debug ---") # Added for clarity
240
- print(f"\nMessage: {state.messages}")
241
-
 
 
242
 
243
  # Prepare context: stage completion statuses
244
  stage_order = ["brainstorming", "planning", "drawing", "product_searching", "purchasing"]
@@ -315,7 +334,7 @@ def assistant_routing(state: GraphProcessingState) -> str:
315
  def guidance_routing(state: GraphProcessingState) -> str:
316
  # Use logger.debug() for verbose debugging information
317
  # This allows you to control its visibility via logging configuration
318
- print("\n--- Guidance Node (Debug via print) ---") # Added a newline for clarity
319
 
320
  # Log the entire state as a dictionary (Pydantic's .model_dump())
321
  # This is very comprehensive. For large states, consider logging parts.
@@ -389,7 +408,7 @@ def define_workflow() -> CompiledStateGraph:
389
  workflow.add_conditional_edges("guidance_node", guidance_routing)
390
 
391
  # # Set end nodes
392
- workflow.set_entry_point("assistant_node")
393
  # workflow.set_finish_point("assistant_node")
394
  compiled_graph = workflow.compile(checkpointer=memory)
395
  try:
 
223
 
224
 
225
  print(f"Prompt: {state.prompt}")
226
+ for message in state.messages:
227
+ if isinstance(message, HumanMessage):
228
+ print(f"Human: {message.content}")
229
+ elif isinstance(message, AIMessage):
230
+ # Check if content is non-empty
231
+ if message.content:
232
+ # If content is a list (e.g., list of dicts), extract text
233
+ if isinstance(message.content, list):
234
+ texts = [item.get('text', '') for item in message.content if isinstance(item, dict) and 'text' in item]
235
+ if texts:
236
+ print(f"AI: {' '.join(texts)}")
237
+ elif isinstance(message.content, str):
238
+ print(f"AI: {message.content}")
239
+ elif isinstance(message, SystemMessage):
240
+ print(f"System: {message.content}")
241
+ elif isinstance(message, ToolMessage):
242
+ print(f"Tool: {message.content}")
243
 
244
  print(f"Tools Enabled: {state.tools_enabled}")
245
  print(f"Search Enabled: {state.search_enabled}")
 
253
  print(f"Drawing Complete: {state.drawing_complete}")
254
  print(f"Product Searching Complete: {state.product_searching_complete}")
255
  print(f"Purchasing Complete: {state.purchasing_complete}")
256
+
257
+
258
+ guidance_node.count = getattr(guidance_node, 'count', 0) + 1
259
+ print('\nGuidance Node called count', guidance_node.count)
260
+ print("\n--- End Guidance Node Debug ---") # Added for clarity
261
 
262
  # Prepare context: stage completion statuses
263
  stage_order = ["brainstorming", "planning", "drawing", "product_searching", "purchasing"]
 
334
  def guidance_routing(state: GraphProcessingState) -> str:
335
  # Use logger.debug() for verbose debugging information
336
  # This allows you to control its visibility via logging configuration
337
+ print("\n--- Guidance Routing Edge (Debug via print) ---") # Added a newline for clarity
338
 
339
  # Log the entire state as a dictionary (Pydantic's .model_dump())
340
  # This is very comprehensive. For large states, consider logging parts.
 
408
  workflow.add_conditional_edges("guidance_node", guidance_routing)
409
 
410
  # # Set end nodes
411
+ workflow.set_entry_point("guidance_node")
412
  # workflow.set_finish_point("assistant_node")
413
  compiled_graph = workflow.compile(checkpointer=memory)
414
  try: