Space status: Runtime error
Commit b8b2aff · Parent: 261ae2d
fiX

Files changed:
- __pycache__/graph.cpython-310.pyc (+0 -0)
- app.log (+149 -0)
- graph.py (+71 -14)
__pycache__/graph.cpython-310.pyc
CHANGED
Binary files a/__pycache__/graph.cpython-310.pyc and b/__pycache__/graph.cpython-310.pyc differ
app.log
CHANGED
@@ -50483,3 +50483,152 @@ Traceback (most recent call last):
     elif isinstance(message, SystemMessage):
 NameError: name 'SystemMessage' is not defined
 2025-06-06 19:34:07:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:35:43:__main__:INFO: Starting the interface
+2025-06-06 19:35:47:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:36:17:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:38:19:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:38:57:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:38:57:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 145, in guidance_node
+    elif isinstance(message, SystemMessage):
+NameError: name 'SystemMessage' is not defined
+2025-06-06 19:42:46:__main__:INFO: Starting the interface
+2025-06-06 19:42:50:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:42:59:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:42:59:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 147, in guidance_node
+    elif isinstance(message, ToolMessage):
+NameError: name 'ToolMessage' is not defined. Did you mean: 'AnyMessage'?
+2025-06-06 19:43:50:__main__:INFO: Starting the interface
+2025-06-06 19:43:55:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:43:59:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:44:44:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:45:00:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:45:19:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:48:01:__main__:INFO: Starting the interface
+2025-06-06 19:48:04:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:49:31:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:49:31:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 137, in guidance_node
+    filtered_messages.append(message)
+NameError: name 'filtered_messages' is not defined
+2025-06-06 19:50:04:__main__:INFO: Starting the interface
+2025-06-06 19:50:08:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:52:01:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:52:22:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:54:21:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:55:08:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 19:55:15:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:56:47:__main__:INFO: Starting the interface
+2025-06-06 19:57:07:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 19:58:40:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 20:25:30:__main__:INFO: Starting the interface
+2025-06-06 20:25:47:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 20:25:47:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 172, in guidance_node
+    f"Based on the status, the most logical next stage appears to be: **'{proposed_next_stage}'**.\n\n"
+NameError: name 'proposed_next_stage' is not defined
+2025-06-06 20:26:00:__main__:INFO: Greeting added for new user via handle_initial_greeting_load.
+2025-06-06 20:29:14:__main__:INFO: Starting the interface
+2025-06-06 20:29:45:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 20:29:45:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 198, in guidance_node
+    final_prompt = "\n".join([guidance_prompt, state.prompt, ASSISTANT_SYSTEM_PROMPT_BASE])
+NameError: name 'guidance_prompt' is not defined. Did you mean: 'guidance_prompt_text'?
+2025-06-06 20:30:18:__main__:INFO: Starting the interface
+2025-06-06 20:30:39:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 20:30:52:__main__:INFO: Prompt: You are a helpful assistant.
+2025-06-06 20:30:52:__main__:ERROR: Exception occurred
+Traceback (most recent call last):
+  File "/home/user/app/app.py", line 97, in chat_fn
+    async for stream_mode, chunk in graph.astream(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/__init__.py", line 2655, in astream
+    async for _ in runner.atick(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 400, in atick
+    _panic_or_proceed(
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/runner.py", line 509, in _panic_or_proceed
+    raise exc
+  File "/usr/local/lib/python3.10/site-packages/langgraph/pregel/retry.py", line 136, in arun_with_retry
+    return await task.proc.ainvoke(task.input, config)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 676, in ainvoke
+    input = await step.ainvoke(input, config, **kwargs)
+  File "/usr/local/lib/python3.10/site-packages/langgraph/utils/runnable.py", line 440, in ainvoke
+    ret = await self.afunc(*args, **kwargs)
+  File "/home/user/app/graph.py", line 145, in guidance_node
+    elif isinstance(message, SystemMessage):
+NameError: name 'SystemMessage' is not defined
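The added log lines record five distinct NameErrors, all raised inside guidance_node in graph.py: SystemMessage and ToolMessage are referenced without being imported, filtered_messages is appended to before it is created, and proposed_next_stage / guidance_prompt are used under names that do not exist. As a rough illustration only (the full guidance_node body is not part of this diff, and the filtering policy below is an assumption), the message-filtering portion with the imports and initialization implied by the first three errors might look like this:

# Illustrative sketch, not code from graph.py — shows only the imports and
# initialization that the NameErrors above point to.
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage

def filter_messages(messages: list) -> list:
    filtered_messages = []  # must exist before .append() (third NameError)
    for message in messages:
        if isinstance(message, (HumanMessage, AIMessage)):
            filtered_messages.append(message)
        elif isinstance(message, SystemMessage):
            continue  # needs the SystemMessage import above, otherwise NameError
        elif isinstance(message, ToolMessage):
            continue  # same for ToolMessage
    return filtered_messages

The remaining two errors (proposed_next_stage and guidance_prompt vs. guidance_prompt_text) are exactly what the graph.py changes below address.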
graph.py
CHANGED
@@ -23,6 +23,8 @@ from langgraph.prebuilt import ToolNode
 from langgraph.checkpoint.memory import MemorySaver
 from langgraph.types import Command, interrupt
 
+from typing import TypedDict, List, Optional
+
 class State(TypedDict):
     messages: Annotated[list, add_messages]
 
@@ -151,12 +153,9 @@ async def guidance_node(state: GraphProcessingState, config=None):
     print(f"Search Enabled: {state.search_enabled}")
     print(f"Next Stage: {state.next_stage}")
 
-
+
     print(f"Brainstorming Complete: {state.brainstorming_complete}")
-
-    print(f"Drawing Complete: {state.drawing_complete}")
-    print(f"Product Searching Complete: {state.product_searching_complete}")
-    print(f"Purchasing Complete: {state.purchasing_complete}")
+
 
     guidance_node.count = getattr(guidance_node, 'count', 0) + 1
     print('\nGuidance Node called count', guidance_node.count)
@@ -166,19 +165,41 @@
     completed = [stage for stage in stage_order if getattr(state, f"{stage}_complete", False)]
     incomplete = [stage for stage in stage_order if not getattr(state, f"{stage}_complete", False)]
 
+    if not incomplete:
+        print("All stages complete!")
+        # Handle case where all stages are complete
+        # You might want to return a message and end, or set proposed_next_stage to a special value
+        ai_all_complete_msg = AIMessage(content="All DIY project stages are complete!")
+        return {
+            "messages": current_messages + [ai_all_complete_msg],
+            "next_stage": "end_project",  # Or None, or a final summary node
+            "pending_approval_stage": None,
+        }
+    else:
+        # THIS LINE DEFINES THE VARIABLE
+        proposed_next_stage = incomplete[0]
+
+    print(f"Proposed next stage: {proposed_next_stage}")
+
     status_summary = f"Completed stages: {completed}\nIncomplete stages: {incomplete}"
 
-
-        "You are the
-        "
-        f"CURRENT STATUS:\n{status_summary}\n\n"
-        "
+    guidance_prompt_text = (
+        "You are the Guiding Assistant for a DIY project. Your primary responsibility is to determine the next logical step "
+        "and then **obtain the user's explicit approval** before proceeding.\n\n"
+        f"CURRENT PROJECT STATUS:\n{status_summary}\n\n"
+        f"Based on the status, the most logical next stage appears to be: **'{proposed_next_stage}'**.\n\n"
+        "YOUR TASK:\n"
+        f"1. Formulate a clear and concise question for the user, asking if they agree to proceed to the **'{proposed_next_stage}'** stage. For example: 'It looks like '{proposed_next_stage}' is next. Shall we proceed with that?' or 'Are you ready to move on to {proposed_next_stage}?'\n"
+        "2. **You MUST use the 'human_assistance' tool to ask this question.** Do not answer directly. Invoke the tool with your question.\n"
+        "Example of tool usage (though you don't write this, you *call* the tool):\n"
+        "Tool Call: human_assistance(query='The next stage is planning. Do you want to proceed with planning?')\n\n"
+        "Consider the user's most recent message if it provides any preference."
     )
 
     if state.prompt:
-        final_prompt = "\n".join([
+        final_prompt = "\n".join([guidance_prompt_text, state.prompt, ASSISTANT_SYSTEM_PROMPT_BASE])
     else:
-        final_prompt = "\n".join([
+        final_prompt = "\n".join([guidance_prompt_text, ASSISTANT_SYSTEM_PROMPT_BASE])
 
     prompt = ChatPromptTemplate.from_messages(
         [
@@ -410,7 +431,37 @@ async def planning_node(state: GraphProcessingState, config=None):
         "messages": response
     }
 
+def custom_route_after_guidance(state: GraphProcessingState) -> Literal["execute_tools", "proceed_to_next_stage"]:
+    """
+    Checks the last message from the 'guidance_node'.
+    If it's an AIMessage with tool_calls, routes to 'execute_tools'.
+    Otherwise, routes to 'proceed_to_next_stage' (which could be a router itself
+    or directly to the state.next_stage node if it's set).
+    """
+    print("\n--- Custom Route After Guidance Condition ---")
+    messages = state.get("messages", [])
+    if not messages:
+        print("No messages found in state. Defaulting to proceed_to_next_stage.")
+        return "proceed_to_next_stage"  # Or handle as an error/specific state
+
+    last_message = messages[-1]
+    print(f"Last message type: {type(last_message)}")
+
+    if isinstance(last_message, AIMessage):
+        if hasattr(last_message, "tool_calls") and last_message.tool_calls:
+            # Ensure tool_calls is not None and not an empty list
+            print(f"AIMessage has tool_calls: {last_message.tool_calls}")
+            return "execute_tools"
+        else:
+            print("AIMessage, but no tool_calls or tool_calls is empty.")
+            # If next_stage was set by guidance_node (e.g. after approval), we'd use that.
+            # For simplicity here, we just assume a generic "proceed"
+            return "proceed_to_next_stage"
+    else:
+        print(f"Last message is not an AIMessage (type: {type(last_message)}). Proceeding to next stage.")
+        return "proceed_to_next_stage"
 
+    print("--- End Custom Route After Guidance Condition ---")
 
 def guidance_routing(state: GraphProcessingState) -> str:
 
@@ -467,8 +518,14 @@ def define_workflow() -> CompiledStateGraph:
     # workflow.add_edge("tools", "brainstorming_node")
 
     workflow.add_conditional_edges(
-
-
+        "guidance_node",              # The source node
+        custom_route_after_guidance,  # Your custom condition function
+        {
+            # "Path name": "Destination node name"
+            "execute_tools": "tools",                 # If function returns "execute_tools"
+            "proceed_to_next_stage": "planning_node"  # If function returns "proceed_to_next_stage"
+            # Or this could be another router, or END
+        }
    )
    # workflow.add_conditional_edges("guidance_node", guidance_routing)
    # workflow.add_conditional_edges("brainstorming_node", brainstorming_routing)
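The new guidance prompt instructs the model to call a 'human_assistance' tool, and graph.py already imports Command and interrupt from langgraph.types, but the tool itself is not part of this diff. A minimal sketch of how such a tool is commonly written with LangGraph's interrupt() — the decorator, payload shape, and return key here are assumptions, not code from this repository:

# Hypothetical human_assistance tool — not included in this commit.
from langchain_core.tools import tool
from langgraph.types import interrupt

@tool
def human_assistance(query: str) -> str:
    """Ask the human user a question and wait for the reply."""
    # interrupt() pauses the graph run at this node; the run resumes when the
    # caller sends Command(resume={"data": "<user reply>"}) back into the graph.
    human_response = interrupt({"query": query})
    return human_response["data"]

On the app side, chat_fn would then resume the paused run by passing Command(resume={"data": user_reply}) into graph.astream instead of a fresh input, which is consistent with the Command import at the top of graph.py; the actual app.py wiring is not shown in this commit.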