Harshil Patel committed · Commit 83e804d · 1 Parent(s): 1f35fad

Add a format string function to pretty-print JSON output

Files changed (1):
  1. src/manager/manager.py  +52 -6
src/manager/manager.py CHANGED

@@ -37,7 +37,54 @@ class Mode(Enum):
     ENABLE_ECONOMY_BUDGET = auto()
     ENABLE_MEMORY = auto()
 
-
+def format_tool_response(response, indent=0):
+    """
+    Format a tool response for display with proper handling of newlines in string values.
+    This preserves the dictionary structure while making string values with newlines display properly.
+    """
+    indent_str = " " * indent
+    result = []
+
+    if isinstance(response, dict):
+        result.append("{")
+        items = list(response.items())
+        for i, (key, value) in enumerate(items):
+            end_comma = "" if i == len(items) - 1 else ","
+            formatted_value = format_tool_response(value, indent + 1)
+            result.append(f"{indent_str} \"{key}\": {formatted_value}{end_comma}")
+        result.append(f"{indent_str}}}")
+        return "\n".join(result)
+
+    elif isinstance(response, list):
+        result.append("[")
+        for i, item in enumerate(response):
+            end_comma = "" if i == len(response) - 1 else ","
+            formatted_item = format_tool_response(item, indent + 1)
+            result.append(f"{indent_str} {formatted_item}{end_comma}")
+        result.append(f"{indent_str}]")
+        return "\n".join(result)
+
+    elif isinstance(response, str):
+        # Handle multiline strings by using triple quotes and preserving newlines
+        if "\n" in response:
+            # Replace newlines with actual newlines and proper indentation
+            lines = response.split("\n")
+            indented_lines = [f"{indent_str} {line}" for line in lines]
+            joined_lines = "\n".join(indented_lines)
+            return f"'''\n{joined_lines}\n{indent_str} '''"
+        else:
+            # Regular string
+            return f"\"{response}\""
+
+    elif response is None:
+        return "null"
+
+    elif isinstance(response, (int, float, bool)):
+        return str(response).lower() if isinstance(response, bool) else str(response)
+
+    else:
+        # Fallback for other types
+        return f"\"{str(response)}\""
 class GeminiManager:
     def __init__(self, system_prompt_file="./src/models/system4.prompt",
                  gemini_model="gemini-2.5-pro-exp-03-25",
@@ -106,7 +153,7 @@ class GeminiManager:
             toolResponse = None
             logger.info(
                 f"Function Name: {function_call.name}, Arguments: {function_call.args}")
-            title = f"Invoking `{function_call.name}` with `{function_call.args}`\n"
+            title = f"Invoking `{function_call.name}` with \n```json\n{format_tool_response(function_call.args)}\n```\n"
             yield {
                 "role": "assistant",
                 "content": thinking,
@@ -126,9 +173,8 @@ class GeminiManager:
                     "message": f"Tool `{function_call.name}` failed to run.",
                     "output": str(e),
                 }
-            pretty_json = json.dumps(toolResponse, indent=4)
-            logger.debug(f"Tool Response: {pretty_json}")
-            thinking += f"Tool responded with \n```\n{pretty_json}\n```\n"
+            logger.debug(f"Tool Response: {toolResponse}")
+            thinking += f"Tool responded with \n```json\n{format_tool_response(toolResponse)}\n```\n"
             yield {
                 "role": "assistant",
                 "content": thinking,
@@ -267,7 +313,7 @@ class GeminiManager:
         })
         messages.append({
             "role": "assistant",
-            "content": f"Memories: \n```\n{json.dumps(memories, indent=4)}\n```\n",
+            "content": f"Memories: \n```json\n{format_tool_response(memories)}\n```\n",
            "metadata": {"title": "Memories"}
         })
         yield messages
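
For reference, a minimal sketch of what the new formatter produces for a typical tool payload. The sample dictionary, variable names, and the import path below are assumptions made for illustration; the snippet only relies on `format_tool_response` as defined in the diff above being importable from src/manager/manager.py.

```python
# Illustration only: the payload below is invented, not real tool output.
# Assumes the repository root is on sys.path and the module's own imports resolve.
from src.manager.manager import format_tool_response

sample_response = {
    "status": "success",
    "output": "line one\nline two\nline three",  # multiline string value
    "files": ["notes.txt", "report.md"],
    "exit_code": 0,
}

# json.dumps(sample_response, indent=4) would keep "output" on a single line with
# literal \n escapes; format_tool_response prints the dict/list structure and
# renders the multiline string between ''' markers, one source line per output
# line, which reads better inside the ```json blocks yielded to the chat UI.
print(format_tool_response(sample_response))
```

Note that, unlike `json.dumps`, the result is not strictly valid JSON (triple-quoted strings, unescaped newlines), so it is intended only for display in the chat transcript, not for re-parsing.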