Commit dc5dca4
Parent(s): 33d0a71

Clean up code

Signed-off-by: Aivin V. Solatorio <avsolatorio@gmail.com>

1 file changed:
- mcp_client.py (+0, -92)

mcp_client.py CHANGED
@@ -85,49 +85,6 @@ class MCPClientWrapper:
         self.anthropic = Anthropic()
         self.tools = []
 
-    # def connect(self, server_path: str) -> str:
-    #     return loop.run_until_complete(self._connect(server_path))
-
-    # async def _connect(self, server_path: str) -> str:
-    #     if self.exit_stack:
-    #         await self.exit_stack.aclose()
-
-    #     self.exit_stack = AsyncExitStack()
-
-    #     is_python = server_path.endswith(".py")
-    #     command = "python" if is_python else "node"
-
-    #     server_params = StdioServerParameters(
-    #         command=command,
-    #         args=[server_path],
-    #         env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"},
-    #     )
-
-    #     stdio_transport = await self.exit_stack.enter_async_context(
-    #         stdio_client(server_params)
-    #     )
-    #     self.stdio, self.write = stdio_transport
-
-    #     self.session = await self.exit_stack.enter_async_context(
-    #         ClientSession(self.stdio, self.write)
-    #     )
-    #     await self.session.initialize()
-
-    #     response = await self.session.list_tools()
-    #     self.tools = [
-    #         {
-    #             "name": tool.name,
-    #             "description": tool.description,
-    #             "input_schema": tool.inputSchema,
-    #         }
-    #         for tool in response.tools
-    #     ]
-
-    #     print(self.tools)
-
-    #     tool_names = [tool["name"] for tool in self.tools]
-    #     return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
-
     async def connect(self, server_path: str) -> str:
         # If there's an existing session, close it
         if self.exit_stack:
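Note: the connect/_connect pair deleted above duplicated the live async connect that follows the hunk. For orientation, a minimal self-contained sketch of the same stdio connection flow with the mcp client library, assuming the same imports the deleted comments reference (AsyncExitStack, StdioServerParameters, stdio_client, ClientSession):

# Sketch only; mirrors the deleted comments, not the exact live method.
from contextlib import AsyncExitStack

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def connect(server_path: str) -> str:
    exit_stack = AsyncExitStack()

    # Spawn the MCP server as a subprocess and talk to it over stdio.
    params = StdioServerParameters(
        command="python" if server_path.endswith(".py") else "node",
        args=[server_path],
        env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"},
    )
    stdio, write = await exit_stack.enter_async_context(stdio_client(params))

    # Handshake, then discover the tools the server exposes.
    session = await exit_stack.enter_async_context(ClientSession(stdio, write))
    await session.initialize()
    tools = (await session.list_tools()).tools
    return f"Connected to MCP server. Available tools: {', '.join(t.name for t in tools)}"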
@@ -182,11 +139,6 @@ class MCPClientWrapper:
             ]
             yield messages, gr.Textbox(value="")
         else:
-            # new_messages = loop.run_until_complete(
-            #     self._process_query(message, history)
-            # )
-            # messages = history + [{"role": "user", "content": message}] + new_messages
-
             messages = history + [{"role": "user", "content": message}]
 
             yield messages, gr.Textbox(value="")
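Note: the dropped lines were the last remnant of the old synchronous bridge that blocked on loop.run_until_complete. The surviving code yields directly, since Gradio can drive an async generator itself. A toy illustration of that pattern (hypothetical handler, not this module's actual one):

import gradio as gr

# Hypothetical handler: Gradio consumes the async generator directly,
# so no manual event loop is needed.
async def process_message(message: str, history: list):
    messages = history + [{"role": "user", "content": message}]
    yield messages, gr.Textbox(value="")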
@@ -250,36 +202,6 @@ class MCPClientWrapper:
                         yield [result_messages[-1]]
                         partial_messages = []
 
-                # if (
-                #     auto_calls < MAX_CALLS
-                #     and "TOOL_USE" in content.text
-                #     and len(contents) == 0
-                # ):
-                #     # Call the LLM automatically.
-                #     claude_messages.append({"role": "user", "content": "ok"})
-                #     try:
-                #         next_response = self.anthropic.messages.create(
-                #             model=LLM_MODEL,
-                #             system=SYSTEM_PROMPT,
-                #             max_tokens=1000,
-                #             messages=claude_messages,
-                #             tools=self.tools,
-                #         )
-                #     except OverloadedError:
-                #         return [
-                #             {
-                #                 "role": "assistant",
-                #                 "content": "The LLM API is overloaded now, try again later...",
-                #             }
-                #         ]
-
-                #     print("next_response", next_response.content)
-
-                #     contents.extend(next_response.content)
-                #     auto_calls += 1
-
-                #     # continue
-
             elif content.type == "tool_use":
                 tool_id = content.id
                 tool_name = content.name
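Note: reassembled from the deleted comments, the auto-continuation looked like the fragment below. It nudged the model with an "ok" turn whenever the previous text mentioned TOOL_USE, capped by MAX_CALLS (LLM_MODEL, SYSTEM_PROMPT, MAX_CALLS, and OverloadedError come from the surrounding module; the other names from the enclosing method):

# Fragment reassembled from the deleted comments (context names assumed).
if auto_calls < MAX_CALLS and "TOOL_USE" in content.text and len(contents) == 0:
    # Nudge the model to continue and issue the pending tool call.
    claude_messages.append({"role": "user", "content": "ok"})
    try:
        next_response = self.anthropic.messages.create(
            model=LLM_MODEL,
            system=SYSTEM_PROMPT,
            max_tokens=1000,
            messages=claude_messages,
            tools=self.tools,
        )
    except OverloadedError:
        # Surface a friendly message instead of crashing the chat loop.
        return [{
            "role": "assistant",
            "content": "The LLM API is overloaded now, try again later...",
        }]
    contents.extend(next_response.content)
    auto_calls += 1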
@@ -350,7 +272,6 @@ class MCPClientWrapper:
                     except:
                         pass
 
-                # result_content = "\n".join(str(item) for item in result_content)
                 print("result_content", result_content)
 
                 result_messages.append(
@@ -370,12 +291,6 @@ class MCPClientWrapper:
                         yield [result_messages[-1]]
                         partial_messages = []
 
-                # claude_messages.append(
-                #     {
-                #         "role": "user",
-                #         "content": f"Tool result for {tool_name}: {result_content}",
-                #     }
-                # )
                 claude_messages.append(
                     {"role": "assistant", "content": [content.model_dump()]}
                 )
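Note: the deleted variant fed tool output back as a flat f-string user turn; the retained code replays the assistant's structured tool_use block instead. Under Anthropic's tool-use protocol the matching result then goes back as a tool_result block keyed by the tool ID, roughly as below (a sketch, not the exact follow-up code in this file):

# Sketch of the structured round trip: replay the tool_use block,
# then answer it with a tool_result tied to tool_id.
claude_messages.append(
    {"role": "assistant", "content": [content.model_dump()]}
)
claude_messages.append({
    "role": "user",
    "content": [{
        "type": "tool_result",
        "tool_use_id": tool_id,
        "content": str(result_content),
    }],
})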
@@ -413,13 +328,6 @@ class MCPClientWrapper:
 
             contents.extend(next_response.content)
 
-            # if next_response.content and next_response.content[0].type == "text":
-            #     result_messages.append(
-            #         {"role": "assistant", "content": next_response.content[0].text}
-            #     )
-
-            # yield result_messages
-
 
 client = MCPClientWrapper()
 