Commit
·
4804219
1 Parent(s):
c812099
Use OpenAI API and improve interactivity
Browse files
Signed-off-by: Aivin V. Solatorio <avsolatorio@gmail.com>
- mcp_openai_client.py +100 -14
mcp_openai_client.py
CHANGED
@@ -22,7 +22,9 @@ from openai.types.responses import (
|
|
22 |
ResponseAudioDeltaEvent,
|
23 |
ResponseMcpCallCompletedEvent,
|
24 |
ResponseOutputItemDoneEvent,
|
|
|
25 |
)
|
|
|
26 |
|
27 |
load_dotenv()
|
28 |
|
@@ -67,10 +69,17 @@ When responding you must always plan the steps and enumerate all the tools that
|
|
67 |
- Summarize the data in a table format with clear column names and values.
|
68 |
- If the data is not available, respond by clearly stating that you do not have access to that information.
|
69 |
|
|
|
|
|
|
|
|
|
70 |
Stay strictly within these boundaries while maintaining a helpful and respectful tone."""
|
71 |
|
72 |
|
73 |
LLM_MODEL = "claude-3-5-haiku-20241022"
|
|
|
|
|
|
|
74 |
# What is the military spending of bangladesh in 2014?
|
75 |
# When a tool is needed for any step, ensure to add the token `TOOL_USE`.
|
76 |
|
@@ -202,9 +211,10 @@ class MCPClientWrapper:
|
|
202 |
else:
|
203 |
is_delta = False
|
204 |
messages.extend(partial)
|
|
|
205 |
|
206 |
yield messages, gr.Textbox(value="")
|
207 |
-
await asyncio.sleep(0.
|
208 |
|
209 |
if (
|
210 |
messages[-1]["role"] == "assistant"
|
@@ -220,7 +230,7 @@ class MCPClientWrapper:
|
|
220 |
self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]
|
221 |
):
|
222 |
response = self.openai.responses.create(
|
223 |
-
model=
|
224 |
tools=[
|
225 |
{
|
226 |
"type": "mcp",
|
@@ -241,24 +251,100 @@ class MCPClientWrapper:
|
|
241 |
)
|
242 |
|
243 |
is_tool_call = False
|
|
|
|
|
244 |
for event in response:
|
245 |
-
if isinstance(event, ResponseMcpCallInProgressEvent):
|
|
|
|
|
|
|
246 |
is_tool_call = True
|
247 |
-
|
248 |
-
|
249 |
-
|
250 |
-
|
251 |
-
|
252 |
-
|
253 |
-
|
254 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
255 |
yield [
|
256 |
{
|
257 |
"role": "assistant",
|
258 |
-
"content": "I'll use the tool to help answer your question.",
|
|
|
|
|
|
|
|
|
|
|
259 |
}
|
260 |
]
|
261 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
262 |
elif isinstance(event, ResponseTextDeltaEvent):
|
263 |
yield [{"role": "assistant", "content": None, "delta": event.delta}]
|
264 |
|
@@ -509,7 +595,7 @@ def gradio_interface(
|
|
509 |
# Disable auto-dark mode by setting theme to None
|
510 |
with gr.Blocks(title="WDI MCP Client", css=custom_css, theme=None) as demo:
|
511 |
try:
|
512 |
-
gr.Markdown("#
|
513 |
# gr.Markdown("Connect to the WDI MCP server and chat with the assistant")
|
514 |
|
515 |
with gr.Accordion(
|
|
|
22 |
ResponseAudioDeltaEvent,
|
23 |
ResponseMcpCallCompletedEvent,
|
24 |
ResponseOutputItemDoneEvent,
|
25 |
+
ResponseOutputItemAddedEvent,
|
26 |
)
|
27 |
+
import ast
|
28 |
|
29 |
load_dotenv()
|
30 |
|
|
|
69 |
- Summarize the data in a table format with clear column names and values.
|
70 |
- If the data is not available, respond by clearly stating that you do not have access to that information.
|
71 |
|
72 |
+
7. **Tool Use**:
|
73 |
+
- Fetch each indicator data using independent tool calls.
|
74 |
+
- Provide some brief explanation between tool calls.
|
75 |
+
|
76 |
Stay strictly within these boundaries while maintaining a helpful and respectful tone."""
|
77 |
|
78 |
|
79 |
LLM_MODEL = "claude-3-5-haiku-20241022"
|
80 |
+
OPENAI_MODEL = "gpt-4.1"
|
81 |
+
# OPENAI_MODEL = "gpt-4.1-mini"
|
82 |
+
# OPENAI_MODEL = "gpt-4.1-nano"
|
83 |
# What is the military spending of bangladesh in 2014?
|
84 |
# When a tool is needed for any step, ensure to add the token `TOOL_USE`.
|
85 |
|
|
|
211 |
else:
|
212 |
is_delta = False
|
213 |
messages.extend(partial)
|
214 |
+
print(partial)
|
215 |
|
216 |
yield messages, gr.Textbox(value="")
|
217 |
+
await asyncio.sleep(0.01)
|
218 |
|
219 |
if (
|
220 |
messages[-1]["role"] == "assistant"
|
|
|
230 |
self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]
|
231 |
):
|
232 |
response = self.openai.responses.create(
|
233 |
+
model=OPENAI_MODEL,
|
234 |
tools=[
|
235 |
{
|
236 |
"type": "mcp",
|
|
|
251 |
)
|
252 |
|
253 |
is_tool_call = False
|
254 |
+
tool_name = None
|
255 |
+
tool_args = None
|
256 |
for event in response:
|
257 |
+
if (
|
258 |
+
isinstance(event, ResponseOutputItemAddedEvent)
|
259 |
+
and event.item.type == "mcp_call"
|
260 |
+
):
|
261 |
is_tool_call = True
|
262 |
+
tool_name = event.item.name
|
263 |
+
# if isinstance(event, ResponseMcpCallInProgressEvent):
|
264 |
+
# is_tool_call = True
|
265 |
+
# yield [
|
266 |
+
# {
|
267 |
+
# "role": "assistant",
|
268 |
+
# "content": "I'll use the tool to help answer your question.",
|
269 |
+
# }
|
270 |
+
# ]
|
271 |
+
if is_tool_call:
|
272 |
+
if (
|
273 |
+
isinstance(event, ResponseAudioDeltaEvent)
|
274 |
+
and event.type == "response.mcp_call_arguments.done"
|
275 |
+
):
|
276 |
+
tool_args = event.arguments
|
277 |
+
|
278 |
+
try:
|
279 |
+
tool_args = json.dumps(
|
280 |
+
json.loads(tool_args), ensure_ascii=True, indent=2
|
281 |
+
)
|
282 |
+
except:
|
283 |
+
pass
|
284 |
+
|
285 |
+
yield [
|
286 |
+
{
|
287 |
+
"role": "assistant",
|
288 |
+
"content": f"I'll use the {tool_name} tool to help answer your question.",
|
289 |
+
"metadata": {
|
290 |
+
"title": f"Using tool: {tool_name.replace('avsolatorio_test_data_mcp_server', '')}",
|
291 |
+
"log": f"Parameters: {tool_args}",
|
292 |
+
# "status": "pending",
|
293 |
+
"status": "done",
|
294 |
+
"id": f"tool_call_{tool_name}",
|
295 |
+
},
|
296 |
+
}
|
297 |
+
]
|
298 |
+
|
299 |
yield [
|
300 |
{
|
301 |
"role": "assistant",
|
302 |
+
"content": "```json\n" + tool_args + "\n```",
|
303 |
+
"metadata": {
|
304 |
+
"parent_id": f"tool_call_{tool_name}",
|
305 |
+
"id": f"params_{tool_name}",
|
306 |
+
"title": "Tool Parameters",
|
307 |
+
},
|
308 |
}
|
309 |
]
|
310 |
+
|
311 |
+
elif isinstance(event, ResponseOutputItemDoneEvent):
|
312 |
+
if event.item.type == "mcp_call":
|
313 |
+
yield [
|
314 |
+
{
|
315 |
+
"role": "assistant",
|
316 |
+
"content": "Here are the results from the tool:",
|
317 |
+
"metadata": {
|
318 |
+
"title": f"Tool Result for {tool_name.replace('avsolatorio_test_data_mcp_server', '')}",
|
319 |
+
"status": "done",
|
320 |
+
"id": f"result_{tool_name}",
|
321 |
+
},
|
322 |
+
}
|
323 |
+
]
|
324 |
+
|
325 |
+
result_content = event.item.output
|
326 |
+
if result_content.startswith("root="):
|
327 |
+
result_content = result_content[5:]
|
328 |
+
try:
|
329 |
+
result_content = ast.literal_eval(result_content)
|
330 |
+
result_content = json.dumps(result_content, indent=2)
|
331 |
+
except:
|
332 |
+
pass
|
333 |
+
|
334 |
+
yield [
|
335 |
+
{
|
336 |
+
"role": "assistant",
|
337 |
+
"content": "```\n" + result_content + "\n```",
|
338 |
+
"metadata": {
|
339 |
+
"parent_id": f"result_{tool_name}",
|
340 |
+
"id": f"raw_result_{tool_name}",
|
341 |
+
"title": "Raw Output",
|
342 |
+
},
|
343 |
+
}
|
344 |
+
]
|
345 |
+
is_tool_call = False
|
346 |
+
tool_name = None
|
347 |
+
tool_args = None
|
348 |
elif isinstance(event, ResponseTextDeltaEvent):
|
349 |
yield [{"role": "assistant", "content": None, "delta": event.delta}]
|
350 |
|
|
|
595 |
# Disable auto-dark mode by setting theme to None
|
596 |
with gr.Blocks(title="WDI MCP Client", css=custom_css, theme=None) as demo:
|
597 |
try:
|
598 |
+
gr.Markdown("# Data360 Chat [Prototype]")
|
599 |
# gr.Markdown("Connect to the WDI MCP server and chat with the assistant")
|
600 |
|
601 |
with gr.Accordion(
|