Commit 8362096
Parent(s): 33f4b8c

Add history

Signed-off-by: Aivin V. Solatorio <avsolatorio@gmail.com>

1 file changed: mcp_openai_client.py (+70, -12)

mcp_openai_client.py CHANGED
@@ -13,6 +13,7 @@ from anthropic import Anthropic
 from anthropic._exceptions import OverloadedError
 from dotenv import load_dotenv
 from openai import OpenAI
+import openai
 from openai.types.responses import (
     ResponseTextDeltaEvent,
     ResponseContentPartAddedEvent,
@@ -23,6 +24,7 @@ from openai.types.responses import (
     ResponseMcpCallCompletedEvent,
     ResponseOutputItemDoneEvent,
     ResponseOutputItemAddedEvent,
+    ResponseCompletedEvent,
 )
 import ast
 
@@ -171,7 +173,10 @@ class MCPClientWrapper:
         self.session = None
 
     async def process_message(
-        self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]
+        self,
+        message: str,
+        history: List[Union[Dict[str, Any], ChatMessage]],
+        previous_response_id: str = None,
     ):
         if not self.session and LLM_PROVIDER == "anthropic":
             messages = history + [
@@ -181,7 +186,7 @@
                     "content": "Please connect to an MCP server first by reloading the page.",
                 },
             ]
-            yield messages, gr.Textbox(value="")
+            yield messages, gr.Textbox(value=""), gr.Textbox(value=previous_response_id)
         else:
             messages = history + [
                 {"role": "user", "content": message},
@@ -191,13 +196,15 @@
                 },
             ]
 
-            yield messages, gr.Textbox(value="")
+            yield messages, gr.Textbox(value=""), gr.Textbox(value=previous_response_id)
             # simulate thinking with asyncio.sleep
             await asyncio.sleep(0.1)
             messages.pop(-1)
 
             is_delta = False
-            async for partial in self._process_query(message, history):
+            async for partial in self._process_query(
+                message, history, previous_response_id
+            ):
                 if partial[-1].get("delta"):
                     if not is_delta:
                         is_delta = True
@@ -208,12 +215,25 @@
                             }
                         )
                     messages[-1]["content"] += partial[-1]["delta"]
+                elif partial[-1].get("response_id"):
+                    previous_response_id = partial[-1]["response_id"]
+                    yield (
+                        messages,
+                        gr.Textbox(value=""),
+                        gr.Textbox(value=previous_response_id),
+                    )
+                    await asyncio.sleep(0.01)
+                    continue
                 else:
                     is_delta = False
                     messages.extend(partial)
                     print(partial)
 
-                yield messages, gr.Textbox(value="")
+                yield (
+                    messages,
+                    gr.Textbox(value=""),
+                    gr.Textbox(value=previous_response_id),
+                )
                 await asyncio.sleep(0.01)
 
             if (
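For context, the loop above now distinguishes three kinds of partials coming out of `_process_query`: `delta` chunks that extend the current assistant message, a `response_id` sentinel that is stored rather than rendered, and complete message lists. A minimal standalone sketch of that producer/consumer protocol (hypothetical names, not part of the commit):

import asyncio

# Hypothetical producer: yields delta chunks, then a response-id sentinel,
# mirroring the partial protocol _process_query uses in the diff above.
async def produce():
    for chunk in ("Hel", "lo"):
        yield [{"delta": chunk}]
    yield [{"response_id": "resp_123"}]  # placeholder id

async def consume():
    text, response_id = "", None
    async for partial in produce():
        if partial[-1].get("delta"):
            text += partial[-1]["delta"]  # extend the streamed text
        elif partial[-1].get("response_id"):
            response_id = partial[-1]["response_id"]  # store, don't render
    print(text, response_id)

asyncio.run(consume())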
@@ -227,7 +247,10 @@
             fl.write(json.dumps(dict(time=f"{datetime.now()}", messages=messages)))
 
     async def _process_query_openai(
-        self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]
+        self,
+        message: str,
+        history: List[Union[Dict[str, Any], ChatMessage]],
+        previous_response_id: str = None,
     ):
         response = self.openai.responses.create(
             model=OPENAI_MODEL,
@@ -247,14 +270,24 @@
             input=message,
             parallel_tool_calls=False,
             stream=True,
+            max_output_tokens=32768,
             temperature=0,
+            previous_response_id=previous_response_id
+            if previous_response_id.strip()
+            else None,
         )
 
         is_tool_call = False
         tool_name = None
         tool_args = None
        for event in response:
+            if isinstance(event, ResponseCompletedEvent):
+                yield [
+                    {
+                        "response_id": event.response.id,
+                    }
+                ]
+            elif (
                 isinstance(event, ResponseOutputItemAddedEvent)
                 and event.item.type == "mcp_call"
             ):
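The `previous_response_id` argument is what gives the model its memory here: the Responses API stores each response server-side, and passing the prior response's id threads the new turn onto that stored context without the client replaying the message history. The `.strip()` guard maps the empty string the hidden Textbox delivers on the first turn back to None. A minimal sketch of the chaining, independent of this app (the model name is a placeholder):

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

first = client.responses.create(
    model="gpt-4.1-mini",  # placeholder model name
    input="My name is Ada.",
)

# The second call sees the first turn's context via previous_response_id,
# with no message history resent by the client.
second = client.responses.create(
    model="gpt-4.1-mini",
    input="What is my name?",
    previous_response_id=first.id,
)
print(second.output_text)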
@@ -553,14 +586,36 @@
         contents.extend(next_response.content)
 
     async def _process_query(
-        self, message: str, history: List[Union[Dict[Any, Any], ChatMessage]]
+        self,
+        message: str,
+        history: List[Union[Dict[Any, Any], ChatMessage]],
+        previous_response_id: str = None,
     ):
         if LLM_PROVIDER == "anthropic":
             async for partial in self._process_query_anthropic(message, history):
                 yield partial
         elif LLM_PROVIDER == "openai":
-            async for partial in self._process_query_openai(message, history):
-                yield partial
+            try:
+                async for partial in self._process_query_openai(
+                    message, history, previous_response_id
+                ):
+                    yield partial
+            except openai.APIError as e:
+                print(e)
+                yield [
+                    {
+                        "role": "assistant",
+                        "content": "The LLM encountered an error. Please try again or reload the page.",
+                    }
+                ]
+            except Exception as e:
+                print(e)
+                yield [
+                    {
+                        "role": "assistant",
+                        "content": f"Sorry, I encountered an unexpected error: `{e}`. Please try again or reload the page.",
+                    }
+                ]
 
 
 def gradio_interface(
@@ -625,6 +680,9 @@ def gradio_interface(
             layout="panel",
             placeholder="Ask development data questions!",
         )
+        previous_response_id = gr.Textbox(
+            label="Previous Response ID", interactive=False, visible=False
+        )
 
         with gr.Row(equal_height=True):
             msg = gr.Textbox(
@@ -647,8 +705,8 @@
 
         msg.submit(
             client.process_message,
-            [msg, chatbot],
-            [chatbot, msg],
+            [msg, chatbot, previous_response_id],
+            [chatbot, msg, previous_response_id],
            concurrency_limit=10,
         )
         # clear_btn.click(lambda: [], None, chatbot)
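The hidden `previous_response_id` Textbox does the work of session state: because it appears in both the inputs and outputs of `msg.submit`, the id round-trips between the browser and the handler on every turn. A stripped-down sketch of the same wiring (hypothetical handler, not the app's code):

import gradio as gr

def respond(message, history, prev_id):
    # Hypothetical handler: in the real app the new id arrives via the
    # ResponseCompletedEvent of the streamed LLM response.
    history = (history or []) + [{"role": "user", "content": message}]
    new_id = "resp_abc"  # placeholder
    return history, "", new_id

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    prev = gr.Textbox(visible=False, interactive=False)
    msg = gr.Textbox()
    # The hidden Textbox is both an input and an output, so its value
    # round-trips on every submit.
    msg.submit(respond, [msg, chatbot, prev], [chatbot, msg, prev])

if __name__ == "__main__":
    demo.launch()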