Commit 0c13494
Parent(s): feb936d

Use state for previous_response_id

Signed-off-by: Aivin V. Solatorio <avsolatorio@gmail.com>

mcp_openai_client.py CHANGED (+18 -11)
@@ -181,6 +181,7 @@ class MCPClientWrapper:
         history: List[Union[Dict[str, Any], ChatMessage]],
         previous_response_id: str = None,
     ):
+        print("previous_response_id", previous_response_id)
         if not self.session and LLM_PROVIDER == "anthropic":
             messages = history + [
                 {"role": "user", "content": message},
@@ -189,7 +190,7 @@
                     "content": "Please connect to an MCP server first by reloading the page.",
                 },
             ]
-            yield messages, gr.Textbox(value=""),
+            yield messages, gr.Textbox(value=""), previous_response_id
         else:
             messages = history + [
                 {"role": "user", "content": message},
@@ -199,9 +200,9 @@
                 },
             ]
 
-            yield messages, gr.Textbox(value=""),
+            yield messages, gr.Textbox(value=""), previous_response_id
             # simulate thinking with asyncio.sleep
-            await asyncio.sleep(0.
+            await asyncio.sleep(0.2)
             messages.pop(-1)
 
             is_delta = False
@@ -227,7 +228,7 @@
                    yield (
                        messages,
                        gr.Textbox(value=""),
-
+                       previous_response_id,
                    )
                    await asyncio.sleep(0.01)
                    continue
@@ -238,7 +239,7 @@
                    yield (
                        messages,
                        gr.Textbox(value=""),
-
+                       previous_response_id,
                    )
                    await asyncio.sleep(0.01)
 
@@ -250,7 +251,15 @@
                    break
 
        with open("messages.log.jsonl", "a+") as fl:
-            fl.write(
+            fl.write(
+                json.dumps(
+                    dict(
+                        time=f"{datetime.now()}",
+                        messages=messages,
+                        previous_response_id=previous_response_id,
+                    )
+                )
+            )
 
    async def _process_query_openai(
        self,
@@ -278,8 +287,8 @@
            stream=True,
            max_output_tokens=32768,
            temperature=0,
-            previous_response_id=previous_response_id
-            if previous_response_id
+            previous_response_id=previous_response_id.strip()
+            if previous_response_id
            else None,
            store=True,  # Store the response in the OpenAI logs
        )
@@ -700,9 +709,7 @@ def gradio_interface(
        layout="panel",
        placeholder="Ask development data questions!",
    )
-    previous_response_id = gr.Textbox(
-        label="Previous Response ID", interactive=False, visible=False
-    )
+    previous_response_id = gr.State(None)
 
    with gr.Row(equal_height=True):
        msg = gr.Textbox(
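The bulk of the change threads previous_response_id through the chat handler as a third yielded output, so Gradio writes it back into a gr.State slot instead of a hidden textbox. Below is a minimal, self-contained sketch of that wiring; the echo handler and component layout are illustrative, not the repo's actual code:

import gradio as gr

async def respond(message, history, previous_response_id):
    # With type="messages", history is a list of {"role", "content"} dicts.
    history = history + [{"role": "user", "content": message}]
    # A real handler would call the LLM here and capture the new response
    # ID from the API; this stub just echoes and passes the old ID along.
    history.append({"role": "assistant", "content": f"echo: {message}"})
    yield history, gr.Textbox(value=""), previous_response_id

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    msg = gr.Textbox()
    # gr.State holds a per-session value without rendering a component,
    # which is what this commit switches to.
    previous_response_id = gr.State(None)
    msg.submit(
        respond,
        inputs=[msg, chatbot, previous_response_id],
        outputs=[chatbot, msg, previous_response_id],
    )

demo.launch()

Because the state component appears in both inputs and outputs, whatever ID the handler yields on one turn arrives as the previous_response_id argument on the next.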
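The logging hunk expands a bare write into a structured JSONL record that now carries the response ID. The same pattern as a standalone helper; note that json.dumps emits no trailing newline, so a separator is added here to keep the file one object per line, whereas the diff as shown writes none:

import json
from datetime import datetime

def log_turn(messages, previous_response_id, path="messages.log.jsonl"):
    # Append one JSON object per call; "a+" creates the file if missing.
    with open(path, "a+") as fl:
        record = dict(
            time=f"{datetime.now()}",
            messages=messages,
            previous_response_id=previous_response_id,
        )
        fl.write(json.dumps(record) + "\n")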
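On the OpenAI side the only functional change is guarding a blank ID with .strip(). A hedged sketch of how previous_response_id chains turns in the Responses API, assuming the official openai Python client; the model name and return shape are placeholders rather than the repo's configuration:

from openai import AsyncOpenAI

client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment

async def ask(text: str, previous_response_id: str | None = None):
    response = await client.responses.create(
        model="gpt-4.1",  # placeholder model
        input=text,
        max_output_tokens=32768,
        temperature=0,
        # Same guard as the diff: an empty or whitespace ID means "no parent".
        previous_response_id=previous_response_id.strip()
        if previous_response_id
        else None,
        store=True,  # stored responses are what make chaining by ID possible
    )
    # response.id is the value to feed back as previous_response_id
    # on the next turn, so the server reconstructs context itself.
    return response.output_text, response.id

Feeding response.id back on each call lets the server carry the conversation context, which is why the commit persists it in session state instead of resending the full history.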