CultriX committed (verified)
Commit bbe6fe0 · Parent: 874f037

Update run.py

Files changed (1):
  run.py: +162, -18
run.py CHANGED
@@ -1,6 +1,9 @@
 import argparse
 import os
 import threading
+import sys
+from io import StringIO
+from contextlib import redirect_stdout, redirect_stderr
 
 from dotenv import load_dotenv
 from huggingface_hub import login
@@ -33,6 +36,26 @@ AUTHORIZED_IMPORTS = [
 append_answer_lock = threading.Lock()
 
 
+class StreamingCapture:
+    """Captures stdout/stderr and yields content in real-time"""
+    def __init__(self):
+        self.content = []
+        self.callbacks = []
+
+    def add_callback(self, callback):
+        self.callbacks.append(callback)
+
+    def write(self, text):
+        if text.strip():
+            self.content.append(text)
+            for callback in self.callbacks:
+                callback(text)
+        return len(text)
+
+    def flush(self):
+        pass
+
+
 def create_agent(
     model_id="gpt-4o-mini",
     hf_token=None,
@@ -135,12 +158,120 @@ Additionally, if after some searching you find out that you need more informatio
     return manager_agent
 
 
+def run_agent_with_streaming(agent, question, stream_callback=None):
+    """Run agent and stream output in real-time"""
+
+    # Capture stdout and stderr
+    stdout_capture = StreamingCapture()
+    stderr_capture = StreamingCapture()
+
+    if stream_callback:
+        stdout_capture.add_callback(stream_callback)
+        stderr_capture.add_callback(stream_callback)
+
+    try:
+        with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
+            print(f"[STARTING] Running agent with question: {question}")
+            answer = agent.run(question)
+            print(f"[COMPLETED] Final answer: {answer}")
+            return answer
+    except Exception as e:
+        error_msg = f"[ERROR] Exception occurred: {str(e)}"
+        print(error_msg)
+        if stream_callback:
+            stream_callback(error_msg)
+        raise
+
+
+def create_gradio_interface():
+    """Create Gradio interface with streaming support"""
+    import gradio as gr
+
+    def process_question(question, model_id, hf_token, serpapi_key, custom_api_endpoint,
+                         custom_api_key, search_provider, search_api_key, custom_search_url):
+
+        # Create agent
+        agent = create_agent(
+            model_id=model_id,
+            hf_token=hf_token,
+            openai_api_key=None,  # Add if needed
+            serpapi_key=serpapi_key,
+            api_endpoint=None,  # Add if needed
+            custom_api_endpoint=custom_api_endpoint,
+            custom_api_key=custom_api_key,
+            search_provider=search_provider,
+            search_api_key=search_api_key,
+            custom_search_url=custom_search_url,
+        )
+
+        # Stream output
+        full_output = []
+
+        def stream_callback(text):
+            full_output.append(text)
+            return "".join(full_output)
+
+        # Generator function for streaming
+        def generate():
+            try:
+                answer = run_agent_with_streaming(agent, question, stream_callback)
+                yield "".join(full_output) + f"\n\n**FINAL ANSWER:** {answer}"
+            except Exception as e:
+                yield "".join(full_output) + f"\n\n**ERROR:** {str(e)}"
+
+        return generate()
+
+    # Create Gradio interface
+    with gr.Blocks(title="Streaming Agent Chat") as demo:
+        gr.Markdown("# Streaming Agent Chat Interface")
+
+        with gr.Row():
+            with gr.Column():
+                question_input = gr.Textbox(label="Question", placeholder="Enter your question here...")
+                model_id_input = gr.Textbox(label="Model ID", value="gpt-4o-mini")
+                hf_token_input = gr.Textbox(label="HuggingFace Token", type="password")
+                serpapi_key_input = gr.Textbox(label="SerpAPI Key", type="password")
+                custom_api_endpoint_input = gr.Textbox(label="Custom API Endpoint")
+                custom_api_key_input = gr.Textbox(label="Custom API Key", type="password")
+                search_provider_input = gr.Dropdown(
+                    choices=["serper", "searxng"],
+                    value="serper",
+                    label="Search Provider"
+                )
+                search_api_key_input = gr.Textbox(label="Search API Key", type="password")
+                custom_search_url_input = gr.Textbox(label="Custom Search URL")
+
+                submit_btn = gr.Button("Submit", variant="primary")
+
+            with gr.Column():
+                output = gr.Textbox(
+                    label="Agent Output (Streaming)",
+                    lines=30,
+                    max_lines=50,
+                    interactive=False
+                )
+
+        submit_btn.click(
+            fn=process_question,
+            inputs=[
+                question_input, model_id_input, hf_token_input, serpapi_key_input,
+                custom_api_endpoint_input, custom_api_key_input, search_provider_input,
+                search_api_key_input, custom_search_url_input
+            ],
+            outputs=output,
+            show_progress=True
+        )
+
+    return demo
+
+
 def main():
     print("[DEBUG] Loading environment variables")
     load_dotenv(override=True)
 
     parser = argparse.ArgumentParser()
-    parser.add_argument("question", type=str)
+    parser.add_argument("--gradio", action="store_true", help="Launch Gradio interface")
+    parser.add_argument("question", type=str, nargs='?', help="Question to ask (CLI mode)")
     parser.add_argument("--model-id", type=str, default="gpt-4o-mini")
     parser.add_argument("--hf-token", type=str, default=os.getenv("HF_TOKEN"))
     parser.add_argument("--serpapi-key", type=str, default=os.getenv("SERPAPI_API_KEY"))
@@ -153,24 +284,37 @@ def main():
 
     print("[DEBUG] CLI arguments parsed:", args)
 
-    agent = create_agent(
-        model_id=args.model_id,
-        hf_token=args.hf_token,
-        openai_api_token=openai_api_token,
-        serpapi_key=args.serpapi_key,
-        api_endpoint=api_endpoint,
-        custom_api_endpoint=args.custom_api_endpoint,
-        custom_api_key=args.custom_api_key,
-        search_provider=args.search_provider,
-        search_api_key=args.search_api_key,
-        custom_search_url=args.custom_search_url,
-    )
+    if args.gradio:
+        # Launch Gradio interface
+        demo = create_gradio_interface()
+        demo.launch(share=True)
+    else:
+        # CLI mode
+        if not args.question:
+            print("Error: Question required for CLI mode")
+            return
+
+        agent = create_agent(
+            model_id=args.model_id,
+            hf_token=args.hf_token,
+            openai_api_key=None,  # Fix: was openai_api_token
+            serpapi_key=args.serpapi_key,
+            api_endpoint=None,  # Fix: was api_endpoint
+            custom_api_endpoint=args.custom_api_endpoint,
+            custom_api_key=args.custom_api_key,
+            search_provider=args.search_provider,
+            search_api_key=args.search_api_key,
+            custom_search_url=args.custom_search_url,
+        )
 
-    print("[DEBUG] Running agent...")
-    answer = agent.run(args.question)
-    print(f"Got this answer: {answer}")
+        print("[DEBUG] Running agent...")
+
+        def print_stream(text):
+            print(text, end='', flush=True)
+
+        answer = run_agent_with_streaming(agent, args.question, print_stream)
+        print(f"\n\nGot this answer: {answer}")
 
 
 if __name__ == "__main__":
-    main()
-
+    main()
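
Usage note (not part of the commit): a minimal, self-contained sketch of the capture-and-callback pattern this diff introduces, which run_agent_with_streaming relies on in both the CLI and Gradio paths. DemoCapture mirrors the StreamingCapture class above, and fake_agent_run is a stand-in for agent.run(); both names are illustrative only.

    import sys
    from contextlib import redirect_stdout

    class DemoCapture:
        """Buffers non-blank writes and fans each chunk out to registered callbacks."""
        def __init__(self):
            self.content = []
            self.callbacks = []

        def add_callback(self, callback):
            self.callbacks.append(callback)

        def write(self, text):
            if text.strip():
                self.content.append(text)
                for callback in self.callbacks:
                    callback(text)
            return len(text)

        def flush(self):
            pass

    def fake_agent_run():
        # Stand-in for agent.run(): anything printed here is captured and streamed.
        print("[STARTING] searching the web")
        print("[COMPLETED] final answer: 42")

    capture = DemoCapture()
    capture.add_callback(lambda chunk: sys.stderr.write(f"[stream] {chunk}\n"))  # real-time consumer
    with redirect_stdout(capture):
        fake_agent_run()
    print("buffered chunks:", capture.content)  # stdout is restored after the with-block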