mobinln committed (verified)
Commit 1729f2d · 1 Parent(s): cb863c6

Update app.py

Files changed (1):
  app.py +7 -81
app.py CHANGED

@@ -1,72 +1,12 @@
-import socket
 import subprocess
 import gradio as gr
 from openai import OpenAI
 import json
-import sys
-from io import StringIO
-import traceback
-import matplotlib
-
-matplotlib.use("Agg")  # Use non-interactive backend
-import matplotlib.pyplot as plt
-import base64
-from io import BytesIO
-

 subprocess.Popen("bash /home/user/app/start.sh", shell=True)

 client = OpenAI(base_url="http://0.0.0.0:8000/v1", api_key="sk-local", timeout=600)

-
-def execute_python_code(code):
-    """Execute Python code safely and return results"""
-    # Capture stdout
-    old_stdout = sys.stdout
-    sys.stdout = StringIO()
-
-    # Store any plots
-    plt.clf()  # Clear any existing plots
-
-    try:
-        # Execute the code
-        exec_globals = {
-            "plt": plt,
-            "matplotlib": matplotlib,
-            "__builtins__": __builtins__,
-            # Add other safe modules as needed
-            "json": json,
-            "math": __import__("math"),
-            "numpy": __import__("numpy"),  # if available
-            "pandas": __import__("pandas"),  # if available
-        }
-
-        exec(code, exec_globals)
-
-        # Get printed output
-        output = sys.stdout.getvalue()
-
-        # Check if there are any plots
-        plot_data = None
-        if plt.get_fignums():  # If there are active figures
-            buf = BytesIO()
-            plt.savefig(buf, format="png", bbox_inches="tight", dpi=150)
-            buf.seek(0)
-            plot_data = base64.b64encode(buf.read()).decode()
-            plt.close("all")  # Close all figures
-
-        sys.stdout = old_stdout
-
-        result = {"success": True, "output": output, "plot": plot_data}
-
-        return result
-
-    except Exception as e:
-        sys.stdout = old_stdout
-        error_msg = f"Error: {str(e)}\n{traceback.format_exc()}"
-        return {"success": False, "output": error_msg, "plot": None}
-
-
 def handle_function_call(function_name, arguments):
     """Handle function calls from the model"""
     if function_name == "browser_search":
@@ -76,21 +16,12 @@ def handle_function_call(function_name, arguments):
         return f"Search results for '{query}' (max {max_results} results): [Implementation needed]"

     elif function_name == "code_interpreter":
+        # Implement your code interpreter logic here
         code = arguments.get("code", "")
         if not code:
             return "No code provided to execute."

-        result = execute_python_code(code)
-
-        if result["success"]:
-            response = f"Code executed successfully:\n\n```\n{result['output']}\n```"
-            if result["plot"]:
-                response += (
-                    f"\n\n[Plot generated - base64 data: {result['plot'][:50]}...]"
-                )
-            return response
-        else:
-            return f"Code execution failed:\n\n```\n{result['output']}\n```"
+        return f"Code interpreter results for '{code}': [Implementation needed]"

     return f"Unknown function: {function_name}"

@@ -178,7 +109,6 @@ def respond(

     for chunk in stream:
         delta = chunk.choices[0].delta
-        print('delta', delta)

         # Handle function calls
         if hasattr(delta, "tool_calls") and delta.tool_calls:
@@ -191,15 +121,11 @@
                     }
                 )

-        # Handle regular content
-        try:
-            if hasattr(delta, "reasoning_content") and delta.reasoning_content:
-                output += delta.reasoning_content
-            elif delta.content:
-                output += delta.content
-        except:
-            if delta.content:
-                output += delta.content
+        if hasattr(delta, "reasoning_content") and delta.reasoning_content:
+            # output += delta.reasoning_content
+            output = f"""*{output}{delta.reasoning_content}*\n"""
+        elif delta.content:
+            output += delta.content

         yield output
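For reference, the last hunk replaces the old try/except fallback with an explicit branch that streams the model's reasoning in italics. Below is a minimal standalone sketch of that branch; `FakeDelta`, `accumulate`, and the sample chunks are hypothetical stand-ins (not part of app.py), used only to show how `output` evolves as chunks arrive.

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeDelta:
    # Hypothetical stand-in for a streamed chunk's `choices[0].delta` object.
    reasoning_content: Optional[str] = None
    content: Optional[str] = None


def accumulate(deltas):
    """Mirror of the streaming branch added in this commit."""
    output = ""
    for delta in deltas:
        if hasattr(delta, "reasoning_content") and delta.reasoning_content:
            # Reasoning tokens: re-wrap the accumulated text in *...* so the
            # Gradio Markdown view renders the reasoning stream in italics.
            output = f"""*{output}{delta.reasoning_content}*\n"""
        elif delta.content:
            # Regular assistant tokens are appended verbatim.
            output += delta.content
        yield output


if __name__ == "__main__":
    chunks = [
        FakeDelta(reasoning_content="Thinking about the question "),
        FakeDelta(reasoning_content="and the available tools."),
        FakeDelta(content="Here is the answer."),
    ]
    for partial in accumulate(chunks):
        print(repr(partial))
```

Each reasoning chunk re-wraps the whole accumulated string in `*...*` and appends a newline; the commented-out `output += delta.reasoning_content` line in the diff is the plain-append alternative.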
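With the local `execute_python_code` sandbox gone, the `code_interpreter` branch of `handle_function_call` now only returns a placeholder string that is fed back to the model. A small usage sketch, assuming the updated app.py is importable as `app` (the JSON payload below is made up for illustration):

```python
import json

# Importing app.py also runs its module-level setup (the start.sh subprocess
# and the local OpenAI client), as shown in the diff above.
from app import handle_function_call

# Tool-call arguments arrive from the model as a JSON string inside the
# streamed tool_calls delta; decode them before dispatching.
raw_arguments = json.dumps({"code": "print(2 + 2)"})  # hypothetical payload
arguments = json.loads(raw_arguments)

print(handle_function_call("code_interpreter", arguments))
# -> Code interpreter results for 'print(2 + 2)': [Implementation needed]

print(handle_function_call("code_interpreter", {}))
# -> No code provided to execute.

print(handle_function_call("unknown_tool", {}))
# -> Unknown function: unknown_tool
```

The placeholder mirrors the existing `browser_search` stub, so both tools report "[Implementation needed]" until real backends are wired in.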