# chatbot/app.py
import socket
import subprocess
import gradio as gr
from openai import OpenAI
import json
import sys
from io import StringIO
import traceback
import matplotlib
matplotlib.use("Agg") # Use non-interactive backend
import matplotlib.pyplot as plt
import base64
from io import BytesIO
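
# Launch the local inference server in the background. start.sh is assumed
# (it is not part of this file) to start a llama.cpp server exposing an
# OpenAI-compatible API on port 8000.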
subprocess.Popen("bash /home/user/app/start.sh", shell=True)
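
# OpenAI-compatible client pointed at the local llama.cpp server started above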
client = OpenAI(base_url="http://0.0.0.0:8000/v1", api_key="sk-local", timeout=600)
def execute_python_code(code):
    """Execute Python code in-process and return its output and any plot.

    Note: exec() here is not sandboxed; only run trusted code.
    """
    # Capture stdout so the executed code's print() output can be returned
    old_stdout = sys.stdout
    sys.stdout = StringIO()

    # Close leftover figures rather than plt.clf(), which would create a
    # blank figure as a side effect and make every run report a "plot"
    plt.close("all")

    try:
        # Global namespace exposed to the executed code;
        # numpy/pandas are optional and only added if installed
        exec_globals = {
            "plt": plt,
            "matplotlib": matplotlib,
            "__builtins__": __builtins__,
            "json": json,
            "math": __import__("math"),
        }
        for optional in ("numpy", "pandas"):
            try:
                exec_globals[optional] = __import__(optional)
            except ImportError:
                pass

        # Execute the code
        exec(code, exec_globals)

        # Get printed output
        output = sys.stdout.getvalue()

        # If the code created matplotlib figures, encode one as a base64 PNG
        plot_data = None
        if plt.get_fignums():
            buf = BytesIO()
            plt.savefig(buf, format="png", bbox_inches="tight", dpi=150)
            buf.seek(0)
            plot_data = base64.b64encode(buf.read()).decode()
            plt.close("all")

        return {"success": True, "output": output, "plot": plot_data}
    except Exception as e:
        error_msg = f"Error: {e}\n{traceback.format_exc()}"
        return {"success": False, "output": error_msg, "plot": None}
    finally:
        # Restore stdout no matter what happened inside exec()
        sys.stdout = old_stdout
def handle_function_call(function_name, arguments):
    """Handle function calls from the model"""
    if function_name == "browser_search":
        # Implement your browser search logic here
        query = arguments.get("query", "")
        max_results = arguments.get("max_results", 5)
        return f"Search results for '{query}' (max {max_results} results): [Implementation needed]"
    elif function_name == "code_interpreter":
        code = arguments.get("code", "")
        if not code:
            return "No code provided to execute."
        result = execute_python_code(code)
        if result["success"]:
            response = f"Code executed successfully:\n\n```\n{result['output']}\n```"
            if result["plot"]:
                response += (
                    f"\n\n[Plot generated - base64 data: {result['plot'][:50]}...]"
                )
            return response
        else:
            return f"Code execution failed:\n\n```\n{result['output']}\n```"
    return f"Unknown function: {function_name}"
def respond(
    message,
    history: list[tuple[str, str]] | None = None,
    system_message=None,
    max_tokens=None,
    temperature=0.7,
):
    # Rebuild the OpenAI-style message list from the Gradio chat history
    messages = []
    if system_message:
        messages.append({"role": "system", "content": system_message})
    for user, assistant in history or []:
        if user:
            messages.append({"role": "user", "content": user})
        if assistant:
            messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    try:
        stream = client.chat.completions.create(
            model="Deepseek-R1-0528-Qwen3-8B",
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            stream=True,
            tools=[
                {
                    "type": "function",
                    "function": {
                        "name": "browser_search",
                        "description": (
                            "Search the web for a given query and return the most relevant results."
                        ),
                        "parameters": {
                            "type": "object",
                            "properties": {
                                "query": {
                                    "type": "string",
                                    "description": "The search query string.",
                                },
                                "max_results": {
                                    "type": "integer",
                                    "description": (
                                        "Maximum number of search results to return. "
                                        "If omitted the service will use its default."
                                    ),
                                    "default": 5,
                                },
                            },
                            "required": ["query"],
                        },
                    },
                },
                {
                    "type": "function",
                    "function": {
                        "name": "code_interpreter",
                        "description": (
                            "Execute Python code and return the results. "
                            "Can generate plots, perform calculations, and data analysis."
                        ),
                        "parameters": {
                            "type": "object",
                            "properties": {
                                "code": {
                                    "type": "string",
                                    "description": "The Python code to execute.",
                                },
                            },
                            "required": ["code"],
                        },
                    },
                },
            ],
        )
print("messages", messages)
output = ""
function_calls_to_handle = []
for chunk in stream:
delta = chunk.choices[0].delta
# Handle function calls
if hasattr(delta, "tool_calls") and delta.tool_calls:
for tool_call in delta.tool_calls:
if tool_call.function:
function_calls_to_handle.append(
{
"name": tool_call.function.name,
"arguments": json.loads(tool_call.function.arguments),
}
)
# Handle regular content
try:
if hasattr(delta, "reasoning_content") and delta.reasoning_content:
output += delta.reasoning_content
elif delta.content:
output += delta.content
except:
if delta.content:
output += delta.content
yield output
# Handle any function calls that were made
if function_calls_to_handle:
for func_call in function_calls_to_handle:
func_result = handle_function_call(
func_call["name"], func_call["arguments"]
)
output += (
f"\n\n**Function Result ({func_call['name']}):**\n{func_result}"
)
yield output
except Exception as e:
print(f"[Error] {e}")
yield "⚠️ Llama.cpp server error"
demo = gr.ChatInterface(respond)
if __name__ == "__main__":
    demo.launch(show_api=False)