from run import create_agent, run_agent_with_streaming
import gradio as gr
import os
import threading
import time
from dotenv import load_dotenv

load_dotenv()
CONFIG_FILE = ".user_config.env"

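# Persist the credentials and endpoint entered in the UI to a local env file
# so they can be reloaded on later runs. Note: values are written in plain text.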
def save_env_vars_to_file(env_vars):
    print("[DEBUG] Saving user config to file")
    with open(CONFIG_FILE, "w") as f:
        for key, value in env_vars.items():
            f.write(f"{key}={value}\n")

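# Build the Gradio UI, wire up the streaming agent runner, and start the server.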
def launch_interface():
    def setup_agent_streaming(question, model_id, hf_token, openai_api_key, serpapi_key, api_endpoint, use_custom_endpoint,
                    custom_api_endpoint, custom_api_key, search_provider, search_api_key, custom_search_url):
        print("[DEBUG] Setting up agent with input question:", question)

        if question.strip() == "":
            yield "Please enter a question.", ""
            return

        endpoint = custom_api_endpoint if use_custom_endpoint else api_endpoint
        api_key = custom_api_key if use_custom_endpoint else openai_api_key

        save_env_vars_to_file({
            "HF_TOKEN": hf_token,
            "SERPAPI_API_KEY": serpapi_key,
            "API_ENDPOINT": api_endpoint,
            "OPENAI_API_KEY": openai_api_key
        })

        print("[DEBUG] Instantiating agent with UI configuration")
        agent = create_agent(
            model_id=model_id,
            hf_token=hf_token,
            serpapi_key=serpapi_key,
            openai_api_key=openai_api_key,
            api_endpoint=api_endpoint,
            custom_api_endpoint=endpoint,
            custom_api_key=api_key,
            search_provider=search_provider,
            search_api_key=search_api_key,
            custom_search_url=custom_search_url
        )

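        # State shared between the worker thread (writes) and the polling loop below (reads).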
        output_buffer = []
        final_answer = ""
        is_complete = False

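        # Render a raw streaming line as lightweight HTML for the Markdown output panel.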
        def highlight_text(text):
            if "[COMPLETED] Final answer:" in text:
                return f"<span style='color:#10b981;font-weight:bold;'>[FINAL]</span> <mark>{text.split(':', 1)[1].strip()}</mark>"
            elif "[ERROR]" in text:
                return f"<span style='color:#ef4444;font-weight:bold;'>[ERROR]</span> <pre>{text.strip()}</pre>"
            elif "[STARTING]" in text:
                return f"<span style='color:#f59e0b;font-weight:bold;'>[STEP]</span> {text.strip()}"
            elif text.strip():
                return f"<details><summary><span style='color:#f59e0b;'>Step</span></summary>\n<pre>{text.strip()}</pre>\n</details>"
            return ""

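        # Invoked by run_agent_with_streaming for each chunk of agent output:
        # capture the final answer when it appears and buffer the formatted line.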
        def stream_callback(text):
            nonlocal final_answer
            if "[COMPLETED] Final answer:" in text:
                final_answer = text.split("[COMPLETED] Final answer:", 1)[1].strip()
            formatted = highlight_text(text)
            if formatted:
                output_buffer.append(formatted)

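        # Worker thread body: run the agent to completion, surface any exception
        # in the output buffer, and always mark the run as complete.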
        def run_agent_async():
            nonlocal is_complete
            try:
                _ = run_agent_with_streaming(agent, question, stream_callback)
            except Exception as e:
                output_buffer.append(highlight_text(f"[ERROR] {str(e)}"))
            finally:
                is_complete = True

        agent_thread = threading.Thread(target=run_agent_async)
        agent_thread.start()

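        # Poll the shared buffer roughly ten times a second and yield to the UI
        # whenever new output has accumulated.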
        last_length = 0
        while not is_complete or agent_thread.is_alive():
            current_output = "\n".join(output_buffer)
            if len(current_output) > last_length:
                yield current_output, ""
                last_length = len(current_output)
            time.sleep(0.1)

        final_output = "\n".join(output_buffer)
        yield final_output, final_answer

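    # Two-column layout: configuration and the question on the left, live agent
    # output and the final answer on the right.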
    with gr.Blocks(title="SmolAgent - Streaming AI") as demo:
        gr.Markdown("# SmolAgent - Intelligent AI with Web Tools")

        with gr.Row():
            with gr.Column():
                question = gr.Textbox(label="Your Question", lines=3)
                model_id = gr.Textbox(label="Model ID", value="gpt-4.1-nano")
                hf_token = gr.Textbox(label="HF Token", type="password", value=os.getenv("HF_TOKEN", ""))
                openai_api_key = gr.Textbox(label="OpenAI API Key", type="password", value=os.getenv("OPENAI_API_KEY", ""))
                api_endpoint = gr.Textbox(label="API Endpoint", value=os.getenv("API_ENDPOINT", "https://api.openai.com/v1"))
                use_custom_endpoint = gr.Checkbox(label="Use Custom API Endpoint")
                custom_api_endpoint = gr.Textbox(label="Custom API URL", visible=False)
                custom_api_key = gr.Textbox(label="Custom API Key", type="password", visible=False)
                serpapi_key = gr.Textbox(label="SerpAPI Key", type="password", value=os.getenv("SERPAPI_API_KEY", ""))
                search_provider = gr.Dropdown(choices=["serper", "searxng"], value="searxng", label="Search Provider")
                search_api_key = gr.Textbox(label="Search Provider API Key", type="password", visible=True)
                custom_search_url = gr.Textbox(label="Custom SearxNG URL", value="https://search.endorisk.nl/search", visible=True)
                submit_btn = gr.Button("Submit")

            with gr.Column():
                output = gr.Markdown(label="Live Agent Output")
                final = gr.Textbox(label="Final Answer", interactive=False)
                copy_btn = gr.Button("Copy Final Answer")

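        # Toggle provider-specific and custom-endpoint fields based on the current selections.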
        def update_visibility(provider):
            return {
                custom_search_url: gr.update(visible=(provider == "searxng")),
                search_api_key: gr.update(visible=(provider == "searxng"))
            }

        def update_custom_fields(checked):
            return {
                custom_api_endpoint: gr.update(visible=checked),
                custom_api_key: gr.update(visible=checked)
            }

        search_provider.change(fn=update_visibility, inputs=search_provider, outputs=[custom_search_url, search_api_key])
        use_custom_endpoint.change(fn=update_custom_fields, inputs=use_custom_endpoint, outputs=[custom_api_endpoint, custom_api_key])

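        # The streaming generator drives both the live output panel and the final answer box.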
        submit_btn.click(
            fn=setup_agent_streaming,
            inputs=[question, model_id, hf_token, openai_api_key, serpapi_key, api_endpoint, use_custom_endpoint, custom_api_endpoint, custom_api_key, search_provider, search_api_key, custom_search_url],
            outputs=[output, final],
            show_progress=True
        )

        copy_btn.click(
            # Re-emit the final answer back into its textbox. This avoids the
            # gr.Textbox.update(...) helper, which is deprecated/removed in Gradio 4.x.
            fn=lambda txt: txt,
            inputs=final,
            outputs=final,
            show_progress=False
        )

    print("[DEBUG] Launching updated Gradio interface")
    demo.launch()

if __name__ == "__main__":
    launch_interface()