|
""" |
|
app.py – Gradio front‑end for “AnyCoder AI” (a.k.a. Shasha AI) |
|
|
|
UI : single‑page, 3‑column layout |
|
Logo : assets/logo.png (120 px wide, centred) |
|
SDK : Gradio 5.38.2 (no `height=` arg on gr.Code) |
|
""" |
|
|
|
from __future__ import annotations |
|
|
|
import gradio as gr |
|
from typing import List, Tuple, Dict, Optional, Any |
|
|
|
|
|
from constants import (
    SEARCH_START, DIVIDER, REPLACE_END,
    HTML_SYSTEM_PROMPT, HTML_SYSTEM_PROMPT_WITH_SEARCH,
    TRANSFORMERS_JS_SYSTEM_PROMPT, TRANSFORMERS_JS_SYSTEM_PROMPT_WITH_SEARCH,
    GENERIC_SYSTEM_PROMPT, GENERIC_SYSTEM_PROMPT_WITH_SEARCH,
    SYSTEM_PROMPTS, FollowUpSystemPrompt,
    TransformersJSFollowUpSystemPrompt,
    AVAILABLE_MODELS, DEMO_LIST,
    GRADIO_SUPPORTED_LANGUAGES,
    get_gradio_language,
)
|
|
|
from hf_client import get_inference_client |
|
from tavily_search import enhance_query_with_search |
|
from utils import ( |
|
extract_text_from_file, extract_website_content, |
|
history_to_messages, history_to_chatbot_messages, |
|
remove_code_block, parse_transformers_js_output, format_transformers_js_output, |
|
apply_search_replace_changes, apply_transformers_js_search_replace_changes, |
|
) |
|
from deploy import send_to_sandbox |
|
from search_replace import SEARCH_START as SR_START |
|
|
|
|
|
|
|
History = List[Tuple[str, str]] |
|
ModelInfo = Dict[str, str] |
|
|
|
|
|
def generate_code( |
|
prompt: str, |
|
file_path: Optional[str], |
|
website_url: Optional[str], |
|
model: ModelInfo, |
|
language: str, |
|
enable_search: bool, |
|
history: Optional[History], |
|
) -> Tuple[str, History, str, List[Dict[str, str]]]: |
|
|
|
history = history or [] |
|
prompt = prompt or "" |
|
|
|
|
|
if history: |
|
|
|
if language == "transformers.js": |
|
system_prompt = TransformersJSFollowUpSystemPrompt |
|
else: |
|
system_prompt = FollowUpSystemPrompt |
|
else: |
|
|
|
if language == "html": |
|
system_prompt = HTML_SYSTEM_PROMPT_WITH_SEARCH if enable_search else HTML_SYSTEM_PROMPT |
|
elif language == "transformers.js": |
|
system_prompt = TRANSFORMERS_JS_SYSTEM_PROMPT_WITH_SEARCH if enable_search else TRANSFORMERS_JS_SYSTEM_PROMPT |
|
else: |
|
system_prompt = ( |
|
GENERIC_SYSTEM_PROMPT_WITH_SEARCH.format(language=language) |
|
if enable_search else GENERIC_SYSTEM_PROMPT.format(language=language) |
|
) |
|
|
|
messages = history_to_messages(history, system_prompt) |
|
|
|
|
|
if file_path: |
|
file_txt = extract_text_from_file(file_path)[:5000] |
|
prompt += f"\n\n[Reference file]\n{file_txt}" |
|
|
|
if website_url: |
|
site_ctx = extract_website_content(website_url.strip()) |
|
prompt += f"\n\n[Website]\n{site_ctx[:8000]}" |
|
|
|
|
|
user_query = enhance_query_with_search(prompt, enable_search) |
|
messages.append({"role": "user", "content": user_query}) |
|
|
|
|
|
client = get_inference_client(model["id"]) |
|
try: |
|
resp = client.chat.completions.create( |
|
model=model["id"], |
|
messages=messages, |
|
max_tokens=16_000, |
|
temperature=0.1, |
|
) |
|
answer = resp.choices[0].message.content |
|
except Exception as e: |
|
err = f"❌ **Error:**\n```\n{e}\n```" |
|
history.append((prompt, err)) |
|
return "", history, "", history_to_chatbot_messages(history) |
|
|
|
|
|
if language == "transformers.js": |
|
files = parse_transformers_js_output(answer) |
|
code = format_transformers_js_output(files) |
|
preview = send_to_sandbox(files["index.html"]) if files["index.html"] else "" |
|
else: |
|
clean = remove_code_block(answer) |
|
if history and not history[-1][1].startswith("❌"): |
|
clean = apply_search_replace_changes(history[-1][1], clean) |
|
code = clean |
|
preview = send_to_sandbox(code) if language == "html" else "" |
|
|
|
history.append((prompt, code)) |
|
chat_msgs = history_to_chatbot_messages(history) |
|
|
|
return code, history, preview, chat_msgs |
|
|
|
|
|
|
|
THEME = gr.themes.Base(primary_hue="indigo", font="Inter") |
|
|
|
with gr.Blocks(theme=THEME, title="AnyCoder AI") as demo: |
|
state_hist = gr.State([]) |
|
state_model = gr.State(AVAILABLE_MODELS[0]) |
|
|
|
|
|
with gr.Row(): |
|
gr.HTML( |
|
'<div style="text-align:center; margin:1.2rem 0;">' |
|
'<img src="assets/logo.png" alt="AnyCoder logo" style="width:120px;"><br>' |
|
'<h1 style="margin:0.4rem 0 0; font-size:1.9rem;">AnyCoder AI</h1>' |
|
'<p style="color:#555;">Your AI partner for generating, modifying & understanding code.</p>' |
|
'</div>' |
|
) |
|
|
|
with gr.Row(): |
|
|
|
with gr.Column(scale=1): |
|
gr.Markdown("### 1 · Select Model") |
|
dd_model = gr.Dropdown( |
|
[m["name"] for m in AVAILABLE_MODELS], |
|
value=AVAILABLE_MODELS[0]["name"], |
|
label="AI Model", |
|
) |
|
|
|
gr.Markdown("### 2 · Provide Context") |
|
with gr.Tabs(): |
|
with gr.Tab("Prompt"): |
|
tb_prompt = gr.Textbox(lines=6, placeholder="Describe what you want to build…") |
|
with gr.Tab("File"): |
|
fi_file = gr.File() |
|
with gr.Tab("Website"): |
|
tb_url = gr.Textbox(placeholder="https://example.com") |
|
|
|
gr.Markdown("### 3 · Configure Output") |
|
dd_lang = gr.Dropdown( |
|
GRADIO_SUPPORTED_LANGUAGES[:-1], |
|
value="html", |
|
label="Target Language", |
|
) |
|
cb_search = gr.Checkbox(label="Enable Tavily Web Search") |
|
|
|
with gr.Row(): |
|
btn_clear = gr.Button("Clear Session", variant="secondary") |
|
btn_gen = gr.Button("Generate Code", variant="primary") |
|
|
|
|
|
with gr.Column(scale=2): |
|
with gr.Tabs(): |
|
with gr.Tab("Code"): |
|
code_out = gr.Code(language="html", lines=25, label="Generated code") |
|
with gr.Tab("Preview"): |
|
html_prev = gr.HTML() |
|
with gr.Tab("History"): |
|
chat_out = gr.Chatbot(type="messages", height=400) |
|
|
|
|
|
gr.Markdown("#### Quick Start Examples") |
|
with gr.Row(): |
|
for demo in DEMO_LIST[:6]: |
|
gr.Button(demo["title"], size="sm").click( |
|
lambda d=demo: d["description"], outputs=tb_prompt |
|
) |
|
|
|
|
|
def _select_model(name: str) -> ModelInfo: |
|
return next((m for m in AVAILABLE_MODELS if m["name"] == name), AVAILABLE_MODELS[0]) |
|
|
|
dd_model.change(_select_model, dd_model, state_model) |
|
btn_gen.click( |
|
generate_code, |
|
inputs=[tb_prompt, fi_file, tb_url, |
|
state_model, dd_lang, cb_search, state_hist], |
|
outputs=[code_out, state_hist, html_prev, chat_out], |
|
) |
|
btn_clear.click( |
|
lambda: ("", None, "", [], [], "", ""), |
|
outputs=[tb_prompt, fi_file, tb_url, state_hist, chat_out, code_out, html_prev], |
|
queue=False, |
|
) |
|
|
|
if __name__ == "__main__": |
|
demo.launch() |
|
|