Spaces: Running
update openrouter error
Browse files

app.py CHANGED
@@ -1151,6 +1151,73 @@ This will help me create a better design for you."""
     # Use dynamic client based on selected model
     client = get_inference_client(_current_model["id"], provider)
 
+    # --- FIX: Handle OpenRouter client before HuggingFace logic ---
+    if client == "openrouter":
+        import os
+        from openai import OpenAI
+        openrouter_api_key = os.getenv("OPENROUTER_API_KEY")
+        openrouter_site_url = os.getenv("OPENROUTER_SITE_URL", "https://huggingface.co/spaces/akhaliq/anycoder")
+        openrouter_site_title = os.getenv("OPENROUTER_SITE_TITLE", "AnyCoder")
+        if not openrouter_api_key:
+            error_message = "Error: OPENROUTER_API_KEY environment variable is not set."
+            yield {
+                code_output: error_message,
+                history_output: history_to_chatbot_messages(_history),
+            }
+            return
+        openai_client = OpenAI(
+            base_url="https://openrouter.ai/api/v1",
+            api_key=openrouter_api_key,
+        )
+        # Prepare OpenAI message format
+        openai_messages = []
+        for m in messages:
+            if m["role"] == "system":
+                openai_messages.append({"role": "system", "content": m["content"]})
+            elif m["role"] == "user":
+                openai_messages.append({"role": "user", "content": m["content"]})
+            elif m["role"] == "assistant":
+                openai_messages.append({"role": "assistant", "content": m["content"]})
+        openai_messages.append({"role": "user", "content": enhanced_query})
+        try:
+            completion = openai_client.chat.completions.create(
+                model="qwen/qwen3-235b-a22b-07-25:free",
+                messages=openai_messages,
+                extra_headers={
+                    "HTTP-Referer": openrouter_site_url,
+                    "X-Title": openrouter_site_title,
+                },
+                extra_body={},
+                stream=True,
+                max_tokens=10000
+            )
+            content = ""
+            for chunk in completion:
+                if hasattr(chunk, "choices") and chunk.choices and hasattr(chunk.choices[0], "delta") and hasattr(chunk.choices[0].delta, "content") and chunk.choices[0].delta.content is not None:
+                    content += chunk.choices[0].delta.content
+                    clean_code = remove_code_block(content)
+                    yield {
+                        code_output: gr.update(value=clean_code, language=get_gradio_language(language)),
+                        history_output: history_to_chatbot_messages(_history),
+                        sandbox: send_to_sandbox(clean_code) if language == "html" else "<div style='padding:1em;color:#888;text-align:center;'>Preview is only available for HTML. Please download your code using the download button above.</div>",
+                    }
+            # After streaming, update history
+            _history.append([query, content])
+            yield {
+                code_output: remove_code_block(content),
+                history: _history,
+                sandbox: send_to_sandbox(remove_code_block(content)),
+                history_output: history_to_chatbot_messages(_history),
+            }
+        except Exception as e:
+            error_message = f"Error (OpenRouter): {str(e)}"
+            yield {
+                code_output: error_message,
+                history_output: history_to_chatbot_messages(_history),
+            }
+        return
+    # --- END FIX ---
+
     if image is not None:
         messages.append(create_multimodal_message(enhanced_query, image))
     else:
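Note on the ordering fix: the moved block relies on get_inference_client returning the string "openrouter" as a sentinel, so the OpenRouter path can run and return before any Hugging Face-specific streaming code is reached. A minimal, self-contained sketch of that dispatch pattern follows; the helper and the selection rule in it are hypothetical stand-ins, not the actual app.py implementation.

# Hypothetical sketch of the sentinel-based dispatch the patch depends on.
# In app.py, get_inference_client is assumed to return the literal string
# "openrouter" for OpenRouter-routed models and a real client object otherwise;
# the selection rule below is illustrative only.
def get_inference_client_sketch(model_id: str, provider: str):
    if provider == "openrouter" or model_id.endswith(":free"):
        return "openrouter"
    return object()  # stand-in for huggingface_hub.InferenceClient(...)

def generate_sketch(model_id: str, provider: str):
    client = get_inference_client_sketch(model_id, provider)
    if client == "openrouter":
        # Handle OpenRouter and return before any Hugging Face branch runs,
        # mirroring the ordering restored by the diff above.
        yield "handled by the OpenRouter branch"
        return
    yield "handled by the Hugging Face branch"

if __name__ == "__main__":
    print(list(generate_sketch("qwen/qwen3-235b-a22b-07-25:free", "openrouter")))
    print(list(generate_sketch("some/other-model", "hf-inference")))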
@@ -1273,72 +1340,6 @@ This will help me create a better design for you."""
         history_output: history_to_chatbot_messages(_history),
     }
 
-    # OpenRouter (OpenAI) logic
-    if client == "openrouter":
-        import os
-        from openai import OpenAI
-        openrouter_api_key = os.getenv("OPENROUTER_API_KEY")
-        openrouter_site_url = os.getenv("OPENROUTER_SITE_URL", "https://huggingface.co/spaces/akhaliq/anycoder")
-        openrouter_site_title = os.getenv("OPENROUTER_SITE_TITLE", "AnyCoder")
-        if not openrouter_api_key:
-            error_message = "Error: OPENROUTER_API_KEY environment variable is not set."
-            yield {
-                code_output: error_message,
-                history_output: history_to_chatbot_messages(_history),
-            }
-            return
-        openai_client = OpenAI(
-            base_url="https://openrouter.ai/api/v1",
-            api_key=openrouter_api_key,
-        )
-        # Prepare OpenAI message format
-        openai_messages = []
-        for m in messages:
-            if m["role"] == "system":
-                openai_messages.append({"role": "system", "content": m["content"]})
-            elif m["role"] == "user":
-                openai_messages.append({"role": "user", "content": m["content"]})
-            elif m["role"] == "assistant":
-                openai_messages.append({"role": "assistant", "content": m["content"]})
-        openai_messages.append({"role": "user", "content": enhanced_query})
-        try:
-            completion = openai_client.chat.completions.create(
-                model="qwen/qwen3-235b-a22b-07-25:free",
-                messages=openai_messages,
-                extra_headers={
-                    "HTTP-Referer": openrouter_site_url,
-                    "X-Title": openrouter_site_title,
-                },
-                extra_body={},
-                stream=True,
-                max_tokens=10000
-            )
-            content = ""
-            for chunk in completion:
-                if hasattr(chunk, "choices") and chunk.choices and hasattr(chunk.choices[0], "delta") and hasattr(chunk.choices[0].delta, "content") and chunk.choices[0].delta.content is not None:
-                    content += chunk.choices[0].delta.content
-                    clean_code = remove_code_block(content)
-                    yield {
-                        code_output: gr.update(value=clean_code, language=get_gradio_language(language)),
-                        history_output: history_to_chatbot_messages(_history),
-                        sandbox: send_to_sandbox(clean_code) if language == "html" else "<div style='padding:1em;color:#888;text-align:center;'>Preview is only available for HTML. Please download your code using the download button above.</div>",
-                    }
-            # After streaming, update history
-            _history.append([query, content])
-            yield {
-                code_output: remove_code_block(content),
-                history: _history,
-                sandbox: send_to_sandbox(remove_code_block(content)),
-                history_output: history_to_chatbot_messages(_history),
-            }
-        except Exception as e:
-            error_message = f"Error (OpenRouter): {str(e)}"
-            yield {
-                code_output: error_message,
-                history_output: history_to_chatbot_messages(_history),
-            }
-        return
-
 # Deploy to Spaces logic
 
 def wrap_html_in_gradio_app(html_code):
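For quick verification outside the Space, here is a standalone sketch of the streaming request the OpenRouter branch makes. The base URL, model name, and attribution headers are taken from the diff; the prompt, the smaller max_tokens, and the simplified chunk guard are illustrative assumptions.

import os
from openai import OpenAI

api_key = os.getenv("OPENROUTER_API_KEY")
if not api_key:
    raise SystemExit("OPENROUTER_API_KEY environment variable is not set.")

client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key=api_key)

stream = client.chat.completions.create(
    model="qwen/qwen3-235b-a22b-07-25:free",
    messages=[{"role": "user", "content": "Write a minimal HTML hello-world page."}],
    stream=True,
    max_tokens=512,  # app.py uses 10000; a smaller value keeps this check quick
    extra_headers={
        # Optional OpenRouter attribution headers, as set in the diff.
        "HTTP-Referer": "https://huggingface.co/spaces/akhaliq/anycoder",
        "X-Title": "AnyCoder",
    },
)

content = ""
for chunk in stream:
    # Equivalent to the long hasattr chain in the diff: skip chunks with no
    # choices or a None delta content before appending.
    if chunk.choices and chunk.choices[0].delta.content:
        content += chunk.choices[0].delta.content

print(content)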