import base64
import os

import gradio as gr
import requests
# Configure the endpoint and authentication
ENDPOINT_URL = os.environ.get("ENDPOINT_URL", "https://dz0eq6vxq3nm0uh7.us-east-1.aws.endpoints.huggingface.cloud")
# HF_API_TOKEN = os.environ.get("HF_API_TOKEN") # Get API token from environment variable
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "").strip()  # strip() removes stray whitespace and newlines from the secret
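# For local testing, the same values can be supplied as environment variables before
# launching the app (placeholder values shown, not real credentials):
#   export ENDPOINT_URL="https://<your-endpoint>.endpoints.huggingface.cloud"
#   export HF_API_TOKEN="hf_xxx"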
# Check if the API token is configured
def is_token_configured():
    if not HF_API_TOKEN:
        return "⚠️ Warning: HF_API_TOKEN is not configured. The app won't work until you add this secret in your Space settings."
    return "✅ API token is configured"

def check_safety(input_text, uploaded_image):
    if not input_text.strip() and uploaded_image is None:
        return "⚠️ Please enter text or upload an image to check."

    payload = {}
    if input_text.strip():
        payload["inputs"] = input_text

    if uploaded_image is not None:
        # In Gradio, uploaded_image is a local temp file path.
        # The endpoint expects a URL or base64, so the image is sent as base64.
        with open(uploaded_image, "rb") as img_file:
            img_bytes = img_file.read()
        img_base64 = base64.b64encode(img_bytes).decode("utf-8")
        payload["image"] = img_base64  # Assumes the backend accepts the image this way
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {HF_API_TOKEN}"
    }

    try:
        response = requests.post(ENDPOINT_URL, json=payload, headers=headers, timeout=30)

        if response.status_code == 200:
            result = response.json()
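            # Assumed response shape, inferred from the keys read below (the real
            # schema is defined by the endpoint handler):
            #   {"is_safe": true,
            #    "safety_result": {"Safety": "...", "Score": 0.0, "Unsafe Categories": "..."}}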
            is_safe = result.get("is_safe", False)
            safety_result = result.get("safety_result", {})
            safety = safety_result.get("Safety", "Unknown")
            score = safety_result.get("Score", "")
            categories = safety_result.get("Unsafe Categories", "")

            if is_safe:
                return f"✅ Safe\n\nSafety: {safety}\nScore: {score}\nUnsafe Categories: {categories}"
            else:
                return f"❌ Unsafe\n\nSafety: {safety}\nScore: {score}\nUnsafe Categories: {categories}"
        else:
            return f"❗ Error: Request failed with status code {response.status_code}.\nDetails: {response.text}"
    except requests.exceptions.Timeout:
        return "❗ Error: Request timed out."
    except requests.exceptions.ConnectionError:
        return "❗ Error: Failed to connect to the endpoint."
    except Exception as e:
        return f"❗ Error: {str(e)}"

with gr.Blocks(title="Safety Content Classifier", css="footer {display: none !important}") as demo:
    gr.Markdown("# Safety Content Classifier")
    gr.Markdown("## Connected to external safety model endpoint")
    token_status = gr.Markdown(is_token_configured())

    with gr.Accordion("About this demo", open=False):
        gr.Markdown("""
        This demo uses an external API endpoint to classify text and images based on safety policies.
        It checks content against the following categories:
        - Harassment
        - Dangerous Content
        - Hate Speech
        - Sexually Explicit Information

        The model will respond with 'Safe' or 'Unsafe' followed by any violated categories.
        """)
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(
                label="Enter text to check",
                placeholder="Type here...",
                lines=5
            )
            uploaded_image = gr.Image(
                label="Upload Image to check",
                type="filepath"  # important: pass the local file path to check_safety
            )
            check_button = gr.Button("Check Safety", variant="primary")

        with gr.Column():
            output = gr.Textbox(
                label="Safety Result",
                lines=8
            )
    # Event handlers
    check_button.click(fn=check_safety, inputs=[input_text, uploaded_image], outputs=output)
    input_text.submit(fn=check_safety, inputs=[input_text, uploaded_image], outputs=output)
    gr.Examples(
        [
            ["Hello, how are you today?", None],
            ["I want to learn how to make a bomb.", None],
            ["", "example_images/unsafe_meme.png"],
            ["", "example_images/normal_photo.jpg"],
        ],
        inputs=[input_text, uploaded_image]
    )
demo.launch()