import gradio as gr
from textblob import TextBlob
import cv2
import numpy as np
from deepface import DeepFace
import moviepy.editor as mp
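# Note (assumption): when hosted as a Space, the packages imported above
# (gradio, textblob, opencv-python, numpy, deepface, moviepy) are expected to be
# listed in an accompanying requirements.txt. Depending on the features used,
# TextBlob may also need its corpora (python -m textblob.download_corpora).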
# 1. Text Sentiment Analysis
def analyze_text(text):
    blob = TextBlob(text)
    sentiment = blob.sentiment.polarity  # polarity ranges from -1.0 (negative) to 1.0 (positive)
    if sentiment > 0:
        result = "Positive Sentiment 😊"
    elif sentiment < 0:
        result = "Negative Sentiment 😢"
    else:
        result = "Neutral Sentiment 😐"
    return result
# 2. Face Emotion Detection
def analyze_face(image):
    try:
        # Gradio supplies the image as an RGB array; DeepFace is OpenCV-based
        # and expects BGR, so swap the channels first.
        image_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        analysis = DeepFace.analyze(image_bgr, actions=['emotion'], enforce_detection=False)
        emotion = analysis[0]['dominant_emotion']
        return f"Detected Emotion: {emotion}"
    except Exception as e:
        return f"Error: {str(e)}"
# 3. Video Emotion Detection
def analyze_video(video_file):
    # gr.File may hand back a plain path string or a tempfile-like object with
    # a .name attribute, depending on the Gradio version; normalize to a path.
    video_path = video_file if isinstance(video_file, str) else video_file.name
    clip = mp.VideoFileClip(video_path)
    frame = clip.get_frame(clip.duration / 2)  # sample the frame at the midpoint
    clip.close()
    # MoviePy returns RGB frames; swap channels to the BGR layout DeepFace expects.
    frame_bgr = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
    try:
        analysis = DeepFace.analyze(frame_bgr, actions=['emotion'], enforce_detection=False)
        emotion = analysis[0]['dominant_emotion']
        return f"Detected Emotion in Video: {emotion}"
    except Exception as e:
        return f"Error: {str(e)}"
# Gradio Interface
with gr.Blocks() as demo:
    gr.Markdown("# Emotion & Sentiment Analyzer 🎯")
    gr.Markdown("Analyze Text, Face (Image), or Video for emotions!")

    with gr.Tabs():
        with gr.TabItem("Text Sentiment"):
            text_input = gr.Textbox(label="Enter Text")
            text_output = gr.Label()
            text_button = gr.Button("Analyze Text")
            text_button.click(analyze_text, inputs=text_input, outputs=text_output)

        with gr.TabItem("Face Emotion (Image)"):
            image_input = gr.Image(type="numpy", label="Upload Face Image")
            image_output = gr.Label()
            image_button = gr.Button("Analyze Face Emotion")
            image_button.click(analyze_face, inputs=image_input, outputs=image_output)

        with gr.TabItem("Video Emotion"):
            video_input = gr.File(label="Upload Video (.mp4)")
            video_output = gr.Label()
            video_button = gr.Button("Analyze Video Emotion")
            video_button.click(analyze_video, inputs=video_input, outputs=video_output)

demo.launch()
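# Usage: run locally with `python app.py` and open the printed local URL;
# on Hugging Face Spaces the Gradio SDK starts app.py automatically.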