logu29 committed
Commit 90ada29 · verified · 1 Parent(s): bacf85d

Create app.py

Files changed (1):
  app.py  +10 -67
app.py CHANGED
@@ -1,71 +1,14 @@
 
 
+ # In Colab
+ code = '''
  import gradio as gr
- from textblob import TextBlob
- import cv2
- import numpy as np
- from deepface import DeepFace
- import tempfile
- import moviepy.editor as mp
-
- # 1. Text Sentiment Analysis
- def analyze_text(text):
-     blob = TextBlob(text)
-     sentiment = blob.sentiment.polarity
-     if sentiment > 0:
-         result = "Positive Sentiment 😊"
-     elif sentiment < 0:
-         result = "Negative Sentiment 😢"
-     else:
-         result = "Neutral Sentiment 😐"
-     return result
-
- # 2. Face Emotion Detection
- def analyze_face(image):
-     try:
-         analysis = DeepFace.analyze(image, actions=['emotion'], enforce_detection=False)
-         emotion = analysis[0]['dominant_emotion']
-         return f"Detected Emotion: {emotion}"
-     except Exception as e:
-         return f"Error: {str(e)}"
-
- # 3. Video Emotion Detection
- def analyze_video(video_file):
-     temp_video_path = tempfile.NamedTemporaryFile(delete=False, suffix=".mp4").name
-     with open(temp_video_path, "wb") as f:
-         f.write(video_file.read())
-
-     clip = mp.VideoFileClip(temp_video_path)
-     frame = clip.get_frame(clip.duration / 2)  # take frame at middle
-     frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
-
-     try:
-         analysis = DeepFace.analyze(frame_rgb, actions=['emotion'], enforce_detection=False)
-         emotion = analysis[0]['dominant_emotion']
-         return f"Detected Emotion in Video: {emotion}"
-     except Exception as e:
-         return f"Error: {str(e)}"
-
- # Gradio Interface
- with gr.Blocks() as demo:
-     gr.Markdown("# Emotion & Sentiment Analyzer 🎯")
-     gr.Markdown("Analyze Text, Face (Image), or Video for emotions!")
-
-     with gr.Tabs():
-         with gr.TabItem("Text Sentiment"):
-             text_input = gr.Textbox(label="Enter Text")
-             text_output = gr.Label()
-             text_button = gr.Button("Analyze Text")
-             text_button.click(analyze_text, inputs=text_input, outputs=text_output)
-
-         with gr.TabItem("Face Emotion (Image)"):
-             image_input = gr.Image(type="numpy", label="Upload Face Image")
-             image_output = gr.Label()
-             image_button = gr.Button("Analyze Face Emotion")
-             image_button.click(analyze_face, inputs=image_input, outputs=image_output)
-
-         with gr.TabItem("Video Emotion"):
-             video_input = gr.File(label="Upload Video (.mp4)")
-             video_output = gr.Label()
-             video_button = gr.Button("Analyze Video Emotion")
-             video_button.click(analyze_video, inputs=video_input, outputs=video_output)
-
- demo.launch()
+
+ def greet(name):
+     return "Hello " + name + "!"
+
+ iface = gr.Interface(fn=greet, inputs="text", outputs="text")
+ iface.launch()
+ '''
+
+ # Save it as app.py
+ with open("app.py", "w") as f:
+     f.write(code)
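
For context, here is one way the saved app.py could then be launched from the same Colab notebook. This is a hypothetical follow-up cell, not part of this commit, and it assumes Gradio is (or can be) installed in the runtime:

  # Hypothetical Colab cell — an assumption, not part of this commit.
  # Install Gradio if the runtime does not already have it.
  !pip install -q gradio

  # Run the saved script inside the notebook kernel; iface.launch() in
  # app.py then starts the Gradio interface from within Colab.
  %run app.py

When launched from a notebook like this, Gradio typically detects the Colab environment and prints a usable link for the interface alongside the local URL.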