Update t1.py
Browse files
t1.py
CHANGED
@@ -271,13 +271,16 @@ def classify_overall_sentiment(video, hashtag, mode, num_of_tweets, since_date,
 
 
 # Create Gradio Interfaces
+example_video = "https://www.youtube.com/shorts/fFoN-hyZLOs?feature=share"  # Provide the path to your example video
+
 video_interface = gr.Interface(
     fn=analyze_video_sentiment,
-    inputs=[gr.Video(), gr.Slider(minimum=1, maximum=20, step=1),
+    inputs=[gr.Video(value=example_video), gr.Slider(minimum=1, maximum=20, step=1),
             gr.Radio(["retinaface", "mtcnn", "opencv", "ssd", "dlib", "mediapipe"], label="Detector Backend",
                      value="retinaface")],
     outputs=["text", gr.Gallery(label="Analyzed Frames")],
-    title="Video Sentiment Analysis"
+    title="Video Sentiment Analysis",
+    examples=[example_video]  # Set the example video
 )
 
 text_interface = gr.Interface(