mrfakename
committed on
Commit
•
4b3ad7b
1
Parent(s):
a73b7f1
Update app.py
Browse files
app.py
CHANGED
@@ -8,6 +8,7 @@ from moviepy.video.tools.subtitles import SubtitlesClip
|
|
8 |
import torch
|
9 |
import tempfile
|
10 |
import gradio as gr
|
|
|
11 |
|
12 |
mdl = whisper.load_model("base" if torch.cuda.is_available() else 'tiny')
|
13 |
if torch.cuda.is_available(): mdl.to('cuda')
|
@@ -33,9 +34,8 @@ def subtitle(input):
|
|
33 |
yield status, gr.update()
|
34 |
gr.Info("Loading video...")
|
35 |
print(input)
|
36 |
-
video = VideoFileClip(input)
|
37 |
width, height = video.size
|
38 |
-
gr.Info(width)
|
39 |
generator = lambda txt: TextClip(txt, size=(width * (3 / 4) + 8, None), color='white', stroke_color='black', stroke_width=8, method='caption', fontsize=min(width / 7, height / 7), font=str(cached_path(FONT_URL)))
|
40 |
generator1 = lambda txt: TextClip(txt, size=(width * (3 / 4), None), color='white', method='caption', fontsize=min(width / 7, height / 7), font=str(cached_path(FONT_URL)))
|
41 |
status += "\n\n[4/5] Loading video clip..."
|
|
|
8 |
import torch
|
9 |
import tempfile
|
10 |
import gradio as gr
|
11 |
+
from pathlib import Path
|
12 |
|
13 |
mdl = whisper.load_model("base" if torch.cuda.is_available() else 'tiny')
|
14 |
if torch.cuda.is_available(): mdl.to('cuda')
|
|
|
34 |
yield status, gr.update()
|
35 |
gr.Info("Loading video...")
|
36 |
print(input)
|
37 |
+
video = VideoFileClip(Path(input))
|
38 |
width, height = video.size
|
|
|
39 |
generator = lambda txt: TextClip(txt, size=(width * (3 / 4) + 8, None), color='white', stroke_color='black', stroke_width=8, method='caption', fontsize=min(width / 7, height / 7), font=str(cached_path(FONT_URL)))
|
40 |
generator1 = lambda txt: TextClip(txt, size=(width * (3 / 4), None), color='white', method='caption', fontsize=min(width / 7, height / 7), font=str(cached_path(FONT_URL)))
|
41 |
status += "\n\n[4/5] Loading video clip..."
|