yerang committed (verified)
Commit c09f54d · 1 Parent(s): 74f4d22

Update app.py

Files changed (1):
  1. app.py +13 -10
app.py CHANGED
@@ -66,7 +66,7 @@ print("PATH:", os.environ['PATH'])
 print("LD_LIBRARY_PATH:", os.environ['LD_LIBRARY_PATH'])
 
 from stf_utils import STFPipeline
-from flux_schnell import create_flux_tab
+
 
 
 # audio_path="assets/examples/driving/test_aud.mp3"
@@ -186,7 +186,7 @@ output_image_paste_back = gr.Image(type="numpy")
 output_video = gr.Video()
 output_video_concat = gr.Video()
 
-# video_input = gr.Video()
+video_input = gr.Video()
 driving_video_path=gr.Video()
 
 
@@ -213,7 +213,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 
 gr.Markdown("### 2. Audio to Driving-Video")
 with gr.Row():
-audio_path_component = gr.Textbox(label="Input", value="assets/examples/driving/test_aud.mp3")
+#audio_path_component = gr.Textbox(label="Input", value="assets/examples/driving/test_aud.mp3")
 video_gen_button = gr.Button("Audio to Video generation", variant="primary")
 with gr.Row():
 #a2v_output = gr.Video()
@@ -240,7 +240,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 cache_examples=False,
 )
 with gr.Accordion(open=True, label="Driving Video"):
-video_input = gr.Video()
+#video_input = gr.Video()
 gr.Examples(
 examples=[
 [osp.join(example_video_dir, "d0.mp4")],
@@ -323,10 +323,13 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 video_gen_button.click(
 fn=gpu_wrapped_stf_pipeline_execute,
 inputs=[
-#output_audio
-audio_path_component
+output_audio
+#audio_path_component
+],
+outputs=[
+video_input
+#driving_video_path
 ],
-outputs=[driving_video_path],
 show_progress=True
 )
 
@@ -343,9 +346,9 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
 outputs=video_input
 )
 
-# 세 번째 탭: Flux 개발용 탭
-with gr.Tab("FLUX Image"):
-flux_demo = create_flux_tab(image_input) # Flux 개발용 탭 생성
+# # 세 번째 탭: Flux 개발용 탭
+# with gr.Tab("FLUX Image"):
+# flux_demo = create_flux_tab(image_input) # Flux 개발용 탭 생성
 
 demo.launch(
 server_port=args.server_port,
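
Note on the hunk at @@ -323,10 +323,13 @@: the "Audio to Video generation" click handler now takes the generated audio component (output_audio) as its input and writes its result into video_input, which the later animation stage consumes, instead of reading the hard-coded audio_path_component and writing to driving_video_path. Below is a minimal, self-contained Gradio sketch of that wiring pattern, not the Space's actual code: fake_audio_to_video is a hypothetical stand-in for gpu_wrapped_stf_pipeline_execute, and the component labels are assumed.

import gradio as gr

# Hypothetical stand-in for gpu_wrapped_stf_pipeline_execute: in the real app this
# would run the STF pipeline on the audio file and return a rendered video path.
def fake_audio_to_video(audio_path):
    # Placeholder return value so the sketch stays self-contained.
    return "assets/examples/driving/d0.mp4"

with gr.Blocks() as demo:
    output_audio = gr.Audio(label="Generated audio", type="filepath")
    video_gen_button = gr.Button("Audio to Video generation", variant="primary")
    video_input = gr.Video(label="Driving video")

    # The click event reads from the audio component and fills the driving-video
    # component, mirroring the inputs/outputs rewiring in the hunk above.
    video_gen_button.click(
        fn=fake_audio_to_video,
        inputs=[output_audio],
        outputs=[video_input],
        show_progress=True,
    )

if __name__ == "__main__":
    demo.launch()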