fffiloni committed
Commit d6761cb
1 Parent(s): fe70a88

Update app.py

Files changed (1): app.py (+33 −17)
app.py CHANGED
@@ -110,7 +110,7 @@ def get_video_fps(video_path):
 def load_examples(ref_image_in, ref_video_in):
     return "./examples/mimicmotion_result1_example.mp4"
 
-def infer(ref_image_in, ref_video_in):
+def infer(ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version):
     # check if 'outputs' dir exists and empty it if necessary
     check_outputs_folder('./outputs')
 
@@ -124,17 +124,17 @@ def infer(ref_image_in, ref_video_in):
     num_frames = 16
     resolution = 576
     frames_overlap = 6
-    num_inference_steps = 25
+    num_inference_steps = num_inference_steps # 25
     noise_aug_strength = 0
-    guidance_scale = 2.0
+    guidance_scale = guidance_scale # 2.0
     sample_stride = 2
-    fps = 16
-    seed = 42
+    fps = output_frames_per_second # 16
+    seed = seed # 42
 
     # Create the data structure
     data = {
         'base_model_path': 'stabilityai/stable-video-diffusion-img2vid-xt-1-1',
-        'ckpt_path': 'models/MimicMotion_1.pth',
+        'ckpt_path': f'models/{checkpoint_version}',
         'test_case': [
             {
                 'ref_video_path': ref_video_path,
@@ -230,6 +230,7 @@ div#warning-ready > .gr-prose > h2, div#warning-ready > .gr-prose > p {
 with gr.Blocks(css=css) as demo:
     with gr.Column():
         gr.Markdown("# MimicMotion")
+        gr.Markdown("High-quality human motion video generation with pose-guided control")
         with gr.Row():
             with gr.Column():
                 if is_shared_ui:
@@ -250,30 +251,45 @@ with gr.Blocks(css=css) as demo:
                     </div>
                     ''', elem_id="warning-duplicate")
                 else:
-                    top_description = gr.HTML(f'''
-                    <div class="gr-prose">
+                    if(is_gpu_associated):
+                        top_description = gr.HTML(f'''
+                        <div class="gr-prose">
+                        <h2 class="custom-color"><svg xmlns="http://www.w3.org/2000/svg" width="18px" height="18px" style="margin-right: 0px;display: inline-block;"fill="none"><path fill="#fff" d="M7 13.2a6.3 6.3 0 0 0 4.4-10.7A6.3 6.3 0 0 0 .6 6.9 6.3 6.3 0 0 0 7 13.2Z"/><path fill="#fff" fill-rule="evenodd" d="M7 0a6.9 6.9 0 0 1 4.8 11.8A6.9 6.9 0 0 1 0 7 6.9 6.9 0 0 1 7 0Zm0 0v.7V0ZM0 7h.6H0Zm7 6.8v-.6.6ZM13.7 7h-.6.6ZM9.1 1.7c-.7-.3-1.4-.4-2.2-.4a5.6 5.6 0 0 0-4 1.6 5.6 5.6 0 0 0-1.6 4 5.6 5.6 0 0 0 1.6 4 5.6 5.6 0 0 0 4 1.7 5.6 5.6 0 0 0 4-1.7 5.6 5.6 0 0 0 1.7-4 5.6 5.6 0 0 0-1.7-4c-.5-.5-1.1-.9-1.8-1.2Z" clip-rule="evenodd"/><path fill="#000" fill-rule="evenodd" d="M7 2.9a.8.8 0 1 1 0 1.5A.8.8 0 0 1 7 3ZM5.8 5.7c0-.4.3-.6.6-.6h.7c.3 0 .6.2.6.6v3.7h.5a.6.6 0 0 1 0 1.3H6a.6.6 0 0 1 0-1.3h.4v-3a.6.6 0 0 1-.6-.7Z" clip-rule="evenodd"/></svg>
+                        You have successfully associated a {which_gpu} GPU to this Space 🎉</h2>
+                        <p class="custom-color">
+                        You will be billed by the minute from when you activated the GPU until when it is turned off.
+                        </p>
+                        </div>
+                        ''', elem_id="warning-ready")
+                    else:
+                        top_description = gr.HTML(f'''
+                        <div class="gr-prose">
                     <h2 class="custom-color"><svg xmlns="http://www.w3.org/2000/svg" width="18px" height="18px" style="margin-right: 0px;display: inline-block;"fill="none"><path fill="#fff" d="M7 13.2a6.3 6.3 0 0 0 4.4-10.7A6.3 6.3 0 0 0 .6 6.9 6.3 6.3 0 0 0 7 13.2Z"/><path fill="#fff" fill-rule="evenodd" d="M7 0a6.9 6.9 0 0 1 4.8 11.8A6.9 6.9 0 0 1 0 7 6.9 6.9 0 0 1 7 0Zm0 0v.7V0ZM0 7h.6H0Zm7 6.8v-.6.6ZM13.7 7h-.6.6ZM9.1 1.7c-.7-.3-1.4-.4-2.2-.4a5.6 5.6 0 0 0-4 1.6 5.6 5.6 0 0 0-1.6 4 5.6 5.6 0 0 0 1.6 4 5.6 5.6 0 0 0 4 1.7 5.6 5.6 0 0 0 4-1.7 5.6 5.6 0 0 0 1.7-4 5.6 5.6 0 0 0-1.7-4c-.5-.5-1.1-.9-1.8-1.2Z" clip-rule="evenodd"/><path fill="#000" fill-rule="evenodd" d="M7 2.9a.8.8 0 1 1 0 1.5A.8.8 0 0 1 7 3ZM5.8 5.7c0-.4.3-.6.6-.6h.7c.3 0 .6.2.6.6v3.7h.5a.6.6 0 0 1 0 1.3H6a.6.6 0 0 1 0-1.3h.4v-3a.6.6 0 0 1-.6-.7Z" clip-rule="evenodd"/></svg>
-                    You have successfully associated a {which_gpu} GPU to this Space 🎉</h2>
-                    <p class="custom-color">
-                    You will be billed by the minute from when you activated the GPU until when it is turned off.
-                    </p>
-                    </div>
-                    ''', elem_id="warning-ready")
+                    You have successfully duplicated the MimicMotion Space 🎉</h2>
+                    <p class="custom-color">There's only one step left before you can properly play with this demo: <a href="https://huggingface.co/spaces/{os.environ['SPACE_ID']}/settings" style="text-decoration: underline" target="_blank">attribute a GPU</b> to it (via the Settings tab)</a> and run the app below.
+                    You will be billed by the minute from when you activate the GPU until when it is turned off.</p>
+                    <p class="actions custom-color">
+                    <a href="https://huggingface.co/spaces/{os.environ['SPACE_ID']}/settings">🔥 &nbsp; Set recommended GPU</a>
+                    </p>
+                    </div>
+                    ''', elem_id="warning-setgpu")
                 with gr.Row():
                     ref_image_in = gr.Image(label="Person Image Reference", type="filepath")
                     ref_video_in = gr.Video(label="Person Video Reference")
                 with gr.Accordion("Advanced Settings", open=False):
                     num_inference_steps = gr.Slider(label="num inference steps", minimum=12, maximum=50, value=25, step=1, interactive=available_property)
                     guidance_scale = gr.Slider(label="guidance scale", minimum=0.1, maximum=10, value=2, step=0.1, interactive=available_property)
-                    output_frames_per_second = gr.Slider(label="fps", minimum=1, maximum=60, value=16, step=1, interactive=available_property)
-                    seed = gr.Number(label="Seed", value=42, interactive=available_property)
+                    with gr.Row():
+                        output_frames_per_second = gr.Slider(label="fps", minimum=1, maximum=60, value=16, step=1, interactive=available_property)
+                        seed = gr.Number(label="Seed", value=42, interactive=available_property)
+                    checkpoint_version = gr.Dropdown(label="Checkpoint Version", choices=["MimicMotion_1.pth", "MimicMotion_1-1.pth"], value="MimicMotion_1.pth", interactive=available_property)
                 submit_btn = gr.Button("Submit", interactive=available_property)
                 gr.Examples(
                     examples = [
                         ["./examples/demo1.jpg", "./examples/preview_1.mp4"]
                     ],
                     fn = load_examples,
-                    inputs = [ref_image_in, ref_video_in],
+                    inputs = [ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version],
                     outputs = [output_video],
                     run_on_click = True,
                     cache_examples = False
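
Note: these hunks change infer()'s signature but do not include the submit_btn.click wiring that feeds it. Assuming the handler elsewhere in app.py follows the usual Gradio pattern, it would need to forward the new Advanced Settings controls in the same order as the updated signature; a minimal sketch, using only component names visible in the hunks above (the actual handler is not shown in this commit):

    # hypothetical wiring, not part of this diff
    submit_btn.click(
        fn = infer,
        inputs = [ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version],
        outputs = [output_video]
    )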
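
Note: gr.Examples now lists seven input components while load_examples (unchanged in this commit) still accepts only the two reference inputs. If Gradio passes one value per listed input when an example is clicked (run_on_click=True), the helper would presumably need to accept, and can ignore, the extra values; a sketch under that assumption:

    # hypothetical adjustment, not part of this diff
    def load_examples(ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version):
        # return the pre-rendered example clip regardless of the settings values
        return "./examples/mimicmotion_result1_example.mp4"

Each example row would likewise be expected to supply a value for every listed input.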