JiantaoLin committed
Commit e70b610 · Parent: 51e4765
Files changed (2)
  1. app.py +0 -7
  2. pipeline/kiss3d_wrapper.py +4 -2
app.py CHANGED
@@ -396,17 +396,10 @@ with gr.Blocks(css="""
396
  with gr.Row(elem_id="example-container"):
397
  gr.Examples(
398
  examples=[
399
- # ["A tree with red leaves"],
400
- # ["A dragon with black texture"],
401
  ["A girl with pink hair"],
402
  ["A boy playing guitar"],
403
-
404
-
405
  ["A dog wearing a hat"],
406
  ["A boy playing basketball"],
407
- # [""],
408
- # [""],
409
- # [""],
410
 
411
  ],
412
  inputs=[prompt], # 将选中的示例填入 prompt 文本框
 
396
  with gr.Row(elem_id="example-container"):
397
  gr.Examples(
398
  examples=[
 
 
399
  ["A girl with pink hair"],
400
  ["A boy playing guitar"],
 
 
401
  ["A dog wearing a hat"],
402
  ["A boy playing basketball"],
 
 
 
403
 
404
  ],
405
  inputs=[prompt], # 将选中的示例填入 prompt 文本框
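For reference, the surviving rows feed Gradio's Examples helper: clicking one copies its value into the bound input component. A minimal, self-contained sketch of that pattern, with a placeholder echo callback standing in for the app's real generation pipeline:

import gradio as gr

# Minimal sketch of the gr.Examples pattern used above (illustrative only;
# the demo callback simply echoes the prompt back).
with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Output")
    run = gr.Button("Run")
    run.click(fn=lambda p: p, inputs=prompt, outputs=output)

    with gr.Row(elem_id="example-container"):
        gr.Examples(
            examples=[
                ["A girl with pink hair"],
                ["A boy playing guitar"],
                ["A dog wearing a hat"],
                ["A boy playing basketball"],
            ],
            inputs=[prompt],  # clicking a row fills the prompt textbox
        )

if __name__ == "__main__":
    demo.launch()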
pipeline/kiss3d_wrapper.py CHANGED
@@ -69,14 +69,16 @@ def init_wrapper_from_config(config_path):
     # flux_lora_pth = config_['flux'].get('lora', None)
     flux_lora_pth = hf_hub_download(repo_id="LTT/Kiss3DGen", filename="rgb_normal_large.safetensors", repo_type="model", token=access_token)
     flux_redux_pth = config_['flux'].get('redux', None)
-    taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype_[flux_dtype]).to(flux_device)
+    # taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype_[flux_dtype]).to(flux_device)
     if flux_base_model_pth.endswith('safetensors'):
         flux_pipe = FluxImg2ImgPipeline.from_single_file(flux_base_model_pth, torch_dtype=dtype_[flux_dtype], token=access_token)
     else:
         flux_pipe = FluxImg2ImgPipeline.from_pretrained(flux_base_model_pth, torch_dtype=dtype_[flux_dtype], token=access_token)
     # flux_pipe.enable_vae_slicing()
     # flux_pipe.enable_vae_tiling()
-    flux_pipe.vae = taef1
+    # flux_pipe.vae = taef1
+    flux_pipe.vae.enable_slicing()  # optimization for batched image generation
+    flux_pipe.vae.enable_tiling()

     # flux_pipe.enable_sequential_cpu_offload()
     # load flux model and controlnet
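This change keeps the full-quality VAE but enables sliced and tiled decoding (one image of the batch at a time, in spatial tiles) instead of swapping in the tiny TAESD decoder, bounding peak VRAM at the cost of some decode speed. A hedged sketch of that trade-off on a bare diffusers pipeline, assuming a CUDA device, bfloat16 weights, and the public FLUX.1-dev checkpoint (the wrapper actually reads the base model path, dtype, and device from config_['flux']):

import torch
from diffusers import FluxImg2ImgPipeline

# Assumption: public FLUX.1-dev weights; not the path used by the Kiss3DGen config.
pipe = FluxImg2ImgPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to("cuda")

# What this commit enables: keep the full VAE, but decode the batch one image
# at a time (slicing) and in spatial tiles (tiling) to cap peak memory.
pipe.vae.enable_slicing()
pipe.vae.enable_tiling()

# What this commit comments out: replacing the VAE with the tiny TAESD
# autoencoder, which is lighter and faster but decodes at lower fidelity.
# from diffusers import AutoencoderTiny
# pipe.vae = AutoencoderTiny.from_pretrained(
#     "madebyollin/taef1", torch_dtype=torch.bfloat16
# ).to("cuda")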