John6666 committed on
Commit
bffe899
1 Parent(s): 7a6d055

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +3 -1
  2. convert_url_to_diffusers_flux_gr.py +37 -16
app.py CHANGED
@@ -43,6 +43,8 @@ It saves you the trouble of typing them in.<br>
43
  is_dequat = gr.Checkbox(label="Dequantization", info="Deadly slow", value=False)
44
  use_original = gr.CheckboxGroup(label="Use original version", choices=["vae", "text_encoder", "text_encoder_2"], value=["vae", "text_encoder"])
45
  is_fix_only = gr.Checkbox(label="Only fixing", value=False)
 
 
46
  run_button = gr.Button(value="Submit")
47
  repo_urls = gr.CheckboxGroup(visible=False, choices=[], value=None)
48
  output_md = gr.Markdown(label="Output")
@@ -51,7 +53,7 @@ It saves you the trouble of typing them in.<br>
51
  gr.on(
52
  triggers=[run_button.click],
53
  fn=convert_url_to_diffusers_repo_flux,
54
- inputs=[dl_url, hf_user, hf_repo, hf_token, civitai_key, is_upload_sf,
55
  data_type, model_type, is_dequat, repo_urls, is_fix_only, use_original],
56
  outputs=[repo_urls, output_md],
57
  )
 
43
  is_dequat = gr.Checkbox(label="Dequantization", info="Deadly slow", value=False)
44
  use_original = gr.CheckboxGroup(label="Use original version", choices=["vae", "text_encoder", "text_encoder_2"], value=["vae", "text_encoder"])
45
  is_fix_only = gr.Checkbox(label="Only fixing", value=False)
46
+ is_private = gr.Checkbox(label="Create private repo", value=True)
47
+ is_overwrite = gr.Checkbox(label="Overwrite repo", value=True)
48
  run_button = gr.Button(value="Submit")
49
  repo_urls = gr.CheckboxGroup(visible=False, choices=[], value=None)
50
  output_md = gr.Markdown(label="Output")
 
53
  gr.on(
54
  triggers=[run_button.click],
55
  fn=convert_url_to_diffusers_repo_flux,
56
+ inputs=[dl_url, hf_user, hf_repo, hf_token, civitai_key, is_private, is_overwrite, is_upload_sf,
57
  data_type, model_type, is_dequat, repo_urls, is_fix_only, use_original],
58
  outputs=[repo_urls, output_md],
59
  )
convert_url_to_diffusers_flux_gr.py CHANGED
@@ -179,14 +179,14 @@ def is_repo_exists(repo_id):
179
  print(f"Error: Failed to connect {repo_id}. ")
180
  return True # for safe
181
 
182
- def create_diffusers_repo(new_repo_id, diffusers_folder, progress=gr.Progress(track_tqdm=True)):
183
  from huggingface_hub import HfApi
184
  import os
185
  hf_token = os.environ.get("HF_TOKEN")
186
  api = HfApi()
187
  try:
188
  progress(0, desc="Start uploading...")
189
- api.create_repo(repo_id=new_repo_id, token=hf_token, private=True, exist_ok=True)
190
  for path in Path(diffusers_folder).glob("*"):
191
  if path.is_dir():
192
  api.upload_folder(repo_id=new_repo_id, folder_path=str(path), path_in_repo=path.name, token=hf_token)
@@ -420,11 +420,13 @@ with torch.no_grad():
420
  # https://github.com/huggingface/transformers/issues/13769
421
  # https://github.com/huggingface/optimum-quanto/issues/278
422
  # https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/serialization/_torch.py
 
423
  with torch.no_grad():
424
  def to_safetensors_flux_module(sd: dict, path: str, pattern: str, size: str,
425
  quantization: bool=False, name: str = "",
426
  metadata: dict | None = None, progress=gr.Progress(track_tqdm=True)):
427
- from huggingface_hub import save_torch_state_dict
 
428
  try:
429
  progress(0, desc=f"Preparing to save FLUX.1 {name} to Diffusers format.")
430
  print(f"Preparing to save FLUX.1 {name} to Diffusers format.")
@@ -438,13 +440,29 @@ with torch.no_grad():
438
  print(f"Saving quantized FLUX.1 {name} to {path}")
439
  else:
440
  progress(0.5, desc=f"Saving FLUX.1 {name} to: {path}")
441
- print(f"Saving FLUX.1 {name} to: {path}")
442
- if metadata is not None:
443
- progress(0.5, desc=f"Saving FLUX.1 {name} metadata to: {path}")
444
- save_torch_state_dict(state_dict=sd, save_directory=path,
445
- filename_pattern=pattern, max_shard_size=size, metadata=metadata)
 
 
 
 
 
 
 
 
 
 
446
  else:
447
- save_torch_state_dict(state_dict=sd, save_directory=path,
 
 
 
 
 
 
448
  filename_pattern=pattern, max_shard_size=size)
449
  progress(1, desc=f"Saved FLUX.1 {name} to: {path}")
450
  print(f"Saved FLUX.1 {name} to: {path}")
@@ -699,16 +717,16 @@ with torch.no_grad(): # Much lower memory consumption, but higher disk load
699
  new_repo_id: str = "", local: bool = False, progress=gr.Progress(track_tqdm=True)):
700
  unet_sd_path = savepath.removesuffix("/") + "/transformer"
701
  unet_sd_pattern = "diffusion_pytorch_model{suffix}.safetensors"
702
- unet_sd_size = "10GB"
703
  te_sd_path = savepath.removesuffix("/") + "/text_encoder_2"
704
  te_sd_pattern = "model{suffix}.safetensors"
705
  te_sd_size = "5GB"
706
  clip_sd_path = savepath.removesuffix("/") + "/text_encoder"
707
  clip_sd_pattern = "model{suffix}.safetensors"
708
- clip_sd_size = "10GB"
709
  vae_sd_path = savepath.removesuffix("/") + "/vae"
710
  vae_sd_pattern = "diffusion_pytorch_model{suffix}.safetensors"
711
- vae_sd_size = "10GB"
712
  print_resource_usage() #
713
  metadata = {"format": "pt", **read_safetensors_metadata(loadpath)}
714
  clear_cache()
@@ -743,6 +761,9 @@ with torch.no_grad(): # Much lower memory consumption, but higher disk load
743
  print("Deleted downloaded file.")
744
  clear_cache()
745
  print_resource_usage() #
 
 
 
746
  to_safetensors_flux_module(unet_sd, unet_sd_path, unet_sd_pattern, unet_sd_size,
747
  quantization, "Transformer", metadata)
748
  clear_sd(unet_sd)
@@ -871,7 +892,7 @@ def convert_url_to_fixed_flux_safetensors(url, civitai_key="", is_upload_sf=Fals
871
  q.put(new_repo_name)
872
  return new_repo_name
873
 
874
- def convert_url_to_diffusers_repo_flux(dl_url, hf_user, hf_repo, hf_token, civitai_key="",
875
  is_upload_sf=False, data_type="bf16", model_type="dev", dequant=False,
876
  repo_urls=[], fix_only=False, use_original=["vae", "text_encoder"],
877
  progress=gr.Progress(track_tqdm=True)):
@@ -902,12 +923,12 @@ def convert_url_to_diffusers_repo_flux(dl_url, hf_user, hf_repo, hf_token, civit
902
  print(f"Invalid repo name: {new_repo_id}")
903
  progress(1, desc=f"Invalid repo name: {new_repo_id}")
904
  return gr.update(value=repo_urls, choices=repo_urls), gr.update(value="")
905
- if is_repo_exists(new_repo_id):
906
  print(f"Repo already exists: {new_repo_id}")
907
  progress(1, desc=f"Repo already exists: {new_repo_id}")
908
  return gr.update(value=repo_urls, choices=repo_urls), gr.update(value="")
909
- save_readme_md(new_path, dl_url)
910
- repo_url = create_diffusers_repo(new_repo_id, new_path)
911
  shutil.rmtree(new_path)
912
  if not repo_urls: repo_urls = []
913
  repo_urls.append(repo_url)
 
179
  print(f"Error: Failed to connect {repo_id}. ")
180
  return True # for safe
181
 
182
+ def create_diffusers_repo(new_repo_id, diffusers_folder, is_private, is_overwrite, progress=gr.Progress(track_tqdm=True)):
183
  from huggingface_hub import HfApi
184
  import os
185
  hf_token = os.environ.get("HF_TOKEN")
186
  api = HfApi()
187
  try:
188
  progress(0, desc="Start uploading...")
189
+ api.create_repo(repo_id=new_repo_id, token=hf_token, private=is_private, exist_ok=is_overwrite)
190
  for path in Path(diffusers_folder).glob("*"):
191
  if path.is_dir():
192
  api.upload_folder(repo_id=new_repo_id, folder_path=str(path), path_in_repo=path.name, token=hf_token)
 
420
  # https://github.com/huggingface/transformers/issues/13769
421
  # https://github.com/huggingface/optimum-quanto/issues/278
422
  # https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/serialization/_torch.py
423
+ # https://huggingface.co/docs/accelerate/usage_guides/big_modeling
424
  with torch.no_grad():
425
  def to_safetensors_flux_module(sd: dict, path: str, pattern: str, size: str,
426
  quantization: bool=False, name: str = "",
427
  metadata: dict | None = None, progress=gr.Progress(track_tqdm=True)):
428
+ from huggingface_hub import save_torch_state_dict, save_torch_model
429
+ from accelerate import init_empty_weights
430
  try:
431
  progress(0, desc=f"Preparing to save FLUX.1 {name} to Diffusers format.")
432
  print(f"Preparing to save FLUX.1 {name} to Diffusers format.")
 
440
  print(f"Saving quantized FLUX.1 {name} to {path}")
441
  else:
442
  progress(0.5, desc=f"Saving FLUX.1 {name} to: {path}")
443
+ if False and path.endswith("/transformer"):
444
+ from diffusers import FluxTransformer2DModel
445
+ has_guidance = any("guidance" in k for k in sd)
446
+ with init_empty_weights():
447
+ model = FluxTransformer2DModel(guidance_embeds=has_guidance)
448
+ model.to("cpu")
449
+ model.load_state_dict(sd, strict=True)
450
+ print(f"Saving FLUX.1 {name} to: {path} (FluxTransformer2DModel)")
451
+ if metadata is not None:
452
+ progress(0.5, desc=f"Saving FLUX.1 {name} metadata to: {path}")
453
+ save_torch_model(model=model, save_directory=path,
454
+ filename_pattern=pattern, max_shard_size=size, metadata=metadata)
455
+ else:
456
+ save_torch_model(model=model, save_directory=path,
457
+ filename_pattern=pattern, max_shard_size=size)
458
  else:
459
+ print(f"Saving FLUX.1 {name} to: {path}")
460
+ if metadata is not None:
461
+ progress(0.5, desc=f"Saving FLUX.1 {name} metadata to: {path}")
462
+ save_torch_state_dict(state_dict=sd, save_directory=path,
463
+ filename_pattern=pattern, max_shard_size=size, metadata=metadata)
464
+ else:
465
+ save_torch_state_dict(state_dict=sd, save_directory=path,
466
  filename_pattern=pattern, max_shard_size=size)
467
  progress(1, desc=f"Saved FLUX.1 {name} to: {path}")
468
  print(f"Saved FLUX.1 {name} to: {path}")
 
717
  new_repo_id: str = "", local: bool = False, progress=gr.Progress(track_tqdm=True)):
718
  unet_sd_path = savepath.removesuffix("/") + "/transformer"
719
  unet_sd_pattern = "diffusion_pytorch_model{suffix}.safetensors"
720
+ unet_sd_size = "9.5GB"
721
  te_sd_path = savepath.removesuffix("/") + "/text_encoder_2"
722
  te_sd_pattern = "model{suffix}.safetensors"
723
  te_sd_size = "5GB"
724
  clip_sd_path = savepath.removesuffix("/") + "/text_encoder"
725
  clip_sd_pattern = "model{suffix}.safetensors"
726
+ clip_sd_size = "9.5GB"
727
  vae_sd_path = savepath.removesuffix("/") + "/vae"
728
  vae_sd_pattern = "diffusion_pytorch_model{suffix}.safetensors"
729
+ vae_sd_size = "9.5GB"
730
  print_resource_usage() #
731
  metadata = {"format": "pt", **read_safetensors_metadata(loadpath)}
732
  clear_cache()
 
761
  print("Deleted downloaded file.")
762
  clear_cache()
763
  print_resource_usage() #
764
+ unet_sd = convert_flux_transformer_sd_to_diffusers(unet_sd)
765
+ clear_cache()
766
+ print_resource_usage() #
767
  to_safetensors_flux_module(unet_sd, unet_sd_path, unet_sd_pattern, unet_sd_size,
768
  quantization, "Transformer", metadata)
769
  clear_sd(unet_sd)
 
892
  q.put(new_repo_name)
893
  return new_repo_name
894
 
895
+ def convert_url_to_diffusers_repo_flux(dl_url, hf_user, hf_repo, hf_token, civitai_key="", is_private=True, is_overwrite=False,
896
  is_upload_sf=False, data_type="bf16", model_type="dev", dequant=False,
897
  repo_urls=[], fix_only=False, use_original=["vae", "text_encoder"],
898
  progress=gr.Progress(track_tqdm=True)):
 
923
  print(f"Invalid repo name: {new_repo_id}")
924
  progress(1, desc=f"Invalid repo name: {new_repo_id}")
925
  return gr.update(value=repo_urls, choices=repo_urls), gr.update(value="")
926
+ if not is_overwrite and is_repo_exists(new_repo_id):
927
  print(f"Repo already exists: {new_repo_id}")
928
  progress(1, desc=f"Repo already exists: {new_repo_id}")
929
  return gr.update(value=repo_urls, choices=repo_urls), gr.update(value="")
930
+ #save_readme_md(new_path, dl_url)
931
+ repo_url = create_diffusers_repo(new_repo_id, new_path, is_private, is_overwrite)
932
  shutil.rmtree(new_path)
933
  if not repo_urls: repo_urls = []
934
  repo_urls.append(repo_url)