|
import gradio as gr
|
|
import spaces
|
|
import os
|
|
from convert_url_to_diffusers_flux_gr import convert_url_to_diffusers_repo_flux
|
|
os.environ["GRADIO_ANALYTICS_ENABLED"] = "False"
|
|
|
|
|
|
# No custom CSS for this app; kept as a named constant so it can be filled in later.
css = ""
|
|
|
|
# UI definition: a single-column form collecting the model URL, Hugging Face
# credentials, and conversion options, wired to the converter on "Submit".
# NOTE(review): nesting below is reconstructed from Gradio context-manager
# semantics (the pasted source had its indentation stripped) — confirm the
# Row/Column grouping against the deployed Space.
with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
    gr.Markdown("# Download and convert FLUX.1 ComfyUI formatted safetensors to Diffusers and create your repo")
    # Static usage notice / instructions. Plain string (no placeholders), so no f-prefix.
    gr.Markdown(
        """
**⚠️IMPORTANT NOTICE⚠️**<br>
# If the output setting was fp8, this space could be completed in about 10 minutes, but **the torch on HF's server apparently does not support fp8 input**, which makes no sense.
The conversion to bf16 does not seem feasible in any way at present. Even if the file is processed as shard, it still does not work due to lack of RAM. (P.S. But then the RAM was down to only 60% consumption. I don't know why anymore.)
I guess I'll have to freeze it until someone with more advanced technology realizes it, or until Diffusers, pytorch, or Quanto will be upgraded.<br><br>
From an information security standpoint, it is dangerous to expose your access token or key to others.
If you do use it, I recommend that you duplicate this space on your own account before doing so.
Keys and tokens could be set to SECRET (HF_TOKEN, CIVITAI_API_KEY) if it's placed in your own space.
It saves you the trouble of typing them in.<br>
<br>
**The steps are the following**:
- Paste a write-access token from [hf.co/settings/tokens](https://huggingface.co./settings/tokens).
- Input a model download url from the Hub or Civitai or other sites.
- If you want to download a model from Civitai, paste a Civitai API Key.
- Input your HF user ID. e.g. 'yourid'.
- Input your new repo name. If empty, auto-complete. e.g. 'newrepo'.
- Set the parameters. If not sure, just use the defaults.
- Click "Submit".
- Patiently wait until the output changes. It takes approximately ? minutes (downloading from HF).
"""
    )
    with gr.Column():
        # Source model to download (Hub / Civitai / direct URL).
        dl_url = gr.Textbox(label="URL to download", placeholder="https://huggingface.co./marduk191/Flux.1_collection/blob/main/flux.1_dev_fp8_fp16t5-marduk191.safetensors", value="", max_lines=1)
        with gr.Row():
            # Destination repo: <hf_user>/<hf_repo>; repo name auto-completes if left empty.
            hf_user = gr.Textbox(label="Your HF user ID", placeholder="username", value="", max_lines=1)
            hf_repo = gr.Textbox(label="New repo name", placeholder="reponame", info="If empty, auto-complete", value="", max_lines=1)
        with gr.Row():
            # Credentials: HF write token is required; Civitai key only for Civitai URLs.
            hf_token = gr.Textbox(label="Your HF write token", placeholder="hf_...", value="", max_lines=1)
            civitai_key = gr.Textbox(label="Your Civitai API Key (Optional)", info="If you download model from Civitai...", placeholder="", value="", max_lines=1)
        with gr.Row():
            # Conversion parameters.
            data_type = gr.Radio(label="Output data type", choices=["bf16", "fp8"], value="fp8")
            model_type = gr.Radio(label="Original model repo", choices=["dev", "schnell", "dev fp8", "schnell fp8"], value="dev")
            use_original = gr.CheckboxGroup(label="Use original repo version", choices=["vae", "text_encoder", "text_encoder_2"], value=["vae", "text_encoder"])
        with gr.Row():
            is_dequat = gr.Checkbox(label="Dequantization", info="Deadly slow", value=False)
            # Hidden option (kept for the converter's signature; not exposed in the UI).
            is_upload_sf = gr.Checkbox(label="Upload single safetensors file into new repo", value=False, visible=False)
            is_fix_only = gr.Checkbox(label="Only fixing", value=False)
            is_private = gr.Checkbox(label="Create private repo", value=True)
            # Fixed typo in label: "Overweite" -> "Overwrite".
            is_overwrite = gr.Checkbox(label="Overwrite repo", value=True)
        run_button = gr.Button(value="Submit")
        # Hidden state carrying the list of created repo URLs between runs.
        repo_urls = gr.CheckboxGroup(visible=False, choices=[], value=None)
        output_md = gr.Markdown(label="Output")
        gr.DuplicateButton(value="Duplicate Space")

    # Wire the form to the converter; it returns the updated repo-URL state
    # and a markdown status message.
    gr.on(
        triggers=[run_button.click],
        fn=convert_url_to_diffusers_repo_flux,
        inputs=[dl_url, hf_user, hf_repo, hf_token, civitai_key, is_private, is_overwrite, is_upload_sf,
                data_type, model_type, is_dequat, repo_urls, is_fix_only, use_original],
        outputs=[repo_urls, output_md],
    )

# Single-worker queue: conversions are RAM-heavy, so serialize jobs and cap the backlog.
demo.queue(default_concurrency_limit=1, max_size=5).launch(debug=True, show_api=False)
|
|
|