Spaces: Running on Zero
import os

import torch
from diffusers import DiffusionPipeline, AutoencoderTiny

# Model registry: maps a display name to the pipeline class, hub repo id,
# `from_pretrained` kwargs, and a human-readable description.
models = {
    "FLUX.1-dev": {
        "pipeline_class": DiffusionPipeline,
        "model_id": "black-forest-labs/FLUX.1-dev",
        # bfloat16 halves the memory footprint of the weights.
        "config": {"torch_dtype": torch.bfloat16},
        "description": "**FLUX.1-dev** is a development model that focuses on delivering highly detailed and artistically rich images.",
    },
}
# Helper function to get the Hugging Face token securely
def get_hf_token():
    """Return the Hugging Face access token, or None if unavailable.

    Lookup order:
      1. Colab secrets (``google.colab.userdata``) when running in Colab.
      2. The ``HF_TOKEN`` environment variable.

    The original version raised ``RuntimeError`` when the Colab secret was
    missing; that raise escaped the ``except ImportError`` handler and crashed
    instead of falling back, even though the caller already handles a falsy
    return. We now fall through to the environment lookup in that case.
    """
    try:
        from google.colab import userdata  # Only importable inside Colab.

        hf_token = userdata.get("HF_TOKEN")
        if hf_token:
            return hf_token
        # Secret not set in Colab: fall through to the environment variable.
    except ImportError:  # Not running in Colab.
        pass
    return os.getenv("HF_TOKEN")
# Function to pre-download models
def download_all_models():
    """Pre-download every configured pipeline plus the tiny VAE.

    Weights are fetched via ``from_pretrained`` so they land in the local
    Hugging Face cache; later loads are then instant. Per-model failures are
    reported and skipped so one bad repo does not abort the whole warm-up.

    Raises:
        ValueError: if no Hugging Face token is available at all.
    """
    print("Downloading all models...")
    hf_token = get_hf_token()  # Fetch the token once and reuse it below.
    if not hf_token:
        raise ValueError("HF_TOKEN is not available. Please set it in Colab secrets or environment variables.")
    for model_key, config in models.items():
        try:
            pipeline_class = config["pipeline_class"]
            model_id = config["model_id"]
            # Download the pipeline (weights will be cached).
            pipeline_class.from_pretrained(model_id, token=hf_token, **config.get("config", {}))
            print(f"Model '{model_key}' downloaded successfully.")
        except Exception as e:
            # Best-effort warm-up: report and continue with the next model.
            print(f"Error downloading model '{model_key}': {e}")
    print("Model download process complete.")
    # Download the only VAE needed.
    print("Downloading VAE...")
    try:
        AutoencoderTiny.from_pretrained("madebyollin/taef1", token=hf_token)
        print("VAE 'taef1' downloaded successfully.")
    except Exception as e:
        print(f"Error downloading VAE: {e}")
    print("VAE download process complete.")
# NOTE(review): the string literal below is an inactive reference copy — it is a
# bare expression statement and never executes. It originally also contained an
# exact duplicate of the live code above (imports, `models`, `get_hf_token`,
# `download_all_models`); that duplicate was removed as dead repetition. Only
# the extended multi-model variant (FLUX.1-schnell + separate `download_vaes`)
# is kept, verbatim, for reference.
'''
import os
from diffusers import DiffusionPipeline, FluxPipeline, AutoencoderTiny, AutoencoderKL
from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast
import torch

# Define models and their configurations (same as in app.py)
models = {
    "FLUX.1-schnell": {
        "pipeline_class": FluxPipeline,
        "model_id": "black-forest-labs/FLUX.1-schnell",
        "config": {"torch_dtype": torch.bfloat16},
        "description": "**FLUX.1-schnell** is a fast and efficient model designed for quick image generation.",
    },
    "FLUX.1-dev": {
        "pipeline_class": DiffusionPipeline,
        "model_id": "black-forest-labs/FLUX.1-dev",
        "config": {"torch_dtype": torch.bfloat16},
        "description": "**FLUX.1-dev** is a development model that focuses on delivering highly detailed and artistically rich images.",
    },
}

# Helper function to get the Hugging Face token securely
def get_hf_token():
    try:
        from google.colab import userdata  # Try to get token from Colab secrets
        hf_token = userdata.get('HF_TOKEN')
        if hf_token:
            return hf_token
        else:
            raise RuntimeError("HF_TOKEN not found in Colab secrets.")
    except ImportError:  # Not running in Colab
        return os.getenv("HF_TOKEN", None)

# Function to pre-download models
def download_all_models():
    print("Downloading all models...")
    _HF_TOKEN = get_hf_token()  # Get the token once
    if not _HF_TOKEN:
        raise ValueError("HF_TOKEN is not available. Please set it in Colab secrets or environment variables.")
    for model_key, config in models.items():
        try:
            pipeline_class = config["pipeline_class"]
            model_id = config["model_id"]
            # Download the pipeline (weights will be cached)
            pipeline_class.from_pretrained(model_id, token=_HF_TOKEN, **config.get("config", {}))
            print(f"Model '{model_key}' downloaded successfully.")
        except Exception as e:
            print(f"Error downloading model '{model_key}': {e}")
    print("Model download process complete.")

def download_vaes():
    print("Downloading VAEs...")
    try:
        # Download taef1
        AutoencoderTiny.from_pretrained("madebyollin/taef1", use_auth_token=get_hf_token())
        print("VAE 'taef1' downloaded successfully.")
        # Download good_vae (AutoencoderKL from FLUX.1-dev)
        AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-dev", subfolder="vae", use_auth_token=get_hf_token())
        print("VAE 'good_vae' downloaded successfully.")
    except Exception as e:
        print(f"Error downloading VAEs: {e}")
    print("VAE download process complete.")
'''