Spaces: Running on Zero
# Reference: https://huggingface.co./spaces/FoundationVision/LlamaGen/blob/main/app.py | |
from PIL import Image | |
import gradio as gr | |
from imagenet_classes import imagenet_idx2classname | |
import torch | |
torch.backends.cuda.matmul.allow_tf32 = True | |
torch.backends.cudnn.allow_tf32 = True | |
import time | |
import argparse | |
import demo_util | |
import os | |
# Run on GPU when available; everything below moves to this device.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Checkpoint filenames per model variant: (tokenizer weights, generator weights).
model2ckpt = {
    "TiTok-L-32": ("tokenizer_titok_l32.bin", "generator_titok_l32.bin"),
}

# Download pretrained weights from Google Drive on first launch only.
if not os.path.exists("tokenizer_titok_l32.bin"):
    os.system("gdown 1I_m2Vm4JgQsa7bZVORj-nVhP8fgQLngd")
if not os.path.exists("generator_titok_l32.bin"):
    os.system("gdown 1IgqZ_vwGIj2ZWOPuCzilxeQ2UrMVY93l")

# CLI defaults mirror the initial values of the Gradio sliders below.
parser = argparse.ArgumentParser()
parser.add_argument("--precision", type=str, default='bf16', choices=["none", "fp16", "bf16"])
parser.add_argument("--guidance_scale", type=float, default=3.5)
parser.add_argument("--randomize_temperature", type=float, default=1.0)
parser.add_argument("--num_sample_steps", type=int, default=8)
parser.add_argument("--seed", type=int, default=42)
parser.add_argument("--temperature", type=float, default=1.0, help="temperature value to sample with")
args = parser.parse_args()

# Build tokenizer and generator from the TiTok-L-32 config and move them
# to the selected device.
config = demo_util.get_config("configs/titok_l32.yaml")
print(config)
titok_tokenizer = demo_util.get_titok_tokenizer(config)
print(titok_tokenizer)
titok_generator = demo_util.get_titok_generator(config)
print(titok_generator)
titok_tokenizer = titok_tokenizer.to(device)
titok_generator = titok_generator.to(device)
def demo_infer(guidance_scale, randomize_temperature, num_sample_steps,
               class_label, seed, num_images=4):
    """Sample a batch of images for one ImageNet class with TiTok.

    Args:
        guidance_scale: classifier-free guidance strength.
        randomize_temperature: sampling temperature for token randomization.
        num_sample_steps: number of generator sampling steps.
        class_label: ImageNet-1K class index to condition on.
        seed: RNG seed for reproducible sampling.
        num_images: how many images to generate per call (default 4,
            matching the original demo).

    Returns:
        A list of ``num_images`` PIL images.
    """
    # Repeat the same class label so all images in the batch share it.
    class_labels = [class_label for _ in range(num_images)]

    # Seed both CPU and CUDA RNGs; torch.cuda.manual_seed is a no-op on CPU.
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)

    t1 = time.time()
    generated_image = demo_util.sample_fn(
        generator=titok_generator,
        tokenizer=titok_tokenizer,
        labels=class_labels,
        guidance_scale=guidance_scale,
        randomize_temperature=randomize_temperature,
        num_sample_steps=num_sample_steps,
        device=device
    )
    sampling_time = time.time() - t1
    print(f"generation takes about {sampling_time:.2f} seconds.")

    # sample_fn returns an array-like batch; wrap each sample as a PIL image.
    return [Image.fromarray(sample) for sample in generated_image]
# Gradio UI: a single "Generate" tab with sampling controls on the left
# and a gallery of generated images on the right.
with gr.Blocks() as demo:
    gr.Markdown("<h1 style='text-align: center'>An Image is Worth 32 Tokens for Reconstruction and Generation</h1>")
    with gr.Tabs():
        with gr.TabItem('Generate'):
            with gr.Row():
                with gr.Column():
                    with gr.Row():
                        # type="index" passes the class INDEX (not the name)
                        # to demo_infer, as expected for conditioning labels.
                        i1k_class = gr.Dropdown(
                            list(imagenet_idx2classname.values()),
                            value='macaw',
                            type="index", label='ImageNet-1K Class'
                        )
                    # Slider defaults match the argparse defaults above.
                    guidance_scale = gr.Slider(minimum=1, maximum=25, step=0.1, value=3.5, label='Classifier-free Guidance Scale')
                    randomize_temperature = gr.Slider(minimum=0., maximum=10.0, step=0.1, value=1.0, label='randomize_temperature')
                    num_sample_steps = gr.Slider(minimum=1, maximum=32, step=1, value=8, label='num_sample_steps')
                    seed = gr.Slider(minimum=0, maximum=1000, step=1, value=42, label='Seed')
                    button = gr.Button("Generate", variant="primary")
                with gr.Column():
                    output = gr.Gallery(label='Generated Images', height=700)
                    # Input order must match demo_infer's positional parameters.
                    button.click(demo_infer, inputs=[
                        guidance_scale, randomize_temperature, num_sample_steps,
                        i1k_class, seed],
                        outputs=[output])

# Queue requests so concurrent users are served one generation at a time.
demo.queue()
demo.launch(debug=True)