Spaces: Running on Zero
Anonymous committed · Commit fb2d9f3 · 1 Parent(s): 21f92ab
load in advance

app.py CHANGED
@@ -8,16 +8,18 @@ from free_lunch_utils import register_free_upblock2d, register_free_crossattn_up
 
 from pipeline_freescale import StableDiffusionXLPipeline
 from pipeline_freescale_turbo import StableDiffusionXLPipeline_Turbo
+
+dtype = torch.bfloat16
+device = "cuda" if torch.cuda.is_available() else "cpu"
 model_ckpt = "stabilityai/stable-diffusion-xl-base-1.0"
 model_ckpt_turbo = "stabilityai/sdxl-turbo"
+pipe = StableDiffusionXLPipeline.from_pretrained(model_ckpt, torch_dtype=dtype).to(device)
+pipe_turbo = StableDiffusionXLPipeline_Turbo.from_pretrained(model_ckpt_turbo, torch_dtype=dtype).to(device)
+generator = torch.Generator(device='cuda')
+torch.cuda.empty_cache()
 
-
-pipe_turbo = StableDiffusionXLPipeline_Turbo.from_pretrained(model_ckpt_turbo, torch_dtype=torch.float16)
-
-@spaces.GPU(duration=120)
+@spaces.GPU(duration=100)
 def infer_gpu_part(pipe, seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps):
-    pipe = pipe.to("cuda")
-    generator = torch.Generator(device='cuda')
     generator = generator.manual_seed(seed)
     if not disable_freeu:
         register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
@@ -29,10 +31,8 @@ def infer_gpu_part(pipe, seed, prompt, negative_prompt, ddim_steps, guidance_sca
     ).images[0]
     return result
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=20)
 def infer_gpu_part_turbo(pipe, seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps):
-    pipe = pipe.to("cuda")
-    generator = torch.Generator(device='cuda')
     generator = generator.manual_seed(seed)
     if not disable_freeu:
         register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
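
For context, the change follows the usual ZeroGPU "load in advance" pattern: the pipelines are built once at import time, so the @spaces.GPU-decorated functions spend their GPU allocation on inference only. Below is a minimal, self-contained sketch of that pattern, not the Space's actual code: it uses the stock diffusers StableDiffusionXLPipeline as a stand-in for the custom FreeScale pipelines, and the infer() entry point is hypothetical (the real ones are infer_gpu_part and infer_gpu_part_turbo).

# Sketch of the "load in advance" pattern, assuming a ZeroGPU Space with the
# spaces, torch, and diffusers packages installed. StableDiffusionXLPipeline
# and infer() are illustrative stand-ins.
import spaces
import torch
from diffusers import StableDiffusionXLPipeline

dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"

# Heavy work happens once at import time, before any GPU time is requested.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=dtype
).to(device)

@spaces.GPU(duration=100)  # the GPU is attached only while this function runs
def infer(prompt, seed=0, steps=50):
    generator = torch.Generator(device=device).manual_seed(seed)
    return pipe(prompt, num_inference_steps=steps, generator=generator).images[0]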