Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -59,13 +59,13 @@ def sample_model():
|
|
59 |
@spaces.GPU
|
60 |
def inference( prompt, negative_prompt, guidance_scale, ddim_steps, seed):
|
61 |
global device
|
62 |
-
global generator
|
63 |
global unet
|
64 |
global vae
|
65 |
global text_encoder
|
66 |
global tokenizer
|
67 |
global noise_scheduler
|
68 |
-
|
69 |
latents = torch.randn(
|
70 |
(1, unet.in_channels, 512 // 8, 512 // 8),
|
71 |
generator = generator,
|
@@ -207,6 +207,7 @@ def sample_then_run():
|
|
207 |
|
208 |
@spaces.GPU
|
209 |
def start_items():
|
|
|
210 |
global young
|
211 |
global pointy
|
212 |
global wavy
|
|
|
59 |
@spaces.GPU
|
60 |
def inference( prompt, negative_prompt, guidance_scale, ddim_steps, seed):
|
61 |
global device
|
62 |
+
#global generator
|
63 |
global unet
|
64 |
global vae
|
65 |
global text_encoder
|
66 |
global tokenizer
|
67 |
global noise_scheduler
|
68 |
+
torch.Generator(device=device).manual_seed(seed)
|
69 |
latents = torch.randn(
|
70 |
(1, unet.in_channels, 512 // 8, 512 // 8),
|
71 |
generator = generator,
|
|
|
207 |
|
208 |
@spaces.GPU
|
209 |
def start_items():
|
210 |
+
print("Starting items")
|
211 |
global young
|
212 |
global pointy
|
213 |
global wavy
|