nsfwalex committed on
Commit
d37c7ab
1 Parent(s): 01406a3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -32
app.py CHANGED
@@ -213,37 +213,7 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
213
  seed = random.randint(0, MAX_SEED)
214
  return seed
215
 
216
- @spaces.GPU(duration=60)
217
- def generate(prompt, progress=gr.Progress(track_tqdm=True)):
218
- negative_prompt = cfg.get("negative_prompt", "")
219
- style_selection = ""
220
- use_negative_prompt = True
221
- seed = 0
222
- width = cfg.get("width", 1024)
223
- height = cfg.get("width", 768)
224
- inference_steps = cfg.get("inference_steps", 30)
225
- randomize_seed = True
226
- guidance_scale = cfg.get("guidance_scale", 7.5)
227
- prompt_str = cfg.get("prompt", "{prompt}").replace("{prompt}", prompt)
228
-
229
- seed = int(randomize_seed_fn(seed, randomize_seed))
230
- generator = torch.Generator(pipe.device).manual_seed(seed)
231
-
232
- images = pipe(
233
- prompt=prompt_str,
234
- negative_prompt=negative_prompt,
235
- width=width,
236
- height=height,
237
- guidance_scale=guidance_scale,
238
- num_inference_steps=inference_steps,
239
- generator=generator,
240
- num_images_per_prompt=NUM_IMAGES_PER_PROMPT,
241
- output_type="pil",
242
- ).images
243
- images = [save_image(img) for img in images]
244
- image_paths = [i[1] for i in images]
245
- print(prompt_str, image_paths)
246
- return [i[0] for i in images], prompt
247
 
248
  with gr.Blocks(css=css,head=js,fill_height=True) as demo:
249
  with gr.Row(equal_height=False):
@@ -265,7 +235,40 @@ with gr.Blocks(css=css,head=js,fill_height=True) as demo:
265
  )
266
  random_button = gr.Button("Surprise Me", scale=1, min_width=10)
267
  run_button = gr.Button( "GO!", scale=1, min_width=20, variant="primary",icon="https://huggingface.co/spaces/nsfwalex/sd_card/resolve/main/hot.svg")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
269
  def on_demo_load(request: gr.Request):
270
  current_domain = request.request.headers.get("Host", "")
271
 
@@ -306,7 +309,7 @@ with gr.Blocks(css=css,head=js,fill_height=True) as demo:
306
 
307
 
308
  result.change(fn=lambda x,y:x, inputs=[prompt,result], outputs=[], js=f'''(p,img)=>window.uploadImage(p, img,"process_finished","demo_hf_{cfg.get("name")}_card", "{cfg["model_id"]}")''')
309
- run_button.click(generate, inputs=[prompt], outputs=[result, prompt], js=f'''(p)=>window.postMessageToParent(p,"process_started","demo_hf_{cfg.get("name")}_card", "click_go")''')
310
  random_button.click(fn=lambda x:x, inputs=[prompt], outputs=[prompt], js='''(p)=>window.g(p)''')
311
  demo.load(fn=on_demo_load, inputs=[], outputs=[result], js='''()=>onDemoLoad()''')
312
  if __name__ == "__main__":
 
213
  seed = random.randint(0, MAX_SEED)
214
  return seed
215
 
216
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
217
 
218
  with gr.Blocks(css=css,head=js,fill_height=True) as demo:
219
  with gr.Row(equal_height=False):
 
235
  )
236
  random_button = gr.Button("Surprise Me", scale=1, min_width=10)
237
  run_button = gr.Button( "GO!", scale=1, min_width=20, variant="primary",icon="https://huggingface.co/spaces/nsfwalex/sd_card/resolve/main/hot.svg")
238
+
239
@spaces.GPU(duration=60)
def generate(p, progress=gr.Progress(track_tqdm=True)):
    """Generate images for prompt *p* with the configured diffusion pipeline.

    Reads generation settings (negative prompt, size, steps, guidance, prompt
    template) from the module-level ``cfg`` dict, always randomizes the seed,
    runs ``pipe`` once, saves each resulting PIL image via ``save_image`` and
    prints the rendered prompt plus saved paths for logging.

    Parameters
    ----------
    p : str
        The user-entered prompt; substituted into cfg's "{prompt}" template.
    progress : gr.Progress
        Gradio progress tracker (tracks tqdm output from the pipeline).

    Returns
    -------
    list
        The saved images (first element of each ``save_image`` result),
        suitable for a Gradio gallery output.
    """
    negative_prompt = cfg.get("negative_prompt", "")
    seed = 0
    width = cfg.get("width", 1024)
    # BUG FIX: original code read cfg.get("width", 768) here, so a configured
    # "height" was ignored and a configured "width" silently became the height.
    height = cfg.get("height", 768)
    inference_steps = cfg.get("inference_steps", 30)
    randomize_seed = True
    guidance_scale = cfg.get("guidance_scale", 7.5)
    prompt_str = cfg.get("prompt", "{prompt}").replace("{prompt}", p)
    # NOTE(review): mutating a Gradio component's .value at runtime does not
    # update per-session state in Blocks apps — confirm this fallback is
    # actually needed; kept to preserve existing behavior.
    if not prompt.value:
        prompt.value = p
    seed = int(randomize_seed_fn(seed, randomize_seed))
    generator = torch.Generator(pipe.device).manual_seed(seed)

    images = pipe(
        prompt=prompt_str,
        negative_prompt=negative_prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=inference_steps,
        generator=generator,
        num_images_per_prompt=NUM_IMAGES_PER_PROMPT,
        output_type="pil",
    ).images
    images = [save_image(img) for img in images]
    image_paths = [i[1] for i in images]
    print(prompt_str, image_paths)
    return [i[0] for i in images]
271
+
272
  def on_demo_load(request: gr.Request):
273
  current_domain = request.request.headers.get("Host", "")
274
 
 
309
 
310
 
311
  result.change(fn=lambda x,y:x, inputs=[prompt,result], outputs=[], js=f'''(p,img)=>window.uploadImage(p, img,"process_finished","demo_hf_{cfg.get("name")}_card", "{cfg["model_id"]}")''')
312
+ run_button.click(generate, inputs=[prompt], outputs=[result], js=f'''(p)=>window.postMessageToParent(p,"process_started","demo_hf_{cfg.get("name")}_card", "click_go")''')
313
  random_button.click(fn=lambda x:x, inputs=[prompt], outputs=[prompt], js='''(p)=>window.g(p)''')
314
  demo.load(fn=on_demo_load, inputs=[], outputs=[result], js='''()=>onDemoLoad()''')
315
  if __name__ == "__main__":