fffiloni committed (verified)
Commit e5b1cf7 · 1 Parent(s): 5022594

Update app.py

Files changed (1)
  1. app.py +20 -2
app.py CHANGED
@@ -69,7 +69,7 @@ def custom_model_changed(custom_model, previous_model):
     return status_message
 
 @spaces.GPU
-def infer (custom_model, weight_name, prompt, inf_steps, guidance_scale, seed, lora_weight, progress=gr.Progress(track_tqdm=True)):
+def infer (custom_model, weight_name, prompt, inf_steps, guidance_scale, width, height, seed, lora_weight, progress=gr.Progress(track_tqdm=True)):
 
     vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
 
@@ -106,6 +106,8 @@ def infer (custom_model, weight_name, prompt, inf_steps, guidance_scale, seed, lora_weight, progress=gr.Progress(track_tqdm=True)):
     image = pipe(
         prompt=prompt,
         num_inference_steps=inf_steps,
+        width=width,
+        height=height,
         guidance_scale = guidance_scale,
         generator=generator,
         cross_attention_kwargs={"scale": lora_weight}
@@ -235,6 +237,22 @@ with gr.Blocks(css=css) as demo:
                 step=0.1,
                 value=7.5
             )
+
+        with gr.Row():
+            width = gr.Slider(
+                label="Width",
+                minimum=256,
+                maximum=MAX_IMAGE_SIZE,
+                step=32,
+                value=1024,
+            )
+            height = gr.Slider(
+                label="Height",
+                minimum=256,
+                maximum=MAX_IMAGE_SIZE,
+                step=32,
+                value=1024,
+            )
 
         with gr.Row():
             seed = gr.Slider(
@@ -273,7 +291,7 @@ with gr.Blocks(css=css) as demo:
     )
     submit_btn.click(
         fn = infer,
-        inputs = [custom_model, weight_name, prompt_in, inf_steps, guidance_scale, seed, lora_weight],
+        inputs = [custom_model, weight_name, prompt_in, inf_steps, guidance_scale, width, height, seed, lora_weight],
         outputs = [image_out, last_used_seed]
     )
 
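
For readers following along outside the Space, here is a minimal, self-contained sketch of how the changed pieces fit together after this commit: the two new Width/Height sliders feed submit_btn.click, which forwards their values to infer, which in turn passes width and height to the SDXL pipeline call. Identifiers that appear in the diff (infer, custom_model, weight_name, inf_steps, guidance_scale, lora_weight, MAX_IMAGE_SIZE, image_out, last_used_seed) are kept; the base model ID, the slider ranges other than Width/Height, the simplified layout, and the omission of the @spaces.GPU decorator are assumptions for illustration, not a copy of the Space's full app.py.

# Minimal sketch of the post-commit wiring (assumptions noted above); requires a CUDA GPU.
import torch
import gradio as gr
from diffusers import AutoencoderKL, DiffusionPipeline

MAX_IMAGE_SIZE = 1024  # the diff only references this constant; 1024 is an assumed value

def infer(custom_model, weight_name, prompt, inf_steps, guidance_scale,
          width, height, seed, lora_weight, progress=gr.Progress(track_tqdm=True)):
    # fp16-fixed SDXL VAE, as shown in the diff context
    vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix",
                                        torch_dtype=torch.float16)
    # Base model choice is an assumption; the diff does not show it
    pipe = DiffusionPipeline.from_pretrained(
        "stabilityai/stable-diffusion-xl-base-1.0",
        vae=vae, torch_dtype=torch.float16,
    ).to("cuda")
    # Load the user-supplied LoRA weights from the Hub repo named in `custom_model`
    pipe.load_lora_weights(custom_model, weight_name=weight_name)

    generator = torch.Generator(device="cuda").manual_seed(int(seed))
    image = pipe(
        prompt=prompt,
        num_inference_steps=inf_steps,
        width=width,    # new in this commit
        height=height,  # new in this commit
        guidance_scale=guidance_scale,
        generator=generator,
        cross_attention_kwargs={"scale": lora_weight},
    ).images[0]
    return image, seed

with gr.Blocks() as demo:
    custom_model = gr.Textbox(label="Custom model")
    weight_name = gr.Textbox(label="Weight name")
    prompt_in = gr.Textbox(label="Prompt")
    inf_steps = gr.Slider(label="Inference steps", minimum=1, maximum=50, step=1, value=25)
    guidance_scale = gr.Slider(label="Guidance scale", minimum=0.0, maximum=10.0, step=0.1, value=7.5)
    with gr.Row():
        width = gr.Slider(label="Width", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024)
        height = gr.Slider(label="Height", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024)
    with gr.Row():
        seed = gr.Slider(label="Seed", minimum=0, maximum=500000, step=1, value=42)
        lora_weight = gr.Slider(label="LoRA weight", minimum=0.0, maximum=1.0, step=0.05, value=0.9)
    submit_btn = gr.Button("Submit")
    image_out = gr.Image(label="Image output")
    last_used_seed = gr.Number(label="Last used seed")

    # Width and Height now travel with the other inputs, matching infer()'s new signature
    submit_btn.click(
        fn=infer,
        inputs=[custom_model, weight_name, prompt_in, inf_steps, guidance_scale,
                width, height, seed, lora_weight],
        outputs=[image_out, last_used_seed],
    )

if __name__ == "__main__":
    demo.launch()

The only behavioral change in the commit is that width and height now flow from the sliders through the click handler into pipe(); the rest of the snippet is scaffolding so it runs on its own.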