multimodalart committed
Commit 0b5db90
1 Parent(s): 582601f

Update app.py

Files changed (1)
1. app.py +3 -1
app.py CHANGED
@@ -161,6 +161,7 @@ def randomize_loras(selected_indices):
 
 @spaces.GPU(duration=70)
 def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, progress):
+    print("Entrou aqui!")
     pipe.to("cuda")
     generator = torch.Generator(device="cuda").manual_seed(seed)
     with calculateDuration("Generating image"):
@@ -180,8 +181,8 @@ def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, progress)
 
 @spaces.GPU(duration=70)
 def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps, cfg_scale, width, height, seed):
-    generator = torch.Generator(device="cuda").manual_seed(seed)
     pipe_i2i.to("cuda")
+    generator = torch.Generator(device="cuda").manual_seed(seed)
     image_input = load_image(image_input_path)
     final_image = pipe_i2i(
         prompt=prompt_mash,
@@ -238,6 +239,7 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
             pipe.load_lora_weights(lora_path, weight_name=lora["weights"], low_cpu_mem_usage=True, adapter_name=lora_name)
         else:
             pipe.load_lora_weights(lora_path, low_cpu_mem_usage=True, adapter_name=lora_name)
+    print(lora_names)
     if image_input is not None:
         pipe_i2i.set_adapters(lora_names, adapter_weights=[lora_scale_1, lora_scale_2])
     else:
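
For context, a minimal sketch of the reordered image-to-image path after this commit: the pipeline is moved to CUDA before the seeded generator is created. This is not the exact app.py code; the pipeline call arguments beyond prompt (image, strength, num_inference_steps, guidance_scale, width, height, generator) are assumptions based on typical diffusers usage, since the hunk above is truncated after prompt=prompt_mash.

# Minimal sketch, assuming a diffusers-style image-to-image pipeline (pipe_i2i)
# and load_image from diffusers.utils.
import torch
from diffusers.utils import load_image

def generate_image_to_image_sketch(pipe_i2i, prompt_mash, image_input_path,
                                   image_strength, steps, cfg_scale,
                                   width, height, seed):
    pipe_i2i.to("cuda")  # move the pipeline to the GPU first
    # then create the seeded generator on the same device
    generator = torch.Generator(device="cuda").manual_seed(seed)
    image_input = load_image(image_input_path)
    final_image = pipe_i2i(
        prompt=prompt_mash,
        image=image_input,        # kwargs from here on are assumed;
        strength=image_strength,  # the diff hunk ends at prompt=prompt_mash
        num_inference_steps=steps,
        guidance_scale=cfg_scale,
        width=width,
        height=height,
        generator=generator,
    ).images[0]
    return final_image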