rafaaa2105 committed
Commit a17e285
Parent(s): 9ddb94a

Update app.py

Files changed (1): app.py +3 -4
app.py CHANGED
@@ -7,16 +7,15 @@ import spaces
 model_list = os.environ.get("MODELS").split(",")
 lora_list = os.environ.get("LORAS") # Not in use
 
-@spaces.GPU
-# Load the available models and their pipelines
-models = []
+models = {}
 for model_name in model_list:
     try:
         models[model_name] = pipeline("text-to-image", model=model_name, torch_dtype=torch.float16).to("cuda")
     except Exception as e:
         print(f"Error loading model {model_name}: {e}")
 
-# Define the function to generate the image
+
+@spaces.GPU
 def generate_image(model_name, prompt, negative_prompt, num_inference_steps, guidance_scale):
     pipe = models[model_name]
     image = pipe(prompt, negative_prompt=negative_prompt, num_inference_steps=num_inference_steps, guidance_scale=guidance_scale)["sample"][0]
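
For context on the change: `models` is indexed by model name inside the loop, so it has to be a dict rather than a list, and `@spaces.GPU` (the ZeroGPU decorator from the `spaces` package) applies to a function, so it now decorates `generate_image` instead of sitting at module level. Below is a minimal sketch of the resulting pattern, assuming diffusers' `DiffusionPipeline` in place of the `pipeline(...)` helper and the newer `.images` output; names and defaults beyond those in the diff are illustrative, not the actual app.py.

import os

import torch
import spaces
from diffusers import DiffusionPipeline

# Assumption: MODELS is a comma-separated list of model ids; the "" default avoids an
# AttributeError when the variable is unset (the committed code calls .split() directly).
model_list = os.environ.get("MODELS", "").split(",")

# models must be a dict because it is keyed by model name below (the old code used a list).
models = {}
for model_name in model_list:
    try:
        models[model_name] = DiffusionPipeline.from_pretrained(
            model_name, torch_dtype=torch.float16
        ).to("cuda")
    except Exception as e:
        print(f"Error loading model {model_name}: {e}")


# On ZeroGPU Spaces, @spaces.GPU marks the function that needs the GPU for the duration
# of a call; decorating module-level code has no effect, hence the move in this commit.
@spaces.GPU
def generate_image(model_name, prompt, negative_prompt, num_inference_steps, guidance_scale):
    pipe = models[model_name]
    result = pipe(
        prompt,
        negative_prompt=negative_prompt,
        num_inference_steps=num_inference_steps,
        guidance_scale=guidance_scale,
    )
    # Recent diffusers pipelines expose outputs as .images; the committed code uses ["sample"][0].
    return result.images[0]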