Update app.py
app.py CHANGED
@@ -11,11 +11,14 @@ from accelerate import Accelerator
 accelerator = Accelerator()
 
 def plex(prompt,neg_prompt):
+    apol=[]
     pipe = accelerator.prepare(AutoPipelineForText2Image.from_pretrained("openskyml/overall-v1", torch_dtype=torch.float32, variant=None, use_safetensors=False, safety_checker=None))
     pipe = accelerator.prepare(pipe.to("cpu"))
-    image = pipe(prompt=prompt, negative_prompt=neg_prompt,num_inference_steps=10)
-
+    image = pipe(prompt=prompt, negative_prompt=neg_prompt,num_inference_steps=10)
+    for a, imze in enumerate(image["images"]):
+        apol.append(imze)
+    return apol
 
-iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="negative_prompt", value="low quality, bad quality")],outputs=gr.
+iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="negative_prompt", value="low quality, bad quality")],outputs=gr.Gallery(label="Generated Output Image", columns=1), title="Txt2Img_Overall_v1_SD",description="Running on cpu, very slow!")
 iface.queue(max_size=1,api_open=False)
 iface.launch(max_threads=1)
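For context, a minimal sketch of what the complete app.py plausibly looks like after this commit. The imports are assumptions (only "from accelerate import Accelerator" is visible in the hunk header), and the Gradio calls follow the 3.x-style API the Space appears to use; the function body and Interface wiring are copied from the added lines above.

import gradio as gr
import torch
from diffusers import AutoPipelineForText2Image
from accelerate import Accelerator

accelerator = Accelerator()

def plex(prompt, neg_prompt):
    apol = []
    # Load the pipeline on CPU in float32 on every call, as in the committed code.
    pipe = accelerator.prepare(AutoPipelineForText2Image.from_pretrained(
        "openskyml/overall-v1", torch_dtype=torch.float32,
        variant=None, use_safetensors=False, safety_checker=None))
    pipe = accelerator.prepare(pipe.to("cpu"))
    image = pipe(prompt=prompt, negative_prompt=neg_prompt, num_inference_steps=10)
    # Diffusers pipeline outputs can be indexed like a dict; collect every
    # generated PIL image so the Gallery output receives a list.
    for a, imze in enumerate(image["images"]):
        apol.append(imze)
    return apol

iface = gr.Interface(
    fn=plex,
    inputs=[gr.Textbox(label="Prompt"),
            gr.Textbox(label="negative_prompt", value="low quality, bad quality")],
    outputs=gr.Gallery(label="Generated Output Image", columns=1),
    title="Txt2Img_Overall_v1_SD",
    description="Running on cpu, very slow!",
)
iface.queue(max_size=1, api_open=False)
iface.launch(max_threads=1)

The substance of the change is that plex now collects the pipeline's generated images into a list and returns it, which matches the gr.Gallery output component added to the Interface in place of the previous output.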