montyanderson committed
Commit 8637ff9 • Parent(s): 928dc00

`app.py`: img2img

app.py CHANGED
@@ -69,15 +69,13 @@ class Prodia:
         return response
 
 
-def image_to_base64(
-    #
-
-
-
-
-
-    # Encode the bytes to base64
-    img_str = base64.b64encode(buffered.getvalue())
+def image_to_base64(image):
+    # Convert the image to bytes
+    buffered = BytesIO()
+    image.save(buffered, format="PNG") # You can change format to PNG if needed
+
+    # Encode the bytes to base64
+    img_str = base64.b64encode(buffered.getvalue())
 
     return img_str.decode('utf-8') # Convert bytes to string
 
@@ -153,7 +151,6 @@ def send_to_txt2img(image):
     return result
 
 
-
 prodia_client = Prodia(api_key=os.getenv("PRODIA_API_KEY"))
 model_list = prodia_client.list_models()
 model_names = {}
@@ -162,7 +159,7 @@ for model_name in model_list:
     name_without_ext = remove_id_and_ext(model_name)
     model_names[name_without_ext] = model_name
 
-def
+def txt2img(prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, seed):
     result = prodia_client.generate({
         "prompt": prompt,
         "negative_prompt": negative_prompt,
@@ -179,6 +176,25 @@ def flip_text(prompt, negative_prompt, model, steps, sampler, cfg_scale, width,
 
     return job["imageUrl"]
 
+def img2img(input_image, denoising, prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, seed):
+    result = prodia_client.transform({
+        "imageData": image_to_base64(input_image),
+        "denoising_strength": denoising,
+        "prompt": prompt,
+        "negative_prompt": negative_prompt,
+        "model": model,
+        "steps": steps,
+        "sampler": sampler,
+        "cfg_scale": cfg_scale,
+        "width": width,
+        "height": height,
+        "seed": seed
+    })
+
+    job = prodia_client.wait(result)
+
+    return job["imageUrl"]
+
 
 css = """
 #generate {
@@ -186,9 +202,29 @@ css = """
 }
 """
 
-
+samplers = [
+    "Euler",
+    "Euler a",
+    "LMS",
+    "Heun",
+    "DPM2",
+    "DPM2 a",
+    "DPM++ 2S a",
+    "DPM++ 2M",
+    "DPM++ SDE",
+    "DPM fast",
+    "DPM adaptive",
+    "LMS Karras",
+    "DPM2 Karras",
+    "DPM2 a Karras",
+    "DPM++ 2S a Karras",
+    "DPM++ 2M Karras",
+    "DPM++ SDE Karras",
+    "DDIM",
+    "PLMS",
+]
 
-
+with gr.Blocks(css=css) as demo:
     with gr.Row():
         with gr.Column(scale=6):
             model = gr.Dropdown(interactive=True,value="absolutereality_v181.safetensors [3d9d4d2b]", show_label=True, label="Stable Diffusion Checkpoint", choices=prodia_client.list_models())
@@ -196,6 +232,7 @@ with gr.Blocks(css=css) as demo:
         with gr.Column(scale=1):
            gr.Markdown(elem_id="powered-by-prodia", value="AUTOMATIC1111 Stable Diffusion Web UI.<br>Powered by [Prodia](https://prodia.com).<br> For more features and faster gen times check out our [API Docs](https://docs.prodia.com/reference/getting-started-guide)")
 
+
     with gr.Tabs() as tabs:
         with gr.Tab("txt2img", id='t2i'):
             with gr.Row():
@@ -251,7 +288,46 @@ with gr.Blocks(css=css) as demo:
                 with gr.Column(scale=2):
                     image_output = gr.Image(value="https://images.prodia.xyz/8ede1a7c-c0ee-4ded-987d-6ffed35fc477.png")
 
-            text_button.click(
+            text_button.click(txt2img, inputs=[prompt, negative_prompt, model, steps, sampler, cfg_scale, width, height, seed], outputs=image_output)
+
+        with gr.Tab("img2img", id='i2i'):
+            with gr.Row():
+                with gr.Column(scale=6, min_width=600):
+                    i2i_prompt = gr.Textbox("space warrior, beautiful, female, ultrarealistic, soft lighting, 8k", placeholder="Prompt", show_label=False, lines=3)
+                    i2i_negative_prompt = gr.Textbox(placeholder="Negative Prompt", show_label=False, lines=3, value="3d, cartoon, anime, (deformed eyes, nose, ears, nose), bad anatomy, ugly")
+                with gr.Column():
+                    i2i_text_button = gr.Button("Generate", variant='primary', elem_id="generate")
+
+            with gr.Row():
+                with gr.Column(scale=3):
+                    with gr.Tab("Generation"):
+                        i2i_image_input = gr.Image(type="pil")
+
+                        with gr.Row():
+                            with gr.Column(scale=1):
+                                i2i_sampler = gr.Dropdown(value="Euler a", show_label=True, label="Sampling Method", choices=samplers)
+
+                            with gr.Column(scale=1):
+                                i2i_steps = gr.Slider(label="Sampling Steps", minimum=1, maximum=30, value=25, step=1)
+
+                        with gr.Row():
+                            with gr.Column(scale=1):
+                                i2i_width = gr.Slider(label="Width", maximum=1024, value=512, step=8)
+                                i2i_height = gr.Slider(label="Height", maximum=1024, value=512, step=8)
+
+                            with gr.Column(scale=1):
+                                i2i_batch_size = gr.Slider(label="Batch Size", maximum=1, value=1)
+                                i2i_batch_count = gr.Slider(label="Batch Count", maximum=1, value=1)
+
+                        i2i_cfg_scale = gr.Slider(label="CFG Scale", minimum=1, maximum=20, value=7, step=1)
+                        i2i_denoising = gr.Slider(label="Denoising Strength", minimum=0, maximum=1, value=0.7, step=0.1)
+                        i2i_seed = gr.Number(label="Seed", value=-1)
+
+
+                with gr.Column(scale=2):
+                    i2i_image_output = gr.Image(value="https://images.prodia.xyz/8ede1a7c-c0ee-4ded-987d-6ffed35fc477.png")
+
+            i2i_text_button.click(img2img, inputs=[i2i_image_input, i2i_denoising, i2i_prompt, i2i_negative_prompt, model, i2i_steps, i2i_sampler, i2i_cfg_scale, i2i_width, i2i_height, i2i_seed], outputs=i2i_image_output)
 
         with gr.Tab("PNG Info"):
             def plaintext_to_html(text, classname=None):
@@ -289,6 +365,6 @@ with gr.Blocks(css=css) as demo:
             image_input.upload(get_exif_data, inputs=[image_input], outputs=exif_output)
             send_to_txt2img_btn.click(send_to_txt2img, inputs=[image_input], outputs=[tabs, prompt, negative_prompt, steps, seed,
                                                                                       model, sampler, width, height, cfg_scale])
-
+
 demo.queue(concurrency_count=32)
 demo.launch()
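The `image_to_base64` helper introduced in the first hunk returns a plain base64 string of the PNG-encoded image. A quick way to sanity-check it is to round-trip a PIL image through the encoder and back. The snippet below is a minimal sketch of that check: the encoder is copied from the diff, while `base64_to_image` and the solid-colour test image are illustrative and not part of the commit.

```python
import base64
from io import BytesIO

from PIL import Image


def image_to_base64(image):
    # Same logic as the helper added in this commit.
    buffered = BytesIO()
    image.save(buffered, format="PNG")
    img_str = base64.b64encode(buffered.getvalue())
    return img_str.decode('utf-8')


def base64_to_image(img_str):
    # Hypothetical inverse, not part of the commit: decode the string back into a PIL image.
    return Image.open(BytesIO(base64.b64decode(img_str)))


if __name__ == "__main__":
    original = Image.new("RGB", (64, 64), color=(200, 30, 30))  # throwaway test image
    encoded = image_to_base64(original)
    restored = base64_to_image(encoded)
    assert restored.size == original.size
    print(f"round-trip OK, payload is {len(encoded)} base64 characters")
```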
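The new `img2img` handler base64-encodes the uploaded image, hands a job dictionary to `prodia_client.transform(...)`, waits on the job with `prodia_client.wait(...)`, and returns its `imageUrl`. The sketch below builds that same dictionary locally without calling the API; `build_transform_payload` is a hypothetical helper, and its defaults mirror the img2img tab's UI defaults from the diff (25 steps, "Euler a", CFG 7, denoising 0.7, 512x512, seed -1).

```python
import base64
import json
from io import BytesIO

from PIL import Image


def image_to_base64(image):
    # Copy of the helper added in this commit.
    buffered = BytesIO()
    image.save(buffered, format="PNG")
    return base64.b64encode(buffered.getvalue()).decode("utf-8")


def build_transform_payload(image, prompt, negative_prompt=""):
    # Mirrors the dict that img2img() passes to prodia_client.transform();
    # the default values match the img2img tab's slider defaults.
    return {
        "imageData": image_to_base64(image),
        "denoising_strength": 0.7,
        "prompt": prompt,
        "negative_prompt": negative_prompt,
        "model": "absolutereality_v181.safetensors [3d9d4d2b]",
        "steps": 25,
        "sampler": "Euler a",
        "cfg_scale": 7,
        "width": 512,
        "height": 512,
        "seed": -1,
    }


if __name__ == "__main__":
    img = Image.new("RGB", (512, 512), "gray")  # stand-in for an uploaded image
    payload = build_transform_payload(img, "space warrior, ultrarealistic, soft lighting")
    payload["imageData"] = payload["imageData"][:48] + "..."  # shorten for printing only
    print(json.dumps(payload, indent=2))
```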
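The img2img tab is wired up the same way as txt2img: `Button.click(fn, inputs=[...], outputs=...)` passes the component values positionally into the handler and routes the return value to the output image. The minimal Gradio sketch below reproduces that pattern with a dummy handler in place of the Prodia call, so it runs without an API key; the component set is trimmed down and purely illustrative.

```python
import gradio as gr


def fake_img2img(input_image, denoising, prompt):
    # Stand-in for img2img(): a real handler would call the Prodia API here.
    # Echo the input image back so the wiring can be tested end to end.
    print(f"prompt={prompt!r}, denoising={denoising}")
    return input_image


with gr.Blocks() as demo:
    with gr.Row():
        image_in = gr.Image(type="pil", label="Input")
        image_out = gr.Image(label="Output")
    prompt = gr.Textbox(value="space warrior, ultrarealistic", label="Prompt")
    denoising = gr.Slider(minimum=0, maximum=1, value=0.7, step=0.1, label="Denoising Strength")
    run = gr.Button("Generate", variant="primary")

    # Same pattern as i2i_text_button.click(...) in the diff: inputs are passed to the
    # handler positionally, and the return value fills the output component.
    run.click(fake_img2img, inputs=[image_in, denoising, prompt], outputs=image_out)

if __name__ == "__main__":
    demo.launch()
```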