Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,122 +1,139 @@
-import gradio as gr
-import numpy as np
-import random
-import spaces
-import torch
-from diffusers import DiffusionPipeline
-
-dtype = torch.bfloat16
-device = "cuda" if torch.cuda.is_available() else "cpu"
-
-pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
-
-MAX_SEED = np.iinfo(np.int32).max
-MAX_IMAGE_SIZE = 2048
-
-@spaces.GPU()
-def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
-    if randomize_seed:
-        seed = random.randint(0, MAX_SEED)
-    generator = torch.Generator().manual_seed(seed)
-    image = pipe(
-        prompt = prompt,
-        width = width,
-        height = height,
-        num_inference_steps = num_inference_steps,
-        generator = generator,
-        guidance_scale=0.0
-    ).images[0]
-    return image, seed
-
-examples = [
+import gradio as gr
+import numpy as np
+import random
+import spaces
+import torch
+from diffusers import DiffusionPipeline
+
+dtype = torch.bfloat16
+device = "cuda" if torch.cuda.is_available() else "cpu"
+
+pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
+
+MAX_SEED = np.iinfo(np.int32).max
+MAX_IMAGE_SIZE = 2048
+
+@spaces.GPU()
+def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
+    if randomize_seed:
+        seed = random.randint(0, MAX_SEED)
+    generator = torch.Generator().manual_seed(seed)
+    image = pipe(
+        prompt = prompt,
+        width = width,
+        height = height,
+        num_inference_steps = num_inference_steps,
+        generator = generator,
+        guidance_scale=0.0
+    ).images[0]
+    return image, seed
+
+examples = [
+    "A captivating Instagram post for a tourist page highlighting the beauty of Budapest",
+    "A professional marketing advertisement featuring a stunning image of a luxury hotel in Budapest",
+    "A compelling social media banner promoting a travel package to Budapest",
+    "An engaging content piece for a travel blog showcasing the city's rich history and culture",
+    "A visually appealing Instagram post promoting a local event in Budapest",
+    "A marketing advertisement featuring a scenic image of Budapest's famous Chain Bridge",
+    "A social media banner promoting a food tour in Budapest",
+    "An Instagram post highlighting the vibrant nightlife of Budapest"
+]
+
+css="""
+#col-container {
+    margin: 0 auto;
+    max-width: 520px;
+}
+"""
+
+footer = """
+<div style="text-align: center; margin-top: 20px;">
+    <a href="https://www.linkedin.com/in/pejman-ebrahimi-4a60151a7/" target="_blank">LinkedIn</a> |
+    <a href="https://github.com/arad1367" target="_blank">GitHub</a> |
+    <a href="https://arad1367.pythonanywhere.com/" target="_blank">Live demo of my PhD defense</a>
+    <br>
+    Made with 💖 by Pejman Ebrahimi
+</div>
+"""
+
+with gr.Blocks(css=css, theme='gradio/soft') as demo:
+
+    with gr.Column(elem_id="col-container"):
+        gr.Markdown("""
+        # FLUX.1 Schnell Marketing Assistant
+
+        This app is based on FLUX.1 schnell and can help you manage your advertising activities: create a new logo, a marketing advertisement, a banner for social media campaigns, or engaging content for social networks.
+        """)
+
+        with gr.Row():
+
+            prompt = gr.Text(
+                label="Prompt",
+                show_label=False,
+                max_lines=1,
+                placeholder="Enter your prompt",
+                container=False,
+            )
+
+            run_button = gr.Button("Run", scale=0)
+
+        result = gr.Image(label="Result", show_label=False)
+
+        with gr.Accordion("Advanced Settings", open=False):
+
+            seed = gr.Slider(
+                label="Seed",
+                minimum=0,
+                maximum=MAX_SEED,
+                step=1,
+                value=0,
+            )
+
+            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+
+            with gr.Row():
+
+                width = gr.Slider(
+                    label="Width",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
+
+                height = gr.Slider(
+                    label="Height",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
+
+            with gr.Row():
+
+                num_inference_steps = gr.Slider(
+                    label="Number of inference steps",
+                    minimum=1,
+                    maximum=50,
+                    step=1,
+                    value=4,
+                )
+
+        gr.Examples(
+            examples = examples,
+            fn = infer,
+            inputs = [prompt],
+            outputs = [result, seed],
+            cache_examples="lazy"
+        )
+
+        gr.HTML(footer)
+
+    gr.on(
+        triggers=[run_button.click, prompt.submit],
+        fn = infer,
+        inputs = [prompt, seed, randomize_seed, width, height, num_inference_steps],
+        outputs = [result, seed]
+    )
+
+demo.launch()
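For reference, a minimal sketch of exercising the same generation settings the updated `infer` uses (FLUX.1-schnell, bfloat16, 4 steps, `guidance_scale=0.0`) outside the Gradio/Spaces UI. It assumes a local machine with `diffusers` installed and enough GPU memory for the model; the prompt and output filename are illustrative only, and the `@spaces.GPU()` decorator is omitted because it is specific to Hugging Face ZeroGPU Spaces.

```python
# Minimal standalone sketch (assumptions: local CUDA GPU, diffusers installed,
# enough VRAM for FLUX.1-schnell). Mirrors the settings used by infer() above;
# the prompt and output filename are illustrative only.
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
).to(device)

generator = torch.Generator().manual_seed(42)
image = pipe(
    prompt="A social media banner promoting a food tour in Budapest",
    width=1024,
    height=1024,
    num_inference_steps=4,   # schnell is distilled for few-step sampling
    generator=generator,
    guidance_scale=0.0,      # schnell is run without classifier-free guidance
).images[0]
image.save("budapest_banner.png")
```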