Spaces
Commit · 89f03cc
1 Parent(s): e330fbf
update
app.py
CHANGED
@@ -1,6 +1,6 @@
 import huggingface_hub
 import gradio as gr
-from stable_diffusion_reference_only.pipelines.
+from stable_diffusion_reference_only.pipelines.pipeline_stable_diffusion_reference_only import (
     StableDiffusionReferenceOnlyPipeline,
 )
 from anime_segmentation import get_model as get_anime_segmentation_model
@@ -29,6 +29,7 @@ segment_model = get_anime_segmentation_model(
     model_path=huggingface_hub.hf_hub_download("skytnt/anime-seg", "isnetis.ckpt")
 ).to(device)
 
+
 def character_segment(img):
     if img is None:
         return None
@@ -36,6 +37,7 @@ def character_segment(img):
     img = cv2.cvtColor(img, cv2.COLOR_RGBA2RGB)
     return img
 
+
 def color_inversion(img):
     if img is None:
         return None
@@ -85,6 +87,13 @@ def style_transfer(prompt, blueprint, num_inference_steps):
     blueprint = get_line_art(blueprint)
     blueprint = color_inversion(blueprint)
     return inference(prompt, blueprint, num_inference_steps)
+
+
+def resize(img, new_height, new_width):
+    img = Image.fromarray(img).resize((int(new_width), int(new_height)), Image.BILINEAR)
+    return np.array(img)
+
+
 with gr.Blocks() as demo:
     gr.Markdown(
         """
@@ -94,7 +103,21 @@ with gr.Blocks() as demo:
     )
     with gr.Row():
         with gr.Column():
-            prompt_input_compoent = gr.Image(
+            prompt_input_compoent = gr.Image(label="prompt")
+            with gr.Row():
+                prompt_new_height = gr.Number(512, label="height", minimum=1)
+                prompt_new_width = gr.Number(512, label="width", minimum=1)
+                prompt_resize_button = gr.Button("prompt resize")
+            prompt_resize_button.click(
+                resize,
+                inputs=[
+                    prompt_input_compoent,
+                    prompt_new_height,
+                    prompt_new_width,
+                ],
+                outputs=prompt_input_compoent,
+            )
+
             prompt_character_segment_button = gr.Button(
                 "character segment",
             )
@@ -104,7 +127,22 @@ with gr.Blocks() as demo:
                 outputs=prompt_input_compoent,
            )
         with gr.Column():
-            blueprint_input_compoent = gr.Image(
+            blueprint_input_compoent = gr.Image(label="blueprint")
+
+            with gr.Row():
+                blueprint_new_height = gr.Number(512, label="height", minimum=1)
+                blueprint_new_width = gr.Number(512, label="width", minimum=1)
+                blueprint_resize_button = gr.Button("blueprint resize")
+            blueprint_resize_button.click(
+                resize,
+                inputs=[
+                    blueprint_input_compoent,
+                    blueprint_new_height,
+                    blueprint_new_width,
+                ],
+                outputs=blueprint_input_compoent,
+            )
+
             blueprint_character_segment_button = gr.Button("character segment")
             blueprint_character_segment_button.click(
                 character_segment,
@@ -128,7 +166,7 @@ with gr.Blocks() as demo:
                 outputs=blueprint_input_compoent,
             )
         with gr.Column():
-            result_output_component = gr.Image(
+            result_output_component = gr.Image(label="result")
             num_inference_steps_input_component = gr.Number(
                 20, label="num inference steps", minimum=1, maximum=1000, step=1
             )
@@ -179,5 +217,6 @@
         fn=lambda x, y: None,
         cache_examples=True,
     )
+
 if __name__ == "__main__":
     demo.queue(max_size=5).launch()
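The substance of this commit is the new resize helper and the height/width controls wired to it for the prompt and blueprint images. As a quick reference, below is a minimal, self-contained sketch of that pattern, runnable outside the Space. It is an illustration, not the committed code: the None guard and the standalone demo scaffolding are additions (the committed helper assumes an image is already loaded), the component names here are illustrative, and app.py is assumed to already import PIL's Image and numpy for its other handlers.

import gradio as gr
import numpy as np
from PIL import Image


def resize(img, new_height, new_width):
    # Added guard for the empty case, mirroring character_segment/color_inversion;
    # the committed version omits this and assumes an image is present.
    if img is None:
        return None
    # gr.Image passes an RGB NumPy array; PIL's resize takes (width, height).
    img = Image.fromarray(img).resize((int(new_width), int(new_height)), Image.BILINEAR)
    return np.array(img)


with gr.Blocks() as demo:
    image_input = gr.Image(label="prompt")
    with gr.Row():
        new_height = gr.Number(512, label="height", minimum=1)
        new_width = gr.Number(512, label="width", minimum=1)
        resize_button = gr.Button("resize")
    # Clicking the button replaces the displayed image with the resized copy,
    # the same wiring the commit adds for both the prompt and blueprint columns.
    resize_button.click(
        resize,
        inputs=[image_input, new_height, new_width],
        outputs=image_input,
    )

if __name__ == "__main__":
    demo.launch()

One callback serves both columns in the commit; only the bound components differ, which is why two .click() handlers can be registered against the same resize function.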