Fix snitching
app.py CHANGED
@@ -81,17 +81,11 @@ def make_example(image_path: Path, mask_path: Path) -> EditorValue:
 
 
 def remove_padding(image, original_size):
-    # Get current dimensions
-    padded_width, padded_height = image.size
     original_width, original_height = original_size
-
-
-    left = (padded_width - original_width) // 2
-    top = (padded_height - original_height) // 2
+    left = max((image.width - original_width) // 2, 0)
+    top = max((image.height - original_height) // 2, 0)
     right = left + original_width
     bottom = top + original_height
-
-    # Crop to original size
     return image.crop((left, top, right, bottom))
 
 
@@ -127,14 +121,10 @@ def infer(
         room_mask.getbbox(alpha_only=False)
     )
     # Add MASK_CONTEXT_PADDING (16 pixels) for the context
-    mask_bbox_x_min -= MASK_CONTEXT_PADDING
-    mask_bbox_x_min = max(mask_bbox_x_min, 0)
-    mask_bbox_y_min -= MASK_CONTEXT_PADDING
-    mask_bbox_y_min = max(mask_bbox_y_min, 0)
-    mask_bbox_x_max += MASK_CONTEXT_PADDING
-    mask_bbox_x_max = min(mask_bbox_x_max, room_mask.width)
-    mask_bbox_y_max += MASK_CONTEXT_PADDING
-    mask_bbox_y_max = min(mask_bbox_y_max, room_mask.height)
+    mask_bbox_x_min = max(mask_bbox_x_min - MASK_CONTEXT_PADDING, 0)
+    mask_bbox_y_min = max(mask_bbox_y_min - MASK_CONTEXT_PADDING, 0)
+    mask_bbox_x_max = min(mask_bbox_x_max + MASK_CONTEXT_PADDING, room_mask.width)
+    mask_bbox_y_max = min(mask_bbox_y_max + MASK_CONTEXT_PADDING, room_mask.height)
 
     bbox_longest_side = max(
         mask_bbox_x_max - mask_bbox_x_min,
@@ -157,7 +147,7 @@ def infer(
     room_image_cropped = ImageOps.fit(
         room_image_cropped,
         (max_dimension, max_dimension),
-        method=Image.Resampling.
+        method=Image.Resampling.BICUBIC,
         centering=(0.5, 0.5),
     )
 
@@ -167,7 +157,7 @@ def infer(
         mask_bbox_x_max,
         mask_bbox_y_max,
     ))
-
+    room_mask_cropped.save("room_mask_croppedv1.png")
     room_mask_cropped = ImageOps.pad(
         room_mask_cropped,
         (max_dimension, max_dimension),
@@ -178,12 +168,12 @@ def infer(
     room_mask_cropped = ImageOps.fit(
         room_mask_cropped,
         (max_dimension, max_dimension),
-        method=Image.Resampling.
+        method=Image.Resampling.BICUBIC,
         centering=(0.5, 0.5),
     )
 
-
-
+    room_image_cropped.save("room_image_cropped.png")
+    room_mask_cropped.save("room_mask_cropped.png")
 
     # _room_image = ImageOps.fit(
     #     _room_image,
@@ -239,8 +229,8 @@ def infer(
     prompt = (
         furniture_prompt + ".\n" + SYSTEM_PROMPT if furniture_prompt else SYSTEM_PROMPT
     )
-
-
+    image.save("image.png")
+    mask.save("mask.png")
     results_images = pipe(
         prompt=prompt,
         image=image,
@@ -264,7 +254,9 @@ def infer(
         max_dimension * 2,
         max_dimension,
     ))
-    image_generated = image_generated.resize(
+    image_generated = image_generated.resize(
+        (bbox_longest_side, bbox_longest_side), Image.Resampling.BICUBIC
+    )
     # Crop back to the bbox (remove the padding)
     image_generated = remove_padding(
         image_generated,