Spaces:
openfree
/
Running on Zero

openfree committed on
Commit
9baf2c7
•
1 Parent(s): 72c66da

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -24
app.py CHANGED
@@ -157,31 +157,33 @@ def create_demo(args, model_name: str, device: str = "cuda" if torch.cuda.is_ava
157
  offload: bool = False):
158
 
159
  with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
 
 
160
 
161
  with gr.Row():
162
  with gr.Column():
163
- prompt = gr.Textbox(label="Prompt", value="portrait, color, cinematic")
164
- id_image = gr.Image(label="ID Image")
165
- generate_btn = gr.Button("Generate")
166
 
167
  with gr.Column():
168
- output_image = gr.Image(label="Generated Image")
169
 
170
  with gr.Row():
171
  with gr.Column():
172
- gr.Markdown("## Examples")
173
 
174
  all_examples = [
175
- ['a woman holding sign with glowing green text \"PuLID for FLUX\"', 'example_inputs/liuyifei.png'],
176
- ['portrait, side view', 'example_inputs/liuyifei.png'],
177
- ['white-haired woman with vr technology atmosphere', 'example_inputs/liuyifei.png'],
178
- ['a young child is eating Icecream', 'example_inputs/liuyifei.png'],
179
- ['a man is holding a sign with text \"PuLID for FLUX\", winter, snowing', 'example_inputs/pengwei.jpg'],
180
- ['portrait, candle light', 'example_inputs/pengwei.jpg'],
181
- ['profile shot dark photo of a 25-year-old male with smoke', 'example_inputs/pengwei.jpg'],
182
- ['American Comics, 1boy', 'example_inputs/pengwei.jpg'],
183
- ['portrait, pixar', 'example_inputs/pengwei.jpg'],
184
- ['portrait, made of ice sculpture', 'example_inputs/lecun.jpg'],
185
  ]
186
 
187
  example_images = [example[1] for example in all_examples]
@@ -189,7 +191,7 @@ def create_demo(args, model_name: str, device: str = "cuda" if torch.cuda.is_ava
189
 
190
  gallery = gr.Gallery(
191
  value=list(zip(example_images, example_captions)),
192
- label="Example Gallery",
193
  show_label=False,
194
  elem_id="gallery",
195
  columns=5,
@@ -219,7 +221,7 @@ def create_demo(args, model_name: str, device: str = "cuda" if torch.cuda.is_ava
219
  prompt,
220
  id_image,
221
  gr.Slider(0.0, 3.0, 1, step=0.05, visible=False), # id_weight
222
- gr.Textbox("bad quality, worst quality, text, signature, watermark, extra limbs", visible=False), # neg_prompt
223
  gr.Slider(1.0, 10.0, 1, step=0.1, visible=False), # true_cfg
224
  gr.Slider(0, 20, 1, step=1, visible=False), # timestep_to_start_cfg
225
  gr.Slider(128, 512, 128, step=128, visible=False), # max_sequence_length
@@ -234,17 +236,20 @@ if __name__ == "__main__":
234
 
235
  parser = argparse.ArgumentParser(description="PuLID for FLUX.1-dev")
236
  parser.add_argument("--name", type=str, default="flux-dev", choices=list('flux-dev'),
237
- help="currently only support flux-dev")
238
  parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu",
239
- help="Device to use")
240
- parser.add_argument("--offload", action="store_true", help="Offload model to CPU when not in use")
241
- parser.add_argument("--port", type=int, default=8080, help="Port to use")
242
- parser.add_argument("--dev", action='store_true', help="Development mode")
243
- parser.add_argument("--pretrained_model", type=str, help='for development')
244
  args = parser.parse_args()
245
 
246
  import huggingface_hub
247
  huggingface_hub.login(os.getenv('HF_TOKEN'))
248
 
249
  demo = create_demo(args, args.name, args.device, args.offload)
250
- demo.launch()
 
 
 
 
157
  offload: bool = False):
158
 
159
  with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
160
+ gr.Markdown("# AI ํฌํ†  ์ง€๋‹ˆ: ์šฐ์ฃผ")
161
+ gr.Markdown("## ์ด์šฉ ์•ˆ๋‚ด: 1) ์˜ˆ์‹œ์ค‘ ํ•˜๋‚˜๋ฅผ ์„ ํƒ. 2) ์นด๋ฉ”๋ผ ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜๊ณ  ์–ผ๊ตด์ด ๋ณด์ด๋ฉด ์นด๋ฉ”๋ผ ๋ฒ„ํŠผ ํด๋ฆญ. 3) '์ƒ์„ฑ' ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜๊ณ  ๊ธฐ๋‹ค๋ฆฌ๋ฉด ๋ฉ๋‹ˆ๋‹ค.")
162
 
163
  with gr.Row():
164
  with gr.Column():
165
+ prompt = gr.Textbox(label="ํ”„๋กฌํ”„ํŠธ", value="์ดˆ์ƒํ™”, ์ƒ‰๊ฐ, ์˜ํ™”์ ")
166
+ id_image = gr.Image(label="ID ์ด๋ฏธ์ง€", sources=["webcam", "upload"], type="numpy")
167
+ generate_btn = gr.Button("์ƒ์„ฑ")
168
 
169
  with gr.Column():
170
+ output_image = gr.Image(label="์ƒ์„ฑ๋œ ์ด๋ฏธ์ง€")
171
 
172
  with gr.Row():
173
  with gr.Column():
174
+ gr.Markdown("## ์˜ˆ์‹œ")
175
 
176
  all_examples = [
177
+ ['์—ฌ์ž๊ฐ€ "PuLID for FLUX"๋ผ๊ณ  ์“ฐ์ธ ๋น›๋‚˜๋Š” ๋…น์ƒ‰ ํ‘œ์ง€ํŒ์„ ๋“ค๊ณ  ์žˆ๋‹ค', 'example_inputs/liuyifei.png'],
178
+ ['์˜†๋ชจ์Šต ์ดˆ์ƒํ™”', 'example_inputs/liuyifei.png'],
179
+ ['VR ๊ธฐ์ˆ  ๋ถ„์œ„๊ธฐ์˜ ํฐ ๋จธ๋ฆฌ ์—ฌ์„ฑ', 'example_inputs/liuyifei.png'],
180
+ ['์–ด๋ฆฐ ์•„์ด๊ฐ€ ์•„์ด์Šคํฌ๋ฆผ์„ ๋จน๊ณ  ์žˆ๋‹ค', 'example_inputs/liuyifei.png'],
181
+ ['๋‚จ์ž๊ฐ€ "PuLID for FLUX"๋ผ๊ณ  ์“ฐ์ธ ํ‘œ์ง€ํŒ์„ ๋“ค๊ณ  ์žˆ๋‹ค, ๊ฒจ์šธ, ๋ˆˆ ๋‚ด๋ฆผ', 'example_inputs/pengwei.jpg'],
182
+ ['์ดˆ์ƒํ™”, ์ด›๋ถˆ ์กฐ๋ช…', 'example_inputs/pengwei.jpg'],
183
+ ['25์„ธ ๋‚จ์„ฑ์˜ ์–ด๋‘์šด ํ”„๋กœํ•„ ์‚ฌ์ง„, ์ž…์—์„œ ์—ฐ๊ธฐ๊ฐ€ ๋‚˜์˜ค๊ณ  ์žˆ์Œ', 'example_inputs/pengwei.jpg'],
184
+ ['๋ฏธ๊ตญ ๋งŒํ™” ์Šคํƒ€์ผ, ์†Œ๋…„ 1๋ช…', 'example_inputs/pengwei.jpg'],
185
+ ['์ดˆ์ƒํ™”, ํ”ฝ์‚ฌ ์Šคํƒ€์ผ', 'example_inputs/pengwei.jpg'],
186
+ ['์ดˆ์ƒํ™”, ์–ผ์Œ ์กฐ๊ฐ์ƒ', 'example_inputs/lecun.jpg'],
187
  ]
188
 
189
  example_images = [example[1] for example in all_examples]
 
191
 
192
  gallery = gr.Gallery(
193
  value=list(zip(example_images, example_captions)),
194
+ label="์˜ˆ์‹œ ๊ฐค๋Ÿฌ๋ฆฌ",
195
  show_label=False,
196
  elem_id="gallery",
197
  columns=5,
 
221
  prompt,
222
  id_image,
223
  gr.Slider(0.0, 3.0, 1, step=0.05, visible=False), # id_weight
224
+ gr.Textbox("์ €ํ’ˆ์งˆ, ์ตœ์•…์˜ ํ’ˆ์งˆ, ํ…์ŠคํŠธ, ์„œ๋ช…, ์›Œํ„ฐ๋งˆํฌ, ์—ฌ๋ถ„์˜ ํŒ”๋‹ค๋ฆฌ", visible=False), # neg_prompt
225
  gr.Slider(1.0, 10.0, 1, step=0.1, visible=False), # true_cfg
226
  gr.Slider(0, 20, 1, step=1, visible=False), # timestep_to_start_cfg
227
  gr.Slider(128, 512, 128, step=128, visible=False), # max_sequence_length
 
236
 
237
  parser = argparse.ArgumentParser(description="PuLID for FLUX.1-dev")
238
  parser.add_argument("--name", type=str, default="flux-dev", choices=list('flux-dev'),
239
+ help="ํ˜„์žฌ๋Š” flux-dev๋งŒ ์ง€์›ํ•ฉ๋‹ˆ๋‹ค")
240
  parser.add_argument("--device", type=str, default="cuda" if torch.cuda.is_available() else "cpu",
241
+ help="์‚ฌ์šฉํ•  ๋””๋ฐ”์ด์Šค")
242
+ parser.add_argument("--offload", action="store_true", help="์‚ฌ์šฉํ•˜์ง€ ์•Š์„ ๋•Œ ๋ชจ๋ธ์„ CPU๋กœ ์˜ฎ๊น๋‹ˆ๋‹ค")
243
+ parser.add_argument("--port", type=int, default=8080, help="์‚ฌ์šฉํ•  ํฌํŠธ")
244
+ parser.add_argument("--dev", action='store_true', help="๊ฐœ๋ฐœ ๋ชจ๋“œ")
245
+ parser.add_argument("--pretrained_model", type=str, help='๊ฐœ๋ฐœ์šฉ')
246
  args = parser.parse_args()
247
 
248
  import huggingface_hub
249
  huggingface_hub.login(os.getenv('HF_TOKEN'))
250
 
251
  demo = create_demo(args, args.name, args.device, args.offload)
252
+ demo.launch()
253
+
254
+
255
+