DemiPoto committed on
Commit
c84e807
·
verified ·
1 Parent(s): 4689c95

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +271 -12
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
3
  from random import randint
4
  from operator import itemgetter
5
  import bisect
6
- from all_models import tags_plus_models,models,models_plus_tags
7
  from datetime import datetime
8
  from externalmod import gr_Interface_load
9
  import asyncio
@@ -13,7 +13,7 @@ lock = RLock()
13
  HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
14
 
15
 
16
- nb_req_simult=75 ########
17
  nb_gallery_model=5
18
 
19
 
@@ -21,7 +21,8 @@ tempo_update_actu=3.0
21
  #incr_update_actu={}
22
 
23
  now2 = 0
24
- inference_timeout = 200
 
25
  MAX_SEED = 2**32-1
26
 
27
 
@@ -34,6 +35,7 @@ cache_id_image={}
34
  cache_list_task={}
35
  cache_text_actu={}
36
  from_reload={}
 
37
 
38
 
39
  def load_fn(models):
@@ -117,13 +119,13 @@ async def infer(model_str, prompt, nprompt="", height=None, width=None, steps=No
117
  image = str(Path(png_path).resolve())
118
  return image
119
  return None
120
- def gen_fn(model_str, prompt, nprompt="", height=None, width=None, steps=None, cfg=None, seed=-1):
121
  if model_str == 'NA':
122
  return None
123
  try:
124
  loop = asyncio.new_event_loop()
125
  result = loop.run_until_complete(infer(model_str, prompt, nprompt,
126
- height, width, steps, cfg, seed, inference_timeout))
127
  except (Exception, asyncio.CancelledError) as e:
128
  print(e)
129
  print(f"Task aborted: {model_str}")
@@ -166,7 +168,7 @@ def reset_cache_image_all_sessions(cache_image=cache_image):
166
  listT.clear()
167
  return
168
 
169
- def set_session(id):
170
  if id==0:
171
  randTemp=randint(1,MAX_SEED)
172
  cache_image[f"{randTemp}"]=[]
@@ -174,7 +176,12 @@ def set_session(id):
174
 
175
  cache_list_task[f"{randTemp}"]=[]
176
  cache_text_actu[f"{randTemp}"]={}
 
 
 
 
177
  from_reload[f"{randTemp}"]=False
 
178
  #incr_update_actu[f"{randTemp}"]=0
179
  return gr.Number(visible=False,value=randTemp)
180
  else :
@@ -307,6 +314,28 @@ def fonc_load_info(nb_of_models_to_gen,index_tag,index_first_model):
307
  list_models_temp.append(tags_plus_models[index_tag][2][i+index_first_model])
308
  str_temp+=f"\"{tags_plus_models[index_tag][2][i+index_first_model]}\",\n"
309
  return nb_of_models_to_gen,gr.Textbox(str_temp),gr.Dropdown(choices=[["",list_models_temp]], value=list_models_temp )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
310
  def fonc_load_info_custom(nb_of_models_to_gen,list_model_custom,index_first_model):
311
  str_temp=""
312
  list_models_temp=[]
@@ -342,7 +371,7 @@ def crea_list_task(id_session,list_param,list_models_to_gen,nb_images_by_prompt)
342
  dict_temp["task"].append({"prompt":p[0],"nprompt":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
343
  cache_list_task[f"{id_session}"].append(dict_temp)
344
 
345
- cache_text_actu[f"{id_session}"]={"nb_modules_use":nb_req_simult,"stop":False,"nb_fail":0,
346
  "nb_models_to_do":len(list_models_to_gen) ,"nb_models_tot":len(list_models_to_gen) ,
347
  "nb_tasks_to_do":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt ,
348
  "nb_tasks_tot":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt,
@@ -366,21 +395,28 @@ def fonc_update_actu(text_actu,id):
366
  return gr.Textbox(s)
367
 
368
  def fonc_update_actu_2(id):
369
- if id == 0:
 
 
370
  return gr.Textbox("waiting...")
371
  s=""
372
- s+=f"modules: {cache_text_actu[str(id)]['nb_modules_use']}/{nb_req_simult}\n"
373
- s+=f"models remaining: {cache_text_actu[str(id)]['nb_models_to_do']}/{cache_text_actu[str(id)]['nb_models_tot']}\n"
374
  i=0
 
375
  for d in cache_text_actu[str(id)]['progress']:
376
  i+=1
 
 
377
  s+=str(d)
378
  if i%10==0:
379
  s+=" "
380
  if i%50==0:
381
  s+="\n"
 
382
  s+="\n"
383
- s+=f"images remaining: {cache_text_actu[str(id)]['nb_tasks_to_do']}/{cache_text_actu[str(id)]['nb_tasks_tot']}\n"
 
 
 
384
  s+=f"fail attempt: {cache_text_actu[str(id)]['nb_fail']}\n"
385
  #s+=f"{tempo_update_actu*incr_update_actu[str(id)]} s"
386
  #incr_update_actu[str(id)]+=1
@@ -551,7 +587,103 @@ def fonc_add_to_text(text,list_models,index):
551
 
552
  def load_model_publ(choice_model_publ):
553
  return gr.Image(None,label=choice_model_publ,interactive=False),gr.Textbox(choice_model_publ,visible=False,show_label=False)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
554
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
555
  def make_me():
556
  with gr.Tab(" "):
557
  with gr.Column():
@@ -638,6 +770,8 @@ def make_me():
638
  index_tag,index_first_model)
639
  load_info=gr.Button("Load Models")
640
  load_info.click(fonc_load_info,[nb_of_models_to_gen,index_tag,index_first_model],[nb_of_models_to_gen,disp_info,list_models_to_gen])
 
 
641
 
642
  with gr.Accordion("Models Custom",open=False) :
643
  with gr.Row():
@@ -788,9 +922,134 @@ def make_me():
788
  row_gallery.change(lambda g,h: gr.Gallery(g,rows=h),[gallery_by_prompt,row_gallery],[gallery_by_prompt])
789
 
790
 
791
-
792
  button_restore_session.click(fonc_restore_session,[id_session],[list_param,disp_param.dataset,list_models_to_gen,nb_of_models_to_gen])
793
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
794
 
795
 
796
 
 
3
  from random import randint
4
  from operator import itemgetter
5
  import bisect
6
+ from all_models import tags_plus_models,models,models_plus_tags,find_warm_model_list
7
  from datetime import datetime
8
  from externalmod import gr_Interface_load
9
  import asyncio
 
13
  HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
14
 
15
 
16
+ nb_req_simult=10 ########
17
  nb_gallery_model=5
18
 
19
 
 
21
  #incr_update_actu={}
22
 
23
  now2 = 0
24
+ inference_timeout = 300
25
+ inference_timeout_w = 70
26
  MAX_SEED = 2**32-1
27
 
28
 
 
35
  cache_list_task={}
36
  cache_text_actu={}
37
  from_reload={}
38
+ cache_list_task_w={}
39
 
40
 
41
  def load_fn(models):
 
119
  image = str(Path(png_path).resolve())
120
  return image
121
  return None
122
+ def gen_fn(model_str, prompt, nprompt="", height=None, width=None, steps=None, cfg=None, seed=-1,timeout=inference_timeout):
123
  if model_str == 'NA':
124
  return None
125
  try:
126
  loop = asyncio.new_event_loop()
127
  result = loop.run_until_complete(infer(model_str, prompt, nprompt,
128
+ height, width, steps, cfg, seed, timeout))
129
  except (Exception, asyncio.CancelledError) as e:
130
  print(e)
131
  print(f"Task aborted: {model_str}")
 
168
  listT.clear()
169
  return
170
 
171
+ def set_session(id,warm=False):
172
  if id==0:
173
  randTemp=randint(1,MAX_SEED)
174
  cache_image[f"{randTemp}"]=[]
 
176
 
177
  cache_list_task[f"{randTemp}"]=[]
178
  cache_text_actu[f"{randTemp}"]={}
179
+ if warm:
180
+ cache_text_actu[f"{randTemp}"]["warm"]=True
181
+ else:
182
+ cache_text_actu[f"{randTemp}"]["warm"]=False
183
  from_reload[f"{randTemp}"]=False
184
+ cache_list_task_w[f"{randTemp}"]=[]
185
  #incr_update_actu[f"{randTemp}"]=0
186
  return gr.Number(visible=False,value=randTemp)
187
  else :
 
314
  list_models_temp.append(tags_plus_models[index_tag][2][i+index_first_model])
315
  str_temp+=f"\"{tags_plus_models[index_tag][2][i+index_first_model]}\",\n"
316
  return nb_of_models_to_gen,gr.Textbox(str_temp),gr.Dropdown(choices=[["",list_models_temp]], value=list_models_temp )
317
+
318
+ def load_random_models(nb_of_models_to_gen,index_tag):
319
+ str_temp=""
320
+ list_models_temp=[]
321
+ list_random=[]
322
+ if nb_of_models_to_gen>=len(tags_plus_models[index_tag][2]):
323
+ str_temp+="warning : to many model chosen"
324
+ nb_of_models_to_gen= len(tags_plus_models[index_tag][2])
325
+ str_temp+=f" - only {nb_of_models_to_gen} will be use\n\n"
326
+ list_models_temp=tags_plus_models[index_tag][2]
327
+ for m in list_models_temp:
328
+ str_temp+=f"\"{m}\",\n"
329
+ else :
330
+ list_random=tags_plus_models[index_tag][2].copy()
331
+ for i in range(nb_of_models_to_gen):
332
+ i_rand=randint(0,len(list_random)-1)
333
+ m=list_random.pop(i_rand)
334
+ list_models_temp.append(m)
335
+ str_temp+=f"\"{m}\",\n"
336
+ return nb_of_models_to_gen,gr.Textbox(str_temp),gr.Dropdown(choices=[["",list_models_temp]], value=list_models_temp )
337
+
338
+
339
  def fonc_load_info_custom(nb_of_models_to_gen,list_model_custom,index_first_model):
340
  str_temp=""
341
  list_models_temp=[]
 
371
  dict_temp["task"].append({"prompt":p[0],"nprompt":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
372
  cache_list_task[f"{id_session}"].append(dict_temp)
373
 
374
+ cache_text_actu[f"{id_session}"]={"nb_modules_use":nb_req_simult,"stop":False,"nb_fail":0,"warm":False,
375
  "nb_models_to_do":len(list_models_to_gen) ,"nb_models_tot":len(list_models_to_gen) ,
376
  "nb_tasks_to_do":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt ,
377
  "nb_tasks_tot":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt,
 
395
  return gr.Textbox(s)
396
 
397
  def fonc_update_actu_2(id):
398
+ if id == 0 :
399
+ return gr.Textbox("waiting...")
400
+ if cache_text_actu[str(id)]['warm']==True:
401
  return gr.Textbox("waiting...")
402
  s=""
 
 
403
  i=0
404
+ nb_ones=0
405
  for d in cache_text_actu[str(id)]['progress']:
406
  i+=1
407
+ if d==1:
408
+ nb_ones+=1
409
  s+=str(d)
410
  if i%10==0:
411
  s+=" "
412
  if i%50==0:
413
  s+="\n"
414
+
415
  s+="\n"
416
+ s+=f"modules: {cache_text_actu[str(id)]['nb_modules_use']}/{nb_req_simult} ({cache_text_actu[str(id)]['nb_modules_use']-nb_ones}/{cache_text_actu[str(id)]['nb_modules_use']})\n"
417
+ s+=f"models remaining: {cache_text_actu[str(id)]['nb_models_to_do']}/{cache_text_actu[str(id)]['nb_models_tot']}\n"
418
+
419
+ s+=f"images remaining(done): {cache_text_actu[str(id)]['nb_tasks_to_do']}({cache_text_actu[str(id)]['nb_tasks_tot']-cache_text_actu[str(id)]['nb_tasks_to_do']})/{cache_text_actu[str(id)]['nb_tasks_tot']}\n"
420
  s+=f"fail attempt: {cache_text_actu[str(id)]['nb_fail']}\n"
421
  #s+=f"{tempo_update_actu*incr_update_actu[str(id)]} s"
422
  #incr_update_actu[str(id)]+=1
 
587
 
588
  def load_model_publ(choice_model_publ):
589
  return gr.Image(None,label=choice_model_publ,interactive=False),gr.Textbox(choice_model_publ,visible=False,show_label=False)
590
+
591
+
592
+ def set_tasks_w(id_session,list_p,nb_i):
593
+ list_t=[]
594
+ for p in list_p:
595
+ cache_list_task_w[f"{id_session}"].append([nb_i,{"prompt":p[0],"nprompt":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]},0])
596
+
597
+ cache_text_actu[f"{id_session}"]={"nb_modules_use":nb_req_simult,"stop":False,"nb_fail":0,"warm":True,"nb_prompt_to_do":len(list_p),"nb_prompt_tot":len(list_p),
598
+ "nb_tasks_to_do":len(list_p)*nb_i ,"nb_tasks_tot":len(list_p)*nb_i}
599
+ return
600
+
601
+ def fonc_start_w(id_session,id_module,s,cont,nb_modules,tag):
602
+ if cont==False or id_module>=nb_modules:
603
+ cache_text_actu[f"{id_session}"]["nb_modules_use"]-=1
604
+ print("manual stop")
605
+ return None,gr.Textbox(s),gr.Number(randint(1,MAX_SEED))
606
+
607
+ find_task=False
608
+ task={}
609
+ for t in cache_list_task_w[f"{id_session}"]:
610
+ if not find_task:
611
+ if t[0]>0:
612
+ t[0]-=1
613
+ t[2]+=1
614
+ task=t[1].copy()
615
+ find_task=True
616
+ if not find_task:
617
+ cache_text_actu[f"{id_session}"]["nb_modules_use"]-=1
618
+ return None,gr.Textbox(s),gr.Number(randint(1,MAX_SEED))
619
 
620
+ if tag == "":
621
+ tagT=["stable-diffusion-xl"]
622
+ else:
623
+ tagT=["stable-diffusion-xl",tag]
624
+ models_temp , models_plus_tags_temp = find_warm_model_list("John6666", tagT, "", "last_modified", 10000)
625
+ models_rand=[]
626
+ for m in models_temp:
627
+ if m in models:
628
+ models_rand.append(m)
629
+ if len(models_rand)!=0:
630
+ model_actu=models_temp[randint(0,len(models_rand)-1)]
631
+ print(f"find model : {model_actu}")
632
+ else:
633
+ print("no warm model")
634
+ cache_text_actu[f"{id_session}"]["nb_modules_use"]-=1
635
+ return None,gr.Textbox(s),gr.Number(randint(1,MAX_SEED))
636
+
637
+
638
+ print("begin gen image:")
639
+ print(model_actu)
640
+ print(task)
641
+ result=gen_fn(model_actu, task["prompt"], task["nprompt"], task["height"], task["width"], task["steps"], task["cfg"], task["seed"],inference_timeout_w)
642
+ print("reception")
643
+ if result!=None:
644
+ id_image=len(cache_image[f"{id_session}"])
645
+ for t in cache_list_task_w[f"{id_session}"]:
646
+ if t[1]==task:
647
+ t[2]-=1
648
+ if t[0]+t[2]<=0:
649
+ cache_list_task_w[f"{id_session}"].remove(t)
650
+ cache_text_actu[f"{id_session}"]["nb_prompt_to_do"]-=1 #################################
651
+ cache_text_actu[f"{id_session}"]["nb_tasks_to_do"]-=1
652
+
653
+ task["id_image"]=id_image
654
+ task["model"]=model_actu
655
+ task["image"]=result
656
+ cache_image[f"{id_session}"].append(task)
657
+ print("image saved\n")
658
+ print(task)
659
+ else:
660
+ for t in cache_list_task_w[f"{id_session}"]:
661
+ if t[1]==task:
662
+ t[0]+=1
663
+ t[2]-=1
664
+ cache_text_actu[f"{id_session}"]["nb_fail"]+=1
665
+ print("fail to generate\n")
666
+ nb_task_to_do=cache_text_actu[str(id_session)]["nb_tasks_to_do"]
667
+ print(f"\n {nb_task_to_do} tasks to do\n")
668
+ return result , gr.Textbox(s+"1"),gr.Number(randint(1,MAX_SEED))
669
+
670
+
671
+ def fonc_update_actu_w(id):
672
+ if id == 0 :
673
+ return gr.Textbox("waiting...")
674
+ if cache_text_actu[str(id)]['warm']==False:
675
+ return gr.Textbox("waiting...")
676
+ s=""
677
+ s+=f"modules: {cache_text_actu[str(id)]['nb_modules_use']}/{nb_req_simult} \n"
678
+ s+=f"prompts remaining: {cache_text_actu[str(id)]['nb_prompt_to_do']}/{cache_text_actu[str(id)]['nb_prompt_tot']}\n"
679
+
680
+ s+=f"images remaining(done): {cache_text_actu[str(id)]['nb_tasks_to_do']}({cache_text_actu[str(id)]['nb_tasks_tot']-cache_text_actu[str(id)]['nb_tasks_to_do']})/{cache_text_actu[str(id)]['nb_tasks_tot']}\n"
681
+ s+=f"fail attempt: {cache_text_actu[str(id)]['nb_fail']}\n"
682
+ #s+=f"{tempo_update_actu*incr_update_actu[str(id)]} s"
683
+ #incr_update_actu[str(id)]+=1
684
+ s+=f"{randint(1,MAX_SEED)}"
685
+ return gr.Textbox(s)
686
+
687
  def make_me():
688
  with gr.Tab(" "):
689
  with gr.Column():
 
770
  index_tag,index_first_model)
771
  load_info=gr.Button("Load Models")
772
  load_info.click(fonc_load_info,[nb_of_models_to_gen,index_tag,index_first_model],[nb_of_models_to_gen,disp_info,list_models_to_gen])
773
+ button_load_random_models=gr.Button("Load Random Models")
774
+ button_load_random_models.click(load_random_models,[nb_of_models_to_gen,index_tag],[nb_of_models_to_gen,disp_info,list_models_to_gen])
775
 
776
  with gr.Accordion("Models Custom",open=False) :
777
  with gr.Row():
 
922
  row_gallery.change(lambda g,h: gr.Gallery(g,rows=h),[gallery_by_prompt,row_gallery],[gallery_by_prompt])
923
 
924
 
 
925
  button_restore_session.click(fonc_restore_session,[id_session],[list_param,disp_param.dataset,list_models_to_gen,nb_of_models_to_gen])
926
 
927
+ with gr.Tab(" Warm ",visible=False) as tab_warm:
928
+ button_test_pass.click(test_pass_aff,[test_pass],[tab_warm])
929
+ with gr.Column():
930
+ with gr.Group():
931
+ with gr.Row():
932
+ with gr.Column(scale=4):
933
+ txt_input_w = gr.Textbox(label='Your prompt:', lines=4, interactive = True)
934
+ neg_input_w = gr.Textbox(label='Negative prompt:', lines=4, interactive = True)
935
+ with gr.Column(scale=4):
936
+ with gr.Row():
937
+ width_w = gr.Slider(label="Width", info="If 0, the default value is used.", maximum=2024, step=32, value=0, interactive = True)
938
+ height_w = gr.Slider(label="Height", info="If 0, the default value is used.", maximum=2024, step=32, value=0, interactive = True)
939
+ with gr.Row():
940
+ choice_ratio_w = gr.Dropdown(label="Ratio Width/Height",
941
+ info="OverWrite Width and Height (W*H<1024*1024)",
942
+ show_label=True, choices=list(list_ratios) , interactive = True, value=list_ratios[0][1])
943
+ choice_ratio_w.change(ratio_chosen,[choice_ratio_w,width,height_w],[width_w,height_w])
944
+ with gr.Row():
945
+ steps_w = gr.Slider(label="Number of inference steps", info="If 0, the default value is used.", maximum=100, step=1, value=0, interactive = True)
946
+ cfg_w = gr.Slider(label="Guidance scale", info="If 0, the default value is used.", maximum=30.0, step=0.1, value=0, interactive = True)
947
+ seed_w = gr.Slider(label="Seed", info="Randomize Seed if -1.", minimum=-1, maximum=MAX_SEED, step=1, value=-1, interactive = True)
948
+
949
+ add_param_w=gr.Button("Add to the list")
950
+ del_param_w=gr.Button("Delete to the list")
951
+
952
+ list_param_w=gr.Dropdown(choices=[["a",[["","",0,0,0,0,-1]]]], value=[["","",0,0,0,0,-1]], visible=False)
953
+ disp_param_w = gr.Examples(
954
+ label="list of prompt",
955
+ examples=list_param_w.value,
956
+ inputs=[txt_input_w,neg_input_w,width_w,height_w,steps_w,cfg_w,seed_w],
957
+ outputs=[txt_input_w,neg_input_w,width_w,height_w,steps_w,cfg_w,seed_w],
958
+ )
959
+ bool_warm=gr.Checkbox(visible=False,value=True)
960
+ add_param_w.click(fonc_add_param,[list_param_w,txt_input_w,neg_input_w,width_w,height_w,steps_w,cfg_w,seed_w],[disp_param_w.dataset,list_param_w])
961
+ add_param_w.click(set_session,[id_session,bool_warm],[id_session])
962
+ del_param_w.click(fonc_del_param,[list_param_w,txt_input_w,neg_input_w,width_w,height_w,steps_w,cfg_w,seed_w],[disp_param_w.dataset,list_param_w])
963
+
964
+ with gr.Row():
965
+ nb_images_by_prompt_w=gr.Number(2,label="Images by prompt",interactive = True)
966
+ nb_modules_w=gr.Slider(label="Module use", minimum=1, maximum=nb_req_simult, step=1, value=40, interactive = True)
967
+ text_tag_w=gr.Textbox("",label="Tag",interactive = True,lines=1)
968
+
969
+ with gr.Column():
970
+
971
+ button_load_task_w=gr.Button("LOAD")
972
+ button_load_task_w.click(set_tasks_w,[id_session,list_param_w,nb_images_by_prompt_w],[])
973
+ button_start_w=gr.Button("START")
974
+ button_stop_w=gr.Button("STOP")
975
+ cont_w=gr.Checkbox(True,visible=False)
976
+ button_start_w.click(lambda:True,[],[cont_w])
977
+ button_stop_w.click(lambda:False,[],[cont_w])
978
+ text_actu_w=gr.Textbox(fonc_update_actu_w,inputs=id_session,every=tempo_update_actu,label="in progress",interactive=False,lines=6)############
979
+ update_actu_w=gr.Number(0,visible=False)
980
+
981
+ with gr.Accordion("Gallery Parameters",open=False) :
982
+ with gr.Row():
983
+ with gr.Column():
984
+ set_height_gallery_w=gr.Checkbox(True,label="set height",show_label=True)
985
+ height_gallery_w=gr.Number(650,label="height",show_label=True)
986
+ col_gallery_w=gr.Number(5,label="nb columns",show_label=True)
987
+ row_gallery_w=gr.Number(4,label="nb row",show_label=True)
988
+ with gr.Column():
989
+ button_reset_cache_image_w=gr.Button("Reset Images")
990
+ button_reset_cache_image_w.click(reset_cache_image,[id_session],[])
991
+ button_reset_cache_image_all_session_w=gr.Button("Reset Images ALL SESSION")
992
+ button_reset_cache_image_all_session_w.click(reset_cache_image_all_sessions,[],[])
993
+
994
+ with gr.Row():
995
+ outputs_w=[]
996
+ id_modules_w=[]
997
+ states_w=[]
998
+ for i in range(nb_req_simult):
999
+ outputs_w.append(gr.Image(None,interactive=False,visible=False))
1000
+ id_modules_w.append(gr.Number(i,interactive=False,visible=False))
1001
+ states_w.append(gr.Textbox("1",interactive=False,visible=False))
1002
+ for o,i,s in zip(outputs_w,id_modules_w,states_w):
1003
+ s.change(fonc_start_w,[id_session,i,s,cont_w,nb_modules_w,text_tag_w],[o,s,update_actu_w])#################
1004
+ gen_event = gr.on(triggers=[button_start_w.click], fn=fonc_init,inputs=[s], outputs=[s])###################
1005
+
1006
+
1007
+ with gr.Column(scale=2):
1008
+ gallery_w = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
1009
+ interactive=False, show_share_button=False, container=True, format="png",
1010
+ preview=True, object_fit="contain",columns=5,rows=4,height=650)
1011
+ gallery_by_prompt_w = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
1012
+ interactive=False, show_share_button=False, container=True, format="png",
1013
+ preview=True, object_fit="contain",columns=5,rows=4,visible=False,height=650)
1014
+
1015
+ set_height_gallery_w.change(lambda g,h,s: gr.Gallery(g,height=h) if s else gr.Gallery(g,height=None),
1016
+ [gallery_w,height_gallery_w,set_height_gallery_w],[gallery_w])
1017
+ height_gallery_w.change(lambda g,h: gr.Gallery(g,height=h),[gallery_w,height_gallery_w],[gallery_w])
1018
+ col_gallery_w.change(lambda g,h: gr.Gallery(g,columns=h),[gallery_w,col_gallery_w],[gallery_w])
1019
+ row_gallery_w.change(lambda g,h: gr.Gallery(g,rows=h),[gallery_w,row_gallery_w],[gallery_w])
1020
+
1021
+ set_height_gallery_w.change(lambda g,h,s: gr.Gallery(g,height=h) if s else gr.Gallery(g,height=None),
1022
+ [gallery_by_prompt_w,height_gallery_w,set_height_gallery_w],[gallery_by_prompt_w])
1023
+ height_gallery_w.change(lambda g,h: gr.Gallery(g,height=h),[gallery_by_prompt_w,height_gallery_w],[gallery_by_prompt_w])
1024
+ col_gallery_w.change(lambda g,h: gr.Gallery(g,columns=h),[gallery_by_prompt_w,col_gallery_w],[gallery_by_prompt_w])
1025
+ row_gallery_w.change(lambda g,h: gr.Gallery(g,rows=h),[gallery_by_prompt_w,row_gallery_w],[gallery_by_prompt_w])
1026
+
1027
+ with gr.Column(scale=3):
1028
+ button_load_gallery_w=gr.Button("Load Gallery All")
1029
+ button_load_gallery_w.click(fonc_load_gallery,[id_session,gallery_w],[gallery_w])
1030
+
1031
+ with gr.Accordion("Gallery by Prompt",open=False) :
1032
+ index_gallery_by_prompt_w=gr.Number(0,visible=False)
1033
+ button_load_gallery_by_prompt_w=gr.Button("Load Gallery by prompt")
1034
+ text_gallery_by_prompt_w=gr.Textbox(f"{index_gallery_by_prompt_w.value+1}/{len(list_param_w.value)}",show_label=False)
1035
+ index_gallery_by_prompt_w.change(lambda i,p:gr.Textbox(f"{i+1}/{len(p)}"),[index_gallery_by_prompt_w,list_param_w],[text_gallery_by_prompt_w])
1036
+ button_load_gallery_by_prompt_w.click(load_gallery_by_prompt,
1037
+ [id_session,gallery_by_prompt_w,index_gallery_by_prompt_w,list_param_w],[gallery_by_prompt_w])
1038
+ gen_event_gallery_by_prompt_w = gr.on(triggers=[button_load_gallery_by_prompt_w.click], fn=lambda g:gr.Gallery(g,visible=False),
1039
+ inputs=[gallery_w], outputs=[gallery_w])
1040
+ gen_event_gallery_w = gr.on(triggers=[button_load_gallery_w.click], fn=lambda g:gr.Gallery(g,visible=False),
1041
+ inputs=[gallery_by_prompt_w], outputs=[gallery_by_prompt_w])
1042
+ with gr.Row():
1043
+ button_gallery_prev_prompt_w=gr.Button("Prev prompt")
1044
+ button_gallery_next_prompt_w=gr.Button("Next prompt")
1045
+ button_gallery_next_prompt_w.click(lambda i,p: (i+1)%len(p),[index_gallery_by_prompt_w,list_param_w],[index_gallery_by_prompt_w])
1046
+ button_gallery_prev_prompt_w.click(lambda i,p: (i-1)%len(p),[index_gallery_by_prompt_w,list_param_w],[index_gallery_by_prompt_w])
1047
+ index_gallery_by_prompt_w.change(load_gallery_by_prompt,
1048
+ [id_session,gallery_by_prompt_w,index_gallery_by_prompt_w,list_param_w],[gallery_by_prompt_w])
1049
+
1050
+
1051
+
1052
+
1053
 
1054
 
1055