Update app.py

app.py CHANGED
@@ -23,6 +23,8 @@ nb_mod_dif=20
 nb_models=nb_mod_dif*nb_rep
 
 cache_image={}
+cache_id_image={}
+cache_list_task={}
 
 
 def load_fn(models):
@@ -156,6 +158,9 @@ def set_session(id):
     if id==0:
         randTemp=randint(1,MAX_SEED)
         cache_image[f"{randTemp}"]=[]
+        cache_id_image[f"{randTemp}"]=[]
+
+        cache_list_task[f"{randTemp}"]=[]
         return gr.Number(visible=False,value=randTemp)
     else :
         return id
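Note on the two hunks above: all three caches are keyed with the stringified random id (f"{randTemp}"), so later lookups must stringify the value carried by the hidden id_session gr.Number in the same way, as crea_list_task below does with f"{id_session}". A minimal standalone sketch of the keying pattern, reusing names from the diff purely for illustration (the MAX_SEED value here is an assumption; the real constant is defined earlier in app.py):

    from random import randint

    MAX_SEED = 2**32 - 1  # assumed value, for illustration only
    cache_image, cache_id_image, cache_list_task = {}, {}, {}

    def init_session():
        # create a fresh string key and empty per-session caches
        key = f"{randint(1, MAX_SEED)}"
        cache_image[key] = []
        cache_id_image[key] = []
        cache_list_task[key] = []
        return key

    key = init_session()
    cache_list_task[key].append({"model": "some/model"})  # lookups reuse the same string key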
@@ -281,7 +286,18 @@ def fonc_load_info_custom(nb_of_models_to_gen,list_model_custom,index_first_mode
         str_temp+=f"\"{list_model_custom[i+index_first_model]}\",\n"
     return nb_of_models_to_gen,gr.Textbox(str_temp),gr.Dropdown(choices=[["",list_models_temp]], value=list_models_temp )
 
-
+def crea_list_task(id_session,list_param,list_models_to_gen,nb_images_by_prompt):
+    cache_list_task[f"{id_session}"]=[]
+    dict_temp={}
+    for m in list_models_to_gen:
+        dict_temp["model"]=m
+        dict_temp["id_module"]=-1
+        dict_temp["task"]=[]
+        for p in list_param:
+            for i in range(nb_images_by_prompt):
+                dict_temp["task"].append({"txt_input":p[0],"neg_input":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
+        cache_list_task[f"{id_session}"].append(dict_temp)
+    print(cache_list_task[f"{id_session}"])###########################################
 
 
 def cutStrg(longStrg,start,end):
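One thing worth flagging in crea_list_task as added above: dict_temp is created once, before the for m loop, so every element appended to cache_list_task[f"{id_session}"] refers to the same dict object; after the loop all entries show the last model's values and the "task" list accumulates across models. If the intent is one independent entry per model, the dict has to be rebuilt inside the loop. A hedged sketch of that variant, keeping the field names from the diff (not tested against the rest of app.py):

    def crea_list_task(id_session, list_param, list_models_to_gen, nb_images_by_prompt):
        cache_list_task[f"{id_session}"] = []
        for m in list_models_to_gen:
            # fresh dict per model so the appended entries do not alias each other
            dict_temp = {"model": m, "id_module": -1, "task": []}
            for p in list_param:
                for _ in range(nb_images_by_prompt):
                    dict_temp["task"].append({"txt_input": p[0], "neg_input": p[1],
                                              "width": p[2], "height": p[3],
                                              "steps": p[4], "cfg": p[5], "seed": p[6]})
            cache_list_task[f"{id_session}"].append(dict_temp)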
@@ -310,6 +326,7 @@ def aff_models_perso(txt_list_perso,models=models):
 
 def make_me():
     with gr.Column():
+        id_session=gr.Number(visible=False,value=0)
         with gr.Group():
             with gr.Row():
                 with gr.Column(scale=4):
@@ -344,6 +361,7 @@ def make_me():
                         outputs=[txt_input,neg_input,width,height,steps,cfg,seed],
                     )
                 add_param.click(fonc_add_param,[list_param,txt_input,neg_input,width,height,steps,cfg,seed],[disp_param.dataset,list_param])
+                add_param.click(set_session,[id_session],[id_session])
                 del_param.click(fonc_del_param,[list_param,txt_input,neg_input,width,height,steps,cfg,seed],[disp_param.dataset,list_param])
 
                 with gr.Row():
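The second add_param.click above registers an extra listener on the same button, so fonc_add_param and set_session both fire on each click, without a guaranteed order between them. If the session id has to exist before the parameters are stored, recent Gradio versions let the two handlers be chained explicitly with .then(); a sketch under that assumption, using the same component names:

    # assumed alternative wiring: run set_session first, then fonc_add_param
    add_param.click(
        set_session, [id_session], [id_session]
    ).then(
        fonc_add_param,
        [list_param, txt_input, neg_input, width, height, steps, cfg, seed],
        [disp_param.dataset, list_param],
    )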
@@ -375,17 +393,22 @@ def make_me():
 
             load_model_custom=gr.Button("Load Models Custom")
             load_model_custom.click(fonc_load_info_custom,[nb_of_models_to_gen,list_model_custom,index_first_model_custom],[nb_of_models_to_gen,disp_info,list_models_to_gen])
+
+            list_models_to_gen.change(crea_list_task,[id_session,list_param,list_models_to_gen,nb_images_by_prompt],[])#################
+
 
+
     with gr.Column():
         with gr.Row():
-            id_session=gr.Number(visible=False,value=0)
             nb_req_simult=gr.Number(50,"Number simultaneous request")
             nb_req_simult_max_by_mod=gr.Number(2,"Number max simultaneous request by model")
 
         with gr.Row():
             outputs=[]
+            outputs_name=[]
             for i in range(nb_req_simult.value):
                 outputs.append(gr.Image(None,interactive=False,render=False))
+                outputs_name.append(gr.Number(i,interactive=False,render=False))
 
 
 
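About the pre-built outputs / outputs_name lists in the last hunk: Gradio components created with render=False are instantiated but not placed in the layout, and can be placed later by calling .render() inside the desired layout context, which is presumably why they are collected in lists here. A minimal sketch of that pattern with generic names (not taken from app.py):

    import gradio as gr

    with gr.Blocks() as demo:
        # build the components up front without rendering them
        images = [gr.Image(None, interactive=False, render=False) for _ in range(4)]
        labels = [gr.Number(i, interactive=False, render=False) for i in range(4)]

        with gr.Row():
            for img, num in zip(images, labels):
                img.render()   # place each pre-built component into the row
                num.render()

    demo.launch()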