Space status: Sleeping

Commit: Update app.py
Browse files

app.py  CHANGED
@@ -63,7 +63,7 @@ pipe = EllaXLPipeline(pipe,f'{pipeline_path}/pytorch_model.bin')
 
 # print(f"Optimizing finished successfully after {time.time()-t} secs")
 
-@spaces.GPU(enable_queue=True
+@spaces.GPU(enable_queue=True)
 def infer(prompt,negative_prompt,seed,resolution, steps):
 
 # if 'cuda' not in pipe.pipe.device.type:
@@ -123,7 +123,7 @@ with gr.Blocks(css=css) as demo:
     prompt_in = gr.Textbox(label="Prompt", value="A smiling man with wavy brown hair and a trimmed beard")
     resolution = gr.Dropdown(value=resolutions[0], show_label=True, label="Resolution", choices=resolutions)
     seed = gr.Textbox(label="Seed", value=-1)
-    steps = gr.Textbox(label="Steps", value=
+    steps = gr.Textbox(label="Steps", value=30)
     negative_prompt = gr.Textbox(label="Negative Prompt", value=default_negative_prompt)
     submit_btn = gr.Button("Generate")
     result = gr.Image(label="BRIA-2.3-T5 Result")