Spaces: Build error

Commit 25dd0a9
Nupur Kumari committed
1 Parent(s): e09c88c
custom-diffusion-space

Files changed:
- README.md +0 -1
- app.py +2 -3
- inference.py +0 -1
README.md CHANGED

@@ -7,7 +7,6 @@ sdk: gradio
 sdk_version: 3.12.0
 app_file: app.py
 pinned: false
-license: mit
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED

@@ -1,9 +1,8 @@
 #!/usr/bin/env python
-"""
+"""Demo app for https://github.com/adobe-research/custom-diffusion.
 
 The code in this repo is partly adapted from the following repository:
 https://huggingface.co/spaces/hysts/LoRA-SD-training
-The license of the original code is MIT, which is specified in the README.md.
 """
 
 from __future__ import annotations

@@ -176,7 +175,7 @@ def create_inference_demo(pipe: InferencePipeline) -> gr.Blocks:
                               minimum=0,
                               maximum=100000,
                               step=1,
-                              value=
+                              value=42)
             with gr.Accordion('Other Parameters', open=False):
                 num_steps = gr.Slider(label='Number of Steps',
                                       minimum=0,
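The second hunk gives the seed slider a concrete default (value=42). For context, here is a minimal sketch of how a seed slider like this is typically wired into a Gradio Blocks demo; the run_inference wrapper and the surrounding widget layout are illustrative placeholders, not code from this Space:

import gradio as gr
import torch

def run_inference(prompt: str, seed: int, num_steps: int) -> str:
    # Hypothetical wrapper: a fixed seed makes generations reproducible.
    generator = torch.Generator(device='cpu').manual_seed(int(seed))
    # A real app would call its diffusion pipeline here, e.g.
    # pipe(prompt, num_inference_steps=int(num_steps), generator=generator)
    return f'prompt={prompt!r}, seed={int(seed)}, steps={int(num_steps)}'

with gr.Blocks() as demo:
    prompt = gr.Textbox(label='Prompt')
    seed = gr.Slider(label='Seed', minimum=0, maximum=100000, step=1, value=42)
    with gr.Accordion('Other Parameters', open=False):
        num_steps = gr.Slider(label='Number of Steps',
                              minimum=0, maximum=100, step=1, value=50)
    run_button = gr.Button('Run')
    result = gr.Textbox(label='Result')
    run_button.click(fn=run_inference,
                     inputs=[prompt, seed, num_steps],
                     outputs=result)

demo.launch()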
inference.py CHANGED

@@ -14,7 +14,6 @@ sys.path.insert(0, 'custom-diffusion')
 
 
 def load_model(text_encoder, tokenizer, unet, save_path, modifier_token, freeze_model='crossattn_kv'):
-    logger.info("loading embeddings")
     st = torch.load(save_path)
    if 'text_encoder' in st:
         text_encoder.load_state_dict(st['text_encoder'])
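This hunk only drops a logging call; load_model still loads the saved custom-diffusion checkpoint into the pipeline components (the modifier_token argument names the token added during fine-tuning). A rough usage sketch, assuming diffusers' StableDiffusionPipeline; the base model id, the delta.bin path, and the '<new1>' token are placeholders for whatever the Space actually passes:

import torch
from diffusers import StableDiffusionPipeline

from inference import load_model  # the function shown in the diff above

# Assumed base model; the checkpoint must match the model it was trained against.
pipe = StableDiffusionPipeline.from_pretrained('CompVis/stable-diffusion-v1-4',
                                               torch_dtype=torch.float16).to('cuda')

# Patch the fine-tuned weights (and, when present in the checkpoint, the
# text-encoder state for the new token) into the pipeline components.
load_model(pipe.text_encoder, pipe.tokenizer, pipe.unet,
           save_path='delta.bin', modifier_token='<new1>')

image = pipe('<new1> cat swimming in a pool', num_inference_steps=50).images[0]
image.save('out.png')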