Update handler.py
handler.py (+7 -3) CHANGED
@@ -5,7 +5,7 @@ from diffusers import (
     EulerAncestralDiscreteScheduler,
 )
 from typing import Dict, List, Any
-
+from xformers.ops import MemoryEfficientAttentionFlashAttentionOp
 import qrcode
 import os
 import base64
@@ -52,11 +52,13 @@ def load_models():
         "lllyasviel/control_v11f1e_sd15_tile",
         torch_dtype=torch.float16,
     )
+    controlnet_tile.enable_xformers_memory_efficient_attention(attention_op=MemoryEfficientAttentionFlashAttentionOp)
 
     controlnet_brightness = ControlNetModel.from_pretrained(
         "ioclab/control_v1p_sd15_brightness",
         torch_dtype=torch.float16,
     )
+    controlnet_brightness.enable_xformers_memory_efficient_attention(attention_op=MemoryEfficientAttentionFlashAttentionOp)
 
     pipe = StableDiffusionControlNetPipeline.from_pretrained(
         MODEL_ID,
@@ -67,10 +69,12 @@ def load_models():
         torch_dtype=torch.float16,
         cache_dir="cache",
         # local_files_only=True,
-    )
+    )
+
+    pipe.to("cuda")
 
     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
-    pipe.enable_xformers_memory_efficient_attention()
+    pipe.enable_xformers_memory_efficient_attention(attention_op=MemoryEfficientAttentionFlashAttentionOp)
     return pipe
 
 
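For context, load_models() after this commit reads roughly as sketched below. The sketch is assembled from the hunks above: MODEL_ID and the controlnet=[controlnet_tile, controlnet_brightness] wiring are not visible in the diff and are assumptions, as is any from_pretrained argument the hunk context elides.

import torch
from diffusers import (
    ControlNetModel,
    StableDiffusionControlNetPipeline,
    EulerAncestralDiscreteScheduler,
)
from xformers.ops import MemoryEfficientAttentionFlashAttentionOp


def load_models():
    # Tile ControlNet in fp16, with the FlashAttention-backed xFormers op.
    controlnet_tile = ControlNetModel.from_pretrained(
        "lllyasviel/control_v11f1e_sd15_tile",
        torch_dtype=torch.float16,
    )
    controlnet_tile.enable_xformers_memory_efficient_attention(
        attention_op=MemoryEfficientAttentionFlashAttentionOp
    )

    # Brightness ControlNet, same treatment.
    controlnet_brightness = ControlNetModel.from_pretrained(
        "ioclab/control_v1p_sd15_brightness",
        torch_dtype=torch.float16,
    )
    controlnet_brightness.enable_xformers_memory_efficient_attention(
        attention_op=MemoryEfficientAttentionFlashAttentionOp
    )

    # MODEL_ID is defined elsewhere in handler.py (not shown in this diff);
    # passing both ControlNets via controlnet=[...] is an assumption.
    pipe = StableDiffusionControlNetPipeline.from_pretrained(
        MODEL_ID,
        controlnet=[controlnet_tile, controlnet_brightness],
        torch_dtype=torch.float16,
        cache_dir="cache",
        # local_files_only=True,
    )

    pipe.to("cuda")

    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
    pipe.enable_xformers_memory_efficient_attention(
        attention_op=MemoryEfficientAttentionFlashAttentionOp
    )
    return pipe

Pinning attention_op to MemoryEfficientAttentionFlashAttentionOp makes xFormers dispatch to its FlashAttention-backed kernel instead of auto-selecting an op; that kernel only supports half-precision inputs, which matches the torch.float16 dtypes loaded here, and the added pipe.to("cuda") keeps the model on the GPU where those kernels run.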