yamildiego committed
Commit 6d693f7 · 1 Parent(s): d026128
Files changed (1)
  1. handler.py +19 -30
handler.py CHANGED
@@ -8,21 +8,17 @@ from diffusers.utils import load_image
 
  import torch
  import torch.nn.functional as F
- from torchvision.transforms import Compose
+ # from torchvision.transforms import Compose
 
  import PIL
  from PIL import Image
 
- from depth_anything.dpt import DepthAnything
- from depth_anything.util.transform import Resize, NormalizeImage, PrepareForNet
+ # from depth_anything.dpt import DepthAnything
+ # from depth_anything.util.transform import Resize, NormalizeImage, PrepareForNet
 
  from insightface.app import FaceAnalysis
  from pipeline_stable_diffusion_xl_instantid_full import StableDiffusionXLInstantIDPipeline, draw_kps
  from controlnet_aux import OpenposeDetector
- from huggingface_hub import hf_hub_download
-
-
- # end_test
 
  device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
  if device.type != 'cuda':
@@ -32,15 +28,8 @@ dtype = torch.float16 if str(device).__contains__("cuda") else torch.float32
 
  class EndpointHandler():
      def __init__(self, model_dir):
-         # hf_hub_download(repo_id="InstantX/InstantID", filename="ControlNetModel/config.json", local_dir="./checkpoints")
-         # hf_hub_download(
-         #     repo_id="InstantX/InstantID",
-         #     filename="ControlNetModel/diffusion_pytorch_model.safetensors",
-         #     local_dir="./checkpoints",
-         # )
-         # hf_hub_download(repo_id="InstantX/InstantID", filename="ip-adapter.bin", local_dir="./checkpoints")
-
-         print("Model dir: ", model_dir)
+         print("Loading model from", model_dir)
+
          face_adapter = f"./checkpoints/ip-adapter.bin"
          controlnet_path = f"./checkpoints/ControlNetModel"
 
@@ -149,23 +138,23 @@ class EndpointHandler():
          self.generator = torch.Generator(device=device.type).manual_seed(3)
 
 
-         identitynet_strength_ratio = 0.8
-         pose_strength = 0.4
+         # identitynet_strength_ratio = 0.8
+         # pose_strength = 0.4
          self.my_controlnet_selection = ["pose"]
 
 
-         controlnet_scales = {
-             "pose": pose_strength,
-             # "canny": canny_strength,
-             # "depth": depth_strength,
-         }
-         self.pipe.controlnet = MultiControlNetModel(
-             [self.controlnet_identitynet]
-             + [self.controlnet_map[s] for s in self.my_controlnet_selection]
-         )
-         self.control_scales = [float(identitynet_strength_ratio)] + [
-             controlnet_scales[s] for s in self.my_controlnet_selection
-         ]
+         # controlnet_scales = {
+         #     "pose": pose_strength,
+         #     # "canny": canny_strength,
+         #     # "depth": depth_strength,
+         # }
+         # self.pipe.controlnet = MultiControlNetModel(
+         #     [self.controlnet_identitynet]
+         #     + [self.controlnet_map[s] for s in self.my_controlnet_selection]
+         # )
+         # self.control_scales = [float(identitynet_strength_ratio)] + [
+         #     controlnet_scales[s] for s in self.my_controlnet_selection
+         # ]
 
      def __call__(self, data):
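
Note: the handler still reads its weights from ./checkpoints/ip-adapter.bin and ./checkpoints/ControlNetModel, so those files now have to exist before the endpoint starts. A minimal sketch of prefetching them, mirroring the hf_hub_download calls this commit deletes (the repo id, filenames, and local_dir are taken from the removed lines; running this at build time rather than in __init__ is an assumption):

# Sketch: prefetch the InstantID weights the handler expects under ./checkpoints.
# Mirrors the hf_hub_download calls removed in this commit; intended to run at
# image build time instead of inside EndpointHandler.__init__.
from huggingface_hub import hf_hub_download

for filename in (
    "ControlNetModel/config.json",
    "ControlNetModel/diffusion_pytorch_model.safetensors",
    "ip-adapter.bin",
):
    hf_hub_download(
        repo_id="InstantX/InstantID",
        filename=filename,
        local_dir="./checkpoints",
    )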
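For context, the block commented out in the last hunk is what switched the pipeline over to a multi-ControlNet setup with the identity net in slot 0. A rough sketch of that wiring follows; the identity ControlNet path matches controlnet_path in the handler, while the OpenPose ControlNet repo and the standalone variable names are assumptions (the handler builds controlnet_map elsewhere in the file):

# Sketch of the multi-ControlNet wiring this commit comments out.
import torch
from diffusers import ControlNetModel
from diffusers.pipelines.controlnet.multicontrolnet import MultiControlNetModel

dtype = torch.float16 if torch.cuda.is_available() else torch.float32

controlnet_identitynet = ControlNetModel.from_pretrained(
    "./checkpoints/ControlNetModel", torch_dtype=dtype
)
controlnet_map = {
    "pose": ControlNetModel.from_pretrained(
        "thibaud/controlnet-openpose-sdxl-1.0", torch_dtype=dtype  # assumed repo
    ),
}

identitynet_strength_ratio = 0.8   # values from the commented-out code
controlnet_scales = {"pose": 0.4}
selection = ["pose"]

# The controlnet list and the per-net conditioning scales must stay
# index-aligned: slot 0 is the identity net, the rest follow `selection`.
multi_controlnet = MultiControlNetModel(
    [controlnet_identitynet] + [controlnet_map[s] for s in selection]
)
control_scales = [float(identitynet_strength_ratio)] + [
    controlnet_scales[s] for s in selection
]
# The handler would then set pipe.controlnet = multi_controlnet and pass
# controlnet_conditioning_scale=control_scales to the pipeline call.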