Upload handler.py
handler.py  CHANGED  +2 -2
@@ -8,7 +8,7 @@ import torch
 import torch._dynamo
 torch._dynamo.config.suppress_errors = True
 
-from huggingface_inference_toolkit.logging import logger
+#from huggingface_inference_toolkit.logging import logger
 
 def compile_pipeline(pipe) -> Any:
     pipe.transformer.fuse_qkv_projections()
@@ -31,7 +31,7 @@ class EndpointHandler:
 
     #@torch.inference_mode()
     def __call__(self, data: Dict[str, Any]) -> Image:
-        logger.info(f"Received incoming request with {data=}")
+        #logger.info(f"Received incoming request with {data=}")
 
         if "inputs" in data and isinstance(data["inputs"], str):
            prompt = data.pop("inputs")
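The commit simply comments out the huggingface_inference_toolkit logger import and the request log line in __call__. A minimal alternative sketch, not part of this commit: fall back to Python's standard-library logging when the toolkit is not installed, so the request logging on line 34 could stay enabled. The logger name "handler" is an arbitrary choice for illustration.

import logging

try:
    # Preferred: the Inference Toolkit logger, when the package is available
    from huggingface_inference_toolkit.logging import logger
except ImportError:
    # Fallback assumption: a plain stdlib logger exposing the same .info() interface
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("handler")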