huz-relay committed
Commit 29bd164 · 1 Parent(s): e5307bd

Add logging

Files changed (4)
  1. Pipfile +1 -0
  2. Pipfile.lock +8 -1
  3. handler.py +22 -14
  4. requirements.txt +1 -0
Pipfile CHANGED
@@ -7,6 +7,7 @@ name = "pypi"
 transformers = "*"
 pillow = "*"
 torch = "*"
+logging = "*"
 
 [dev-packages]
 
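Aside: `logging` here resolves to a PyPI distribution of that name (pinned below at 0.4.9.6, apparently the old standalone release that predates the standard-library module), not to the `logging` module that ships with Python itself, which needs no install. A minimal sketch of the same logger setup using only the standard library; note that `logging.getLogger()` with no name returns the root logger, whose default level is WARNING, so `info()` records are dropped unless the level is lowered:

import logging

logger = logging.getLogger()                # root logger; default level is WARNING
logger.addHandler(logging.StreamHandler())  # writes records to stderr
logger.setLevel(logging.INFO)               # without this, logger.info() is filtered out
logger.info("Initialisation finished!")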
Pipfile.lock CHANGED
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "7a2624bd76968ab76a35343f263a0650cb105c90def7d0a508d9874eaa81f979"
+            "sha256": "d2cc81eabeb4001a0933c7fe68bde6dae34d241899c017a7e1fada250b02d606"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -160,6 +160,13 @@
             "markers": "python_version >= '3.7'",
             "version": "==3.1.4"
         },
+        "logging": {
+            "hashes": [
+                "sha256:26f6b50773f085042d301085bd1bf5d9f3735704db9f37c1ce6d8b85c38f2417"
+            ],
+            "index": "pypi",
+            "version": "==0.4.9.6"
+        },
         "markupsafe": {
             "hashes": [
                 "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf",
handler.py CHANGED
@@ -1,16 +1,19 @@
 from typing import Any, Dict, List
 from transformers import Idefics2Processor, Idefics2ForConditionalGeneration
 import torch
+import logging
 
 
 class EndpointHandler:
     def __init__(self, path=""):
         # Preload all the elements you are going to need at inference.
+        self.logger = logging.getLogger()
+        self.logger.addHandler(logging.StreamHandler())
         self.device = "cuda" if torch.cuda.is_available() else "cpu"
         self.processor = Idefics2Processor.from_pretrained(path)
         self.model = Idefics2ForConditionalGeneration.from_pretrained(path)
         self.model.to(self.device)
-        print("Initialisation finished!")
+        self.logger.info("Initialisation finished!")
 
     def __call__(self, data: Dict[str, Any]) -> List[Dict[str, Any]]:
         """
@@ -20,20 +23,25 @@ class EndpointHandler:
         Return:
             A :obj:`list` | `dict`: will be serialized and returned
         """
-        image = data.pop("inputs", data)
-        print("image reached")
+        checkpoints = ""
 
-        # process image
-        inputs = self.processor(images=image, return_tensors="pt").to(self.device)
-        print("inputs reached")
-        generated_ids = self.model.generate(**inputs)
-        print("generated")
+        try:
+            image = data.pop("inputs", data)
+            checkpoints += "image reached\n"
 
-        # run prediction
-        generated_text = self.processor.batch_decode(
-            generated_ids, skip_special_tokens=True
-        )
-        print("decoded")
+            # process image
+            inputs = self.processor(images=image, return_tensors="pt").to(self.device)
+            checkpoints += "inputs reached\n"
+            generated_ids = self.model.generate(**inputs, max_new_tokens=20)
+            checkpoints += "generated\n"
+
+            # run prediction
+            generated_text: List[str] = self.processor.batch_decode(
+                generated_ids, skip_special_tokens=True
+            )
+            checkpoints += "decoded\n"
+        except Exception as e:
+            checkpoints += f"{e}\n"
 
         # decode output
-        return generated_text
+        return generated_text.append(checkpoints)
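Two details of the new `__call__` are worth flagging: `list.append` returns `None`, so `return generated_text.append(checkpoints)` always returns `None` rather than the decoded text, and `generated_text` is never bound when an exception is raised before `batch_decode` runs, so the final line itself raises `NameError`. A minimal sketch of a safer return path (a drop-in for the class above, not part of the commit):

def __call__(self, data: Dict[str, Any]) -> List[str]:
    checkpoints = ""
    # Pre-bind so the except path cannot leave generated_text undefined.
    generated_text: List[str] = []
    try:
        image = data.pop("inputs", data)
        checkpoints += "image reached\n"

        # process image
        inputs = self.processor(images=image, return_tensors="pt").to(self.device)
        checkpoints += "inputs reached\n"
        generated_ids = self.model.generate(**inputs, max_new_tokens=20)
        checkpoints += "generated\n"

        # run prediction
        generated_text = self.processor.batch_decode(
            generated_ids, skip_special_tokens=True
        )
        checkpoints += "decoded\n"
    except Exception as e:
        checkpoints += f"{e}\n"

    # list.append mutates in place and returns None, so append first
    # and then return the list itself.
    generated_text.append(checkpoints)
    return generated_text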
requirements.txt CHANGED
@@ -6,6 +6,7 @@ fsspec==2024.6.0; python_version >= '3.8'
 huggingface-hub==0.23.3; python_full_version >= '3.8.0'
 idna==3.7; python_version >= '3.5'
 jinja2==3.1.4; python_version >= '3.7'
+logging==0.4.9.6
 markupsafe==2.1.5; python_version >= '3.7'
 mpmath==1.3.0
 networkx==3.3; python_version >= '3.10'
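Because `logging` is also a standard-library module name, it may be worth checking which module an environment built from these pins actually imports; a quick sanity check:

import logging

# A path under site-packages would mean the PyPI distribution shadows the
# standard-library module; a path inside the Python installation means the
# stdlib version won.
print(logging.__file__)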