riteshm06 committed on
Commit
c4e5cea
1 Parent(s): 578fa94

Create handler.py

Browse files
Files changed (1) hide show
  1. handler.py +44 -0
handler.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import transformers
3
+ from typing import Dict, Any
4
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
5
+
6
+
7
+ dtype = torch.bfloat16 if torch.cuda.get_device_capability()[0] == 8 else torch.float16
8
+
9
class EndpointHandler:
    """Inference-endpoint handler wrapping a causal LM in a text-generation pipeline.

    The tokenizer and (8-bit quantized) model are loaded once in ``__init__``;
    generation requests are then served through ``__call__``.
    """

    def __init__(self, model_path: str = ""):
        """Load tokenizer and model from *model_path* and build the pipeline.

        NOTE(review): the model is loaded with both ``load_in_8bit=True`` and
        ``torch_dtype=dtype`` — the dtype applies to the layers that are not
        quantized. ``dtype`` is the module-level dtype chosen from the GPU
        capability at import time.
        """
        tokenizer = AutoTokenizer.from_pretrained(model_path)

        model = AutoModelForCausalLM.from_pretrained(
            model_path,
            return_dict=True,
            device_map='auto',       # place/shard layers across available devices
            load_in_8bit=True,       # bitsandbytes 8-bit quantization
            torch_dtype=dtype,
            trust_remote_code=True)

        self.pipeline = transformers.pipeline(
            "text-generation",
            model=model,
            tokenizer=tokenizer,
            temperature=0.8,
            repetition_penalty=1.1,
            max_new_tokens=1000,
            pad_token_id=tokenizer.pad_token_id,
            eos_token_id=tokenizer.eos_token_id,
        )

    def __call__(self, data: Dict[str, Any]) -> str:
        """Generate a completion for the request payload.

        Parameters
        ----------
        data:
            Request dict; the prompt string is read from the ``"inputs"`` key.
            If the key is missing, the whole dict is passed through as the
            prompt (original fallback behavior, kept for compatibility).

        Returns
        -------
        str
            The generated continuation only (``return_full_text=False``
            excludes the prompt), stripped of surrounding whitespace.
            Bug fix: the original annotation claimed ``Dict[str, Any]`` but a
            plain string was always returned — annotation corrected to ``str``.
        """
        prompt = data.pop("inputs", data)

        llm_response = self.pipeline(
            prompt,
            return_full_text=False,  # only return newly generated tokens
        )

        return llm_response[0]['generated_text'].strip()