File size: 453 Bytes
751936e |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 |
import tiktoken
from tiktoken import Encoding
# Build the BPE tokenizer for gpt-3.5-turbo and expose `vocab_size`
# as an alias of tiktoken's `n_vocab` — presumably for compatibility
# with HuggingFace-style tokenizer APIs (TODO confirm with callers).
tokenizer = tiktoken.encoding_for_model('gpt-3.5-turbo')
setattr(tokenizer, 'vocab_size', tokenizer.n_vocab)
def decode(self, tokens: list, errors: str = "replace") -> str:
    """Decode *tokens* back to a UTF-8 string, returning "null" on failure.

    Drop-in replacement for ``tiktoken.Encoding.decode`` that never raises:
    any error from the underlying BPE decode (e.g. invalid token ids) is
    caught and the literal string ``"null"`` is returned instead.

    Args:
        tokens: sequence of integer token ids to decode.
        errors: passed to ``bytes.decode`` for malformed UTF-8 sequences.

    Returns:
        The decoded string, or ``"null"`` if decoding fails.
    """
    try:
        return self._core_bpe.decode_bytes(tokens).decode("utf-8", errors=errors)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt / SystemExit
        # still propagate; bad token ids still yield "null" instead of raising.
        return "null"
Encoding.decode = decode
|