lqf committed on
Commit
587d2c9
1 Parent(s): d2e2d91

Upload tokenization_chatglm.py

Browse files

File "../run.py", line 224, in parse_input
input_ids = tokenizer.encode(curr_text,
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 2788, in encode
encoded_inputs = self.encode_plus(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3207, in encode_plus
return self._encode_plus(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils.py", line 804, in _encode_plus
return self.prepare_for_model(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3704, in prepare_for_model
encoded_inputs = self.pad(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3506, in pad
encoded_inputs = self._pad(
TypeError: _pad() got an unexpected keyword argument 'padding_side'

Compatible with transformers >=4.45.0

Files changed (1) hide show
  1. tokenization_chatglm.py +1 -0
tokenization_chatglm.py CHANGED
@@ -199,6 +199,7 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
199
  max_length: Optional[int] = None,
200
  padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
201
  pad_to_multiple_of: Optional[int] = None,
 
202
  return_attention_mask: Optional[bool] = None,
203
  ) -> dict:
204
  """
 
199
  max_length: Optional[int] = None,
200
  padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
201
  pad_to_multiple_of: Optional[int] = None,
202
+ padding_side: Optional[str] = None,
203
  return_attention_mask: Optional[bool] = None,
204
  ) -> dict:
205
  """