Upload tokenization_chatglm.py
Browse files
File "../run.py", line 224, in parse_input
input_ids = tokenizer.encode(curr_text,
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 2788, in encode
encoded_inputs = self.encode_plus(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3207, in encode_plus
return self._encode_plus(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils.py", line 804, in _encode_plus
return self.prepare_for_model(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3704, in prepare_for_model
encoded_inputs = self.pad(
File "/home/pt201/lib/python3.8/site-packages/transformers/tokenization_utils_base.py", line 3506, in pad
encoded_inputs = self._pad(
TypeError: _pad() got an unexpected keyword argument 'padding_side'
compatible with transformers >=4.45.0
- tokenization_chatglm.py +1 -0
@@ -199,6 +199,7 @@ class ChatGLMTokenizer(PreTrainedTokenizer):
|
|
199 |
max_length: Optional[int] = None,
|
200 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
201 |
pad_to_multiple_of: Optional[int] = None,
|
|
|
202 |
return_attention_mask: Optional[bool] = None,
|
203 |
) -> dict:
|
204 |
"""
|
|
|
199 |
max_length: Optional[int] = None,
|
200 |
padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD,
|
201 |
pad_to_multiple_of: Optional[int] = None,
|
202 |
+
padding_side: Optional[str] = None,
|
203 |
return_attention_mask: Optional[bool] = None,
|
204 |
) -> dict:
|
205 |
"""
|