Update tokenization_ernie4_5.py
tokenization_ernie4_5.py CHANGED (+0 -1)
@@ -81,7 +81,6 @@ class Ernie4_5_Tokenizer(PreTrainedTokenizer):
         self.vocab_file = vocab_file
         self.sp_model = spm.SentencePieceProcessor()
         self.sp_model.Load(vocab_file)
-        self.pad_id = self._convert_token_to_id(pad_token)
         self.tokenizer_alpha = tokenizer_alpha

         if additional_special_tokens is None:
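With this change the tokenizer no longer caches `pad_id` at construction time. A minimal sketch of the equivalent lazy lookup, assuming the standard SentencePiece-backed `_convert_token_to_id` shown in the hunk; the demo class, the `<pad>` default, and the `pad_token` attribute are illustrative assumptions, not the repository's actual code:

# Hedged sketch: resolve the pad id on demand instead of eagerly in __init__.
import sentencepiece as spm

class PadIdDemo:
    def __init__(self, vocab_file, pad_token="<pad>"):
        self.sp_model = spm.SentencePieceProcessor()
        self.sp_model.Load(vocab_file)
        # Assumed attribute; in the real tokenizer the pad token is handled by the base class.
        self.pad_token = pad_token

    def _convert_token_to_id(self, token):
        # Standard SentencePiece piece-to-id lookup, as used by the removed line.
        return self.sp_model.piece_to_id(token)

    @property
    def pad_id(self):
        # Lazy equivalent of the removed `self.pad_id = self._convert_token_to_id(pad_token)`.
        return self._convert_token_to_id(self.pad_token)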