Update tokenizers
- .gitattributes +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
.gitattributes
CHANGED
@@ -35,3 +35,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 onnx/model.onnx_data filter=lfs diff=lfs merge=lfs -text
 onnx/model_fp16.onnx_data filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
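The added rule stores tokenizer.json as a Git LFS pointer instead of a regular git blob; it is exactly the line that git lfs track "tokenizer.json" appends to .gitattributes. This is needed because the regenerated tokenizer.json (below) is large, and the Hub requires large files to go through LFS rather than plain git history.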
tokenizer.json
CHANGED
The diff for this file is too large to render. See raw diff.
tokenizer_config.json
CHANGED
@@ -233,6 +233,7 @@
 "extra_special_tokens": {},
 "model_max_length": 131072,
 "pad_token": "<|endoftext|>",
+"padding_side": "left",
 "split_special_tokens": false,
 "tokenizer_class": "Qwen2Tokenizer",
 "unk_token": null,
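The one functional change is the new "padding_side": "left" entry. Decoder-only models such as Qwen2 generate from the last token of each sequence, so batched inputs should be padded on the left to keep every sequence's final real token aligned. A minimal sketch of the effect, assuming the transformers library and a hypothetical local checkout of this repo at ./qwen-model:

from transformers import AutoTokenizer

# Load the tokenizer; it now defaults to left padding via tokenizer_config.json.
tok = AutoTokenizer.from_pretrained("./qwen-model")

# Pad a ragged batch; with padding_side="left" the pad token
# ("<|endoftext|>") is prepended to the shorter sequence.
batch = tok(["Hello", "A much longer prompt"], padding=True)
print(batch["input_ids"])       # shorter row begins with pad token ids
print(batch["attention_mask"])  # zeros appear at the start of the short row

Previously callers had to set tok.padding_side = "left" (or pass padding_side="left") by hand before batched generation; baking it into the config makes left padding the default.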