MohamedAhmedAE committed
Commit 34520a2 (verified) · 1 Parent(s): c9cc8fb

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +7 -1
  2. tokenizer_config.json +5 -1
special_tokens_map.json CHANGED
@@ -13,5 +13,11 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|eot_id|>"
+  "pad_token": {
+    "content": "<|eot_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
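
The change replaces the bare string pad_token with the full added-token form, making the lstrip/rstrip/normalized/single_word flags explicit. Below is a minimal sketch of the equivalent in code, assuming the tokenizer files from this commit have been downloaded locally (the path is illustrative, not part of the commit):

from transformers import AddedToken, AutoTokenizer

# Illustrative local path holding this repo's tokenizer files.
tok = AutoTokenizer.from_pretrained("./tokenizer_dir")

# Equivalent of the new special_tokens_map.json entry: the pad token as a
# fully specified AddedToken rather than the bare string "<|eot_id|>".
tok.add_special_tokens({
    "pad_token": AddedToken(
        "<|eot_id|>",
        lstrip=False,
        rstrip=False,
        normalized=False,
        single_word=False,
    )
})
print(tok.pad_token, tok.pad_token_id)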
tokenizer_config.json CHANGED
@@ -2055,6 +2055,7 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|eot_id|>",
   "extra_special_tokens": {},
+  "max_length": 4096,
   "model_input_names": [
     "input_ids",
     "attention_mask"
@@ -2062,5 +2063,8 @@
   "model_max_length": 4096,
   "pad_token": "<|eot_id|>",
   "padding_side": "left",
-  "tokenizer_class": "PreTrainedTokenizer"
+  "stride": 0,
+  "tokenizer_class": "PreTrainedTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first"
 }
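
The new keys (max_length, stride, truncation_side, truncation_strategy), together with the existing padding_side and model_max_length, describe how the tokenizer pads and truncates by default. A minimal sketch of a call that exercises the same settings explicitly, assuming a standard transformers tokenizer loaded from this repo's files (the path is illustrative):

from transformers import AutoTokenizer

# Illustrative local path holding this repo's tokenizer files.
tok = AutoTokenizer.from_pretrained("./tokenizer_dir")

# Matches the new truncation_side field in tokenizer_config.json.
tok.truncation_side = "right"

# These call arguments mirror the config fields:
#   padding_side="left", max_length=4096 (model_max_length),
#   truncation="longest_first" (truncation_strategy), stride=0.
enc = tok(
    ["short prompt", "a much longer prompt " * 1000],
    padding=True,
    truncation="longest_first",
    max_length=4096,
    stride=0,
)
print([len(ids) for ids in enc["input_ids"]])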