goabonga committed (verified)
Commit ec8551a · 1 parent: 793d7e4

Upload tokenizer files (vocab, config, README)

Files changed (2):
  1. special_tokens_map.json (+1 -4)
  2. tokenizer_config.json (+1 -4)
special_tokens_map.json CHANGED
@@ -1,6 +1,3 @@
 {
-  "unk_token": "<unk>",
-  "pad_token": "<unk>",
-  "bos_token": "<bos>",
-  "eos_token": "<eos>"
+  "unk_token": "<unk>"
 }
tokenizer_config.json CHANGED
@@ -1,8 +1,5 @@
 {
   "model_max_length": 512,
   "tokenizer_class": "PreTrainedTokenizerFast",
-  "unk_token": "<unk>",
-  "pad_token": "<unk>",
-  "bos_token": "<bos>",
-  "eos_token": "<eos>"
+  "unk_token": "<unk>"
 }
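
A minimal sketch of how these two JSON files are picked up when the tokenizer is loaded with the transformers library. It assumes a local copy of the full repository (including the tokenizer's vocab file, e.g. tokenizer.json) in a directory named ./tokenizer; that path is illustrative, not the actual repo id.

# Minimal sketch, assuming transformers is installed and the repo files
# (tokenizer.json, tokenizer_config.json, special_tokens_map.json) sit in ./tokenizer
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./tokenizer")

# After this commit only unk_token is declared in the configs, so the other
# special tokens are expected to be unset unless defined elsewhere.
print(tok.unk_token)         # "<unk>"
print(tok.pad_token)         # None (no longer aliased to "<unk>")
print(tok.model_max_length)  # 512, taken from tokenizer_config.json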