YvanCarre committed
Commit 6ee1232 · verified · 1 Parent(s): d79eb48

Upload tokenizer

Files changed (4)
  1. merges.txt +0 -0
  2. tokenizer.json +0 -0
  3. tokenizer_config.json +2 -2
  4. vocab.json +0 -0
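These four files are the standard on-disk artifacts of a GPT-2 style byte-level BPE tokenizer, and the commit message "Upload tokenizer" matches the default message that transformers' push_to_hub uses for tokenizers. A minimal sketch of how an upload like this is typically produced; the local path and repo id below are placeholders, not taken from this page:

# Sketch only -- path and repo id are assumptions, not shown in this commit.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./my-tokenizer-dir")  # local vocab.json, merges.txt,
                                                           # tokenizer.json, tokenizer_config.json
tok.push_to_hub("YvanCarre/some-model")  # default commit message: "Upload tokenizer"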
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
     "0": {
       "content": "<|endoftext|>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -13,7 +13,7 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 1024,
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "<|endoftext|>"
 }
vocab.json CHANGED
The diff for this file is too large to render. See raw diff
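Net effect of the tokenizer_config.json change: the <|endoftext|> special token is now marked as normalized, and model_max_length drops from the transformers VERY_LARGE_INTEGER sentinel (int(1e30), which is written when no maximum length is configured) to an explicit 1024, the GPT-2 context size. A minimal verification sketch, assuming a placeholder repo id (the actual repository name is not shown on this page):

# Sketch only -- REPO_ID is a placeholder assumption.
from transformers import AutoTokenizer

REPO_ID = "YvanCarre/some-model"  # hypothetical; substitute the real repo id

tok = AutoTokenizer.from_pretrained(REPO_ID, revision="6ee1232")  # pin to this commit
print(tok.model_max_length)  # expected: 1024 after this commit
print(tok.eos_token)         # "<|endoftext|>" (also used as bos and unk here)

# With a finite model_max_length, truncation now has a well-defined default limit:
ids = tok("long text " * 2000, truncation=True)["input_ids"]
assert len(ids) <= 1024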