super-dainiu committed (verified)
Commit f6f8fc2 · 1 Parent(s): 4909d48

Upload tokenizer

Files changed (3):
  1. special_tokens_map.json +5 -42
  2. tokenizer_config.json +2 -11
  3. vocab.txt +1 -2
special_tokens_map.json CHANGED
@@ -1,44 +1,7 @@
 {
-  "cls_token": {
-    "content": "<cls>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<eos>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "mask_token": {
-    "content": "*",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "sep_token": {
-    "content": "|",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "cls_token": "<cls>",
+  "eos_token": "<eos>",
+  "mask_token": "<mask>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
 }
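
In effect, the map drops the verbose AddedToken-style entries (and the "|" sep_token) in favor of plain strings, with "<mask>" replacing "*" as the mask token. A minimal sketch for checking the updated file, assuming the repo is cloned locally (the path is illustrative):

import json

# Read the simplified map written by this commit.
with open("special_tokens_map.json") as f:
    special_tokens = json.load(f)

# Special tokens are now plain strings, the mask token is "<mask>",
# and the "|" sep_token entry is gone.
assert special_tokens["mask_token"] == "<mask>"
assert "sep_token" not in special_tokens
print(special_tokens)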
tokenizer_config.json CHANGED
@@ -33,15 +33,7 @@
       "special": true
     },
     "32": {
-      "content": "*",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "33": {
-      "content": "|",
+      "content": "<mask>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -52,10 +44,9 @@
   "clean_up_tokenization_spaces": false,
   "cls_token": "<cls>",
   "eos_token": "<eos>",
-  "mask_token": "*",
+  "mask_token": "<mask>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
-  "sep_token": "|",
   "tokenizer_class": "PairedSequenceTokenizer",
   "unk_token": "<unk>"
 }
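
The same change is mirrored here: slot 32 of added_tokens_decoder now holds "<mask>" instead of "*", slot 33 ("|") is removed, and the top-level mask_token and sep_token entries follow suit. A hedged sketch of loading the tokenizer and checking the result; "user/repo" is a placeholder, and trust_remote_code is assumed to be needed because tokenizer_class points at the custom PairedSequenceTokenizer:

from transformers import AutoTokenizer

# Placeholder repo id; the custom PairedSequenceTokenizer presumably requires remote code.
tok = AutoTokenizer.from_pretrained("user/repo", trust_remote_code=True)

assert tok.mask_token == "<mask>"      # was "*" before this commit
assert tok.sep_token is None           # the "|" sep token was dropped
print(tok.convert_ids_to_tokens(32))   # id 32 should now decode to "<mask>"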
vocab.txt CHANGED
@@ -30,5 +30,4 @@ O
 .
 -
 <null_1>
-*
-|
+<mask>
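
The vocabulary file follows the same rename: the "*" and "|" entries are replaced by a single "<mask>" entry. A quick sanity check over the updated file (path again assumed relative to a local checkout):

# Confirm the vocabulary reflects the token rename.
tokens = [line.rstrip("\n") for line in open("vocab.txt", encoding="utf-8")]

assert "<mask>" in tokens   # new mask token present
assert "*" not in tokens    # old mask token removed
assert "|" not in tokens    # old sep token removed
print(len(tokens), "tokens in vocab")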