Mengkedalai committed (verified)
Commit: 845eec2
Parent(s): ceee951

Upload tokenizer
Files changed (3):
  1. added_tokens.json (+2 -2)
  2. tokenizer_config.json (+4 -5)
  3. vocab.json (+36 -67)
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 69,
-  "<s>": 68
+  "</s>": 38,
+  "<s>": 37
 }
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "66": {
+    "35": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "67": {
+    "36": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "68": {
+    "37": {
       "content": "<s>",
       "lstrip": false,
       "normalized": false,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "69": {
+    "38": {
       "content": "</s>",
       "lstrip": false,
       "normalized": false,
@@ -39,7 +39,6 @@
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
-  "processor_class": "Wav2Vec2BertProcessor",
   "replace_word_delimiter_char": " ",
   "target_lang": null,
   "tokenizer_class": "Wav2Vec2CTCTokenizer",
vocab.json CHANGED
@@ -1,70 +1,39 @@
 {
-  "*": 1,
-  "/": 2,
-  "<": 3,
-  "[PAD]": 67,
-  "[UNK]": 66,
-  "\\": 4,
+  "[PAD]": 36,
+  "[UNK]": 35,
   "|": 0,
-  "­": 5,
-  "": 6,
-  "": 7,
-  "": 8,
-  "": 9,
-  "": 10,
-  "": 11,
-  "": 12,
-  "": 13,
-  "": 14,
-  "": 15,
-  "": 16,
-  "": 17,
-  "": 18,
-  "": 19,
-  "": 20,
-  "": 21,
-  "": 22,
-  "": 23,
-  "": 24,
-  "": 25,
-  "": 26,
-  "": 27,
-  "": 28,
-  "": 29,
-  "": 30,
-  "": 31,
-  "": 32,
-  "": 33,
-  "": 34,
-  "": 35,
-  "": 36,
-  "": 37,
-  "": 38,
-  "ᠼ": 39,
-  "ᠽ": 40,
-  "ᠾ": 41,
-  "ᠿ": 42,
-  "ᡀ": 43,
-  "ᡁ": 44,
-  "ᡂ": 45,
-  "‌": 46,
-  "‍": 47,
-  "—": 48,
-  " ": 49,
-  "⁉": 50,
-  "ⅵ": 51,
-  "③": 52,
-  "⑤": 53,
-  "⑻": 54,
-  "⒁": 55,
-  "⒈": 56,
-  "⒋": 57,
-  "︔": 58,
-  "(": 59,
-  ")": 60,
-  "*": 61,
-  "-": 62,
-  "?": 63,
-  "|": 64,
-  "・": 65
+  "а": 1,
+  "б": 2,
+  "в": 3,
+  "г": 4,
+  "д": 5,
+  "е": 6,
+  "ж": 7,
+  "з": 8,
+  "и": 9,
+  "й": 10,
+  "к": 11,
+  "л": 12,
+  "м": 13,
+  "н": 14,
+  "о": 15,
+  "п": 16,
+  "р": 17,
+  "с": 18,
+  "т": 19,
+  "у": 20,
+  "ф": 21,
+  "х": 22,
+  "ц": 23,
+  "ч": 24,
+  "ш": 25,
+  "ъ": 26,
+  "ы": 27,
+  "ь": 28,
+  "э": 29,
+  "ю": 30,
+  "я": 31,
+  "ё": 32,
+  "ү": 33,
+  "ө": 34
 }
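
The rebuilt vocab.json drops the earlier Mongolian-script and punctuation entries in favor of 35 Cyrillic characters plus the "|" word delimiter, [UNK], and [PAD]. For illustration, a hedged sketch of how such a character-level vocabulary is typically wrapped in a Wav2Vec2CTCTokenizer (token names taken from this commit; the sample text is an arbitrary Cyrillic phrase):

from transformers import Wav2Vec2CTCTokenizer

# Build the CTC tokenizer directly from the vocab.json in this commit.
# The default <s>/</s> specials are not in vocab.json, so they are appended
# as added tokens (cf. added_tokens.json above).
tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",  # id 0, stands in for spaces during CTC decoding
)

# Character-level encoding: each Cyrillic letter maps to its id from vocab.json.
ids = tokenizer("сайн байна").input_ids
print(ids)
print(tokenizer.decode(ids))  # spaces are restored from the "|" delimiter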