Yasinjan99 committed
Commit 31f0fd2 · Parent(s): ff420c0

Upload tokenizer

added_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "</s>": 41,
+  "<s>": 40
+}
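
For context, the two entries above register <s> (id 40) and </s> (id 41) as added tokens on top of the 40-entry base vocabulary in vocab.json below. A minimal loading sketch, assuming the four files in this commit are saved to a local folder (the directory name "tokenizer_dir" is hypothetical):

from transformers import Wav2Vec2CTCTokenizer

# Load the character-level CTC tokenizer from the committed files.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("tokenizer_dir")
print(len(tokenizer))  # 42: 40 base vocab entries plus the added <s> and </s>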
special_tokens_map.json ADDED
@@ -0,0 +1,22 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "[PAD]",
+  "unk_token": "[UNK]"
+}
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+{
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": true,
+  "do_lower_case": false,
+  "eos_token": "</s>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "[PAD]",
+  "replace_word_delimiter_char": " ",
+  "target_lang": null,
+  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "unk_token": "[UNK]",
+  "word_delimiter_token": "|"
+}
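
Continuing the sketch above, the special-token ids resolve against added_tokens.json and the vocab as the files suggest:

print(tokenizer.bos_token_id)  # 40 (<s>, from added_tokens.json)
print(tokenizer.eos_token_id)  # 41 (</s>, from added_tokens.json)
print(tokenizer.pad_token_id)  # 39 ([PAD], which Wav2Vec2 CTC models use as the blank)
print(tokenizer.unk_token_id)  # 38 ([UNK])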
vocab.json ADDED
@@ -0,0 +1,42 @@
+{
+  "[PAD]": 39,
+  "[UNK]": 38,
+  "|": 0,
+  "а": 1,
+  "б": 2,
+  "в": 3,
+  "г": 4,
+  "д": 5,
+  "е": 6,
+  "ж": 7,
+  "з": 8,
+  "и": 9,
+  "к": 10,
+  "л": 11,
+  "м": 12,
+  "н": 13,
+  "о": 14,
+  "п": 15,
+  "р": 16,
+  "с": 17,
+  "т": 18,
+  "у": 19,
+  "ф": 20,
+  "х": 21,
+  "ц": 22,
+  "ш": 23,
+  "ы": 24,
+  "ь": 25,
+  "қ": 26,
+  "ҟ": 27,
+  "ҩ": 28,
+  "ҭ": 29,
+  "ҳ": 30,
+  "ҵ": 31,
+  "ҽ": 32,
+  "ҿ": 33,
+  "ә": 34,
+  "ӡ": 35,
+  "ӷ": 36,
+  "ԥ": 37
+}
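
Continuing the same sketch: Wav2Vec2CTCTokenizer maps each character to its id in this vocab and substitutes the word_delimiter_token "|" (id 0) for spaces; decoding reverses this, rendering "|" as the replace_word_delimiter_char (" "). The example strings below are illustrative only:

# Encode: per-character lookup; " " becomes "|" (id 0).
ids = tokenizer("аб ба").input_ids
print(ids)  # [1, 2, 0, 2, 1]

# Decode groups repeated ids CTC-style by default, then maps "|" back to " ".
print(tokenizer.decode(ids))  # "аб ба"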