kingabzpro committed
Commit 3ff1de8 · verified · 1 Parent(s): d198d47

Upload processor
Files changed (3)
  1. README.md +3 -3
  2. special_tokens_map.json +7 -1
  3. tokenizer_config.json +0 -1
README.md CHANGED
@@ -12,8 +12,8 @@ model-index:
 - name: whisper-tiny-urdu
   results:
   - task:
-      name: Automatic Speech Recognition
       type: automatic-speech-recognition
+      name: Automatic Speech Recognition
     dataset:
       name: common_voice_17_0
       type: common_voice_17_0
@@ -21,9 +21,9 @@ model-index:
       split: test[:600]
       args: ur
     metrics:
-    - name: Wer
-      type: wer
+    - type: wer
       value: 47.85287528005975
+      name: Wer
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
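The card's metadata reports a WER of 47.85 on the Urdu `test[:600]` split of Common Voice 17.0. The evaluation script is not part of this commit, so the following is only a minimal sketch of how such a figure could be reproduced; the `mozilla-foundation/common_voice_17_0` dataset id and the absence of extra text normalization are assumptions, not facts from the diff.

```python
# Minimal sketch of reproducing the card's WER metric (not the author's script).
# Assumptions: the dataset lives at mozilla-foundation/common_voice_17_0 and no
# additional text normalization is applied before scoring.
from datasets import load_dataset
from evaluate import load
from transformers import pipeline

asr = pipeline("automatic-speech-recognition", model="kingabzpro/whisper-tiny-urdu")
ds = load_dataset("mozilla-foundation/common_voice_17_0", "ur", split="test[:600]")

predictions = [asr(sample["audio"])["text"] for sample in ds]
wer = load("wer")
score = wer.compute(predictions=predictions, references=ds["sentence"])
print(f"WER: {100 * score:.2f}")  # the card reports 47.85 on this split
```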
special_tokens_map.json CHANGED
@@ -122,7 +122,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -12983,7 +12983,6 @@
   "extra_special_tokens": {},
   "model_max_length": 1024,
   "pad_token": "<|endoftext|>",
-  "padding_side": "right",
   "processor_class": "WhisperProcessor",
   "return_attention_mask": false,
   "tokenizer_class": "WhisperTokenizer",