Upload tokenizer
f713287
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [
    {
      "id": 2,
      "content": "<pad>",
      "single_word": false,
      "lstrip": false,
      "rstrip": false,
      "normalized": false,
      "special": true
    }
  ],
  "normalizer": null,
  "pre_tokenizer": {
    "type": "Whitespace"
  },
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "WordLevel",
    "vocab": {
      "<unk>": 0,
      "<bos>": 1,
      "<pad>": 2,
      ":": 3,
      "S0": 4,
      "S1": 5,
      "S2": 6,
      "S3": 7,
      "S4": 8,
      "a0": 9,
      "a1": 10,
      "a2": 11,
      "a3": 12,
      "a4": 13,
      "a5": 14,
      "a6": 15,
      "a7": 16,
      "a8": 17,
      "a9": 18
    },
    "unk_token": "<unk>"
  }
}
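This config defines a WordLevel tokenizer over a 19-entry vocabulary: three special tokens (<unk>, <bos>, <pad>), a ":" separator, state tokens S0-S4, and action tokens a0-a9, with Whitespace pre-tokenization and no normalizer, post-processor, or decoder. A minimal sketch of loading and using it with the Hugging Face tokenizers library follows; the file name "tokenizer.json" and the sample input string are assumptions for illustration, not taken from the repo.

from tokenizers import Tokenizer

# Load the serialized tokenizer (file name assumed to be tokenizer.json).
tok = Tokenizer.from_file("tokenizer.json")

# The Whitespace pre-tokenizer splits the input into alphanumeric runs
# and punctuation groups; the WordLevel model then maps each piece to
# its vocab id, falling back to <unk> (id 0) for out-of-vocabulary
# pieces. The sample string below is hypothetical.
enc = tok.encode("S0 : a3 a7")
print(enc.tokens)  # ['S0', ':', 'a3', 'a7']
print(enc.ids)     # [4, 3, 12, 16]

Because <pad> (id 2) is registered in added_tokens with "special": true, it can be stripped on the way back out with tok.decode(ids, skip_special_tokens=True).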