Qwentify3-4B-adibun / tokenizer_config.json
{
  "added_tokens_decoder": {
    "0": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3802": {
      "content": "user",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "4535": {
      "content": "system",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "116061": {
      "content": "assistant",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127900": {
      "content": "<think>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127901": {
      "content": "</think>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127902": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127903": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127904": {
      "content": "BiBo",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127905": {
      "content": "aloobun",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127906": {
      "content": "LowIQGenAI",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127907": {
      "content": "IndieTechie",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127908": {
      "content": "Swamy",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "127909": {
      "content": "AdarshXs",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "extra_special_tokens": {},
  "model_max_length": 8192,
  "pad_token": "<|endoftext|>",
  "padding_side": "right",
  "tokenizer_class": "PreTrainedTokenizer"
}
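
For reference, a minimal sketch of how this config behaves when loaded with transformers. The repo id is an assumption inferred from the file path above, and the exact decode output may vary by transformers version:

```python
from transformers import AutoTokenizer

# Repo id assumed from the file path (aloobun/Qwentify3-4B-adibun);
# adjust if the config lives elsewhere.
tok = AutoTokenizer.from_pretrained("aloobun/Qwentify3-4B-adibun")

# added_tokens_decoder maps token IDs to string contents. Only id 0
# (<|endoftext|>) is marked "special": true, so it is the sole token
# stripped by skip_special_tokens=True; the "special": false additions
# such as <think> (127900) and </think> (127901) survive decoding.
print(tok.eos_token, tok.eos_token_id)  # "<|endoftext|>" and its id (0)
print(tok.decode([127900, 127901]))     # the <think>...</think> markers

# eos_token and pad_token share the same string, so padded positions
# reuse the end-of-text id; per the config, padding is applied on the
# right and model_max_length caps inputs at 8192 tokens.
batch = tok(["hi", "a longer example"], padding=True)
print(batch["input_ids"])
```

Because pad_token equals eos_token here, downstream training code should rely on the attention mask to ignore padded positions rather than on a distinct pad id.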