{
  "version": "1.0",
  "model_type": "GPT-2",
  "tokenizer_class": "GPT2Tokenizer",
  "vocab_file": "vocab.json",
  "merges_file": "merges.txt",
  "special_tokens_map": {
    "pad_token": "<PAD>",
    "unk_token": "<UNK>",
    "cls_token": "<CLS>",
    "sep_token": "<SEP>",
    "mask_token": "<MASK>"
  }
}
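
Note that this JSON is not the standard Hugging Face fast-tokenizer serialization, so AutoTokenizer will not parse it directly. A minimal Python sketch of how such a config could be consumed, assuming vocab.json and merges.txt sit alongside this file and the declared special tokens exist in the vocabulary:

import json
from transformers import GPT2Tokenizer

# Read this config and construct the slow GPT-2 tokenizer it describes.
with open("tokenizer.json") as f:
    cfg = json.load(f)

tokenizer = GPT2Tokenizer(
    vocab_file=cfg["vocab_file"],
    merges_file=cfg["merges_file"],
    **cfg["special_tokens_map"],  # pad/unk/cls/sep/mask tokens from above
)

print(tokenizer.tokenize("Hello world"))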