roberta-parser / config.json
{
"_name_or_path": "roberta-base",
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "U-PER",
"1": "O",
"2": "B-T_EXP",
"3": "I-T_EXP",
"4": "L-T_EXP",
"5": "B-PER",
"6": "L-PER",
"7": "U-PHONE",
"8": "U-EMAIL",
"9": "B-CERTIFICATE",
"10": "I-CERTIFICATE",
"11": "L-CERTIFICATE",
"12": "B-G_YEAR",
"13": "I-G_YEAR",
"14": "L-G_YEAR",
"15": "B-G_IN",
"16": "I-G_IN",
"17": "L-G_IN",
"18": "B-G_CLG",
"19": "I-G_CLG",
"20": "L-G_CLG",
"21": "B-INTER_YEAR",
"22": "I-INTER_YEAR",
"23": "L-INTER_YEAR",
"24": "B-12_PER",
"25": "L-12_PER",
"26": "B-HIGH_YEAR",
"27": "I-HIGH_YEAR",
"28": "L-HIGH_YEAR",
"29": "B-HIGH_FROM",
"30": "I-HIGH_FROM",
"31": "L-HIGH_FROM",
"32": "B-10_PER",
"33": "L-10_PER",
"34": "U-DOB",
"35": "I-PER",
"36": "B-PHONE",
"37": "I-PHONE",
"38": "L-PHONE",
"39": "B-C_DESIG",
"40": "L-C_DESIG",
"41": "B-O_COMPANY",
"42": "I-O_COMPANY",
"43": "L-O_COMPANY",
"44": "B-GRAD_PER",
"45": "L-GRAD_PER",
"46": "B-INTER_FROM",
"47": "I-INTER_FROM",
"48": "L-INTER_FROM",
"49": "B-SKILLS",
"50": "I-SKILLS",
"51": "L-SKILLS",
"52": "U-PG_IN",
"53": "B-PG_FROM",
"54": "I-PG_FROM",
"55": "L-PG_FROM",
"56": "B-PG_YEAR",
"57": "L-PG_YEAR",
"58": "B-C_COMPANY",
"59": "I-C_COMPANY",
"60": "L-C_COMPANY",
"61": "B-PG_IN",
"62": "I-PG_IN",
"63": "L-PG_IN",
"64": "U-PG_YEAR",
"65": "U-G_IN",
"66": "U-G_YEAR",
"67": "I-C_DESIG",
"68": "U-SKILLS",
"69": "U-GRAD_PER",
"70": "B-PROJECTS",
"71": "I-PROJECTS",
"72": "L-PROJECTS",
"73": "U-C_COMPANY",
"74": "U-INTER_YEAR",
"75": "U-HIGH_YEAR",
"76": "B-EMAIL",
"77": "I-EMAIL",
"78": "L-EMAIL",
"79": "B-TOOLS_USED",
"80": "I-TOOLS_USED",
"81": "L-TOOLS_USED",
"82": "I-PG_YEAR",
"83": "I-GRAD_PER",
"84": "U-TOOLS_USED",
"85": "I-12_PER",
"86": "I-10_PER",
"87": "U-T_EXP",
"88": "U-PROJECTS",
"89": "B-DOB",
"90": "L-DOB",
"91": "U-HIGH_FROM",
"92": "I-DOB",
"93": "U-12_PER",
"94": "U-10_PER",
"95": "U-G_CLG",
"96": "U-O_COMPANY",
"97": "U-INTER_FROM",
"98": "U-C_DESIG",
"99": "U-PG_FROM"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-10_PER": 32,
"B-12_PER": 24,
"B-CERTIFICATE": 9,
"B-C_COMPANY": 58,
"B-C_DESIG": 39,
"B-DOB": 89,
"B-EMAIL": 76,
"B-GRAD_PER": 44,
"B-G_CLG": 18,
"B-G_IN": 15,
"B-G_YEAR": 12,
"B-HIGH_FROM": 29,
"B-HIGH_YEAR": 26,
"B-INTER_FROM": 46,
"B-INTER_YEAR": 21,
"B-O_COMPANY": 41,
"B-PER": 5,
"B-PG_FROM": 53,
"B-PG_IN": 61,
"B-PG_YEAR": 56,
"B-PHONE": 36,
"B-PROJECTS": 70,
"B-SKILLS": 49,
"B-TOOLS_USED": 79,
"B-T_EXP": 2,
"I-10_PER": 86,
"I-12_PER": 85,
"I-CERTIFICATE": 10,
"I-C_COMPANY": 59,
"I-C_DESIG": 67,
"I-DOB": 92,
"I-EMAIL": 77,
"I-GRAD_PER": 83,
"I-G_CLG": 19,
"I-G_IN": 16,
"I-G_YEAR": 13,
"I-HIGH_FROM": 30,
"I-HIGH_YEAR": 27,
"I-INTER_FROM": 47,
"I-INTER_YEAR": 22,
"I-O_COMPANY": 42,
"I-PER": 35,
"I-PG_FROM": 54,
"I-PG_IN": 62,
"I-PG_YEAR": 82,
"I-PHONE": 37,
"I-PROJECTS": 71,
"I-SKILLS": 50,
"I-TOOLS_USED": 80,
"I-T_EXP": 3,
"L-10_PER": 33,
"L-12_PER": 25,
"L-CERTIFICATE": 11,
"L-C_COMPANY": 60,
"L-C_DESIG": 40,
"L-DOB": 90,
"L-EMAIL": 78,
"L-GRAD_PER": 45,
"L-G_CLG": 20,
"L-G_IN": 17,
"L-G_YEAR": 14,
"L-HIGH_FROM": 31,
"L-HIGH_YEAR": 28,
"L-INTER_FROM": 48,
"L-INTER_YEAR": 23,
"L-O_COMPANY": 43,
"L-PER": 6,
"L-PG_FROM": 55,
"L-PG_IN": 63,
"L-PG_YEAR": 57,
"L-PHONE": 38,
"L-PROJECTS": 72,
"L-SKILLS": 51,
"L-TOOLS_USED": 81,
"L-T_EXP": 4,
"O": 1,
"U-10_PER": 94,
"U-12_PER": 93,
"U-C_COMPANY": 73,
"U-C_DESIG": 98,
"U-DOB": 34,
"U-EMAIL": 8,
"U-GRAD_PER": 69,
"U-G_CLG": 95,
"U-G_IN": 65,
"U-G_YEAR": 66,
"U-HIGH_FROM": 91,
"U-HIGH_YEAR": 75,
"U-INTER_FROM": 97,
"U-INTER_YEAR": 74,
"U-O_COMPANY": 96,
"U-PER": 0,
"U-PG_FROM": 99,
"U-PG_IN": 52,
"U-PG_YEAR": 64,
"U-PHONE": 7,
"U-PROJECTS": 88,
"U-SKILLS": 68,
"U-TOOLS_USED": 84,
"U-T_EXP": 87
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.22.2",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}
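
This config defines a RobertaForTokenClassification head over roberta-base with 100 resume-field labels in a BILOU tagging scheme (B-/I-/L-/U- prefixes plus O, covering entities such as PER, EMAIL, PHONE, SKILLS, and education/experience fields). Below is a minimal usage sketch, not the author's published inference code: it assumes the checkpoint is hosted as "kunalr63/roberta-parser" (inferred from the page header), and the sample text plus the span-grouping loop are illustrative assumptions.

# Minimal sketch: load the checkpoint and decode its BILOU tags into entity spans.
# The repo id "kunalr63/roberta-parser" is assumed from the page header; the input
# text is a hypothetical resume snippet.
import torch
from transformers import AutoModelForTokenClassification, AutoTokenizer

model_id = "kunalr63/roberta-parser"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

text = "John Doe  john.doe@example.com  +1 555 010 0000"
enc = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**enc).logits

pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(enc["input_ids"][0].tolist())

# Group tokens into entities using the id2label map above:
# U-* is a single-token entity, B-* opens a span, I-* continues it, L-* closes it.
entities, current = [], None
for token, idx in zip(tokens, pred_ids):
    tag = model.config.id2label[idx]
    if tag == "O" or token in tokenizer.all_special_tokens:
        if current:
            entities.append(current)
            current = None
        continue
    prefix, label = tag.split("-", 1)
    if prefix in ("B", "U") or current is None or current[0] != label:
        if current:
            entities.append(current)
        current = [label, [token]]
    else:  # I-/L- continues the currently open span
        current[1].append(token)
    if prefix in ("U", "L"):  # unit-length or last token ends the span
        entities.append(current)
        current = None
if current:
    entities.append(current)

for label, pieces in entities:
    print(label, tokenizer.convert_tokens_to_string(pieces).strip())

Decoding manually like this keeps the L-/U- tags meaningful; the generic token-classification pipeline's aggregation strategies are built around BIO-style schemes and may merge these spans differently.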