lettuce_pos_en_mono / config.json
{
  "_name_or_path": "FacebookAI/roberta-base",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "finetuning_task": "pos",
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "WP",
    "1": "VB",
    "2": "RB",
    "3": "terrorist\t#",
    "4": "''",
    "5": "`",
    "6": "VBD",
    "7": "RBS",
    "8": "PRP$",
    "9": "NN",
    "10": "PRP",
    "11": "NNP",
    "12": "RP",
    "13": "\tSYM",
    "14": "WDT",
    "15": "U",
    "16": "JJ",
    "17": "JJR",
    "18": "FW",
    "19": "POS",
    "20": "CD",
    "21": "VBN",
    "22": "RBR",
    "23": "hero\t#",
    "24": ",",
    "25": "it",
    "26": ":",
    "27": "Ready\t#",
    "28": "WRB",
    "29": "VBP",
    "30": "NNPS",
    "31": "$",
    "32": "TO",
    "33": "VBG",
    "34": ")",
    "35": "JJS",
    "36": "#",
    "37": "sleepy\t#",
    "38": "IN",
    "39": "\tPRP",
    "40": "``",
    "41": "PDT",
    "42": "@",
    "43": "DT",
    "44": "VBZ",
    "45": "NNS",
    "46": "LS",
    "47": ".",
    "48": "\tDT",
    "49": "EX",
    "50": "SYM",
    "51": "CC",
    "52": "UH",
    "53": "MD",
    "54": "(",
    "55": "WP$"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "\tDT": 48,
    "\tPRP": 39,
    "\tSYM": 13,
    "#": 36,
    "$": 31,
    "''": 4,
    "(": 54,
    ")": 34,
    ",": 24,
    ".": 47,
    ":": 26,
    "@": 42,
    "CC": 51,
    "CD": 20,
    "DT": 43,
    "EX": 49,
    "FW": 18,
    "IN": 38,
    "JJ": 16,
    "JJR": 17,
    "JJS": 35,
    "LS": 46,
    "MD": 53,
    "NN": 9,
    "NNP": 11,
    "NNPS": 30,
    "NNS": 45,
    "PDT": 41,
    "POS": 19,
    "PRP": 10,
    "PRP$": 8,
    "RB": 2,
    "RBR": 22,
    "RBS": 7,
    "RP": 12,
    "Ready\t#": 27,
    "SYM": 50,
    "TO": 32,
    "U": 15,
    "UH": 52,
    "VB": 1,
    "VBD": 6,
    "VBG": 33,
    "VBN": 21,
    "VBP": 29,
    "VBZ": 44,
    "WDT": 14,
    "WP": 0,
    "WP$": 55,
    "WRB": 28,
    "`": 5,
    "``": 40,
    "hero\t#": 23,
    "it": 25,
    "sleepy\t#": 37,
    "terrorist\t#": 3
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.25.1",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
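
For context, this config describes a roberta-base encoder fine-tuned for token classification (Penn Treebank-style POS tagging) with 56 output labels. Below is a minimal sketch of how such a config and checkpoint are typically loaded with the transformers library; the model_path value is an assumption based on the folder name, so point it at the actual repository or local directory that contains this config.json along with the model weights and tokenizer files.

```python
# Minimal sketch: load the config/checkpoint described above and run POS tagging.
# model_path is an assumed placeholder; replace it with the real repo id or local path.
import torch
from transformers import AutoConfig, AutoModelForTokenClassification, AutoTokenizer

model_path = "lettuce_pos_en_mono"  # assumption: folder holding this config.json + weights

config = AutoConfig.from_pretrained(model_path)
print(config.num_labels)    # 56, derived from the id2label mapping above
print(config.id2label[9])   # "NN"

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForTokenClassification.from_pretrained(model_path)
model.eval()

# Tag a sentence: take the argmax over label logits for each subword token
# and map it back to a tag via config.id2label.
text = "The quick brown fox jumps over the lazy dog ."
enc = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**enc).logits          # shape: (1, seq_len, num_labels)
pred_ids = logits.argmax(dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(enc["input_ids"][0].tolist())

# Note: special tokens (<s>, </s>) are included here; filter them if needed.
for tok, pid in zip(tokens, pred_ids):
    print(tok, config.id2label[pid])
```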