iflytech_heqc_cls / config.json
{
"_name_or_path": "FacebookAI/xlm-roberta-base",
"architectures": [
"XLMRobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "\u4e0d\u597d\u5f00\u53e3",
"1": "\u5b89\u6170\u9f13\u52b1",
"2": "\u5ba2\u5957\u56de\u5e94",
"3": "\u611f\u8c22",
"4": "\u62d2\u7edd",
"5": "\u65e5\u5e38\u751f\u6d3b",
"6": "\u7231\u610f\u3001\u60f3\u5ff5\u4e0e\u4eb2\u5bc6\u5173\u7cfb",
"7": "\u7591\u95ee",
"8": "\u795d\u798f",
"9": "\u79f0\u8d5e",
"10": "\u7b26\u53f7\u4e0e\u8bed\u6c14\u8bcd",
"11": "\u8868\u8fbe\u60c5\u7eea",
"12": "\u8bed\u4e49\u4e0d\u5b8c\u6574",
"13": "\u8d1f\u9762\u653b\u51fb",
"14": "\u9053\u6b49",
"15": "\u95ee\u5019\u5bd2\u6684"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"\u4e0d\u597d\u5f00\u53e3": 0,
"\u5b89\u6170\u9f13\u52b1": 1,
"\u5ba2\u5957\u56de\u5e94": 2,
"\u611f\u8c22": 3,
"\u62d2\u7edd": 4,
"\u65e5\u5e38\u751f\u6d3b": 5,
"\u7231\u610f\u3001\u60f3\u5ff5\u4e0e\u4eb2\u5bc6\u5173\u7cfb": 6,
"\u7591\u95ee": 7,
"\u795d\u798f": 8,
"\u79f0\u8d5e": 9,
"\u7b26\u53f7\u4e0e\u8bed\u6c14\u8bcd": 10,
"\u8868\u8fbe\u60c5\u7eea": 11,
"\u8bed\u4e49\u4e0d\u5b8c\u6574": 12,
"\u8d1f\u9762\u653b\u51fb": 13,
"\u9053\u6b49": 14,
"\u95ee\u5019\u5bd2\u6684": 15
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "xlm-roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_past": true,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "single_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.47.1",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 250002
}
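
For reference, a minimal usage sketch of the classifier this config describes. It assumes the checkpoint is published under the repo shown in the header, BrightXiaoHan/iflytech_heqc_cls (an assumption; substitute the actual model id or a local checkpoint directory). The config defines an XLM-RoBERTa base encoder (12 layers, hidden size 768) with a 16-way single-label classification head whose labels are Chinese dialogue/utterance categories such as 感谢 (thanks), 道歉 (apology), 拒绝 (refusal), and 问候寒暄 (greetings and small talk).

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumed model id taken from the repo path above; replace with a local
# checkpoint path if the model is not hosted under this name.
model_id = "BrightXiaoHan/iflytech_heqc_cls"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

# Classify a single utterance; id2label from this config maps the argmax
# logit back to its Chinese category name.
text = "谢谢你，辛苦了"
inputs = tokenizer(text, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits
pred = logits.argmax(dim=-1).item()
print(pred, model.config.id2label[pred])

Because problem_type is single_label_classification, the head is trained with cross-entropy over the 16 classes, so taking the argmax of the logits (as above) is the intended way to read off a prediction.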