{
"architectures": [
"BioGPTForICD10Classification"
],
"attention_probs_dropout_prob": 0.1,
"code_to_idx": {
"D62": 0,
"D649": 1,
"D696": 2,
"E039": 3,
"E1122": 4,
"E119": 5,
"E669": 6,
"E785": 7,
"E871": 8,
"E872": 9,
"F17210": 10,
"F329": 11,
"F419": 12,
"G4700": 13,
"G4733": 14,
"G8929": 15,
"I10": 16,
"I110": 17,
"I129": 18,
"I130": 19,
"I2510": 20,
"I252": 21,
"I480": 22,
"I4891": 23,
"I5032": 24,
"J189": 25,
"J449": 26,
"J45909": 27,
"J9601": 28,
"K219": 29,
"K5900": 30,
"M109": 31,
"N179": 32,
"N183": 33,
"N189": 34,
"N390": 35,
"N400": 36,
"Y92230": 37,
"Y92239": 38,
"Y929": 39,
"Z23": 40,
"Z66": 41,
"Z7901": 42,
"Z7902": 43,
"Z794": 44,
"Z86718": 45,
"Z8673": 46,
"Z87891": 47,
"Z951": 48,
"Z955": 49
},
"dropout_rate": 0.2,
"enable_enhanced_attention": true,
"enable_enhanced_classifier": true,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"icd_codes": [
"D62",
"D649",
"D696",
"E039",
"E1122",
"E119",
"E669",
"E785",
"E871",
"E872",
"F17210",
"F329",
"F419",
"G4700",
"G4733",
"G8929",
"I10",
"I110",
"I129",
"I130",
"I2510",
"I252",
"I480",
"I4891",
"I5032",
"J189",
"J449",
"J45909",
"J9601",
"K219",
"K5900",
"M109",
"N179",
"N183",
"N189",
"N390",
"N400",
"Y92230",
"Y92239",
"Y929",
"Z23",
"Z66",
"Z7901",
"Z7902",
"Z794",
"Z86718",
"Z8673",
"Z87891",
"Z951",
"Z955"
],
"initializer_range": 0.02,
"intermediate_size": 4096,
"layer_norm_eps": 1e-12,
"learnable_icd_embeddings": true,
"max_position_embeddings": 1024,
"model_type": "biogpt-icd10",
"num_attention_heads": 16,
"num_attention_layers": 16,
"num_hidden_layers": 24,
"num_icd_codes": 50,
"performance_metrics": {
"aggregation_method": "mean",
"f1_macro": 0.6921683889110797,
"f1_micro": 0.7425923267783542,
"hamming_loss": 0.06360780339498354,
"precision_macro": 0.6175372765898886,
"precision_micro": 0.6570744275501688,
"recall_macro": 0.7921473231592516,
"recall_micro": 0.8537010843941537,
"subset_accuracy": 0.09880922219407144
},
"prediction_threshold": 0.25,
"scale_embedding": true,
"torch_dtype": "float32",
"training_details": {
"epoch": 31,
"timestamp": "2025-06-09T10:51:55.593755"
},
"transformers_version": "4.52.4",
"use_attention_pooling": true,
"use_cache": true,
"use_hierarchical_attention": true,
"vocab_size": 42384
}