{
  "architectures": [
    "BioGPTForICD10Classification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "code_to_idx": {
    "D62": 0,
    "D649": 1,
    "D696": 2,
    "E039": 3,
    "E1122": 4,
    "E119": 5,
    "E669": 6,
    "E785": 7,
    "E871": 8,
    "E872": 9,
    "F17210": 10,
    "F329": 11,
    "F419": 12,
    "G4700": 13,
    "G4733": 14,
    "G8929": 15,
    "I10": 16,
    "I110": 17,
    "I129": 18,
    "I130": 19,
    "I2510": 20,
    "I252": 21,
    "I480": 22,
    "I4891": 23,
    "I5032": 24,
    "J189": 25,
    "J449": 26,
    "J45909": 27,
    "J9601": 28,
    "K219": 29,
    "K5900": 30,
    "M109": 31,
    "N179": 32,
    "N183": 33,
    "N189": 34,
    "N390": 35,
    "N400": 36,
    "Y92230": 37,
    "Y92239": 38,
    "Y929": 39,
    "Z23": 40,
    "Z66": 41,
    "Z7901": 42,
    "Z7902": 43,
    "Z794": 44,
    "Z86718": 45,
    "Z8673": 46,
    "Z87891": 47,
    "Z951": 48,
    "Z955": 49
  },
  "dropout_rate": 0.2,
  "enable_enhanced_attention": true,
  "enable_enhanced_classifier": true,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "icd_codes": [
    "D62",
    "D649",
    "D696",
    "E039",
    "E1122",
    "E119",
    "E669",
    "E785",
    "E871",
    "E872",
    "F17210",
    "F329",
    "F419",
    "G4700",
    "G4733",
    "G8929",
    "I10",
    "I110",
    "I129",
    "I130",
    "I2510",
    "I252",
    "I480",
    "I4891",
    "I5032",
    "J189",
    "J449",
    "J45909",
    "J9601",
    "K219",
    "K5900",
    "M109",
    "N179",
    "N183",
    "N189",
    "N390",
    "N400",
    "Y92230",
    "Y92239",
    "Y929",
    "Z23",
    "Z66",
    "Z7901",
    "Z7902",
    "Z794",
    "Z86718",
    "Z8673",
    "Z87891",
    "Z951",
    "Z955"
  ],
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-12,
  "learnable_icd_embeddings": true,
  "max_position_embeddings": 1024,
  "model_type": "biogpt-icd10",
  "num_attention_heads": 16,
  "num_attention_layers": 16,
  "num_hidden_layers": 24,
  "num_icd_codes": 50,
  "performance_metrics": {
    "aggregation_method": "mean",
    "f1_macro": 0.6921683889110797,
    "f1_micro": 0.7425923267783542,
    "hamming_loss": 0.06360780339498354,
    "precision_macro": 0.6175372765898886,
    "precision_micro": 0.6570744275501688,
    "recall_macro": 0.7921473231592516,
    "recall_micro": 0.8537010843941537,
    "subset_accuracy": 0.09880922219407144
  },
  "prediction_threshold": 0.25,
  "scale_embedding": true,
  "torch_dtype": "float32",
  "training_details": {
    "epoch": 31,
    "timestamp": "2025-06-09T10:51:55.593755"
  },
  "transformers_version": "4.52.4",
  "use_attention_pooling": true,
  "use_cache": true,
  "use_hierarchical_attention": true,
  "vocab_size": 42384
}
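
For consumers of this config, a minimal sketch of how the code_to_idx mapping and prediction_threshold could be turned into ICD-10 code predictions. Assumptions: the file is saved locally as config.json, numpy is installed, and logits is a stand-in for the output of the model's classification head (the BioGPTForICD10Classification class is custom and is not loaded here).

# Minimal sketch: apply this config's code_to_idx and prediction_threshold
# to multi-label logits. The logits below are a hypothetical placeholder for
# the output of the model's 50-way classification head.
import json

import numpy as np

with open("config.json") as f:
    config = json.load(f)

idx_to_code = {idx: code for code, idx in config["code_to_idx"].items()}
threshold = config["prediction_threshold"]  # 0.25 per this config

# Hypothetical raw logits, one per ICD-10 code (num_icd_codes = 50).
logits = np.random.randn(config["num_icd_codes"])

# Sigmoid per label (multi-label setup), then threshold to select codes.
probs = 1.0 / (1.0 + np.exp(-logits))
predicted = [idx_to_code[i] for i, p in enumerate(probs) if p >= threshold]
print(predicted)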