prithivMLmods committed
Commit c9a3d12 · verified · 1 Parent(s): 1842005

Upload folder using huggingface_hub
checkpoint-1250/config.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "activation_dropout": 0.0,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForSequenceClassification"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 256,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": false,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": false,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_norm": "group",
+ "feat_proj_dropout": 0.1,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "freeze_feat_extract_train": true,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "ANG",
+ "1": "CAL",
+ "2": "DIS",
+ "3": "FEA",
+ "4": "HAP",
+ "5": "NEU",
+ "6": "SAD",
+ "7": "SUR"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "ANG": 0,
+ "CAL": 1,
+ "DIS": 2,
+ "FEA": 3,
+ "HAP": 4,
+ "NEU": 5,
+ "SAD": 6,
+ "SUR": 7
+ },
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.05,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "no_mask_channel_overlap": false,
+ "no_mask_time_overlap": false,
+ "num_adapter_layers": 3,
+ "num_attention_heads": 12,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 12,
+ "num_negatives": 100,
+ "output_hidden_size": 768,
+ "pad_token_id": 0,
+ "proj_codevector_dim": 256,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.50.0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32,
+ "xvector_output_dim": 512
+ }
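The id2label / label2id maps above define the eight emotion classes this Wav2Vec2ForSequenceClassification head was fine-tuned for. A minimal sketch of reading that mapping straight out of the committed config.json with the standard library; the local path is an assumption:

```python
import json

# Hypothetical local path to the downloaded checkpoint directory.
CONFIG_PATH = "checkpoint-1250/config.json"

with open(CONFIG_PATH) as f:
    config = json.load(f)

# Maps class indices to emotion codes, e.g. 0 -> "ANG", 5 -> "NEU".
id2label = {int(k): v for k, v in config["id2label"].items()}
print(id2label)
```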
checkpoint-1250/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4290ed2fe2b2aadf66d311c05986234ccfc4e7a7e352ed0379e9f44888f12915
+ size 378308536
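model.safetensors is committed as a Git LFS pointer: the repository stores only the SHA-256 oid and the byte size (about 378 MB), while the weights live in LFS storage. A hedged sketch of checking a downloaded file against the pointer shown above; the local file path is an assumption:

```python
import hashlib
import os

# Values copied from the LFS pointer above.
EXPECTED_OID = "4290ed2fe2b2aadf66d311c05986234ccfc4e7a7e352ed0379e9f44888f12915"
EXPECTED_SIZE = 378308536

# Assumed local path of the downloaded weight file.
path = "checkpoint-1250/model.safetensors"

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_OID, "hash mismatch"
print("model.safetensors matches the LFS pointer")
```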
checkpoint-1250/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ad4d084b1de650226a3ae204901aa83416cec45b5866341393a3c0c781f97af
+ size 723133690
checkpoint-1250/preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+ }
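Together, config.json and preprocessor_config.json are what transformers needs to reload this classifier: the feature extractor normalizes mono 16 kHz audio, and the model returns logits over the eight labels. A minimal inference sketch, assuming the checkpoint directory has been downloaded locally and the input is already a 16 kHz waveform:

```python
import torch
from transformers import Wav2Vec2FeatureExtractor, Wav2Vec2ForSequenceClassification

# Assumed local path to the downloaded checkpoint directory.
CKPT = "voice_emotion_classification/checkpoint-1250"

extractor = Wav2Vec2FeatureExtractor.from_pretrained(CKPT)
model = Wav2Vec2ForSequenceClassification.from_pretrained(CKPT)
model.eval()

# `waveform` is a 1-D float signal sampled at 16 kHz (matches sampling_rate above).
waveform = torch.zeros(16000)  # placeholder: one second of silence
inputs = extractor(waveform.numpy(), sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = int(logits.argmax(dim=-1))
print(model.config.id2label[predicted_id])
```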
checkpoint-1250/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbf9ae460161f6fa2f89862b5eb4fc6e64667a9348871fadd366e09363a90829
+ size 14372
checkpoint-1250/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0605c8e8178370db2d62980d895c879215a2449873b33f35503646ee93bdb029
+ size 988
checkpoint-1250/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47b14229da988503083d2571709a86a8077268c1850af987a7c46f851943718c
+ size 1064
checkpoint-1250/trainer_state.json ADDED
@@ -0,0 +1,927 @@
1
+ {
2
+ "best_global_step": 1250,
3
+ "best_metric": 0.7589222033223469,
4
+ "best_model_checkpoint": "voice_emotion_classification/checkpoint-1250",
5
+ "epoch": 1.0,
6
+ "eval_steps": 500,
7
+ "global_step": 1250,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0008,
14
+ "grad_norm": 1.4005043506622314,
15
+ "learning_rate": 6.000000000000001e-07,
16
+ "loss": 2.0903,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.008,
21
+ "grad_norm": 1.3180536031723022,
22
+ "learning_rate": 6e-06,
23
+ "loss": 2.0826,
24
+ "step": 10
25
+ },
26
+ {
27
+ "epoch": 0.016,
28
+ "grad_norm": 1.0437175035476685,
29
+ "learning_rate": 1.2e-05,
30
+ "loss": 2.0693,
31
+ "step": 20
32
+ },
33
+ {
34
+ "epoch": 0.024,
35
+ "grad_norm": 1.371071457862854,
36
+ "learning_rate": 1.8e-05,
37
+ "loss": 2.0463,
38
+ "step": 30
39
+ },
40
+ {
41
+ "epoch": 0.032,
42
+ "grad_norm": 1.51685631275177,
43
+ "learning_rate": 2.4e-05,
44
+ "loss": 2.0423,
45
+ "step": 40
46
+ },
47
+ {
48
+ "epoch": 0.04,
49
+ "grad_norm": 1.4282890558242798,
50
+ "learning_rate": 3e-05,
51
+ "loss": 1.996,
52
+ "step": 50
53
+ },
54
+ {
55
+ "epoch": 0.048,
56
+ "grad_norm": 1.860026478767395,
57
+ "learning_rate": 2.991891891891892e-05,
58
+ "loss": 1.9639,
59
+ "step": 60
60
+ },
61
+ {
62
+ "epoch": 0.056,
63
+ "grad_norm": 2.121481418609619,
64
+ "learning_rate": 2.983783783783784e-05,
65
+ "loss": 1.9517,
66
+ "step": 70
67
+ },
68
+ {
69
+ "epoch": 0.064,
70
+ "grad_norm": 2.056445837020874,
71
+ "learning_rate": 2.9756756756756758e-05,
72
+ "loss": 1.8765,
73
+ "step": 80
74
+ },
75
+ {
76
+ "epoch": 0.072,
77
+ "grad_norm": 2.3928184509277344,
78
+ "learning_rate": 2.9675675675675678e-05,
79
+ "loss": 1.8703,
80
+ "step": 90
81
+ },
82
+ {
83
+ "epoch": 0.08,
84
+ "grad_norm": 3.0733420848846436,
85
+ "learning_rate": 2.9594594594594598e-05,
86
+ "loss": 1.853,
87
+ "step": 100
88
+ },
89
+ {
90
+ "epoch": 0.088,
91
+ "grad_norm": 2.88864803314209,
92
+ "learning_rate": 2.9513513513513514e-05,
93
+ "loss": 1.7652,
94
+ "step": 110
95
+ },
96
+ {
97
+ "epoch": 0.096,
98
+ "grad_norm": 2.9048268795013428,
99
+ "learning_rate": 2.943243243243243e-05,
100
+ "loss": 1.7984,
101
+ "step": 120
102
+ },
103
+ {
104
+ "epoch": 0.104,
105
+ "grad_norm": 2.6991426944732666,
106
+ "learning_rate": 2.935135135135135e-05,
107
+ "loss": 1.6869,
108
+ "step": 130
109
+ },
110
+ {
111
+ "epoch": 0.112,
112
+ "grad_norm": 2.4570231437683105,
113
+ "learning_rate": 2.927027027027027e-05,
114
+ "loss": 1.7812,
115
+ "step": 140
116
+ },
117
+ {
118
+ "epoch": 0.12,
119
+ "grad_norm": 4.502678871154785,
120
+ "learning_rate": 2.918918918918919e-05,
121
+ "loss": 1.8444,
122
+ "step": 150
123
+ },
124
+ {
125
+ "epoch": 0.128,
126
+ "grad_norm": 3.117838144302368,
127
+ "learning_rate": 2.9108108108108108e-05,
128
+ "loss": 1.7544,
129
+ "step": 160
130
+ },
131
+ {
132
+ "epoch": 0.136,
133
+ "grad_norm": 2.812086343765259,
134
+ "learning_rate": 2.9027027027027028e-05,
135
+ "loss": 1.638,
136
+ "step": 170
137
+ },
138
+ {
139
+ "epoch": 0.144,
140
+ "grad_norm": 3.23271107673645,
141
+ "learning_rate": 2.8954054054054057e-05,
142
+ "loss": 1.7026,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.152,
147
+ "grad_norm": 3.975172996520996,
148
+ "learning_rate": 2.8872972972972977e-05,
149
+ "loss": 1.6715,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.16,
154
+ "grad_norm": 3.8965938091278076,
155
+ "learning_rate": 2.879189189189189e-05,
156
+ "loss": 1.6804,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.168,
161
+ "grad_norm": 4.267274856567383,
162
+ "learning_rate": 2.871081081081081e-05,
163
+ "loss": 1.6587,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.176,
168
+ "grad_norm": 3.524360179901123,
169
+ "learning_rate": 2.862972972972973e-05,
170
+ "loss": 1.4811,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.184,
175
+ "grad_norm": 3.266697883605957,
176
+ "learning_rate": 2.854864864864865e-05,
177
+ "loss": 1.672,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.192,
182
+ "grad_norm": 5.3684186935424805,
183
+ "learning_rate": 2.8467567567567567e-05,
184
+ "loss": 1.5284,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.2,
189
+ "grad_norm": 3.898176431655884,
190
+ "learning_rate": 2.8386486486486487e-05,
191
+ "loss": 1.5774,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.208,
196
+ "grad_norm": 3.189732074737549,
197
+ "learning_rate": 2.8305405405405407e-05,
198
+ "loss": 1.4874,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.216,
203
+ "grad_norm": 3.274244785308838,
204
+ "learning_rate": 2.8224324324324327e-05,
205
+ "loss": 1.5098,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.224,
210
+ "grad_norm": 5.691224098205566,
211
+ "learning_rate": 2.8143243243243244e-05,
212
+ "loss": 1.509,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.232,
217
+ "grad_norm": 6.856773376464844,
218
+ "learning_rate": 2.8062162162162164e-05,
219
+ "loss": 1.4558,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.24,
224
+ "grad_norm": 7.078716278076172,
225
+ "learning_rate": 2.7981081081081084e-05,
226
+ "loss": 1.5298,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.248,
231
+ "grad_norm": 4.4305100440979,
232
+ "learning_rate": 2.79e-05,
233
+ "loss": 1.3387,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.256,
238
+ "grad_norm": 10.400449752807617,
239
+ "learning_rate": 2.7818918918918917e-05,
240
+ "loss": 1.4501,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.264,
245
+ "grad_norm": 5.316948890686035,
246
+ "learning_rate": 2.7737837837837837e-05,
247
+ "loss": 1.367,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.272,
252
+ "grad_norm": 9.753177642822266,
253
+ "learning_rate": 2.7656756756756757e-05,
254
+ "loss": 1.4684,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.28,
259
+ "grad_norm": 8.100529670715332,
260
+ "learning_rate": 2.7575675675675677e-05,
261
+ "loss": 1.4175,
262
+ "step": 350
263
+ },
264
+ {
265
+ "epoch": 0.288,
266
+ "grad_norm": 9.878854751586914,
267
+ "learning_rate": 2.7494594594594594e-05,
268
+ "loss": 1.308,
269
+ "step": 360
270
+ },
271
+ {
272
+ "epoch": 0.296,
273
+ "grad_norm": 5.865877151489258,
274
+ "learning_rate": 2.7413513513513514e-05,
275
+ "loss": 1.3035,
276
+ "step": 370
277
+ },
278
+ {
279
+ "epoch": 0.304,
280
+ "grad_norm": 7.870754241943359,
281
+ "learning_rate": 2.7332432432432434e-05,
282
+ "loss": 1.2915,
283
+ "step": 380
284
+ },
285
+ {
286
+ "epoch": 0.312,
287
+ "grad_norm": 8.517908096313477,
288
+ "learning_rate": 2.7251351351351354e-05,
289
+ "loss": 1.4318,
290
+ "step": 390
291
+ },
292
+ {
293
+ "epoch": 0.32,
294
+ "grad_norm": 4.7960309982299805,
295
+ "learning_rate": 2.717027027027027e-05,
296
+ "loss": 1.3154,
297
+ "step": 400
298
+ },
299
+ {
300
+ "epoch": 0.328,
301
+ "grad_norm": 5.629390716552734,
302
+ "learning_rate": 2.708918918918919e-05,
303
+ "loss": 1.3433,
304
+ "step": 410
305
+ },
306
+ {
307
+ "epoch": 0.336,
308
+ "grad_norm": 8.473249435424805,
309
+ "learning_rate": 2.700810810810811e-05,
310
+ "loss": 1.1474,
311
+ "step": 420
312
+ },
313
+ {
314
+ "epoch": 0.344,
315
+ "grad_norm": 3.652617931365967,
316
+ "learning_rate": 2.6927027027027028e-05,
317
+ "loss": 1.3247,
318
+ "step": 430
319
+ },
320
+ {
321
+ "epoch": 0.352,
322
+ "grad_norm": 4.9890055656433105,
323
+ "learning_rate": 2.6845945945945944e-05,
324
+ "loss": 1.3347,
325
+ "step": 440
326
+ },
327
+ {
328
+ "epoch": 0.36,
329
+ "grad_norm": 5.2355055809021,
330
+ "learning_rate": 2.6764864864864864e-05,
331
+ "loss": 1.0932,
332
+ "step": 450
333
+ },
334
+ {
335
+ "epoch": 0.368,
336
+ "grad_norm": 6.325026512145996,
337
+ "learning_rate": 2.6683783783783785e-05,
338
+ "loss": 1.4873,
339
+ "step": 460
340
+ },
341
+ {
342
+ "epoch": 0.376,
343
+ "grad_norm": 6.78115701675415,
344
+ "learning_rate": 2.6602702702702705e-05,
345
+ "loss": 1.2311,
346
+ "step": 470
347
+ },
348
+ {
349
+ "epoch": 0.384,
350
+ "grad_norm": 4.194353103637695,
351
+ "learning_rate": 2.652162162162162e-05,
352
+ "loss": 1.2493,
353
+ "step": 480
354
+ },
355
+ {
356
+ "epoch": 0.392,
357
+ "grad_norm": 3.8817057609558105,
358
+ "learning_rate": 2.644054054054054e-05,
359
+ "loss": 1.1237,
360
+ "step": 490
361
+ },
362
+ {
363
+ "epoch": 0.4,
364
+ "grad_norm": 6.7539520263671875,
365
+ "learning_rate": 2.635945945945946e-05,
366
+ "loss": 1.1135,
367
+ "step": 500
368
+ },
369
+ {
370
+ "epoch": 0.408,
371
+ "grad_norm": 9.044737815856934,
372
+ "learning_rate": 2.627837837837838e-05,
373
+ "loss": 1.2459,
374
+ "step": 510
375
+ },
376
+ {
377
+ "epoch": 0.416,
378
+ "grad_norm": 15.829017639160156,
379
+ "learning_rate": 2.6197297297297298e-05,
380
+ "loss": 1.2803,
381
+ "step": 520
382
+ },
383
+ {
384
+ "epoch": 0.424,
385
+ "grad_norm": 10.789520263671875,
386
+ "learning_rate": 2.6116216216216218e-05,
387
+ "loss": 1.1912,
388
+ "step": 530
389
+ },
390
+ {
391
+ "epoch": 0.432,
392
+ "grad_norm": 5.011368274688721,
393
+ "learning_rate": 2.6035135135135135e-05,
394
+ "loss": 1.0143,
395
+ "step": 540
396
+ },
397
+ {
398
+ "epoch": 0.44,
399
+ "grad_norm": 8.985868453979492,
400
+ "learning_rate": 2.5954054054054055e-05,
401
+ "loss": 1.118,
402
+ "step": 550
403
+ },
404
+ {
405
+ "epoch": 0.448,
406
+ "grad_norm": 6.862995147705078,
407
+ "learning_rate": 2.587297297297297e-05,
408
+ "loss": 1.1269,
409
+ "step": 560
410
+ },
411
+ {
412
+ "epoch": 0.456,
413
+ "grad_norm": 10.972336769104004,
414
+ "learning_rate": 2.579189189189189e-05,
415
+ "loss": 1.1591,
416
+ "step": 570
417
+ },
418
+ {
419
+ "epoch": 0.464,
420
+ "grad_norm": 8.179327011108398,
421
+ "learning_rate": 2.5710810810810812e-05,
422
+ "loss": 1.154,
423
+ "step": 580
424
+ },
425
+ {
426
+ "epoch": 0.472,
427
+ "grad_norm": 11.713990211486816,
428
+ "learning_rate": 2.5629729729729732e-05,
429
+ "loss": 1.0995,
430
+ "step": 590
431
+ },
432
+ {
433
+ "epoch": 0.48,
434
+ "grad_norm": 10.86710262298584,
435
+ "learning_rate": 2.554864864864865e-05,
436
+ "loss": 1.1544,
437
+ "step": 600
438
+ },
439
+ {
440
+ "epoch": 0.488,
441
+ "grad_norm": 6.228063106536865,
442
+ "learning_rate": 2.546756756756757e-05,
443
+ "loss": 1.2395,
444
+ "step": 610
445
+ },
446
+ {
447
+ "epoch": 0.496,
448
+ "grad_norm": 12.631518363952637,
449
+ "learning_rate": 2.538648648648649e-05,
450
+ "loss": 1.0992,
451
+ "step": 620
452
+ },
453
+ {
454
+ "epoch": 0.504,
455
+ "grad_norm": 7.058006763458252,
456
+ "learning_rate": 2.530540540540541e-05,
457
+ "loss": 1.194,
458
+ "step": 630
459
+ },
460
+ {
461
+ "epoch": 0.512,
462
+ "grad_norm": 5.026750087738037,
463
+ "learning_rate": 2.5224324324324325e-05,
464
+ "loss": 1.103,
465
+ "step": 640
466
+ },
467
+ {
468
+ "epoch": 0.52,
469
+ "grad_norm": 7.1134843826293945,
470
+ "learning_rate": 2.5143243243243242e-05,
471
+ "loss": 0.9427,
472
+ "step": 650
473
+ },
474
+ {
475
+ "epoch": 0.528,
476
+ "grad_norm": 7.147433280944824,
477
+ "learning_rate": 2.5062162162162162e-05,
478
+ "loss": 0.9881,
479
+ "step": 660
480
+ },
481
+ {
482
+ "epoch": 0.536,
483
+ "grad_norm": 6.535639762878418,
484
+ "learning_rate": 2.4981081081081082e-05,
485
+ "loss": 1.1143,
486
+ "step": 670
487
+ },
488
+ {
489
+ "epoch": 0.544,
490
+ "grad_norm": 10.878937721252441,
491
+ "learning_rate": 2.49e-05,
492
+ "loss": 0.8909,
493
+ "step": 680
494
+ },
495
+ {
496
+ "epoch": 0.552,
497
+ "grad_norm": 5.79094934463501,
498
+ "learning_rate": 2.481891891891892e-05,
499
+ "loss": 0.9728,
500
+ "step": 690
501
+ },
502
+ {
503
+ "epoch": 0.56,
504
+ "grad_norm": 6.935592174530029,
505
+ "learning_rate": 2.473783783783784e-05,
506
+ "loss": 1.0735,
507
+ "step": 700
508
+ },
509
+ {
510
+ "epoch": 0.568,
511
+ "grad_norm": 5.661824703216553,
512
+ "learning_rate": 2.465675675675676e-05,
513
+ "loss": 1.0012,
514
+ "step": 710
515
+ },
516
+ {
517
+ "epoch": 0.576,
518
+ "grad_norm": 13.233421325683594,
519
+ "learning_rate": 2.4575675675675676e-05,
520
+ "loss": 1.0315,
521
+ "step": 720
522
+ },
523
+ {
524
+ "epoch": 0.584,
525
+ "grad_norm": 9.292459487915039,
526
+ "learning_rate": 2.4494594594594596e-05,
527
+ "loss": 0.9547,
528
+ "step": 730
529
+ },
530
+ {
531
+ "epoch": 0.592,
532
+ "grad_norm": 13.138367652893066,
533
+ "learning_rate": 2.442162162162162e-05,
534
+ "loss": 0.9379,
535
+ "step": 740
536
+ },
537
+ {
538
+ "epoch": 0.6,
539
+ "grad_norm": 13.352531433105469,
540
+ "learning_rate": 2.434054054054054e-05,
541
+ "loss": 0.9484,
542
+ "step": 750
543
+ },
544
+ {
545
+ "epoch": 0.608,
546
+ "grad_norm": 11.993139266967773,
547
+ "learning_rate": 2.4259459459459458e-05,
548
+ "loss": 1.1064,
549
+ "step": 760
550
+ },
551
+ {
552
+ "epoch": 0.616,
553
+ "grad_norm": 12.132452011108398,
554
+ "learning_rate": 2.4178378378378378e-05,
555
+ "loss": 1.1363,
556
+ "step": 770
557
+ },
558
+ {
559
+ "epoch": 0.624,
560
+ "grad_norm": 13.944737434387207,
561
+ "learning_rate": 2.4097297297297298e-05,
562
+ "loss": 0.9835,
563
+ "step": 780
564
+ },
565
+ {
566
+ "epoch": 0.632,
567
+ "grad_norm": 6.077609062194824,
568
+ "learning_rate": 2.4016216216216218e-05,
569
+ "loss": 0.8391,
570
+ "step": 790
571
+ },
572
+ {
573
+ "epoch": 0.64,
574
+ "grad_norm": 7.873855113983154,
575
+ "learning_rate": 2.3935135135135135e-05,
576
+ "loss": 0.7772,
577
+ "step": 800
578
+ },
579
+ {
580
+ "epoch": 0.648,
581
+ "grad_norm": 13.312115669250488,
582
+ "learning_rate": 2.3854054054054055e-05,
583
+ "loss": 1.0117,
584
+ "step": 810
585
+ },
586
+ {
587
+ "epoch": 0.656,
588
+ "grad_norm": 9.016510963439941,
589
+ "learning_rate": 2.3772972972972975e-05,
590
+ "loss": 0.9353,
591
+ "step": 820
592
+ },
593
+ {
594
+ "epoch": 0.664,
595
+ "grad_norm": 8.618375778198242,
596
+ "learning_rate": 2.3691891891891895e-05,
597
+ "loss": 0.9598,
598
+ "step": 830
599
+ },
600
+ {
601
+ "epoch": 0.672,
602
+ "grad_norm": 10.867205619812012,
603
+ "learning_rate": 2.361081081081081e-05,
604
+ "loss": 0.8726,
605
+ "step": 840
606
+ },
607
+ {
608
+ "epoch": 0.68,
609
+ "grad_norm": 13.182415962219238,
610
+ "learning_rate": 2.3529729729729728e-05,
611
+ "loss": 0.9202,
612
+ "step": 850
613
+ },
614
+ {
615
+ "epoch": 0.688,
616
+ "grad_norm": 12.405129432678223,
617
+ "learning_rate": 2.3448648648648648e-05,
618
+ "loss": 0.8795,
619
+ "step": 860
620
+ },
621
+ {
622
+ "epoch": 0.696,
623
+ "grad_norm": 8.207524299621582,
624
+ "learning_rate": 2.3367567567567568e-05,
625
+ "loss": 0.8015,
626
+ "step": 870
627
+ },
628
+ {
629
+ "epoch": 0.704,
630
+ "grad_norm": 15.442817687988281,
631
+ "learning_rate": 2.3286486486486485e-05,
632
+ "loss": 0.9932,
633
+ "step": 880
634
+ },
635
+ {
636
+ "epoch": 0.712,
637
+ "grad_norm": 13.388226509094238,
638
+ "learning_rate": 2.3205405405405405e-05,
639
+ "loss": 0.87,
640
+ "step": 890
641
+ },
642
+ {
643
+ "epoch": 0.72,
644
+ "grad_norm": 8.635920524597168,
645
+ "learning_rate": 2.3124324324324325e-05,
646
+ "loss": 0.7842,
647
+ "step": 900
648
+ },
649
+ {
650
+ "epoch": 0.728,
651
+ "grad_norm": 11.66073989868164,
652
+ "learning_rate": 2.3043243243243245e-05,
653
+ "loss": 0.9023,
654
+ "step": 910
655
+ },
656
+ {
657
+ "epoch": 0.736,
658
+ "grad_norm": 12.954612731933594,
659
+ "learning_rate": 2.2962162162162162e-05,
660
+ "loss": 1.0076,
661
+ "step": 920
662
+ },
663
+ {
664
+ "epoch": 0.744,
665
+ "grad_norm": 11.18680191040039,
666
+ "learning_rate": 2.2881081081081082e-05,
667
+ "loss": 1.1349,
668
+ "step": 930
669
+ },
670
+ {
671
+ "epoch": 0.752,
672
+ "grad_norm": 8.514711380004883,
673
+ "learning_rate": 2.2800000000000002e-05,
674
+ "loss": 0.9604,
675
+ "step": 940
676
+ },
677
+ {
678
+ "epoch": 0.76,
679
+ "grad_norm": 4.436418056488037,
680
+ "learning_rate": 2.2718918918918922e-05,
681
+ "loss": 0.9632,
682
+ "step": 950
683
+ },
684
+ {
685
+ "epoch": 0.768,
686
+ "grad_norm": 10.213781356811523,
687
+ "learning_rate": 2.263783783783784e-05,
688
+ "loss": 0.8139,
689
+ "step": 960
690
+ },
691
+ {
692
+ "epoch": 0.776,
693
+ "grad_norm": 9.987252235412598,
694
+ "learning_rate": 2.2556756756756755e-05,
695
+ "loss": 0.8276,
696
+ "step": 970
697
+ },
698
+ {
699
+ "epoch": 0.784,
700
+ "grad_norm": 12.511467933654785,
701
+ "learning_rate": 2.2475675675675675e-05,
702
+ "loss": 0.8709,
703
+ "step": 980
704
+ },
705
+ {
706
+ "epoch": 0.792,
707
+ "grad_norm": 8.908098220825195,
708
+ "learning_rate": 2.2394594594594595e-05,
709
+ "loss": 0.8812,
710
+ "step": 990
711
+ },
712
+ {
713
+ "epoch": 0.8,
714
+ "grad_norm": 10.62246322631836,
715
+ "learning_rate": 2.2313513513513512e-05,
716
+ "loss": 0.9733,
717
+ "step": 1000
718
+ },
719
+ {
720
+ "epoch": 0.808,
721
+ "grad_norm": 14.651544570922852,
722
+ "learning_rate": 2.2232432432432432e-05,
723
+ "loss": 1.0309,
724
+ "step": 1010
725
+ },
726
+ {
727
+ "epoch": 0.816,
728
+ "grad_norm": 19.1525936126709,
729
+ "learning_rate": 2.2151351351351352e-05,
730
+ "loss": 0.8808,
731
+ "step": 1020
732
+ },
733
+ {
734
+ "epoch": 0.824,
735
+ "grad_norm": 7.289106369018555,
736
+ "learning_rate": 2.2070270270270272e-05,
737
+ "loss": 0.9126,
738
+ "step": 1030
739
+ },
740
+ {
741
+ "epoch": 0.832,
742
+ "grad_norm": 5.375001907348633,
743
+ "learning_rate": 2.198918918918919e-05,
744
+ "loss": 0.847,
745
+ "step": 1040
746
+ },
747
+ {
748
+ "epoch": 0.84,
749
+ "grad_norm": 8.623431205749512,
750
+ "learning_rate": 2.190810810810811e-05,
751
+ "loss": 0.9139,
752
+ "step": 1050
753
+ },
754
+ {
755
+ "epoch": 0.848,
756
+ "grad_norm": 6.639071941375732,
757
+ "learning_rate": 2.182702702702703e-05,
758
+ "loss": 0.9345,
759
+ "step": 1060
760
+ },
761
+ {
762
+ "epoch": 0.856,
763
+ "grad_norm": 7.635943412780762,
764
+ "learning_rate": 2.174594594594595e-05,
765
+ "loss": 0.8134,
766
+ "step": 1070
767
+ },
768
+ {
769
+ "epoch": 0.864,
770
+ "grad_norm": 12.048315048217773,
771
+ "learning_rate": 2.1664864864864862e-05,
772
+ "loss": 0.6728,
773
+ "step": 1080
774
+ },
775
+ {
776
+ "epoch": 0.872,
777
+ "grad_norm": 13.869949340820312,
778
+ "learning_rate": 2.1583783783783783e-05,
779
+ "loss": 0.8256,
780
+ "step": 1090
781
+ },
782
+ {
783
+ "epoch": 0.88,
784
+ "grad_norm": 26.233325958251953,
785
+ "learning_rate": 2.1502702702702703e-05,
786
+ "loss": 0.7044,
787
+ "step": 1100
788
+ },
789
+ {
790
+ "epoch": 0.888,
791
+ "grad_norm": 7.98716926574707,
792
+ "learning_rate": 2.1421621621621623e-05,
793
+ "loss": 0.7398,
794
+ "step": 1110
795
+ },
796
+ {
797
+ "epoch": 0.896,
798
+ "grad_norm": 13.682205200195312,
799
+ "learning_rate": 2.134054054054054e-05,
800
+ "loss": 0.7522,
801
+ "step": 1120
802
+ },
803
+ {
804
+ "epoch": 0.904,
805
+ "grad_norm": 9.086796760559082,
806
+ "learning_rate": 2.125945945945946e-05,
807
+ "loss": 0.8574,
808
+ "step": 1130
809
+ },
810
+ {
811
+ "epoch": 0.912,
812
+ "grad_norm": 10.3043851852417,
813
+ "learning_rate": 2.117837837837838e-05,
814
+ "loss": 0.8005,
815
+ "step": 1140
816
+ },
817
+ {
818
+ "epoch": 0.92,
819
+ "grad_norm": 12.477950096130371,
820
+ "learning_rate": 2.10972972972973e-05,
821
+ "loss": 0.8436,
822
+ "step": 1150
823
+ },
824
+ {
825
+ "epoch": 0.928,
826
+ "grad_norm": 16.634178161621094,
827
+ "learning_rate": 2.1016216216216216e-05,
828
+ "loss": 0.6515,
829
+ "step": 1160
830
+ },
831
+ {
832
+ "epoch": 0.936,
833
+ "grad_norm": 11.066425323486328,
834
+ "learning_rate": 2.0935135135135136e-05,
835
+ "loss": 0.8689,
836
+ "step": 1170
837
+ },
838
+ {
839
+ "epoch": 0.944,
840
+ "grad_norm": 19.47179412841797,
841
+ "learning_rate": 2.0854054054054056e-05,
842
+ "loss": 0.6605,
843
+ "step": 1180
844
+ },
845
+ {
846
+ "epoch": 0.952,
847
+ "grad_norm": 3.917236804962158,
848
+ "learning_rate": 2.0772972972972973e-05,
849
+ "loss": 0.6826,
850
+ "step": 1190
851
+ },
852
+ {
853
+ "epoch": 0.96,
854
+ "grad_norm": 16.43979263305664,
855
+ "learning_rate": 2.069189189189189e-05,
856
+ "loss": 0.7731,
857
+ "step": 1200
858
+ },
859
+ {
860
+ "epoch": 0.968,
861
+ "grad_norm": 6.7848711013793945,
862
+ "learning_rate": 2.061081081081081e-05,
863
+ "loss": 0.705,
864
+ "step": 1210
865
+ },
866
+ {
867
+ "epoch": 0.976,
868
+ "grad_norm": 7.472936153411865,
869
+ "learning_rate": 2.052972972972973e-05,
870
+ "loss": 0.7663,
871
+ "step": 1220
872
+ },
873
+ {
874
+ "epoch": 0.984,
875
+ "grad_norm": 5.729743957519531,
876
+ "learning_rate": 2.044864864864865e-05,
877
+ "loss": 0.9337,
878
+ "step": 1230
879
+ },
880
+ {
881
+ "epoch": 0.992,
882
+ "grad_norm": 6.306894302368164,
883
+ "learning_rate": 2.0367567567567567e-05,
884
+ "loss": 0.7655,
885
+ "step": 1240
886
+ },
887
+ {
888
+ "epoch": 1.0,
889
+ "grad_norm": 17.98261260986328,
890
+ "learning_rate": 2.0286486486486487e-05,
891
+ "loss": 0.8202,
892
+ "step": 1250
893
+ },
894
+ {
895
+ "epoch": 1.0,
896
+ "eval_accuracy": 0.7668834417208604,
897
+ "eval_f1": 0.7589222033223469,
898
+ "eval_loss": 0.7338727712631226,
899
+ "eval_model_preparation_time": 0.0029,
900
+ "eval_runtime": 59.9403,
901
+ "eval_samples_per_second": 33.35,
902
+ "eval_steps_per_second": 4.171,
903
+ "step": 1250
904
+ }
905
+ ],
906
+ "logging_steps": 10,
907
+ "max_steps": 3750,
908
+ "num_input_tokens_seen": 0,
909
+ "num_train_epochs": 3,
910
+ "save_steps": 500,
911
+ "stateful_callbacks": {
912
+ "TrainerControl": {
913
+ "args": {
914
+ "should_epoch_stop": false,
915
+ "should_evaluate": false,
916
+ "should_log": false,
917
+ "should_save": true,
918
+ "should_training_stop": false
919
+ },
920
+ "attributes": {}
921
+ }
922
+ },
923
+ "total_flos": 4.4103920295511066e+17,
924
+ "train_batch_size": 8,
925
+ "trial_name": null,
926
+ "trial_params": null
927
+ }
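trainer_state.json records the full training log for this checkpoint: a loss/learning-rate entry every 10 steps plus the epoch-1 evaluation (accuracy ≈ 0.767, F1 ≈ 0.759, eval loss ≈ 0.734 at step 1250). A small sketch of pulling those numbers back out of the file; the local path is an assumption:

```python
import json

# Assumed local path to the checkpoint's trainer state.
STATE_PATH = "checkpoint-1250/trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

print("best checkpoint:", state["best_model_checkpoint"])
print("best metric (F1):", state["best_metric"])

# Split the log into training-step entries and evaluation records.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

print("last logged training loss:", train_log[-1]["loss"])
for e in eval_log:
    print(f"epoch {e['epoch']}: accuracy={e['eval_accuracy']}, f1={e['eval_f1']}")
```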
checkpoint-1250/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a868b38cbd73376b16ac8e3d8306db2a42a29f39b8f6d41ef6bd11bcdc6b19c
+ size 5304
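training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves with torch.save. A hedged sketch of inspecting it, assuming transformers is installed so the pickled class can be resolved and a PyTorch version that accepts weights_only=False:

```python
import torch

# Assumed local path; unpickling needs transformers importable in the environment.
args = torch.load("checkpoint-1250/training_args.bin", weights_only=False)

print(type(args).__name__)  # expected to be TrainingArguments
print(args.per_device_train_batch_size, args.num_train_epochs)
```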
checkpoint-2500/config.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "activation_dropout": 0.0,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForSequenceClassification"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 256,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": false,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": false,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_norm": "group",
+ "feat_proj_dropout": 0.1,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "freeze_feat_extract_train": true,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "ANG",
+ "1": "CAL",
+ "2": "DIS",
+ "3": "FEA",
+ "4": "HAP",
+ "5": "NEU",
+ "6": "SAD",
+ "7": "SUR"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "ANG": 0,
+ "CAL": 1,
+ "DIS": 2,
+ "FEA": 3,
+ "HAP": 4,
+ "NEU": 5,
+ "SAD": 6,
+ "SUR": 7
+ },
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.05,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "no_mask_channel_overlap": false,
+ "no_mask_time_overlap": false,
+ "num_adapter_layers": 3,
+ "num_attention_heads": 12,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 12,
+ "num_negatives": 100,
+ "output_hidden_size": 768,
+ "pad_token_id": 0,
+ "proj_codevector_dim": 256,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.50.0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32,
+ "xvector_output_dim": 512
+ }
checkpoint-2500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dda356effdf4b1cc55bb3c4add4b0548d8df82484fb03dd86e7e03aed1545c17
+ size 378308536
checkpoint-2500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f629a9e5fd9b8ee1d164de9a984d0ece89f2ce5151dc5ccbed956fa19979a27
+ size 723133690
checkpoint-2500/preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+ }
checkpoint-2500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b0b3ae2ac7afa18565121cbea0e7a45d9ee0f5eb80f3117308459dbde626a9c
+ size 14308
checkpoint-2500/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4748155d1a1f22f9ad53e543326ed7f7c8027cde82d15dd582ea1128707c46b
+ size 988
checkpoint-2500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a7ccd7e8353c142ba4e4869bd47c80ced957c3e6701a27be0f6c02c3e320763
+ size 1064
checkpoint-2500/trainer_state.json ADDED
@@ -0,0 +1,1813 @@
1
+ {
2
+ "best_global_step": 2500,
3
+ "best_metric": 0.8020345252909776,
4
+ "best_model_checkpoint": "voice_emotion_classification/checkpoint-2500",
5
+ "epoch": 2.0,
6
+ "eval_steps": 500,
7
+ "global_step": 2500,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0008,
14
+ "grad_norm": 1.4005043506622314,
15
+ "learning_rate": 6.000000000000001e-07,
16
+ "loss": 2.0903,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.008,
21
+ "grad_norm": 1.3180536031723022,
22
+ "learning_rate": 6e-06,
23
+ "loss": 2.0826,
24
+ "step": 10
25
+ },
26
+ {
27
+ "epoch": 0.016,
28
+ "grad_norm": 1.0437175035476685,
29
+ "learning_rate": 1.2e-05,
30
+ "loss": 2.0693,
31
+ "step": 20
32
+ },
33
+ {
34
+ "epoch": 0.024,
35
+ "grad_norm": 1.371071457862854,
36
+ "learning_rate": 1.8e-05,
37
+ "loss": 2.0463,
38
+ "step": 30
39
+ },
40
+ {
41
+ "epoch": 0.032,
42
+ "grad_norm": 1.51685631275177,
43
+ "learning_rate": 2.4e-05,
44
+ "loss": 2.0423,
45
+ "step": 40
46
+ },
47
+ {
48
+ "epoch": 0.04,
49
+ "grad_norm": 1.4282890558242798,
50
+ "learning_rate": 3e-05,
51
+ "loss": 1.996,
52
+ "step": 50
53
+ },
54
+ {
55
+ "epoch": 0.048,
56
+ "grad_norm": 1.860026478767395,
57
+ "learning_rate": 2.991891891891892e-05,
58
+ "loss": 1.9639,
59
+ "step": 60
60
+ },
61
+ {
62
+ "epoch": 0.056,
63
+ "grad_norm": 2.121481418609619,
64
+ "learning_rate": 2.983783783783784e-05,
65
+ "loss": 1.9517,
66
+ "step": 70
67
+ },
68
+ {
69
+ "epoch": 0.064,
70
+ "grad_norm": 2.056445837020874,
71
+ "learning_rate": 2.9756756756756758e-05,
72
+ "loss": 1.8765,
73
+ "step": 80
74
+ },
75
+ {
76
+ "epoch": 0.072,
77
+ "grad_norm": 2.3928184509277344,
78
+ "learning_rate": 2.9675675675675678e-05,
79
+ "loss": 1.8703,
80
+ "step": 90
81
+ },
82
+ {
83
+ "epoch": 0.08,
84
+ "grad_norm": 3.0733420848846436,
85
+ "learning_rate": 2.9594594594594598e-05,
86
+ "loss": 1.853,
87
+ "step": 100
88
+ },
89
+ {
90
+ "epoch": 0.088,
91
+ "grad_norm": 2.88864803314209,
92
+ "learning_rate": 2.9513513513513514e-05,
93
+ "loss": 1.7652,
94
+ "step": 110
95
+ },
96
+ {
97
+ "epoch": 0.096,
98
+ "grad_norm": 2.9048268795013428,
99
+ "learning_rate": 2.943243243243243e-05,
100
+ "loss": 1.7984,
101
+ "step": 120
102
+ },
103
+ {
104
+ "epoch": 0.104,
105
+ "grad_norm": 2.6991426944732666,
106
+ "learning_rate": 2.935135135135135e-05,
107
+ "loss": 1.6869,
108
+ "step": 130
109
+ },
110
+ {
111
+ "epoch": 0.112,
112
+ "grad_norm": 2.4570231437683105,
113
+ "learning_rate": 2.927027027027027e-05,
114
+ "loss": 1.7812,
115
+ "step": 140
116
+ },
117
+ {
118
+ "epoch": 0.12,
119
+ "grad_norm": 4.502678871154785,
120
+ "learning_rate": 2.918918918918919e-05,
121
+ "loss": 1.8444,
122
+ "step": 150
123
+ },
124
+ {
125
+ "epoch": 0.128,
126
+ "grad_norm": 3.117838144302368,
127
+ "learning_rate": 2.9108108108108108e-05,
128
+ "loss": 1.7544,
129
+ "step": 160
130
+ },
131
+ {
132
+ "epoch": 0.136,
133
+ "grad_norm": 2.812086343765259,
134
+ "learning_rate": 2.9027027027027028e-05,
135
+ "loss": 1.638,
136
+ "step": 170
137
+ },
138
+ {
139
+ "epoch": 0.144,
140
+ "grad_norm": 3.23271107673645,
141
+ "learning_rate": 2.8954054054054057e-05,
142
+ "loss": 1.7026,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.152,
147
+ "grad_norm": 3.975172996520996,
148
+ "learning_rate": 2.8872972972972977e-05,
149
+ "loss": 1.6715,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.16,
154
+ "grad_norm": 3.8965938091278076,
155
+ "learning_rate": 2.879189189189189e-05,
156
+ "loss": 1.6804,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.168,
161
+ "grad_norm": 4.267274856567383,
162
+ "learning_rate": 2.871081081081081e-05,
163
+ "loss": 1.6587,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.176,
168
+ "grad_norm": 3.524360179901123,
169
+ "learning_rate": 2.862972972972973e-05,
170
+ "loss": 1.4811,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.184,
175
+ "grad_norm": 3.266697883605957,
176
+ "learning_rate": 2.854864864864865e-05,
177
+ "loss": 1.672,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.192,
182
+ "grad_norm": 5.3684186935424805,
183
+ "learning_rate": 2.8467567567567567e-05,
184
+ "loss": 1.5284,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.2,
189
+ "grad_norm": 3.898176431655884,
190
+ "learning_rate": 2.8386486486486487e-05,
191
+ "loss": 1.5774,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.208,
196
+ "grad_norm": 3.189732074737549,
197
+ "learning_rate": 2.8305405405405407e-05,
198
+ "loss": 1.4874,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.216,
203
+ "grad_norm": 3.274244785308838,
204
+ "learning_rate": 2.8224324324324327e-05,
205
+ "loss": 1.5098,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.224,
210
+ "grad_norm": 5.691224098205566,
211
+ "learning_rate": 2.8143243243243244e-05,
212
+ "loss": 1.509,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.232,
217
+ "grad_norm": 6.856773376464844,
218
+ "learning_rate": 2.8062162162162164e-05,
219
+ "loss": 1.4558,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.24,
224
+ "grad_norm": 7.078716278076172,
225
+ "learning_rate": 2.7981081081081084e-05,
226
+ "loss": 1.5298,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.248,
231
+ "grad_norm": 4.4305100440979,
232
+ "learning_rate": 2.79e-05,
233
+ "loss": 1.3387,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.256,
238
+ "grad_norm": 10.400449752807617,
239
+ "learning_rate": 2.7818918918918917e-05,
240
+ "loss": 1.4501,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.264,
245
+ "grad_norm": 5.316948890686035,
246
+ "learning_rate": 2.7737837837837837e-05,
247
+ "loss": 1.367,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.272,
252
+ "grad_norm": 9.753177642822266,
253
+ "learning_rate": 2.7656756756756757e-05,
254
+ "loss": 1.4684,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.28,
259
+ "grad_norm": 8.100529670715332,
260
+ "learning_rate": 2.7575675675675677e-05,
261
+ "loss": 1.4175,
262
+ "step": 350
263
+ },
264
+ {
265
+ "epoch": 0.288,
266
+ "grad_norm": 9.878854751586914,
267
+ "learning_rate": 2.7494594594594594e-05,
268
+ "loss": 1.308,
269
+ "step": 360
270
+ },
271
+ {
272
+ "epoch": 0.296,
273
+ "grad_norm": 5.865877151489258,
274
+ "learning_rate": 2.7413513513513514e-05,
275
+ "loss": 1.3035,
276
+ "step": 370
277
+ },
278
+ {
279
+ "epoch": 0.304,
280
+ "grad_norm": 7.870754241943359,
281
+ "learning_rate": 2.7332432432432434e-05,
282
+ "loss": 1.2915,
283
+ "step": 380
284
+ },
285
+ {
286
+ "epoch": 0.312,
287
+ "grad_norm": 8.517908096313477,
288
+ "learning_rate": 2.7251351351351354e-05,
289
+ "loss": 1.4318,
290
+ "step": 390
291
+ },
292
+ {
293
+ "epoch": 0.32,
294
+ "grad_norm": 4.7960309982299805,
295
+ "learning_rate": 2.717027027027027e-05,
296
+ "loss": 1.3154,
297
+ "step": 400
298
+ },
299
+ {
300
+ "epoch": 0.328,
301
+ "grad_norm": 5.629390716552734,
302
+ "learning_rate": 2.708918918918919e-05,
303
+ "loss": 1.3433,
304
+ "step": 410
305
+ },
306
+ {
307
+ "epoch": 0.336,
308
+ "grad_norm": 8.473249435424805,
309
+ "learning_rate": 2.700810810810811e-05,
310
+ "loss": 1.1474,
311
+ "step": 420
312
+ },
313
+ {
314
+ "epoch": 0.344,
315
+ "grad_norm": 3.652617931365967,
316
+ "learning_rate": 2.6927027027027028e-05,
317
+ "loss": 1.3247,
318
+ "step": 430
319
+ },
320
+ {
321
+ "epoch": 0.352,
322
+ "grad_norm": 4.9890055656433105,
323
+ "learning_rate": 2.6845945945945944e-05,
324
+ "loss": 1.3347,
325
+ "step": 440
326
+ },
327
+ {
328
+ "epoch": 0.36,
329
+ "grad_norm": 5.2355055809021,
330
+ "learning_rate": 2.6764864864864864e-05,
331
+ "loss": 1.0932,
332
+ "step": 450
333
+ },
334
+ {
335
+ "epoch": 0.368,
336
+ "grad_norm": 6.325026512145996,
337
+ "learning_rate": 2.6683783783783785e-05,
338
+ "loss": 1.4873,
339
+ "step": 460
340
+ },
341
+ {
342
+ "epoch": 0.376,
343
+ "grad_norm": 6.78115701675415,
344
+ "learning_rate": 2.6602702702702705e-05,
345
+ "loss": 1.2311,
346
+ "step": 470
347
+ },
348
+ {
349
+ "epoch": 0.384,
350
+ "grad_norm": 4.194353103637695,
351
+ "learning_rate": 2.652162162162162e-05,
352
+ "loss": 1.2493,
353
+ "step": 480
354
+ },
355
+ {
356
+ "epoch": 0.392,
357
+ "grad_norm": 3.8817057609558105,
358
+ "learning_rate": 2.644054054054054e-05,
359
+ "loss": 1.1237,
360
+ "step": 490
361
+ },
362
+ {
363
+ "epoch": 0.4,
364
+ "grad_norm": 6.7539520263671875,
365
+ "learning_rate": 2.635945945945946e-05,
366
+ "loss": 1.1135,
367
+ "step": 500
368
+ },
369
+ {
370
+ "epoch": 0.408,
371
+ "grad_norm": 9.044737815856934,
372
+ "learning_rate": 2.627837837837838e-05,
373
+ "loss": 1.2459,
374
+ "step": 510
375
+ },
376
+ {
377
+ "epoch": 0.416,
378
+ "grad_norm": 15.829017639160156,
379
+ "learning_rate": 2.6197297297297298e-05,
380
+ "loss": 1.2803,
381
+ "step": 520
382
+ },
383
+ {
384
+ "epoch": 0.424,
385
+ "grad_norm": 10.789520263671875,
386
+ "learning_rate": 2.6116216216216218e-05,
387
+ "loss": 1.1912,
388
+ "step": 530
389
+ },
390
+ {
391
+ "epoch": 0.432,
392
+ "grad_norm": 5.011368274688721,
393
+ "learning_rate": 2.6035135135135135e-05,
394
+ "loss": 1.0143,
395
+ "step": 540
396
+ },
397
+ {
398
+ "epoch": 0.44,
399
+ "grad_norm": 8.985868453979492,
400
+ "learning_rate": 2.5954054054054055e-05,
401
+ "loss": 1.118,
402
+ "step": 550
403
+ },
404
+ {
405
+ "epoch": 0.448,
406
+ "grad_norm": 6.862995147705078,
407
+ "learning_rate": 2.587297297297297e-05,
408
+ "loss": 1.1269,
409
+ "step": 560
410
+ },
411
+ {
412
+ "epoch": 0.456,
413
+ "grad_norm": 10.972336769104004,
414
+ "learning_rate": 2.579189189189189e-05,
415
+ "loss": 1.1591,
416
+ "step": 570
417
+ },
418
+ {
419
+ "epoch": 0.464,
420
+ "grad_norm": 8.179327011108398,
421
+ "learning_rate": 2.5710810810810812e-05,
422
+ "loss": 1.154,
423
+ "step": 580
424
+ },
425
+ {
426
+ "epoch": 0.472,
427
+ "grad_norm": 11.713990211486816,
428
+ "learning_rate": 2.5629729729729732e-05,
429
+ "loss": 1.0995,
430
+ "step": 590
431
+ },
432
+ {
433
+ "epoch": 0.48,
434
+ "grad_norm": 10.86710262298584,
435
+ "learning_rate": 2.554864864864865e-05,
436
+ "loss": 1.1544,
437
+ "step": 600
438
+ },
439
+ {
440
+ "epoch": 0.488,
441
+ "grad_norm": 6.228063106536865,
442
+ "learning_rate": 2.546756756756757e-05,
443
+ "loss": 1.2395,
444
+ "step": 610
445
+ },
446
+ {
447
+ "epoch": 0.496,
448
+ "grad_norm": 12.631518363952637,
449
+ "learning_rate": 2.538648648648649e-05,
450
+ "loss": 1.0992,
451
+ "step": 620
452
+ },
453
+ {
454
+ "epoch": 0.504,
455
+ "grad_norm": 7.058006763458252,
456
+ "learning_rate": 2.530540540540541e-05,
457
+ "loss": 1.194,
458
+ "step": 630
459
+ },
460
+ {
461
+ "epoch": 0.512,
462
+ "grad_norm": 5.026750087738037,
463
+ "learning_rate": 2.5224324324324325e-05,
464
+ "loss": 1.103,
465
+ "step": 640
466
+ },
467
+ {
468
+ "epoch": 0.52,
469
+ "grad_norm": 7.1134843826293945,
470
+ "learning_rate": 2.5143243243243242e-05,
471
+ "loss": 0.9427,
472
+ "step": 650
473
+ },
474
+ {
475
+ "epoch": 0.528,
476
+ "grad_norm": 7.147433280944824,
477
+ "learning_rate": 2.5062162162162162e-05,
478
+ "loss": 0.9881,
479
+ "step": 660
480
+ },
481
+ {
482
+ "epoch": 0.536,
483
+ "grad_norm": 6.535639762878418,
484
+ "learning_rate": 2.4981081081081082e-05,
485
+ "loss": 1.1143,
486
+ "step": 670
487
+ },
488
+ {
489
+ "epoch": 0.544,
490
+ "grad_norm": 10.878937721252441,
491
+ "learning_rate": 2.49e-05,
492
+ "loss": 0.8909,
493
+ "step": 680
494
+ },
495
+ {
496
+ "epoch": 0.552,
497
+ "grad_norm": 5.79094934463501,
498
+ "learning_rate": 2.481891891891892e-05,
499
+ "loss": 0.9728,
500
+ "step": 690
501
+ },
502
+ {
503
+ "epoch": 0.56,
504
+ "grad_norm": 6.935592174530029,
505
+ "learning_rate": 2.473783783783784e-05,
506
+ "loss": 1.0735,
507
+ "step": 700
508
+ },
509
+ {
510
+ "epoch": 0.568,
511
+ "grad_norm": 5.661824703216553,
512
+ "learning_rate": 2.465675675675676e-05,
513
+ "loss": 1.0012,
514
+ "step": 710
515
+ },
516
+ {
517
+ "epoch": 0.576,
518
+ "grad_norm": 13.233421325683594,
519
+ "learning_rate": 2.4575675675675676e-05,
520
+ "loss": 1.0315,
521
+ "step": 720
522
+ },
523
+ {
524
+ "epoch": 0.584,
525
+ "grad_norm": 9.292459487915039,
526
+ "learning_rate": 2.4494594594594596e-05,
527
+ "loss": 0.9547,
528
+ "step": 730
529
+ },
530
+ {
531
+ "epoch": 0.592,
532
+ "grad_norm": 13.138367652893066,
533
+ "learning_rate": 2.442162162162162e-05,
534
+ "loss": 0.9379,
535
+ "step": 740
536
+ },
537
+ {
538
+ "epoch": 0.6,
539
+ "grad_norm": 13.352531433105469,
540
+ "learning_rate": 2.434054054054054e-05,
541
+ "loss": 0.9484,
542
+ "step": 750
543
+ },
544
+ {
545
+ "epoch": 0.608,
546
+ "grad_norm": 11.993139266967773,
547
+ "learning_rate": 2.4259459459459458e-05,
548
+ "loss": 1.1064,
549
+ "step": 760
550
+ },
551
+ {
552
+ "epoch": 0.616,
553
+ "grad_norm": 12.132452011108398,
554
+ "learning_rate": 2.4178378378378378e-05,
555
+ "loss": 1.1363,
556
+ "step": 770
557
+ },
558
+ {
559
+ "epoch": 0.624,
560
+ "grad_norm": 13.944737434387207,
561
+ "learning_rate": 2.4097297297297298e-05,
562
+ "loss": 0.9835,
563
+ "step": 780
564
+ },
565
+ {
566
+ "epoch": 0.632,
567
+ "grad_norm": 6.077609062194824,
568
+ "learning_rate": 2.4016216216216218e-05,
569
+ "loss": 0.8391,
570
+ "step": 790
571
+ },
572
+ {
573
+ "epoch": 0.64,
574
+ "grad_norm": 7.873855113983154,
575
+ "learning_rate": 2.3935135135135135e-05,
576
+ "loss": 0.7772,
577
+ "step": 800
578
+ },
579
+ {
580
+ "epoch": 0.648,
581
+ "grad_norm": 13.312115669250488,
582
+ "learning_rate": 2.3854054054054055e-05,
583
+ "loss": 1.0117,
584
+ "step": 810
585
+ },
586
+ {
587
+ "epoch": 0.656,
588
+ "grad_norm": 9.016510963439941,
589
+ "learning_rate": 2.3772972972972975e-05,
590
+ "loss": 0.9353,
591
+ "step": 820
592
+ },
593
+ {
594
+ "epoch": 0.664,
595
+ "grad_norm": 8.618375778198242,
596
+ "learning_rate": 2.3691891891891895e-05,
597
+ "loss": 0.9598,
598
+ "step": 830
599
+ },
600
+ {
601
+ "epoch": 0.672,
602
+ "grad_norm": 10.867205619812012,
603
+ "learning_rate": 2.361081081081081e-05,
604
+ "loss": 0.8726,
605
+ "step": 840
606
+ },
607
+ {
608
+ "epoch": 0.68,
609
+ "grad_norm": 13.182415962219238,
610
+ "learning_rate": 2.3529729729729728e-05,
611
+ "loss": 0.9202,
612
+ "step": 850
613
+ },
614
+ {
615
+ "epoch": 0.688,
616
+ "grad_norm": 12.405129432678223,
617
+ "learning_rate": 2.3448648648648648e-05,
618
+ "loss": 0.8795,
619
+ "step": 860
620
+ },
621
+ {
622
+ "epoch": 0.696,
623
+ "grad_norm": 8.207524299621582,
624
+ "learning_rate": 2.3367567567567568e-05,
625
+ "loss": 0.8015,
626
+ "step": 870
627
+ },
628
+ {
629
+ "epoch": 0.704,
630
+ "grad_norm": 15.442817687988281,
631
+ "learning_rate": 2.3286486486486485e-05,
632
+ "loss": 0.9932,
633
+ "step": 880
634
+ },
635
+ {
636
+ "epoch": 0.712,
637
+ "grad_norm": 13.388226509094238,
638
+ "learning_rate": 2.3205405405405405e-05,
639
+ "loss": 0.87,
640
+ "step": 890
641
+ },
642
+ {
643
+ "epoch": 0.72,
644
+ "grad_norm": 8.635920524597168,
645
+ "learning_rate": 2.3124324324324325e-05,
646
+ "loss": 0.7842,
647
+ "step": 900
648
+ },
649
+ {
650
+ "epoch": 0.728,
651
+ "grad_norm": 11.66073989868164,
652
+ "learning_rate": 2.3043243243243245e-05,
653
+ "loss": 0.9023,
654
+ "step": 910
655
+ },
656
+ {
657
+ "epoch": 0.736,
658
+ "grad_norm": 12.954612731933594,
659
+ "learning_rate": 2.2962162162162162e-05,
660
+ "loss": 1.0076,
661
+ "step": 920
662
+ },
663
+ {
664
+ "epoch": 0.744,
665
+ "grad_norm": 11.18680191040039,
666
+ "learning_rate": 2.2881081081081082e-05,
667
+ "loss": 1.1349,
668
+ "step": 930
669
+ },
670
+ {
671
+ "epoch": 0.752,
672
+ "grad_norm": 8.514711380004883,
673
+ "learning_rate": 2.2800000000000002e-05,
674
+ "loss": 0.9604,
675
+ "step": 940
676
+ },
677
+ {
678
+ "epoch": 0.76,
679
+ "grad_norm": 4.436418056488037,
680
+ "learning_rate": 2.2718918918918922e-05,
681
+ "loss": 0.9632,
682
+ "step": 950
683
+ },
684
+ {
685
+ "epoch": 0.768,
686
+ "grad_norm": 10.213781356811523,
687
+ "learning_rate": 2.263783783783784e-05,
688
+ "loss": 0.8139,
689
+ "step": 960
690
+ },
691
+ {
692
+ "epoch": 0.776,
693
+ "grad_norm": 9.987252235412598,
694
+ "learning_rate": 2.2556756756756755e-05,
695
+ "loss": 0.8276,
696
+ "step": 970
697
+ },
698
+ {
699
+ "epoch": 0.784,
700
+ "grad_norm": 12.511467933654785,
701
+ "learning_rate": 2.2475675675675675e-05,
702
+ "loss": 0.8709,
703
+ "step": 980
704
+ },
705
+ {
706
+ "epoch": 0.792,
707
+ "grad_norm": 8.908098220825195,
708
+ "learning_rate": 2.2394594594594595e-05,
709
+ "loss": 0.8812,
710
+ "step": 990
711
+ },
712
+ {
713
+ "epoch": 0.8,
714
+ "grad_norm": 10.62246322631836,
715
+ "learning_rate": 2.2313513513513512e-05,
716
+ "loss": 0.9733,
717
+ "step": 1000
718
+ },
719
+ {
720
+ "epoch": 0.808,
721
+ "grad_norm": 14.651544570922852,
722
+ "learning_rate": 2.2232432432432432e-05,
723
+ "loss": 1.0309,
724
+ "step": 1010
725
+ },
726
+ {
727
+ "epoch": 0.816,
728
+ "grad_norm": 19.1525936126709,
729
+ "learning_rate": 2.2151351351351352e-05,
730
+ "loss": 0.8808,
731
+ "step": 1020
732
+ },
733
+ {
734
+ "epoch": 0.824,
735
+ "grad_norm": 7.289106369018555,
736
+ "learning_rate": 2.2070270270270272e-05,
737
+ "loss": 0.9126,
738
+ "step": 1030
739
+ },
740
+ {
741
+ "epoch": 0.832,
742
+ "grad_norm": 5.375001907348633,
743
+ "learning_rate": 2.198918918918919e-05,
744
+ "loss": 0.847,
745
+ "step": 1040
746
+ },
747
+ {
748
+ "epoch": 0.84,
749
+ "grad_norm": 8.623431205749512,
750
+ "learning_rate": 2.190810810810811e-05,
751
+ "loss": 0.9139,
752
+ "step": 1050
753
+ },
754
+ {
755
+ "epoch": 0.848,
756
+ "grad_norm": 6.639071941375732,
757
+ "learning_rate": 2.182702702702703e-05,
758
+ "loss": 0.9345,
759
+ "step": 1060
760
+ },
761
+ {
762
+ "epoch": 0.856,
763
+ "grad_norm": 7.635943412780762,
764
+ "learning_rate": 2.174594594594595e-05,
765
+ "loss": 0.8134,
766
+ "step": 1070
767
+ },
768
+ {
769
+ "epoch": 0.864,
770
+ "grad_norm": 12.048315048217773,
771
+ "learning_rate": 2.1664864864864862e-05,
772
+ "loss": 0.6728,
773
+ "step": 1080
774
+ },
775
+ {
776
+ "epoch": 0.872,
777
+ "grad_norm": 13.869949340820312,
778
+ "learning_rate": 2.1583783783783783e-05,
779
+ "loss": 0.8256,
780
+ "step": 1090
781
+ },
782
+ {
783
+ "epoch": 0.88,
784
+ "grad_norm": 26.233325958251953,
785
+ "learning_rate": 2.1502702702702703e-05,
786
+ "loss": 0.7044,
787
+ "step": 1100
788
+ },
789
+ {
790
+ "epoch": 0.888,
791
+ "grad_norm": 7.98716926574707,
792
+ "learning_rate": 2.1421621621621623e-05,
793
+ "loss": 0.7398,
794
+ "step": 1110
795
+ },
796
+ {
797
+ "epoch": 0.896,
798
+ "grad_norm": 13.682205200195312,
799
+ "learning_rate": 2.134054054054054e-05,
800
+ "loss": 0.7522,
801
+ "step": 1120
802
+ },
803
+ {
804
+ "epoch": 0.904,
805
+ "grad_norm": 9.086796760559082,
806
+ "learning_rate": 2.125945945945946e-05,
807
+ "loss": 0.8574,
808
+ "step": 1130
809
+ },
810
+ {
811
+ "epoch": 0.912,
812
+ "grad_norm": 10.3043851852417,
813
+ "learning_rate": 2.117837837837838e-05,
814
+ "loss": 0.8005,
815
+ "step": 1140
816
+ },
817
+ {
818
+ "epoch": 0.92,
819
+ "grad_norm": 12.477950096130371,
820
+ "learning_rate": 2.10972972972973e-05,
821
+ "loss": 0.8436,
822
+ "step": 1150
823
+ },
824
+ {
825
+ "epoch": 0.928,
826
+ "grad_norm": 16.634178161621094,
827
+ "learning_rate": 2.1016216216216216e-05,
828
+ "loss": 0.6515,
829
+ "step": 1160
830
+ },
831
+ {
832
+ "epoch": 0.936,
833
+ "grad_norm": 11.066425323486328,
834
+ "learning_rate": 2.0935135135135136e-05,
835
+ "loss": 0.8689,
836
+ "step": 1170
837
+ },
838
+ {
839
+ "epoch": 0.944,
840
+ "grad_norm": 19.47179412841797,
841
+ "learning_rate": 2.0854054054054056e-05,
842
+ "loss": 0.6605,
843
+ "step": 1180
844
+ },
845
+ {
846
+ "epoch": 0.952,
847
+ "grad_norm": 3.917236804962158,
848
+ "learning_rate": 2.0772972972972973e-05,
849
+ "loss": 0.6826,
850
+ "step": 1190
851
+ },
852
+ {
853
+ "epoch": 0.96,
854
+ "grad_norm": 16.43979263305664,
855
+ "learning_rate": 2.069189189189189e-05,
856
+ "loss": 0.7731,
857
+ "step": 1200
858
+ },
859
+ {
860
+ "epoch": 0.968,
861
+ "grad_norm": 6.7848711013793945,
862
+ "learning_rate": 2.061081081081081e-05,
863
+ "loss": 0.705,
864
+ "step": 1210
865
+ },
866
+ {
867
+ "epoch": 0.976,
868
+ "grad_norm": 7.472936153411865,
869
+ "learning_rate": 2.052972972972973e-05,
870
+ "loss": 0.7663,
871
+ "step": 1220
872
+ },
873
+ {
874
+ "epoch": 0.984,
875
+ "grad_norm": 5.729743957519531,
876
+ "learning_rate": 2.044864864864865e-05,
877
+ "loss": 0.9337,
878
+ "step": 1230
879
+ },
880
+ {
881
+ "epoch": 0.992,
882
+ "grad_norm": 6.306894302368164,
883
+ "learning_rate": 2.0367567567567567e-05,
884
+ "loss": 0.7655,
885
+ "step": 1240
886
+ },
887
+ {
888
+ "epoch": 1.0,
889
+ "grad_norm": 17.98261260986328,
890
+ "learning_rate": 2.0286486486486487e-05,
891
+ "loss": 0.8202,
892
+ "step": 1250
893
+ },
894
+ {
895
+ "epoch": 1.0,
896
+ "eval_accuracy": 0.7668834417208604,
897
+ "eval_f1": 0.7589222033223469,
898
+ "eval_loss": 0.7338727712631226,
899
+ "eval_model_preparation_time": 0.0029,
900
+ "eval_runtime": 59.9403,
901
+ "eval_samples_per_second": 33.35,
902
+ "eval_steps_per_second": 4.171,
903
+ "step": 1250
904
+ },
905
+ {
906
+ "epoch": 1.008,
907
+ "grad_norm": 3.764857292175293,
908
+ "learning_rate": 2.0205405405405407e-05,
909
+ "loss": 0.6991,
910
+ "step": 1260
911
+ },
912
+ {
913
+ "epoch": 1.016,
914
+ "grad_norm": 12.638517379760742,
915
+ "learning_rate": 2.0124324324324327e-05,
916
+ "loss": 0.7653,
917
+ "step": 1270
918
+ },
919
+ {
920
+ "epoch": 1.024,
921
+ "grad_norm": 17.76312255859375,
922
+ "learning_rate": 2.0043243243243243e-05,
923
+ "loss": 0.8033,
924
+ "step": 1280
925
+ },
926
+ {
927
+ "epoch": 1.032,
928
+ "grad_norm": 11.941933631896973,
929
+ "learning_rate": 1.9962162162162163e-05,
930
+ "loss": 0.699,
931
+ "step": 1290
932
+ },
933
+ {
934
+ "epoch": 1.04,
935
+ "grad_norm": 17.107053756713867,
936
+ "learning_rate": 1.988108108108108e-05,
937
+ "loss": 0.706,
938
+ "step": 1300
939
+ },
940
+ {
941
+ "epoch": 1.048,
942
+ "grad_norm": 6.369427680969238,
943
+ "learning_rate": 1.98e-05,
944
+ "loss": 0.6494,
945
+ "step": 1310
946
+ },
947
+ {
948
+ "epoch": 1.056,
949
+ "grad_norm": 7.1872477531433105,
950
+ "learning_rate": 1.9718918918918917e-05,
951
+ "loss": 0.5989,
952
+ "step": 1320
953
+ },
954
+ {
955
+ "epoch": 1.064,
956
+ "grad_norm": 3.7603728771209717,
957
+ "learning_rate": 1.9637837837837837e-05,
958
+ "loss": 0.7582,
959
+ "step": 1330
960
+ },
961
+ {
962
+ "epoch": 1.072,
963
+ "grad_norm": 10.045304298400879,
964
+ "learning_rate": 1.9556756756756757e-05,
965
+ "loss": 0.7192,
966
+ "step": 1340
967
+ },
968
+ {
969
+ "epoch": 1.08,
970
+ "grad_norm": 14.639888763427734,
971
+ "learning_rate": 1.9475675675675677e-05,
972
+ "loss": 0.7959,
973
+ "step": 1350
974
+ },
975
+ {
976
+ "epoch": 1.088,
977
+ "grad_norm": 8.216081619262695,
978
+ "learning_rate": 1.9394594594594594e-05,
979
+ "loss": 0.6062,
980
+ "step": 1360
981
+ },
982
+ {
983
+ "epoch": 1.096,
984
+ "grad_norm": 5.784476280212402,
985
+ "learning_rate": 1.9313513513513514e-05,
986
+ "loss": 0.541,
987
+ "step": 1370
988
+ },
989
+ {
990
+ "epoch": 1.104,
991
+ "grad_norm": 5.356358051300049,
992
+ "learning_rate": 1.9232432432432434e-05,
993
+ "loss": 0.6993,
994
+ "step": 1380
995
+ },
996
+ {
997
+ "epoch": 1.112,
998
+ "grad_norm": 17.11981773376465,
999
+ "learning_rate": 1.9151351351351354e-05,
1000
+ "loss": 0.7217,
1001
+ "step": 1390
1002
+ },
1003
+ {
1004
+ "epoch": 1.12,
1005
+ "grad_norm": 15.52505111694336,
1006
+ "learning_rate": 1.907027027027027e-05,
1007
+ "loss": 0.548,
1008
+ "step": 1400
1009
+ },
1010
+ {
1011
+ "epoch": 1.1280000000000001,
1012
+ "grad_norm": 12.326894760131836,
1013
+ "learning_rate": 1.8989189189189187e-05,
1014
+ "loss": 0.7058,
1015
+ "step": 1410
1016
+ },
1017
+ {
1018
+ "epoch": 1.1360000000000001,
1019
+ "grad_norm": 12.889031410217285,
1020
+ "learning_rate": 1.8908108108108107e-05,
1021
+ "loss": 0.7748,
1022
+ "step": 1420
1023
+ },
1024
+ {
1025
+ "epoch": 1.144,
1026
+ "grad_norm": 12.953654289245605,
1027
+ "learning_rate": 1.8827027027027027e-05,
1028
+ "loss": 0.7251,
1029
+ "step": 1430
1030
+ },
1031
+ {
1032
+ "epoch": 1.152,
1033
+ "grad_norm": 2.564222812652588,
1034
+ "learning_rate": 1.8745945945945944e-05,
1035
+ "loss": 0.551,
1036
+ "step": 1440
1037
+ },
1038
+ {
1039
+ "epoch": 1.16,
1040
+ "grad_norm": 9.111184120178223,
1041
+ "learning_rate": 1.8664864864864864e-05,
1042
+ "loss": 0.5548,
1043
+ "step": 1450
1044
+ },
1045
+ {
1046
+ "epoch": 1.168,
1047
+ "grad_norm": 7.713393211364746,
1048
+ "learning_rate": 1.8583783783783784e-05,
1049
+ "loss": 0.564,
1050
+ "step": 1460
1051
+ },
1052
+ {
1053
+ "epoch": 1.176,
1054
+ "grad_norm": 8.282889366149902,
1055
+ "learning_rate": 1.8502702702702704e-05,
1056
+ "loss": 0.7312,
1057
+ "step": 1470
1058
+ },
1059
+ {
1060
+ "epoch": 1.184,
1061
+ "grad_norm": 15.445865631103516,
1062
+ "learning_rate": 1.842162162162162e-05,
1063
+ "loss": 0.6974,
1064
+ "step": 1480
1065
+ },
1066
+ {
1067
+ "epoch": 1.192,
1068
+ "grad_norm": 6.040890693664551,
1069
+ "learning_rate": 1.834054054054054e-05,
1070
+ "loss": 0.6808,
1071
+ "step": 1490
1072
+ },
1073
+ {
1074
+ "epoch": 1.2,
1075
+ "grad_norm": 17.368532180786133,
1076
+ "learning_rate": 1.825945945945946e-05,
1077
+ "loss": 0.5695,
1078
+ "step": 1500
1079
+ },
1080
+ {
1081
+ "epoch": 1.208,
1082
+ "grad_norm": 11.174856185913086,
1083
+ "learning_rate": 1.817837837837838e-05,
1084
+ "loss": 0.8114,
1085
+ "step": 1510
1086
+ },
1087
+ {
1088
+ "epoch": 1.216,
1089
+ "grad_norm": 18.861087799072266,
1090
+ "learning_rate": 1.8097297297297298e-05,
1091
+ "loss": 0.601,
1092
+ "step": 1520
1093
+ },
1094
+ {
1095
+ "epoch": 1.224,
1096
+ "grad_norm": 4.157520771026611,
1097
+ "learning_rate": 1.8016216216216214e-05,
1098
+ "loss": 0.6195,
1099
+ "step": 1530
1100
+ },
1101
+ {
1102
+ "epoch": 1.232,
1103
+ "grad_norm": 40.063621520996094,
1104
+ "learning_rate": 1.7935135135135134e-05,
1105
+ "loss": 0.6502,
1106
+ "step": 1540
1107
+ },
1108
+ {
1109
+ "epoch": 1.24,
1110
+ "grad_norm": 12.99301528930664,
1111
+ "learning_rate": 1.7854054054054055e-05,
1112
+ "loss": 0.5463,
1113
+ "step": 1550
1114
+ },
1115
+ {
1116
+ "epoch": 1.248,
1117
+ "grad_norm": 3.057586908340454,
1118
+ "learning_rate": 1.7772972972972975e-05,
1119
+ "loss": 0.5025,
1120
+ "step": 1560
1121
+ },
1122
+ {
1123
+ "epoch": 1.256,
1124
+ "grad_norm": 7.806783199310303,
1125
+ "learning_rate": 1.769189189189189e-05,
1126
+ "loss": 0.5544,
1127
+ "step": 1570
1128
+ },
1129
+ {
1130
+ "epoch": 1.264,
1131
+ "grad_norm": 6.477509498596191,
1132
+ "learning_rate": 1.761081081081081e-05,
1133
+ "loss": 0.5447,
1134
+ "step": 1580
1135
+ },
1136
+ {
1137
+ "epoch": 1.272,
1138
+ "grad_norm": 28.918643951416016,
1139
+ "learning_rate": 1.752972972972973e-05,
1140
+ "loss": 0.7803,
1141
+ "step": 1590
1142
+ },
1143
+ {
1144
+ "epoch": 1.28,
1145
+ "grad_norm": 23.26552391052246,
1146
+ "learning_rate": 1.744864864864865e-05,
1147
+ "loss": 0.7442,
1148
+ "step": 1600
1149
+ },
1150
+ {
1151
+ "epoch": 1.288,
1152
+ "grad_norm": 24.359582901000977,
1153
+ "learning_rate": 1.7367567567567568e-05,
1154
+ "loss": 0.705,
1155
+ "step": 1610
1156
+ },
1157
+ {
1158
+ "epoch": 1.296,
1159
+ "grad_norm": 20.904409408569336,
1160
+ "learning_rate": 1.7286486486486488e-05,
1161
+ "loss": 0.7149,
1162
+ "step": 1620
1163
+ },
1164
+ {
1165
+ "epoch": 1.304,
1166
+ "grad_norm": 11.675884246826172,
1167
+ "learning_rate": 1.7205405405405408e-05,
1168
+ "loss": 0.5916,
1169
+ "step": 1630
1170
+ },
1171
+ {
1172
+ "epoch": 1.312,
1173
+ "grad_norm": 19.712337493896484,
1174
+ "learning_rate": 1.7124324324324325e-05,
1175
+ "loss": 0.59,
1176
+ "step": 1640
1177
+ },
1178
+ {
1179
+ "epoch": 1.32,
1180
+ "grad_norm": 1.4685373306274414,
1181
+ "learning_rate": 1.704324324324324e-05,
1182
+ "loss": 0.6127,
1183
+ "step": 1650
1184
+ },
1185
+ {
1186
+ "epoch": 1.328,
1187
+ "grad_norm": 13.145341873168945,
1188
+ "learning_rate": 1.696216216216216e-05,
1189
+ "loss": 0.5311,
1190
+ "step": 1660
1191
+ },
1192
+ {
1193
+ "epoch": 1.336,
1194
+ "grad_norm": 33.24889373779297,
1195
+ "learning_rate": 1.6881081081081082e-05,
1196
+ "loss": 0.539,
1197
+ "step": 1670
1198
+ },
1199
+ {
1200
+ "epoch": 1.3439999999999999,
1201
+ "grad_norm": 2.730905771255493,
1202
+ "learning_rate": 1.6800000000000002e-05,
1203
+ "loss": 0.6207,
1204
+ "step": 1680
1205
+ },
1206
+ {
1207
+ "epoch": 1.3519999999999999,
1208
+ "grad_norm": 18.928560256958008,
1209
+ "learning_rate": 1.671891891891892e-05,
1210
+ "loss": 0.5226,
1211
+ "step": 1690
1212
+ },
1213
+ {
1214
+ "epoch": 1.3599999999999999,
1215
+ "grad_norm": 6.420986175537109,
1216
+ "learning_rate": 1.663783783783784e-05,
1217
+ "loss": 0.5054,
1218
+ "step": 1700
1219
+ },
1220
+ {
1221
+ "epoch": 1.3679999999999999,
1222
+ "grad_norm": 10.68362045288086,
1223
+ "learning_rate": 1.655675675675676e-05,
1224
+ "loss": 0.553,
1225
+ "step": 1710
1226
+ },
1227
+ {
1228
+ "epoch": 1.376,
1229
+ "grad_norm": 23.89041519165039,
1230
+ "learning_rate": 1.647567567567568e-05,
1231
+ "loss": 0.585,
1232
+ "step": 1720
1233
+ },
1234
+ {
1235
+ "epoch": 1.384,
1236
+ "grad_norm": 0.6503021717071533,
1237
+ "learning_rate": 1.6394594594594595e-05,
1238
+ "loss": 0.5443,
1239
+ "step": 1730
1240
+ },
1241
+ {
1242
+ "epoch": 1.392,
1243
+ "grad_norm": 16.69384002685547,
1244
+ "learning_rate": 1.6313513513513515e-05,
1245
+ "loss": 0.777,
1246
+ "step": 1740
1247
+ },
1248
+ {
1249
+ "epoch": 1.4,
1250
+ "grad_norm": 11.571426391601562,
1251
+ "learning_rate": 1.6232432432432432e-05,
1252
+ "loss": 0.7578,
1253
+ "step": 1750
1254
+ },
1255
+ {
1256
+ "epoch": 1.408,
1257
+ "grad_norm": 19.505590438842773,
1258
+ "learning_rate": 1.6151351351351352e-05,
1259
+ "loss": 0.755,
1260
+ "step": 1760
1261
+ },
1262
+ {
1263
+ "epoch": 1.416,
1264
+ "grad_norm": 12.909994125366211,
1265
+ "learning_rate": 1.607027027027027e-05,
1266
+ "loss": 0.5617,
1267
+ "step": 1770
1268
+ },
1269
+ {
1270
+ "epoch": 1.424,
1271
+ "grad_norm": 10.301375389099121,
1272
+ "learning_rate": 1.598918918918919e-05,
1273
+ "loss": 0.7985,
1274
+ "step": 1780
1275
+ },
1276
+ {
1277
+ "epoch": 1.432,
1278
+ "grad_norm": 20.1243839263916,
1279
+ "learning_rate": 1.590810810810811e-05,
1280
+ "loss": 0.6966,
1281
+ "step": 1790
1282
+ },
1283
+ {
1284
+ "epoch": 1.44,
1285
+ "grad_norm": 5.007569789886475,
1286
+ "learning_rate": 1.582702702702703e-05,
1287
+ "loss": 0.5544,
1288
+ "step": 1800
1289
+ },
1290
+ {
1291
+ "epoch": 1.448,
1292
+ "grad_norm": 2.8352081775665283,
1293
+ "learning_rate": 1.5745945945945946e-05,
1294
+ "loss": 0.8716,
1295
+ "step": 1810
1296
+ },
1297
+ {
1298
+ "epoch": 1.456,
1299
+ "grad_norm": 8.246051788330078,
1300
+ "learning_rate": 1.5664864864864866e-05,
1301
+ "loss": 0.449,
1302
+ "step": 1820
1303
+ },
1304
+ {
1305
+ "epoch": 1.464,
1306
+ "grad_norm": 7.072529315948486,
1307
+ "learning_rate": 1.5583783783783786e-05,
1308
+ "loss": 0.6345,
1309
+ "step": 1830
1310
+ },
1311
+ {
1312
+ "epoch": 1.472,
1313
+ "grad_norm": 11.075968742370605,
1314
+ "learning_rate": 1.5502702702702706e-05,
1315
+ "loss": 0.5029,
1316
+ "step": 1840
1317
+ },
1318
+ {
1319
+ "epoch": 1.48,
1320
+ "grad_norm": 10.411526679992676,
1321
+ "learning_rate": 1.5421621621621622e-05,
1322
+ "loss": 0.4708,
1323
+ "step": 1850
1324
+ },
1325
+ {
1326
+ "epoch": 1.488,
1327
+ "grad_norm": 13.274471282958984,
1328
+ "learning_rate": 1.534054054054054e-05,
1329
+ "loss": 0.4823,
1330
+ "step": 1860
1331
+ },
1332
+ {
1333
+ "epoch": 1.496,
1334
+ "grad_norm": 8.039985656738281,
1335
+ "learning_rate": 1.525945945945946e-05,
1336
+ "loss": 0.5719,
1337
+ "step": 1870
1338
+ },
1339
+ {
1340
+ "epoch": 1.504,
1341
+ "grad_norm": 19.872621536254883,
1342
+ "learning_rate": 1.5178378378378381e-05,
1343
+ "loss": 0.7933,
1344
+ "step": 1880
1345
+ },
1346
+ {
1347
+ "epoch": 1.512,
1348
+ "grad_norm": 12.538918495178223,
1349
+ "learning_rate": 1.5097297297297296e-05,
1350
+ "loss": 0.4885,
1351
+ "step": 1890
1352
+ },
1353
+ {
1354
+ "epoch": 1.52,
1355
+ "grad_norm": 26.685623168945312,
1356
+ "learning_rate": 1.5016216216216216e-05,
1357
+ "loss": 0.6399,
1358
+ "step": 1900
1359
+ },
1360
+ {
1361
+ "epoch": 1.528,
1362
+ "grad_norm": 10.571418762207031,
1363
+ "learning_rate": 1.4935135135135136e-05,
1364
+ "loss": 0.6462,
1365
+ "step": 1910
1366
+ },
1367
+ {
1368
+ "epoch": 1.536,
1369
+ "grad_norm": 3.1144027709960938,
1370
+ "learning_rate": 1.4854054054054054e-05,
1371
+ "loss": 0.6041,
1372
+ "step": 1920
1373
+ },
1374
+ {
1375
+ "epoch": 1.544,
1376
+ "grad_norm": 5.647855758666992,
1377
+ "learning_rate": 1.4772972972972975e-05,
1378
+ "loss": 0.584,
1379
+ "step": 1930
1380
+ },
1381
+ {
1382
+ "epoch": 1.552,
1383
+ "grad_norm": 9.756006240844727,
1384
+ "learning_rate": 1.4691891891891893e-05,
1385
+ "loss": 0.3929,
1386
+ "step": 1940
1387
+ },
1388
+ {
1389
+ "epoch": 1.56,
1390
+ "grad_norm": 7.031187534332275,
1391
+ "learning_rate": 1.4610810810810811e-05,
1392
+ "loss": 0.5963,
1393
+ "step": 1950
1394
+ },
1395
+ {
1396
+ "epoch": 1.568,
1397
+ "grad_norm": 19.187641143798828,
1398
+ "learning_rate": 1.452972972972973e-05,
1399
+ "loss": 0.4967,
1400
+ "step": 1960
1401
+ },
1402
+ {
1403
+ "epoch": 1.576,
1404
+ "grad_norm": 16.660043716430664,
1405
+ "learning_rate": 1.444864864864865e-05,
1406
+ "loss": 0.632,
1407
+ "step": 1970
1408
+ },
1409
+ {
1410
+ "epoch": 1.584,
1411
+ "grad_norm": 15.292383193969727,
1412
+ "learning_rate": 1.4367567567567568e-05,
1413
+ "loss": 0.6671,
1414
+ "step": 1980
1415
+ },
1416
+ {
1417
+ "epoch": 1.592,
1418
+ "grad_norm": 15.64156436920166,
1419
+ "learning_rate": 1.4286486486486488e-05,
1420
+ "loss": 0.5182,
1421
+ "step": 1990
1422
+ },
1423
+ {
1424
+ "epoch": 1.6,
1425
+ "grad_norm": 12.81575870513916,
1426
+ "learning_rate": 1.4205405405405405e-05,
1427
+ "loss": 0.7377,
1428
+ "step": 2000
1429
+ },
1430
+ {
1431
+ "epoch": 1.608,
1432
+ "grad_norm": 28.722570419311523,
1433
+ "learning_rate": 1.4124324324324325e-05,
1434
+ "loss": 0.6432,
1435
+ "step": 2010
1436
+ },
1437
+ {
1438
+ "epoch": 1.616,
1439
+ "grad_norm": 9.101573944091797,
1440
+ "learning_rate": 1.4043243243243243e-05,
1441
+ "loss": 0.7597,
1442
+ "step": 2020
1443
+ },
1444
+ {
1445
+ "epoch": 1.624,
1446
+ "grad_norm": 18.51584815979004,
1447
+ "learning_rate": 1.3962162162162163e-05,
1448
+ "loss": 0.5169,
1449
+ "step": 2030
1450
+ },
1451
+ {
1452
+ "epoch": 1.6320000000000001,
1453
+ "grad_norm": 19.951353073120117,
1454
+ "learning_rate": 1.3881081081081082e-05,
1455
+ "loss": 0.6938,
1456
+ "step": 2040
1457
+ },
1458
+ {
1459
+ "epoch": 1.6400000000000001,
1460
+ "grad_norm": 0.7395208477973938,
1461
+ "learning_rate": 1.3800000000000002e-05,
1462
+ "loss": 0.5438,
1463
+ "step": 2050
1464
+ },
1465
+ {
1466
+ "epoch": 1.6480000000000001,
1467
+ "grad_norm": 23.373943328857422,
1468
+ "learning_rate": 1.3718918918918918e-05,
1469
+ "loss": 0.543,
1470
+ "step": 2060
1471
+ },
1472
+ {
1473
+ "epoch": 1.6560000000000001,
1474
+ "grad_norm": 13.313843727111816,
1475
+ "learning_rate": 1.3637837837837838e-05,
1476
+ "loss": 0.752,
1477
+ "step": 2070
1478
+ },
1479
+ {
1480
+ "epoch": 1.6640000000000001,
1481
+ "grad_norm": 19.967775344848633,
1482
+ "learning_rate": 1.3556756756756757e-05,
1483
+ "loss": 0.5858,
1484
+ "step": 2080
1485
+ },
1486
+ {
1487
+ "epoch": 1.6720000000000002,
1488
+ "grad_norm": 6.15806770324707,
1489
+ "learning_rate": 1.3475675675675677e-05,
1490
+ "loss": 0.6735,
1491
+ "step": 2090
1492
+ },
1493
+ {
1494
+ "epoch": 1.6800000000000002,
1495
+ "grad_norm": 20.810691833496094,
1496
+ "learning_rate": 1.3394594594594595e-05,
1497
+ "loss": 0.7207,
1498
+ "step": 2100
1499
+ },
1500
+ {
1501
+ "epoch": 1.688,
1502
+ "grad_norm": 21.559804916381836,
1503
+ "learning_rate": 1.3313513513513514e-05,
1504
+ "loss": 0.6681,
1505
+ "step": 2110
1506
+ },
1507
+ {
1508
+ "epoch": 1.696,
1509
+ "grad_norm": 5.827245235443115,
1510
+ "learning_rate": 1.3232432432432432e-05,
1511
+ "loss": 0.5774,
1512
+ "step": 2120
1513
+ },
1514
+ {
1515
+ "epoch": 1.704,
1516
+ "grad_norm": 4.7927069664001465,
1517
+ "learning_rate": 1.3151351351351352e-05,
1518
+ "loss": 0.6439,
1519
+ "step": 2130
1520
+ },
1521
+ {
1522
+ "epoch": 1.712,
1523
+ "grad_norm": 14.177338600158691,
1524
+ "learning_rate": 1.307027027027027e-05,
1525
+ "loss": 0.7137,
1526
+ "step": 2140
1527
+ },
1528
+ {
1529
+ "epoch": 1.72,
1530
+ "grad_norm": 14.718915939331055,
1531
+ "learning_rate": 1.298918918918919e-05,
1532
+ "loss": 0.5463,
1533
+ "step": 2150
1534
+ },
1535
+ {
1536
+ "epoch": 1.728,
1537
+ "grad_norm": 18.23885726928711,
1538
+ "learning_rate": 1.2908108108108109e-05,
1539
+ "loss": 0.6557,
1540
+ "step": 2160
1541
+ },
1542
+ {
1543
+ "epoch": 1.736,
1544
+ "grad_norm": 9.514300346374512,
1545
+ "learning_rate": 1.2827027027027027e-05,
1546
+ "loss": 0.5304,
1547
+ "step": 2170
1548
+ },
1549
+ {
1550
+ "epoch": 1.744,
1551
+ "grad_norm": 15.950238227844238,
1552
+ "learning_rate": 1.2745945945945946e-05,
1553
+ "loss": 0.721,
1554
+ "step": 2180
1555
+ },
1556
+ {
1557
+ "epoch": 1.752,
1558
+ "grad_norm": 5.722634315490723,
1559
+ "learning_rate": 1.2664864864864866e-05,
1560
+ "loss": 0.3436,
1561
+ "step": 2190
1562
+ },
1563
+ {
1564
+ "epoch": 1.76,
1565
+ "grad_norm": 11.308035850524902,
1566
+ "learning_rate": 1.2583783783783784e-05,
1567
+ "loss": 0.5269,
1568
+ "step": 2200
1569
+ },
1570
+ {
1571
+ "epoch": 1.768,
1572
+ "grad_norm": 5.413994789123535,
1573
+ "learning_rate": 1.2502702702702704e-05,
1574
+ "loss": 0.4461,
1575
+ "step": 2210
1576
+ },
1577
+ {
1578
+ "epoch": 1.776,
1579
+ "grad_norm": 29.982696533203125,
1580
+ "learning_rate": 1.2421621621621622e-05,
1581
+ "loss": 0.5942,
1582
+ "step": 2220
1583
+ },
1584
+ {
1585
+ "epoch": 1.784,
1586
+ "grad_norm": 25.45384979248047,
1587
+ "learning_rate": 1.234054054054054e-05,
1588
+ "loss": 0.5469,
1589
+ "step": 2230
1590
+ },
1591
+ {
1592
+ "epoch": 1.792,
1593
+ "grad_norm": 10.957773208618164,
1594
+ "learning_rate": 1.225945945945946e-05,
1595
+ "loss": 0.4809,
1596
+ "step": 2240
1597
+ },
1598
+ {
1599
+ "epoch": 1.8,
1600
+ "grad_norm": 11.609101295471191,
1601
+ "learning_rate": 1.217837837837838e-05,
1602
+ "loss": 0.73,
1603
+ "step": 2250
1604
+ },
1605
+ {
1606
+ "epoch": 1.808,
1607
+ "grad_norm": 14.325447082519531,
1608
+ "learning_rate": 1.2097297297297298e-05,
1609
+ "loss": 0.3926,
1610
+ "step": 2260
1611
+ },
1612
+ {
1613
+ "epoch": 1.8159999999999998,
1614
+ "grad_norm": 4.8160719871521,
1615
+ "learning_rate": 1.2016216216216218e-05,
1616
+ "loss": 0.5617,
1617
+ "step": 2270
1618
+ },
1619
+ {
1620
+ "epoch": 1.8239999999999998,
1621
+ "grad_norm": 6.287977695465088,
1622
+ "learning_rate": 1.1935135135135134e-05,
1623
+ "loss": 0.3482,
1624
+ "step": 2280
1625
+ },
1626
+ {
1627
+ "epoch": 1.8319999999999999,
1628
+ "grad_norm": 9.819110870361328,
1629
+ "learning_rate": 1.1854054054054054e-05,
1630
+ "loss": 0.5353,
1631
+ "step": 2290
1632
+ },
1633
+ {
1634
+ "epoch": 1.8399999999999999,
1635
+ "grad_norm": 7.572418689727783,
1636
+ "learning_rate": 1.1772972972972973e-05,
1637
+ "loss": 0.7756,
1638
+ "step": 2300
1639
+ },
1640
+ {
1641
+ "epoch": 1.8479999999999999,
1642
+ "grad_norm": 16.719934463500977,
1643
+ "learning_rate": 1.1691891891891893e-05,
1644
+ "loss": 0.6298,
1645
+ "step": 2310
1646
+ },
1647
+ {
1648
+ "epoch": 1.8559999999999999,
1649
+ "grad_norm": 18.21957778930664,
1650
+ "learning_rate": 1.1610810810810811e-05,
1651
+ "loss": 0.4695,
1652
+ "step": 2320
1653
+ },
1654
+ {
1655
+ "epoch": 1.8639999999999999,
1656
+ "grad_norm": 5.47652530670166,
1657
+ "learning_rate": 1.1529729729729731e-05,
1658
+ "loss": 0.2983,
1659
+ "step": 2330
1660
+ },
1661
+ {
1662
+ "epoch": 1.8719999999999999,
1663
+ "grad_norm": 22.044818878173828,
1664
+ "learning_rate": 1.1448648648648648e-05,
1665
+ "loss": 0.5972,
1666
+ "step": 2340
1667
+ },
1668
+ {
1669
+ "epoch": 1.88,
1670
+ "grad_norm": 26.34394645690918,
1671
+ "learning_rate": 1.1367567567567568e-05,
1672
+ "loss": 0.4404,
1673
+ "step": 2350
1674
+ },
1675
+ {
1676
+ "epoch": 1.888,
1677
+ "grad_norm": 21.979583740234375,
1678
+ "learning_rate": 1.1286486486486486e-05,
1679
+ "loss": 0.6474,
1680
+ "step": 2360
1681
+ },
1682
+ {
1683
+ "epoch": 1.896,
1684
+ "grad_norm": 15.81022834777832,
1685
+ "learning_rate": 1.1205405405405406e-05,
1686
+ "loss": 0.552,
1687
+ "step": 2370
1688
+ },
1689
+ {
1690
+ "epoch": 1.904,
1691
+ "grad_norm": 13.853069305419922,
1692
+ "learning_rate": 1.1124324324324325e-05,
1693
+ "loss": 0.5908,
1694
+ "step": 2380
1695
+ },
1696
+ {
1697
+ "epoch": 1.912,
1698
+ "grad_norm": 4.924503326416016,
1699
+ "learning_rate": 1.1043243243243243e-05,
1700
+ "loss": 0.3883,
1701
+ "step": 2390
1702
+ },
1703
+ {
1704
+ "epoch": 1.92,
1705
+ "grad_norm": 15.801043510437012,
1706
+ "learning_rate": 1.0962162162162162e-05,
1707
+ "loss": 0.4635,
1708
+ "step": 2400
1709
+ },
1710
+ {
1711
+ "epoch": 1.928,
1712
+ "grad_norm": 17.398475646972656,
1713
+ "learning_rate": 1.0881081081081082e-05,
1714
+ "loss": 0.5509,
1715
+ "step": 2410
1716
+ },
1717
+ {
1718
+ "epoch": 1.936,
1719
+ "grad_norm": 12.026921272277832,
1720
+ "learning_rate": 1.08e-05,
1721
+ "loss": 0.533,
1722
+ "step": 2420
1723
+ },
1724
+ {
1725
+ "epoch": 1.944,
1726
+ "grad_norm": 23.21822738647461,
1727
+ "learning_rate": 1.071891891891892e-05,
1728
+ "loss": 0.5655,
1729
+ "step": 2430
1730
+ },
1731
+ {
1732
+ "epoch": 1.952,
1733
+ "grad_norm": 9.777156829833984,
1734
+ "learning_rate": 1.0637837837837838e-05,
1735
+ "loss": 0.7364,
1736
+ "step": 2440
1737
+ },
1738
+ {
1739
+ "epoch": 1.96,
1740
+ "grad_norm": 17.892311096191406,
1741
+ "learning_rate": 1.0556756756756757e-05,
1742
+ "loss": 0.5123,
1743
+ "step": 2450
1744
+ },
1745
+ {
1746
+ "epoch": 1.968,
1747
+ "grad_norm": 10.79381275177002,
1748
+ "learning_rate": 1.0475675675675675e-05,
1749
+ "loss": 0.5601,
1750
+ "step": 2460
1751
+ },
1752
+ {
1753
+ "epoch": 1.976,
1754
+ "grad_norm": 16.45550537109375,
1755
+ "learning_rate": 1.0394594594594595e-05,
1756
+ "loss": 0.3604,
1757
+ "step": 2470
1758
+ },
1759
+ {
1760
+ "epoch": 1.984,
1761
+ "grad_norm": 4.945703506469727,
1762
+ "learning_rate": 1.0313513513513514e-05,
1763
+ "loss": 0.5801,
1764
+ "step": 2480
1765
+ },
1766
+ {
1767
+ "epoch": 1.992,
1768
+ "grad_norm": 7.19441556930542,
1769
+ "learning_rate": 1.0232432432432434e-05,
1770
+ "loss": 0.5552,
1771
+ "step": 2490
1772
+ },
1773
+ {
1774
+ "epoch": 2.0,
1775
+ "grad_norm": 16.103708267211914,
1776
+ "learning_rate": 1.0151351351351352e-05,
1777
+ "loss": 0.5873,
1778
+ "step": 2500
1779
+ },
1780
+ {
1781
+ "epoch": 2.0,
1782
+ "eval_accuracy": 0.7983991995997999,
1783
+ "eval_f1": 0.8020345252909776,
1784
+ "eval_loss": 0.6678956747055054,
1785
+ "eval_model_preparation_time": 0.0029,
1786
+ "eval_runtime": 59.5494,
1787
+ "eval_samples_per_second": 33.569,
1788
+ "eval_steps_per_second": 4.198,
1789
+ "step": 2500
1790
+ }
1791
+ ],
1792
+ "logging_steps": 10,
1793
+ "max_steps": 3750,
1794
+ "num_input_tokens_seen": 0,
1795
+ "num_train_epochs": 3,
1796
+ "save_steps": 500,
1797
+ "stateful_callbacks": {
1798
+ "TrainerControl": {
1799
+ "args": {
1800
+ "should_epoch_stop": false,
1801
+ "should_evaluate": false,
1802
+ "should_log": false,
1803
+ "should_save": true,
1804
+ "should_training_stop": false
1805
+ },
1806
+ "attributes": {}
1807
+ }
1808
+ },
1809
+ "total_flos": 8.83389772226827e+17,
1810
+ "train_batch_size": 8,
1811
+ "trial_name": null,
1812
+ "trial_params": null
1813
+ }
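The checkpoint-2500 trainer_state.json above logs a training loss every 10 steps and a full evaluation once per epoch (accuracy 0.767 / F1 0.759 after epoch 1, 0.798 / 0.802 after epoch 2). A minimal sketch, not part of this commit, for turning that log into loss and metric curves; it assumes the checkpoint folder has been downloaded locally and the relative path is illustrative:

```python
# Sketch only: plot the training trace stored in trainer_state.json.
import json

import matplotlib.pyplot as plt

with open("checkpoint-2500/trainer_state.json") as f:
    state = json.load(f)

# log_history mixes training steps (which carry "loss") and per-epoch
# evaluation entries (which carry "eval_loss" / "eval_accuracy" / "eval_f1").
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_accuracy" in e]

plt.plot([e["step"] for e in train_logs], [e["loss"] for e in train_logs], label="train loss")
plt.plot([e["step"] for e in eval_logs], [e["eval_loss"] for e in eval_logs], "o-", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()

for e in eval_logs:
    print(f"epoch {e['epoch']:.1f}: accuracy={e['eval_accuracy']:.4f}, f1={e['eval_f1']:.4f}")
```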
checkpoint-2500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a868b38cbd73376b16ac8e3d8306db2a42a29f39b8f6d41ef6bd11bcdc6b19c
+ size 5304
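training_args.bin is the serialized training configuration that transformers.Trainer writes next to every checkpoint. A minimal sketch, not part of this commit, for inspecting it locally; since the file is a pickled Python object rather than a tensor file, only load it from a source you trust:

```python
# Sketch only: inspect the pickled TrainingArguments inside training_args.bin.
import torch

# weights_only=False is required on recent PyTorch versions because this is a
# full Python object, not a plain state dict.
args = torch.load("checkpoint-2500/training_args.bin", weights_only=False)

print(type(args).__name__)  # typically transformers.TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```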
checkpoint-3750/config.json ADDED
@@ -0,0 +1,137 @@
+ {
+ "activation_dropout": 0.0,
+ "adapter_attn_dim": null,
+ "adapter_kernel_size": 3,
+ "adapter_stride": 2,
+ "add_adapter": false,
+ "apply_spec_augment": true,
+ "architectures": [
+ "Wav2Vec2ForSequenceClassification"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "codevector_dim": 256,
+ "contrastive_logits_temperature": 0.1,
+ "conv_bias": false,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": false,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_norm": "group",
+ "feat_proj_dropout": 0.1,
+ "feat_quantizer_dropout": 0.0,
+ "final_dropout": 0.0,
+ "freeze_feat_extract_train": true,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "ANG",
+ "1": "CAL",
+ "2": "DIS",
+ "3": "FEA",
+ "4": "HAP",
+ "5": "NEU",
+ "6": "SAD",
+ "7": "SUR"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "ANG": 0,
+ "CAL": 1,
+ "DIS": 2,
+ "FEA": 3,
+ "HAP": 4,
+ "NEU": 5,
+ "SAD": 6,
+ "SUR": 7
+ },
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.0,
+ "mask_channel_length": 10,
+ "mask_channel_min_space": 1,
+ "mask_channel_other": 0.0,
+ "mask_channel_prob": 0.0,
+ "mask_channel_selection": "static",
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_min_space": 1,
+ "mask_time_other": 0.0,
+ "mask_time_prob": 0.05,
+ "mask_time_selection": "static",
+ "model_type": "wav2vec2",
+ "no_mask_channel_overlap": false,
+ "no_mask_time_overlap": false,
+ "num_adapter_layers": 3,
+ "num_attention_heads": 12,
+ "num_codevector_groups": 2,
+ "num_codevectors_per_group": 320,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 12,
+ "num_negatives": 100,
+ "output_hidden_size": 768,
+ "pad_token_id": 0,
+ "proj_codevector_dim": 256,
+ "tdnn_dilation": [
+ 1,
+ 2,
+ 3,
+ 1,
+ 1
+ ],
+ "tdnn_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 1500
+ ],
+ "tdnn_kernel": [
+ 5,
+ 3,
+ 3,
+ 1,
+ 1
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.50.0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32,
+ "xvector_output_dim": 512
+ }
checkpoint-3750/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a5ead8de09ac38fb8483538c96e79fef83b590194696a439ef18b4c95592951
+ size 378308536
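In the raw diff the binary entries above are git-lfs pointer stubs (a sha256 plus a size, roughly 378 MB for model.safetensors); the actual tensors arrive only after an LFS pull. A minimal sketch, not part of this commit and assuming the file has been fetched locally, for listing a few tensor names and shapes without loading the weights into memory:

```python
# Sketch only: peek at the safetensors header of the downloaded checkpoint.
from safetensors import safe_open

with safe_open("checkpoint-3750/model.safetensors", framework="pt") as f:
    for name in list(f.keys())[:5]:
        # get_slice reads metadata lazily, so the full 378 MB file is not loaded.
        print(name, f.get_slice(name).get_shape())
```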
checkpoint-3750/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac41701489dec80c540f22f0fe343445a05fc4189909b5cffc3d408392ef6ed9
+ size 723133690
checkpoint-3750/preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "do_normalize": true,
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+ "feature_size": 1,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+ }
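Together, config.json (Wav2Vec2ForSequenceClassification with the eight labels ANG/CAL/DIS/FEA/HAP/NEU/SAD/SUR) and preprocessor_config.json (mono 16 kHz input via Wav2Vec2FeatureExtractor) are enough to run the checkpoint. A minimal inference sketch, not part of this commit, assuming checkpoint-3750 is available locally; `sample.wav` is a placeholder path and librosa is just one way to read and resample audio:

```python
# Sketch only: classify one utterance with the fine-tuned checkpoint.
import librosa
import torch
from transformers import Wav2Vec2FeatureExtractor, Wav2Vec2ForSequenceClassification

ckpt = "checkpoint-3750"
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(ckpt)
model = Wav2Vec2ForSequenceClassification.from_pretrained(ckpt).eval()

# Resample to the 16 kHz rate declared in preprocessor_config.json.
waveform, _ = librosa.load("sample.wav", sr=feature_extractor.sampling_rate)
inputs = feature_extractor(
    waveform, sampling_rate=feature_extractor.sampling_rate, return_tensors="pt"
)

with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])  # e.g. "HAP"
```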
checkpoint-3750/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16d6fbadb3e180991682fda0578fe2c72f302c0f79effe68c518fef6e3900c7a
+ size 14244
checkpoint-3750/scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c357e3fcd5a4026c6fbfc21f6d0286251977d70579d35af48fb93121ca019e2d
+ size 988
checkpoint-3750/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8119485837290b6e4a0e9049d3ec1ff8eec211a118accca4110e67751b25bb9
+ size 1064
checkpoint-3750/trainer_state.json ADDED
@@ -0,0 +1,2699 @@
1
+ {
2
+ "best_global_step": 3750,
3
+ "best_metric": 0.845953955875665,
4
+ "best_model_checkpoint": "voice_emotion_classification/checkpoint-3750",
5
+ "epoch": 3.0,
6
+ "eval_steps": 500,
7
+ "global_step": 3750,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0008,
14
+ "grad_norm": 1.4005043506622314,
15
+ "learning_rate": 6.000000000000001e-07,
16
+ "loss": 2.0903,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.008,
21
+ "grad_norm": 1.3180536031723022,
22
+ "learning_rate": 6e-06,
23
+ "loss": 2.0826,
24
+ "step": 10
25
+ },
26
+ {
27
+ "epoch": 0.016,
28
+ "grad_norm": 1.0437175035476685,
29
+ "learning_rate": 1.2e-05,
30
+ "loss": 2.0693,
31
+ "step": 20
32
+ },
33
+ {
34
+ "epoch": 0.024,
35
+ "grad_norm": 1.371071457862854,
36
+ "learning_rate": 1.8e-05,
37
+ "loss": 2.0463,
38
+ "step": 30
39
+ },
40
+ {
41
+ "epoch": 0.032,
42
+ "grad_norm": 1.51685631275177,
43
+ "learning_rate": 2.4e-05,
44
+ "loss": 2.0423,
45
+ "step": 40
46
+ },
47
+ {
48
+ "epoch": 0.04,
49
+ "grad_norm": 1.4282890558242798,
50
+ "learning_rate": 3e-05,
51
+ "loss": 1.996,
52
+ "step": 50
53
+ },
54
+ {
55
+ "epoch": 0.048,
56
+ "grad_norm": 1.860026478767395,
57
+ "learning_rate": 2.991891891891892e-05,
58
+ "loss": 1.9639,
59
+ "step": 60
60
+ },
61
+ {
62
+ "epoch": 0.056,
63
+ "grad_norm": 2.121481418609619,
64
+ "learning_rate": 2.983783783783784e-05,
65
+ "loss": 1.9517,
66
+ "step": 70
67
+ },
68
+ {
69
+ "epoch": 0.064,
70
+ "grad_norm": 2.056445837020874,
71
+ "learning_rate": 2.9756756756756758e-05,
72
+ "loss": 1.8765,
73
+ "step": 80
74
+ },
75
+ {
76
+ "epoch": 0.072,
77
+ "grad_norm": 2.3928184509277344,
78
+ "learning_rate": 2.9675675675675678e-05,
79
+ "loss": 1.8703,
80
+ "step": 90
81
+ },
82
+ {
83
+ "epoch": 0.08,
84
+ "grad_norm": 3.0733420848846436,
85
+ "learning_rate": 2.9594594594594598e-05,
86
+ "loss": 1.853,
87
+ "step": 100
88
+ },
89
+ {
90
+ "epoch": 0.088,
91
+ "grad_norm": 2.88864803314209,
92
+ "learning_rate": 2.9513513513513514e-05,
93
+ "loss": 1.7652,
94
+ "step": 110
95
+ },
96
+ {
97
+ "epoch": 0.096,
98
+ "grad_norm": 2.9048268795013428,
99
+ "learning_rate": 2.943243243243243e-05,
100
+ "loss": 1.7984,
101
+ "step": 120
102
+ },
103
+ {
104
+ "epoch": 0.104,
105
+ "grad_norm": 2.6991426944732666,
106
+ "learning_rate": 2.935135135135135e-05,
107
+ "loss": 1.6869,
108
+ "step": 130
109
+ },
110
+ {
111
+ "epoch": 0.112,
112
+ "grad_norm": 2.4570231437683105,
113
+ "learning_rate": 2.927027027027027e-05,
114
+ "loss": 1.7812,
115
+ "step": 140
116
+ },
117
+ {
118
+ "epoch": 0.12,
119
+ "grad_norm": 4.502678871154785,
120
+ "learning_rate": 2.918918918918919e-05,
121
+ "loss": 1.8444,
122
+ "step": 150
123
+ },
124
+ {
125
+ "epoch": 0.128,
126
+ "grad_norm": 3.117838144302368,
127
+ "learning_rate": 2.9108108108108108e-05,
128
+ "loss": 1.7544,
129
+ "step": 160
130
+ },
131
+ {
132
+ "epoch": 0.136,
133
+ "grad_norm": 2.812086343765259,
134
+ "learning_rate": 2.9027027027027028e-05,
135
+ "loss": 1.638,
136
+ "step": 170
137
+ },
138
+ {
139
+ "epoch": 0.144,
140
+ "grad_norm": 3.23271107673645,
141
+ "learning_rate": 2.8954054054054057e-05,
142
+ "loss": 1.7026,
143
+ "step": 180
144
+ },
145
+ {
146
+ "epoch": 0.152,
147
+ "grad_norm": 3.975172996520996,
148
+ "learning_rate": 2.8872972972972977e-05,
149
+ "loss": 1.6715,
150
+ "step": 190
151
+ },
152
+ {
153
+ "epoch": 0.16,
154
+ "grad_norm": 3.8965938091278076,
155
+ "learning_rate": 2.879189189189189e-05,
156
+ "loss": 1.6804,
157
+ "step": 200
158
+ },
159
+ {
160
+ "epoch": 0.168,
161
+ "grad_norm": 4.267274856567383,
162
+ "learning_rate": 2.871081081081081e-05,
163
+ "loss": 1.6587,
164
+ "step": 210
165
+ },
166
+ {
167
+ "epoch": 0.176,
168
+ "grad_norm": 3.524360179901123,
169
+ "learning_rate": 2.862972972972973e-05,
170
+ "loss": 1.4811,
171
+ "step": 220
172
+ },
173
+ {
174
+ "epoch": 0.184,
175
+ "grad_norm": 3.266697883605957,
176
+ "learning_rate": 2.854864864864865e-05,
177
+ "loss": 1.672,
178
+ "step": 230
179
+ },
180
+ {
181
+ "epoch": 0.192,
182
+ "grad_norm": 5.3684186935424805,
183
+ "learning_rate": 2.8467567567567567e-05,
184
+ "loss": 1.5284,
185
+ "step": 240
186
+ },
187
+ {
188
+ "epoch": 0.2,
189
+ "grad_norm": 3.898176431655884,
190
+ "learning_rate": 2.8386486486486487e-05,
191
+ "loss": 1.5774,
192
+ "step": 250
193
+ },
194
+ {
195
+ "epoch": 0.208,
196
+ "grad_norm": 3.189732074737549,
197
+ "learning_rate": 2.8305405405405407e-05,
198
+ "loss": 1.4874,
199
+ "step": 260
200
+ },
201
+ {
202
+ "epoch": 0.216,
203
+ "grad_norm": 3.274244785308838,
204
+ "learning_rate": 2.8224324324324327e-05,
205
+ "loss": 1.5098,
206
+ "step": 270
207
+ },
208
+ {
209
+ "epoch": 0.224,
210
+ "grad_norm": 5.691224098205566,
211
+ "learning_rate": 2.8143243243243244e-05,
212
+ "loss": 1.509,
213
+ "step": 280
214
+ },
215
+ {
216
+ "epoch": 0.232,
217
+ "grad_norm": 6.856773376464844,
218
+ "learning_rate": 2.8062162162162164e-05,
219
+ "loss": 1.4558,
220
+ "step": 290
221
+ },
222
+ {
223
+ "epoch": 0.24,
224
+ "grad_norm": 7.078716278076172,
225
+ "learning_rate": 2.7981081081081084e-05,
226
+ "loss": 1.5298,
227
+ "step": 300
228
+ },
229
+ {
230
+ "epoch": 0.248,
231
+ "grad_norm": 4.4305100440979,
232
+ "learning_rate": 2.79e-05,
233
+ "loss": 1.3387,
234
+ "step": 310
235
+ },
236
+ {
237
+ "epoch": 0.256,
238
+ "grad_norm": 10.400449752807617,
239
+ "learning_rate": 2.7818918918918917e-05,
240
+ "loss": 1.4501,
241
+ "step": 320
242
+ },
243
+ {
244
+ "epoch": 0.264,
245
+ "grad_norm": 5.316948890686035,
246
+ "learning_rate": 2.7737837837837837e-05,
247
+ "loss": 1.367,
248
+ "step": 330
249
+ },
250
+ {
251
+ "epoch": 0.272,
252
+ "grad_norm": 9.753177642822266,
253
+ "learning_rate": 2.7656756756756757e-05,
254
+ "loss": 1.4684,
255
+ "step": 340
256
+ },
257
+ {
258
+ "epoch": 0.28,
259
+ "grad_norm": 8.100529670715332,
260
+ "learning_rate": 2.7575675675675677e-05,
261
+ "loss": 1.4175,
262
+ "step": 350
263
+ },
264
+ {
265
+ "epoch": 0.288,
266
+ "grad_norm": 9.878854751586914,
267
+ "learning_rate": 2.7494594594594594e-05,
268
+ "loss": 1.308,
269
+ "step": 360
270
+ },
271
+ {
272
+ "epoch": 0.296,
273
+ "grad_norm": 5.865877151489258,
274
+ "learning_rate": 2.7413513513513514e-05,
275
+ "loss": 1.3035,
276
+ "step": 370
277
+ },
278
+ {
279
+ "epoch": 0.304,
280
+ "grad_norm": 7.870754241943359,
281
+ "learning_rate": 2.7332432432432434e-05,
282
+ "loss": 1.2915,
283
+ "step": 380
284
+ },
285
+ {
286
+ "epoch": 0.312,
287
+ "grad_norm": 8.517908096313477,
288
+ "learning_rate": 2.7251351351351354e-05,
289
+ "loss": 1.4318,
290
+ "step": 390
291
+ },
292
+ {
293
+ "epoch": 0.32,
294
+ "grad_norm": 4.7960309982299805,
295
+ "learning_rate": 2.717027027027027e-05,
296
+ "loss": 1.3154,
297
+ "step": 400
298
+ },
299
+ {
300
+ "epoch": 0.328,
301
+ "grad_norm": 5.629390716552734,
302
+ "learning_rate": 2.708918918918919e-05,
303
+ "loss": 1.3433,
304
+ "step": 410
305
+ },
306
+ {
307
+ "epoch": 0.336,
308
+ "grad_norm": 8.473249435424805,
309
+ "learning_rate": 2.700810810810811e-05,
310
+ "loss": 1.1474,
311
+ "step": 420
312
+ },
313
+ {
314
+ "epoch": 0.344,
315
+ "grad_norm": 3.652617931365967,
316
+ "learning_rate": 2.6927027027027028e-05,
317
+ "loss": 1.3247,
318
+ "step": 430
319
+ },
320
+ {
321
+ "epoch": 0.352,
322
+ "grad_norm": 4.9890055656433105,
323
+ "learning_rate": 2.6845945945945944e-05,
324
+ "loss": 1.3347,
325
+ "step": 440
326
+ },
327
+ {
328
+ "epoch": 0.36,
329
+ "grad_norm": 5.2355055809021,
330
+ "learning_rate": 2.6764864864864864e-05,
331
+ "loss": 1.0932,
332
+ "step": 450
333
+ },
334
+ {
335
+ "epoch": 0.368,
336
+ "grad_norm": 6.325026512145996,
337
+ "learning_rate": 2.6683783783783785e-05,
338
+ "loss": 1.4873,
339
+ "step": 460
340
+ },
341
+ {
342
+ "epoch": 0.376,
343
+ "grad_norm": 6.78115701675415,
344
+ "learning_rate": 2.6602702702702705e-05,
345
+ "loss": 1.2311,
346
+ "step": 470
347
+ },
348
+ {
349
+ "epoch": 0.384,
350
+ "grad_norm": 4.194353103637695,
351
+ "learning_rate": 2.652162162162162e-05,
352
+ "loss": 1.2493,
353
+ "step": 480
354
+ },
355
+ {
356
+ "epoch": 0.392,
357
+ "grad_norm": 3.8817057609558105,
358
+ "learning_rate": 2.644054054054054e-05,
359
+ "loss": 1.1237,
360
+ "step": 490
361
+ },
362
+ {
363
+ "epoch": 0.4,
364
+ "grad_norm": 6.7539520263671875,
365
+ "learning_rate": 2.635945945945946e-05,
366
+ "loss": 1.1135,
367
+ "step": 500
368
+ },
369
+ {
370
+ "epoch": 0.408,
371
+ "grad_norm": 9.044737815856934,
372
+ "learning_rate": 2.627837837837838e-05,
373
+ "loss": 1.2459,
374
+ "step": 510
375
+ },
376
+ {
377
+ "epoch": 0.416,
378
+ "grad_norm": 15.829017639160156,
379
+ "learning_rate": 2.6197297297297298e-05,
380
+ "loss": 1.2803,
381
+ "step": 520
382
+ },
383
+ {
384
+ "epoch": 0.424,
385
+ "grad_norm": 10.789520263671875,
386
+ "learning_rate": 2.6116216216216218e-05,
387
+ "loss": 1.1912,
388
+ "step": 530
389
+ },
390
+ {
391
+ "epoch": 0.432,
392
+ "grad_norm": 5.011368274688721,
393
+ "learning_rate": 2.6035135135135135e-05,
394
+ "loss": 1.0143,
395
+ "step": 540
396
+ },
397
+ {
398
+ "epoch": 0.44,
399
+ "grad_norm": 8.985868453979492,
400
+ "learning_rate": 2.5954054054054055e-05,
401
+ "loss": 1.118,
402
+ "step": 550
403
+ },
404
+ {
405
+ "epoch": 0.448,
406
+ "grad_norm": 6.862995147705078,
407
+ "learning_rate": 2.587297297297297e-05,
408
+ "loss": 1.1269,
409
+ "step": 560
410
+ },
411
+ {
412
+ "epoch": 0.456,
413
+ "grad_norm": 10.972336769104004,
414
+ "learning_rate": 2.579189189189189e-05,
415
+ "loss": 1.1591,
416
+ "step": 570
417
+ },
418
+ {
419
+ "epoch": 0.464,
420
+ "grad_norm": 8.179327011108398,
421
+ "learning_rate": 2.5710810810810812e-05,
422
+ "loss": 1.154,
423
+ "step": 580
424
+ },
425
+ {
426
+ "epoch": 0.472,
427
+ "grad_norm": 11.713990211486816,
428
+ "learning_rate": 2.5629729729729732e-05,
429
+ "loss": 1.0995,
430
+ "step": 590
431
+ },
432
+ {
433
+ "epoch": 0.48,
434
+ "grad_norm": 10.86710262298584,
435
+ "learning_rate": 2.554864864864865e-05,
436
+ "loss": 1.1544,
437
+ "step": 600
438
+ },
439
+ {
440
+ "epoch": 0.488,
441
+ "grad_norm": 6.228063106536865,
442
+ "learning_rate": 2.546756756756757e-05,
443
+ "loss": 1.2395,
444
+ "step": 610
445
+ },
446
+ {
447
+ "epoch": 0.496,
448
+ "grad_norm": 12.631518363952637,
449
+ "learning_rate": 2.538648648648649e-05,
450
+ "loss": 1.0992,
451
+ "step": 620
452
+ },
453
+ {
454
+ "epoch": 0.504,
455
+ "grad_norm": 7.058006763458252,
456
+ "learning_rate": 2.530540540540541e-05,
457
+ "loss": 1.194,
458
+ "step": 630
459
+ },
460
+ {
461
+ "epoch": 0.512,
462
+ "grad_norm": 5.026750087738037,
463
+ "learning_rate": 2.5224324324324325e-05,
464
+ "loss": 1.103,
465
+ "step": 640
466
+ },
467
+ {
468
+ "epoch": 0.52,
469
+ "grad_norm": 7.1134843826293945,
470
+ "learning_rate": 2.5143243243243242e-05,
471
+ "loss": 0.9427,
472
+ "step": 650
473
+ },
474
+ {
475
+ "epoch": 0.528,
476
+ "grad_norm": 7.147433280944824,
477
+ "learning_rate": 2.5062162162162162e-05,
478
+ "loss": 0.9881,
479
+ "step": 660
480
+ },
481
+ {
482
+ "epoch": 0.536,
483
+ "grad_norm": 6.535639762878418,
484
+ "learning_rate": 2.4981081081081082e-05,
485
+ "loss": 1.1143,
486
+ "step": 670
487
+ },
488
+ {
489
+ "epoch": 0.544,
490
+ "grad_norm": 10.878937721252441,
491
+ "learning_rate": 2.49e-05,
492
+ "loss": 0.8909,
493
+ "step": 680
494
+ },
495
+ {
496
+ "epoch": 0.552,
497
+ "grad_norm": 5.79094934463501,
498
+ "learning_rate": 2.481891891891892e-05,
499
+ "loss": 0.9728,
500
+ "step": 690
501
+ },
502
+ {
503
+ "epoch": 0.56,
504
+ "grad_norm": 6.935592174530029,
505
+ "learning_rate": 2.473783783783784e-05,
506
+ "loss": 1.0735,
507
+ "step": 700
508
+ },
509
+ {
510
+ "epoch": 0.568,
511
+ "grad_norm": 5.661824703216553,
512
+ "learning_rate": 2.465675675675676e-05,
513
+ "loss": 1.0012,
514
+ "step": 710
515
+ },
516
+ {
517
+ "epoch": 0.576,
518
+ "grad_norm": 13.233421325683594,
519
+ "learning_rate": 2.4575675675675676e-05,
520
+ "loss": 1.0315,
521
+ "step": 720
522
+ },
523
+ {
524
+ "epoch": 0.584,
525
+ "grad_norm": 9.292459487915039,
526
+ "learning_rate": 2.4494594594594596e-05,
527
+ "loss": 0.9547,
528
+ "step": 730
529
+ },
530
+ {
531
+ "epoch": 0.592,
532
+ "grad_norm": 13.138367652893066,
533
+ "learning_rate": 2.442162162162162e-05,
534
+ "loss": 0.9379,
535
+ "step": 740
536
+ },
537
+ {
538
+ "epoch": 0.6,
539
+ "grad_norm": 13.352531433105469,
540
+ "learning_rate": 2.434054054054054e-05,
541
+ "loss": 0.9484,
542
+ "step": 750
543
+ },
544
+ {
545
+ "epoch": 0.608,
546
+ "grad_norm": 11.993139266967773,
547
+ "learning_rate": 2.4259459459459458e-05,
548
+ "loss": 1.1064,
549
+ "step": 760
550
+ },
551
+ {
552
+ "epoch": 0.616,
553
+ "grad_norm": 12.132452011108398,
554
+ "learning_rate": 2.4178378378378378e-05,
555
+ "loss": 1.1363,
556
+ "step": 770
557
+ },
558
+ {
559
+ "epoch": 0.624,
560
+ "grad_norm": 13.944737434387207,
561
+ "learning_rate": 2.4097297297297298e-05,
562
+ "loss": 0.9835,
563
+ "step": 780
564
+ },
565
+ {
566
+ "epoch": 0.632,
567
+ "grad_norm": 6.077609062194824,
568
+ "learning_rate": 2.4016216216216218e-05,
569
+ "loss": 0.8391,
570
+ "step": 790
571
+ },
572
+ {
573
+ "epoch": 0.64,
574
+ "grad_norm": 7.873855113983154,
575
+ "learning_rate": 2.3935135135135135e-05,
576
+ "loss": 0.7772,
577
+ "step": 800
578
+ },
579
+ {
580
+ "epoch": 0.648,
581
+ "grad_norm": 13.312115669250488,
582
+ "learning_rate": 2.3854054054054055e-05,
583
+ "loss": 1.0117,
584
+ "step": 810
585
+ },
586
+ {
587
+ "epoch": 0.656,
588
+ "grad_norm": 9.016510963439941,
589
+ "learning_rate": 2.3772972972972975e-05,
590
+ "loss": 0.9353,
591
+ "step": 820
592
+ },
593
+ {
594
+ "epoch": 0.664,
595
+ "grad_norm": 8.618375778198242,
596
+ "learning_rate": 2.3691891891891895e-05,
597
+ "loss": 0.9598,
598
+ "step": 830
599
+ },
600
+ {
601
+ "epoch": 0.672,
602
+ "grad_norm": 10.867205619812012,
603
+ "learning_rate": 2.361081081081081e-05,
604
+ "loss": 0.8726,
605
+ "step": 840
606
+ },
607
+ {
608
+ "epoch": 0.68,
609
+ "grad_norm": 13.182415962219238,
610
+ "learning_rate": 2.3529729729729728e-05,
611
+ "loss": 0.9202,
612
+ "step": 850
613
+ },
614
+ {
615
+ "epoch": 0.688,
616
+ "grad_norm": 12.405129432678223,
617
+ "learning_rate": 2.3448648648648648e-05,
618
+ "loss": 0.8795,
619
+ "step": 860
620
+ },
621
+ {
622
+ "epoch": 0.696,
623
+ "grad_norm": 8.207524299621582,
624
+ "learning_rate": 2.3367567567567568e-05,
625
+ "loss": 0.8015,
626
+ "step": 870
627
+ },
628
+ {
629
+ "epoch": 0.704,
630
+ "grad_norm": 15.442817687988281,
631
+ "learning_rate": 2.3286486486486485e-05,
632
+ "loss": 0.9932,
633
+ "step": 880
634
+ },
635
+ {
636
+ "epoch": 0.712,
637
+ "grad_norm": 13.388226509094238,
638
+ "learning_rate": 2.3205405405405405e-05,
639
+ "loss": 0.87,
640
+ "step": 890
641
+ },
642
+ {
643
+ "epoch": 0.72,
644
+ "grad_norm": 8.635920524597168,
645
+ "learning_rate": 2.3124324324324325e-05,
646
+ "loss": 0.7842,
647
+ "step": 900
648
+ },
649
+ {
650
+ "epoch": 0.728,
651
+ "grad_norm": 11.66073989868164,
652
+ "learning_rate": 2.3043243243243245e-05,
653
+ "loss": 0.9023,
654
+ "step": 910
655
+ },
656
+ {
657
+ "epoch": 0.736,
658
+ "grad_norm": 12.954612731933594,
659
+ "learning_rate": 2.2962162162162162e-05,
660
+ "loss": 1.0076,
661
+ "step": 920
662
+ },
663
+ {
664
+ "epoch": 0.744,
665
+ "grad_norm": 11.18680191040039,
666
+ "learning_rate": 2.2881081081081082e-05,
667
+ "loss": 1.1349,
668
+ "step": 930
669
+ },
670
+ {
671
+ "epoch": 0.752,
672
+ "grad_norm": 8.514711380004883,
673
+ "learning_rate": 2.2800000000000002e-05,
674
+ "loss": 0.9604,
675
+ "step": 940
676
+ },
677
+ {
678
+ "epoch": 0.76,
679
+ "grad_norm": 4.436418056488037,
680
+ "learning_rate": 2.2718918918918922e-05,
681
+ "loss": 0.9632,
682
+ "step": 950
683
+ },
684
+ {
685
+ "epoch": 0.768,
686
+ "grad_norm": 10.213781356811523,
687
+ "learning_rate": 2.263783783783784e-05,
688
+ "loss": 0.8139,
689
+ "step": 960
690
+ },
691
+ {
692
+ "epoch": 0.776,
693
+ "grad_norm": 9.987252235412598,
694
+ "learning_rate": 2.2556756756756755e-05,
695
+ "loss": 0.8276,
696
+ "step": 970
697
+ },
698
+ {
699
+ "epoch": 0.784,
700
+ "grad_norm": 12.511467933654785,
701
+ "learning_rate": 2.2475675675675675e-05,
702
+ "loss": 0.8709,
703
+ "step": 980
704
+ },
705
+ {
706
+ "epoch": 0.792,
707
+ "grad_norm": 8.908098220825195,
708
+ "learning_rate": 2.2394594594594595e-05,
709
+ "loss": 0.8812,
710
+ "step": 990
711
+ },
712
+ {
713
+ "epoch": 0.8,
714
+ "grad_norm": 10.62246322631836,
715
+ "learning_rate": 2.2313513513513512e-05,
716
+ "loss": 0.9733,
717
+ "step": 1000
718
+ },
719
+ {
720
+ "epoch": 0.808,
721
+ "grad_norm": 14.651544570922852,
722
+ "learning_rate": 2.2232432432432432e-05,
723
+ "loss": 1.0309,
724
+ "step": 1010
725
+ },
726
+ {
727
+ "epoch": 0.816,
728
+ "grad_norm": 19.1525936126709,
729
+ "learning_rate": 2.2151351351351352e-05,
730
+ "loss": 0.8808,
731
+ "step": 1020
732
+ },
733
+ {
734
+ "epoch": 0.824,
735
+ "grad_norm": 7.289106369018555,
736
+ "learning_rate": 2.2070270270270272e-05,
737
+ "loss": 0.9126,
738
+ "step": 1030
739
+ },
740
+ {
741
+ "epoch": 0.832,
742
+ "grad_norm": 5.375001907348633,
743
+ "learning_rate": 2.198918918918919e-05,
744
+ "loss": 0.847,
745
+ "step": 1040
746
+ },
747
+ {
748
+ "epoch": 0.84,
749
+ "grad_norm": 8.623431205749512,
750
+ "learning_rate": 2.190810810810811e-05,
751
+ "loss": 0.9139,
752
+ "step": 1050
753
+ },
754
+ {
755
+ "epoch": 0.848,
756
+ "grad_norm": 6.639071941375732,
757
+ "learning_rate": 2.182702702702703e-05,
758
+ "loss": 0.9345,
759
+ "step": 1060
760
+ },
761
+ {
762
+ "epoch": 0.856,
763
+ "grad_norm": 7.635943412780762,
764
+ "learning_rate": 2.174594594594595e-05,
765
+ "loss": 0.8134,
766
+ "step": 1070
767
+ },
768
+ {
769
+ "epoch": 0.864,
770
+ "grad_norm": 12.048315048217773,
771
+ "learning_rate": 2.1664864864864862e-05,
772
+ "loss": 0.6728,
773
+ "step": 1080
774
+ },
775
+ {
776
+ "epoch": 0.872,
777
+ "grad_norm": 13.869949340820312,
778
+ "learning_rate": 2.1583783783783783e-05,
779
+ "loss": 0.8256,
780
+ "step": 1090
781
+ },
782
+ {
783
+ "epoch": 0.88,
784
+ "grad_norm": 26.233325958251953,
785
+ "learning_rate": 2.1502702702702703e-05,
786
+ "loss": 0.7044,
787
+ "step": 1100
788
+ },
789
+ {
790
+ "epoch": 0.888,
791
+ "grad_norm": 7.98716926574707,
792
+ "learning_rate": 2.1421621621621623e-05,
793
+ "loss": 0.7398,
794
+ "step": 1110
795
+ },
796
+ {
797
+ "epoch": 0.896,
798
+ "grad_norm": 13.682205200195312,
799
+ "learning_rate": 2.134054054054054e-05,
800
+ "loss": 0.7522,
801
+ "step": 1120
802
+ },
803
+ {
804
+ "epoch": 0.904,
805
+ "grad_norm": 9.086796760559082,
806
+ "learning_rate": 2.125945945945946e-05,
807
+ "loss": 0.8574,
808
+ "step": 1130
809
+ },
810
+ {
811
+ "epoch": 0.912,
812
+ "grad_norm": 10.3043851852417,
813
+ "learning_rate": 2.117837837837838e-05,
814
+ "loss": 0.8005,
815
+ "step": 1140
816
+ },
817
+ {
818
+ "epoch": 0.92,
819
+ "grad_norm": 12.477950096130371,
820
+ "learning_rate": 2.10972972972973e-05,
821
+ "loss": 0.8436,
822
+ "step": 1150
823
+ },
824
+ {
825
+ "epoch": 0.928,
826
+ "grad_norm": 16.634178161621094,
827
+ "learning_rate": 2.1016216216216216e-05,
828
+ "loss": 0.6515,
829
+ "step": 1160
830
+ },
831
+ {
832
+ "epoch": 0.936,
833
+ "grad_norm": 11.066425323486328,
834
+ "learning_rate": 2.0935135135135136e-05,
835
+ "loss": 0.8689,
836
+ "step": 1170
837
+ },
838
+ {
839
+ "epoch": 0.944,
840
+ "grad_norm": 19.47179412841797,
841
+ "learning_rate": 2.0854054054054056e-05,
842
+ "loss": 0.6605,
843
+ "step": 1180
844
+ },
845
+ {
846
+ "epoch": 0.952,
847
+ "grad_norm": 3.917236804962158,
848
+ "learning_rate": 2.0772972972972973e-05,
849
+ "loss": 0.6826,
850
+ "step": 1190
851
+ },
852
+ {
853
+ "epoch": 0.96,
854
+ "grad_norm": 16.43979263305664,
855
+ "learning_rate": 2.069189189189189e-05,
856
+ "loss": 0.7731,
857
+ "step": 1200
858
+ },
859
+ {
860
+ "epoch": 0.968,
861
+ "grad_norm": 6.7848711013793945,
862
+ "learning_rate": 2.061081081081081e-05,
863
+ "loss": 0.705,
864
+ "step": 1210
865
+ },
866
+ {
867
+ "epoch": 0.976,
868
+ "grad_norm": 7.472936153411865,
869
+ "learning_rate": 2.052972972972973e-05,
870
+ "loss": 0.7663,
871
+ "step": 1220
872
+ },
873
+ {
874
+ "epoch": 0.984,
875
+ "grad_norm": 5.729743957519531,
876
+ "learning_rate": 2.044864864864865e-05,
877
+ "loss": 0.9337,
878
+ "step": 1230
879
+ },
880
+ {
881
+ "epoch": 0.992,
882
+ "grad_norm": 6.306894302368164,
883
+ "learning_rate": 2.0367567567567567e-05,
884
+ "loss": 0.7655,
885
+ "step": 1240
886
+ },
887
+ {
888
+ "epoch": 1.0,
889
+ "grad_norm": 17.98261260986328,
890
+ "learning_rate": 2.0286486486486487e-05,
891
+ "loss": 0.8202,
892
+ "step": 1250
893
+ },
894
+ {
895
+ "epoch": 1.0,
896
+ "eval_accuracy": 0.7668834417208604,
897
+ "eval_f1": 0.7589222033223469,
898
+ "eval_loss": 0.7338727712631226,
899
+ "eval_model_preparation_time": 0.0029,
900
+ "eval_runtime": 59.9403,
901
+ "eval_samples_per_second": 33.35,
902
+ "eval_steps_per_second": 4.171,
903
+ "step": 1250
904
+ },
905
+ {
906
+ "epoch": 1.008,
907
+ "grad_norm": 3.764857292175293,
908
+ "learning_rate": 2.0205405405405407e-05,
909
+ "loss": 0.6991,
910
+ "step": 1260
911
+ },
912
+ {
913
+ "epoch": 1.016,
914
+ "grad_norm": 12.638517379760742,
915
+ "learning_rate": 2.0124324324324327e-05,
916
+ "loss": 0.7653,
917
+ "step": 1270
918
+ },
919
+ {
920
+ "epoch": 1.024,
921
+ "grad_norm": 17.76312255859375,
922
+ "learning_rate": 2.0043243243243243e-05,
923
+ "loss": 0.8033,
924
+ "step": 1280
925
+ },
926
+ {
927
+ "epoch": 1.032,
928
+ "grad_norm": 11.941933631896973,
929
+ "learning_rate": 1.9962162162162163e-05,
930
+ "loss": 0.699,
931
+ "step": 1290
932
+ },
933
+ {
934
+ "epoch": 1.04,
935
+ "grad_norm": 17.107053756713867,
936
+ "learning_rate": 1.988108108108108e-05,
937
+ "loss": 0.706,
938
+ "step": 1300
939
+ },
940
+ {
941
+ "epoch": 1.048,
942
+ "grad_norm": 6.369427680969238,
943
+ "learning_rate": 1.98e-05,
944
+ "loss": 0.6494,
945
+ "step": 1310
946
+ },
947
+ {
948
+ "epoch": 1.056,
949
+ "grad_norm": 7.1872477531433105,
950
+ "learning_rate": 1.9718918918918917e-05,
951
+ "loss": 0.5989,
952
+ "step": 1320
953
+ },
954
+ {
955
+ "epoch": 1.064,
956
+ "grad_norm": 3.7603728771209717,
957
+ "learning_rate": 1.9637837837837837e-05,
958
+ "loss": 0.7582,
959
+ "step": 1330
960
+ },
961
+ {
962
+ "epoch": 1.072,
963
+ "grad_norm": 10.045304298400879,
964
+ "learning_rate": 1.9556756756756757e-05,
965
+ "loss": 0.7192,
966
+ "step": 1340
967
+ },
968
+ {
969
+ "epoch": 1.08,
970
+ "grad_norm": 14.639888763427734,
971
+ "learning_rate": 1.9475675675675677e-05,
972
+ "loss": 0.7959,
973
+ "step": 1350
974
+ },
975
+ {
976
+ "epoch": 1.088,
977
+ "grad_norm": 8.216081619262695,
978
+ "learning_rate": 1.9394594594594594e-05,
979
+ "loss": 0.6062,
980
+ "step": 1360
981
+ },
982
+ {
983
+ "epoch": 1.096,
984
+ "grad_norm": 5.784476280212402,
985
+ "learning_rate": 1.9313513513513514e-05,
986
+ "loss": 0.541,
987
+ "step": 1370
988
+ },
989
+ {
990
+ "epoch": 1.104,
991
+ "grad_norm": 5.356358051300049,
992
+ "learning_rate": 1.9232432432432434e-05,
993
+ "loss": 0.6993,
994
+ "step": 1380
995
+ },
996
+ {
997
+ "epoch": 1.112,
998
+ "grad_norm": 17.11981773376465,
999
+ "learning_rate": 1.9151351351351354e-05,
1000
+ "loss": 0.7217,
1001
+ "step": 1390
1002
+ },
1003
+ {
1004
+ "epoch": 1.12,
1005
+ "grad_norm": 15.52505111694336,
1006
+ "learning_rate": 1.907027027027027e-05,
1007
+ "loss": 0.548,
1008
+ "step": 1400
1009
+ },
1010
+ {
1011
+ "epoch": 1.1280000000000001,
1012
+ "grad_norm": 12.326894760131836,
1013
+ "learning_rate": 1.8989189189189187e-05,
1014
+ "loss": 0.7058,
1015
+ "step": 1410
1016
+ },
1017
+ {
1018
+ "epoch": 1.1360000000000001,
1019
+ "grad_norm": 12.889031410217285,
1020
+ "learning_rate": 1.8908108108108107e-05,
1021
+ "loss": 0.7748,
1022
+ "step": 1420
1023
+ },
1024
+ {
1025
+ "epoch": 1.144,
1026
+ "grad_norm": 12.953654289245605,
1027
+ "learning_rate": 1.8827027027027027e-05,
1028
+ "loss": 0.7251,
1029
+ "step": 1430
1030
+ },
1031
+ {
1032
+ "epoch": 1.152,
1033
+ "grad_norm": 2.564222812652588,
1034
+ "learning_rate": 1.8745945945945944e-05,
1035
+ "loss": 0.551,
1036
+ "step": 1440
1037
+ },
1038
+ {
1039
+ "epoch": 1.16,
1040
+ "grad_norm": 9.111184120178223,
1041
+ "learning_rate": 1.8664864864864864e-05,
1042
+ "loss": 0.5548,
1043
+ "step": 1450
1044
+ },
1045
+ {
1046
+ "epoch": 1.168,
1047
+ "grad_norm": 7.713393211364746,
1048
+ "learning_rate": 1.8583783783783784e-05,
1049
+ "loss": 0.564,
1050
+ "step": 1460
1051
+ },
1052
+ {
1053
+ "epoch": 1.176,
1054
+ "grad_norm": 8.282889366149902,
1055
+ "learning_rate": 1.8502702702702704e-05,
1056
+ "loss": 0.7312,
1057
+ "step": 1470
1058
+ },
1059
+ {
1060
+ "epoch": 1.184,
1061
+ "grad_norm": 15.445865631103516,
1062
+ "learning_rate": 1.842162162162162e-05,
1063
+ "loss": 0.6974,
1064
+ "step": 1480
1065
+ },
1066
+ {
1067
+ "epoch": 1.192,
1068
+ "grad_norm": 6.040890693664551,
1069
+ "learning_rate": 1.834054054054054e-05,
1070
+ "loss": 0.6808,
1071
+ "step": 1490
1072
+ },
1073
+ {
1074
+ "epoch": 1.2,
1075
+ "grad_norm": 17.368532180786133,
1076
+ "learning_rate": 1.825945945945946e-05,
1077
+ "loss": 0.5695,
1078
+ "step": 1500
1079
+ },
1080
+ {
1081
+ "epoch": 1.208,
1082
+ "grad_norm": 11.174856185913086,
1083
+ "learning_rate": 1.817837837837838e-05,
1084
+ "loss": 0.8114,
1085
+ "step": 1510
1086
+ },
1087
+ {
1088
+ "epoch": 1.216,
1089
+ "grad_norm": 18.861087799072266,
1090
+ "learning_rate": 1.8097297297297298e-05,
1091
+ "loss": 0.601,
1092
+ "step": 1520
1093
+ },
1094
+ {
1095
+ "epoch": 1.224,
1096
+ "grad_norm": 4.157520771026611,
1097
+ "learning_rate": 1.8016216216216214e-05,
1098
+ "loss": 0.6195,
1099
+ "step": 1530
1100
+ },
1101
+ {
1102
+ "epoch": 1.232,
1103
+ "grad_norm": 40.063621520996094,
1104
+ "learning_rate": 1.7935135135135134e-05,
1105
+ "loss": 0.6502,
1106
+ "step": 1540
1107
+ },
1108
+ {
1109
+ "epoch": 1.24,
1110
+ "grad_norm": 12.99301528930664,
1111
+ "learning_rate": 1.7854054054054055e-05,
1112
+ "loss": 0.5463,
1113
+ "step": 1550
1114
+ },
1115
+ {
1116
+ "epoch": 1.248,
1117
+ "grad_norm": 3.057586908340454,
1118
+ "learning_rate": 1.7772972972972975e-05,
1119
+ "loss": 0.5025,
1120
+ "step": 1560
1121
+ },
1122
+ {
1123
+ "epoch": 1.256,
1124
+ "grad_norm": 7.806783199310303,
1125
+ "learning_rate": 1.769189189189189e-05,
1126
+ "loss": 0.5544,
1127
+ "step": 1570
1128
+ },
1129
+ {
1130
+ "epoch": 1.264,
1131
+ "grad_norm": 6.477509498596191,
1132
+ "learning_rate": 1.761081081081081e-05,
1133
+ "loss": 0.5447,
1134
+ "step": 1580
1135
+ },
1136
+ {
1137
+ "epoch": 1.272,
1138
+ "grad_norm": 28.918643951416016,
1139
+ "learning_rate": 1.752972972972973e-05,
1140
+ "loss": 0.7803,
1141
+ "step": 1590
1142
+ },
1143
+ {
1144
+ "epoch": 1.28,
1145
+ "grad_norm": 23.26552391052246,
1146
+ "learning_rate": 1.744864864864865e-05,
1147
+ "loss": 0.7442,
1148
+ "step": 1600
1149
+ },
1150
+ {
1151
+ "epoch": 1.288,
1152
+ "grad_norm": 24.359582901000977,
1153
+ "learning_rate": 1.7367567567567568e-05,
1154
+ "loss": 0.705,
1155
+ "step": 1610
1156
+ },
1157
+ {
1158
+ "epoch": 1.296,
1159
+ "grad_norm": 20.904409408569336,
1160
+ "learning_rate": 1.7286486486486488e-05,
1161
+ "loss": 0.7149,
1162
+ "step": 1620
1163
+ },
1164
+ {
1165
+ "epoch": 1.304,
1166
+ "grad_norm": 11.675884246826172,
1167
+ "learning_rate": 1.7205405405405408e-05,
1168
+ "loss": 0.5916,
1169
+ "step": 1630
1170
+ },
1171
+ {
1172
+ "epoch": 1.312,
1173
+ "grad_norm": 19.712337493896484,
1174
+ "learning_rate": 1.7124324324324325e-05,
1175
+ "loss": 0.59,
1176
+ "step": 1640
1177
+ },
1178
+ {
1179
+ "epoch": 1.32,
1180
+ "grad_norm": 1.4685373306274414,
1181
+ "learning_rate": 1.704324324324324e-05,
1182
+ "loss": 0.6127,
1183
+ "step": 1650
1184
+ },
1185
+ {
1186
+ "epoch": 1.328,
1187
+ "grad_norm": 13.145341873168945,
1188
+ "learning_rate": 1.696216216216216e-05,
1189
+ "loss": 0.5311,
1190
+ "step": 1660
1191
+ },
1192
+ {
1193
+ "epoch": 1.336,
1194
+ "grad_norm": 33.24889373779297,
1195
+ "learning_rate": 1.6881081081081082e-05,
1196
+ "loss": 0.539,
1197
+ "step": 1670
1198
+ },
1199
+ {
1200
+ "epoch": 1.3439999999999999,
1201
+ "grad_norm": 2.730905771255493,
1202
+ "learning_rate": 1.6800000000000002e-05,
1203
+ "loss": 0.6207,
1204
+ "step": 1680
1205
+ },
1206
+ {
1207
+ "epoch": 1.3519999999999999,
1208
+ "grad_norm": 18.928560256958008,
1209
+ "learning_rate": 1.671891891891892e-05,
1210
+ "loss": 0.5226,
1211
+ "step": 1690
1212
+ },
1213
+ {
1214
+ "epoch": 1.3599999999999999,
1215
+ "grad_norm": 6.420986175537109,
1216
+ "learning_rate": 1.663783783783784e-05,
1217
+ "loss": 0.5054,
1218
+ "step": 1700
1219
+ },
1220
+ {
1221
+ "epoch": 1.3679999999999999,
1222
+ "grad_norm": 10.68362045288086,
1223
+ "learning_rate": 1.655675675675676e-05,
1224
+ "loss": 0.553,
1225
+ "step": 1710
1226
+ },
1227
+ {
1228
+ "epoch": 1.376,
1229
+ "grad_norm": 23.89041519165039,
1230
+ "learning_rate": 1.647567567567568e-05,
1231
+ "loss": 0.585,
1232
+ "step": 1720
1233
+ },
1234
+ {
1235
+ "epoch": 1.384,
1236
+ "grad_norm": 0.6503021717071533,
1237
+ "learning_rate": 1.6394594594594595e-05,
1238
+ "loss": 0.5443,
1239
+ "step": 1730
1240
+ },
1241
+ {
1242
+ "epoch": 1.392,
1243
+ "grad_norm": 16.69384002685547,
1244
+ "learning_rate": 1.6313513513513515e-05,
1245
+ "loss": 0.777,
1246
+ "step": 1740
1247
+ },
1248
+ {
1249
+ "epoch": 1.4,
1250
+ "grad_norm": 11.571426391601562,
1251
+ "learning_rate": 1.6232432432432432e-05,
1252
+ "loss": 0.7578,
1253
+ "step": 1750
1254
+ },
1255
+ {
1256
+ "epoch": 1.408,
1257
+ "grad_norm": 19.505590438842773,
1258
+ "learning_rate": 1.6151351351351352e-05,
1259
+ "loss": 0.755,
1260
+ "step": 1760
1261
+ },
1262
+ {
1263
+ "epoch": 1.416,
1264
+ "grad_norm": 12.909994125366211,
1265
+ "learning_rate": 1.607027027027027e-05,
1266
+ "loss": 0.5617,
1267
+ "step": 1770
1268
+ },
1269
+ {
1270
+ "epoch": 1.424,
1271
+ "grad_norm": 10.301375389099121,
1272
+ "learning_rate": 1.598918918918919e-05,
1273
+ "loss": 0.7985,
1274
+ "step": 1780
1275
+ },
1276
+ {
1277
+ "epoch": 1.432,
1278
+ "grad_norm": 20.1243839263916,
1279
+ "learning_rate": 1.590810810810811e-05,
1280
+ "loss": 0.6966,
1281
+ "step": 1790
1282
+ },
1283
+ {
1284
+ "epoch": 1.44,
1285
+ "grad_norm": 5.007569789886475,
1286
+ "learning_rate": 1.582702702702703e-05,
1287
+ "loss": 0.5544,
1288
+ "step": 1800
1289
+ },
1290
+ {
1291
+ "epoch": 1.448,
1292
+ "grad_norm": 2.8352081775665283,
1293
+ "learning_rate": 1.5745945945945946e-05,
1294
+ "loss": 0.8716,
1295
+ "step": 1810
1296
+ },
1297
+ {
1298
+ "epoch": 1.456,
1299
+ "grad_norm": 8.246051788330078,
1300
+ "learning_rate": 1.5664864864864866e-05,
1301
+ "loss": 0.449,
1302
+ "step": 1820
1303
+ },
1304
+ {
1305
+ "epoch": 1.464,
1306
+ "grad_norm": 7.072529315948486,
1307
+ "learning_rate": 1.5583783783783786e-05,
1308
+ "loss": 0.6345,
1309
+ "step": 1830
1310
+ },
1311
+ {
1312
+ "epoch": 1.472,
1313
+ "grad_norm": 11.075968742370605,
1314
+ "learning_rate": 1.5502702702702706e-05,
1315
+ "loss": 0.5029,
1316
+ "step": 1840
1317
+ },
1318
+ {
1319
+ "epoch": 1.48,
1320
+ "grad_norm": 10.411526679992676,
1321
+ "learning_rate": 1.5421621621621622e-05,
1322
+ "loss": 0.4708,
1323
+ "step": 1850
1324
+ },
1325
+ {
1326
+ "epoch": 1.488,
1327
+ "grad_norm": 13.274471282958984,
1328
+ "learning_rate": 1.534054054054054e-05,
1329
+ "loss": 0.4823,
1330
+ "step": 1860
1331
+ },
1332
+ {
1333
+ "epoch": 1.496,
1334
+ "grad_norm": 8.039985656738281,
1335
+ "learning_rate": 1.525945945945946e-05,
1336
+ "loss": 0.5719,
1337
+ "step": 1870
1338
+ },
1339
+ {
1340
+ "epoch": 1.504,
1341
+ "grad_norm": 19.872621536254883,
1342
+ "learning_rate": 1.5178378378378381e-05,
1343
+ "loss": 0.7933,
1344
+ "step": 1880
1345
+ },
1346
+ {
1347
+ "epoch": 1.512,
1348
+ "grad_norm": 12.538918495178223,
1349
+ "learning_rate": 1.5097297297297296e-05,
1350
+ "loss": 0.4885,
1351
+ "step": 1890
1352
+ },
1353
+ {
1354
+ "epoch": 1.52,
1355
+ "grad_norm": 26.685623168945312,
1356
+ "learning_rate": 1.5016216216216216e-05,
1357
+ "loss": 0.6399,
1358
+ "step": 1900
1359
+ },
1360
+ {
1361
+ "epoch": 1.528,
1362
+ "grad_norm": 10.571418762207031,
1363
+ "learning_rate": 1.4935135135135136e-05,
1364
+ "loss": 0.6462,
1365
+ "step": 1910
1366
+ },
1367
+ {
1368
+ "epoch": 1.536,
1369
+ "grad_norm": 3.1144027709960938,
1370
+ "learning_rate": 1.4854054054054054e-05,
1371
+ "loss": 0.6041,
1372
+ "step": 1920
1373
+ },
1374
+ {
1375
+ "epoch": 1.544,
1376
+ "grad_norm": 5.647855758666992,
1377
+ "learning_rate": 1.4772972972972975e-05,
1378
+ "loss": 0.584,
1379
+ "step": 1930
1380
+ },
1381
+ {
1382
+ "epoch": 1.552,
1383
+ "grad_norm": 9.756006240844727,
1384
+ "learning_rate": 1.4691891891891893e-05,
1385
+ "loss": 0.3929,
1386
+ "step": 1940
1387
+ },
1388
+ {
1389
+ "epoch": 1.56,
1390
+ "grad_norm": 7.031187534332275,
1391
+ "learning_rate": 1.4610810810810811e-05,
1392
+ "loss": 0.5963,
1393
+ "step": 1950
1394
+ },
1395
+ {
1396
+ "epoch": 1.568,
1397
+ "grad_norm": 19.187641143798828,
1398
+ "learning_rate": 1.452972972972973e-05,
1399
+ "loss": 0.4967,
1400
+ "step": 1960
1401
+ },
1402
+ {
1403
+ "epoch": 1.576,
1404
+ "grad_norm": 16.660043716430664,
1405
+ "learning_rate": 1.444864864864865e-05,
1406
+ "loss": 0.632,
1407
+ "step": 1970
1408
+ },
1409
+ {
1410
+ "epoch": 1.584,
1411
+ "grad_norm": 15.292383193969727,
1412
+ "learning_rate": 1.4367567567567568e-05,
1413
+ "loss": 0.6671,
1414
+ "step": 1980
1415
+ },
1416
+ {
1417
+ "epoch": 1.592,
1418
+ "grad_norm": 15.64156436920166,
1419
+ "learning_rate": 1.4286486486486488e-05,
1420
+ "loss": 0.5182,
1421
+ "step": 1990
1422
+ },
1423
+ {
1424
+ "epoch": 1.6,
1425
+ "grad_norm": 12.81575870513916,
1426
+ "learning_rate": 1.4205405405405405e-05,
1427
+ "loss": 0.7377,
1428
+ "step": 2000
1429
+ },
1430
+ {
1431
+ "epoch": 1.608,
1432
+ "grad_norm": 28.722570419311523,
1433
+ "learning_rate": 1.4124324324324325e-05,
1434
+ "loss": 0.6432,
1435
+ "step": 2010
1436
+ },
1437
+ {
1438
+ "epoch": 1.616,
1439
+ "grad_norm": 9.101573944091797,
1440
+ "learning_rate": 1.4043243243243243e-05,
1441
+ "loss": 0.7597,
1442
+ "step": 2020
1443
+ },
1444
+ {
1445
+ "epoch": 1.624,
1446
+ "grad_norm": 18.51584815979004,
1447
+ "learning_rate": 1.3962162162162163e-05,
1448
+ "loss": 0.5169,
1449
+ "step": 2030
1450
+ },
1451
+ {
1452
+ "epoch": 1.6320000000000001,
1453
+ "grad_norm": 19.951353073120117,
1454
+ "learning_rate": 1.3881081081081082e-05,
1455
+ "loss": 0.6938,
1456
+ "step": 2040
1457
+ },
1458
+ {
1459
+ "epoch": 1.6400000000000001,
1460
+ "grad_norm": 0.7395208477973938,
1461
+ "learning_rate": 1.3800000000000002e-05,
1462
+ "loss": 0.5438,
1463
+ "step": 2050
1464
+ },
1465
+ {
1466
+ "epoch": 1.6480000000000001,
1467
+ "grad_norm": 23.373943328857422,
1468
+ "learning_rate": 1.3718918918918918e-05,
1469
+ "loss": 0.543,
1470
+ "step": 2060
1471
+ },
1472
+ {
1473
+ "epoch": 1.6560000000000001,
1474
+ "grad_norm": 13.313843727111816,
1475
+ "learning_rate": 1.3637837837837838e-05,
1476
+ "loss": 0.752,
1477
+ "step": 2070
1478
+ },
1479
+ {
1480
+ "epoch": 1.6640000000000001,
1481
+ "grad_norm": 19.967775344848633,
1482
+ "learning_rate": 1.3556756756756757e-05,
1483
+ "loss": 0.5858,
1484
+ "step": 2080
1485
+ },
1486
+ {
1487
+ "epoch": 1.6720000000000002,
1488
+ "grad_norm": 6.15806770324707,
1489
+ "learning_rate": 1.3475675675675677e-05,
1490
+ "loss": 0.6735,
1491
+ "step": 2090
1492
+ },
1493
+ {
1494
+ "epoch": 1.6800000000000002,
1495
+ "grad_norm": 20.810691833496094,
1496
+ "learning_rate": 1.3394594594594595e-05,
1497
+ "loss": 0.7207,
1498
+ "step": 2100
1499
+ },
1500
+ {
1501
+ "epoch": 1.688,
1502
+ "grad_norm": 21.559804916381836,
1503
+ "learning_rate": 1.3313513513513514e-05,
1504
+ "loss": 0.6681,
1505
+ "step": 2110
1506
+ },
1507
+ {
1508
+ "epoch": 1.696,
1509
+ "grad_norm": 5.827245235443115,
1510
+ "learning_rate": 1.3232432432432432e-05,
1511
+ "loss": 0.5774,
1512
+ "step": 2120
1513
+ },
1514
+ {
1515
+ "epoch": 1.704,
1516
+ "grad_norm": 4.7927069664001465,
1517
+ "learning_rate": 1.3151351351351352e-05,
1518
+ "loss": 0.6439,
1519
+ "step": 2130
1520
+ },
1521
+ {
1522
+ "epoch": 1.712,
1523
+ "grad_norm": 14.177338600158691,
1524
+ "learning_rate": 1.307027027027027e-05,
1525
+ "loss": 0.7137,
1526
+ "step": 2140
1527
+ },
1528
+ {
1529
+ "epoch": 1.72,
1530
+ "grad_norm": 14.718915939331055,
1531
+ "learning_rate": 1.298918918918919e-05,
1532
+ "loss": 0.5463,
1533
+ "step": 2150
1534
+ },
1535
+ {
1536
+ "epoch": 1.728,
1537
+ "grad_norm": 18.23885726928711,
1538
+ "learning_rate": 1.2908108108108109e-05,
1539
+ "loss": 0.6557,
1540
+ "step": 2160
1541
+ },
1542
+ {
1543
+ "epoch": 1.736,
1544
+ "grad_norm": 9.514300346374512,
1545
+ "learning_rate": 1.2827027027027027e-05,
1546
+ "loss": 0.5304,
1547
+ "step": 2170
1548
+ },
1549
+ {
1550
+ "epoch": 1.744,
1551
+ "grad_norm": 15.950238227844238,
1552
+ "learning_rate": 1.2745945945945946e-05,
1553
+ "loss": 0.721,
1554
+ "step": 2180
1555
+ },
1556
+ {
1557
+ "epoch": 1.752,
1558
+ "grad_norm": 5.722634315490723,
1559
+ "learning_rate": 1.2664864864864866e-05,
1560
+ "loss": 0.3436,
1561
+ "step": 2190
1562
+ },
1563
+ {
1564
+ "epoch": 1.76,
1565
+ "grad_norm": 11.308035850524902,
1566
+ "learning_rate": 1.2583783783783784e-05,
1567
+ "loss": 0.5269,
1568
+ "step": 2200
1569
+ },
1570
+ {
1571
+ "epoch": 1.768,
1572
+ "grad_norm": 5.413994789123535,
1573
+ "learning_rate": 1.2502702702702704e-05,
1574
+ "loss": 0.4461,
1575
+ "step": 2210
1576
+ },
1577
+ {
1578
+ "epoch": 1.776,
1579
+ "grad_norm": 29.982696533203125,
1580
+ "learning_rate": 1.2421621621621622e-05,
1581
+ "loss": 0.5942,
1582
+ "step": 2220
1583
+ },
1584
+ {
1585
+ "epoch": 1.784,
1586
+ "grad_norm": 25.45384979248047,
1587
+ "learning_rate": 1.234054054054054e-05,
1588
+ "loss": 0.5469,
1589
+ "step": 2230
1590
+ },
1591
+ {
1592
+ "epoch": 1.792,
1593
+ "grad_norm": 10.957773208618164,
1594
+ "learning_rate": 1.225945945945946e-05,
1595
+ "loss": 0.4809,
1596
+ "step": 2240
1597
+ },
1598
+ {
1599
+ "epoch": 1.8,
1600
+ "grad_norm": 11.609101295471191,
1601
+ "learning_rate": 1.217837837837838e-05,
1602
+ "loss": 0.73,
1603
+ "step": 2250
1604
+ },
1605
+ {
1606
+ "epoch": 1.808,
1607
+ "grad_norm": 14.325447082519531,
1608
+ "learning_rate": 1.2097297297297298e-05,
1609
+ "loss": 0.3926,
1610
+ "step": 2260
1611
+ },
1612
+ {
1613
+ "epoch": 1.8159999999999998,
1614
+ "grad_norm": 4.8160719871521,
1615
+ "learning_rate": 1.2016216216216218e-05,
1616
+ "loss": 0.5617,
1617
+ "step": 2270
1618
+ },
1619
+ {
1620
+ "epoch": 1.8239999999999998,
1621
+ "grad_norm": 6.287977695465088,
1622
+ "learning_rate": 1.1935135135135134e-05,
1623
+ "loss": 0.3482,
1624
+ "step": 2280
1625
+ },
1626
+ {
1627
+ "epoch": 1.8319999999999999,
1628
+ "grad_norm": 9.819110870361328,
1629
+ "learning_rate": 1.1854054054054054e-05,
1630
+ "loss": 0.5353,
1631
+ "step": 2290
1632
+ },
1633
+ {
1634
+ "epoch": 1.8399999999999999,
1635
+ "grad_norm": 7.572418689727783,
1636
+ "learning_rate": 1.1772972972972973e-05,
1637
+ "loss": 0.7756,
1638
+ "step": 2300
1639
+ },
1640
+ {
1641
+ "epoch": 1.8479999999999999,
1642
+ "grad_norm": 16.719934463500977,
1643
+ "learning_rate": 1.1691891891891893e-05,
1644
+ "loss": 0.6298,
1645
+ "step": 2310
1646
+ },
1647
+ {
1648
+ "epoch": 1.8559999999999999,
1649
+ "grad_norm": 18.21957778930664,
1650
+ "learning_rate": 1.1610810810810811e-05,
1651
+ "loss": 0.4695,
1652
+ "step": 2320
1653
+ },
1654
+ {
1655
+ "epoch": 1.8639999999999999,
1656
+ "grad_norm": 5.47652530670166,
1657
+ "learning_rate": 1.1529729729729731e-05,
1658
+ "loss": 0.2983,
1659
+ "step": 2330
1660
+ },
1661
+ {
1662
+ "epoch": 1.8719999999999999,
1663
+ "grad_norm": 22.044818878173828,
1664
+ "learning_rate": 1.1448648648648648e-05,
1665
+ "loss": 0.5972,
1666
+ "step": 2340
1667
+ },
1668
+ {
1669
+ "epoch": 1.88,
1670
+ "grad_norm": 26.34394645690918,
1671
+ "learning_rate": 1.1367567567567568e-05,
1672
+ "loss": 0.4404,
1673
+ "step": 2350
1674
+ },
1675
+ {
1676
+ "epoch": 1.888,
1677
+ "grad_norm": 21.979583740234375,
1678
+ "learning_rate": 1.1286486486486486e-05,
1679
+ "loss": 0.6474,
1680
+ "step": 2360
1681
+ },
1682
+ {
1683
+ "epoch": 1.896,
1684
+ "grad_norm": 15.81022834777832,
1685
+ "learning_rate": 1.1205405405405406e-05,
1686
+ "loss": 0.552,
1687
+ "step": 2370
1688
+ },
1689
+ {
1690
+ "epoch": 1.904,
1691
+ "grad_norm": 13.853069305419922,
1692
+ "learning_rate": 1.1124324324324325e-05,
1693
+ "loss": 0.5908,
1694
+ "step": 2380
1695
+ },
1696
+ {
1697
+ "epoch": 1.912,
1698
+ "grad_norm": 4.924503326416016,
1699
+ "learning_rate": 1.1043243243243243e-05,
1700
+ "loss": 0.3883,
1701
+ "step": 2390
1702
+ },
1703
+ {
1704
+ "epoch": 1.92,
1705
+ "grad_norm": 15.801043510437012,
1706
+ "learning_rate": 1.0962162162162162e-05,
1707
+ "loss": 0.4635,
1708
+ "step": 2400
1709
+ },
1710
+ {
1711
+ "epoch": 1.928,
1712
+ "grad_norm": 17.398475646972656,
1713
+ "learning_rate": 1.0881081081081082e-05,
1714
+ "loss": 0.5509,
1715
+ "step": 2410
1716
+ },
1717
+ {
1718
+ "epoch": 1.936,
1719
+ "grad_norm": 12.026921272277832,
1720
+ "learning_rate": 1.08e-05,
1721
+ "loss": 0.533,
1722
+ "step": 2420
1723
+ },
1724
+ {
1725
+ "epoch": 1.944,
1726
+ "grad_norm": 23.21822738647461,
1727
+ "learning_rate": 1.071891891891892e-05,
1728
+ "loss": 0.5655,
1729
+ "step": 2430
1730
+ },
1731
+ {
1732
+ "epoch": 1.952,
1733
+ "grad_norm": 9.777156829833984,
1734
+ "learning_rate": 1.0637837837837838e-05,
1735
+ "loss": 0.7364,
1736
+ "step": 2440
1737
+ },
1738
+ {
1739
+ "epoch": 1.96,
1740
+ "grad_norm": 17.892311096191406,
1741
+ "learning_rate": 1.0556756756756757e-05,
1742
+ "loss": 0.5123,
1743
+ "step": 2450
1744
+ },
1745
+ {
1746
+ "epoch": 1.968,
1747
+ "grad_norm": 10.79381275177002,
1748
+ "learning_rate": 1.0475675675675675e-05,
1749
+ "loss": 0.5601,
1750
+ "step": 2460
1751
+ },
1752
+ {
1753
+ "epoch": 1.976,
1754
+ "grad_norm": 16.45550537109375,
1755
+ "learning_rate": 1.0394594594594595e-05,
1756
+ "loss": 0.3604,
1757
+ "step": 2470
1758
+ },
1759
+ {
1760
+ "epoch": 1.984,
1761
+ "grad_norm": 4.945703506469727,
1762
+ "learning_rate": 1.0313513513513514e-05,
1763
+ "loss": 0.5801,
1764
+ "step": 2480
1765
+ },
1766
+ {
1767
+ "epoch": 1.992,
1768
+ "grad_norm": 7.19441556930542,
1769
+ "learning_rate": 1.0232432432432434e-05,
1770
+ "loss": 0.5552,
1771
+ "step": 2490
1772
+ },
1773
+ {
1774
+ "epoch": 2.0,
1775
+ "grad_norm": 16.103708267211914,
1776
+ "learning_rate": 1.0151351351351352e-05,
1777
+ "loss": 0.5873,
1778
+ "step": 2500
1779
+ },
1780
+ {
1781
+ "epoch": 2.0,
1782
+ "eval_accuracy": 0.7983991995997999,
1783
+ "eval_f1": 0.8020345252909776,
1784
+ "eval_loss": 0.6678956747055054,
1785
+ "eval_model_preparation_time": 0.0029,
1786
+ "eval_runtime": 59.5494,
1787
+ "eval_samples_per_second": 33.569,
1788
+ "eval_steps_per_second": 4.198,
1789
+ "step": 2500
1790
+ },
1791
+ {
1792
+ "epoch": 2.008,
1793
+ "grad_norm": 14.572410583496094,
1794
+ "learning_rate": 1.007027027027027e-05,
1795
+ "loss": 0.3791,
1796
+ "step": 2510
1797
+ },
1798
+ {
1799
+ "epoch": 2.016,
1800
+ "grad_norm": 12.122941970825195,
1801
+ "learning_rate": 9.989189189189189e-06,
1802
+ "loss": 0.4778,
1803
+ "step": 2520
1804
+ },
1805
+ {
1806
+ "epoch": 2.024,
1807
+ "grad_norm": 12.55219841003418,
1808
+ "learning_rate": 9.908108108108109e-06,
1809
+ "loss": 0.3812,
1810
+ "step": 2530
1811
+ },
1812
+ {
1813
+ "epoch": 2.032,
1814
+ "grad_norm": 0.675682544708252,
1815
+ "learning_rate": 9.827027027027027e-06,
1816
+ "loss": 0.417,
1817
+ "step": 2540
1818
+ },
1819
+ {
1820
+ "epoch": 2.04,
1821
+ "grad_norm": 4.769517421722412,
1822
+ "learning_rate": 9.745945945945947e-06,
1823
+ "loss": 0.4431,
1824
+ "step": 2550
1825
+ },
1826
+ {
1827
+ "epoch": 2.048,
1828
+ "grad_norm": 17.767240524291992,
1829
+ "learning_rate": 9.664864864864864e-06,
1830
+ "loss": 0.8262,
1831
+ "step": 2560
1832
+ },
1833
+ {
1834
+ "epoch": 2.056,
1835
+ "grad_norm": 10.690773010253906,
1836
+ "learning_rate": 9.583783783783784e-06,
1837
+ "loss": 0.2962,
1838
+ "step": 2570
1839
+ },
1840
+ {
1841
+ "epoch": 2.064,
1842
+ "grad_norm": 14.983344078063965,
1843
+ "learning_rate": 9.502702702702702e-06,
1844
+ "loss": 0.338,
1845
+ "step": 2580
1846
+ },
1847
+ {
1848
+ "epoch": 2.072,
1849
+ "grad_norm": 1.5337533950805664,
1850
+ "learning_rate": 9.421621621621622e-06,
1851
+ "loss": 0.3878,
1852
+ "step": 2590
1853
+ },
1854
+ {
1855
+ "epoch": 2.08,
1856
+ "grad_norm": 20.486373901367188,
1857
+ "learning_rate": 9.34054054054054e-06,
1858
+ "loss": 0.5939,
1859
+ "step": 2600
1860
+ },
1861
+ {
1862
+ "epoch": 2.088,
1863
+ "grad_norm": 20.5994815826416,
1864
+ "learning_rate": 9.25945945945946e-06,
1865
+ "loss": 0.2976,
1866
+ "step": 2610
1867
+ },
1868
+ {
1869
+ "epoch": 2.096,
1870
+ "grad_norm": 14.256688117980957,
1871
+ "learning_rate": 9.178378378378377e-06,
1872
+ "loss": 0.5269,
1873
+ "step": 2620
1874
+ },
1875
+ {
1876
+ "epoch": 2.104,
1877
+ "grad_norm": 60.314727783203125,
1878
+ "learning_rate": 9.097297297297298e-06,
1879
+ "loss": 0.4188,
1880
+ "step": 2630
1881
+ },
1882
+ {
1883
+ "epoch": 2.112,
1884
+ "grad_norm": 1.31520676612854,
1885
+ "learning_rate": 9.016216216216216e-06,
1886
+ "loss": 0.4827,
1887
+ "step": 2640
1888
+ },
1889
+ {
1890
+ "epoch": 2.12,
1891
+ "grad_norm": 10.482166290283203,
1892
+ "learning_rate": 8.935135135135136e-06,
1893
+ "loss": 0.4041,
1894
+ "step": 2650
1895
+ },
1896
+ {
1897
+ "epoch": 2.128,
1898
+ "grad_norm": 14.786431312561035,
1899
+ "learning_rate": 8.854054054054054e-06,
1900
+ "loss": 0.5581,
1901
+ "step": 2660
1902
+ },
1903
+ {
1904
+ "epoch": 2.136,
1905
+ "grad_norm": 14.168709754943848,
1906
+ "learning_rate": 8.772972972972973e-06,
1907
+ "loss": 0.5987,
1908
+ "step": 2670
1909
+ },
1910
+ {
1911
+ "epoch": 2.144,
1912
+ "grad_norm": 16.554393768310547,
1913
+ "learning_rate": 8.691891891891891e-06,
1914
+ "loss": 0.3347,
1915
+ "step": 2680
1916
+ },
1917
+ {
1918
+ "epoch": 2.152,
1919
+ "grad_norm": 21.167463302612305,
1920
+ "learning_rate": 8.610810810810811e-06,
1921
+ "loss": 0.3759,
1922
+ "step": 2690
1923
+ },
1924
+ {
1925
+ "epoch": 2.16,
1926
+ "grad_norm": 4.345006942749023,
1927
+ "learning_rate": 8.52972972972973e-06,
1928
+ "loss": 0.4237,
1929
+ "step": 2700
1930
+ },
1931
+ {
1932
+ "epoch": 2.168,
1933
+ "grad_norm": 2.3528635501861572,
1934
+ "learning_rate": 8.44864864864865e-06,
1935
+ "loss": 0.4666,
1936
+ "step": 2710
1937
+ },
1938
+ {
1939
+ "epoch": 2.176,
1940
+ "grad_norm": 19.55575942993164,
1941
+ "learning_rate": 8.367567567567568e-06,
1942
+ "loss": 0.4797,
1943
+ "step": 2720
1944
+ },
1945
+ {
1946
+ "epoch": 2.184,
1947
+ "grad_norm": 4.779571056365967,
1948
+ "learning_rate": 8.286486486486486e-06,
1949
+ "loss": 0.2949,
1950
+ "step": 2730
1951
+ },
1952
+ {
1953
+ "epoch": 2.192,
1954
+ "grad_norm": 2.063556671142578,
1955
+ "learning_rate": 8.205405405405405e-06,
1956
+ "loss": 0.3373,
1957
+ "step": 2740
1958
+ },
1959
+ {
1960
+ "epoch": 2.2,
1961
+ "grad_norm": 16.083656311035156,
1962
+ "learning_rate": 8.124324324324325e-06,
1963
+ "loss": 0.3836,
1964
+ "step": 2750
1965
+ },
1966
+ {
1967
+ "epoch": 2.208,
1968
+ "grad_norm": 17.152666091918945,
1969
+ "learning_rate": 8.043243243243243e-06,
1970
+ "loss": 0.4052,
1971
+ "step": 2760
1972
+ },
1973
+ {
1974
+ "epoch": 2.216,
1975
+ "grad_norm": 23.9571590423584,
1976
+ "learning_rate": 7.962162162162163e-06,
1977
+ "loss": 0.4243,
1978
+ "step": 2770
1979
+ },
1980
+ {
1981
+ "epoch": 2.224,
1982
+ "grad_norm": 25.439889907836914,
1983
+ "learning_rate": 7.889189189189188e-06,
1984
+ "loss": 0.4287,
1985
+ "step": 2780
1986
+ },
1987
+ {
1988
+ "epoch": 2.232,
1989
+ "grad_norm": 7.069023609161377,
1990
+ "learning_rate": 7.808108108108109e-06,
1991
+ "loss": 0.5156,
1992
+ "step": 2790
1993
+ },
1994
+ {
1995
+ "epoch": 2.24,
1996
+ "grad_norm": 20.745628356933594,
1997
+ "learning_rate": 7.727027027027027e-06,
1998
+ "loss": 0.4436,
1999
+ "step": 2800
2000
+ },
2001
+ {
2002
+ "epoch": 2.248,
2003
+ "grad_norm": 21.367656707763672,
2004
+ "learning_rate": 7.645945945945947e-06,
2005
+ "loss": 0.5644,
2006
+ "step": 2810
2007
+ },
2008
+ {
2009
+ "epoch": 2.2560000000000002,
2010
+ "grad_norm": 18.602930068969727,
2011
+ "learning_rate": 7.5648648648648645e-06,
2012
+ "loss": 0.1994,
2013
+ "step": 2820
2014
+ },
2015
+ {
2016
+ "epoch": 2.2640000000000002,
2017
+ "grad_norm": 21.02153205871582,
2018
+ "learning_rate": 7.4837837837837845e-06,
2019
+ "loss": 0.5426,
2020
+ "step": 2830
2021
+ },
2022
+ {
2023
+ "epoch": 2.2720000000000002,
2024
+ "grad_norm": 6.536153793334961,
2025
+ "learning_rate": 7.402702702702703e-06,
2026
+ "loss": 0.3882,
2027
+ "step": 2840
2028
+ },
2029
+ {
2030
+ "epoch": 2.2800000000000002,
2031
+ "grad_norm": 55.836551666259766,
2032
+ "learning_rate": 7.321621621621622e-06,
2033
+ "loss": 0.4888,
2034
+ "step": 2850
2035
+ },
2036
+ {
2037
+ "epoch": 2.288,
2038
+ "grad_norm": 0.8393071293830872,
2039
+ "learning_rate": 7.2405405405405405e-06,
2040
+ "loss": 0.2992,
2041
+ "step": 2860
2042
+ },
2043
+ {
2044
+ "epoch": 2.296,
2045
+ "grad_norm": 15.882612228393555,
2046
+ "learning_rate": 7.15945945945946e-06,
2047
+ "loss": 0.4938,
2048
+ "step": 2870
2049
+ },
2050
+ {
2051
+ "epoch": 2.304,
2052
+ "grad_norm": 18.28456687927246,
2053
+ "learning_rate": 7.078378378378379e-06,
2054
+ "loss": 0.3967,
2055
+ "step": 2880
2056
+ },
2057
+ {
2058
+ "epoch": 2.312,
2059
+ "grad_norm": 23.75568962097168,
2060
+ "learning_rate": 6.997297297297297e-06,
2061
+ "loss": 0.2777,
2062
+ "step": 2890
2063
+ },
2064
+ {
2065
+ "epoch": 2.32,
2066
+ "grad_norm": 2.6475250720977783,
2067
+ "learning_rate": 6.9162162162162165e-06,
2068
+ "loss": 0.5461,
2069
+ "step": 2900
2070
+ },
2071
+ {
2072
+ "epoch": 2.328,
2073
+ "grad_norm": 29.49169921875,
2074
+ "learning_rate": 6.835135135135136e-06,
2075
+ "loss": 0.3449,
2076
+ "step": 2910
2077
+ },
2078
+ {
2079
+ "epoch": 2.336,
2080
+ "grad_norm": 45.848045349121094,
2081
+ "learning_rate": 6.754054054054054e-06,
2082
+ "loss": 0.1694,
2083
+ "step": 2920
2084
+ },
2085
+ {
2086
+ "epoch": 2.344,
2087
+ "grad_norm": 9.47195816040039,
2088
+ "learning_rate": 6.672972972972973e-06,
2089
+ "loss": 0.4175,
2090
+ "step": 2930
2091
+ },
2092
+ {
2093
+ "epoch": 2.352,
2094
+ "grad_norm": 1.0717219114303589,
2095
+ "learning_rate": 6.5918918918918925e-06,
2096
+ "loss": 0.2864,
2097
+ "step": 2940
2098
+ },
2099
+ {
2100
+ "epoch": 2.36,
2101
+ "grad_norm": 29.87921714782715,
2102
+ "learning_rate": 6.510810810810811e-06,
2103
+ "loss": 0.3446,
2104
+ "step": 2950
2105
+ },
2106
+ {
2107
+ "epoch": 2.368,
2108
+ "grad_norm": 36.15072250366211,
2109
+ "learning_rate": 6.42972972972973e-06,
2110
+ "loss": 0.4606,
2111
+ "step": 2960
2112
+ },
2113
+ {
2114
+ "epoch": 2.376,
2115
+ "grad_norm": 17.12785530090332,
2116
+ "learning_rate": 6.348648648648649e-06,
2117
+ "loss": 0.5386,
2118
+ "step": 2970
2119
+ },
2120
+ {
2121
+ "epoch": 2.384,
2122
+ "grad_norm": 34.57358932495117,
2123
+ "learning_rate": 6.267567567567568e-06,
2124
+ "loss": 0.3606,
2125
+ "step": 2980
2126
+ },
2127
+ {
2128
+ "epoch": 2.392,
2129
+ "grad_norm": 8.887199401855469,
2130
+ "learning_rate": 6.186486486486487e-06,
2131
+ "loss": 0.5517,
2132
+ "step": 2990
2133
+ },
2134
+ {
2135
+ "epoch": 2.4,
2136
+ "grad_norm": 27.26741600036621,
2137
+ "learning_rate": 6.105405405405405e-06,
2138
+ "loss": 0.4872,
2139
+ "step": 3000
2140
+ },
2141
+ {
2142
+ "epoch": 2.408,
2143
+ "grad_norm": 40.56803894042969,
2144
+ "learning_rate": 6.0243243243243245e-06,
2145
+ "loss": 0.4345,
2146
+ "step": 3010
2147
+ },
2148
+ {
2149
+ "epoch": 2.416,
2150
+ "grad_norm": 0.35344982147216797,
2151
+ "learning_rate": 5.943243243243244e-06,
2152
+ "loss": 0.3625,
2153
+ "step": 3020
2154
+ },
2155
+ {
2156
+ "epoch": 2.424,
2157
+ "grad_norm": 18.964570999145508,
2158
+ "learning_rate": 5.862162162162162e-06,
2159
+ "loss": 0.2237,
2160
+ "step": 3030
2161
+ },
2162
+ {
2163
+ "epoch": 2.432,
2164
+ "grad_norm": 1.419640302658081,
2165
+ "learning_rate": 5.781081081081081e-06,
2166
+ "loss": 0.2976,
2167
+ "step": 3040
2168
+ },
2169
+ {
2170
+ "epoch": 2.44,
2171
+ "grad_norm": 32.69036102294922,
2172
+ "learning_rate": 5.7000000000000005e-06,
2173
+ "loss": 0.4991,
2174
+ "step": 3050
2175
+ },
2176
+ {
2177
+ "epoch": 2.448,
2178
+ "grad_norm": 9.119990348815918,
2179
+ "learning_rate": 5.618918918918919e-06,
2180
+ "loss": 0.5615,
2181
+ "step": 3060
2182
+ },
2183
+ {
2184
+ "epoch": 2.456,
2185
+ "grad_norm": 7.965466499328613,
2186
+ "learning_rate": 5.537837837837838e-06,
2187
+ "loss": 0.4426,
2188
+ "step": 3070
2189
+ },
2190
+ {
2191
+ "epoch": 2.464,
2192
+ "grad_norm": 0.6287997364997864,
2193
+ "learning_rate": 5.456756756756757e-06,
2194
+ "loss": 0.3599,
2195
+ "step": 3080
2196
+ },
2197
+ {
2198
+ "epoch": 2.472,
2199
+ "grad_norm": 19.988008499145508,
2200
+ "learning_rate": 5.375675675675676e-06,
2201
+ "loss": 0.3474,
2202
+ "step": 3090
2203
+ },
2204
+ {
2205
+ "epoch": 2.48,
2206
+ "grad_norm": 4.660024642944336,
2207
+ "learning_rate": 5.294594594594595e-06,
2208
+ "loss": 0.4499,
2209
+ "step": 3100
2210
+ },
2211
+ {
2212
+ "epoch": 2.488,
2213
+ "grad_norm": 18.610027313232422,
2214
+ "learning_rate": 5.213513513513514e-06,
2215
+ "loss": 0.2914,
2216
+ "step": 3110
2217
+ },
2218
+ {
2219
+ "epoch": 2.496,
2220
+ "grad_norm": 14.92201042175293,
2221
+ "learning_rate": 5.1324324324324324e-06,
2222
+ "loss": 0.4706,
2223
+ "step": 3120
2224
+ },
2225
+ {
2226
+ "epoch": 2.504,
2227
+ "grad_norm": 13.610057830810547,
2228
+ "learning_rate": 5.051351351351352e-06,
2229
+ "loss": 0.5549,
2230
+ "step": 3130
2231
+ },
2232
+ {
2233
+ "epoch": 2.512,
2234
+ "grad_norm": 41.13311004638672,
2235
+ "learning_rate": 4.97027027027027e-06,
2236
+ "loss": 0.4933,
2237
+ "step": 3140
2238
+ },
2239
+ {
2240
+ "epoch": 2.52,
2241
+ "grad_norm": 3.7109222412109375,
2242
+ "learning_rate": 4.889189189189189e-06,
2243
+ "loss": 0.2406,
2244
+ "step": 3150
2245
+ },
2246
+ {
2247
+ "epoch": 2.528,
2248
+ "grad_norm": 6.251349449157715,
2249
+ "learning_rate": 4.8081081081081085e-06,
2250
+ "loss": 0.417,
2251
+ "step": 3160
2252
+ },
2253
+ {
2254
+ "epoch": 2.536,
2255
+ "grad_norm": 22.042970657348633,
2256
+ "learning_rate": 4.727027027027027e-06,
2257
+ "loss": 0.397,
2258
+ "step": 3170
2259
+ },
2260
+ {
2261
+ "epoch": 2.544,
2262
+ "grad_norm": 5.666184902191162,
2263
+ "learning_rate": 4.645945945945946e-06,
2264
+ "loss": 0.5293,
2265
+ "step": 3180
2266
+ },
2267
+ {
2268
+ "epoch": 2.552,
2269
+ "grad_norm": 36.02572250366211,
2270
+ "learning_rate": 4.564864864864865e-06,
2271
+ "loss": 0.3895,
2272
+ "step": 3190
2273
+ },
2274
+ {
2275
+ "epoch": 2.56,
2276
+ "grad_norm": 17.9006404876709,
2277
+ "learning_rate": 4.483783783783784e-06,
2278
+ "loss": 0.3631,
2279
+ "step": 3200
2280
+ },
2281
+ {
2282
+ "epoch": 2.568,
2283
+ "grad_norm": 8.699700355529785,
2284
+ "learning_rate": 4.402702702702703e-06,
2285
+ "loss": 0.4893,
2286
+ "step": 3210
2287
+ },
2288
+ {
2289
+ "epoch": 2.576,
2290
+ "grad_norm": 0.3424607813358307,
2291
+ "learning_rate": 4.321621621621622e-06,
2292
+ "loss": 0.2428,
2293
+ "step": 3220
2294
+ },
2295
+ {
2296
+ "epoch": 2.584,
2297
+ "grad_norm": 4.9575018882751465,
2298
+ "learning_rate": 4.24054054054054e-06,
2299
+ "loss": 0.2304,
2300
+ "step": 3230
2301
+ },
2302
+ {
2303
+ "epoch": 2.592,
2304
+ "grad_norm": 11.896671295166016,
2305
+ "learning_rate": 4.15945945945946e-06,
2306
+ "loss": 0.4873,
2307
+ "step": 3240
2308
+ },
2309
+ {
2310
+ "epoch": 2.6,
2311
+ "grad_norm": 36.56340789794922,
2312
+ "learning_rate": 4.078378378378379e-06,
2313
+ "loss": 0.6687,
2314
+ "step": 3250
2315
+ },
2316
+ {
2317
+ "epoch": 2.608,
2318
+ "grad_norm": 12.333868026733398,
2319
+ "learning_rate": 3.997297297297297e-06,
2320
+ "loss": 0.4143,
2321
+ "step": 3260
2322
+ },
2323
+ {
2324
+ "epoch": 2.616,
2325
+ "grad_norm": 4.652050971984863,
2326
+ "learning_rate": 3.9162162162162164e-06,
2327
+ "loss": 0.3859,
2328
+ "step": 3270
2329
+ },
2330
+ {
2331
+ "epoch": 2.624,
2332
+ "grad_norm": 0.7714823484420776,
2333
+ "learning_rate": 3.835135135135135e-06,
2334
+ "loss": 0.4139,
2335
+ "step": 3280
2336
+ },
2337
+ {
2338
+ "epoch": 2.632,
2339
+ "grad_norm": 11.708436012268066,
2340
+ "learning_rate": 3.754054054054054e-06,
2341
+ "loss": 0.2472,
2342
+ "step": 3290
2343
+ },
2344
+ {
2345
+ "epoch": 2.64,
2346
+ "grad_norm": 13.284863471984863,
2347
+ "learning_rate": 3.6729729729729732e-06,
2348
+ "loss": 0.3367,
2349
+ "step": 3300
2350
+ },
2351
+ {
2352
+ "epoch": 2.648,
2353
+ "grad_norm": 18.693017959594727,
2354
+ "learning_rate": 3.6e-06,
2355
+ "loss": 0.4724,
2356
+ "step": 3310
2357
+ },
2358
+ {
2359
+ "epoch": 2.656,
2360
+ "grad_norm": 1.7859662771224976,
2361
+ "learning_rate": 3.518918918918919e-06,
2362
+ "loss": 0.315,
2363
+ "step": 3320
2364
+ },
2365
+ {
2366
+ "epoch": 2.664,
2367
+ "grad_norm": 37.88275146484375,
2368
+ "learning_rate": 3.437837837837838e-06,
2369
+ "loss": 0.2752,
2370
+ "step": 3330
2371
+ },
2372
+ {
2373
+ "epoch": 2.672,
2374
+ "grad_norm": 0.758948564529419,
2375
+ "learning_rate": 3.3567567567567566e-06,
2376
+ "loss": 0.3667,
2377
+ "step": 3340
2378
+ },
2379
+ {
2380
+ "epoch": 2.68,
2381
+ "grad_norm": 1.520273208618164,
2382
+ "learning_rate": 3.2756756756756754e-06,
2383
+ "loss": 0.263,
2384
+ "step": 3350
2385
+ },
2386
+ {
2387
+ "epoch": 2.6879999999999997,
2388
+ "grad_norm": 0.37978029251098633,
2389
+ "learning_rate": 3.1945945945945946e-06,
2390
+ "loss": 0.3969,
2391
+ "step": 3360
2392
+ },
2393
+ {
2394
+ "epoch": 2.6959999999999997,
2395
+ "grad_norm": 23.12879180908203,
2396
+ "learning_rate": 3.1135135135135134e-06,
2397
+ "loss": 0.2363,
2398
+ "step": 3370
2399
+ },
2400
+ {
2401
+ "epoch": 2.7039999999999997,
2402
+ "grad_norm": 26.812978744506836,
2403
+ "learning_rate": 3.0324324324324322e-06,
2404
+ "loss": 0.5143,
2405
+ "step": 3380
2406
+ },
2407
+ {
2408
+ "epoch": 2.7119999999999997,
2409
+ "grad_norm": 28.945600509643555,
2410
+ "learning_rate": 2.9513513513513514e-06,
2411
+ "loss": 0.243,
2412
+ "step": 3390
2413
+ },
2414
+ {
2415
+ "epoch": 2.7199999999999998,
2416
+ "grad_norm": 8.482378005981445,
2417
+ "learning_rate": 2.8702702702702702e-06,
2418
+ "loss": 0.4,
2419
+ "step": 3400
2420
+ },
2421
+ {
2422
+ "epoch": 2.7279999999999998,
2423
+ "grad_norm": 23.684417724609375,
2424
+ "learning_rate": 2.789189189189189e-06,
2425
+ "loss": 0.3826,
2426
+ "step": 3410
2427
+ },
2428
+ {
2429
+ "epoch": 2.7359999999999998,
2430
+ "grad_norm": 14.898636817932129,
2431
+ "learning_rate": 2.708108108108108e-06,
2432
+ "loss": 0.3521,
2433
+ "step": 3420
2434
+ },
2435
+ {
2436
+ "epoch": 2.7439999999999998,
2437
+ "grad_norm": 1.8441262245178223,
2438
+ "learning_rate": 2.627027027027027e-06,
2439
+ "loss": 0.4089,
2440
+ "step": 3430
2441
+ },
2442
+ {
2443
+ "epoch": 2.752,
2444
+ "grad_norm": 8.590300559997559,
2445
+ "learning_rate": 2.545945945945946e-06,
2446
+ "loss": 0.6467,
2447
+ "step": 3440
2448
+ },
2449
+ {
2450
+ "epoch": 2.76,
2451
+ "grad_norm": 20.450817108154297,
2452
+ "learning_rate": 2.4648648648648646e-06,
2453
+ "loss": 0.3072,
2454
+ "step": 3450
2455
+ },
2456
+ {
2457
+ "epoch": 2.768,
2458
+ "grad_norm": 18.779212951660156,
2459
+ "learning_rate": 2.383783783783784e-06,
2460
+ "loss": 0.393,
2461
+ "step": 3460
2462
+ },
2463
+ {
2464
+ "epoch": 2.776,
2465
+ "grad_norm": 17.161649703979492,
2466
+ "learning_rate": 2.3027027027027026e-06,
2467
+ "loss": 0.3867,
2468
+ "step": 3470
2469
+ },
2470
+ {
2471
+ "epoch": 2.784,
2472
+ "grad_norm": 33.951210021972656,
2473
+ "learning_rate": 2.221621621621622e-06,
2474
+ "loss": 0.1673,
2475
+ "step": 3480
2476
+ },
2477
+ {
2478
+ "epoch": 2.792,
2479
+ "grad_norm": 23.267391204833984,
2480
+ "learning_rate": 2.1405405405405406e-06,
2481
+ "loss": 0.3354,
2482
+ "step": 3490
2483
+ },
2484
+ {
2485
+ "epoch": 2.8,
2486
+ "grad_norm": 35.16474914550781,
2487
+ "learning_rate": 2.05945945945946e-06,
2488
+ "loss": 0.5195,
2489
+ "step": 3500
2490
+ },
2491
+ {
2492
+ "epoch": 2.808,
2493
+ "grad_norm": 13.081944465637207,
2494
+ "learning_rate": 1.9783783783783786e-06,
2495
+ "loss": 0.4999,
2496
+ "step": 3510
2497
+ },
2498
+ {
2499
+ "epoch": 2.816,
2500
+ "grad_norm": 23.258054733276367,
2501
+ "learning_rate": 1.8972972972972976e-06,
2502
+ "loss": 0.364,
2503
+ "step": 3520
2504
+ },
2505
+ {
2506
+ "epoch": 2.824,
2507
+ "grad_norm": 30.492870330810547,
2508
+ "learning_rate": 1.8162162162162162e-06,
2509
+ "loss": 0.4228,
2510
+ "step": 3530
2511
+ },
2512
+ {
2513
+ "epoch": 2.832,
2514
+ "grad_norm": 5.166326999664307,
2515
+ "learning_rate": 1.7351351351351352e-06,
2516
+ "loss": 0.4365,
2517
+ "step": 3540
2518
+ },
2519
+ {
2520
+ "epoch": 2.84,
2521
+ "grad_norm": 14.376697540283203,
2522
+ "learning_rate": 1.654054054054054e-06,
2523
+ "loss": 0.5215,
2524
+ "step": 3550
2525
+ },
2526
+ {
2527
+ "epoch": 2.848,
2528
+ "grad_norm": 22.572032928466797,
2529
+ "learning_rate": 1.572972972972973e-06,
2530
+ "loss": 0.4288,
2531
+ "step": 3560
2532
+ },
2533
+ {
2534
+ "epoch": 2.856,
2535
+ "grad_norm": 23.92298698425293,
2536
+ "learning_rate": 1.4918918918918918e-06,
2537
+ "loss": 0.2887,
2538
+ "step": 3570
2539
+ },
2540
+ {
2541
+ "epoch": 2.864,
2542
+ "grad_norm": 0.24531683325767517,
2543
+ "learning_rate": 1.4108108108108108e-06,
2544
+ "loss": 0.223,
2545
+ "step": 3580
2546
+ },
2547
+ {
2548
+ "epoch": 2.872,
2549
+ "grad_norm": 9.834733963012695,
2550
+ "learning_rate": 1.3297297297297298e-06,
2551
+ "loss": 0.3354,
2552
+ "step": 3590
2553
+ },
2554
+ {
2555
+ "epoch": 2.88,
2556
+ "grad_norm": 24.496023178100586,
2557
+ "learning_rate": 1.2486486486486486e-06,
2558
+ "loss": 0.3741,
2559
+ "step": 3600
2560
+ },
2561
+ {
2562
+ "epoch": 2.888,
2563
+ "grad_norm": 36.65160369873047,
2564
+ "learning_rate": 1.1675675675675676e-06,
2565
+ "loss": 0.3003,
2566
+ "step": 3610
2567
+ },
2568
+ {
2569
+ "epoch": 2.896,
2570
+ "grad_norm": 12.46170425415039,
2571
+ "learning_rate": 1.0864864864864864e-06,
2572
+ "loss": 0.3127,
2573
+ "step": 3620
2574
+ },
2575
+ {
2576
+ "epoch": 2.904,
2577
+ "grad_norm": 10.050320625305176,
2578
+ "learning_rate": 1.0054054054054054e-06,
2579
+ "loss": 0.3092,
2580
+ "step": 3630
2581
+ },
2582
+ {
2583
+ "epoch": 2.912,
2584
+ "grad_norm": 0.31416618824005127,
2585
+ "learning_rate": 9.243243243243244e-07,
2586
+ "loss": 0.3887,
2587
+ "step": 3640
2588
+ },
2589
+ {
2590
+ "epoch": 2.92,
2591
+ "grad_norm": 14.300912857055664,
2592
+ "learning_rate": 8.432432432432433e-07,
2593
+ "loss": 0.4315,
2594
+ "step": 3650
2595
+ },
2596
+ {
2597
+ "epoch": 2.928,
2598
+ "grad_norm": 14.523377418518066,
2599
+ "learning_rate": 7.621621621621622e-07,
2600
+ "loss": 0.574,
2601
+ "step": 3660
2602
+ },
2603
+ {
2604
+ "epoch": 2.936,
2605
+ "grad_norm": 45.13915252685547,
2606
+ "learning_rate": 6.810810810810811e-07,
2607
+ "loss": 0.4068,
2608
+ "step": 3670
2609
+ },
2610
+ {
2611
+ "epoch": 2.944,
2612
+ "grad_norm": 14.518412590026855,
2613
+ "learning_rate": 6.000000000000001e-07,
2614
+ "loss": 0.3466,
2615
+ "step": 3680
2616
+ },
2617
+ {
2618
+ "epoch": 2.952,
2619
+ "grad_norm": 20.95885467529297,
2620
+ "learning_rate": 5.18918918918919e-07,
2621
+ "loss": 0.2941,
2622
+ "step": 3690
2623
+ },
2624
+ {
2625
+ "epoch": 2.96,
2626
+ "grad_norm": 27.785863876342773,
2627
+ "learning_rate": 4.3783783783783787e-07,
2628
+ "loss": 0.5016,
2629
+ "step": 3700
2630
+ },
2631
+ {
2632
+ "epoch": 2.968,
2633
+ "grad_norm": 6.452110767364502,
2634
+ "learning_rate": 3.5675675675675677e-07,
2635
+ "loss": 0.2961,
2636
+ "step": 3710
2637
+ },
2638
+ {
2639
+ "epoch": 2.976,
2640
+ "grad_norm": 6.723122596740723,
2641
+ "learning_rate": 2.7567567567567567e-07,
2642
+ "loss": 0.3583,
2643
+ "step": 3720
2644
+ },
2645
+ {
2646
+ "epoch": 2.984,
2647
+ "grad_norm": 16.233903884887695,
2648
+ "learning_rate": 1.945945945945946e-07,
2649
+ "loss": 0.3477,
2650
+ "step": 3730
2651
+ },
2652
+ {
2653
+ "epoch": 2.992,
2654
+ "grad_norm": 7.397993087768555,
2655
+ "learning_rate": 1.1351351351351351e-07,
2656
+ "loss": 0.2684,
2657
+ "step": 3740
2658
+ },
2659
+ {
2660
+ "epoch": 3.0,
2661
+ "grad_norm": 24.116493225097656,
2662
+ "learning_rate": 3.2432432432432436e-08,
2663
+ "loss": 0.3411,
2664
+ "step": 3750
2665
+ },
2666
+ {
2667
+ "epoch": 3.0,
2668
+ "eval_accuracy": 0.8379189594797398,
2669
+ "eval_f1": 0.845953955875665,
2670
+ "eval_loss": 0.5750879049301147,
2671
+ "eval_model_preparation_time": 0.0029,
2672
+ "eval_runtime": 59.445,
2673
+ "eval_samples_per_second": 33.628,
2674
+ "eval_steps_per_second": 4.206,
2675
+ "step": 3750
2676
+ }
2677
+ ],
2678
+ "logging_steps": 10,
2679
+ "max_steps": 3750,
2680
+ "num_input_tokens_seen": 0,
2681
+ "num_train_epochs": 3,
2682
+ "save_steps": 500,
2683
+ "stateful_callbacks": {
2684
+ "TrainerControl": {
2685
+ "args": {
2686
+ "should_epoch_stop": false,
2687
+ "should_evaluate": false,
2688
+ "should_log": false,
2689
+ "should_save": true,
2690
+ "should_training_stop": true
2691
+ },
2692
+ "attributes": {}
2693
+ }
2694
+ },
2695
+ "total_flos": 1.3246421676490668e+18,
2696
+ "train_batch_size": 8,
2697
+ "trial_name": null,
2698
+ "trial_params": null
2699
+ }
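The block above is the log_history array of a Trainer state file: one row every 10 optimizer steps (logging_steps = 10) plus one evaluation row per epoch, ending after 3 epochs / 3750 steps. As a minimal sketch of how these metrics can be read back, assuming the log is saved locally as checkpoint-3750/trainer_state.json (the filename and path are assumptions):

```python
# Minimal sketch: print the per-epoch evaluation rows recorded in the
# trainer state shown above. The local path is an assumption.
import json

with open("checkpoint-3750/trainer_state.json") as f:  # assumed path
    state = json.load(f)

# Rows containing "eval_accuracy" are the end-of-epoch evaluations; the
# remaining rows are the per-10-step training logs.
for row in state["log_history"]:
    if "eval_accuracy" in row:
        print(f"epoch {row['epoch']:.0f}: "
              f"accuracy={row['eval_accuracy']:.4f}  "
              f"f1={row['eval_f1']:.4f}  "
              f"loss={row['eval_loss']:.4f}")
```

For the values logged here this prints accuracy 0.7669, 0.7984 and 0.8379 for epochs 1, 2 and 3, i.e. evaluation accuracy and F1 improve each epoch while eval loss falls from 0.734 to 0.575.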
checkpoint-3750/training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2a868b38cbd73376b16ac8e3d8306db2a42a29f39b8f6d41ef6bd11bcdc6b19c
3
+ size 5304
config.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "activation_dropout": 0.0,
3
+ "adapter_attn_dim": null,
4
+ "adapter_kernel_size": 3,
5
+ "adapter_stride": 2,
6
+ "add_adapter": false,
7
+ "apply_spec_augment": true,
8
+ "architectures": [
9
+ "Wav2Vec2ForSequenceClassification"
10
+ ],
11
+ "attention_dropout": 0.1,
12
+ "bos_token_id": 1,
13
+ "classifier_proj_size": 256,
14
+ "codevector_dim": 256,
15
+ "contrastive_logits_temperature": 0.1,
16
+ "conv_bias": false,
17
+ "conv_dim": [
18
+ 512,
19
+ 512,
20
+ 512,
21
+ 512,
22
+ 512,
23
+ 512,
24
+ 512
25
+ ],
26
+ "conv_kernel": [
27
+ 10,
28
+ 3,
29
+ 3,
30
+ 3,
31
+ 3,
32
+ 2,
33
+ 2
34
+ ],
35
+ "conv_stride": [
36
+ 5,
37
+ 2,
38
+ 2,
39
+ 2,
40
+ 2,
41
+ 2,
42
+ 2
43
+ ],
44
+ "ctc_loss_reduction": "sum",
45
+ "ctc_zero_infinity": false,
46
+ "diversity_loss_weight": 0.1,
47
+ "do_stable_layer_norm": false,
48
+ "eos_token_id": 2,
49
+ "feat_extract_activation": "gelu",
50
+ "feat_extract_norm": "group",
51
+ "feat_proj_dropout": 0.1,
52
+ "feat_quantizer_dropout": 0.0,
53
+ "final_dropout": 0.0,
54
+ "freeze_feat_extract_train": true,
55
+ "hidden_act": "gelu",
56
+ "hidden_dropout": 0.1,
57
+ "hidden_size": 768,
58
+ "id2label": {
59
+ "0": "ANG",
60
+ "1": "CAL",
61
+ "2": "DIS",
62
+ "3": "FEA",
63
+ "4": "HAP",
64
+ "5": "NEU",
65
+ "6": "SAD",
66
+ "7": "SUR"
67
+ },
68
+ "initializer_range": 0.02,
69
+ "intermediate_size": 3072,
70
+ "label2id": {
71
+ "ANG": 0,
72
+ "CAL": 1,
73
+ "DIS": 2,
74
+ "FEA": 3,
75
+ "HAP": 4,
76
+ "NEU": 5,
77
+ "SAD": 6,
78
+ "SUR": 7
79
+ },
80
+ "layer_norm_eps": 1e-05,
81
+ "layerdrop": 0.0,
82
+ "mask_channel_length": 10,
83
+ "mask_channel_min_space": 1,
84
+ "mask_channel_other": 0.0,
85
+ "mask_channel_prob": 0.0,
86
+ "mask_channel_selection": "static",
87
+ "mask_feature_length": 10,
88
+ "mask_feature_min_masks": 0,
89
+ "mask_feature_prob": 0.0,
90
+ "mask_time_length": 10,
91
+ "mask_time_min_masks": 2,
92
+ "mask_time_min_space": 1,
93
+ "mask_time_other": 0.0,
94
+ "mask_time_prob": 0.05,
95
+ "mask_time_selection": "static",
96
+ "model_type": "wav2vec2",
97
+ "no_mask_channel_overlap": false,
98
+ "no_mask_time_overlap": false,
99
+ "num_adapter_layers": 3,
100
+ "num_attention_heads": 12,
101
+ "num_codevector_groups": 2,
102
+ "num_codevectors_per_group": 320,
103
+ "num_conv_pos_embedding_groups": 16,
104
+ "num_conv_pos_embeddings": 128,
105
+ "num_feat_extract_layers": 7,
106
+ "num_hidden_layers": 12,
107
+ "num_negatives": 100,
108
+ "output_hidden_size": 768,
109
+ "pad_token_id": 0,
110
+ "proj_codevector_dim": 256,
111
+ "tdnn_dilation": [
112
+ 1,
113
+ 2,
114
+ 3,
115
+ 1,
116
+ 1
117
+ ],
118
+ "tdnn_dim": [
119
+ 512,
120
+ 512,
121
+ 512,
122
+ 512,
123
+ 1500
124
+ ],
125
+ "tdnn_kernel": [
126
+ 5,
127
+ 3,
128
+ 3,
129
+ 1,
130
+ 1
131
+ ],
132
+ "torch_dtype": "float32",
133
+ "transformers_version": "4.50.0",
134
+ "use_weighted_layer_sum": false,
135
+ "vocab_size": 32,
136
+ "xvector_output_dim": 512
137
+ }
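This config.json describes a Wav2Vec2ForSequenceClassification model with eight emotion labels (ANG, CAL, DIS, FEA, HAP, NEU, SAD, SUR). A minimal sketch of reading it back through transformers, assuming a local copy of this repository in the current directory (the path is an assumption):

```python
# Minimal sketch: load the configuration above and inspect the label set.
# Loading from "." (a local copy of this repo) is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")
print(config.architectures)  # ['Wav2Vec2ForSequenceClassification']
print(config.id2label)       # {0: 'ANG', 1: 'CAL', ..., 7: 'SUR'}
```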
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a5ead8de09ac38fb8483538c96e79fef83b590194696a439ef18b4c95592951
3
+ size 378308536
preprocessor_config.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "do_normalize": true,
3
+ "feature_extractor_type": "Wav2Vec2FeatureExtractor",
4
+ "feature_size": 1,
5
+ "padding_side": "right",
6
+ "padding_value": 0.0,
7
+ "return_attention_mask": false,
8
+ "sampling_rate": 16000
9
+ }
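The preprocessor config pairs the classifier with a Wav2Vec2FeatureExtractor that expects mono 16 kHz input and normalizes it (do_normalize: true, no attention mask). A minimal end-to-end inference sketch under those settings; the local model directory and the silent placeholder waveform are assumptions, not part of the repository:

```python
# Minimal sketch: preprocess a 16 kHz waveform with the feature extractor
# configured above and classify it. "." (a local copy of this repo) and the
# silent one-second waveform are assumptions.
import torch
from transformers import Wav2Vec2FeatureExtractor, Wav2Vec2ForSequenceClassification

extractor = Wav2Vec2FeatureExtractor.from_pretrained(".")
model = Wav2Vec2ForSequenceClassification.from_pretrained(".").eval()

waveform = torch.zeros(16000).numpy()  # placeholder: 1 s of 16 kHz audio
inputs = extractor(waveform, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 8)
pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])   # one of ANG ... SUR
```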
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2a868b38cbd73376b16ac8e3d8306db2a42a29f39b8f6d41ef6bd11bcdc6b19c
3
+ size 5304