ErrorAI committed
Commit 79ce1c5 · verified · 1 Parent(s): 5462e4b

Training in progress, step 583, checkpoint

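The commit message indicates the checkpoint was saved at step 583. As a quick sanity check, a minimal sketch follows (local path assumed; "log_history" is the key the Hugging Face Trainer normally uses for the per-step records shown in the trainer_state.json diff below) for confirming the step, epoch, and final losses from the committed trainer_state.json:

import json
from pathlib import Path

# Assumed local path to the committed checkpoint directory.
state = json.loads(Path("last-checkpoint/trainer_state.json").read_text())

print("global_step:", state["global_step"])   # 583 in this commit
print("epoch:", state["epoch"])               # ~1.0004, i.e. just past one epoch
print("total_flos:", state["total_flos"])

# Last few logged records; fields mirror the entries added in the diff below.
for record in state.get("log_history", [])[-3:]:
    print({k: record.get(k) for k in ("step", "loss", "eval_loss", "learning_rate")})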
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ae7546a980f074238a59c12bab9523bb17951ce735d7a54063a9ea314d2f3319
+oid sha256:6fe08061c5914eeee60caadb0a66be0d9bd19e2c04f6f3da7fb912244d093aa1
 size 80013120
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:216647f10a8c19231879c5eb90464220f4e2c6838e4260057d6db279d81d387e
+oid sha256:8361cc4274487167bf6d981f160dc9caf51a634da2e59781d71dcc6274fcea5d
 size 41120084
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:05898e03ab9e23007808d29a30ba21f35e3303fdd8df2e629a0d9eb2881b9688
+oid sha256:15742b0303b779f7ba81698192a8dbd1ba4ab375f37877d7de83e6a31f9a7244
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c30be394b92ae35551f310cf3fc15464e1a7840a9e53503f3226bccd38c74e02
+oid sha256:4477778303bed47c1d65922d6e7649fd04e01c218f6911cbe8e084a58d87e771
 size 1064
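Each of the files above is stored as a Git LFS pointer (version / oid / size), and the diff only swaps the sha256 oid. A minimal sketch, assuming the blobs have been downloaded locally, of checking that a file matches the oid and size recorded in its pointer:

import hashlib
import re
from pathlib import Path

def parse_lfs_pointer(pointer_text: str) -> dict:
    # The pointer format is three "key value" lines: version, oid, size.
    fields = dict(re.findall(r"^(\w+) (.+)$", pointer_text, flags=re.M))
    return {"oid": fields["oid"].split(":", 1)[1], "size": int(fields["size"])}

def verify_blob(pointer_text: str, blob_path: str) -> bool:
    # True if the downloaded blob's size and sha256 digest match the pointer.
    expected = parse_lfs_pointer(pointer_text)
    data = Path(blob_path).read_bytes()
    return len(data) == expected["size"] and hashlib.sha256(data).hexdigest() == expected["oid"]

# Hypothetical usage with the new adapter_model.safetensors pointer from this commit.
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:6fe08061c5914eeee60caadb0a66be0d9bd19e2c04f6f3da7fb912244d093aa1
size 80013120"""
print(verify_blob(pointer, "last-checkpoint/adapter_model.safetensors"))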
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.7516087516087516,
+  "epoch": 1.0004290004290004,
   "eval_steps": 500,
-  "global_step": 438,
+  "global_step": 583,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -3073,6 +3073,1029 @@
   "learning_rate": 1.4740874490528395e-05,
   "loss": 0.9399,
   "step": 438
3076
+ },
3077
+ {
3078
+ "epoch": 0.7533247533247533,
3079
+ "grad_norm": 0.8010863661766052,
3080
+ "learning_rate": 1.4548708510022824e-05,
3081
+ "loss": 0.908,
3082
+ "step": 439
3083
+ },
3084
+ {
3085
+ "epoch": 0.7550407550407551,
3086
+ "grad_norm": 0.8265634179115295,
3087
+ "learning_rate": 1.435758983905955e-05,
3088
+ "loss": 0.7645,
3089
+ "step": 440
3090
+ },
3091
+ {
3092
+ "epoch": 0.7567567567567568,
3093
+ "grad_norm": 0.9089340567588806,
3094
+ "learning_rate": 1.4167524123706743e-05,
3095
+ "loss": 0.9767,
3096
+ "step": 441
3097
+ },
3098
+ {
3099
+ "epoch": 0.7584727584727585,
3100
+ "grad_norm": 0.8695305585861206,
3101
+ "learning_rate": 1.397851697892592e-05,
3102
+ "loss": 0.9398,
3103
+ "step": 442
3104
+ },
3105
+ {
3106
+ "epoch": 0.7601887601887601,
3107
+ "grad_norm": 0.8917907476425171,
3108
+ "learning_rate": 1.3790573988406074e-05,
3109
+ "loss": 0.8704,
3110
+ "step": 443
3111
+ },
3112
+ {
3113
+ "epoch": 0.7619047619047619,
3114
+ "grad_norm": 0.9949392676353455,
3115
+ "learning_rate": 1.3603700704398741e-05,
3116
+ "loss": 0.9559,
3117
+ "step": 444
3118
+ },
3119
+ {
3120
+ "epoch": 0.7636207636207636,
3121
+ "grad_norm": 0.997254490852356,
3122
+ "learning_rate": 1.3417902647553948e-05,
3123
+ "loss": 0.9587,
3124
+ "step": 445
3125
+ },
3126
+ {
3127
+ "epoch": 0.7653367653367653,
3128
+ "grad_norm": 0.9855479001998901,
3129
+ "learning_rate": 1.3233185306757145e-05,
3130
+ "loss": 0.9924,
3131
+ "step": 446
3132
+ },
3133
+ {
3134
+ "epoch": 0.767052767052767,
3135
+ "grad_norm": 1.1372579336166382,
3136
+ "learning_rate": 1.3049554138967051e-05,
3137
+ "loss": 0.9603,
3138
+ "step": 447
3139
+ },
3140
+ {
3141
+ "epoch": 0.7687687687687688,
3142
+ "grad_norm": 1.082021713256836,
3143
+ "learning_rate": 1.2867014569054387e-05,
3144
+ "loss": 0.9069,
3145
+ "step": 448
3146
+ },
3147
+ {
3148
+ "epoch": 0.7704847704847705,
3149
+ "grad_norm": 1.092729091644287,
3150
+ "learning_rate": 1.2685571989641698e-05,
3151
+ "loss": 0.9393,
3152
+ "step": 449
3153
+ },
3154
+ {
3155
+ "epoch": 0.7722007722007722,
3156
+ "grad_norm": 1.3336204290390015,
3157
+ "learning_rate": 1.2505231760943998e-05,
3158
+ "loss": 1.0138,
3159
+ "step": 450
3160
+ },
3161
+ {
3162
+ "epoch": 0.7739167739167739,
3163
+ "grad_norm": 0.36281949281692505,
3164
+ "learning_rate": 1.2325999210610423e-05,
3165
+ "loss": 1.0211,
3166
+ "step": 451
3167
+ },
3168
+ {
3169
+ "epoch": 0.7756327756327757,
3170
+ "grad_norm": 0.41349583864212036,
3171
+ "learning_rate": 1.214787963356685e-05,
3172
+ "loss": 1.1453,
3173
+ "step": 452
3174
+ },
3175
+ {
3176
+ "epoch": 0.7773487773487774,
3177
+ "grad_norm": 0.4295629560947418,
3178
+ "learning_rate": 1.1970878291859423e-05,
3179
+ "loss": 1.0367,
3180
+ "step": 453
3181
+ },
3182
+ {
3183
+ "epoch": 0.7790647790647791,
3184
+ "grad_norm": 0.48412784934043884,
3185
+ "learning_rate": 1.1795000414499186e-05,
3186
+ "loss": 1.0665,
3187
+ "step": 454
3188
+ },
3189
+ {
3190
+ "epoch": 0.7807807807807807,
3191
+ "grad_norm": 0.5144652724266052,
3192
+ "learning_rate": 1.1620251197307535e-05,
3193
+ "loss": 1.0156,
3194
+ "step": 455
3195
+ },
3196
+ {
3197
+ "epoch": 0.7824967824967825,
3198
+ "grad_norm": 0.5471140146255493,
3199
+ "learning_rate": 1.1446635802762795e-05,
3200
+ "loss": 1.0388,
3201
+ "step": 456
3202
+ },
3203
+ {
3204
+ "epoch": 0.7842127842127842,
3205
+ "grad_norm": 0.5129085779190063,
3206
+ "learning_rate": 1.1274159359847591e-05,
3207
+ "loss": 1.0899,
3208
+ "step": 457
3209
+ },
3210
+ {
3211
+ "epoch": 0.7859287859287859,
3212
+ "grad_norm": 0.54698646068573,
3213
+ "learning_rate": 1.110282696389745e-05,
3214
+ "loss": 1.0068,
3215
+ "step": 458
3216
+ },
3217
+ {
3218
+ "epoch": 0.7876447876447876,
3219
+ "grad_norm": 0.5465560555458069,
3220
+ "learning_rate": 1.0932643676450205e-05,
3221
+ "loss": 0.9523,
3222
+ "step": 459
3223
+ },
3224
+ {
3225
+ "epoch": 0.7893607893607893,
3226
+ "grad_norm": 0.596990704536438,
3227
+ "learning_rate": 1.07636145250965e-05,
3228
+ "loss": 1.0007,
3229
+ "step": 460
3230
+ },
3231
+ {
3232
+ "epoch": 0.7910767910767911,
3233
+ "grad_norm": 0.5810505747795105,
3234
+ "learning_rate": 1.0595744503331207e-05,
3235
+ "loss": 1.011,
3236
+ "step": 461
3237
+ },
3238
+ {
3239
+ "epoch": 0.7927927927927928,
3240
+ "grad_norm": 0.5773686170578003,
3241
+ "learning_rate": 1.0429038570405975e-05,
3242
+ "loss": 1.0056,
3243
+ "step": 462
3244
+ },
3245
+ {
3246
+ "epoch": 0.7945087945087945,
3247
+ "grad_norm": 0.592972457408905,
3248
+ "learning_rate": 1.0263501651182706e-05,
3249
+ "loss": 0.9354,
3250
+ "step": 463
3251
+ },
3252
+ {
3253
+ "epoch": 0.7962247962247962,
3254
+ "grad_norm": 0.5526876449584961,
3255
+ "learning_rate": 1.0099138635988026e-05,
3256
+ "loss": 0.9095,
3257
+ "step": 464
3258
+ },
3259
+ {
3260
+ "epoch": 0.797940797940798,
3261
+ "grad_norm": 0.5966104865074158,
3262
+ "learning_rate": 9.935954380468859e-06,
3263
+ "loss": 0.9298,
3264
+ "step": 465
3265
+ },
3266
+ {
3267
+ "epoch": 0.7996567996567997,
3268
+ "grad_norm": 0.6316750049591064,
3269
+ "learning_rate": 9.773953705448952e-06,
3270
+ "loss": 0.9669,
3271
+ "step": 466
3272
+ },
3273
+ {
3274
+ "epoch": 0.8013728013728014,
3275
+ "grad_norm": 0.6236256957054138,
3276
+ "learning_rate": 9.613141396786462e-06,
3277
+ "loss": 0.8742,
3278
+ "step": 467
3279
+ },
3280
+ {
3281
+ "epoch": 0.803088803088803,
3282
+ "grad_norm": 0.6054806709289551,
3283
+ "learning_rate": 9.453522205232612e-06,
3284
+ "loss": 1.0261,
3285
+ "step": 468
3286
+ },
3287
+ {
3288
+ "epoch": 0.8048048048048048,
3289
+ "grad_norm": 0.6103044152259827,
3290
+ "learning_rate": 9.295100846291238e-06,
3291
+ "loss": 0.919,
3292
+ "step": 469
3293
+ },
3294
+ {
3295
+ "epoch": 0.8065208065208065,
3296
+ "grad_norm": 0.6109324097633362,
3297
+ "learning_rate": 9.137882000079611e-06,
3298
+ "loss": 0.908,
3299
+ "step": 470
3300
+ },
3301
+ {
3302
+ "epoch": 0.8082368082368082,
3303
+ "grad_norm": 0.6719843149185181,
3304
+ "learning_rate": 8.981870311190099e-06,
3305
+ "loss": 0.9301,
3306
+ "step": 471
3307
+ },
3308
+ {
3309
+ "epoch": 0.8099528099528099,
3310
+ "grad_norm": 0.6436265707015991,
3311
+ "learning_rate": 8.827070388552976e-06,
3312
+ "loss": 0.9174,
3313
+ "step": 472
3314
+ },
3315
+ {
3316
+ "epoch": 0.8116688116688117,
3317
+ "grad_norm": 0.6962968111038208,
3318
+ "learning_rate": 8.673486805300263e-06,
3319
+ "loss": 0.8683,
3320
+ "step": 473
3321
+ },
3322
+ {
3323
+ "epoch": 0.8133848133848134,
3324
+ "grad_norm": 0.6864432096481323,
3325
+ "learning_rate": 8.521124098630601e-06,
3326
+ "loss": 0.8904,
3327
+ "step": 474
3328
+ },
3329
+ {
3330
+ "epoch": 0.8151008151008151,
3331
+ "grad_norm": 0.67904132604599,
3332
+ "learning_rate": 8.369986769675269e-06,
3333
+ "loss": 0.9209,
3334
+ "step": 475
3335
+ },
3336
+ {
3337
+ "epoch": 0.8168168168168168,
3338
+ "grad_norm": 0.6990142464637756,
3339
+ "learning_rate": 8.220079283365156e-06,
3340
+ "loss": 0.7947,
3341
+ "step": 476
3342
+ },
3343
+ {
3344
+ "epoch": 0.8185328185328186,
3345
+ "grad_norm": 0.7092682123184204,
3346
+ "learning_rate": 8.071406068298926e-06,
3347
+ "loss": 0.9795,
3348
+ "step": 477
3349
+ },
3350
+ {
3351
+ "epoch": 0.8202488202488203,
3352
+ "grad_norm": 0.7615474462509155,
3353
+ "learning_rate": 7.923971516612067e-06,
3354
+ "loss": 0.9245,
3355
+ "step": 478
3356
+ },
3357
+ {
3358
+ "epoch": 0.821964821964822,
3359
+ "grad_norm": 0.7361223697662354,
3360
+ "learning_rate": 7.77777998384726e-06,
3361
+ "loss": 0.8687,
3362
+ "step": 479
3363
+ },
3364
+ {
3365
+ "epoch": 0.8236808236808236,
3366
+ "grad_norm": 0.7140629291534424,
3367
+ "learning_rate": 7.63283578882566e-06,
3368
+ "loss": 0.9212,
3369
+ "step": 480
3370
+ },
3371
+ {
3372
+ "epoch": 0.8253968253968254,
3373
+ "grad_norm": 0.7426627278327942,
3374
+ "learning_rate": 7.489143213519301e-06,
3375
+ "loss": 0.9459,
3376
+ "step": 481
3377
+ },
3378
+ {
3379
+ "epoch": 0.8271128271128271,
3380
+ "grad_norm": 0.7149568200111389,
3381
+ "learning_rate": 7.346706502924572e-06,
3382
+ "loss": 0.8122,
3383
+ "step": 482
3384
+ },
3385
+ {
3386
+ "epoch": 0.8288288288288288,
3387
+ "grad_norm": 0.7679104804992676,
3388
+ "learning_rate": 7.205529864936883e-06,
3389
+ "loss": 0.9904,
3390
+ "step": 483
3391
+ },
3392
+ {
3393
+ "epoch": 0.8305448305448305,
3394
+ "grad_norm": 0.8156236410140991,
3395
+ "learning_rate": 7.065617470226299e-06,
3396
+ "loss": 0.9974,
3397
+ "step": 484
3398
+ },
3399
+ {
3400
+ "epoch": 0.8322608322608323,
3401
+ "grad_norm": 0.7766450643539429,
3402
+ "learning_rate": 6.926973452114338e-06,
3403
+ "loss": 0.8913,
3404
+ "step": 485
3405
+ },
3406
+ {
3407
+ "epoch": 0.833976833976834,
3408
+ "grad_norm": 0.8424599170684814,
3409
+ "learning_rate": 6.78960190645187e-06,
3410
+ "loss": 0.918,
3411
+ "step": 486
3412
+ },
3413
+ {
3414
+ "epoch": 0.8356928356928357,
3415
+ "grad_norm": 0.869002103805542,
3416
+ "learning_rate": 6.653506891498118e-06,
3417
+ "loss": 0.9658,
3418
+ "step": 487
3419
+ },
3420
+ {
3421
+ "epoch": 0.8374088374088374,
3422
+ "grad_norm": 0.8452862501144409,
3423
+ "learning_rate": 6.518692427800765e-06,
3424
+ "loss": 0.8631,
3425
+ "step": 488
3426
+ },
3427
+ {
3428
+ "epoch": 0.8391248391248392,
3429
+ "grad_norm": 0.8179042339324951,
3430
+ "learning_rate": 6.3851624980771905e-06,
3431
+ "loss": 0.922,
3432
+ "step": 489
3433
+ },
3434
+ {
3435
+ "epoch": 0.8408408408408409,
3436
+ "grad_norm": 0.8862358331680298,
3437
+ "learning_rate": 6.2529210470967605e-06,
3438
+ "loss": 0.878,
3439
+ "step": 490
3440
+ },
3441
+ {
3442
+ "epoch": 0.8425568425568426,
3443
+ "grad_norm": 0.8513782024383545,
3444
+ "learning_rate": 6.121971981564367e-06,
3445
+ "loss": 0.8882,
3446
+ "step": 491
3447
+ },
3448
+ {
3449
+ "epoch": 0.8442728442728443,
3450
+ "grad_norm": 0.8722100257873535,
3451
+ "learning_rate": 5.992319170004967e-06,
3452
+ "loss": 0.8846,
3453
+ "step": 492
3454
+ },
3455
+ {
3456
+ "epoch": 0.8459888459888459,
3457
+ "grad_norm": 0.9226117730140686,
3458
+ "learning_rate": 5.863966442649327e-06,
3459
+ "loss": 0.9863,
3460
+ "step": 493
3461
+ },
3462
+ {
3463
+ "epoch": 0.8477048477048477,
3464
+ "grad_norm": 0.945509672164917,
3465
+ "learning_rate": 5.7369175913208e-06,
3466
+ "loss": 0.9585,
3467
+ "step": 494
3468
+ },
3469
+ {
3470
+ "epoch": 0.8494208494208494,
3471
+ "grad_norm": 1.010611891746521,
3472
+ "learning_rate": 5.611176369323412e-06,
3473
+ "loss": 1.0634,
3474
+ "step": 495
3475
+ },
3476
+ {
3477
+ "epoch": 0.8511368511368511,
3478
+ "grad_norm": 1.1164469718933105,
3479
+ "learning_rate": 5.4867464913308965e-06,
3480
+ "loss": 0.9432,
3481
+ "step": 496
3482
+ },
3483
+ {
3484
+ "epoch": 0.8528528528528528,
3485
+ "grad_norm": 1.1534262895584106,
3486
+ "learning_rate": 5.363631633277006e-06,
3487
+ "loss": 1.0619,
3488
+ "step": 497
3489
+ },
3490
+ {
3491
+ "epoch": 0.8545688545688546,
3492
+ "grad_norm": 1.2209608554840088,
3493
+ "learning_rate": 5.241835432246889e-06,
3494
+ "loss": 1.0216,
3495
+ "step": 498
3496
+ },
3497
+ {
3498
+ "epoch": 0.8562848562848563,
3499
+ "grad_norm": 1.2853692770004272,
3500
+ "learning_rate": 5.121361486369625e-06,
3501
+ "loss": 0.9275,
3502
+ "step": 499
3503
+ },
3504
+ {
3505
+ "epoch": 0.858000858000858,
3506
+ "grad_norm": 1.6175917387008667,
3507
+ "learning_rate": 5.002213354711987e-06,
3508
+ "loss": 0.9393,
3509
+ "step": 500
3510
+ },
3511
+ {
3512
+ "epoch": 0.8597168597168597,
3513
+ "grad_norm": 0.3998446762561798,
3514
+ "learning_rate": 4.884394557173249e-06,
3515
+ "loss": 1.054,
3516
+ "step": 501
3517
+ },
3518
+ {
3519
+ "epoch": 0.8614328614328615,
3520
+ "grad_norm": 0.40906164050102234,
3521
+ "learning_rate": 4.7679085743812306e-06,
3522
+ "loss": 1.1003,
3523
+ "step": 502
3524
+ },
3525
+ {
3526
+ "epoch": 0.8631488631488632,
3527
+ "grad_norm": 0.44782406091690063,
3528
+ "learning_rate": 4.652758847589416e-06,
3529
+ "loss": 0.9262,
3530
+ "step": 503
3531
+ },
3532
+ {
3533
+ "epoch": 0.8648648648648649,
3534
+ "grad_norm": 0.443342387676239,
3535
+ "learning_rate": 4.538948778575375e-06,
3536
+ "loss": 0.9462,
3537
+ "step": 504
3538
+ },
3539
+ {
3540
+ "epoch": 0.8665808665808665,
3541
+ "grad_norm": 0.5111348628997803,
3542
+ "learning_rate": 4.426481729540205e-06,
3543
+ "loss": 1.0407,
3544
+ "step": 505
3545
+ },
3546
+ {
3547
+ "epoch": 0.8682968682968683,
3548
+ "grad_norm": 0.5123763084411621,
3549
+ "learning_rate": 4.315361023009229e-06,
3550
+ "loss": 1.0065,
3551
+ "step": 506
3552
+ },
3553
+ {
3554
+ "epoch": 0.87001287001287,
3555
+ "grad_norm": 0.5139104127883911,
3556
+ "learning_rate": 4.205589941733834e-06,
3557
+ "loss": 0.9588,
3558
+ "step": 507
3559
+ },
3560
+ {
3561
+ "epoch": 0.8717288717288717,
3562
+ "grad_norm": 0.5282160639762878,
3563
+ "learning_rate": 4.0971717285944954e-06,
3564
+ "loss": 0.979,
3565
+ "step": 508
3566
+ },
3567
+ {
3568
+ "epoch": 0.8734448734448734,
3569
+ "grad_norm": 0.5415769219398499,
3570
+ "learning_rate": 3.990109586504965e-06,
3571
+ "loss": 1.0048,
3572
+ "step": 509
3573
+ },
3574
+ {
3575
+ "epoch": 0.8751608751608752,
3576
+ "grad_norm": 0.5474618673324585,
3577
+ "learning_rate": 3.88440667831767e-06,
3578
+ "loss": 0.9979,
3579
+ "step": 510
3580
+ },
3581
+ {
3582
+ "epoch": 0.8768768768768769,
3583
+ "grad_norm": 0.5621387958526611,
3584
+ "learning_rate": 3.7800661267302417e-06,
3585
+ "loss": 0.9676,
3586
+ "step": 511
3587
+ },
3588
+ {
3589
+ "epoch": 0.8785928785928786,
3590
+ "grad_norm": 0.5915668606758118,
3591
+ "learning_rate": 3.6770910141932956e-06,
3592
+ "loss": 0.8742,
3593
+ "step": 512
3594
+ },
3595
+ {
3596
+ "epoch": 0.8803088803088803,
3597
+ "grad_norm": 0.5629801750183105,
3598
+ "learning_rate": 3.5754843828193716e-06,
3599
+ "loss": 0.99,
3600
+ "step": 513
3601
+ },
3602
+ {
3603
+ "epoch": 0.882024882024882,
3604
+ "grad_norm": 0.574283242225647,
3605
+ "learning_rate": 3.4752492342930377e-06,
3606
+ "loss": 0.9522,
3607
+ "step": 514
3608
+ },
3609
+ {
3610
+ "epoch": 0.8837408837408838,
3611
+ "grad_norm": 0.6046752333641052,
3612
+ "learning_rate": 3.376388529782215e-06,
3613
+ "loss": 1.0706,
3614
+ "step": 515
3615
+ },
3616
+ {
3617
+ "epoch": 0.8854568854568855,
3618
+ "grad_norm": 0.653619110584259,
3619
+ "learning_rate": 3.2789051898507228e-06,
3620
+ "loss": 0.9832,
3621
+ "step": 516
3622
+ },
3623
+ {
3624
+ "epoch": 0.8871728871728872,
3625
+ "grad_norm": 0.5925785303115845,
3626
+ "learning_rate": 3.1828020943719894e-06,
3627
+ "loss": 0.9472,
3628
+ "step": 517
3629
+ },
3630
+ {
3631
+ "epoch": 0.8888888888888888,
3632
+ "grad_norm": 0.6344242095947266,
3633
+ "learning_rate": 3.088082082443966e-06,
3634
+ "loss": 0.9807,
3635
+ "step": 518
3636
+ },
3637
+ {
3638
+ "epoch": 0.8906048906048906,
3639
+ "grad_norm": 0.6220735907554626,
3640
+ "learning_rate": 2.9947479523052548e-06,
3641
+ "loss": 1.0254,
3642
+ "step": 519
3643
+ },
3644
+ {
3645
+ "epoch": 0.8923208923208923,
3646
+ "grad_norm": 0.6702473759651184,
3647
+ "learning_rate": 2.9028024612524297e-06,
3648
+ "loss": 0.9763,
3649
+ "step": 520
3650
+ },
3651
+ {
3652
+ "epoch": 0.894036894036894,
3653
+ "grad_norm": 0.6600465178489685,
3654
+ "learning_rate": 2.8122483255586252e-06,
3655
+ "loss": 0.9891,
3656
+ "step": 521
3657
+ },
3658
+ {
3659
+ "epoch": 0.8957528957528957,
3660
+ "grad_norm": 0.6940257549285889,
3661
+ "learning_rate": 2.723088220393244e-06,
3662
+ "loss": 0.9322,
3663
+ "step": 522
3664
+ },
3665
+ {
3666
+ "epoch": 0.8974688974688975,
3667
+ "grad_norm": 0.6549478769302368,
3668
+ "learning_rate": 2.6353247797429535e-06,
3669
+ "loss": 0.904,
3670
+ "step": 523
3671
+ },
3672
+ {
3673
+ "epoch": 0.8991848991848992,
3674
+ "grad_norm": 0.6638982892036438,
3675
+ "learning_rate": 2.5489605963338435e-06,
3676
+ "loss": 0.7746,
3677
+ "step": 524
3678
+ },
3679
+ {
3680
+ "epoch": 0.9009009009009009,
3681
+ "grad_norm": 0.7171722650527954,
3682
+ "learning_rate": 2.463998221554875e-06,
3683
+ "loss": 0.9874,
3684
+ "step": 525
3685
+ },
3686
+ {
3687
+ "epoch": 0.9026169026169026,
3688
+ "grad_norm": 0.7124220728874207,
3689
+ "learning_rate": 2.3804401653824693e-06,
3690
+ "loss": 0.8829,
3691
+ "step": 526
3692
+ },
3693
+ {
3694
+ "epoch": 0.9043329043329044,
3695
+ "grad_norm": 0.7816304564476013,
3696
+ "learning_rate": 2.2982888963063774e-06,
3697
+ "loss": 0.9739,
3698
+ "step": 527
3699
+ },
3700
+ {
3701
+ "epoch": 0.9060489060489061,
3702
+ "grad_norm": 0.7116017937660217,
3703
+ "learning_rate": 2.2175468412567403e-06,
3704
+ "loss": 0.9334,
3705
+ "step": 528
3706
+ },
3707
+ {
3708
+ "epoch": 0.9077649077649078,
3709
+ "grad_norm": 0.7669327259063721,
3710
+ "learning_rate": 2.13821638553241e-06,
3711
+ "loss": 0.82,
3712
+ "step": 529
3713
+ },
3714
+ {
3715
+ "epoch": 0.9094809094809094,
3716
+ "grad_norm": 0.7113355994224548,
3717
+ "learning_rate": 2.060299872730459e-06,
3718
+ "loss": 0.8854,
3719
+ "step": 530
3720
+ },
3721
+ {
3722
+ "epoch": 0.9111969111969112,
3723
+ "grad_norm": 0.7398207187652588,
3724
+ "learning_rate": 1.9837996046769837e-06,
3725
+ "loss": 0.9569,
3726
+ "step": 531
3727
+ },
3728
+ {
3729
+ "epoch": 0.9129129129129129,
3730
+ "grad_norm": 0.7638108730316162,
3731
+ "learning_rate": 1.908717841359048e-06,
3732
+ "loss": 0.9176,
3733
+ "step": 532
3734
+ },
3735
+ {
3736
+ "epoch": 0.9146289146289146,
3737
+ "grad_norm": 0.7596091628074646,
3738
+ "learning_rate": 1.8350568008579705e-06,
3739
+ "loss": 0.834,
3740
+ "step": 533
3741
+ },
3742
+ {
3743
+ "epoch": 0.9163449163449163,
3744
+ "grad_norm": 0.7753133773803711,
3745
+ "learning_rate": 1.762818659283777e-06,
3746
+ "loss": 0.9331,
3747
+ "step": 534
3748
+ },
3749
+ {
3750
+ "epoch": 0.918060918060918,
3751
+ "grad_norm": 0.7847964763641357,
3752
+ "learning_rate": 1.692005550710901e-06,
3753
+ "loss": 0.9003,
3754
+ "step": 535
3755
+ },
3756
+ {
3757
+ "epoch": 0.9197769197769198,
3758
+ "grad_norm": 0.8318800330162048,
3759
+ "learning_rate": 1.6226195671151523e-06,
3760
+ "loss": 1.0136,
3761
+ "step": 536
3762
+ },
3763
+ {
3764
+ "epoch": 0.9214929214929215,
3765
+ "grad_norm": 0.8279291391372681,
3766
+ "learning_rate": 1.5546627583119088e-06,
3767
+ "loss": 0.94,
3768
+ "step": 537
3769
+ },
3770
+ {
3771
+ "epoch": 0.9232089232089232,
3772
+ "grad_norm": 0.8447549939155579,
3773
+ "learning_rate": 1.4881371318955894e-06,
3774
+ "loss": 0.9431,
3775
+ "step": 538
3776
+ },
3777
+ {
3778
+ "epoch": 0.924924924924925,
3779
+ "grad_norm": 0.9098441004753113,
3780
+ "learning_rate": 1.4230446531803e-06,
3781
+ "loss": 1.0035,
3782
+ "step": 539
3783
+ },
3784
+ {
3785
+ "epoch": 0.9266409266409267,
3786
+ "grad_norm": 0.8998938202857971,
3787
+ "learning_rate": 1.3593872451417966e-06,
3788
+ "loss": 0.8762,
3789
+ "step": 540
3790
+ },
3791
+ {
3792
+ "epoch": 0.9283569283569284,
3793
+ "grad_norm": 0.9320370554924011,
3794
+ "learning_rate": 1.2971667883606652e-06,
3795
+ "loss": 0.9394,
3796
+ "step": 541
3797
+ },
3798
+ {
3799
+ "epoch": 0.9300729300729301,
3800
+ "grad_norm": 0.9124845862388611,
3801
+ "learning_rate": 1.2363851209667932e-06,
3802
+ "loss": 0.9666,
3803
+ "step": 542
3804
+ },
3805
+ {
3806
+ "epoch": 0.9317889317889317,
3807
+ "grad_norm": 1.0037184953689575,
3808
+ "learning_rate": 1.1770440385850401e-06,
3809
+ "loss": 0.9045,
3810
+ "step": 543
3811
+ },
3812
+ {
3813
+ "epoch": 0.9335049335049335,
3814
+ "grad_norm": 0.9584035277366638,
3815
+ "learning_rate": 1.1191452942821922e-06,
3816
+ "loss": 0.9868,
3817
+ "step": 544
3818
+ },
3819
+ {
3820
+ "epoch": 0.9352209352209352,
3821
+ "grad_norm": 0.9694145321846008,
3822
+ "learning_rate": 1.062690598515187e-06,
3823
+ "loss": 0.8471,
3824
+ "step": 545
3825
+ },
3826
+ {
3827
+ "epoch": 0.9369369369369369,
3828
+ "grad_norm": 0.9815269112586975,
3829
+ "learning_rate": 1.0076816190805749e-06,
3830
+ "loss": 0.9174,
3831
+ "step": 546
3832
+ },
3833
+ {
3834
+ "epoch": 0.9386529386529386,
3835
+ "grad_norm": 1.0152051448822021,
3836
+ "learning_rate": 9.54119981065238e-07,
3837
+ "loss": 0.9697,
3838
+ "step": 547
3839
+ },
3840
+ {
3841
+ "epoch": 0.9403689403689404,
3842
+ "grad_norm": 1.1063885688781738,
3843
+ "learning_rate": 9.020072667984159e-07,
3844
+ "loss": 0.9757,
3845
+ "step": 548
3846
+ },
3847
+ {
3848
+ "epoch": 0.9420849420849421,
3849
+ "grad_norm": 1.1374117136001587,
3850
+ "learning_rate": 8.513450158049108e-07,
3851
+ "loss": 0.958,
3852
+ "step": 549
3853
+ },
3854
+ {
3855
+ "epoch": 0.9438009438009438,
3856
+ "grad_norm": 1.349505066871643,
3857
+ "learning_rate": 8.021347247596511e-07,
3858
+ "loss": 0.9819,
3859
+ "step": 550
3860
+ },
3861
+ {
3862
+ "epoch": 0.9455169455169455,
3863
+ "grad_norm": 0.3984629213809967,
3864
+ "learning_rate": 7.543778474434438e-07,
3865
+ "loss": 1.0866,
3866
+ "step": 551
3867
+ },
3868
+ {
3869
+ "epoch": 0.9472329472329473,
3870
+ "grad_norm": 0.4379277229309082,
3871
+ "learning_rate": 7.080757947000582e-07,
3872
+ "loss": 1.0593,
3873
+ "step": 552
3874
+ },
3875
+ {
3876
+ "epoch": 0.948948948948949,
3877
+ "grad_norm": 0.43731218576431274,
3878
+ "learning_rate": 6.632299343945103e-07,
3879
+ "loss": 1.0389,
3880
+ "step": 553
3881
+ },
3882
+ {
3883
+ "epoch": 0.9506649506649507,
3884
+ "grad_norm": 0.5112792253494263,
3885
+ "learning_rate": 6.198415913726718e-07,
3886
+ "loss": 1.032,
3887
+ "step": 554
3888
+ },
3889
+ {
3890
+ "epoch": 0.9523809523809523,
3891
+ "grad_norm": 0.5296621322631836,
3892
+ "learning_rate": 5.779120474221522e-07,
3893
+ "loss": 0.9639,
3894
+ "step": 555
3895
+ },
3896
+ {
3897
+ "epoch": 0.954096954096954,
3898
+ "grad_norm": 0.5207895040512085,
3899
+ "learning_rate": 5.374425412343898e-07,
3900
+ "loss": 0.9448,
3901
+ "step": 556
3902
+ },
3903
+ {
3904
+ "epoch": 0.9558129558129558,
3905
+ "grad_norm": 0.5492768883705139,
3906
+ "learning_rate": 4.984342683680809e-07,
3907
+ "loss": 0.8241,
3908
+ "step": 557
3909
+ },
3910
+ {
3911
+ "epoch": 0.9575289575289575,
3912
+ "grad_norm": 0.5462602972984314,
3913
+ "learning_rate": 4.608883812138698e-07,
3914
+ "loss": 0.9968,
3915
+ "step": 558
3916
+ },
3917
+ {
3918
+ "epoch": 0.9592449592449592,
3919
+ "grad_norm": 0.5365292429924011,
3920
+ "learning_rate": 4.2480598896028624e-07,
3921
+ "loss": 0.931,
3922
+ "step": 559
3923
+ },
3924
+ {
3925
+ "epoch": 0.960960960960961,
3926
+ "grad_norm": 0.5722803473472595,
3927
+ "learning_rate": 3.9018815756098893e-07,
3928
+ "loss": 0.931,
3929
+ "step": 560
3930
+ },
3931
+ {
3932
+ "epoch": 0.9626769626769627,
3933
+ "grad_norm": 0.6019049286842346,
3934
+ "learning_rate": 3.570359097032516e-07,
3935
+ "loss": 0.8993,
3936
+ "step": 561
3937
+ },
3938
+ {
3939
+ "epoch": 0.9643929643929644,
3940
+ "grad_norm": 0.5766465067863464,
3941
+ "learning_rate": 3.2535022477779844e-07,
3942
+ "loss": 0.8627,
3943
+ "step": 562
3944
+ },
3945
+ {
3946
+ "epoch": 0.9661089661089661,
3947
+ "grad_norm": 0.6412242650985718,
3948
+ "learning_rate": 2.9513203884981577e-07,
3949
+ "loss": 0.9879,
3950
+ "step": 563
3951
+ },
3952
+ {
3953
+ "epoch": 0.9678249678249679,
3954
+ "grad_norm": 0.6858254075050354,
3955
+ "learning_rate": 2.663822446313469e-07,
3956
+ "loss": 0.9444,
3957
+ "step": 564
3958
+ },
3959
+ {
3960
+ "epoch": 0.9695409695409696,
3961
+ "grad_norm": 0.6252729296684265,
3962
+ "learning_rate": 2.3910169145487936e-07,
3963
+ "loss": 0.7656,
3964
+ "step": 565
3965
+ },
3966
+ {
3967
+ "epoch": 0.9712569712569713,
3968
+ "grad_norm": 0.6497438549995422,
3969
+ "learning_rate": 2.1329118524827662e-07,
3970
+ "loss": 0.8193,
3971
+ "step": 566
3972
+ },
3973
+ {
3974
+ "epoch": 0.972972972972973,
3975
+ "grad_norm": 0.6830260753631592,
3976
+ "learning_rate": 1.889514885109689e-07,
3977
+ "loss": 0.9843,
3978
+ "step": 567
3979
+ },
3980
+ {
3981
+ "epoch": 0.9746889746889746,
3982
+ "grad_norm": 0.7326545119285583,
3983
+ "learning_rate": 1.6608332029141582e-07,
3984
+ "loss": 0.8775,
3985
+ "step": 568
3986
+ },
3987
+ {
3988
+ "epoch": 0.9764049764049764,
3989
+ "grad_norm": 0.6735588908195496,
3990
+ "learning_rate": 1.4468735616587904e-07,
3991
+ "loss": 0.8923,
3992
+ "step": 569
3993
+ },
3994
+ {
3995
+ "epoch": 0.9781209781209781,
3996
+ "grad_norm": 0.7416090369224548,
3997
+ "learning_rate": 1.2476422821844913e-07,
3998
+ "loss": 0.8945,
3999
+ "step": 570
4000
+ },
4001
+ {
4002
+ "epoch": 0.9798369798369798,
4003
+ "grad_norm": 0.7659640312194824,
4004
+ "learning_rate": 1.0631452502237737e-07,
4005
+ "loss": 0.9725,
4006
+ "step": 571
4007
+ },
4008
+ {
4009
+ "epoch": 0.9815529815529815,
4010
+ "grad_norm": 0.8047699928283691,
4011
+ "learning_rate": 8.933879162270065e-08,
4012
+ "loss": 0.8532,
4013
+ "step": 572
4014
+ },
4015
+ {
4016
+ "epoch": 0.9832689832689833,
4017
+ "grad_norm": 0.7674829959869385,
4018
+ "learning_rate": 7.383752952010992e-08,
4019
+ "loss": 0.9066,
4020
+ "step": 573
4021
+ },
4022
+ {
4023
+ "epoch": 0.984984984984985,
4024
+ "grad_norm": 0.7904850840568542,
4025
+ "learning_rate": 5.981119665617319e-08,
4026
+ "loss": 0.8874,
4027
+ "step": 574
4028
+ },
4029
+ {
4030
+ "epoch": 0.9867009867009867,
4031
+ "grad_norm": 0.8970993757247925,
4032
+ "learning_rate": 4.7260207399774105e-08,
4033
+ "loss": 1.0123,
4034
+ "step": 575
4035
+ },
4036
+ {
4037
+ "epoch": 0.9884169884169884,
4038
+ "grad_norm": 0.8804029226303101,
4039
+ "learning_rate": 3.618493253489397e-08,
4040
+ "loss": 0.8999,
4041
+ "step": 576
4042
+ },
4043
+ {
4044
+ "epoch": 0.9901329901329902,
4045
+ "grad_norm": 0.884253203868866,
4046
+ "learning_rate": 2.6585699249642716e-08,
4047
+ "loss": 0.9295,
4048
+ "step": 577
4049
+ },
4050
+ {
4051
+ "epoch": 0.9918489918489919,
4052
+ "grad_norm": 0.9513406157493591,
4053
+ "learning_rate": 1.8462791126588886e-08,
4054
+ "loss": 0.886,
4055
+ "step": 578
4056
+ },
4057
+ {
4058
+ "epoch": 0.9935649935649936,
4059
+ "grad_norm": 0.954138457775116,
4060
+ "learning_rate": 1.181644813441074e-08,
4061
+ "loss": 0.9306,
4062
+ "step": 579
4063
+ },
4064
+ {
4065
+ "epoch": 0.9952809952809952,
4066
+ "grad_norm": 0.9854024052619934,
4067
+ "learning_rate": 6.646866620768633e-09,
4068
+ "loss": 0.9892,
4069
+ "step": 580
4070
+ },
4071
+ {
4072
+ "epoch": 0.996996996996997,
4073
+ "grad_norm": 1.1090114116668701,
4074
+ "learning_rate": 2.9541993065373976e-09,
4075
+ "loss": 0.9475,
4076
+ "step": 581
4077
+ },
4078
+ {
4079
+ "epoch": 0.9987129987129987,
4080
+ "grad_norm": 1.1489694118499756,
4081
+ "learning_rate": 7.385552812710917e-10,
4082
+ "loss": 0.8506,
4083
+ "step": 582
4084
+ },
4085
+ {
4086
+ "epoch": 0.9987129987129987,
4087
+ "eval_loss": 0.9280008673667908,
4088
+ "eval_runtime": 25.7175,
4089
+ "eval_samples_per_second": 19.092,
4090
+ "eval_steps_per_second": 4.783,
4091
+ "step": 582
4092
+ },
4093
+ {
4094
+ "epoch": 1.0004290004290004,
4095
+ "grad_norm": 1.5230551958084106,
4096
+ "learning_rate": 0.0,
4097
+ "loss": 1.0999,
4098
+ "step": 583
   }
   ],
   "logging_steps": 1,
@@ -3087,12 +4110,12 @@
   "should_evaluate": false,
   "should_log": false,
   "should_save": true,
-  "should_training_stop": false
+  "should_training_stop": true
   },
   "attributes": {}
   }
   },
-  "total_flos": 2.9594373727872614e+17,
+  "total_flos": 3.9366869180114534e+17,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null