{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.263823064770932,
  "eval_steps": 1000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01579778830963665,
      "grad_norm": 39.75217819213867,
      "learning_rate": 4.4e-07,
      "loss": 2.8325,
      "step": 25
    },
    {
      "epoch": 0.0315955766192733,
      "grad_norm": 29.480606079101562,
      "learning_rate": 9.400000000000001e-07,
      "loss": 2.6716,
      "step": 50
    },
    {
      "epoch": 0.04739336492890995,
      "grad_norm": 25.953859329223633,
      "learning_rate": 1.44e-06,
      "loss": 2.4993,
      "step": 75
    },
    {
      "epoch": 0.0631911532385466,
      "grad_norm": 22.231847763061523,
      "learning_rate": 1.94e-06,
      "loss": 2.231,
      "step": 100
    },
    {
      "epoch": 0.07898894154818326,
      "grad_norm": 22.015960693359375,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 2.0333,
      "step": 125
    },
    {
      "epoch": 0.0947867298578199,
      "grad_norm": 20.71744728088379,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 1.8308,
      "step": 150
    },
    {
      "epoch": 0.11058451816745656,
      "grad_norm": 21.350202560424805,
      "learning_rate": 3.44e-06,
      "loss": 1.74,
      "step": 175
    },
    {
      "epoch": 0.1263823064770932,
      "grad_norm": 19.41323471069336,
      "learning_rate": 3.94e-06,
      "loss": 1.6661,
      "step": 200
    },
    {
      "epoch": 0.14218009478672985,
      "grad_norm": 19.41071891784668,
      "learning_rate": 4.440000000000001e-06,
      "loss": 1.5797,
      "step": 225
    },
    {
      "epoch": 0.1579778830963665,
      "grad_norm": 20.96115493774414,
      "learning_rate": 4.94e-06,
      "loss": 1.5021,
      "step": 250
    },
    {
      "epoch": 0.17377567140600317,
      "grad_norm": 19.24570083618164,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 1.4673,
      "step": 275
    },
    {
      "epoch": 0.1895734597156398,
      "grad_norm": 18.830514907836914,
      "learning_rate": 5.94e-06,
      "loss": 1.4059,
      "step": 300
    },
    {
      "epoch": 0.20537124802527645,
      "grad_norm": 19.78591537475586,
      "learning_rate": 6.440000000000001e-06,
      "loss": 1.386,
      "step": 325
    },
    {
      "epoch": 0.2211690363349131,
      "grad_norm": 19.39731788635254,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 1.3143,
      "step": 350
    },
    {
      "epoch": 0.23696682464454977,
      "grad_norm": 20.41983985900879,
      "learning_rate": 7.440000000000001e-06,
      "loss": 1.3208,
      "step": 375
    },
    {
      "epoch": 0.2527646129541864,
      "grad_norm": 19.726163864135742,
      "learning_rate": 7.94e-06,
      "loss": 1.2967,
      "step": 400
    },
    {
      "epoch": 0.2685624012638231,
      "grad_norm": 19.085514068603516,
      "learning_rate": 8.44e-06,
      "loss": 1.2114,
      "step": 425
    },
    {
      "epoch": 0.2843601895734597,
      "grad_norm": 17.482362747192383,
      "learning_rate": 8.94e-06,
      "loss": 1.2181,
      "step": 450
    },
    {
      "epoch": 0.3001579778830964,
      "grad_norm": 20.98807716369629,
      "learning_rate": 9.440000000000001e-06,
      "loss": 1.1581,
      "step": 475
    },
    {
      "epoch": 0.315955766192733,
      "grad_norm": 17.159069061279297,
      "learning_rate": 9.940000000000001e-06,
      "loss": 1.1218,
      "step": 500
    },
    {
      "epoch": 0.33175355450236965,
      "grad_norm": 16.87204933166504,
      "learning_rate": 9.853333333333334e-06,
      "loss": 1.1329,
      "step": 525
    },
    {
      "epoch": 0.34755134281200634,
      "grad_norm": 17.013330459594727,
      "learning_rate": 9.686666666666668e-06,
      "loss": 1.0566,
      "step": 550
    },
    {
      "epoch": 0.36334913112164297,
      "grad_norm": 16.366235733032227,
      "learning_rate": 9.52e-06,
      "loss": 1.0501,
      "step": 575
    },
    {
      "epoch": 0.3791469194312796,
      "grad_norm": 16.095335006713867,
      "learning_rate": 9.353333333333334e-06,
      "loss": 1.0634,
      "step": 600
    },
    {
      "epoch": 0.3949447077409163,
      "grad_norm": 18.551029205322266,
      "learning_rate": 9.186666666666666e-06,
      "loss": 1.0138,
      "step": 625
    },
    {
      "epoch": 0.4107424960505529,
      "grad_norm": 15.467816352844238,
      "learning_rate": 9.020000000000002e-06,
      "loss": 1.0199,
      "step": 650
    },
    {
      "epoch": 0.4265402843601896,
      "grad_norm": 18.306955337524414,
      "learning_rate": 8.853333333333334e-06,
      "loss": 1.0209,
      "step": 675
    },
    {
      "epoch": 0.4423380726698262,
      "grad_norm": 16.152812957763672,
      "learning_rate": 8.686666666666668e-06,
      "loss": 0.9889,
      "step": 700
    },
    {
      "epoch": 0.45813586097946285,
      "grad_norm": 16.533315658569336,
      "learning_rate": 8.52e-06,
      "loss": 0.9622,
      "step": 725
    },
    {
      "epoch": 0.47393364928909953,
      "grad_norm": 16.151216506958008,
      "learning_rate": 8.353333333333335e-06,
      "loss": 0.9555,
      "step": 750
    },
    {
      "epoch": 0.48973143759873616,
      "grad_norm": 16.503087997436523,
      "learning_rate": 8.186666666666667e-06,
      "loss": 0.9833,
      "step": 775
    },
    {
      "epoch": 0.5055292259083728,
      "grad_norm": 18.59416961669922,
      "learning_rate": 8.020000000000001e-06,
      "loss": 0.9451,
      "step": 800
    },
    {
      "epoch": 0.5213270142180095,
      "grad_norm": 17.992612838745117,
      "learning_rate": 7.853333333333333e-06,
      "loss": 0.9003,
      "step": 825
    },
    {
      "epoch": 0.5371248025276462,
      "grad_norm": 16.3101806640625,
      "learning_rate": 7.686666666666667e-06,
      "loss": 0.9197,
      "step": 850
    },
    {
      "epoch": 0.5529225908372828,
      "grad_norm": 17.981529235839844,
      "learning_rate": 7.520000000000001e-06,
      "loss": 0.8954,
      "step": 875
    },
    {
      "epoch": 0.5687203791469194,
      "grad_norm": 15.544134140014648,
      "learning_rate": 7.353333333333334e-06,
      "loss": 0.9083,
      "step": 900
    },
    {
      "epoch": 0.584518167456556,
      "grad_norm": 16.51857566833496,
      "learning_rate": 7.186666666666668e-06,
      "loss": 0.87,
      "step": 925
    },
    {
      "epoch": 0.6003159557661928,
      "grad_norm": 15.2325439453125,
      "learning_rate": 7.0200000000000006e-06,
      "loss": 0.8907,
      "step": 950
    },
    {
      "epoch": 0.6161137440758294,
      "grad_norm": 15.995855331420898,
      "learning_rate": 6.853333333333334e-06,
      "loss": 0.9026,
      "step": 975
    },
    {
      "epoch": 0.631911532385466,
      "grad_norm": 16.089975357055664,
      "learning_rate": 6.6866666666666665e-06,
      "loss": 0.86,
      "step": 1000
    },
    {
      "epoch": 0.631911532385466,
      "eval_loss": 0.9047765135765076,
      "eval_runtime": 642.0774,
      "eval_samples_per_second": 6.076,
      "eval_steps_per_second": 0.38,
      "eval_wer": 0.5978218873756399,
      "step": 1000
    },
    {
      "epoch": 0.6477093206951027,
      "grad_norm": 15.475512504577637,
      "learning_rate": 6.520000000000001e-06,
      "loss": 0.8655,
      "step": 1025
    },
    {
      "epoch": 0.6635071090047393,
      "grad_norm": 17.087614059448242,
      "learning_rate": 6.353333333333333e-06,
      "loss": 0.8498,
      "step": 1050
    },
    {
      "epoch": 0.6793048973143759,
      "grad_norm": 15.394526481628418,
      "learning_rate": 6.186666666666668e-06,
      "loss": 0.8618,
      "step": 1075
    },
    {
      "epoch": 0.6951026856240127,
      "grad_norm": 16.02501106262207,
      "learning_rate": 6.02e-06,
      "loss": 0.8323,
      "step": 1100
    },
    {
      "epoch": 0.7109004739336493,
      "grad_norm": 16.395601272583008,
      "learning_rate": 5.853333333333335e-06,
      "loss": 0.8695,
      "step": 1125
    },
    {
      "epoch": 0.7266982622432859,
      "grad_norm": 14.254459381103516,
      "learning_rate": 5.686666666666667e-06,
      "loss": 0.8349,
      "step": 1150
    },
    {
      "epoch": 0.7424960505529226,
      "grad_norm": 14.784615516662598,
      "learning_rate": 5.5200000000000005e-06,
      "loss": 0.83,
      "step": 1175
    },
    {
      "epoch": 0.7582938388625592,
      "grad_norm": 14.301170349121094,
      "learning_rate": 5.3533333333333335e-06,
      "loss": 0.8208,
      "step": 1200
    },
    {
      "epoch": 0.7740916271721959,
      "grad_norm": 15.711307525634766,
      "learning_rate": 5.186666666666667e-06,
      "loss": 0.834,
      "step": 1225
    },
    {
      "epoch": 0.7898894154818326,
      "grad_norm": 16.111242294311523,
      "learning_rate": 5.02e-06,
      "loss": 0.8367,
      "step": 1250
    },
    {
      "epoch": 0.8056872037914692,
      "grad_norm": 14.488164901733398,
      "learning_rate": 4.853333333333334e-06,
      "loss": 0.8206,
      "step": 1275
    },
    {
      "epoch": 0.8214849921011058,
      "grad_norm": 15.93684196472168,
      "learning_rate": 4.686666666666667e-06,
      "loss": 0.7844,
      "step": 1300
    },
    {
      "epoch": 0.8372827804107424,
      "grad_norm": 15.021976470947266,
      "learning_rate": 4.520000000000001e-06,
      "loss": 0.8104,
      "step": 1325
    },
    {
      "epoch": 0.8530805687203792,
      "grad_norm": 17.250614166259766,
      "learning_rate": 4.353333333333334e-06,
      "loss": 0.8138,
      "step": 1350
    },
    {
      "epoch": 0.8688783570300158,
      "grad_norm": 17.35563087463379,
      "learning_rate": 4.1866666666666675e-06,
      "loss": 0.7968,
      "step": 1375
    },
    {
      "epoch": 0.8846761453396524,
      "grad_norm": 16.59588623046875,
      "learning_rate": 4.0200000000000005e-06,
      "loss": 0.7904,
      "step": 1400
    },
    {
      "epoch": 0.9004739336492891,
      "grad_norm": 14.530390739440918,
      "learning_rate": 3.853333333333334e-06,
      "loss": 0.7922,
      "step": 1425
    },
    {
      "epoch": 0.9162717219589257,
      "grad_norm": 16.025850296020508,
      "learning_rate": 3.686666666666667e-06,
      "loss": 0.7811,
      "step": 1450
    },
    {
      "epoch": 0.9320695102685624,
      "grad_norm": 16.774017333984375,
      "learning_rate": 3.52e-06,
      "loss": 0.8197,
      "step": 1475
    },
    {
      "epoch": 0.9478672985781991,
      "grad_norm": 16.73988914489746,
      "learning_rate": 3.3533333333333336e-06,
      "loss": 0.7817,
      "step": 1500
    },
    {
      "epoch": 0.9636650868878357,
      "grad_norm": 16.070240020751953,
      "learning_rate": 3.186666666666667e-06,
      "loss": 0.7592,
      "step": 1525
    },
    {
      "epoch": 0.9794628751974723,
      "grad_norm": 15.3619966506958,
      "learning_rate": 3.0200000000000003e-06,
      "loss": 0.7874,
      "step": 1550
    },
    {
      "epoch": 0.995260663507109,
      "grad_norm": 15.880134582519531,
      "learning_rate": 2.8533333333333337e-06,
      "loss": 0.7664,
      "step": 1575
    },
    {
      "epoch": 1.0110584518167456,
      "grad_norm": 14.766377449035645,
      "learning_rate": 2.686666666666667e-06,
      "loss": 0.7161,
      "step": 1600
    },
    {
      "epoch": 1.0268562401263823,
      "grad_norm": 14.066420555114746,
      "learning_rate": 2.52e-06,
      "loss": 0.698,
      "step": 1625
    },
    {
      "epoch": 1.042654028436019,
      "grad_norm": 15.264778137207031,
      "learning_rate": 2.3533333333333334e-06,
      "loss": 0.6944,
      "step": 1650
    },
    {
      "epoch": 1.0584518167456556,
      "grad_norm": 15.829083442687988,
      "learning_rate": 2.1866666666666668e-06,
      "loss": 0.6755,
      "step": 1675
    },
    {
      "epoch": 1.0742496050552923,
      "grad_norm": 16.2938289642334,
      "learning_rate": 2.02e-06,
      "loss": 0.7162,
      "step": 1700
    },
    {
      "epoch": 1.0900473933649288,
      "grad_norm": 14.18964672088623,
      "learning_rate": 1.8533333333333333e-06,
      "loss": 0.6773,
      "step": 1725
    },
    {
      "epoch": 1.1058451816745656,
      "grad_norm": 13.311141967773438,
      "learning_rate": 1.6866666666666667e-06,
      "loss": 0.6835,
      "step": 1750
    },
    {
      "epoch": 1.1216429699842023,
      "grad_norm": 13.857542991638184,
      "learning_rate": 1.52e-06,
      "loss": 0.6995,
      "step": 1775
    },
    {
      "epoch": 1.1374407582938388,
      "grad_norm": 14.40538501739502,
      "learning_rate": 1.3533333333333334e-06,
      "loss": 0.705,
      "step": 1800
    },
    {
      "epoch": 1.1532385466034756,
      "grad_norm": 14.70490837097168,
      "learning_rate": 1.1866666666666668e-06,
      "loss": 0.6889,
      "step": 1825
    },
    {
      "epoch": 1.169036334913112,
      "grad_norm": 15.341684341430664,
      "learning_rate": 1.02e-06,
      "loss": 0.6935,
      "step": 1850
    },
    {
      "epoch": 1.1848341232227488,
      "grad_norm": 15.82376766204834,
      "learning_rate": 8.533333333333334e-07,
      "loss": 0.6972,
      "step": 1875
    },
    {
      "epoch": 1.2006319115323856,
      "grad_norm": 15.344350814819336,
      "learning_rate": 6.866666666666667e-07,
      "loss": 0.6755,
      "step": 1900
    },
    {
      "epoch": 1.216429699842022,
      "grad_norm": 14.48157787322998,
      "learning_rate": 5.2e-07,
      "loss": 0.6746,
      "step": 1925
    },
    {
      "epoch": 1.2322274881516588,
      "grad_norm": 15.609822273254395,
      "learning_rate": 3.533333333333334e-07,
      "loss": 0.7027,
      "step": 1950
    },
    {
      "epoch": 1.2480252764612954,
      "grad_norm": 14.66109848022461,
      "learning_rate": 1.866666666666667e-07,
      "loss": 0.7034,
      "step": 1975
    },
    {
      "epoch": 1.263823064770932,
      "grad_norm": 14.325251579284668,
      "learning_rate": 2e-08,
      "loss": 0.6944,
      "step": 2000
    },
    {
      "epoch": 1.263823064770932,
      "eval_loss": 0.8038402795791626,
      "eval_runtime": 452.3801,
      "eval_samples_per_second": 8.623,
      "eval_steps_per_second": 0.539,
      "eval_wer": 0.5420651019028301,
      "step": 2000
    },
    {
      "epoch": 1.263823064770932,
      "step": 2000,
      "total_flos": 4.15103975424e+18,
      "train_loss": 1.0420200901031493,
      "train_runtime": 9413.1109,
      "train_samples_per_second": 6.799,
      "train_steps_per_second": 0.212
    }
  ],
  "logging_steps": 25,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.15103975424e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}