{
  "best_global_step": 380,
  "best_metric": 2.9931933879852295,
  "best_model_checkpoint": "./bert_mini_squadv2_finetuned/checkpoint-380",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 380,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 4.97021484375,
      "learning_rate": 1.9526315789473688e-05,
      "loss": 5.8799,
      "step": 10
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 5.308949947357178,
      "learning_rate": 1.9e-05,
      "loss": 5.6001,
      "step": 20
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 5.93297004699707,
      "learning_rate": 1.8473684210526317e-05,
      "loss": 5.356,
      "step": 30
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 6.4546613693237305,
      "learning_rate": 1.7947368421052634e-05,
      "loss": 5.0317,
      "step": 40
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 6.0348734855651855,
      "learning_rate": 1.742105263157895e-05,
      "loss": 4.8894,
      "step": 50
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 7.766897201538086,
      "learning_rate": 1.6894736842105263e-05,
      "loss": 4.5613,
      "step": 60
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 6.935646057128906,
      "learning_rate": 1.636842105263158e-05,
      "loss": 4.4883,
      "step": 70
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 6.82500696182251,
      "learning_rate": 1.5842105263157896e-05,
      "loss": 4.2883,
      "step": 80
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 7.159075736999512,
      "learning_rate": 1.5315789473684212e-05,
      "loss": 4.165,
      "step": 90
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 7.179778575897217,
      "learning_rate": 1.4789473684210527e-05,
      "loss": 4.1544,
      "step": 100
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 7.615407943725586,
      "learning_rate": 1.4263157894736843e-05,
      "loss": 3.9639,
      "step": 110
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 8.331777572631836,
      "learning_rate": 1.373684210526316e-05,
      "loss": 3.9313,
      "step": 120
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 8.408103942871094,
      "learning_rate": 1.3210526315789476e-05,
      "loss": 3.8429,
      "step": 130
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 7.173701286315918,
      "learning_rate": 1.268421052631579e-05,
      "loss": 3.8753,
      "step": 140
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 8.077372550964355,
      "learning_rate": 1.2157894736842107e-05,
      "loss": 3.726,
      "step": 150
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 8.297250747680664,
      "learning_rate": 1.1631578947368423e-05,
      "loss": 3.7247,
      "step": 160
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 8.952900886535645,
      "learning_rate": 1.1105263157894736e-05,
      "loss": 3.5831,
      "step": 170
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 7.744213581085205,
      "learning_rate": 1.0578947368421053e-05,
      "loss": 3.6806,
      "step": 180
    },
    {
      "epoch": 1.0,
      "grad_norm": 8.072354316711426,
      "learning_rate": 1.005263157894737e-05,
      "loss": 3.6219,
      "step": 190
    },
    {
      "epoch": 1.0,
      "eval_loss": 3.1472160816192627,
      "eval_runtime": 33.3578,
      "eval_samples_per_second": 15.259,
      "eval_steps_per_second": 0.959,
      "step": 190
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 8.19955825805664,
      "learning_rate": 9.526315789473684e-06,
      "loss": 3.6322,
      "step": 200
    },
    {
      "epoch": 1.1052631578947367,
      "grad_norm": 9.592784881591797,
      "learning_rate": 9e-06,
      "loss": 3.4887,
      "step": 210
    },
    {
      "epoch": 1.1578947368421053,
      "grad_norm": 10.357446670532227,
      "learning_rate": 8.473684210526317e-06,
      "loss": 3.468,
      "step": 220
    },
    {
      "epoch": 1.2105263157894737,
      "grad_norm": 9.067779541015625,
      "learning_rate": 7.947368421052633e-06,
      "loss": 3.7465,
      "step": 230
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 8.634210586547852,
      "learning_rate": 7.421052631578948e-06,
      "loss": 3.2895,
      "step": 240
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 7.1132941246032715,
      "learning_rate": 6.894736842105264e-06,
      "loss": 3.6037,
      "step": 250
    },
    {
      "epoch": 1.368421052631579,
      "grad_norm": 8.3925142288208,
      "learning_rate": 6.3684210526315795e-06,
      "loss": 3.4975,
      "step": 260
    },
    {
      "epoch": 1.4210526315789473,
      "grad_norm": 8.534266471862793,
      "learning_rate": 5.842105263157896e-06,
      "loss": 3.4788,
      "step": 270
    },
    {
      "epoch": 1.4736842105263157,
      "grad_norm": 10.572562217712402,
      "learning_rate": 5.315789473684211e-06,
      "loss": 3.7405,
      "step": 280
    },
    {
      "epoch": 1.526315789473684,
      "grad_norm": 8.039414405822754,
      "learning_rate": 4.789473684210527e-06,
      "loss": 3.4092,
      "step": 290
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 8.852104187011719,
      "learning_rate": 4.2631578947368425e-06,
      "loss": 3.5734,
      "step": 300
    },
    {
      "epoch": 1.631578947368421,
      "grad_norm": 9.102316856384277,
      "learning_rate": 3.736842105263158e-06,
      "loss": 3.3341,
      "step": 310
    },
    {
      "epoch": 1.6842105263157894,
      "grad_norm": 8.393866539001465,
      "learning_rate": 3.210526315789474e-06,
      "loss": 3.3955,
      "step": 320
    },
    {
      "epoch": 1.736842105263158,
      "grad_norm": 9.89341926574707,
      "learning_rate": 2.68421052631579e-06,
      "loss": 3.4553,
      "step": 330
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 9.145729064941406,
      "learning_rate": 2.1578947368421054e-06,
      "loss": 3.4255,
      "step": 340
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 8.320271492004395,
      "learning_rate": 1.6315789473684212e-06,
      "loss": 3.1645,
      "step": 350
    },
    {
      "epoch": 1.8947368421052633,
      "grad_norm": 10.283031463623047,
      "learning_rate": 1.1052631578947369e-06,
      "loss": 3.3179,
      "step": 360
    },
    {
      "epoch": 1.9473684210526314,
      "grad_norm": 11.425944328308105,
      "learning_rate": 5.789473684210526e-07,
      "loss": 3.2488,
      "step": 370
    },
    {
      "epoch": 2.0,
      "grad_norm": 9.913457870483398,
      "learning_rate": 5.263157894736842e-08,
      "loss": 3.5268,
      "step": 380
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.9931933879852295,
      "eval_runtime": 34.157,
      "eval_samples_per_second": 14.902,
      "eval_steps_per_second": 0.937,
      "step": 380
    }
  ],
  "logging_steps": 10,
  "max_steps": 380,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 44208969412608.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}