{
  "best_metric": 0.5880094937717885,
  "best_model_checkpoint": "./fine-tune/bert-base-uncased/cola/checkpoint-804",
  "epoch": 3.0,
  "global_step": 804,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "eval_loss": 0.45977166295051575,
      "eval_matthews_correlation": 0.5134946392219878,
      "eval_runtime": 5.9561,
      "eval_samples_per_second": 175.115,
      "eval_steps_per_second": 21.994,
      "step": 268
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.5621890547263685e-06,
      "loss": 0.393,
      "step": 500
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.4875448942184448,
      "eval_matthews_correlation": 0.5573424050983508,
      "eval_runtime": 2.8786,
      "eval_samples_per_second": 362.324,
      "eval_steps_per_second": 45.508,
      "step": 536
    },
    {
      "epoch": 3.0,
      "eval_loss": 0.5405848622322083,
      "eval_matthews_correlation": 0.5880094937717885,
      "eval_runtime": 5.5175,
      "eval_samples_per_second": 189.036,
      "eval_steps_per_second": 23.743,
      "step": 804
    },
    {
      "epoch": 3.0,
      "step": 804,
      "total_flos": 1687396975787520.0,
      "train_loss": 0.31582939565478274,
      "train_runtime": 345.3367,
      "train_samples_per_second": 74.284,
      "train_steps_per_second": 2.328
    }
  ],
  "max_steps": 804,
  "num_train_epochs": 3,
  "total_flos": 1687396975787520.0,
  "trial_name": null,
  "trial_params": null
}