{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.888888888888889,
"eval_steps": 500,
"global_step": 24,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.11,
"learning_rate": 9.999999999999999e-06,
"loss": 1.7694,
"step": 1
},
{
"epoch": 0.22,
"learning_rate": 1.9999999999999998e-05,
"loss": 1.9243,
"step": 2
},
{
"epoch": 0.33,
"learning_rate": 3e-05,
"loss": 2.0994,
"step": 3
},
{
"epoch": 0.44,
"learning_rate": 2.875e-05,
"loss": 1.8787,
"step": 4
},
{
"epoch": 0.56,
"learning_rate": 2.75e-05,
"loss": 1.9916,
"step": 5
},
{
"epoch": 0.67,
"learning_rate": 2.625e-05,
"loss": 1.9369,
"step": 6
},
{
"epoch": 0.78,
"learning_rate": 2.5e-05,
"loss": 1.9494,
"step": 7
},
{
"epoch": 0.89,
"learning_rate": 2.3749999999999998e-05,
"loss": 2.0855,
"step": 8
},
{
"epoch": 1.11,
"learning_rate": 2.25e-05,
"loss": 2.0216,
"step": 9
},
{
"epoch": 1.22,
"learning_rate": 2.125e-05,
"loss": 1.8724,
"step": 10
},
{
"epoch": 1.33,
"learning_rate": 1.9999999999999998e-05,
"loss": 1.8561,
"step": 11
},
{
"epoch": 1.44,
"learning_rate": 1.8750000000000002e-05,
"loss": 1.9997,
"step": 12
},
{
"epoch": 1.56,
"learning_rate": 1.7500000000000002e-05,
"loss": 1.7089,
"step": 13
},
{
"epoch": 1.67,
"learning_rate": 1.625e-05,
"loss": 1.9458,
"step": 14
},
{
"epoch": 1.78,
"learning_rate": 1.5e-05,
"loss": 1.7845,
"step": 15
},
{
"epoch": 1.89,
"learning_rate": 1.375e-05,
"loss": 1.9242,
"step": 16
},
{
"epoch": 2.11,
"learning_rate": 1.25e-05,
"loss": 1.6615,
"step": 17
},
{
"epoch": 2.22,
"learning_rate": 1.125e-05,
"loss": 1.9213,
"step": 18
},
{
"epoch": 2.33,
"learning_rate": 9.999999999999999e-06,
"loss": 1.8328,
"step": 19
},
{
"epoch": 2.44,
"learning_rate": 8.750000000000001e-06,
"loss": 1.8141,
"step": 20
},
{
"epoch": 2.56,
"learning_rate": 7.5e-06,
"loss": 1.9057,
"step": 21
},
{
"epoch": 2.67,
"learning_rate": 6.25e-06,
"loss": 1.7507,
"step": 22
},
{
"epoch": 2.78,
"learning_rate": 4.9999999999999996e-06,
"loss": 1.9675,
"step": 23
},
{
"epoch": 2.89,
"learning_rate": 3.75e-06,
"loss": 1.9288,
"step": 24
}
],
"logging_steps": 1,
"max_steps": 27,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 1967839720243200.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}