{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.2786873824287605,
"eval_steps": 500,
"global_step": 500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0027868738242876052,
"grad_norm": 3.5925259590148926,
"learning_rate": 2.2222222222222225e-06,
"loss": 2.5484,
"step": 5
},
{
"epoch": 0.0055737476485752105,
"grad_norm": 3.7384228706359863,
"learning_rate": 5e-06,
"loss": 2.5036,
"step": 10
},
{
"epoch": 0.008360621472862817,
"grad_norm": 3.7028982639312744,
"learning_rate": 7.777777777777777e-06,
"loss": 2.5177,
"step": 15
},
{
"epoch": 0.011147495297150421,
"grad_norm": 3.1780974864959717,
"learning_rate": 1.0555555555555555e-05,
"loss": 2.4746,
"step": 20
},
{
"epoch": 0.013934369121438027,
"grad_norm": 2.8278558254241943,
"learning_rate": 1.3333333333333333e-05,
"loss": 2.5083,
"step": 25
},
{
"epoch": 0.016721242945725634,
"grad_norm": 2.5323452949523926,
"learning_rate": 1.6111111111111115e-05,
"loss": 2.4383,
"step": 30
},
{
"epoch": 0.019508116770013236,
"grad_norm": 1.2514830827713013,
"learning_rate": 1.888888888888889e-05,
"loss": 2.1978,
"step": 35
},
{
"epoch": 0.022294990594300842,
"grad_norm": 1.1612293720245361,
"learning_rate": 2.1666666666666667e-05,
"loss": 2.0549,
"step": 40
},
{
"epoch": 0.025081864418588447,
"grad_norm": 0.9937079548835754,
"learning_rate": 2.4444444444444445e-05,
"loss": 2.1649,
"step": 45
},
{
"epoch": 0.027868738242876053,
"grad_norm": 1.056492567062378,
"learning_rate": 2.7222222222222223e-05,
"loss": 2.0852,
"step": 50
},
{
"epoch": 0.03065561206716366,
"grad_norm": 0.9864413142204285,
"learning_rate": 3e-05,
"loss": 1.934,
"step": 55
},
{
"epoch": 0.03344248589145127,
"grad_norm": 0.9184162616729736,
"learning_rate": 3.277777777777778e-05,
"loss": 2.0746,
"step": 60
},
{
"epoch": 0.03622935971573887,
"grad_norm": 1.044710397720337,
"learning_rate": 3.555555555555556e-05,
"loss": 1.9812,
"step": 65
},
{
"epoch": 0.03901623354002647,
"grad_norm": 0.9160545468330383,
"learning_rate": 3.8333333333333334e-05,
"loss": 1.8794,
"step": 70
},
{
"epoch": 0.04180310736431408,
"grad_norm": 0.9865038990974426,
"learning_rate": 4.111111111111111e-05,
"loss": 1.9266,
"step": 75
},
{
"epoch": 0.044589981188601684,
"grad_norm": 0.9860550165176392,
"learning_rate": 4.388888888888889e-05,
"loss": 1.8417,
"step": 80
},
{
"epoch": 0.04737685501288929,
"grad_norm": 0.9826656579971313,
"learning_rate": 4.666666666666667e-05,
"loss": 1.9756,
"step": 85
},
{
"epoch": 0.050163728837176895,
"grad_norm": 0.9479106664657593,
"learning_rate": 4.9444444444444446e-05,
"loss": 1.8803,
"step": 90
},
{
"epoch": 0.0529506026614645,
"grad_norm": 1.005937933921814,
"learning_rate": 5.222222222222223e-05,
"loss": 1.8677,
"step": 95
},
{
"epoch": 0.055737476485752106,
"grad_norm": 1.1173028945922852,
"learning_rate": 5.500000000000001e-05,
"loss": 1.8416,
"step": 100
},
{
"epoch": 0.05852435031003971,
"grad_norm": 1.0000165700912476,
"learning_rate": 5.7777777777777776e-05,
"loss": 1.8607,
"step": 105
},
{
"epoch": 0.06131122413432732,
"grad_norm": 1.3015921115875244,
"learning_rate": 6.055555555555555e-05,
"loss": 1.9752,
"step": 110
},
{
"epoch": 0.06409809795861493,
"grad_norm": 1.0431042909622192,
"learning_rate": 6.333333333333333e-05,
"loss": 1.8514,
"step": 115
},
{
"epoch": 0.06688497178290254,
"grad_norm": 1.0463422536849976,
"learning_rate": 6.611111111111111e-05,
"loss": 1.8656,
"step": 120
},
{
"epoch": 0.06967184560719013,
"grad_norm": 1.0947015285491943,
"learning_rate": 6.88888888888889e-05,
"loss": 1.9479,
"step": 125
},
{
"epoch": 0.07245871943147773,
"grad_norm": 0.9953688979148865,
"learning_rate": 7.166666666666667e-05,
"loss": 1.8627,
"step": 130
},
{
"epoch": 0.07524559325576534,
"grad_norm": 1.0779668092727661,
"learning_rate": 7.444444444444444e-05,
"loss": 1.8262,
"step": 135
},
{
"epoch": 0.07803246708005294,
"grad_norm": 1.2166972160339355,
"learning_rate": 7.722222222222223e-05,
"loss": 1.8461,
"step": 140
},
{
"epoch": 0.08081934090434055,
"grad_norm": 1.055553674697876,
"learning_rate": 8e-05,
"loss": 1.9684,
"step": 145
},
{
"epoch": 0.08360621472862816,
"grad_norm": 1.0838943719863892,
"learning_rate": 8.277777777777778e-05,
"loss": 1.8891,
"step": 150
},
{
"epoch": 0.08639308855291576,
"grad_norm": 0.9238198399543762,
"learning_rate": 8.555555555555556e-05,
"loss": 1.8507,
"step": 155
},
{
"epoch": 0.08917996237720337,
"grad_norm": 1.186009168624878,
"learning_rate": 8.833333333333333e-05,
"loss": 1.8269,
"step": 160
},
{
"epoch": 0.09196683620149097,
"grad_norm": 1.122663140296936,
"learning_rate": 9.111111111111112e-05,
"loss": 1.7877,
"step": 165
},
{
"epoch": 0.09475371002577858,
"grad_norm": 1.176620364189148,
"learning_rate": 9.388888888888889e-05,
"loss": 1.8373,
"step": 170
},
{
"epoch": 0.09754058385006618,
"grad_norm": 1.0495474338531494,
"learning_rate": 9.666666666666667e-05,
"loss": 1.838,
"step": 175
},
{
"epoch": 0.10032745767435379,
"grad_norm": 1.0859992504119873,
"learning_rate": 9.944444444444446e-05,
"loss": 1.845,
"step": 180
},
{
"epoch": 0.1031143314986414,
"grad_norm": 1.1708000898361206,
"learning_rate": 9.999848639521432e-05,
"loss": 1.8148,
"step": 185
},
{
"epoch": 0.105901205322929,
"grad_norm": 1.0338629484176636,
"learning_rate": 9.999233753283091e-05,
"loss": 1.7982,
"step": 190
},
{
"epoch": 0.1086880791472166,
"grad_norm": 1.2173702716827393,
"learning_rate": 9.998145939378577e-05,
"loss": 1.792,
"step": 195
},
{
"epoch": 0.11147495297150421,
"grad_norm": 1.1683762073516846,
"learning_rate": 9.996585300715116e-05,
"loss": 1.8381,
"step": 200
},
{
"epoch": 0.11426182679579182,
"grad_norm": 1.0897382497787476,
"learning_rate": 9.994551984929175e-05,
"loss": 1.8142,
"step": 205
},
{
"epoch": 0.11704870062007942,
"grad_norm": 0.9195744395256042,
"learning_rate": 9.992046184372492e-05,
"loss": 1.8362,
"step": 210
},
{
"epoch": 0.11983557444436703,
"grad_norm": 1.039374589920044,
"learning_rate": 9.989068136093873e-05,
"loss": 1.7565,
"step": 215
},
{
"epoch": 0.12262244826865464,
"grad_norm": 1.0218507051467896,
"learning_rate": 9.985618121816779e-05,
"loss": 1.8728,
"step": 220
},
{
"epoch": 0.12540932209294225,
"grad_norm": 1.0597004890441895,
"learning_rate": 9.981696467912664e-05,
"loss": 1.7938,
"step": 225
},
{
"epoch": 0.12819619591722986,
"grad_norm": 1.1160184144973755,
"learning_rate": 9.97730354537011e-05,
"loss": 1.8156,
"step": 230
},
{
"epoch": 0.13098306974151747,
"grad_norm": 0.99184250831604,
"learning_rate": 9.972439769759722e-05,
"loss": 1.7255,
"step": 235
},
{
"epoch": 0.13376994356580507,
"grad_norm": 1.0527223348617554,
"learning_rate": 9.967105601194823e-05,
"loss": 1.8942,
"step": 240
},
{
"epoch": 0.13655681739009268,
"grad_norm": 1.0618565082550049,
"learning_rate": 9.961301544287922e-05,
"loss": 1.7666,
"step": 245
},
{
"epoch": 0.13934369121438026,
"grad_norm": 1.1174343824386597,
"learning_rate": 9.955028148102979e-05,
"loss": 1.7274,
"step": 250
},
{
"epoch": 0.14213056503866786,
"grad_norm": 1.045905351638794,
"learning_rate": 9.948286006103466e-05,
"loss": 1.9156,
"step": 255
},
{
"epoch": 0.14491743886295547,
"grad_norm": 1.1782068014144897,
"learning_rate": 9.941075756096226e-05,
"loss": 1.8142,
"step": 260
},
{
"epoch": 0.14770431268724307,
"grad_norm": 1.0786527395248413,
"learning_rate": 9.933398080171123e-05,
"loss": 1.8206,
"step": 265
},
{
"epoch": 0.15049118651153068,
"grad_norm": 1.0150487422943115,
"learning_rate": 9.925253704636543e-05,
"loss": 1.7779,
"step": 270
},
{
"epoch": 0.15327806033581828,
"grad_norm": 1.0553853511810303,
"learning_rate": 9.916643399950656e-05,
"loss": 1.8223,
"step": 275
},
{
"epoch": 0.1560649341601059,
"grad_norm": 1.0968186855316162,
"learning_rate": 9.907567980648549e-05,
"loss": 1.7834,
"step": 280
},
{
"epoch": 0.1588518079843935,
"grad_norm": 0.9188632369041443,
"learning_rate": 9.898028305265169e-05,
"loss": 1.8808,
"step": 285
},
{
"epoch": 0.1616386818086811,
"grad_norm": 0.9829431772232056,
"learning_rate": 9.888025276254096e-05,
"loss": 1.8455,
"step": 290
},
{
"epoch": 0.1644255556329687,
"grad_norm": 1.034116268157959,
"learning_rate": 9.877559839902184e-05,
"loss": 1.8042,
"step": 295
},
{
"epoch": 0.1672124294572563,
"grad_norm": 1.0451691150665283,
"learning_rate": 9.86663298624003e-05,
"loss": 1.7696,
"step": 300
},
{
"epoch": 0.16999930328154392,
"grad_norm": 1.064620852470398,
"learning_rate": 9.855245748948326e-05,
"loss": 1.8668,
"step": 305
},
{
"epoch": 0.17278617710583152,
"grad_norm": 1.0409430265426636,
"learning_rate": 9.843399205260068e-05,
"loss": 1.8274,
"step": 310
},
{
"epoch": 0.17557305093011913,
"grad_norm": 1.0153954029083252,
"learning_rate": 9.831094475858652e-05,
"loss": 1.8084,
"step": 315
},
{
"epoch": 0.17835992475440673,
"grad_norm": 0.9218955039978027,
"learning_rate": 9.818332724771857e-05,
"loss": 1.8457,
"step": 320
},
{
"epoch": 0.18114679857869434,
"grad_norm": 0.9664631485939026,
"learning_rate": 9.805115159261726e-05,
"loss": 1.772,
"step": 325
},
{
"epoch": 0.18393367240298195,
"grad_norm": 1.0060662031173706,
"learning_rate": 9.791443029710361e-05,
"loss": 1.727,
"step": 330
},
{
"epoch": 0.18672054622726955,
"grad_norm": 1.0184046030044556,
"learning_rate": 9.777317629501636e-05,
"loss": 1.7638,
"step": 335
},
{
"epoch": 0.18950742005155716,
"grad_norm": 0.9374409317970276,
"learning_rate": 9.762740294898846e-05,
"loss": 1.7323,
"step": 340
},
{
"epoch": 0.19229429387584476,
"grad_norm": 0.9715288877487183,
"learning_rate": 9.747712404918286e-05,
"loss": 1.7169,
"step": 345
},
{
"epoch": 0.19508116770013237,
"grad_norm": 1.088680624961853,
"learning_rate": 9.732235381198813e-05,
"loss": 1.8065,
"step": 350
},
{
"epoch": 0.19786804152441997,
"grad_norm": 0.9513164758682251,
"learning_rate": 9.716310687867342e-05,
"loss": 1.6545,
"step": 355
},
{
"epoch": 0.20065491534870758,
"grad_norm": 0.9501610398292542,
"learning_rate": 9.699939831400351e-05,
"loss": 1.8605,
"step": 360
},
{
"epoch": 0.20344178917299519,
"grad_norm": 0.9788233041763306,
"learning_rate": 9.683124360481364e-05,
"loss": 1.7838,
"step": 365
},
{
"epoch": 0.2062286629972828,
"grad_norm": 1.0686471462249756,
"learning_rate": 9.665865865854445e-05,
"loss": 1.7832,
"step": 370
},
{
"epoch": 0.2090155368215704,
"grad_norm": 0.9658113121986389,
"learning_rate": 9.648165980173712e-05,
"loss": 1.7721,
"step": 375
},
{
"epoch": 0.211802410645858,
"grad_norm": 1.0429141521453857,
"learning_rate": 9.630026377848892e-05,
"loss": 1.7741,
"step": 380
},
{
"epoch": 0.2145892844701456,
"grad_norm": 0.9593327641487122,
"learning_rate": 9.611448774886924e-05,
"loss": 1.7534,
"step": 385
},
{
"epoch": 0.2173761582944332,
"grad_norm": 0.9077600240707397,
"learning_rate": 9.592434928729616e-05,
"loss": 1.8398,
"step": 390
},
{
"epoch": 0.22016303211872082,
"grad_norm": 1.048581600189209,
"learning_rate": 9.572986638087396e-05,
"loss": 1.8134,
"step": 395
},
{
"epoch": 0.22294990594300843,
"grad_norm": 0.9498254060745239,
"learning_rate": 9.553105742769154e-05,
"loss": 1.7497,
"step": 400
},
{
"epoch": 0.22573677976729603,
"grad_norm": 1.0249079465866089,
"learning_rate": 9.532794123508197e-05,
"loss": 1.6412,
"step": 405
},
{
"epoch": 0.22852365359158364,
"grad_norm": 0.9723719358444214,
"learning_rate": 9.512053701784329e-05,
"loss": 1.7741,
"step": 410
},
{
"epoch": 0.23131052741587124,
"grad_norm": 0.954607367515564,
"learning_rate": 9.490886439642081e-05,
"loss": 1.7865,
"step": 415
},
{
"epoch": 0.23409740124015885,
"grad_norm": 0.9485211372375488,
"learning_rate": 9.469294339505098e-05,
"loss": 1.813,
"step": 420
},
{
"epoch": 0.23688427506444645,
"grad_norm": 0.9606624841690063,
"learning_rate": 9.447279443986716e-05,
"loss": 1.7426,
"step": 425
},
{
"epoch": 0.23967114888873406,
"grad_norm": 0.9899424910545349,
"learning_rate": 9.424843835696724e-05,
"loss": 1.7802,
"step": 430
},
{
"epoch": 0.24245802271302166,
"grad_norm": 1.0416887998580933,
"learning_rate": 9.401989637044355e-05,
"loss": 1.7418,
"step": 435
},
{
"epoch": 0.24524489653730927,
"grad_norm": 0.958951473236084,
"learning_rate": 9.3787190100375e-05,
"loss": 1.7791,
"step": 440
},
{
"epoch": 0.24803177036159688,
"grad_norm": 0.993934154510498,
"learning_rate": 9.355034156078188e-05,
"loss": 1.739,
"step": 445
},
{
"epoch": 0.2508186441858845,
"grad_norm": 0.9372468590736389,
"learning_rate": 9.330937315754329e-05,
"loss": 1.7406,
"step": 450
},
{
"epoch": 0.2536055180101721,
"grad_norm": 0.9601876735687256,
"learning_rate": 9.306430768627753e-05,
"loss": 1.7357,
"step": 455
},
{
"epoch": 0.2563923918344597,
"grad_norm": 0.9922548532485962,
"learning_rate": 9.281516833018571e-05,
"loss": 1.7677,
"step": 460
},
{
"epoch": 0.2591792656587473,
"grad_norm": 0.9353616833686829,
"learning_rate": 9.256197865785854e-05,
"loss": 1.8202,
"step": 465
},
{
"epoch": 0.26196613948303493,
"grad_norm": 0.8890717625617981,
"learning_rate": 9.230476262104677e-05,
"loss": 1.7473,
"step": 470
},
{
"epoch": 0.26475301330732254,
"grad_norm": 1.0059962272644043,
"learning_rate": 9.204354455239539e-05,
"loss": 1.7978,
"step": 475
},
{
"epoch": 0.26753988713161014,
"grad_norm": 0.9888349771499634,
"learning_rate": 9.177834916314165e-05,
"loss": 1.7481,
"step": 480
},
{
"epoch": 0.27032676095589775,
"grad_norm": 0.9407850503921509,
"learning_rate": 9.150920154077754e-05,
"loss": 1.7667,
"step": 485
},
{
"epoch": 0.27311363478018535,
"grad_norm": 1.0001949071884155,
"learning_rate": 9.123612714667634e-05,
"loss": 1.7742,
"step": 490
},
{
"epoch": 0.2759005086044729,
"grad_norm": 0.9057056307792664,
"learning_rate": 9.095915181368412e-05,
"loss": 1.816,
"step": 495
},
{
"epoch": 0.2786873824287605,
"grad_norm": 1.0183491706848145,
"learning_rate": 9.067830174367586e-05,
"loss": 1.8137,
"step": 500
}
],
"logging_steps": 5,
"max_steps": 1795,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.2509451155603456e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}