{
  "best_metric": 0.2940025329589844,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.01893849723024478,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 9.46924861512239e-05,
      "grad_norm": 2.454827070236206,
      "learning_rate": 5e-06,
      "loss": 1.218,
      "step": 1
    },
    {
      "epoch": 9.46924861512239e-05,
      "eval_loss": 1.7879356145858765,
      "eval_runtime": 1373.1479,
      "eval_samples_per_second": 12.953,
      "eval_steps_per_second": 6.477,
      "step": 1
    },
    {
      "epoch": 0.0001893849723024478,
      "grad_norm": 3.4767465591430664,
      "learning_rate": 1e-05,
      "loss": 1.1959,
      "step": 2
    },
    {
      "epoch": 0.0002840774584536717,
      "grad_norm": 4.736673355102539,
      "learning_rate": 1.5e-05,
      "loss": 1.3324,
      "step": 3
    },
    {
      "epoch": 0.0003787699446048956,
      "grad_norm": 3.548912763595581,
      "learning_rate": 2e-05,
      "loss": 1.2875,
      "step": 4
    },
    {
      "epoch": 0.0004734624307561195,
      "grad_norm": 2.0464327335357666,
      "learning_rate": 2.5e-05,
      "loss": 1.1956,
      "step": 5
    },
    {
      "epoch": 0.0005681549169073434,
      "grad_norm": 2.3961620330810547,
      "learning_rate": 3e-05,
      "loss": 1.1326,
      "step": 6
    },
    {
      "epoch": 0.0006628474030585673,
      "grad_norm": 1.7311418056488037,
      "learning_rate": 3.5e-05,
      "loss": 1.1647,
      "step": 7
    },
    {
      "epoch": 0.0007575398892097912,
      "grad_norm": 2.298950433731079,
      "learning_rate": 4e-05,
      "loss": 1.1572,
      "step": 8
    },
    {
      "epoch": 0.0008522323753610151,
      "grad_norm": 1.7803407907485962,
      "learning_rate": 4.5e-05,
      "loss": 1.144,
      "step": 9
    },
    {
      "epoch": 0.000946924861512239,
      "grad_norm": 1.5894001722335815,
      "learning_rate": 5e-05,
      "loss": 0.9952,
      "step": 10
    },
    {
      "epoch": 0.0010416173476634628,
      "grad_norm": 1.2197067737579346,
      "learning_rate": 5.500000000000001e-05,
      "loss": 0.6901,
      "step": 11
    },
    {
      "epoch": 0.0011363098338146868,
      "grad_norm": 1.7695541381835938,
      "learning_rate": 6e-05,
      "loss": 0.779,
      "step": 12
    },
    {
      "epoch": 0.0012310023199659107,
      "grad_norm": 1.0885900259017944,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.7552,
      "step": 13
    },
    {
      "epoch": 0.0013256948061171345,
      "grad_norm": 1.3367475271224976,
      "learning_rate": 7e-05,
      "loss": 0.754,
      "step": 14
    },
    {
      "epoch": 0.0014203872922683586,
      "grad_norm": 2.406761407852173,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.6969,
      "step": 15
    },
    {
      "epoch": 0.0015150797784195824,
      "grad_norm": 2.8892931938171387,
      "learning_rate": 8e-05,
      "loss": 0.6946,
      "step": 16
    },
    {
      "epoch": 0.0016097722645708062,
      "grad_norm": 2.041106939315796,
      "learning_rate": 8.5e-05,
      "loss": 0.6596,
      "step": 17
    },
    {
      "epoch": 0.0017044647507220303,
      "grad_norm": 1.3814897537231445,
      "learning_rate": 9e-05,
      "loss": 0.5348,
      "step": 18
    },
    {
      "epoch": 0.001799157236873254,
      "grad_norm": 0.7577526569366455,
      "learning_rate": 9.5e-05,
      "loss": 0.4126,
      "step": 19
    },
    {
      "epoch": 0.001893849723024478,
      "grad_norm": 1.5962600708007812,
      "learning_rate": 0.0001,
      "loss": 0.5226,
      "step": 20
    },
    {
      "epoch": 0.0019885422091757018,
      "grad_norm": 0.854233980178833,
      "learning_rate": 9.999238475781957e-05,
      "loss": 0.4707,
      "step": 21
    },
    {
      "epoch": 0.0020832346953269256,
      "grad_norm": 0.9870491027832031,
      "learning_rate": 9.99695413509548e-05,
      "loss": 0.4483,
      "step": 22
    },
    {
      "epoch": 0.00217792718147815,
      "grad_norm": 1.0716090202331543,
      "learning_rate": 9.99314767377287e-05,
      "loss": 0.4955,
      "step": 23
    },
    {
      "epoch": 0.0022726196676293737,
      "grad_norm": 1.1506928205490112,
      "learning_rate": 9.987820251299122e-05,
      "loss": 0.5013,
      "step": 24
    },
    {
      "epoch": 0.0023673121537805975,
      "grad_norm": 0.9374679327011108,
      "learning_rate": 9.980973490458728e-05,
      "loss": 0.351,
      "step": 25
    },
    {
      "epoch": 0.0024620046399318214,
      "grad_norm": 1.4406720399856567,
      "learning_rate": 9.972609476841367e-05,
      "loss": 0.5091,
      "step": 26
    },
    {
      "epoch": 0.002556697126083045,
      "grad_norm": 0.7583168745040894,
      "learning_rate": 9.962730758206611e-05,
      "loss": 0.3653,
      "step": 27
    },
    {
      "epoch": 0.002651389612234269,
      "grad_norm": 0.706031322479248,
      "learning_rate": 9.951340343707852e-05,
      "loss": 0.4258,
      "step": 28
    },
    {
      "epoch": 0.0027460820983854933,
      "grad_norm": 0.5467961430549622,
      "learning_rate": 9.938441702975689e-05,
      "loss": 0.3124,
      "step": 29
    },
    {
      "epoch": 0.002840774584536717,
      "grad_norm": 1.217199444770813,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.482,
      "step": 30
    },
    {
      "epoch": 0.002935467070687941,
      "grad_norm": 0.8480075597763062,
      "learning_rate": 9.908135917238321e-05,
      "loss": 0.5182,
      "step": 31
    },
    {
      "epoch": 0.0030301595568391648,
      "grad_norm": 0.8619151711463928,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.3981,
      "step": 32
    },
    {
      "epoch": 0.0031248520429903886,
      "grad_norm": 0.684477686882019,
      "learning_rate": 9.871850323926177e-05,
      "loss": 0.3356,
      "step": 33
    },
    {
      "epoch": 0.0032195445291416124,
      "grad_norm": 0.6660991311073303,
      "learning_rate": 9.851478631379982e-05,
      "loss": 0.3082,
      "step": 34
    },
    {
      "epoch": 0.0033142370152928367,
      "grad_norm": 0.6820000410079956,
      "learning_rate": 9.829629131445342e-05,
      "loss": 0.2841,
      "step": 35
    },
    {
      "epoch": 0.0034089295014440605,
      "grad_norm": 1.5081778764724731,
      "learning_rate": 9.806308479691595e-05,
      "loss": 0.4676,
      "step": 36
    },
    {
      "epoch": 0.0035036219875952844,
      "grad_norm": 2.342919111251831,
      "learning_rate": 9.781523779815179e-05,
      "loss": 0.4332,
      "step": 37
    },
    {
      "epoch": 0.003598314473746508,
      "grad_norm": 0.7415688633918762,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.4409,
      "step": 38
    },
    {
      "epoch": 0.003693006959897732,
      "grad_norm": 0.5942909717559814,
      "learning_rate": 9.727592877996585e-05,
      "loss": 0.2993,
      "step": 39
    },
    {
      "epoch": 0.003787699446048956,
      "grad_norm": 0.581596851348877,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.2952,
      "step": 40
    },
    {
      "epoch": 0.00388239193220018,
      "grad_norm": 0.924413800239563,
      "learning_rate": 9.667902132486009e-05,
      "loss": 0.3663,
      "step": 41
    },
    {
      "epoch": 0.0039770844183514035,
      "grad_norm": 0.7919589281082153,
      "learning_rate": 9.635919272833938e-05,
      "loss": 0.3553,
      "step": 42
    },
    {
      "epoch": 0.004071776904502627,
      "grad_norm": 0.5328347086906433,
      "learning_rate": 9.602524267262203e-05,
      "loss": 0.3145,
      "step": 43
    },
    {
      "epoch": 0.004166469390653851,
      "grad_norm": 0.7753825783729553,
      "learning_rate": 9.567727288213005e-05,
      "loss": 0.4335,
      "step": 44
    },
    {
      "epoch": 0.004261161876805076,
      "grad_norm": 0.8243319988250732,
      "learning_rate": 9.53153893518325e-05,
      "loss": 0.3628,
      "step": 45
    },
    {
      "epoch": 0.0043558543629563,
      "grad_norm": 0.6980270147323608,
      "learning_rate": 9.493970231495835e-05,
      "loss": 0.4811,
      "step": 46
    },
    {
      "epoch": 0.0044505468491075235,
      "grad_norm": 0.7640014290809631,
      "learning_rate": 9.45503262094184e-05,
      "loss": 0.3831,
      "step": 47
    },
    {
      "epoch": 0.004545239335258747,
      "grad_norm": 0.9565992951393127,
      "learning_rate": 9.414737964294636e-05,
      "loss": 0.437,
      "step": 48
    },
    {
      "epoch": 0.004639931821409971,
      "grad_norm": 0.6740021705627441,
      "learning_rate": 9.373098535696979e-05,
      "loss": 0.3222,
      "step": 49
    },
    {
      "epoch": 0.004734624307561195,
      "grad_norm": 1.1682296991348267,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.5101,
      "step": 50
    },
    {
      "epoch": 0.004734624307561195,
      "eval_loss": 0.3515163064002991,
      "eval_runtime": 1379.1591,
      "eval_samples_per_second": 12.897,
      "eval_steps_per_second": 6.449,
      "step": 50
    },
    {
      "epoch": 0.004829316793712419,
      "grad_norm": 0.6039596796035767,
      "learning_rate": 9.285836503510562e-05,
      "loss": 0.2464,
      "step": 51
    },
    {
      "epoch": 0.004924009279863643,
      "grad_norm": 0.7683883905410767,
      "learning_rate": 9.24024048078213e-05,
      "loss": 0.2204,
      "step": 52
    },
    {
      "epoch": 0.0050187017660148665,
      "grad_norm": 0.7136397361755371,
      "learning_rate": 9.193352839727121e-05,
      "loss": 0.3258,
      "step": 53
    },
    {
      "epoch": 0.00511339425216609,
      "grad_norm": 0.6061449646949768,
      "learning_rate": 9.145187862775209e-05,
      "loss": 0.3261,
      "step": 54
    },
    {
      "epoch": 0.005208086738317314,
      "grad_norm": 0.5305026769638062,
      "learning_rate": 9.09576022144496e-05,
      "loss": 0.4383,
      "step": 55
    },
    {
      "epoch": 0.005302779224468538,
      "grad_norm": 0.4076201319694519,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.2777,
      "step": 56
    },
    {
      "epoch": 0.005397471710619763,
      "grad_norm": 0.4348657727241516,
      "learning_rate": 8.993177550236464e-05,
      "loss": 0.3071,
      "step": 57
    },
    {
      "epoch": 0.0054921641967709866,
      "grad_norm": 0.4837627708911896,
      "learning_rate": 8.940053768033609e-05,
      "loss": 0.2998,
      "step": 58
    },
    {
      "epoch": 0.00558685668292221,
      "grad_norm": 0.5451851487159729,
      "learning_rate": 8.885729807284856e-05,
      "loss": 0.3282,
      "step": 59
    },
    {
      "epoch": 0.005681549169073434,
      "grad_norm": 0.8234379291534424,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.3596,
      "step": 60
    },
    {
      "epoch": 0.005776241655224658,
      "grad_norm": 0.37643444538116455,
      "learning_rate": 8.773547901113862e-05,
      "loss": 0.2947,
      "step": 61
    },
    {
      "epoch": 0.005870934141375882,
      "grad_norm": 0.4549543261528015,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.4284,
      "step": 62
    },
    {
      "epoch": 0.005965626627527106,
      "grad_norm": 0.43117886781692505,
      "learning_rate": 8.656768508095853e-05,
      "loss": 0.3467,
      "step": 63
    },
    {
      "epoch": 0.0060603191136783295,
      "grad_norm": 0.5648695230484009,
      "learning_rate": 8.596699001693255e-05,
      "loss": 0.3844,
      "step": 64
    },
    {
      "epoch": 0.006155011599829553,
      "grad_norm": 0.3704398274421692,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.257,
      "step": 65
    },
    {
      "epoch": 0.006249704085980777,
      "grad_norm": 0.3764655292034149,
      "learning_rate": 8.473291852294987e-05,
      "loss": 0.3198,
      "step": 66
    },
    {
      "epoch": 0.006344396572132001,
      "grad_norm": 0.4212140738964081,
      "learning_rate": 8.409991800312493e-05,
      "loss": 0.2658,
      "step": 67
    },
    {
      "epoch": 0.006439089058283225,
      "grad_norm": 0.3406229615211487,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.2289,
      "step": 68
    },
    {
      "epoch": 0.006533781544434449,
      "grad_norm": 0.4278174340724945,
      "learning_rate": 8.280295144952536e-05,
      "loss": 0.3288,
      "step": 69
    },
    {
      "epoch": 0.006628474030585673,
      "grad_norm": 0.5700973868370056,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.3443,
      "step": 70
    },
    {
      "epoch": 0.006723166516736897,
      "grad_norm": 0.3512937128543854,
      "learning_rate": 8.146601955249188e-05,
      "loss": 0.2637,
      "step": 71
    },
    {
      "epoch": 0.006817859002888121,
      "grad_norm": 0.4744971990585327,
      "learning_rate": 8.07830737662829e-05,
      "loss": 0.3999,
      "step": 72
    },
    {
      "epoch": 0.006912551489039345,
      "grad_norm": 0.38957759737968445,
      "learning_rate": 8.009075115760243e-05,
      "loss": 0.3128,
      "step": 73
    },
    {
      "epoch": 0.007007243975190569,
      "grad_norm": 0.4311201274394989,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.3424,
      "step": 74
    },
    {
      "epoch": 0.0071019364613417926,
      "grad_norm": 0.42028185725212097,
      "learning_rate": 7.86788218175523e-05,
      "loss": 0.2738,
      "step": 75
    },
    {
      "epoch": 0.007196628947493016,
      "grad_norm": 0.8206042647361755,
      "learning_rate": 7.795964517353735e-05,
      "loss": 0.3201,
      "step": 76
    },
    {
      "epoch": 0.00729132143364424,
      "grad_norm": 0.5771775841712952,
      "learning_rate": 7.723195175075136e-05,
      "loss": 0.2649,
      "step": 77
    },
    {
      "epoch": 0.007386013919795464,
      "grad_norm": 0.5105963945388794,
      "learning_rate": 7.649596321166024e-05,
      "loss": 0.2409,
      "step": 78
    },
    {
      "epoch": 0.007480706405946688,
      "grad_norm": 0.6381076574325562,
      "learning_rate": 7.575190374550272e-05,
      "loss": 0.3571,
      "step": 79
    },
    {
      "epoch": 0.007575398892097912,
      "grad_norm": 0.6105549335479736,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.324,
      "step": 80
    },
    {
      "epoch": 0.0076700913782491355,
      "grad_norm": 0.5321648716926575,
      "learning_rate": 7.424048101231686e-05,
      "loss": 0.2994,
      "step": 81
    },
    {
      "epoch": 0.00776478386440036,
      "grad_norm": 0.5671144723892212,
      "learning_rate": 7.347357813929454e-05,
      "loss": 0.2814,
      "step": 82
    },
    {
      "epoch": 0.007859476350551583,
      "grad_norm": 0.46841391921043396,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.2263,
      "step": 83
    },
    {
      "epoch": 0.007954168836702807,
      "grad_norm": 0.5258737206459045,
      "learning_rate": 7.191855733945387e-05,
      "loss": 0.3163,
      "step": 84
    },
    {
      "epoch": 0.008048861322854031,
      "grad_norm": 0.5847420692443848,
      "learning_rate": 7.113091308703498e-05,
      "loss": 0.3243,
      "step": 85
    },
    {
      "epoch": 0.008143553809005255,
      "grad_norm": 0.3755183517932892,
      "learning_rate": 7.033683215379002e-05,
      "loss": 0.2863,
      "step": 86
    },
    {
      "epoch": 0.008238246295156479,
      "grad_norm": 0.5984253287315369,
      "learning_rate": 6.953655642446368e-05,
      "loss": 0.3218,
      "step": 87
    },
    {
      "epoch": 0.008332938781307702,
      "grad_norm": 0.8427244424819946,
      "learning_rate": 6.873032967079561e-05,
      "loss": 0.4936,
      "step": 88
    },
    {
      "epoch": 0.008427631267458928,
      "grad_norm": 0.5401929020881653,
      "learning_rate": 6.7918397477265e-05,
      "loss": 0.3244,
      "step": 89
    },
    {
      "epoch": 0.008522323753610152,
      "grad_norm": 0.5147351026535034,
      "learning_rate": 6.710100716628344e-05,
      "loss": 0.3014,
      "step": 90
    },
    {
      "epoch": 0.008617016239761376,
      "grad_norm": 0.9610792398452759,
      "learning_rate": 6.627840772285784e-05,
      "loss": 0.3499,
      "step": 91
    },
    {
      "epoch": 0.0087117087259126,
      "grad_norm": 0.5880213975906372,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.3897,
      "step": 92
    },
    {
      "epoch": 0.008806401212063823,
      "grad_norm": 0.6239131689071655,
      "learning_rate": 6.461858523613684e-05,
      "loss": 0.3423,
      "step": 93
    },
    {
      "epoch": 0.008901093698215047,
      "grad_norm": 0.6501753926277161,
      "learning_rate": 6.378186779084995e-05,
      "loss": 0.2901,
      "step": 94
    },
    {
      "epoch": 0.008995786184366271,
      "grad_norm": 0.6097372174263,
      "learning_rate": 6.294095225512603e-05,
      "loss": 0.3358,
      "step": 95
    },
    {
      "epoch": 0.009090478670517495,
      "grad_norm": 0.48593440651893616,
      "learning_rate": 6.209609477998338e-05,
      "loss": 0.3299,
      "step": 96
    },
    {
      "epoch": 0.009185171156668719,
      "grad_norm": 0.5818943977355957,
      "learning_rate": 6.124755271719325e-05,
      "loss": 0.3705,
      "step": 97
    },
    {
      "epoch": 0.009279863642819942,
      "grad_norm": 0.5785600543022156,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 0.3194,
      "step": 98
    },
    {
      "epoch": 0.009374556128971166,
      "grad_norm": 0.5820888876914978,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 0.3804,
      "step": 99
    },
    {
      "epoch": 0.00946924861512239,
      "grad_norm": 0.8857993483543396,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.3766,
      "step": 100
    },
    {
      "epoch": 0.00946924861512239,
      "eval_loss": 0.3120051324367523,
      "eval_runtime": 1376.547,
      "eval_samples_per_second": 12.921,
      "eval_steps_per_second": 6.461,
      "step": 100
    },
    {
      "epoch": 0.009563941101273614,
      "grad_norm": 0.4451744258403778,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.2185,
      "step": 101
    },
    {
      "epoch": 0.009658633587424838,
      "grad_norm": 0.393392950296402,
      "learning_rate": 5.695865504800327e-05,
      "loss": 0.2326,
      "step": 102
    },
    {
      "epoch": 0.009753326073576062,
      "grad_norm": 0.3935139775276184,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 0.2376,
      "step": 103
    },
    {
      "epoch": 0.009848018559727285,
      "grad_norm": 0.49239593744277954,
      "learning_rate": 5.522642316338268e-05,
      "loss": 0.3127,
      "step": 104
    },
    {
      "epoch": 0.00994271104587851,
      "grad_norm": 0.5239997506141663,
      "learning_rate": 5.435778713738292e-05,
      "loss": 0.4293,
      "step": 105
    },
    {
      "epoch": 0.010037403532029733,
      "grad_norm": 0.4456406831741333,
      "learning_rate": 5.348782368720626e-05,
      "loss": 0.3016,
      "step": 106
    },
    {
      "epoch": 0.010132096018180957,
      "grad_norm": 0.40427473187446594,
      "learning_rate": 5.26167978121472e-05,
      "loss": 0.186,
      "step": 107
    },
    {
      "epoch": 0.01022678850433218,
      "grad_norm": 0.32894396781921387,
      "learning_rate": 5.174497483512506e-05,
      "loss": 0.2442,
      "step": 108
    },
    {
      "epoch": 0.010321480990483405,
      "grad_norm": 0.4474416971206665,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 0.2203,
      "step": 109
    },
    {
      "epoch": 0.010416173476634628,
      "grad_norm": 0.4535369873046875,
      "learning_rate": 5e-05,
      "loss": 0.329,
      "step": 110
    },
    {
      "epoch": 0.010510865962785852,
      "grad_norm": 0.566321074962616,
      "learning_rate": 4.912737967813583e-05,
      "loss": 0.3164,
      "step": 111
    },
    {
      "epoch": 0.010605558448937076,
      "grad_norm": 0.33763089776039124,
      "learning_rate": 4.825502516487497e-05,
      "loss": 0.2745,
      "step": 112
    },
    {
      "epoch": 0.0107002509350883,
      "grad_norm": 0.37318381667137146,
      "learning_rate": 4.738320218785281e-05,
      "loss": 0.2406,
      "step": 113
    },
    {
      "epoch": 0.010794943421239525,
      "grad_norm": 0.35014447569847107,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 0.2285,
      "step": 114
    },
    {
      "epoch": 0.01088963590739075,
      "grad_norm": 0.3987157642841339,
      "learning_rate": 4.564221286261709e-05,
      "loss": 0.2837,
      "step": 115
    },
    {
      "epoch": 0.010984328393541973,
      "grad_norm": 0.36429470777511597,
      "learning_rate": 4.477357683661734e-05,
      "loss": 0.2372,
      "step": 116
    },
    {
      "epoch": 0.011079020879693197,
      "grad_norm": 0.4331720769405365,
      "learning_rate": 4.390653282974264e-05,
      "loss": 0.3545,
      "step": 117
    },
    {
      "epoch": 0.01117371336584442,
      "grad_norm": 0.32597437500953674,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 0.2248,
      "step": 118
    },
    {
      "epoch": 0.011268405851995645,
      "grad_norm": 0.32949554920196533,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 0.2144,
      "step": 119
    },
    {
      "epoch": 0.011363098338146868,
      "grad_norm": 0.5105450749397278,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.4314,
      "step": 120
    },
    {
      "epoch": 0.011457790824298092,
      "grad_norm": 0.37223193049430847,
      "learning_rate": 4.045955023117276e-05,
      "loss": 0.2877,
      "step": 121
    },
    {
      "epoch": 0.011552483310449316,
      "grad_norm": 0.33164238929748535,
      "learning_rate": 3.960441545911204e-05,
      "loss": 0.2723,
      "step": 122
    },
    {
      "epoch": 0.01164717579660054,
      "grad_norm": 0.37629207968711853,
      "learning_rate": 3.875244728280676e-05,
      "loss": 0.2631,
      "step": 123
    },
    {
      "epoch": 0.011741868282751764,
      "grad_norm": 0.4204508364200592,
      "learning_rate": 3.790390522001662e-05,
      "loss": 0.2915,
      "step": 124
    },
    {
      "epoch": 0.011836560768902988,
      "grad_norm": 0.49447402358055115,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.3062,
      "step": 125
    },
    {
      "epoch": 0.011931253255054211,
      "grad_norm": 0.38985031843185425,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 0.3535,
      "step": 126
    },
    {
      "epoch": 0.012025945741205435,
      "grad_norm": 0.393546462059021,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 0.3109,
      "step": 127
    },
    {
      "epoch": 0.012120638227356659,
      "grad_norm": 0.4812166690826416,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.2909,
      "step": 128
    },
    {
      "epoch": 0.012215330713507883,
      "grad_norm": 0.4680134952068329,
      "learning_rate": 3.372159227714218e-05,
      "loss": 0.3586,
      "step": 129
    },
    {
      "epoch": 0.012310023199659107,
      "grad_norm": 0.30080264806747437,
      "learning_rate": 3.289899283371657e-05,
      "loss": 0.2044,
      "step": 130
    },
    {
      "epoch": 0.01240471568581033,
      "grad_norm": 0.3511880338191986,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 0.2577,
      "step": 131
    },
    {
      "epoch": 0.012499408171961554,
      "grad_norm": 0.482510507106781,
      "learning_rate": 3.12696703292044e-05,
      "loss": 0.3746,
      "step": 132
    },
    {
      "epoch": 0.012594100658112778,
      "grad_norm": 0.4897306263446808,
      "learning_rate": 3.046344357553632e-05,
      "loss": 0.2549,
      "step": 133
    },
    {
      "epoch": 0.012688793144264002,
      "grad_norm": 0.48270678520202637,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 0.2571,
      "step": 134
    },
    {
      "epoch": 0.012783485630415226,
      "grad_norm": 0.7660849094390869,
      "learning_rate": 2.886908691296504e-05,
      "loss": 0.3252,
      "step": 135
    },
    {
      "epoch": 0.01287817811656645,
      "grad_norm": 0.34716224670410156,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 0.2741,
      "step": 136
    },
    {
      "epoch": 0.012972870602717674,
      "grad_norm": 0.41306325793266296,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.2623,
      "step": 137
    },
    {
      "epoch": 0.013067563088868897,
      "grad_norm": 0.47318580746650696,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 0.309,
      "step": 138
    },
    {
      "epoch": 0.013162255575020123,
      "grad_norm": 0.537813663482666,
      "learning_rate": 2.575951898768315e-05,
      "loss": 0.3915,
      "step": 139
    },
    {
      "epoch": 0.013256948061171347,
      "grad_norm": 0.5747792720794678,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.3346,
      "step": 140
    },
    {
      "epoch": 0.01335164054732257,
      "grad_norm": 0.5642172694206238,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 0.3576,
      "step": 141
    },
    {
      "epoch": 0.013446333033473794,
      "grad_norm": 0.725172758102417,
      "learning_rate": 2.350403678833976e-05,
      "loss": 0.4239,
      "step": 142
    },
    {
      "epoch": 0.013541025519625018,
      "grad_norm": 0.6283183097839355,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 0.3412,
      "step": 143
    },
    {
      "epoch": 0.013635718005776242,
      "grad_norm": 0.5846209526062012,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 0.3374,
      "step": 144
    },
    {
      "epoch": 0.013730410491927466,
      "grad_norm": 0.7421755194664001,
      "learning_rate": 2.132117818244771e-05,
      "loss": 0.3344,
      "step": 145
    },
    {
      "epoch": 0.01382510297807869,
      "grad_norm": 0.5736775994300842,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.215,
      "step": 146
    },
    {
      "epoch": 0.013919795464229914,
      "grad_norm": 0.5754392743110657,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 0.2971,
      "step": 147
    },
    {
      "epoch": 0.014014487950381137,
      "grad_norm": 0.5859729051589966,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 0.3441,
      "step": 148
    },
    {
      "epoch": 0.014109180436532361,
      "grad_norm": 0.6326302886009216,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 0.3143,
      "step": 149
    },
    {
      "epoch": 0.014203872922683585,
      "grad_norm": 0.47283610701560974,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 0.3032,
      "step": 150
    },
    {
      "epoch": 0.014203872922683585,
      "eval_loss": 0.3101840913295746,
      "eval_runtime": 1378.4955,
      "eval_samples_per_second": 12.903,
      "eval_steps_per_second": 6.452,
      "step": 150
    },
    {
      "epoch": 0.014298565408834809,
      "grad_norm": 0.6273069381713867,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 0.2193,
      "step": 151
    },
    {
      "epoch": 0.014393257894986033,
      "grad_norm": 0.5617712140083313,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 0.2056,
      "step": 152
    },
    {
      "epoch": 0.014487950381137257,
      "grad_norm": 0.7072075009346008,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 0.3089,
      "step": 153
    },
    {
      "epoch": 0.01458264286728848,
      "grad_norm": 0.6112410426139832,
      "learning_rate": 1.526708147705013e-05,
      "loss": 0.2099,
      "step": 154
    },
    {
      "epoch": 0.014677335353439704,
      "grad_norm": 0.6010673642158508,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.4229,
      "step": 155
    },
    {
      "epoch": 0.014772027839590928,
      "grad_norm": 0.5914064049720764,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 0.1822,
      "step": 156
    },
    {
      "epoch": 0.014866720325742152,
      "grad_norm": 0.6139535307884216,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 0.2749,
      "step": 157
    },
    {
      "epoch": 0.014961412811893376,
      "grad_norm": 0.4070436954498291,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 0.2948,
      "step": 158
    },
    {
      "epoch": 0.0150561052980446,
      "grad_norm": 0.5271391868591309,
      "learning_rate": 1.22645209888614e-05,
      "loss": 0.255,
      "step": 159
    },
    {
      "epoch": 0.015150797784195823,
      "grad_norm": 0.5544912219047546,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 0.3203,
      "step": 160
    },
    {
      "epoch": 0.015245490270347047,
      "grad_norm": 0.4444845914840698,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 0.2705,
      "step": 161
    },
    {
      "epoch": 0.015340182756498271,
      "grad_norm": 0.3598558306694031,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 0.2842,
      "step": 162
    },
    {
      "epoch": 0.015434875242649495,
      "grad_norm": 0.42015647888183594,
      "learning_rate": 1.006822449763537e-05,
      "loss": 0.3233,
      "step": 163
    },
    {
      "epoch": 0.01552956772880072,
      "grad_norm": 0.33955568075180054,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.2131,
      "step": 164
    },
    {
      "epoch": 0.015624260214951944,
      "grad_norm": 0.3296820819377899,
      "learning_rate": 9.042397785550405e-06,
      "loss": 0.2057,
      "step": 165
    },
    {
      "epoch": 0.015718952701103166,
      "grad_norm": 0.40896186232566833,
      "learning_rate": 8.548121372247918e-06,
      "loss": 0.2299,
      "step": 166
    },
    {
      "epoch": 0.01581364518725439,
      "grad_norm": 0.5911323428153992,
      "learning_rate": 8.066471602728803e-06,
      "loss": 0.2567,
      "step": 167
    },
    {
      "epoch": 0.015908337673405614,
      "grad_norm": 0.469912052154541,
      "learning_rate": 7.597595192178702e-06,
      "loss": 0.3262,
      "step": 168
    },
    {
      "epoch": 0.016003030159556838,
      "grad_norm": 0.3689268231391907,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 0.3411,
      "step": 169
    },
    {
      "epoch": 0.016097722645708062,
      "grad_norm": 0.5381249189376831,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.3265,
      "step": 170
    },
    {
      "epoch": 0.016192415131859286,
      "grad_norm": 0.42854657769203186,
      "learning_rate": 6.269014643030213e-06,
      "loss": 0.26,
      "step": 171
    },
    {
      "epoch": 0.01628710761801051,
      "grad_norm": 0.4198633134365082,
      "learning_rate": 5.852620357053651e-06,
      "loss": 0.2319,
      "step": 172
    },
    {
      "epoch": 0.016381800104161733,
      "grad_norm": 0.7672091126441956,
      "learning_rate": 5.449673790581611e-06,
      "loss": 0.3694,
      "step": 173
    },
    {
      "epoch": 0.016476492590312957,
      "grad_norm": 0.4732285737991333,
      "learning_rate": 5.060297685041659e-06,
      "loss": 0.3039,
      "step": 174
    },
    {
      "epoch": 0.01657118507646418,
      "grad_norm": 0.3932953178882599,
      "learning_rate": 4.684610648167503e-06,
      "loss": 0.2077,
      "step": 175
    },
    {
      "epoch": 0.016665877562615405,
      "grad_norm": 0.42570826411247253,
      "learning_rate": 4.322727117869951e-06,
      "loss": 0.2604,
      "step": 176
    },
    {
      "epoch": 0.016760570048766632,
      "grad_norm": 0.583980143070221,
      "learning_rate": 3.974757327377981e-06,
      "loss": 0.4246,
      "step": 177
    },
    {
      "epoch": 0.016855262534917856,
      "grad_norm": 0.3952184021472931,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 0.3048,
      "step": 178
    },
    {
      "epoch": 0.01694995502106908,
      "grad_norm": 0.46545296907424927,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 0.3136,
      "step": 179
    },
    {
      "epoch": 0.017044647507220304,
      "grad_norm": 0.5798347592353821,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 0.3032,
      "step": 180
    },
    {
      "epoch": 0.017139339993371527,
      "grad_norm": 0.4284912943840027,
      "learning_rate": 2.724071220034158e-06,
      "loss": 0.3,
      "step": 181
    },
    {
      "epoch": 0.01723403247952275,
      "grad_norm": 0.49665868282318115,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.384,
      "step": 182
    },
    {
      "epoch": 0.017328724965673975,
      "grad_norm": 0.4045867621898651,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 0.2759,
      "step": 183
    },
    {
      "epoch": 0.0174234174518252,
      "grad_norm": 0.38050130009651184,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 0.2523,
      "step": 184
    },
    {
      "epoch": 0.017518109937976423,
      "grad_norm": 0.5833321213722229,
      "learning_rate": 1.70370868554659e-06,
      "loss": 0.2836,
      "step": 185
    },
    {
      "epoch": 0.017612802424127647,
      "grad_norm": 0.45387473702430725,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 0.2927,
      "step": 186
    },
    {
      "epoch": 0.01770749491027887,
      "grad_norm": 0.47009631991386414,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 0.264,
      "step": 187
    },
    {
      "epoch": 0.017802187396430094,
      "grad_norm": 0.4835204482078552,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 0.1906,
      "step": 188
    },
    {
      "epoch": 0.017896879882581318,
      "grad_norm": 0.6437592506408691,
      "learning_rate": 9.186408276168013e-07,
      "loss": 0.331,
      "step": 189
    },
    {
      "epoch": 0.017991572368732542,
      "grad_norm": 0.5117820501327515,
      "learning_rate": 7.596123493895991e-07,
      "loss": 0.3278,
      "step": 190
    },
    {
      "epoch": 0.018086264854883766,
      "grad_norm": 0.5106391310691833,
      "learning_rate": 6.15582970243117e-07,
      "loss": 0.4103,
      "step": 191
    },
    {
      "epoch": 0.01818095734103499,
      "grad_norm": 0.5827416777610779,
      "learning_rate": 4.865965629214819e-07,
      "loss": 0.3457,
      "step": 192
    },
    {
      "epoch": 0.018275649827186213,
      "grad_norm": 0.40404656529426575,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 0.3005,
      "step": 193
    },
    {
      "epoch": 0.018370342313337437,
      "grad_norm": 0.5825130939483643,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 0.429,
      "step": 194
    },
    {
      "epoch": 0.01846503479948866,
      "grad_norm": 0.7986671328544617,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 0.379,
      "step": 195
    },
    {
      "epoch": 0.018559727285639885,
      "grad_norm": 0.9346861839294434,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 0.4103,
      "step": 196
    },
    {
      "epoch": 0.01865441977179111,
      "grad_norm": 0.7575134634971619,
      "learning_rate": 6.852326227130834e-08,
      "loss": 0.2874,
      "step": 197
    },
    {
      "epoch": 0.018749112257942332,
      "grad_norm": 0.6524055600166321,
      "learning_rate": 3.04586490452119e-08,
      "loss": 0.4532,
      "step": 198
    },
    {
      "epoch": 0.018843804744093556,
      "grad_norm": 0.813246488571167,
      "learning_rate": 7.615242180436522e-09,
      "loss": 0.4367,
      "step": 199
    },
    {
      "epoch": 0.01893849723024478,
      "grad_norm": 0.8081971406936646,
      "learning_rate": 0.0,
      "loss": 0.3835,
      "step": 200
    },
    {
      "epoch": 0.01893849723024478,
      "eval_loss": 0.2940025329589844,
      "eval_runtime": 1376.3415,
      "eval_samples_per_second": 12.923,
      "eval_steps_per_second": 6.462,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.660991548522496e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}