{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9991668980838657,
  "eval_steps": 500,
  "global_step": 1350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0022216051096917524,
      "grad_norm": 0.002830490469932556,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.6744,
      "step": 1
    },
    {
      "epoch": 0.004443210219383505,
      "grad_norm": 0.0013051991118118167,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.5043,
      "step": 2
    },
    {
      "epoch": 0.006664815329075257,
      "grad_norm": 0.0013274818193167448,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.7621,
      "step": 3
    },
    {
      "epoch": 0.00888642043876701,
      "grad_norm": 0.0017257591243833303,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.6256,
      "step": 4
    },
    {
      "epoch": 0.011108025548458762,
      "grad_norm": 0.001670735189691186,
      "learning_rate": 0.0001111111111111111,
      "loss": 0.5722,
      "step": 5
    },
    {
      "epoch": 0.013329630658150514,
      "grad_norm": 0.0013650484615936875,
      "learning_rate": 0.00013333333333333334,
      "loss": 0.7089,
      "step": 6
    },
    {
      "epoch": 0.015551235767842266,
      "grad_norm": 0.0022685532458126545,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.6669,
      "step": 7
    },
    {
      "epoch": 0.01777284087753402,
      "grad_norm": 0.0012487670173868537,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.6834,
      "step": 8
    },
    {
      "epoch": 0.01999444598722577,
      "grad_norm": 0.0018260080832988024,
      "learning_rate": 0.0002,
      "loss": 0.7538,
      "step": 9
    },
    {
      "epoch": 0.022216051096917523,
      "grad_norm": 0.0013124588876962662,
      "learning_rate": 0.0002222222222222222,
      "loss": 0.6298,
      "step": 10
    },
    {
      "epoch": 0.024437656206609277,
      "grad_norm": 0.0009908919455483556,
      "learning_rate": 0.0002444444444444445,
      "loss": 0.5524,
      "step": 11
    },
    {
      "epoch": 0.026659261316301027,
      "grad_norm": 0.0016819137381389737,
      "learning_rate": 0.0002666666666666667,
      "loss": 0.6242,
      "step": 12
    },
    {
      "epoch": 0.02888086642599278,
      "grad_norm": 0.0014569928171113133,
      "learning_rate": 0.0002888888888888889,
      "loss": 0.5705,
      "step": 13
    },
    {
      "epoch": 0.03110247153568453,
      "grad_norm": 0.002944767475128174,
      "learning_rate": 0.0003111111111111111,
      "loss": 0.7228,
      "step": 14
    },
    {
      "epoch": 0.03332407664537628,
      "grad_norm": 0.001788086025044322,
      "learning_rate": 0.0003333333333333333,
      "loss": 0.7305,
      "step": 15
    },
    {
      "epoch": 0.03554568175506804,
      "grad_norm": 0.0013934284215793014,
      "learning_rate": 0.00035555555555555557,
      "loss": 0.6049,
      "step": 16
    },
    {
      "epoch": 0.03776728686475979,
      "grad_norm": 0.0018239800119772553,
      "learning_rate": 0.00037777777777777777,
      "loss": 0.6171,
      "step": 17
    },
    {
      "epoch": 0.03998889197445154,
      "grad_norm": 0.0020568512845784426,
      "learning_rate": 0.0004,
      "loss": 0.7091,
      "step": 18
    },
    {
      "epoch": 0.042210497084143296,
      "grad_norm": 0.0022871694527566433,
      "learning_rate": 0.00042222222222222227,
      "loss": 0.8162,
      "step": 19
    },
    {
      "epoch": 0.044432102193835046,
      "grad_norm": 0.002156606176868081,
      "learning_rate": 0.0004444444444444444,
      "loss": 0.7374,
      "step": 20
    },
    {
      "epoch": 0.0466537073035268,
      "grad_norm": 0.001277489005587995,
      "learning_rate": 0.00046666666666666666,
      "loss": 0.5195,
      "step": 21
    },
    {
      "epoch": 0.048875312413218554,
      "grad_norm": 0.000715097296051681,
      "learning_rate": 0.000488888888888889,
      "loss": 0.5719,
      "step": 22
    },
    {
      "epoch": 0.051096917522910304,
      "grad_norm": 0.0021112209651619196,
      "learning_rate": 0.0005111111111111112,
      "loss": 0.6308,
      "step": 23
    },
    {
      "epoch": 0.053318522632602054,
      "grad_norm": 0.0011713097337633371,
      "learning_rate": 0.0005333333333333334,
      "loss": 0.6671,
      "step": 24
    },
    {
      "epoch": 0.055540127742293804,
      "grad_norm": 0.0019275946542620659,
      "learning_rate": 0.0005555555555555556,
      "loss": 0.6584,
      "step": 25
    },
    {
      "epoch": 0.05776173285198556,
      "grad_norm": 0.0022250341717153788,
      "learning_rate": 0.0005777777777777778,
      "loss": 0.789,
      "step": 26
    },
    {
      "epoch": 0.05998333796167731,
      "grad_norm": 0.0013691845815628767,
      "learning_rate": 0.0006000000000000001,
      "loss": 0.6083,
      "step": 27
    },
    {
      "epoch": 0.06220494307136906,
      "grad_norm": 0.0016656300285831094,
      "learning_rate": 0.0006222222222222223,
      "loss": 0.7864,
      "step": 28
    },
    {
      "epoch": 0.06442654818106082,
      "grad_norm": 0.001373667037114501,
      "learning_rate": 0.0006444444444444444,
      "loss": 0.7652,
      "step": 29
    },
    {
      "epoch": 0.06664815329075256,
      "grad_norm": 0.0013376525603234768,
      "learning_rate": 0.0006666666666666666,
      "loss": 0.6543,
      "step": 30
    },
    {
      "epoch": 0.06886975840044432,
      "grad_norm": 0.001769097289070487,
      "learning_rate": 0.000688888888888889,
      "loss": 0.6709,
      "step": 31
    },
    {
      "epoch": 0.07109136351013608,
      "grad_norm": 0.002042358508333564,
      "learning_rate": 0.0007111111111111111,
      "loss": 0.7638,
      "step": 32
    },
    {
      "epoch": 0.07331296861982782,
      "grad_norm": 0.0013311082730069757,
      "learning_rate": 0.0007333333333333333,
      "loss": 0.6428,
      "step": 33
    },
    {
      "epoch": 0.07553457372951958,
      "grad_norm": 0.0014501510886475444,
      "learning_rate": 0.0007555555555555555,
      "loss": 0.7086,
      "step": 34
    },
    {
      "epoch": 0.07775617883921133,
      "grad_norm": 0.0018582150805741549,
      "learning_rate": 0.0007777777777777777,
      "loss": 0.8002,
      "step": 35
    },
    {
      "epoch": 0.07997778394890308,
      "grad_norm": 0.0015540813328698277,
      "learning_rate": 0.0008,
      "loss": 0.5421,
      "step": 36
    },
    {
      "epoch": 0.08219938905859484,
      "grad_norm": 0.002326158108189702,
      "learning_rate": 0.0008222222222222222,
      "loss": 0.6808,
      "step": 37
    },
    {
      "epoch": 0.08442099416828659,
      "grad_norm": 0.002218228066340089,
      "learning_rate": 0.0008444444444444445,
      "loss": 0.5874,
      "step": 38
    },
    {
      "epoch": 0.08664259927797834,
      "grad_norm": 0.0012414696393534541,
      "learning_rate": 0.0008666666666666666,
      "loss": 0.7572,
      "step": 39
    },
    {
      "epoch": 0.08886420438767009,
      "grad_norm": 0.0017535301158204675,
      "learning_rate": 0.0008888888888888888,
      "loss": 0.7115,
      "step": 40
    },
    {
      "epoch": 0.09108580949736185,
      "grad_norm": 0.0016572384629398584,
      "learning_rate": 0.0009111111111111111,
      "loss": 0.7242,
      "step": 41
    },
    {
      "epoch": 0.0933074146070536,
      "grad_norm": 0.0014017591020092368,
      "learning_rate": 0.0009333333333333333,
      "loss": 0.7907,
      "step": 42
    },
    {
      "epoch": 0.09552901971674535,
      "grad_norm": 0.001873611006885767,
      "learning_rate": 0.0009555555555555556,
      "loss": 0.7169,
      "step": 43
    },
    {
      "epoch": 0.09775062482643711,
      "grad_norm": 0.002249490935355425,
      "learning_rate": 0.000977777777777778,
      "loss": 0.651,
      "step": 44
    },
    {
      "epoch": 0.09997222993612885,
      "grad_norm": 0.001455418299883604,
      "learning_rate": 0.001,
      "loss": 0.5291,
      "step": 45
    },
    {
      "epoch": 0.10219383504582061,
      "grad_norm": 0.0013861401239410043,
      "learning_rate": 0.0010222222222222223,
      "loss": 0.7166,
      "step": 46
    },
    {
      "epoch": 0.10441544015551235,
      "grad_norm": 0.0016446675872430205,
      "learning_rate": 0.0010444444444444444,
      "loss": 0.8524,
      "step": 47
    },
    {
      "epoch": 0.10663704526520411,
      "grad_norm": 0.0018337472574785352,
      "learning_rate": 0.0010666666666666667,
      "loss": 0.6347,
      "step": 48
    },
    {
      "epoch": 0.10885865037489587,
      "grad_norm": 0.0014166847104206681,
      "learning_rate": 0.001088888888888889,
      "loss": 0.6729,
      "step": 49
    },
    {
      "epoch": 0.11108025548458761,
      "grad_norm": 0.002324986970052123,
      "learning_rate": 0.0011111111111111111,
      "loss": 0.5649,
      "step": 50
    },
    {
      "epoch": 0.11330186059427937,
      "grad_norm": 0.0018736826023086905,
      "learning_rate": 0.0011333333333333334,
      "loss": 0.7072,
      "step": 51
    },
    {
      "epoch": 0.11552346570397112,
      "grad_norm": 0.001425831695087254,
      "learning_rate": 0.0011555555555555555,
      "loss": 0.5661,
      "step": 52
    },
    {
      "epoch": 0.11774507081366287,
      "grad_norm": 0.0012704561231657863,
      "learning_rate": 0.0011777777777777778,
      "loss": 0.65,
      "step": 53
    },
    {
      "epoch": 0.11996667592335462,
      "grad_norm": 0.001298760762438178,
      "learning_rate": 0.0012000000000000001,
      "loss": 0.6336,
      "step": 54
    },
    {
      "epoch": 0.12218828103304638,
      "grad_norm": 0.0010759654687717557,
      "learning_rate": 0.0012222222222222222,
      "loss": 0.5591,
      "step": 55
    },
    {
      "epoch": 0.12440988614273812,
      "grad_norm": 0.0019087704131379724,
      "learning_rate": 0.0012444444444444445,
      "loss": 0.6595,
      "step": 56
    },
    {
      "epoch": 0.12663149125242987,
      "grad_norm": 0.001623319461941719,
      "learning_rate": 0.0012666666666666666,
      "loss": 0.6856,
      "step": 57
    },
    {
      "epoch": 0.12885309636212164,
      "grad_norm": 0.001485670218244195,
      "learning_rate": 0.001288888888888889,
      "loss": 0.5978,
      "step": 58
    },
    {
      "epoch": 0.13107470147181338,
      "grad_norm": 0.001623534713871777,
      "learning_rate": 0.0013111111111111112,
      "loss": 0.5635,
      "step": 59
    },
    {
      "epoch": 0.13329630658150513,
      "grad_norm": 0.001306689577177167,
      "learning_rate": 0.0013333333333333333,
      "loss": 0.7234,
      "step": 60
    },
    {
      "epoch": 0.1355179116911969,
      "grad_norm": 0.002071897964924574,
      "learning_rate": 0.0013555555555555556,
      "loss": 0.7316,
      "step": 61
    },
    {
      "epoch": 0.13773951680088864,
      "grad_norm": 0.0016734000528231263,
      "learning_rate": 0.001377777777777778,
      "loss": 0.6618,
      "step": 62
    },
    {
      "epoch": 0.13996112191058038,
      "grad_norm": 0.0018130919197574258,
      "learning_rate": 0.0014,
      "loss": 0.6921,
      "step": 63
    },
    {
      "epoch": 0.14218272702027215,
      "grad_norm": 0.0018388992175459862,
      "learning_rate": 0.0014222222222222223,
      "loss": 0.7907,
      "step": 64
    },
    {
      "epoch": 0.1444043321299639,
      "grad_norm": 0.001330480445176363,
      "learning_rate": 0.0014444444444444444,
      "loss": 0.5721,
      "step": 65
    },
    {
      "epoch": 0.14662593723965564,
      "grad_norm": 0.0015514479018747807,
      "learning_rate": 0.0014666666666666667,
      "loss": 0.7752,
      "step": 66
    },
    {
      "epoch": 0.1488475423493474,
      "grad_norm": 0.0014846732374280691,
      "learning_rate": 0.001488888888888889,
      "loss": 0.9079,
      "step": 67
    },
    {
      "epoch": 0.15106914745903915,
      "grad_norm": 0.0014325291849672794,
      "learning_rate": 0.001511111111111111,
      "loss": 0.7094,
      "step": 68
    },
    {
      "epoch": 0.1532907525687309,
      "grad_norm": 0.0013538472121581435,
      "learning_rate": 0.0015333333333333332,
      "loss": 0.6732,
      "step": 69
    },
    {
      "epoch": 0.15551235767842267,
      "grad_norm": 0.002166144549846649,
      "learning_rate": 0.0015555555555555555,
      "loss": 0.5317,
      "step": 70
    },
    {
      "epoch": 0.1577339627881144,
      "grad_norm": 0.0011681554606184363,
      "learning_rate": 0.0015777777777777778,
      "loss": 0.5447,
      "step": 71
    },
    {
      "epoch": 0.15995556789780616,
      "grad_norm": 0.0016300322022289038,
      "learning_rate": 0.0016,
      "loss": 0.6186,
      "step": 72
    },
    {
      "epoch": 0.16217717300749793,
      "grad_norm": 0.0012133439304307103,
      "learning_rate": 0.0016222222222222222,
      "loss": 0.6517,
      "step": 73
    },
    {
      "epoch": 0.16439877811718967,
      "grad_norm": 0.0017300712643191218,
      "learning_rate": 0.0016444444444444445,
      "loss": 0.6562,
      "step": 74
    },
    {
      "epoch": 0.1666203832268814,
      "grad_norm": 0.0012843969743698835,
      "learning_rate": 0.0016666666666666668,
      "loss": 0.6749,
      "step": 75
    },
    {
      "epoch": 0.16884198833657318,
      "grad_norm": 0.001517774653621018,
      "learning_rate": 0.001688888888888889,
      "loss": 0.6396,
      "step": 76
    },
    {
      "epoch": 0.17106359344626493,
      "grad_norm": 0.0013772399397566915,
      "learning_rate": 0.0017111111111111114,
      "loss": 0.5603,
      "step": 77
    },
    {
      "epoch": 0.17328519855595667,
      "grad_norm": 0.0013864693464711308,
      "learning_rate": 0.0017333333333333333,
      "loss": 0.5805,
      "step": 78
    },
    {
      "epoch": 0.17550680366564844,
      "grad_norm": 0.0018722772365435958,
      "learning_rate": 0.0017555555555555556,
      "loss": 0.7825,
      "step": 79
    },
    {
      "epoch": 0.17772840877534019,
      "grad_norm": 0.0014643195318058133,
      "learning_rate": 0.0017777777777777776,
      "loss": 0.6691,
      "step": 80
    },
    {
      "epoch": 0.17995001388503193,
      "grad_norm": 0.001783651881851256,
      "learning_rate": 0.0018,
      "loss": 0.6402,
      "step": 81
    },
    {
      "epoch": 0.1821716189947237,
      "grad_norm": 0.001158166560344398,
      "learning_rate": 0.0018222222222222223,
      "loss": 0.555,
      "step": 82
    },
    {
      "epoch": 0.18439322410441544,
      "grad_norm": 0.0013353719841688871,
      "learning_rate": 0.0018444444444444446,
      "loss": 0.6524,
      "step": 83
    },
    {
      "epoch": 0.1866148292141072,
      "grad_norm": 0.002519479487091303,
      "learning_rate": 0.0018666666666666666,
      "loss": 0.6841,
      "step": 84
    },
    {
      "epoch": 0.18883643432379896,
      "grad_norm": 0.0019098934717476368,
      "learning_rate": 0.001888888888888889,
      "loss": 0.5673,
      "step": 85
    },
    {
      "epoch": 0.1910580394334907,
      "grad_norm": 0.0013814260018989444,
      "learning_rate": 0.0019111111111111113,
      "loss": 0.7098,
      "step": 86
    },
    {
      "epoch": 0.19327964454318244,
      "grad_norm": 0.0016817323630675673,
      "learning_rate": 0.0019333333333333336,
      "loss": 0.6082,
      "step": 87
    },
    {
      "epoch": 0.19550124965287421,
      "grad_norm": 0.0018059983849525452,
      "learning_rate": 0.001955555555555556,
      "loss": 0.5896,
      "step": 88
    },
    {
      "epoch": 0.19772285476256596,
      "grad_norm": 0.0019373574759811163,
      "learning_rate": 0.0019777777777777775,
      "loss": 0.7525,
      "step": 89
    },
    {
      "epoch": 0.1999444598722577,
      "grad_norm": 0.0014255450805649161,
      "learning_rate": 0.002,
      "loss": 0.6427,
      "step": 90
    },
    {
      "epoch": 0.20216606498194944,
      "grad_norm": 0.0015481573063880205,
      "learning_rate": 0.002022222222222222,
      "loss": 0.7504,
      "step": 91
    },
    {
      "epoch": 0.20438767009164122,
      "grad_norm": 0.0018919891444966197,
      "learning_rate": 0.0020444444444444447,
      "loss": 0.679,
      "step": 92
    },
    {
      "epoch": 0.20660927520133296,
      "grad_norm": 0.001393840298987925,
      "learning_rate": 0.0020666666666666667,
      "loss": 0.5829,
      "step": 93
    },
    {
      "epoch": 0.2088308803110247,
      "grad_norm": 0.002521130722016096,
      "learning_rate": 0.002088888888888889,
      "loss": 0.6741,
      "step": 94
    },
    {
      "epoch": 0.21105248542071647,
      "grad_norm": 0.001192085794173181,
      "learning_rate": 0.0021111111111111113,
      "loss": 0.612,
      "step": 95
    },
    {
      "epoch": 0.21327409053040822,
      "grad_norm": 0.0020134742371737957,
      "learning_rate": 0.0021333333333333334,
      "loss": 0.6554,
      "step": 96
    },
    {
      "epoch": 0.21549569564009996,
      "grad_norm": 0.001502550090663135,
      "learning_rate": 0.0021555555555555555,
      "loss": 0.6807,
      "step": 97
    },
    {
      "epoch": 0.21771730074979173,
      "grad_norm": 0.0012775326613336802,
      "learning_rate": 0.002177777777777778,
      "loss": 0.5654,
      "step": 98
    },
    {
      "epoch": 0.21993890585948347,
      "grad_norm": 0.0017310921102762222,
      "learning_rate": 0.0021999999999999997,
      "loss": 0.6411,
      "step": 99
    },
    {
      "epoch": 0.22216051096917522,
      "grad_norm": 0.0025989811401814222,
      "learning_rate": 0.0022222222222222222,
      "loss": 0.7062,
      "step": 100
    },
    {
      "epoch": 0.224382116078867,
      "grad_norm": 0.0010904415976256132,
      "learning_rate": 0.0022444444444444443,
      "loss": 0.6367,
      "step": 101
    },
    {
      "epoch": 0.22660372118855873,
      "grad_norm": 0.002046885434538126,
      "learning_rate": 0.002266666666666667,
      "loss": 0.5193,
      "step": 102
    },
    {
      "epoch": 0.22882532629825048,
      "grad_norm": 0.0019200810929760337,
      "learning_rate": 0.002288888888888889,
      "loss": 0.5122,
      "step": 103
    },
    {
      "epoch": 0.23104693140794225,
      "grad_norm": 0.002231223974376917,
      "learning_rate": 0.002311111111111111,
      "loss": 0.7042,
      "step": 104
    },
    {
      "epoch": 0.233268536517634,
      "grad_norm": 0.001883045770227909,
      "learning_rate": 0.0023333333333333335,
      "loss": 0.5673,
      "step": 105
    },
    {
      "epoch": 0.23549014162732573,
      "grad_norm": 0.0014234882546588778,
      "learning_rate": 0.0023555555555555556,
      "loss": 0.6498,
      "step": 106
    },
    {
      "epoch": 0.2377117467370175,
      "grad_norm": 0.0013277216348797083,
      "learning_rate": 0.002377777777777778,
      "loss": 0.644,
      "step": 107
    },
    {
      "epoch": 0.23993335184670925,
      "grad_norm": 0.0019941842183470726,
      "learning_rate": 0.0024000000000000002,
      "loss": 0.6425,
      "step": 108
    },
    {
      "epoch": 0.242154956956401,
      "grad_norm": 0.0017136182868853211,
      "learning_rate": 0.0024222222222222223,
      "loss": 0.6934,
      "step": 109
    },
    {
      "epoch": 0.24437656206609276,
      "grad_norm": 0.0018905687611550093,
      "learning_rate": 0.0024444444444444444,
      "loss": 0.6896,
      "step": 110
    },
    {
      "epoch": 0.2465981671757845,
      "grad_norm": 0.0024479280691593885,
      "learning_rate": 0.0024666666666666665,
      "loss": 0.6073,
      "step": 111
    },
    {
      "epoch": 0.24881977228547625,
      "grad_norm": 0.00235267193056643,
      "learning_rate": 0.002488888888888889,
      "loss": 0.716,
      "step": 112
    },
    {
      "epoch": 0.251041377395168,
      "grad_norm": 0.0016832964029163122,
      "learning_rate": 0.002511111111111111,
      "loss": 0.616,
      "step": 113
    },
    {
      "epoch": 0.25326298250485973,
      "grad_norm": 0.001741166110150516,
      "learning_rate": 0.002533333333333333,
      "loss": 0.6916,
      "step": 114
    },
    {
      "epoch": 0.2554845876145515,
      "grad_norm": 0.0035528475418686867,
      "learning_rate": 0.0025555555555555557,
      "loss": 0.5828,
      "step": 115
    },
    {
      "epoch": 0.2577061927242433,
      "grad_norm": 0.0017036315985023975,
      "learning_rate": 0.002577777777777778,
      "loss": 0.7605,
      "step": 116
    },
    {
      "epoch": 0.259927797833935,
      "grad_norm": 0.0015289601869881153,
      "learning_rate": 0.0026000000000000003,
      "loss": 0.6244,
      "step": 117
    },
    {
      "epoch": 0.26214940294362676,
      "grad_norm": 0.002781358314678073,
      "learning_rate": 0.0026222222222222224,
      "loss": 0.6295,
      "step": 118
    },
    {
      "epoch": 0.26437100805331853,
      "grad_norm": 0.0013995033223181963,
      "learning_rate": 0.0026444444444444445,
      "loss": 0.6083,
      "step": 119
    },
    {
      "epoch": 0.26659261316301025,
      "grad_norm": 0.0064442637376487255,
      "learning_rate": 0.0026666666666666666,
      "loss": 0.9319,
      "step": 120
    },
    {
      "epoch": 0.268814218272702,
      "grad_norm": 0.002406611107289791,
      "learning_rate": 0.0026888888888888887,
      "loss": 0.8775,
      "step": 121
    },
    {
      "epoch": 0.2710358233823938,
      "grad_norm": 0.002220314694568515,
      "learning_rate": 0.002711111111111111,
      "loss": 0.6365,
      "step": 122
    },
    {
      "epoch": 0.2732574284920855,
      "grad_norm": 0.003288067877292633,
      "learning_rate": 0.0027333333333333333,
      "loss": 0.703,
      "step": 123
    },
    {
      "epoch": 0.2754790336017773,
      "grad_norm": 0.0018617436289787292,
      "learning_rate": 0.002755555555555556,
      "loss": 0.6479,
      "step": 124
    },
    {
      "epoch": 0.27770063871146905,
      "grad_norm": 0.002223158022388816,
      "learning_rate": 0.002777777777777778,
      "loss": 0.5834,
      "step": 125
    },
    {
      "epoch": 0.27992224382116077,
      "grad_norm": 0.002752475207671523,
      "learning_rate": 0.0028,
      "loss": 0.6261,
      "step": 126
    },
    {
      "epoch": 0.28214384893085254,
      "grad_norm": 0.002583549590781331,
      "learning_rate": 0.0028222222222222225,
      "loss": 0.6627,
      "step": 127
    },
    {
      "epoch": 0.2843654540405443,
      "grad_norm": 0.003046439029276371,
      "learning_rate": 0.0028444444444444446,
      "loss": 0.6843,
      "step": 128
    },
    {
      "epoch": 0.286587059150236,
      "grad_norm": 0.0020824731327593327,
      "learning_rate": 0.0028666666666666667,
      "loss": 0.6444,
      "step": 129
    },
    {
      "epoch": 0.2888086642599278,
      "grad_norm": 0.002808007877320051,
      "learning_rate": 0.0028888888888888888,
      "loss": 0.6429,
      "step": 130
    },
    {
      "epoch": 0.29103026936961957,
      "grad_norm": 0.0021083897445350885,
      "learning_rate": 0.002911111111111111,
      "loss": 0.7279,
      "step": 131
    },
    {
      "epoch": 0.2932518744793113,
      "grad_norm": 0.0023642443120479584,
      "learning_rate": 0.0029333333333333334,
      "loss": 0.6642,
      "step": 132
    },
    {
      "epoch": 0.29547347958900305,
      "grad_norm": 0.002401075093075633,
      "learning_rate": 0.0029555555555555555,
      "loss": 0.6309,
      "step": 133
    },
    {
      "epoch": 0.2976950846986948,
      "grad_norm": 0.0029004139360040426,
      "learning_rate": 0.002977777777777778,
      "loss": 0.7236,
      "step": 134
    },
    {
      "epoch": 0.29991668980838654,
      "grad_norm": 0.003024290781468153,
      "learning_rate": 0.003,
      "loss": 0.7847,
      "step": 135
    },
    {
      "epoch": 0.3021382949180783,
      "grad_norm": 0.004532734397798777,
      "learning_rate": 0.0029975308641975312,
      "loss": 0.6109,
      "step": 136
    },
    {
      "epoch": 0.3043599000277701,
      "grad_norm": 0.0032856380566954613,
      "learning_rate": 0.0029950617283950615,
      "loss": 0.704,
      "step": 137
    },
    {
      "epoch": 0.3065815051374618,
      "grad_norm": 0.0034370562061667442,
      "learning_rate": 0.0029925925925925927,
      "loss": 0.7568,
      "step": 138
    },
    {
      "epoch": 0.30880311024715357,
      "grad_norm": 0.002310090931132436,
      "learning_rate": 0.0029901234567901234,
      "loss": 0.7956,
      "step": 139
    },
    {
      "epoch": 0.31102471535684534,
      "grad_norm": 0.002518034540116787,
      "learning_rate": 0.0029876543209876546,
      "loss": 0.6315,
      "step": 140
    },
    {
      "epoch": 0.31324632046653705,
      "grad_norm": 0.002741769189015031,
      "learning_rate": 0.002985185185185185,
      "loss": 0.5235,
      "step": 141
    },
    {
      "epoch": 0.3154679255762288,
      "grad_norm": 0.002382303588092327,
      "learning_rate": 0.002982716049382716,
      "loss": 0.6484,
      "step": 142
    },
    {
      "epoch": 0.3176895306859206,
      "grad_norm": 0.0025453900452703238,
      "learning_rate": 0.0029802469135802472,
      "loss": 0.5183,
      "step": 143
    },
    {
      "epoch": 0.3199111357956123,
      "grad_norm": 0.002527153817936778,
      "learning_rate": 0.002977777777777778,
      "loss": 0.8045,
      "step": 144
    },
    {
      "epoch": 0.3221327409053041,
      "grad_norm": 0.0027810451574623585,
      "learning_rate": 0.0029753086419753087,
      "loss": 0.6737,
      "step": 145
    },
    {
      "epoch": 0.32435434601499585,
      "grad_norm": 0.003914841450750828,
      "learning_rate": 0.0029728395061728394,
      "loss": 0.6568,
      "step": 146
    },
    {
      "epoch": 0.32657595112468757,
      "grad_norm": 0.0025024593342095613,
      "learning_rate": 0.0029703703703703706,
      "loss": 0.5706,
      "step": 147
    },
    {
      "epoch": 0.32879755623437934,
      "grad_norm": 0.003010642249137163,
      "learning_rate": 0.0029679012345679013,
      "loss": 0.6303,
      "step": 148
    },
    {
      "epoch": 0.3310191613440711,
      "grad_norm": 0.003823331091552973,
      "learning_rate": 0.002965432098765432,
      "loss": 0.5775,
      "step": 149
    },
    {
      "epoch": 0.3332407664537628,
      "grad_norm": 0.003069646656513214,
      "learning_rate": 0.002962962962962963,
      "loss": 0.771,
      "step": 150
    },
    {
      "epoch": 0.3354623715634546,
      "grad_norm": 0.0031701356638222933,
      "learning_rate": 0.002960493827160494,
      "loss": 0.6726,
      "step": 151
    },
    {
      "epoch": 0.33768397667314637,
      "grad_norm": 0.0047783139161765575,
      "learning_rate": 0.0029580246913580247,
      "loss": 0.5175,
      "step": 152
    },
    {
      "epoch": 0.3399055817828381,
      "grad_norm": 0.002878882922232151,
      "learning_rate": 0.0029555555555555555,
      "loss": 0.66,
      "step": 153
    },
    {
      "epoch": 0.34212718689252986,
      "grad_norm": 0.002322252606973052,
      "learning_rate": 0.0029530864197530866,
      "loss": 0.5851,
      "step": 154
    },
    {
      "epoch": 0.3443487920022216,
      "grad_norm": 0.004187839105725288,
      "learning_rate": 0.0029506172839506174,
      "loss": 0.5594,
      "step": 155
    },
    {
      "epoch": 0.34657039711191334,
      "grad_norm": 0.0040303790010511875,
      "learning_rate": 0.002948148148148148,
      "loss": 0.7593,
      "step": 156
    },
    {
      "epoch": 0.3487920022216051,
      "grad_norm": 0.004690828267484903,
      "learning_rate": 0.002945679012345679,
      "loss": 0.591,
      "step": 157
    },
    {
      "epoch": 0.3510136073312969,
      "grad_norm": 0.0035938743967562914,
      "learning_rate": 0.00294320987654321,
      "loss": 0.7527,
      "step": 158
    },
    {
      "epoch": 0.3532352124409886,
      "grad_norm": 0.0031942499335855246,
      "learning_rate": 0.0029407407407407407,
      "loss": 0.6939,
      "step": 159
    },
    {
      "epoch": 0.35545681755068037,
      "grad_norm": 0.00442874850705266,
      "learning_rate": 0.0029382716049382715,
      "loss": 0.4872,
      "step": 160
    },
    {
      "epoch": 0.35767842266037214,
      "grad_norm": 0.002961769700050354,
      "learning_rate": 0.0029358024691358026,
      "loss": 0.6919,
      "step": 161
    },
    {
      "epoch": 0.35990002777006386,
      "grad_norm": 0.004248150624334812,
      "learning_rate": 0.0029333333333333334,
      "loss": 0.5914,
      "step": 162
    },
    {
      "epoch": 0.36212163287975563,
      "grad_norm": 0.003655125154182315,
      "learning_rate": 0.002930864197530864,
      "loss": 0.6434,
      "step": 163
    },
    {
      "epoch": 0.3643432379894474,
      "grad_norm": 0.0027984960470348597,
      "learning_rate": 0.002928395061728395,
      "loss": 0.6544,
      "step": 164
    },
    {
      "epoch": 0.3665648430991391,
      "grad_norm": 0.003448409028351307,
      "learning_rate": 0.002925925925925926,
      "loss": 0.6135,
      "step": 165
    },
    {
      "epoch": 0.3687864482088309,
      "grad_norm": 0.0023583476431667805,
      "learning_rate": 0.002923456790123457,
      "loss": 0.5549,
      "step": 166
    },
    {
      "epoch": 0.37100805331852266,
      "grad_norm": 0.003774197306483984,
      "learning_rate": 0.0029209876543209875,
      "loss": 0.6155,
      "step": 167
    },
    {
      "epoch": 0.3732296584282144,
      "grad_norm": 0.0039054304361343384,
      "learning_rate": 0.0029185185185185186,
      "loss": 0.4939,
      "step": 168
    },
    {
      "epoch": 0.37545126353790614,
      "grad_norm": 0.0034814560785889626,
      "learning_rate": 0.0029160493827160494,
      "loss": 0.6343,
      "step": 169
    },
    {
      "epoch": 0.3776728686475979,
      "grad_norm": 0.002560647204518318,
      "learning_rate": 0.0029135802469135805,
      "loss": 0.5198,
      "step": 170
    },
    {
      "epoch": 0.37989447375728963,
      "grad_norm": 0.0023003127425909042,
      "learning_rate": 0.002911111111111111,
      "loss": 0.5431,
      "step": 171
    },
    {
      "epoch": 0.3821160788669814,
      "grad_norm": 0.003486617701128125,
      "learning_rate": 0.002908641975308642,
      "loss": 0.4754,
      "step": 172
    },
    {
      "epoch": 0.3843376839766732,
      "grad_norm": 0.0030546062625944614,
      "learning_rate": 0.002906172839506173,
      "loss": 0.6999,
      "step": 173
    },
    {
      "epoch": 0.3865592890863649,
      "grad_norm": 0.003570267930626869,
      "learning_rate": 0.002903703703703704,
      "loss": 0.6752,
      "step": 174
    },
    {
      "epoch": 0.38878089419605666,
      "grad_norm": 0.0036987727507948875,
      "learning_rate": 0.0029012345679012346,
      "loss": 0.6123,
      "step": 175
    },
    {
      "epoch": 0.39100249930574843,
      "grad_norm": 0.004126350861042738,
      "learning_rate": 0.0028987654320987654,
      "loss": 0.5922,
      "step": 176
    },
    {
      "epoch": 0.39322410441544015,
      "grad_norm": 0.0043579223565757275,
      "learning_rate": 0.0028962962962962966,
      "loss": 0.7249,
      "step": 177
    },
    {
      "epoch": 0.3954457095251319,
      "grad_norm": 0.005049738567322493,
      "learning_rate": 0.0028938271604938273,
      "loss": 0.7098,
      "step": 178
    },
    {
      "epoch": 0.3976673146348237,
      "grad_norm": 0.004803841933608055,
      "learning_rate": 0.002891358024691358,
      "loss": 0.6547,
      "step": 179
    },
    {
      "epoch": 0.3998889197445154,
      "grad_norm": 0.004663265775889158,
      "learning_rate": 0.0028888888888888888,
      "loss": 0.5621,
      "step": 180
    },
    {
      "epoch": 0.4021105248542072,
      "grad_norm": 0.005818499252200127,
      "learning_rate": 0.00288641975308642,
      "loss": 0.6772,
      "step": 181
    },
    {
      "epoch": 0.4043321299638989,
      "grad_norm": 0.003460573498159647,
      "learning_rate": 0.0028839506172839507,
      "loss": 0.4637,
      "step": 182
    },
    {
      "epoch": 0.40655373507359066,
      "grad_norm": 0.004111323039978743,
      "learning_rate": 0.0028814814814814814,
      "loss": 0.5993,
      "step": 183
    },
    {
      "epoch": 0.40877534018328243,
      "grad_norm": 0.0037442215252667665,
      "learning_rate": 0.0028790123456790126,
      "loss": 0.6081,
      "step": 184
    },
    {
      "epoch": 0.41099694529297415,
      "grad_norm": 0.005370614118874073,
      "learning_rate": 0.0028765432098765433,
      "loss": 0.628,
      "step": 185
    },
    {
      "epoch": 0.4132185504026659,
      "grad_norm": 0.009289095178246498,
      "learning_rate": 0.002874074074074074,
      "loss": 0.5611,
      "step": 186
    },
    {
      "epoch": 0.4154401555123577,
      "grad_norm": 0.013159370049834251,
      "learning_rate": 0.0028716049382716048,
      "loss": 0.6732,
      "step": 187
    },
    {
      "epoch": 0.4176617606220494,
      "grad_norm": 0.008524056524038315,
      "learning_rate": 0.002869135802469136,
      "loss": 0.5685,
      "step": 188
    },
    {
      "epoch": 0.4198833657317412,
      "grad_norm": 0.008815361186861992,
      "learning_rate": 0.0028666666666666667,
      "loss": 0.7353,
      "step": 189
    },
    {
      "epoch": 0.42210497084143295,
      "grad_norm": 0.005370002705603838,
      "learning_rate": 0.0028641975308641974,
      "loss": 0.5858,
      "step": 190
    },
    {
      "epoch": 0.42432657595112466,
      "grad_norm": 0.00529759656637907,
      "learning_rate": 0.0028617283950617286,
      "loss": 0.7153,
      "step": 191
    },
    {
      "epoch": 0.42654818106081643,
      "grad_norm": 0.004369831178337336,
      "learning_rate": 0.0028592592592592593,
      "loss": 0.7741,
      "step": 192
    },
    {
      "epoch": 0.4287697861705082,
      "grad_norm": 0.003865456208586693,
      "learning_rate": 0.00285679012345679,
      "loss": 0.6637,
      "step": 193
    },
    {
      "epoch": 0.4309913912801999,
      "grad_norm": 0.004272242542356253,
      "learning_rate": 0.0028543209876543208,
      "loss": 0.4771,
      "step": 194
    },
    {
      "epoch": 0.4332129963898917,
      "grad_norm": 0.003245055442675948,
      "learning_rate": 0.002851851851851852,
      "loss": 0.4521,
      "step": 195
    },
    {
      "epoch": 0.43543460149958346,
      "grad_norm": 0.004816419444978237,
      "learning_rate": 0.002849382716049383,
      "loss": 0.6017,
      "step": 196
    },
    {
      "epoch": 0.4376562066092752,
      "grad_norm": 0.004628610797226429,
      "learning_rate": 0.0028469135802469134,
      "loss": 0.616,
      "step": 197
    },
    {
      "epoch": 0.43987781171896695,
      "grad_norm": 0.004131810273975134,
      "learning_rate": 0.0028444444444444446,
      "loss": 0.5681,
      "step": 198
    },
    {
      "epoch": 0.4420994168286587,
      "grad_norm": 0.0029758147429674864,
      "learning_rate": 0.0028419753086419753,
      "loss": 0.5721,
      "step": 199
    },
    {
      "epoch": 0.44432102193835044,
      "grad_norm": 0.0033105132170021534,
      "learning_rate": 0.0028395061728395065,
      "loss": 0.5717,
      "step": 200
    },
    {
      "epoch": 0.4465426270480422,
      "grad_norm": 0.003180293133482337,
      "learning_rate": 0.002837037037037037,
      "loss": 0.5594,
      "step": 201
    },
    {
      "epoch": 0.448764232157734,
      "grad_norm": 0.004509236663579941,
      "learning_rate": 0.002834567901234568,
      "loss": 0.5528,
      "step": 202
    },
    {
      "epoch": 0.4509858372674257,
      "grad_norm": 0.0039048679172992706,
      "learning_rate": 0.002832098765432099,
      "loss": 0.6696,
      "step": 203
    },
    {
      "epoch": 0.45320744237711746,
      "grad_norm": 0.004375668242573738,
      "learning_rate": 0.00282962962962963,
      "loss": 0.5643,
      "step": 204
    },
    {
      "epoch": 0.45542904748680924,
      "grad_norm": 0.004228411708027124,
      "learning_rate": 0.0028271604938271606,
      "loss": 0.4618,
      "step": 205
    },
    {
      "epoch": 0.45765065259650095,
      "grad_norm": 0.00472232885658741,
      "learning_rate": 0.0028246913580246913,
      "loss": 0.6037,
      "step": 206
    },
    {
      "epoch": 0.4598722577061927,
      "grad_norm": 0.003651043400168419,
      "learning_rate": 0.0028222222222222225,
      "loss": 0.6719,
      "step": 207
    },
    {
      "epoch": 0.4620938628158845,
      "grad_norm": 0.00520613556727767,
      "learning_rate": 0.0028197530864197532,
      "loss": 0.6436,
      "step": 208
    },
    {
      "epoch": 0.4643154679255762,
      "grad_norm": 0.0054262257181108,
      "learning_rate": 0.002817283950617284,
      "loss": 0.5663,
      "step": 209
    },
    {
      "epoch": 0.466537073035268,
      "grad_norm": 0.002916304161772132,
      "learning_rate": 0.0028148148148148147,
      "loss": 0.6353,
      "step": 210
    },
    {
      "epoch": 0.46875867814495975,
      "grad_norm": 0.0056013804860413074,
      "learning_rate": 0.002812345679012346,
      "loss": 0.6574,
      "step": 211
    },
    {
      "epoch": 0.47098028325465147,
      "grad_norm": 0.004248346202075481,
      "learning_rate": 0.0028098765432098766,
      "loss": 0.7195,
      "step": 212
    },
    {
      "epoch": 0.47320188836434324,
      "grad_norm": 0.004659584257751703,
      "learning_rate": 0.0028074074074074073,
      "loss": 0.5662,
      "step": 213
    },
    {
      "epoch": 0.475423493474035,
      "grad_norm": 0.007387971039861441,
      "learning_rate": 0.0028049382716049385,
      "loss": 0.5926,
      "step": 214
    },
    {
      "epoch": 0.4776450985837267,
      "grad_norm": 0.005048746708780527,
      "learning_rate": 0.0028024691358024692,
      "loss": 0.6544,
      "step": 215
    },
    {
      "epoch": 0.4798667036934185,
      "grad_norm": 0.004469868261367083,
      "learning_rate": 0.0028,
      "loss": 0.6088,
      "step": 216
    },
    {
      "epoch": 0.48208830880311027,
      "grad_norm": 0.004091240931302309,
      "learning_rate": 0.0027975308641975307,
      "loss": 0.5936,
      "step": 217
    },
    {
      "epoch": 0.484309913912802,
      "grad_norm": 0.004576526582241058,
      "learning_rate": 0.002795061728395062,
      "loss": 0.6662,
      "step": 218
    },
    {
      "epoch": 0.48653151902249375,
      "grad_norm": 0.006232080049812794,
      "learning_rate": 0.0027925925925925926,
      "loss": 0.6338,
      "step": 219
    },
    {
      "epoch": 0.4887531241321855,
      "grad_norm": 0.004759920295327902,
      "learning_rate": 0.0027901234567901233,
      "loss": 0.5844,
      "step": 220
    },
    {
      "epoch": 0.49097472924187724,
      "grad_norm": 0.004427391104400158,
      "learning_rate": 0.0027876543209876545,
      "loss": 0.5888,
      "step": 221
    },
    {
      "epoch": 0.493196334351569,
      "grad_norm": 0.005065519362688065,
      "learning_rate": 0.0027851851851851852,
      "loss": 0.6999,
      "step": 222
    },
    {
      "epoch": 0.4954179394612608,
      "grad_norm": 0.005866500083357096,
      "learning_rate": 0.002782716049382716,
      "loss": 0.5718,
      "step": 223
    },
    {
      "epoch": 0.4976395445709525,
      "grad_norm": 0.0052678692154586315,
      "learning_rate": 0.0027802469135802467,
      "loss": 0.5274,
      "step": 224
    },
    {
      "epoch": 0.49986114968064427,
      "grad_norm": 0.0039049943443387747,
      "learning_rate": 0.002777777777777778,
      "loss": 0.7269,
      "step": 225
    },
    {
      "epoch": 0.502082754790336,
      "grad_norm": 0.003937671426683664,
      "learning_rate": 0.002775308641975309,
      "loss": 0.6606,
      "step": 226
    },
    {
      "epoch": 0.5043043599000278,
      "grad_norm": 0.002638050587847829,
      "learning_rate": 0.0027728395061728394,
      "loss": 0.6129,
      "step": 227
    },
    {
      "epoch": 0.5065259650097195,
      "grad_norm": 0.0044793980196118355,
      "learning_rate": 0.0027703703703703705,
      "loss": 0.6376,
      "step": 228
    },
    {
      "epoch": 0.5087475701194113,
      "grad_norm": 0.004454802256077528,
      "learning_rate": 0.0027679012345679013,
      "loss": 0.6064,
      "step": 229
    },
    {
      "epoch": 0.510969175229103,
      "grad_norm": 0.003144732676446438,
      "learning_rate": 0.0027654320987654324,
      "loss": 0.5875,
      "step": 230
    },
    {
      "epoch": 0.5131907803387947,
      "grad_norm": 0.004579763859510422,
      "learning_rate": 0.0027629629629629627,
      "loss": 0.7472,
      "step": 231
    },
    {
      "epoch": 0.5154123854484866,
      "grad_norm": 0.007608956657350063,
      "learning_rate": 0.002760493827160494,
      "loss": 0.5966,
      "step": 232
    },
    {
      "epoch": 0.5176339905581783,
      "grad_norm": 0.003314356319606304,
      "learning_rate": 0.0027580246913580246,
      "loss": 0.5678,
      "step": 233
    },
    {
      "epoch": 0.51985559566787,
      "grad_norm": 0.021426383405923843,
      "learning_rate": 0.002755555555555556,
      "loss": 0.8359,
      "step": 234
    },
    {
      "epoch": 0.5220772007775618,
      "grad_norm": 0.004511243663728237,
      "learning_rate": 0.0027530864197530865,
      "loss": 0.4909,
      "step": 235
    },
    {
      "epoch": 0.5242988058872535,
      "grad_norm": 0.00266853766515851,
      "learning_rate": 0.0027506172839506173,
      "loss": 0.6042,
      "step": 236
    },
    {
      "epoch": 0.5265204109969452,
      "grad_norm": 0.006429709494113922,
      "learning_rate": 0.0027481481481481484,
      "loss": 0.7027,
      "step": 237
    },
    {
      "epoch": 0.5287420161066371,
      "grad_norm": 0.003927881829440594,
      "learning_rate": 0.002745679012345679,
      "loss": 0.7546,
      "step": 238
    },
    {
      "epoch": 0.5309636212163288,
      "grad_norm": 0.0050257411785423756,
      "learning_rate": 0.00274320987654321,
      "loss": 0.7372,
      "step": 239
    },
    {
      "epoch": 0.5331852263260205,
      "grad_norm": 0.003318399889394641,
      "learning_rate": 0.0027407407407407406,
      "loss": 0.6836,
      "step": 240
    },
    {
      "epoch": 0.5354068314357123,
      "grad_norm": 0.005656864959746599,
      "learning_rate": 0.002738271604938272,
      "loss": 0.5766,
      "step": 241
    },
    {
      "epoch": 0.537628436545404,
      "grad_norm": 0.004331695381551981,
      "learning_rate": 0.0027358024691358025,
      "loss": 0.5942,
      "step": 242
    },
    {
      "epoch": 0.5398500416550958,
      "grad_norm": 0.003149270312860608,
      "learning_rate": 0.0027333333333333333,
      "loss": 0.5426,
      "step": 243
    },
    {
      "epoch": 0.5420716467647876,
      "grad_norm": 0.004050451330840588,
      "learning_rate": 0.0027308641975308644,
      "loss": 0.5257,
      "step": 244
    },
    {
      "epoch": 0.5442932518744793,
      "grad_norm": 0.0048174443654716015,
      "learning_rate": 0.002728395061728395,
      "loss": 0.569,
      "step": 245
    },
    {
      "epoch": 0.546514856984171,
      "grad_norm": 0.004108496475964785,
      "learning_rate": 0.002725925925925926,
      "loss": 0.7042,
      "step": 246
    },
    {
      "epoch": 0.5487364620938628,
      "grad_norm": 0.006572370883077383,
      "learning_rate": 0.0027234567901234566,
      "loss": 0.6698,
      "step": 247
    },
    {
      "epoch": 0.5509580672035546,
      "grad_norm": 0.0034175198525190353,
      "learning_rate": 0.002720987654320988,
      "loss": 0.7017,
      "step": 248
    },
    {
      "epoch": 0.5531796723132463,
      "grad_norm": 0.0031640417873859406,
      "learning_rate": 0.0027185185185185185,
      "loss": 0.5557,
      "step": 249
    },
    {
      "epoch": 0.5554012774229381,
      "grad_norm": 0.003084770869463682,
      "learning_rate": 0.0027160493827160493,
      "loss": 0.4013,
      "step": 250
    },
    {
      "epoch": 0.5576228825326298,
      "grad_norm": 0.004879522603005171,
      "learning_rate": 0.0027135802469135805,
      "loss": 0.6765,
      "step": 251
    },
    {
      "epoch": 0.5598444876423215,
      "grad_norm": 0.0038094199262559414,
      "learning_rate": 0.002711111111111111,
      "loss": 0.7209,
      "step": 252
    },
    {
      "epoch": 0.5620660927520134,
      "grad_norm": 0.004499315749853849,
      "learning_rate": 0.002708641975308642,
      "loss": 0.6623,
      "step": 253
    },
    {
      "epoch": 0.5642876978617051,
      "grad_norm": 0.005681379698216915,
      "learning_rate": 0.0027061728395061727,
      "loss": 0.6463,
      "step": 254
    },
    {
      "epoch": 0.5665093029713968,
      "grad_norm": 0.0031827313359826803,
      "learning_rate": 0.002703703703703704,
      "loss": 0.4868,
      "step": 255
    },
    {
      "epoch": 0.5687309080810886,
      "grad_norm": 0.002911441260948777,
      "learning_rate": 0.002701234567901235,
      "loss": 0.6855,
      "step": 256
    },
    {
      "epoch": 0.5709525131907803,
      "grad_norm": 0.002942469669505954,
      "learning_rate": 0.0026987654320987653,
      "loss": 0.6749,
      "step": 257
    },
    {
      "epoch": 0.573174118300472,
      "grad_norm": 0.0030430902261286974,
      "learning_rate": 0.0026962962962962965,
      "loss": 0.5328,
      "step": 258
    },
    {
      "epoch": 0.5753957234101639,
      "grad_norm": 0.0035275311674922705,
      "learning_rate": 0.002693827160493827,
      "loss": 0.8042,
      "step": 259
    },
    {
      "epoch": 0.5776173285198556,
      "grad_norm": 0.005831616465002298,
      "learning_rate": 0.0026913580246913584,
      "loss": 0.5116,
      "step": 260
    },
    {
      "epoch": 0.5798389336295473,
      "grad_norm": 0.00925903208553791,
      "learning_rate": 0.0026888888888888887,
      "loss": 0.6452,
      "step": 261
    },
    {
      "epoch": 0.5820605387392391,
      "grad_norm": 0.0031285895965993404,
      "learning_rate": 0.00268641975308642,
      "loss": 0.5717,
      "step": 262
    },
    {
      "epoch": 0.5842821438489308,
      "grad_norm": 0.004181864205747843,
      "learning_rate": 0.0026839506172839506,
      "loss": 0.608,
      "step": 263
    },
    {
      "epoch": 0.5865037489586226,
      "grad_norm": 0.02851380594074726,
      "learning_rate": 0.0026814814814814817,
      "loss": 0.6639,
      "step": 264
    },
    {
      "epoch": 0.5887253540683144,
      "grad_norm": 0.004196640104055405,
      "learning_rate": 0.0026790123456790125,
      "loss": 0.6573,
      "step": 265
    },
    {
      "epoch": 0.5909469591780061,
      "grad_norm": 0.002790750004351139,
      "learning_rate": 0.002676543209876543,
      "loss": 0.6721,
      "step": 266
    },
    {
      "epoch": 0.5931685642876978,
      "grad_norm": 0.004602896049618721,
      "learning_rate": 0.0026740740740740744,
      "loss": 0.6925,
      "step": 267
    },
    {
      "epoch": 0.5953901693973896,
      "grad_norm": 0.008625895716249943,
      "learning_rate": 0.0026716049382716047,
      "loss": 0.6228,
      "step": 268
    },
    {
      "epoch": 0.5976117745070814,
      "grad_norm": 0.00362372025847435,
      "learning_rate": 0.002669135802469136,
      "loss": 0.518,
      "step": 269
    },
    {
      "epoch": 0.5998333796167731,
      "grad_norm": 0.008692633360624313,
      "learning_rate": 0.0026666666666666666,
      "loss": 0.6194,
      "step": 270
    },
    {
      "epoch": 0.6020549847264649,
      "grad_norm": 0.015166512690484524,
      "learning_rate": 0.0026641975308641977,
      "loss": 0.7203,
      "step": 271
    },
    {
      "epoch": 0.6042765898361566,
      "grad_norm": 0.0038714068941771984,
      "learning_rate": 0.0026617283950617285,
      "loss": 0.5438,
      "step": 272
    },
    {
      "epoch": 0.6064981949458483,
      "grad_norm": 0.005590848624706268,
      "learning_rate": 0.002659259259259259,
      "loss": 0.5981,
      "step": 273
    },
    {
      "epoch": 0.6087198000555402,
      "grad_norm": 0.013241161592304707,
      "learning_rate": 0.0026567901234567904,
      "loss": 0.7574,
      "step": 274
    },
    {
      "epoch": 0.6109414051652319,
      "grad_norm": 0.004338677506893873,
      "learning_rate": 0.002654320987654321,
      "loss": 0.6665,
      "step": 275
    },
    {
      "epoch": 0.6131630102749236,
      "grad_norm": 0.00662162946537137,
      "learning_rate": 0.002651851851851852,
      "loss": 0.6267,
      "step": 276
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.004217423964291811,
      "learning_rate": 0.0026493827160493826,
      "loss": 0.6512,
      "step": 277
    },
    {
      "epoch": 0.6176062204943071,
      "grad_norm": 0.004670712631195784,
      "learning_rate": 0.0026469135802469138,
      "loss": 0.6363,
      "step": 278
    },
    {
      "epoch": 0.6198278256039988,
      "grad_norm": 0.004922980442643166,
      "learning_rate": 0.0026444444444444445,
      "loss": 0.6037,
      "step": 279
    },
    {
      "epoch": 0.6220494307136907,
      "grad_norm": 0.004932676907628775,
      "learning_rate": 0.0026419753086419752,
      "loss": 0.576,
      "step": 280
    },
    {
      "epoch": 0.6242710358233824,
      "grad_norm": 0.004741290118545294,
      "learning_rate": 0.0026395061728395064,
      "loss": 0.5563,
      "step": 281
    },
    {
      "epoch": 0.6264926409330741,
      "grad_norm": 0.00402566883713007,
      "learning_rate": 0.002637037037037037,
      "loss": 0.5417,
      "step": 282
    },
    {
      "epoch": 0.6287142460427659,
      "grad_norm": 0.003514640498906374,
      "learning_rate": 0.002634567901234568,
      "loss": 0.727,
      "step": 283
    },
    {
      "epoch": 0.6309358511524576,
      "grad_norm": 0.004528773948550224,
      "learning_rate": 0.0026320987654320986,
      "loss": 0.5701,
      "step": 284
    },
    {
      "epoch": 0.6331574562621494,
      "grad_norm": 0.004958092700690031,
      "learning_rate": 0.0026296296296296298,
      "loss": 0.6251,
      "step": 285
    },
    {
      "epoch": 0.6353790613718412,
      "grad_norm": 0.004396567586809397,
      "learning_rate": 0.0026271604938271605,
      "loss": 0.6076,
      "step": 286
    },
    {
      "epoch": 0.6376006664815329,
      "grad_norm": 0.004292050842195749,
      "learning_rate": 0.0026246913580246912,
      "loss": 0.5767,
      "step": 287
    },
    {
      "epoch": 0.6398222715912246,
      "grad_norm": 0.0037857245188206434,
      "learning_rate": 0.0026222222222222224,
      "loss": 0.6488,
      "step": 288
    },
    {
      "epoch": 0.6420438767009164,
      "grad_norm": 0.002987690968438983,
      "learning_rate": 0.002619753086419753,
      "loss": 0.5093,
      "step": 289
    },
    {
      "epoch": 0.6442654818106082,
      "grad_norm": 0.005551345180720091,
      "learning_rate": 0.002617283950617284,
      "loss": 0.7926,
      "step": 290
    },
    {
      "epoch": 0.6464870869202999,
      "grad_norm": 0.003886839607730508,
      "learning_rate": 0.0026148148148148146,
      "loss": 0.6786,
      "step": 291
    },
    {
      "epoch": 0.6487086920299917,
      "grad_norm": 0.0051884097047150135,
      "learning_rate": 0.0026123456790123458,
      "loss": 0.6151,
      "step": 292
    },
    {
      "epoch": 0.6509302971396834,
      "grad_norm": 0.005092722829431295,
      "learning_rate": 0.0026098765432098765,
      "loss": 0.7552,
      "step": 293
    },
    {
      "epoch": 0.6531519022493751,
      "grad_norm": 0.004387577064335346,
      "learning_rate": 0.0026074074074074072,
      "loss": 0.6128,
      "step": 294
    },
    {
      "epoch": 0.655373507359067,
      "grad_norm": 0.005776650737971067,
      "learning_rate": 0.0026049382716049384,
      "loss": 0.5585,
      "step": 295
    },
    {
      "epoch": 0.6575951124687587,
      "grad_norm": 0.004767559934407473,
      "learning_rate": 0.002602469135802469,
      "loss": 0.5365,
      "step": 296
    },
    {
      "epoch": 0.6598167175784504,
      "grad_norm": 0.002910695504397154,
      "learning_rate": 0.0026000000000000003,
      "loss": 0.5649,
      "step": 297
    },
    {
      "epoch": 0.6620383226881422,
      "grad_norm": 0.003558649681508541,
      "learning_rate": 0.0025975308641975306,
      "loss": 0.5774,
      "step": 298
    },
    {
      "epoch": 0.6642599277978339,
      "grad_norm": 0.003824816318228841,
      "learning_rate": 0.002595061728395062,
      "loss": 0.5732,
      "step": 299
    },
    {
      "epoch": 0.6664815329075257,
      "grad_norm": 0.0028791490476578474,
      "learning_rate": 0.0025925925925925925,
      "loss": 0.5018,
      "step": 300
    },
    {
      "epoch": 0.6687031380172175,
      "grad_norm": 0.0036279659252613783,
      "learning_rate": 0.0025901234567901237,
      "loss": 0.682,
      "step": 301
    },
    {
      "epoch": 0.6709247431269092,
      "grad_norm": 0.004511178005486727,
      "learning_rate": 0.0025876543209876544,
      "loss": 0.6445,
      "step": 302
    },
    {
      "epoch": 0.6731463482366009,
      "grad_norm": 0.00374900852330029,
      "learning_rate": 0.002585185185185185,
      "loss": 0.618,
      "step": 303
    },
    {
      "epoch": 0.6753679533462927,
      "grad_norm": 0.0064397756941616535,
      "learning_rate": 0.0025827160493827163,
      "loss": 0.5854,
      "step": 304
    },
    {
      "epoch": 0.6775895584559845,
      "grad_norm": 0.0034314116928726435,
      "learning_rate": 0.002580246913580247,
      "loss": 0.5673,
      "step": 305
    },
    {
      "epoch": 0.6798111635656762,
      "grad_norm": 0.0028468379750847816,
      "learning_rate": 0.002577777777777778,
      "loss": 0.557,
      "step": 306
    },
    {
      "epoch": 0.682032768675368,
      "grad_norm": 0.004024473484605551,
      "learning_rate": 0.0025753086419753085,
      "loss": 0.6834,
      "step": 307
    },
    {
      "epoch": 0.6842543737850597,
      "grad_norm": 0.00284021208062768,
      "learning_rate": 0.0025728395061728397,
      "loss": 0.5258,
      "step": 308
    },
    {
      "epoch": 0.6864759788947514,
      "grad_norm": 0.004184060264378786,
      "learning_rate": 0.0025703703703703704,
      "loss": 0.5752,
      "step": 309
    },
    {
      "epoch": 0.6886975840044433,
      "grad_norm": 0.00356755661778152,
      "learning_rate": 0.002567901234567901,
      "loss": 0.535,
      "step": 310
    },
    {
      "epoch": 0.690919189114135,
      "grad_norm": 0.002997822593897581,
      "learning_rate": 0.0025654320987654323,
      "loss": 0.5818,
      "step": 311
    },
    {
      "epoch": 0.6931407942238267,
      "grad_norm": 0.003147626994177699,
      "learning_rate": 0.002562962962962963,
      "loss": 0.5176,
      "step": 312
    },
    {
      "epoch": 0.6953623993335185,
      "grad_norm": 0.0036907889880239964,
      "learning_rate": 0.002560493827160494,
      "loss": 0.6186,
      "step": 313
    },
    {
      "epoch": 0.6975840044432102,
      "grad_norm": 0.004177046474069357,
      "learning_rate": 0.0025580246913580245,
      "loss": 0.6218,
      "step": 314
    },
    {
      "epoch": 0.6998056095529019,
      "grad_norm": 0.0035435082390904427,
      "learning_rate": 0.0025555555555555557,
      "loss": 0.7149,
      "step": 315
    },
    {
      "epoch": 0.7020272146625938,
      "grad_norm": 0.004030556883662939,
      "learning_rate": 0.0025530864197530864,
      "loss": 0.6261,
      "step": 316
    },
    {
      "epoch": 0.7042488197722855,
      "grad_norm": 0.003368956968188286,
      "learning_rate": 0.002550617283950617,
      "loss": 0.6349,
      "step": 317
    },
    {
      "epoch": 0.7064704248819772,
      "grad_norm": 0.0029223670717328787,
      "learning_rate": 0.0025481481481481483,
      "loss": 0.5547,
      "step": 318
    },
    {
      "epoch": 0.708692029991669,
      "grad_norm": 0.004348669201135635,
      "learning_rate": 0.002545679012345679,
      "loss": 0.5123,
      "step": 319
    },
    {
      "epoch": 0.7109136351013607,
      "grad_norm": 0.012978849932551384,
      "learning_rate": 0.00254320987654321,
      "loss": 0.6939,
      "step": 320
    },
    {
      "epoch": 0.7131352402110525,
      "grad_norm": 0.0044335960410535336,
      "learning_rate": 0.0025407407407407405,
      "loss": 0.5573,
      "step": 321
    },
    {
      "epoch": 0.7153568453207443,
      "grad_norm": 0.0029696300625801086,
      "learning_rate": 0.0025382716049382717,
      "loss": 0.6562,
      "step": 322
    },
    {
      "epoch": 0.717578450430436,
      "grad_norm": 0.003993246704339981,
      "learning_rate": 0.0025358024691358024,
      "loss": 0.6783,
      "step": 323
    },
    {
      "epoch": 0.7198000555401277,
      "grad_norm": 0.0035177140962332487,
      "learning_rate": 0.002533333333333333,
      "loss": 0.6443,
      "step": 324
    },
    {
      "epoch": 0.7220216606498195,
      "grad_norm": 0.006393967662006617,
      "learning_rate": 0.0025308641975308644,
      "loss": 0.5847,
      "step": 325
    },
    {
      "epoch": 0.7242432657595113,
      "grad_norm": 0.003029291285201907,
      "learning_rate": 0.002528395061728395,
      "loss": 0.3798,
      "step": 326
    },
    {
      "epoch": 0.726464870869203,
      "grad_norm": 0.0022584160324186087,
      "learning_rate": 0.0025259259259259263,
      "loss": 0.6006,
      "step": 327
    },
    {
      "epoch": 0.7286864759788948,
      "grad_norm": 0.0029388736002147198,
      "learning_rate": 0.0025234567901234566,
      "loss": 0.5932,
      "step": 328
    },
    {
      "epoch": 0.7309080810885865,
      "grad_norm": 0.0026504788547754288,
      "learning_rate": 0.0025209876543209877,
      "loss": 0.4713,
      "step": 329
    },
    {
      "epoch": 0.7331296861982782,
      "grad_norm": 0.002620173152536154,
      "learning_rate": 0.0025185185185185185,
      "loss": 0.5698,
      "step": 330
    },
    {
      "epoch": 0.7353512913079701,
      "grad_norm": 0.003549919929355383,
      "learning_rate": 0.0025160493827160496,
      "loss": 0.5552,
      "step": 331
    },
    {
      "epoch": 0.7375728964176618,
      "grad_norm": 0.003457101294770837,
      "learning_rate": 0.0025135802469135804,
      "loss": 0.6285,
      "step": 332
    },
    {
      "epoch": 0.7397945015273535,
      "grad_norm": 0.003464099019765854,
      "learning_rate": 0.002511111111111111,
      "loss": 0.7362,
      "step": 333
    },
    {
      "epoch": 0.7420161066370453,
      "grad_norm": 0.0033051518257707357,
      "learning_rate": 0.0025086419753086423,
      "loss": 0.6319,
      "step": 334
    },
    {
      "epoch": 0.744237711746737,
      "grad_norm": 0.003773927688598633,
      "learning_rate": 0.002506172839506173,
      "loss": 0.7711,
      "step": 335
    },
    {
      "epoch": 0.7464593168564287,
      "grad_norm": 0.0026653832755982876,
      "learning_rate": 0.0025037037037037037,
      "loss": 0.6425,
      "step": 336
    },
    {
      "epoch": 0.7486809219661206,
      "grad_norm": 0.002547910436987877,
      "learning_rate": 0.0025012345679012345,
      "loss": 0.5432,
      "step": 337
    },
    {
      "epoch": 0.7509025270758123,
      "grad_norm": 0.00405934639275074,
      "learning_rate": 0.0024987654320987656,
      "loss": 0.7786,
      "step": 338
    },
    {
      "epoch": 0.753124132185504,
      "grad_norm": 0.00281044514849782,
      "learning_rate": 0.0024962962962962964,
      "loss": 0.6023,
      "step": 339
    },
    {
      "epoch": 0.7553457372951958,
      "grad_norm": 0.002545573515817523,
      "learning_rate": 0.002493827160493827,
      "loss": 0.407,
      "step": 340
    },
    {
      "epoch": 0.7575673424048875,
      "grad_norm": 0.002559161279350519,
      "learning_rate": 0.0024913580246913583,
      "loss": 0.6799,
      "step": 341
    },
    {
      "epoch": 0.7597889475145793,
      "grad_norm": 0.003410926554352045,
      "learning_rate": 0.002488888888888889,
      "loss": 0.5536,
      "step": 342
    },
    {
      "epoch": 0.7620105526242711,
      "grad_norm": 0.002621614607051015,
      "learning_rate": 0.0024864197530864197,
      "loss": 0.5806,
      "step": 343
    },
    {
      "epoch": 0.7642321577339628,
      "grad_norm": 0.005297700874507427,
      "learning_rate": 0.0024839506172839505,
      "loss": 0.5808,
      "step": 344
    },
    {
      "epoch": 0.7664537628436545,
      "grad_norm": 0.003013307461515069,
      "learning_rate": 0.0024814814814814816,
      "loss": 0.6203,
      "step": 345
    },
    {
      "epoch": 0.7686753679533463,
      "grad_norm": 0.00272989459335804,
      "learning_rate": 0.0024790123456790124,
      "loss": 0.6735,
      "step": 346
    },
    {
      "epoch": 0.7708969730630381,
      "grad_norm": 0.004028683062642813,
      "learning_rate": 0.002476543209876543,
      "loss": 0.5465,
      "step": 347
    },
    {
      "epoch": 0.7731185781727298,
      "grad_norm": 0.0042002699337899685,
      "learning_rate": 0.0024740740740740743,
      "loss": 0.6278,
      "step": 348
    },
    {
      "epoch": 0.7753401832824216,
      "grad_norm": 0.0058557018637657166,
      "learning_rate": 0.002471604938271605,
      "loss": 0.5699,
      "step": 349
    },
    {
      "epoch": 0.7775617883921133,
      "grad_norm": 0.0032247588969767094,
      "learning_rate": 0.0024691358024691358,
      "loss": 0.558,
      "step": 350
    },
{ |
|
"epoch": 0.779783393501805, |
|
"grad_norm": 0.0032824466470628977, |
|
"learning_rate": 0.0024666666666666665, |
|
"loss": 0.5787, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.7820049986114969, |
|
"grad_norm": 0.0036916215904057026, |
|
"learning_rate": 0.0024641975308641977, |
|
"loss": 0.5193, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.7842266037211886, |
|
"grad_norm": 0.0034254384227097034, |
|
"learning_rate": 0.0024617283950617284, |
|
"loss": 0.6091, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.7864482088308803, |
|
"grad_norm": 0.0031547548715025187, |
|
"learning_rate": 0.002459259259259259, |
|
"loss": 0.5296, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.7886698139405721, |
|
"grad_norm": 0.005953786429017782, |
|
"learning_rate": 0.0024567901234567903, |
|
"loss": 0.5461, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7908914190502638, |
|
"grad_norm": 0.0031025484204292297, |
|
"learning_rate": 0.002454320987654321, |
|
"loss": 0.6348, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7931130241599555, |
|
"grad_norm": 0.0038411449640989304, |
|
"learning_rate": 0.002451851851851852, |
|
"loss": 0.5176, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7953346292696474, |
|
"grad_norm": 0.0034447757061570883, |
|
"learning_rate": 0.0024493827160493825, |
|
"loss": 0.5765, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7975562343793391, |
|
"grad_norm": 0.0037516497541218996, |
|
"learning_rate": 0.0024469135802469137, |
|
"loss": 0.7173, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7997778394890308, |
|
"grad_norm": 0.003684694878757, |
|
"learning_rate": 0.0024444444444444444, |
|
"loss": 0.5194, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.8019994445987225, |
|
"grad_norm": 0.006603560410439968, |
|
"learning_rate": 0.0024419753086419756, |
|
"loss": 0.6116, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.8042210497084143, |
|
"grad_norm": 0.003936069086194038, |
|
"learning_rate": 0.0024395061728395063, |
|
"loss": 0.6464, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.8064426548181061, |
|
"grad_norm": 0.007097030058503151, |
|
"learning_rate": 0.002437037037037037, |
|
"loss": 0.6659, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.8086642599277978, |
|
"grad_norm": 0.0037213300820440054, |
|
"learning_rate": 0.002434567901234568, |
|
"loss": 0.5492, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.8108858650374896, |
|
"grad_norm": 0.003655084175989032, |
|
"learning_rate": 0.002432098765432099, |
|
"loss": 0.5408, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.8131074701471813, |
|
"grad_norm": 0.0034437603317201138, |
|
"learning_rate": 0.0024296296296296297, |
|
"loss": 0.7443, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.815329075256873, |
|
"grad_norm": 0.007910266518592834, |
|
"learning_rate": 0.0024271604938271604, |
|
"loss": 0.6611, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.8175506803665649, |
|
"grad_norm": 0.00407488364726305, |
|
"learning_rate": 0.0024246913580246916, |
|
"loss": 0.4984, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.8197722854762566, |
|
"grad_norm": 0.004847107920795679, |
|
"learning_rate": 0.0024222222222222223, |
|
"loss": 0.697, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.8219938905859483, |
|
"grad_norm": 0.006795075722038746, |
|
"learning_rate": 0.002419753086419753, |
|
"loss": 0.639, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8242154956956401, |
|
"grad_norm": 0.003597404109314084, |
|
"learning_rate": 0.002417283950617284, |
|
"loss": 0.5233, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.8264371008053318, |
|
"grad_norm": 0.0035218833945691586, |
|
"learning_rate": 0.002414814814814815, |
|
"loss": 0.6507, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.8286587059150236, |
|
"grad_norm": 0.0034329844638705254, |
|
"learning_rate": 0.0024123456790123457, |
|
"loss": 0.5302, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.8308803110247154, |
|
"grad_norm": 0.005119191948324442, |
|
"learning_rate": 0.0024098765432098764, |
|
"loss": 0.8082, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.8331019161344071, |
|
"grad_norm": 0.0064886524342000484, |
|
"learning_rate": 0.0024074074074074076, |
|
"loss": 0.7949, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8353235212440988, |
|
"grad_norm": 0.004545222967863083, |
|
"learning_rate": 0.0024049382716049383, |
|
"loss": 0.6628, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.8375451263537906, |
|
"grad_norm": 0.003230273723602295, |
|
"learning_rate": 0.002402469135802469, |
|
"loss": 0.546, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.8397667314634824, |
|
"grad_norm": 0.00707286037504673, |
|
"learning_rate": 0.0024000000000000002, |
|
"loss": 0.8847, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8419883365731741, |
|
"grad_norm": 0.003968073055148125, |
|
"learning_rate": 0.002397530864197531, |
|
"loss": 0.6247, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.8442099416828659, |
|
"grad_norm": 0.0037850975058972836, |
|
"learning_rate": 0.0023950617283950617, |
|
"loss": 0.481, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.8464315467925576, |
|
"grad_norm": 0.0032914101611822844, |
|
"learning_rate": 0.0023925925925925924, |
|
"loss": 0.6331, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8486531519022493, |
|
"grad_norm": 0.005043490324169397, |
|
"learning_rate": 0.0023901234567901236, |
|
"loss": 0.5586, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.8508747570119412, |
|
"grad_norm": 0.0028147133998572826, |
|
"learning_rate": 0.0023876543209876543, |
|
"loss": 0.6125, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8530963621216329, |
|
"grad_norm": 0.0038198321126401424, |
|
"learning_rate": 0.002385185185185185, |
|
"loss": 0.5318, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8553179672313246, |
|
"grad_norm": 0.003384563373401761, |
|
"learning_rate": 0.0023827160493827162, |
|
"loss": 0.7372, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.8575395723410164, |
|
"grad_norm": 0.0064719198271632195, |
|
"learning_rate": 0.002380246913580247, |
|
"loss": 0.5661, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.8597611774507081, |
|
"grad_norm": 0.0028373387176543474, |
|
"learning_rate": 0.002377777777777778, |
|
"loss": 0.577, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.8619827825603998, |
|
"grad_norm": 0.0027057346887886524, |
|
"learning_rate": 0.0023753086419753084, |
|
"loss": 0.6654, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.8642043876700917, |
|
"grad_norm": 0.0032722018659114838, |
|
"learning_rate": 0.0023728395061728396, |
|
"loss": 0.5527, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.8664259927797834, |
|
"grad_norm": 0.004043676424771547, |
|
"learning_rate": 0.0023703703703703703, |
|
"loss": 0.6648, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8686475978894751, |
|
"grad_norm": 0.0032604134175926447, |
|
"learning_rate": 0.0023679012345679015, |
|
"loss": 0.5295, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.8708692029991669, |
|
"grad_norm": 0.0035888762213289738, |
|
"learning_rate": 0.0023654320987654322, |
|
"loss": 0.6486, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.8730908081088586, |
|
"grad_norm": 0.009759027510881424, |
|
"learning_rate": 0.002362962962962963, |
|
"loss": 0.5339, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8753124132185504, |
|
"grad_norm": 0.0034896486904472113, |
|
"learning_rate": 0.002360493827160494, |
|
"loss": 0.8196, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8775340183282422, |
|
"grad_norm": 0.0029943559784442186, |
|
"learning_rate": 0.002358024691358025, |
|
"loss": 0.5729, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8797556234379339, |
|
"grad_norm": 0.0038155601359903812, |
|
"learning_rate": 0.0023555555555555556, |
|
"loss": 0.5233, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.8819772285476256, |
|
"grad_norm": 0.0032906325068324804, |
|
"learning_rate": 0.0023530864197530863, |
|
"loss": 0.5703, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8841988336573174, |
|
"grad_norm": 0.008255512453615665, |
|
"learning_rate": 0.0023506172839506175, |
|
"loss": 0.6795, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.8864204387670092, |
|
"grad_norm": 0.003714282065629959, |
|
"learning_rate": 0.002348148148148148, |
|
"loss": 0.5704, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.8886420438767009, |
|
"grad_norm": 0.004658196121454239, |
|
"learning_rate": 0.002345679012345679, |
|
"loss": 0.6216, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8908636489863927, |
|
"grad_norm": 0.003137049498036504, |
|
"learning_rate": 0.00234320987654321, |
|
"loss": 0.6422, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.8930852540960844, |
|
"grad_norm": 0.004380072932690382, |
|
"learning_rate": 0.002340740740740741, |
|
"loss": 0.6654, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.8953068592057761, |
|
"grad_norm": 0.0035470370203256607, |
|
"learning_rate": 0.0023382716049382716, |
|
"loss": 0.5809, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.897528464315468, |
|
"grad_norm": 0.012754486873745918, |
|
"learning_rate": 0.0023358024691358024, |
|
"loss": 0.5879, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.8997500694251597, |
|
"grad_norm": 0.003565397346392274, |
|
"learning_rate": 0.0023333333333333335, |
|
"loss": 0.5179, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.9019716745348514, |
|
"grad_norm": 0.004204631317406893, |
|
"learning_rate": 0.0023308641975308643, |
|
"loss": 0.7359, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.9041932796445432, |
|
"grad_norm": 0.002687248168513179, |
|
"learning_rate": 0.002328395061728395, |
|
"loss": 0.6678, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9064148847542349, |
|
"grad_norm": 0.004071474075317383, |
|
"learning_rate": 0.002325925925925926, |
|
"loss": 0.5053, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9086364898639266, |
|
"grad_norm": 0.004420380108058453, |
|
"learning_rate": 0.002323456790123457, |
|
"loss": 0.7788, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9108580949736185, |
|
"grad_norm": 0.004801613744348288, |
|
"learning_rate": 0.0023209876543209876, |
|
"loss": 0.6536, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9130797000833102, |
|
"grad_norm": 0.00353393261320889, |
|
"learning_rate": 0.0023185185185185184, |
|
"loss": 0.4745, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.9153013051930019, |
|
"grad_norm": 0.005573590286076069, |
|
"learning_rate": 0.0023160493827160495, |
|
"loss": 0.6881, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.9175229103026937, |
|
"grad_norm": 0.0035207164473831654, |
|
"learning_rate": 0.0023135802469135803, |
|
"loss": 0.6957, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9197445154123854, |
|
"grad_norm": 0.003412191988900304, |
|
"learning_rate": 0.002311111111111111, |
|
"loss": 0.6391, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9219661205220772, |
|
"grad_norm": 0.004509443417191505, |
|
"learning_rate": 0.002308641975308642, |
|
"loss": 0.6225, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.924187725631769, |
|
"grad_norm": 0.0027398800011724234, |
|
"learning_rate": 0.002306172839506173, |
|
"loss": 0.4486, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.9264093307414607, |
|
"grad_norm": 0.002736851805821061, |
|
"learning_rate": 0.0023037037037037036, |
|
"loss": 0.5405, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9286309358511524, |
|
"grad_norm": 0.00329567096196115, |
|
"learning_rate": 0.0023012345679012344, |
|
"loss": 0.5796, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9308525409608442, |
|
"grad_norm": 0.0037880768068134785, |
|
"learning_rate": 0.0022987654320987655, |
|
"loss": 0.5197, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.933074146070536, |
|
"grad_norm": 0.0035441648215055466, |
|
"learning_rate": 0.0022962962962962963, |
|
"loss": 0.6808, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9352957511802277, |
|
"grad_norm": 0.00435903575271368, |
|
"learning_rate": 0.002293827160493827, |
|
"loss": 0.65, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9375173562899195, |
|
"grad_norm": 0.003232384566217661, |
|
"learning_rate": 0.002291358024691358, |
|
"loss": 0.6367, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9397389613996112, |
|
"grad_norm": 0.006525840610265732, |
|
"learning_rate": 0.002288888888888889, |
|
"loss": 0.634, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.9419605665093029, |
|
"grad_norm": 0.004283812828361988, |
|
"learning_rate": 0.00228641975308642, |
|
"loss": 0.6266, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9441821716189948, |
|
"grad_norm": 0.00269131432287395, |
|
"learning_rate": 0.0022839506172839504, |
|
"loss": 0.6423, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9464037767286865, |
|
"grad_norm": 0.005134789738804102, |
|
"learning_rate": 0.0022814814814814816, |
|
"loss": 0.741, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9486253818383782, |
|
"grad_norm": 0.003960168920457363, |
|
"learning_rate": 0.0022790123456790123, |
|
"loss": 0.653, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.95084698694807, |
|
"grad_norm": 0.005250333808362484, |
|
"learning_rate": 0.0022765432098765435, |
|
"loss": 0.5763, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.9530685920577617, |
|
"grad_norm": 0.002757916459813714, |
|
"learning_rate": 0.0022740740740740738, |
|
"loss": 0.4882, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.9552901971674534, |
|
"grad_norm": 0.0023539310786873102, |
|
"learning_rate": 0.002271604938271605, |
|
"loss": 0.5957, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9575118022771453, |
|
"grad_norm": 0.00454498128965497, |
|
"learning_rate": 0.002269135802469136, |
|
"loss": 0.634, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.959733407386837, |
|
"grad_norm": 0.0027770919259637594, |
|
"learning_rate": 0.002266666666666667, |
|
"loss": 0.6027, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.9619550124965287, |
|
"grad_norm": 0.0041671195067465305, |
|
"learning_rate": 0.0022641975308641976, |
|
"loss": 0.5002, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.9641766176062205, |
|
"grad_norm": 0.003652903251349926, |
|
"learning_rate": 0.0022617283950617283, |
|
"loss": 0.5282, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.9663982227159122, |
|
"grad_norm": 0.002368962625041604, |
|
"learning_rate": 0.0022592592592592595, |
|
"loss": 0.5286, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.968619827825604, |
|
"grad_norm": 0.0032470771111547947, |
|
"learning_rate": 0.00225679012345679, |
|
"loss": 0.5601, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.9708414329352958, |
|
"grad_norm": 0.0038956725038588047, |
|
"learning_rate": 0.002254320987654321, |
|
"loss": 0.669, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.9730630380449875, |
|
"grad_norm": 0.0024040553253144026, |
|
"learning_rate": 0.002251851851851852, |
|
"loss": 0.6164, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.9752846431546792, |
|
"grad_norm": 0.0025828545913100243, |
|
"learning_rate": 0.002249382716049383, |
|
"loss": 0.6219, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.977506248264371, |
|
"grad_norm": 0.005943648051470518, |
|
"learning_rate": 0.0022469135802469136, |
|
"loss": 0.5701, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9797278533740628, |
|
"grad_norm": 0.0035154123324900866, |
|
"learning_rate": 0.0022444444444444443, |
|
"loss": 0.6773, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.9819494584837545, |
|
"grad_norm": 0.0027923276647925377, |
|
"learning_rate": 0.0022419753086419755, |
|
"loss": 0.6962, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9841710635934463, |
|
"grad_norm": 0.0050351908430457115, |
|
"learning_rate": 0.002239506172839506, |
|
"loss": 0.5091, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.986392668703138, |
|
"grad_norm": 0.010067122988402843, |
|
"learning_rate": 0.002237037037037037, |
|
"loss": 0.5033, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9886142738128297, |
|
"grad_norm": 0.003369242651388049, |
|
"learning_rate": 0.002234567901234568, |
|
"loss": 0.4704, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9908358789225216, |
|
"grad_norm": 0.002988790860399604, |
|
"learning_rate": 0.002232098765432099, |
|
"loss": 0.7471, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.9930574840322133, |
|
"grad_norm": 0.003928947728127241, |
|
"learning_rate": 0.0022296296296296296, |
|
"loss": 0.7318, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.995279089141905, |
|
"grad_norm": 0.0032487758435308933, |
|
"learning_rate": 0.0022271604938271603, |
|
"loss": 0.6627, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.9975006942515968, |
|
"grad_norm": 0.0050149112939834595, |
|
"learning_rate": 0.0022246913580246915, |
|
"loss": 0.7153, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9997222993612885, |
|
"grad_norm": 0.002519611269235611, |
|
"learning_rate": 0.0022222222222222222, |
|
"loss": 0.6003, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.0019439044709804, |
|
"grad_norm": 0.0036871463526040316, |
|
"learning_rate": 0.002219753086419753, |
|
"loss": 0.7394, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.004165509580672, |
|
"grad_norm": 0.003514339914545417, |
|
"learning_rate": 0.002217283950617284, |
|
"loss": 0.5648, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.0063871146903638, |
|
"grad_norm": 0.0024536640848964453, |
|
"learning_rate": 0.002214814814814815, |
|
"loss": 0.5651, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.0086087198000555, |
|
"grad_norm": 0.0029373124707490206, |
|
"learning_rate": 0.002212345679012346, |
|
"loss": 0.5751, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.0108303249097472, |
|
"grad_norm": 0.004164501093327999, |
|
"learning_rate": 0.0022098765432098763, |
|
"loss": 0.542, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.013051930019439, |
|
"grad_norm": 0.002176556270569563, |
|
"learning_rate": 0.0022074074074074075, |
|
"loss": 0.5003, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.0152735351291309, |
|
"grad_norm": 0.002453757217153907, |
|
"learning_rate": 0.0022049382716049382, |
|
"loss": 0.5938, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.0174951402388226, |
|
"grad_norm": 0.002585896523669362, |
|
"learning_rate": 0.0022024691358024694, |
|
"loss": 0.4372, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.0197167453485143, |
|
"grad_norm": 0.0036106205079704523, |
|
"learning_rate": 0.0021999999999999997, |
|
"loss": 0.4973, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.021938350458206, |
|
"grad_norm": 0.010448912158608437, |
|
"learning_rate": 0.002197530864197531, |
|
"loss": 0.6627, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.0241599555678977, |
|
"grad_norm": 0.0033705660607665777, |
|
"learning_rate": 0.002195061728395062, |
|
"loss": 0.6634, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.0263815606775895, |
|
"grad_norm": 0.004475906956940889, |
|
"learning_rate": 0.0021925925925925928, |
|
"loss": 0.6017, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.0286031657872814, |
|
"grad_norm": 0.002899282844737172, |
|
"learning_rate": 0.0021901234567901235, |
|
"loss": 0.6399, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.030824770896973, |
|
"grad_norm": 0.002981000579893589, |
|
"learning_rate": 0.0021876543209876542, |
|
"loss": 0.6914, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.0330463760066648, |
|
"grad_norm": 0.0031841108575463295, |
|
"learning_rate": 0.0021851851851851854, |
|
"loss": 0.6894, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.0352679811163565, |
|
"grad_norm": 0.00265736342407763, |
|
"learning_rate": 0.002182716049382716, |
|
"loss": 0.4852, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.0374895862260483, |
|
"grad_norm": 0.003279666183516383, |
|
"learning_rate": 0.002180246913580247, |
|
"loss": 0.5547, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.03971119133574, |
|
"grad_norm": 0.002579506253823638, |
|
"learning_rate": 0.002177777777777778, |
|
"loss": 0.5056, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.041932796445432, |
|
"grad_norm": 0.0029568190220743418, |
|
"learning_rate": 0.0021753086419753088, |
|
"loss": 0.6682, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.0441544015551236, |
|
"grad_norm": 0.0029284320771694183, |
|
"learning_rate": 0.0021728395061728395, |
|
"loss": 0.5206, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.0463760066648153, |
|
"grad_norm": 0.003473050892353058, |
|
"learning_rate": 0.0021703703703703702, |
|
"loss": 0.6092, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.048597611774507, |
|
"grad_norm": 0.003467805916443467, |
|
"learning_rate": 0.0021679012345679014, |
|
"loss": 0.7541, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.0508192168841988, |
|
"grad_norm": 0.0023657323326915503, |
|
"learning_rate": 0.002165432098765432, |
|
"loss": 0.588, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.0530408219938905, |
|
"grad_norm": 0.003372120438143611, |
|
"learning_rate": 0.002162962962962963, |
|
"loss": 0.5177, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.0552624271035824, |
|
"grad_norm": 0.002507074037566781, |
|
"learning_rate": 0.002160493827160494, |
|
"loss": 0.5315, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.0574840322132741, |
|
"grad_norm": 0.003847063286229968, |
|
"learning_rate": 0.002158024691358025, |
|
"loss": 0.6434, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.0597056373229659, |
|
"grad_norm": 0.004577492829412222, |
|
"learning_rate": 0.0021555555555555555, |
|
"loss": 0.6605, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.0619272424326576, |
|
"grad_norm": 0.00435985391959548, |
|
"learning_rate": 0.0021530864197530863, |
|
"loss": 0.4803, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.0641488475423493, |
|
"grad_norm": 0.0066704158671200275, |
|
"learning_rate": 0.0021506172839506174, |
|
"loss": 0.4825, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.066370452652041, |
|
"grad_norm": 0.0034896789584308863, |
|
"learning_rate": 0.002148148148148148, |
|
"loss": 0.4425, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.068592057761733, |
|
"grad_norm": 0.0028791052754968405, |
|
"learning_rate": 0.002145679012345679, |
|
"loss": 0.6792, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.0708136628714247, |
|
"grad_norm": 0.0027687002439051867, |
|
"learning_rate": 0.0021432098765432096, |
|
"loss": 0.6594, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.0730352679811164, |
|
"grad_norm": 0.004298859275877476, |
|
"learning_rate": 0.002140740740740741, |
|
"loss": 0.6979, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.075256873090808, |
|
"grad_norm": 0.0024634443689137697, |
|
"learning_rate": 0.002138271604938272, |
|
"loss": 0.7344, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.0774784782004998, |
|
"grad_norm": 0.0029783491045236588, |
|
"learning_rate": 0.0021358024691358023, |
|
"loss": 0.7162, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.0797000833101915, |
|
"grad_norm": 0.003992694895714521, |
|
"learning_rate": 0.0021333333333333334, |
|
"loss": 0.6333, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.0819216884198835, |
|
"grad_norm": 0.0027143885381519794, |
|
"learning_rate": 0.002130864197530864, |
|
"loss": 0.7107, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.0841432935295752, |
|
"grad_norm": 0.002360287122428417, |
|
"learning_rate": 0.0021283950617283953, |
|
"loss": 0.6408, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.0863648986392669, |
|
"grad_norm": 0.0030325008556246758, |
|
"learning_rate": 0.0021259259259259256, |
|
"loss": 0.6446, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.0885865037489586, |
|
"grad_norm": 0.003571762004867196, |
|
"learning_rate": 0.002123456790123457, |
|
"loss": 0.4972, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.0908081088586503, |
|
"grad_norm": 0.0028126256074756384, |
|
"learning_rate": 0.002120987654320988, |
|
"loss": 0.4611, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.093029713968342, |
|
"grad_norm": 0.0031972727738320827, |
|
"learning_rate": 0.0021185185185185187, |
|
"loss": 0.5785, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.095251319078034, |
|
"grad_norm": 0.0038447740953415632, |
|
"learning_rate": 0.0021160493827160494, |
|
"loss": 0.7667, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.0974729241877257, |
|
"grad_norm": 0.002788472454994917, |
|
"learning_rate": 0.00211358024691358, |
|
"loss": 0.6148, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.0996945292974174, |
|
"grad_norm": 0.003124644048511982, |
|
"learning_rate": 0.0021111111111111113, |
|
"loss": 0.7035, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.1019161344071091, |
|
"grad_norm": 0.003380119800567627, |
|
"learning_rate": 0.002108641975308642, |
|
"loss": 0.5542, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.1041377395168008, |
|
"grad_norm": 0.004016113001853228, |
|
"learning_rate": 0.002106172839506173, |
|
"loss": 0.5868, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.1063593446264925, |
|
"grad_norm": 0.003642839379608631, |
|
"learning_rate": 0.002103703703703704, |
|
"loss": 0.6001, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.1085809497361845, |
|
"grad_norm": 0.002972975606098771, |
|
"learning_rate": 0.0021012345679012347, |
|
"loss": 0.5375, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.1108025548458762, |
|
"grad_norm": 0.0027982559986412525, |
|
"learning_rate": 0.0020987654320987655, |
|
"loss": 0.6106, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.113024159955568, |
|
"grad_norm": 0.003983978182077408, |
|
"learning_rate": 0.002096296296296296, |
|
"loss": 0.6312, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.1152457650652596, |
|
"grad_norm": 0.0028390882071107626, |
|
"learning_rate": 0.0020938271604938274, |
|
"loss": 0.6555, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.1174673701749513, |
|
"grad_norm": 0.006304751615971327, |
|
"learning_rate": 0.002091358024691358, |
|
"loss": 0.6585, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.119688975284643, |
|
"grad_norm": 0.003519695485010743, |
|
"learning_rate": 0.002088888888888889, |
|
"loss": 0.5764, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.121910580394335, |
|
"grad_norm": 0.002687301719561219, |
|
"learning_rate": 0.00208641975308642, |
|
"loss": 0.6775, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.1241321855040267, |
|
"grad_norm": 0.004116454627364874, |
|
"learning_rate": 0.0020839506172839507, |
|
"loss": 0.4918, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.1263537906137184, |
|
"grad_norm": 0.0029182173311710358, |
|
"learning_rate": 0.0020814814814814815, |
|
"loss": 0.6922, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.1285753957234101, |
|
"grad_norm": 0.0036673294380307198, |
|
"learning_rate": 0.002079012345679012, |
|
"loss": 0.5987, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.1307970008331019, |
|
"grad_norm": 0.005055381450802088, |
|
"learning_rate": 0.0020765432098765434, |
|
"loss": 0.547, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.1330186059427936, |
|
"grad_norm": 0.003081630915403366, |
|
"learning_rate": 0.002074074074074074, |
|
"loss": 0.7392, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.1352402110524855, |
|
"grad_norm": 0.003554833820089698, |
|
"learning_rate": 0.002071604938271605, |
|
"loss": 0.666, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.1374618161621772, |
|
"grad_norm": 0.0036279240157455206, |
|
"learning_rate": 0.0020691358024691356, |
|
"loss": 0.5381, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.139683421271869, |
|
"grad_norm": 0.004544177558273077, |
|
"learning_rate": 0.0020666666666666667, |
|
"loss": 0.4533, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.1419050263815607, |
|
"grad_norm": 0.003769755130633712, |
|
"learning_rate": 0.002064197530864198, |
|
"loss": 0.7778, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.1441266314912524, |
|
"grad_norm": 0.0022846919018775225, |
|
"learning_rate": 0.002061728395061728, |
|
"loss": 0.567, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.146348236600944, |
|
"grad_norm": 0.0027041470166295767, |
|
"learning_rate": 0.0020592592592592594, |
|
"loss": 0.5174, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.148569841710636, |
|
"grad_norm": 0.003274301066994667, |
|
"learning_rate": 0.00205679012345679, |
|
"loss": 0.6534, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.1507914468203277, |
|
"grad_norm": 0.0030452990904450417, |
|
"learning_rate": 0.0020543209876543213, |
|
"loss": 0.5024, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.1530130519300195, |
|
"grad_norm": 0.004656861070543528, |
|
"learning_rate": 0.0020518518518518516, |
|
"loss": 0.5775, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.1552346570397112, |
|
"grad_norm": 0.0048223827034235, |
|
"learning_rate": 0.0020493827160493827, |
|
"loss": 0.5477, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.157456262149403, |
|
"grad_norm": 0.0036563219036906958, |
|
"learning_rate": 0.002046913580246914, |
|
"loss": 0.7265, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.1596778672590946, |
|
"grad_norm": 0.002614110242575407, |
|
"learning_rate": 0.0020444444444444447, |
|
"loss": 0.539, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.1618994723687863, |
|
"grad_norm": 0.006397682707756758, |
|
"learning_rate": 0.0020419753086419754, |
|
"loss": 0.727, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.1641210774784783, |
|
"grad_norm": 0.006625836715102196, |
|
"learning_rate": 0.002039506172839506, |
|
"loss": 0.4989, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.16634268258817, |
|
"grad_norm": 0.0033377420622855425, |
|
"learning_rate": 0.0020370370370370373, |
|
"loss": 0.5469, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.1685642876978617, |
|
"grad_norm": 0.0037035797722637653, |
|
"learning_rate": 0.002034567901234568, |
|
"loss": 0.6719, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.1707858928075534, |
|
"grad_norm": 0.0035533548798412085, |
|
"learning_rate": 0.0020320987654320988, |
|
"loss": 0.4496, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.1730074979172451, |
|
"grad_norm": 0.0026416785549372435, |
|
"learning_rate": 0.00202962962962963, |
|
"loss": 0.5356, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.175229103026937, |
|
"grad_norm": 0.0026185268070548773, |
|
"learning_rate": 0.0020271604938271607, |
|
"loss": 0.6109, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.1774507081366288, |
|
"grad_norm": 0.003254238050431013, |
|
"learning_rate": 0.0020246913580246914, |
|
"loss": 0.6925, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.1796723132463205, |
|
"grad_norm": 0.006051741540431976, |
|
"learning_rate": 0.002022222222222222, |
|
"loss": 0.5446, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.1818939183560122, |
|
"grad_norm": 0.0027108059730380774, |
|
"learning_rate": 0.0020197530864197533, |
|
"loss": 0.6273, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.184115523465704, |
|
"grad_norm": 0.0033522590529173613, |
|
"learning_rate": 0.002017283950617284, |
|
"loss": 0.6232, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.1863371285753956, |
|
"grad_norm": 0.0041407193057239056, |
|
"learning_rate": 0.0020148148148148148, |
|
"loss": 0.5172, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.1885587336850874, |
|
"grad_norm": 0.0039221737533807755, |
|
"learning_rate": 0.002012345679012346, |
|
"loss": 0.5179, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.1907803387947793, |
|
"grad_norm": 0.0050890520215034485, |
|
"learning_rate": 0.0020098765432098767, |
|
"loss": 0.6051, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.193001943904471, |
|
"grad_norm": 0.0036343582905828953, |
|
"learning_rate": 0.0020074074074074074, |
|
"loss": 0.6176, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.1952235490141627, |
|
"grad_norm": 0.0032197320833802223, |
|
"learning_rate": 0.002004938271604938, |
|
"loss": 0.5136, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.1974451541238544, |
|
"grad_norm": 0.0023664599284529686, |
|
"learning_rate": 0.0020024691358024693, |
|
"loss": 0.5228, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.1996667592335462, |
|
"grad_norm": 0.002611706266179681, |
|
"learning_rate": 0.002, |
|
"loss": 0.5393, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.201888364343238, |
|
"grad_norm": 0.002908397000283003, |
|
"learning_rate": 0.0019975308641975308, |
|
"loss": 0.5602, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.2041099694529298, |
|
"grad_norm": 0.0035483166575431824, |
|
"learning_rate": 0.0019950617283950615, |
|
"loss": 0.5777, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.2063315745626215, |
|
"grad_norm": 0.002436819253489375, |
|
"learning_rate": 0.0019925925925925927, |
|
"loss": 0.6247, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.2085531796723132, |
|
"grad_norm": 0.00271439366042614, |
|
"learning_rate": 0.001990123456790124, |
|
"loss": 0.634, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.210774784782005, |
|
"grad_norm": 0.0029384950175881386, |
|
"learning_rate": 0.001987654320987654, |
|
"loss": 0.5216, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.2129963898916967, |
|
"grad_norm": 0.0022015413269400597, |
|
"learning_rate": 0.0019851851851851853, |
|
"loss": 0.6649, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.2152179950013884, |
|
"grad_norm": 0.00338186277076602, |
|
"learning_rate": 0.001982716049382716, |
|
"loss": 0.5563, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.2174396001110803, |
|
"grad_norm": 0.002683288650587201, |
|
"learning_rate": 0.001980246913580247, |
|
"loss": 0.6024, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.219661205220772, |
|
"grad_norm": 0.002985814120620489, |
|
"learning_rate": 0.0019777777777777775, |
|
"loss": 0.4619, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.2218828103304638, |
|
"grad_norm": 0.0031337959226220846, |
|
"learning_rate": 0.0019753086419753087, |
|
"loss": 0.5346, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.2241044154401555, |
|
"grad_norm": 0.010030501522123814, |
|
"learning_rate": 0.00197283950617284, |
|
"loss": 0.6331, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.2263260205498472, |
|
"grad_norm": 0.0031471492256969213, |
|
"learning_rate": 0.00197037037037037, |
|
"loss": 0.5195, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.2285476256595391, |
|
"grad_norm": 0.0028481080662459135, |
|
"learning_rate": 0.0019679012345679013, |
|
"loss": 0.4754, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.2307692307692308, |
|
"grad_norm": 0.004545793868601322, |
|
"learning_rate": 0.001965432098765432, |
|
"loss": 0.6763, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.2329908358789226, |
|
"grad_norm": 0.002784636802971363, |
|
"learning_rate": 0.0019629629629629632, |
|
"loss": 0.5884, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.2352124409886143, |
|
"grad_norm": 0.0035187830217182636, |
|
"learning_rate": 0.0019604938271604935, |
|
"loss": 0.6781, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.237434046098306, |
|
"grad_norm": 0.003770441748201847, |
|
"learning_rate": 0.0019580246913580247, |
|
"loss": 0.6714, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.2396556512079977, |
|
"grad_norm": 0.002745373174548149, |
|
"learning_rate": 0.001955555555555556, |
|
"loss": 0.6145, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.2418772563176894, |
|
"grad_norm": 0.003247046610340476, |
|
"learning_rate": 0.0019530864197530864, |
|
"loss": 0.5698, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.2440988614273814, |
|
"grad_norm": 0.0034373749513179064, |
|
"learning_rate": 0.0019506172839506173, |
|
"loss": 0.7554, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.246320466537073, |
|
"grad_norm": 0.0037724527064710855, |
|
"learning_rate": 0.001948148148148148, |
|
"loss": 0.8468, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.2485420716467648, |
|
"grad_norm": 0.0030816916842013597, |
|
"learning_rate": 0.001945679012345679, |
|
"loss": 0.5393, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.2507636767564565, |
|
"grad_norm": 0.004649344366043806, |
|
"learning_rate": 0.0019432098765432098, |
|
"loss": 0.553, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.2529852818661482, |
|
"grad_norm": 0.002786665689200163, |
|
"learning_rate": 0.0019407407407407407, |
|
"loss": 0.5569, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.2552068869758402, |
|
"grad_norm": 0.0026445803232491016, |
|
"learning_rate": 0.0019382716049382714, |
|
"loss": 0.5139, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.2574284920855319, |
|
"grad_norm": 0.005002819932997227, |
|
"learning_rate": 0.0019358024691358024, |
|
"loss": 0.6082, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.2596500971952236, |
|
"grad_norm": 0.005818431731313467, |
|
"learning_rate": 0.0019333333333333336, |
|
"loss": 0.7579, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.2618717023049153, |
|
"grad_norm": 0.005721336230635643, |
|
"learning_rate": 0.001930864197530864, |
|
"loss": 0.6972, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.264093307414607, |
|
"grad_norm": 0.0032174643129110336, |
|
"learning_rate": 0.0019283950617283952, |
|
"loss": 0.5709, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.2663149125242987, |
|
"grad_norm": 0.00355134648270905, |
|
"learning_rate": 0.0019259259259259258, |
|
"loss": 0.6242, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.2685365176339904, |
|
"grad_norm": 0.0028797700069844723, |
|
"learning_rate": 0.001923456790123457, |
|
"loss": 0.6642, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.2707581227436824, |
|
"grad_norm": 0.0029408205300569534, |
|
"learning_rate": 0.0019209876543209875, |
|
"loss": 0.6446, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.272979727853374, |
|
"grad_norm": 0.00306298746727407, |
|
"learning_rate": 0.0019185185185185186, |
|
"loss": 0.5913, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.2752013329630658, |
|
"grad_norm": 0.004586388822644949, |
|
"learning_rate": 0.0019160493827160496, |
|
"loss": 0.6747, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.2774229380727575, |
|
"grad_norm": 0.0031426565255969763, |
|
"learning_rate": 0.0019135802469135803, |
|
"loss": 0.7594, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.2796445431824492, |
|
"grad_norm": 0.003649466671049595, |
|
"learning_rate": 0.0019111111111111113, |
|
"loss": 0.5871, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.2818661482921412, |
|
"grad_norm": 0.006696793716400862, |
|
"learning_rate": 0.001908641975308642, |
|
"loss": 0.5172, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.284087753401833, |
|
"grad_norm": 0.004110109061002731, |
|
"learning_rate": 0.001906172839506173, |
|
"loss": 0.6425, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.2863093585115246, |
|
"grad_norm": 0.0030410694889724255, |
|
"learning_rate": 0.0019037037037037037, |
|
"loss": 0.5343, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.2885309636212163, |
|
"grad_norm": 0.0044550783932209015, |
|
"learning_rate": 0.0019012345679012346, |
|
"loss": 0.5118, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.290752568730908, |
|
"grad_norm": 0.004111356567591429, |
|
"learning_rate": 0.0018987654320987656, |
|
"loss": 0.6189, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.2929741738405998, |
|
"grad_norm": 0.002536875894293189, |
|
"learning_rate": 0.0018962962962962963, |
|
"loss": 0.5173, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.2951957789502915, |
|
"grad_norm": 0.003421909874305129, |
|
"learning_rate": 0.0018938271604938273, |
|
"loss": 0.562, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.2974173840599834, |
|
"grad_norm": 0.003108728677034378, |
|
"learning_rate": 0.001891358024691358, |
|
"loss": 0.5725, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.2996389891696751, |
|
"grad_norm": 0.0026788951363414526, |
|
"learning_rate": 0.001888888888888889, |
|
"loss": 0.574, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.3018605942793668, |
|
"grad_norm": 0.0026323439087718725, |
|
"learning_rate": 0.0018864197530864197, |
|
"loss": 0.8206, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.3040821993890586, |
|
"grad_norm": 0.003548834938555956, |
|
"learning_rate": 0.0018839506172839506, |
|
"loss": 0.636, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.3063038044987503, |
|
"grad_norm": 0.004747429396957159, |
|
"learning_rate": 0.0018814814814814816, |
|
"loss": 0.6796, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.3085254096084422, |
|
"grad_norm": 0.0041969697922468185, |
|
"learning_rate": 0.0018790123456790123, |
|
"loss": 0.5519, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.310747014718134, |
|
"grad_norm": 0.003130959114059806, |
|
"learning_rate": 0.0018765432098765433, |
|
"loss": 0.4979, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.3129686198278256, |
|
"grad_norm": 0.0030902365688234568, |
|
"learning_rate": 0.001874074074074074, |
|
"loss": 0.4823, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.3151902249375174, |
|
"grad_norm": 0.0031351533252745867, |
|
"learning_rate": 0.001871604938271605, |
|
"loss": 0.6665, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.317411830047209, |
|
"grad_norm": 0.003788917325437069, |
|
"learning_rate": 0.0018691358024691357, |
|
"loss": 0.6516, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.3196334351569008, |
|
"grad_norm": 0.004900997504591942, |
|
"learning_rate": 0.0018666666666666666, |
|
"loss": 0.6315, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.3218550402665925, |
|
"grad_norm": 0.0032752864062786102, |
|
"learning_rate": 0.0018641975308641974, |
|
"loss": 0.639, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.3240766453762844, |
|
"grad_norm": 0.002660909667611122, |
|
"learning_rate": 0.0018617283950617283, |
|
"loss": 0.6107, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.3262982504859762, |
|
"grad_norm": 0.003523773979395628, |
|
"learning_rate": 0.0018592592592592595, |
|
"loss": 0.5339, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.3285198555956679, |
|
"grad_norm": 0.002650630660355091, |
|
"learning_rate": 0.00185679012345679, |
|
"loss": 0.5768, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.3307414607053596, |
|
"grad_norm": 0.004082327708601952, |
|
"learning_rate": 0.0018543209876543212, |
|
"loss": 0.462, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.3329630658150513, |
|
"grad_norm": 0.004044753964990377, |
|
"learning_rate": 0.0018518518518518517, |
|
"loss": 0.6988, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.3351846709247432, |
|
"grad_norm": 0.0031004957854747772, |
|
"learning_rate": 0.0018493827160493829, |
|
"loss": 0.6714, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.337406276034435, |
|
"grad_norm": 0.003743590787053108, |
|
"learning_rate": 0.0018469135802469134, |
|
"loss": 0.5599, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.3396278811441267, |
|
"grad_norm": 0.0025853265542536974, |
|
"learning_rate": 0.0018444444444444446, |
|
"loss": 0.4862, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.3418494862538184, |
|
"grad_norm": 0.002392064081504941, |
|
"learning_rate": 0.0018419753086419755, |
|
"loss": 0.6645, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.34407109136351, |
|
"grad_norm": 0.004032174590975046, |
|
"learning_rate": 0.0018395061728395062, |
|
"loss": 0.6411, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.3462926964732018, |
|
"grad_norm": 0.003317909548059106, |
|
"learning_rate": 0.0018370370370370372, |
|
"loss": 0.6159, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.3485143015828935, |
|
"grad_norm": 0.0038619996048510075, |
|
"learning_rate": 0.001834567901234568, |
|
"loss": 0.6031, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.3507359066925855, |
|
"grad_norm": 0.003428127383813262, |
|
"learning_rate": 0.0018320987654320989, |
|
"loss": 0.7644, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.3529575118022772, |
|
"grad_norm": 0.003281916258856654, |
|
"learning_rate": 0.0018296296296296296, |
|
"loss": 0.6009, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.355179116911969, |
|
"grad_norm": 0.0022182667162269354, |
|
"learning_rate": 0.0018271604938271606, |
|
"loss": 0.5008, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.3574007220216606, |
|
"grad_norm": 0.008659104816615582, |
|
"learning_rate": 0.0018246913580246915, |
|
"loss": 0.7363, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.3596223271313523, |
|
"grad_norm": 0.0032885768450796604, |
|
"learning_rate": 0.0018222222222222223, |
|
"loss": 0.6114, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.3618439322410443, |
|
"grad_norm": 0.0024236771278083324, |
|
"learning_rate": 0.0018197530864197532, |
|
"loss": 0.5882, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.364065537350736, |
|
"grad_norm": 0.002592614386230707, |
|
"learning_rate": 0.001817283950617284, |
|
"loss": 0.5504, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.3662871424604277, |
|
"grad_norm": 0.0033317788038402796, |
|
"learning_rate": 0.001814814814814815, |
|
"loss": 0.5076, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.3685087475701194, |
|
"grad_norm": 0.002845801878720522, |
|
"learning_rate": 0.0018123456790123456, |
|
"loss": 0.5616, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.3707303526798111, |
|
"grad_norm": 0.0022413271944969893, |
|
"learning_rate": 0.0018098765432098766, |
|
"loss": 0.4234, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.3729519577895029, |
|
"grad_norm": 0.004213643725961447, |
|
"learning_rate": 0.0018074074074074075, |
|
"loss": 0.6248, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.3751735628991946, |
|
"grad_norm": 0.0032481811940670013, |
|
"learning_rate": 0.0018049382716049383, |
|
"loss": 0.5779, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.3773951680088863, |
|
"grad_norm": 0.004150136839598417, |
|
"learning_rate": 0.0018024691358024692, |
|
"loss": 0.7694, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.3796167731185782, |
|
"grad_norm": 0.003350265324115753, |
|
"learning_rate": 0.0018, |
|
"loss": 0.7772, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.38183837822827, |
|
"grad_norm": 0.0031599882058799267, |
|
"learning_rate": 0.001797530864197531, |
|
"loss": 0.5887, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.3840599833379617, |
|
"grad_norm": 0.0033556423150002956, |
|
"learning_rate": 0.0017950617283950616, |
|
"loss": 0.6057, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.3862815884476534, |
|
"grad_norm": 0.003302312456071377, |
|
"learning_rate": 0.0017925925925925926, |
|
"loss": 0.5406, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.3885031935573453, |
|
"grad_norm": 0.003953338600695133, |
|
"learning_rate": 0.0017901234567901233, |
|
"loss": 0.5672, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.390724798667037, |
|
"grad_norm": 0.00439725024625659, |
|
"learning_rate": 0.0017876543209876543, |
|
"loss": 0.5253, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.3929464037767287, |
|
"grad_norm": 0.015487024560570717, |
|
"learning_rate": 0.0017851851851851854, |
|
"loss": 0.7185, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.3951680088864205, |
|
"grad_norm": 0.0034425046760588884, |
|
"learning_rate": 0.001782716049382716, |
|
"loss": 0.6243, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.3973896139961122, |
|
"grad_norm": 0.003950211219489574, |
|
"learning_rate": 0.0017802469135802471, |
|
"loss": 0.5918, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.3996112191058039, |
|
"grad_norm": 0.002602384425699711, |
|
"learning_rate": 0.0017777777777777776, |
|
"loss": 0.5422, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.4018328242154956, |
|
"grad_norm": 0.003526592394337058, |
|
"learning_rate": 0.0017753086419753088, |
|
"loss": 0.6647, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.4040544293251873, |
|
"grad_norm": 0.0025864916387945414, |
|
"learning_rate": 0.0017728395061728393, |
|
"loss": 0.6296, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.4062760344348793, |
|
"grad_norm": 0.0031570426654070616, |
|
"learning_rate": 0.0017703703703703705, |
|
"loss": 0.547, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.408497639544571, |
|
"grad_norm": 0.0037307501770555973, |
|
"learning_rate": 0.0017679012345679015, |
|
"loss": 0.6843, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.4107192446542627, |
|
"grad_norm": 0.0035500023514032364, |
|
"learning_rate": 0.0017654320987654322, |
|
"loss": 0.5334, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.4129408497639544, |
|
"grad_norm": 0.0032879123464226723, |
|
"learning_rate": 0.0017629629629629631, |
|
"loss": 0.5945, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.4151624548736463, |
|
"grad_norm": 0.0034462285693734884, |
|
"learning_rate": 0.0017604938271604939, |
|
"loss": 0.7461, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.417384059983338, |
|
"grad_norm": 0.0030851562041789293, |
|
"learning_rate": 0.0017580246913580248, |
|
"loss": 0.6538, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.4196056650930298, |
|
"grad_norm": 0.0039033882785588503, |
|
"learning_rate": 0.0017555555555555556, |
|
"loss": 0.4908, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.4218272702027215, |
|
"grad_norm": 0.004719994496554136, |
|
"learning_rate": 0.0017530864197530865, |
|
"loss": 0.6726, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.4240488753124132, |
|
"grad_norm": 0.0027313127648085356, |
|
"learning_rate": 0.0017506172839506175, |
|
"loss": 0.625, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.426270480422105, |
|
"grad_norm": 0.00425785081461072, |
|
"learning_rate": 0.0017481481481481482, |
|
"loss": 0.5663, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.4284920855317966, |
|
"grad_norm": 0.003618143033236265, |
|
"learning_rate": 0.0017456790123456791, |
|
"loss": 0.5657, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.4307136906414883, |
|
"grad_norm": 0.0034850751981139183, |
|
"learning_rate": 0.0017432098765432099, |
|
"loss": 0.4107, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.4329352957511803, |
|
"grad_norm": 0.00514020211994648, |
|
"learning_rate": 0.0017407407407407408, |
|
"loss": 0.4958, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.435156900860872, |
|
"grad_norm": 0.003885776735842228, |
|
"learning_rate": 0.0017382716049382716, |
|
"loss": 0.7162, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.4373785059705637, |
|
"grad_norm": 0.003870051121339202, |
|
"learning_rate": 0.0017358024691358025, |
|
"loss": 0.5913, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.4396001110802554, |
|
"grad_norm": 0.004496397916227579, |
|
"learning_rate": 0.0017333333333333333, |
|
"loss": 0.5303, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.4418217161899474, |
|
"grad_norm": 0.0030751321464776993, |
|
"learning_rate": 0.0017308641975308642, |
|
"loss": 0.5838, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.444043321299639, |
|
"grad_norm": 0.002886572852730751, |
|
"learning_rate": 0.0017283950617283952, |
|
"loss": 0.5713, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.4462649264093308, |
|
"grad_norm": 0.011516193859279156, |
|
"learning_rate": 0.0017259259259259259, |
|
"loss": 0.5521, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.4484865315190225, |
|
"grad_norm": 0.0032446719706058502, |
|
"learning_rate": 0.0017234567901234568, |
|
"loss": 0.6537, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.4507081366287142, |
|
"grad_norm": 0.0029585566371679306, |
|
"learning_rate": 0.0017209876543209876, |
|
"loss": 0.7199, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.452929741738406, |
|
"grad_norm": 0.0026364405639469624, |
|
"learning_rate": 0.0017185185185185185, |
|
"loss": 0.4793, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.4551513468480977, |
|
"grad_norm": 0.004324890207499266, |
|
"learning_rate": 0.0017160493827160493, |
|
"loss": 0.6209, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.4573729519577894, |
|
"grad_norm": 0.002921945182606578, |
|
"learning_rate": 0.0017135802469135802, |
|
"loss": 0.5385, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.4595945570674813, |
|
"grad_norm": 0.003601512871682644, |
|
"learning_rate": 0.0017111111111111114, |
|
"loss": 0.5211, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.461816162177173, |
|
"grad_norm": 0.004054198041558266, |
|
"learning_rate": 0.001708641975308642, |
|
"loss": 0.6397, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.4640377672868647, |
|
"grad_norm": 0.007789338007569313, |
|
"learning_rate": 0.001706172839506173, |
|
"loss": 0.6116, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.4662593723965565, |
|
"grad_norm": 0.002359089907258749, |
|
"learning_rate": 0.0017037037037037036, |
|
"loss": 0.7018, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.4684809775062484, |
|
"grad_norm": 0.0033406647853553295, |
|
"learning_rate": 0.0017012345679012348, |
|
"loss": 0.7088, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.4707025826159401, |
|
"grad_norm": 0.0042248377576470375, |
|
"learning_rate": 0.0016987654320987653, |
|
"loss": 0.5889, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.4729241877256318, |
|
"grad_norm": 0.003515084972605109, |
|
"learning_rate": 0.0016962962962962964, |
|
"loss": 0.526, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.4751457928353235, |
|
"grad_norm": 0.003441104432567954, |
|
"learning_rate": 0.0016938271604938274, |
|
"loss": 0.6096, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.4773673979450153, |
|
"grad_norm": 0.010995453223586082, |
|
"learning_rate": 0.0016913580246913581, |
|
"loss": 0.6834, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.479589003054707, |
|
"grad_norm": 0.002769651124253869, |
|
"learning_rate": 0.001688888888888889, |
|
"loss": 0.5797, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.4818106081643987, |
|
"grad_norm": 0.0071142856031656265, |
|
"learning_rate": 0.0016864197530864198, |
|
"loss": 0.6198, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.4840322132740904, |
|
"grad_norm": 0.0038076492492109537, |
|
"learning_rate": 0.0016839506172839508, |
|
"loss": 0.5671, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.4862538183837823, |
|
"grad_norm": 0.0030527866911143064, |
|
"learning_rate": 0.0016814814814814813, |
|
"loss": 0.709, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.488475423493474, |
|
"grad_norm": 0.003952282480895519, |
|
"learning_rate": 0.0016790123456790125, |
|
"loss": 0.5556, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.4906970286031658, |
|
"grad_norm": 0.002633793978020549, |
|
"learning_rate": 0.0016765432098765434, |
|
"loss": 0.6726, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.4929186337128575, |
|
"grad_norm": 0.003946142271161079, |
|
"learning_rate": 0.0016740740740740741, |
|
"loss": 0.5391, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.4951402388225494, |
|
"grad_norm": 0.002832691418007016, |
|
"learning_rate": 0.001671604938271605, |
|
"loss": 0.5886, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.4973618439322411, |
|
"grad_norm": 0.0036020914558321238, |
|
"learning_rate": 0.0016691358024691358, |
|
"loss": 0.6381, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.4995834490419329, |
|
"grad_norm": 0.003216502955183387, |
|
"learning_rate": 0.0016666666666666668, |
|
"loss": 0.6763, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.5018050541516246, |
|
"grad_norm": 0.004902282729744911, |
|
"learning_rate": 0.0016641975308641975, |
|
"loss": 0.6803, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.5040266592613163, |
|
"grad_norm": 0.0033530874643474817, |
|
"learning_rate": 0.0016617283950617285, |
|
"loss": 0.6021, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.506248264371008, |
|
"grad_norm": 0.0028880988247692585, |
|
"learning_rate": 0.0016592592592592592, |
|
"loss": 0.5753, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.5084698694806997, |
|
"grad_norm": 0.0035627970937639475, |
|
"learning_rate": 0.0016567901234567901, |
|
"loss": 0.4803, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.5106914745903914, |
|
"grad_norm": 0.0036246783565729856, |
|
"learning_rate": 0.001654320987654321, |
|
"loss": 0.5201, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.5129130797000832, |
|
"grad_norm": 0.007211472373455763, |
|
"learning_rate": 0.0016518518518518518, |
|
"loss": 0.5862, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.515134684809775, |
|
"grad_norm": 0.0033675231970846653, |
|
"learning_rate": 0.0016493827160493828, |
|
"loss": 0.5614, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.5173562899194668, |
|
"grad_norm": 0.003647651756182313, |
|
"learning_rate": 0.0016469135802469135, |
|
"loss": 0.6762, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.5195778950291585, |
|
"grad_norm": 0.01229399535804987, |
|
"learning_rate": 0.0016444444444444445, |
|
"loss": 0.5211, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.5217995001388505, |
|
"grad_norm": 0.0026124136056751013, |
|
"learning_rate": 0.0016419753086419752, |
|
"loss": 0.4749, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.5240211052485422, |
|
"grad_norm": 0.0031844114419072866, |
|
"learning_rate": 0.0016395061728395062, |
|
"loss": 0.7737, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.526242710358234, |
|
"grad_norm": 0.0031438611913472414, |
|
"learning_rate": 0.001637037037037037, |
|
"loss": 0.6449, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.5284643154679256, |
|
"grad_norm": 0.0029068414587527514, |
|
"learning_rate": 0.0016345679012345678, |
|
"loss": 0.7685, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.5306859205776173, |
|
"grad_norm": 0.0024135911371558905, |
|
"learning_rate": 0.0016320987654320988, |
|
"loss": 0.5223, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.532907525687309, |
|
"grad_norm": 0.003742596134543419, |
|
"learning_rate": 0.0016296296296296295, |
|
"loss": 0.6033, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.5351291307970008, |
|
"grad_norm": 0.0037296046502888203, |
|
"learning_rate": 0.0016271604938271605, |
|
"loss": 0.569, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.5373507359066925, |
|
"grad_norm": 0.0029978591483086348, |
|
"learning_rate": 0.0016246913580246912, |
|
"loss": 0.5678, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.5395723410163842, |
|
"grad_norm": 0.0031321558635681868, |
|
"learning_rate": 0.0016222222222222222, |
|
"loss": 0.5599, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.5417939461260761, |
|
"grad_norm": 0.006184721831232309, |
|
"learning_rate": 0.0016197530864197533, |
|
"loss": 0.6828, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.5440155512357678, |
|
"grad_norm": 0.0033378407824784517, |
|
"learning_rate": 0.0016172839506172839, |
|
"loss": 0.6147, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.5462371563454596, |
|
"grad_norm": 0.009592154994606972, |
|
"learning_rate": 0.001614814814814815, |
|
"loss": 0.4876, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.5484587614551515, |
|
"grad_norm": 0.0029813058208674192, |
|
"learning_rate": 0.0016123456790123455, |
|
"loss": 0.6589, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.5506803665648432, |
|
"grad_norm": 0.0029721122700721025, |
|
"learning_rate": 0.0016098765432098767, |
|
"loss": 0.6142, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.552901971674535, |
|
"grad_norm": 0.003871340537443757, |
|
"learning_rate": 0.0016074074074074072, |
|
"loss": 0.3377, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.5551235767842266, |
|
"grad_norm": 0.0040636686608195305, |
|
"learning_rate": 0.0016049382716049384, |
|
"loss": 0.4972, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.5573451818939184, |
|
"grad_norm": 0.002279713749885559, |
|
"learning_rate": 0.0016024691358024693, |
|
"loss": 0.5894, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.55956678700361, |
|
"grad_norm": 0.00906870886683464, |
|
"learning_rate": 0.0016, |
|
"loss": 0.5124, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.5617883921133018, |
|
"grad_norm": 0.0029125467408448458, |
|
"learning_rate": 0.001597530864197531, |
|
"loss": 0.4712, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.5640099972229935, |
|
"grad_norm": 0.004257185850292444, |
|
"learning_rate": 0.0015950617283950618, |
|
"loss": 0.6892, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.5662316023326852, |
|
"grad_norm": 0.003058369504287839, |
|
"learning_rate": 0.0015925925925925927, |
|
"loss": 0.6638, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.5684532074423772, |
|
"grad_norm": 0.00876684207469225, |
|
"learning_rate": 0.0015901234567901234, |
|
"loss": 0.8143, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.5706748125520689, |
|
"grad_norm": 0.004514805041253567, |
|
"learning_rate": 0.0015876543209876544, |
|
"loss": 0.6552, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.5728964176617606, |
|
"grad_norm": 0.0033371017780154943, |
|
"learning_rate": 0.0015851851851851851, |
|
"loss": 0.5918, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.5751180227714525, |
|
"grad_norm": 0.004203807096928358, |
|
"learning_rate": 0.001582716049382716, |
|
"loss": 0.5464, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.5773396278811442, |
|
"grad_norm": 0.00238715554587543, |
|
"learning_rate": 0.001580246913580247, |
|
"loss": 0.5678, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.579561232990836, |
|
"grad_norm": 0.003361933631822467, |
|
"learning_rate": 0.0015777777777777778, |
|
"loss": 0.6751, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.5817828381005277, |
|
"grad_norm": 0.005657128058373928, |
|
"learning_rate": 0.0015753086419753087, |
|
"loss": 0.6271, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.5840044432102194, |
|
"grad_norm": 0.003028120379894972, |
|
"learning_rate": 0.0015728395061728395, |
|
"loss": 0.5869, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.586226048319911, |
|
"grad_norm": 0.0022292290814220905, |
|
"learning_rate": 0.0015703703703703704, |
|
"loss": 0.6638, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.5884476534296028, |
|
"grad_norm": 0.005532798357307911, |
|
"learning_rate": 0.0015679012345679011, |
|
"loss": 0.62, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.5906692585392945, |
|
"grad_norm": 0.0030338081996887922, |
|
"learning_rate": 0.001565432098765432, |
|
"loss": 0.5918, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.5928908636489862, |
|
"grad_norm": 0.002758693415671587, |
|
"learning_rate": 0.001562962962962963, |
|
"loss": 0.5983, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.5951124687586782, |
|
"grad_norm": 0.003892328590154648, |
|
"learning_rate": 0.0015604938271604938, |
|
"loss": 0.7643, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.59733407386837, |
|
"grad_norm": 0.003598182462155819, |
|
"learning_rate": 0.0015580246913580247, |
|
"loss": 0.5539, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.5995556789780616, |
|
"grad_norm": 0.0035891798324882984, |
|
"learning_rate": 0.0015555555555555555, |
|
"loss": 0.6757, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.6017772840877536, |
|
"grad_norm": 0.0055450694635510445, |
|
"learning_rate": 0.0015530864197530864, |
|
"loss": 0.5755, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.6039988891974453, |
|
"grad_norm": 0.0034944158978760242, |
|
"learning_rate": 0.0015506172839506172, |
|
"loss": 0.5402, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.606220494307137, |
|
"grad_norm": 0.0032796862069517374, |
|
"learning_rate": 0.001548148148148148, |
|
"loss": 0.6529, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.6084420994168287, |
|
"grad_norm": 0.004137154668569565, |
|
"learning_rate": 0.0015456790123456793, |
|
"loss": 0.5997, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.6106637045265204, |
|
"grad_norm": 0.003077402478083968, |
|
"learning_rate": 0.0015432098765432098, |
|
"loss": 0.5607, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.6128853096362121, |
|
"grad_norm": 0.0024837322998791933, |
|
"learning_rate": 0.001540740740740741, |
|
"loss": 0.4938, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.6151069147459038, |
|
"grad_norm": 0.0028923966456204653, |
|
"learning_rate": 0.0015382716049382715, |
|
"loss": 0.5682, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.6173285198555956, |
|
"grad_norm": 0.0036173651460558176, |
|
"learning_rate": 0.0015358024691358026, |
|
"loss": 0.5425, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.6195501249652873, |
|
"grad_norm": 0.00272387289442122, |
|
"learning_rate": 0.0015333333333333332, |
|
"loss": 0.5502, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.6217717300749792, |
|
"grad_norm": 0.005459922831505537, |
|
"learning_rate": 0.0015308641975308643, |
|
"loss": 0.6427, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.623993335184671, |
|
"grad_norm": 0.0024996125139296055, |
|
"learning_rate": 0.0015283950617283948, |
|
"loss": 0.4652, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.6262149402943626, |
|
"grad_norm": 0.002894905162975192, |
|
"learning_rate": 0.001525925925925926, |
|
"loss": 0.5918, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.6284365454040546, |
|
"grad_norm": 0.002637655008584261, |
|
"learning_rate": 0.001523456790123457, |
|
"loss": 0.7916, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.6306581505137463, |
|
"grad_norm": 0.003807837376371026, |
|
"learning_rate": 0.0015209876543209877, |
|
"loss": 0.6596, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.632879755623438, |
|
"grad_norm": 0.0031546615064144135, |
|
"learning_rate": 0.0015185185185185187, |
|
"loss": 0.5268, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.6351013607331297, |
|
"grad_norm": 0.002546357223764062, |
|
"learning_rate": 0.0015160493827160494, |
|
"loss": 0.5968, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.6373229658428214, |
|
"grad_norm": 0.003256875555962324, |
|
"learning_rate": 0.0015135802469135803, |
|
"loss": 0.6944, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.6395445709525132, |
|
"grad_norm": 0.0026704976335167885, |
|
"learning_rate": 0.001511111111111111, |
|
"loss": 0.6394, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.6417661760622049, |
|
"grad_norm": 0.0033118342980742455, |
|
"learning_rate": 0.001508641975308642, |
|
"loss": 0.5765, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.6439877811718966, |
|
"grad_norm": 0.0022287603933364153, |
|
"learning_rate": 0.001506172839506173, |
|
"loss": 0.5156, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.6462093862815883, |
|
"grad_norm": 0.0037926218938082457, |
|
"learning_rate": 0.0015037037037037037, |
|
"loss": 0.6705, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.6484309913912802, |
|
"grad_norm": 0.0030129130464047194, |
|
"learning_rate": 0.0015012345679012347, |
|
"loss": 0.5043, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.650652596500972, |
|
"grad_norm": 0.0029962975531816483, |
|
"learning_rate": 0.0014987654320987656, |
|
"loss": 0.486, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.6528742016106637, |
|
"grad_norm": 0.0029747833032160997, |
|
"learning_rate": 0.0014962962962962963, |
|
"loss": 0.7188, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.6550958067203556, |
|
"grad_norm": 0.0021846694871783257, |
|
"learning_rate": 0.0014938271604938273, |
|
"loss": 0.6036, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.6573174118300473, |
|
"grad_norm": 0.0029039906803518534, |
|
"learning_rate": 0.001491358024691358, |
|
"loss": 0.4769, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.659539016939739, |
|
"grad_norm": 0.0031887327786535025, |
|
"learning_rate": 0.001488888888888889, |
|
"loss": 0.4571, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.6617606220494308, |
|
"grad_norm": 0.0024829183239489794, |
|
"learning_rate": 0.0014864197530864197, |
|
"loss": 0.5437, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.6639822271591225, |
|
"grad_norm": 0.0026573697105050087, |
|
"learning_rate": 0.0014839506172839507, |
|
"loss": 0.4695, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.6662038322688142, |
|
"grad_norm": 0.004018016159534454, |
|
"learning_rate": 0.0014814814814814814, |
|
"loss": 0.5496, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.668425437378506, |
|
"grad_norm": 0.0031915605068206787, |
|
"learning_rate": 0.0014790123456790124, |
|
"loss": 0.5897, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.6706470424881976, |
|
"grad_norm": 0.0019371211528778076, |
|
"learning_rate": 0.0014765432098765433, |
|
"loss": 0.5734, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.6728686475978893, |
|
"grad_norm": 0.023410236462950706, |
|
"learning_rate": 0.001474074074074074, |
|
"loss": 0.829, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.6750902527075813, |
|
"grad_norm": 0.001892826403491199, |
|
"learning_rate": 0.001471604938271605, |
|
"loss": 0.7567, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.677311857817273, |
|
"grad_norm": 0.004156641196459532, |
|
"learning_rate": 0.0014691358024691357, |
|
"loss": 0.5387, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.6795334629269647, |
|
"grad_norm": 0.0030142583418637514, |
|
"learning_rate": 0.0014666666666666667, |
|
"loss": 0.5668, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.6817550680366566, |
|
"grad_norm": 0.004339707084000111, |
|
"learning_rate": 0.0014641975308641974, |
|
"loss": 0.5851, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.6839766731463484, |
|
"grad_norm": 0.0036378875374794006, |
|
"learning_rate": 0.0014617283950617286, |
|
"loss": 0.4967, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.68619827825604, |
|
"grad_norm": 0.0033033231738954782, |
|
"learning_rate": 0.0014592592592592593, |
|
"loss": 0.5743, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.6884198833657318, |
|
"grad_norm": 0.0028085929807275534, |
|
"learning_rate": 0.0014567901234567903, |
|
"loss": 0.6463, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.6906414884754235, |
|
"grad_norm": 0.005265321582555771, |
|
"learning_rate": 0.001454320987654321, |
|
"loss": 0.5585, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.6928630935851152, |
|
"grad_norm": 0.005091564729809761, |
|
"learning_rate": 0.001451851851851852, |
|
"loss": 0.5628, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.695084698694807, |
|
"grad_norm": 0.003041359828785062, |
|
"learning_rate": 0.0014493827160493827, |
|
"loss": 0.5766, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.6973063038044987, |
|
"grad_norm": 0.0026597119867801666, |
|
"learning_rate": 0.0014469135802469136, |
|
"loss": 0.7254, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.6995279089141904, |
|
"grad_norm": 0.003554214723408222, |
|
"learning_rate": 0.0014444444444444444, |
|
"loss": 0.5711, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.7017495140238823, |
|
"grad_norm": 0.0022794667165726423, |
|
"learning_rate": 0.0014419753086419753, |
|
"loss": 0.6111, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.703971119133574, |
|
"grad_norm": 0.00306728295981884, |
|
"learning_rate": 0.0014395061728395063, |
|
"loss": 0.4681, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.7061927242432657, |
|
"grad_norm": 0.0037019511219114065, |
|
"learning_rate": 0.001437037037037037, |
|
"loss": 0.5045, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.7084143293529577, |
|
"grad_norm": 0.010336251929402351, |
|
"learning_rate": 0.001434567901234568, |
|
"loss": 0.6809, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.7106359344626494, |
|
"grad_norm": 0.0039057794492691755, |
|
"learning_rate": 0.0014320987654320987, |
|
"loss": 0.537, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.712857539572341, |
|
"grad_norm": 0.005506658926606178, |
|
"learning_rate": 0.0014296296296296297, |
|
"loss": 0.5169, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.7150791446820328, |
|
"grad_norm": 0.0042714932933449745, |
|
"learning_rate": 0.0014271604938271604, |
|
"loss": 0.6398, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.7173007497917245, |
|
"grad_norm": 0.0034559788182377815, |
|
"learning_rate": 0.0014246913580246916, |
|
"loss": 0.5793, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.7195223549014163, |
|
"grad_norm": 0.0028325633611530066, |
|
"learning_rate": 0.0014222222222222223, |
|
"loss": 0.6201, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.721743960011108, |
|
"grad_norm": 0.0031491892877966166, |
|
"learning_rate": 0.0014197530864197532, |
|
"loss": 0.5324, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.7239655651207997, |
|
"grad_norm": 0.003899619448930025, |
|
"learning_rate": 0.001417283950617284, |
|
"loss": 0.4846, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.7261871702304914, |
|
"grad_norm": 0.005161529406905174, |
|
"learning_rate": 0.001414814814814815, |
|
"loss": 0.5526, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.7284087753401833, |
|
"grad_norm": 0.0032485160045325756, |
|
"learning_rate": 0.0014123456790123457, |
|
"loss": 0.5336, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.730630380449875, |
|
"grad_norm": 0.0028458782471716404, |
|
"learning_rate": 0.0014098765432098766, |
|
"loss": 0.4992, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.7328519855595668, |
|
"grad_norm": 0.004183284472674131, |
|
"learning_rate": 0.0014074074074074073, |
|
"loss": 0.5624, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.7350735906692587, |
|
"grad_norm": 0.0023061821702867746, |
|
"learning_rate": 0.0014049382716049383, |
|
"loss": 0.5448, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.7372951957789504, |
|
"grad_norm": 0.003513550153002143, |
|
"learning_rate": 0.0014024691358024693, |
|
"loss": 0.681, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.7395168008886421, |
|
"grad_norm": 0.0030224856454879045, |
|
"learning_rate": 0.0014, |
|
"loss": 0.6037, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.7417384059983338, |
|
"grad_norm": 0.0029998235404491425, |
|
"learning_rate": 0.001397530864197531, |
|
"loss": 0.4593, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.7439600111080256, |
|
"grad_norm": 0.003832211485132575, |
|
"learning_rate": 0.0013950617283950617, |
|
"loss": 0.5945, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.7461816162177173, |
|
"grad_norm": 0.003351503750309348, |
|
"learning_rate": 0.0013925925925925926, |
|
"loss": 0.5424, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.748403221327409, |
|
"grad_norm": 0.004922722466289997, |
|
"learning_rate": 0.0013901234567901234, |
|
"loss": 0.5702, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.7506248264371007, |
|
"grad_norm": 0.0033356482163071632, |
|
"learning_rate": 0.0013876543209876545, |
|
"loss": 0.6295, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.7528464315467924, |
|
"grad_norm": 0.0028782282024621964, |
|
"learning_rate": 0.0013851851851851853, |
|
"loss": 0.6662, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.7550680366564844, |
|
"grad_norm": 0.0029406151734292507, |
|
"learning_rate": 0.0013827160493827162, |
|
"loss": 0.6298, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.757289641766176, |
|
"grad_norm": 0.005104298237711191, |
|
"learning_rate": 0.001380246913580247, |
|
"loss": 0.7489, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.7595112468758678, |
|
"grad_norm": 0.0033603517804294825, |
|
"learning_rate": 0.001377777777777778, |
|
"loss": 0.6464, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.7617328519855595, |
|
"grad_norm": 0.003993791062384844, |
|
"learning_rate": 0.0013753086419753086, |
|
"loss": 0.7019, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.7639544570952514, |
|
"grad_norm": 0.0031684539280831814, |
|
"learning_rate": 0.0013728395061728396, |
|
"loss": 0.6741, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.7661760622049432, |
|
"grad_norm": 0.005214819684624672, |
|
"learning_rate": 0.0013703703703703703, |
|
"loss": 0.7585, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.7683976673146349, |
|
"grad_norm": 0.0023873383179306984, |
|
"learning_rate": 0.0013679012345679013, |
|
"loss": 0.477, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.7706192724243266, |
|
"grad_norm": 0.008193212561309338, |
|
"learning_rate": 0.0013654320987654322, |
|
"loss": 0.5233, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.7728408775340183, |
|
"grad_norm": 0.0024355570785701275, |
|
"learning_rate": 0.001362962962962963, |
|
"loss": 0.6476, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.77506248264371, |
|
"grad_norm": 0.003934087231755257, |
|
"learning_rate": 0.001360493827160494, |
|
"loss": 0.5401, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.7772840877534017, |
|
"grad_norm": 0.0030379316303879023, |
|
"learning_rate": 0.0013580246913580246, |
|
"loss": 0.7182, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.7795056928630935, |
|
"grad_norm": 0.0031336776446551085, |
|
"learning_rate": 0.0013555555555555556, |
|
"loss": 0.576, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.7817272979727852, |
|
"grad_norm": 0.0028493087738752365, |
|
"learning_rate": 0.0013530864197530863, |
|
"loss": 0.4405, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.783948903082477, |
|
"grad_norm": 0.0059678577817976475, |
|
"learning_rate": 0.0013506172839506175, |
|
"loss": 0.7326, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.7861705081921688, |
|
"grad_norm": 0.0037832173984497786, |
|
"learning_rate": 0.0013481481481481482, |
|
"loss": 0.7904, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.7883921133018605, |
|
"grad_norm": 0.005568805616348982, |
|
"learning_rate": 0.0013456790123456792, |
|
"loss": 0.4736, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.7906137184115525, |
|
"grad_norm": 0.003628462553024292, |
|
"learning_rate": 0.00134320987654321, |
|
"loss": 0.7379, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.7928353235212442, |
|
"grad_norm": 0.0037421484012156725, |
|
"learning_rate": 0.0013407407407407409, |
|
"loss": 0.8635, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.795056928630936, |
|
"grad_norm": 0.0035124209243804216, |
|
"learning_rate": 0.0013382716049382716, |
|
"loss": 0.7215, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.7972785337406276, |
|
"grad_norm": 0.0027946464251726866, |
|
"learning_rate": 0.0013358024691358023, |
|
"loss": 0.6327, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.7995001388503193, |
|
"grad_norm": 0.0035201977007091045, |
|
"learning_rate": 0.0013333333333333333, |
|
"loss": 0.8533, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.801721743960011, |
|
"grad_norm": 0.002365663181990385, |
|
"learning_rate": 0.0013308641975308642, |
|
"loss": 0.56, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.8039433490697028, |
|
"grad_norm": 0.004486533813178539, |
|
"learning_rate": 0.0013283950617283952, |
|
"loss": 0.6485, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.8061649541793945, |
|
"grad_norm": 0.0047790007665753365, |
|
"learning_rate": 0.001325925925925926, |
|
"loss": 0.6221, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.8083865592890862, |
|
"grad_norm": 0.003134514670819044, |
|
"learning_rate": 0.0013234567901234569, |
|
"loss": 0.627, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.8106081643987781, |
|
"grad_norm": 0.00474384892731905, |
|
"learning_rate": 0.0013209876543209876, |
|
"loss": 0.4979, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.8128297695084699, |
|
"grad_norm": 0.002987217390909791, |
|
"learning_rate": 0.0013185185185185186, |
|
"loss": 0.5478, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.8150513746181616, |
|
"grad_norm": 0.004350063391029835, |
|
"learning_rate": 0.0013160493827160493, |
|
"loss": 0.6971, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.8172729797278535, |
|
"grad_norm": 0.0028352115768939257, |
|
"learning_rate": 0.0013135802469135802, |
|
"loss": 0.5121, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.8194945848375452, |
|
"grad_norm": 0.003356591099873185, |
|
"learning_rate": 0.0013111111111111112, |
|
"loss": 0.5808, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.821716189947237, |
|
"grad_norm": 0.002301991917192936, |
|
"learning_rate": 0.001308641975308642, |
|
"loss": 0.5578, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.8239377950569287, |
|
"grad_norm": 0.002217639936134219, |
|
"learning_rate": 0.0013061728395061729, |
|
"loss": 0.5005, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.8261594001666204, |
|
"grad_norm": 0.003184704342857003, |
|
"learning_rate": 0.0013037037037037036, |
|
"loss": 0.5521, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.828381005276312, |
|
"grad_norm": 0.0025311317294836044, |
|
"learning_rate": 0.0013012345679012346, |
|
"loss": 0.5431, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.8306026103860038, |
|
"grad_norm": 0.0038392578717321157, |
|
"learning_rate": 0.0012987654320987653, |
|
"loss": 0.7709, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.8328242154956955, |
|
"grad_norm": 0.00471165357157588, |
|
"learning_rate": 0.0012962962962962963, |
|
"loss": 0.515, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.8350458206053872, |
|
"grad_norm": 0.00271948822773993, |
|
"learning_rate": 0.0012938271604938272, |
|
"loss": 0.631, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.8372674257150792, |
|
"grad_norm": 0.003444887697696686, |
|
"learning_rate": 0.0012913580246913582, |
|
"loss": 0.7599, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.8394890308247709, |
|
"grad_norm": 0.004435122944414616, |
|
"learning_rate": 0.001288888888888889, |
|
"loss": 0.4582, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.8417106359344626, |
|
"grad_norm": 0.004231697879731655, |
|
"learning_rate": 0.0012864197530864198, |
|
"loss": 0.4969, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.8439322410441545, |
|
"grad_norm": 0.004003169015049934, |
|
"learning_rate": 0.0012839506172839506, |
|
"loss": 0.4949, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.8461538461538463, |
|
"grad_norm": 0.002871347591280937, |
|
"learning_rate": 0.0012814814814814815, |
|
"loss": 0.5691, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.848375451263538, |
|
"grad_norm": 0.0037768930196762085, |
|
"learning_rate": 0.0012790123456790123, |
|
"loss": 0.5709, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.8505970563732297, |
|
"grad_norm": 0.003291292116045952, |
|
"learning_rate": 0.0012765432098765432, |
|
"loss": 0.5088, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.8528186614829214, |
|
"grad_norm": 0.0028356255497783422, |
|
"learning_rate": 0.0012740740740740742, |
|
"loss": 0.6365, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.8550402665926131, |
|
"grad_norm": 0.002764333738014102, |
|
"learning_rate": 0.001271604938271605, |
|
"loss": 0.5681, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.8572618717023048, |
|
"grad_norm": 0.0034658394288271666, |
|
"learning_rate": 0.0012691358024691359, |
|
"loss": 0.6079, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.8594834768119965, |
|
"grad_norm": 0.0034142339136451483, |
|
"learning_rate": 0.0012666666666666666, |
|
"loss": 0.5121, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.8617050819216883, |
|
"grad_norm": 0.00351393548771739, |
|
"learning_rate": 0.0012641975308641975, |
|
"loss": 0.5633, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.8639266870313802, |
|
"grad_norm": 0.002946634776890278, |
|
"learning_rate": 0.0012617283950617283, |
|
"loss": 0.5348, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.866148292141072, |
|
"grad_norm": 0.0031421848107129335, |
|
"learning_rate": 0.0012592592592592592, |
|
"loss": 0.512, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.8683698972507636, |
|
"grad_norm": 0.0020834184251725674, |
|
"learning_rate": 0.0012567901234567902, |
|
"loss": 0.6114, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.8705915023604556, |
|
"grad_norm": 0.0052506448701024055, |
|
"learning_rate": 0.0012543209876543211, |
|
"loss": 0.5895, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.8728131074701473, |
|
"grad_norm": 0.002586257178336382, |
|
"learning_rate": 0.0012518518518518519, |
|
"loss": 0.4924, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.875034712579839, |
|
"grad_norm": 0.005527224391698837, |
|
"learning_rate": 0.0012493827160493828, |
|
"loss": 0.5824, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.8772563176895307, |
|
"grad_norm": 0.002853000769391656, |
|
"learning_rate": 0.0012469135802469136, |
|
"loss": 0.6263, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.8794779227992224, |
|
"grad_norm": 0.0035244813188910484, |
|
"learning_rate": 0.0012444444444444445, |
|
"loss": 0.5518, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.8816995279089141, |
|
"grad_norm": 0.00501825287938118, |
|
"learning_rate": 0.0012419753086419752, |
|
"loss": 0.7406, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.8839211330186059, |
|
"grad_norm": 0.0029411388095468283, |
|
"learning_rate": 0.0012395061728395062, |
|
"loss": 0.5762, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.8861427381282976, |
|
"grad_norm": 0.004388920497149229, |
|
"learning_rate": 0.0012370370370370371, |
|
"loss": 0.6418, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.8883643432379893, |
|
"grad_norm": 0.0025772335939109325, |
|
"learning_rate": 0.0012345679012345679, |
|
"loss": 0.4931, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.8905859483476812, |
|
"grad_norm": 0.0032501844689249992, |
|
"learning_rate": 0.0012320987654320988, |
|
"loss": 0.7966, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.892807553457373, |
|
"grad_norm": 0.002551119541749358, |
|
"learning_rate": 0.0012296296296296296, |
|
"loss": 0.4498, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.8950291585670647, |
|
"grad_norm": 0.0032290250528603792, |
|
"learning_rate": 0.0012271604938271605, |
|
"loss": 0.5856, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.8972507636767566, |
|
"grad_norm": 0.004684037994593382, |
|
"learning_rate": 0.0012246913580246912, |
|
"loss": 0.7181, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.8994723687864483, |
|
"grad_norm": 0.0029012972954660654, |
|
"learning_rate": 0.0012222222222222222, |
|
"loss": 0.5601, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.90169397389614, |
|
"grad_norm": 0.002747067715972662, |
|
"learning_rate": 0.0012197530864197532, |
|
"loss": 0.5593, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.9039155790058317, |
|
"grad_norm": 0.003568375715985894, |
|
"learning_rate": 0.001217283950617284, |
|
"loss": 0.7568, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.9061371841155235, |
|
"grad_norm": 0.0028879425954073668, |
|
"learning_rate": 0.0012148148148148148, |
|
"loss": 0.5961, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.9083587892252152, |
|
"grad_norm": 0.0026781039778143167, |
|
"learning_rate": 0.0012123456790123458, |
|
"loss": 0.5069, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.910580394334907, |
|
"grad_norm": 0.005707379896193743, |
|
"learning_rate": 0.0012098765432098765, |
|
"loss": 0.5974, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.9128019994445986, |
|
"grad_norm": 0.0032613372895866632, |
|
"learning_rate": 0.0012074074074074075, |
|
"loss": 0.5567, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.9150236045542903, |
|
"grad_norm": 0.0032240943983197212, |
|
"learning_rate": 0.0012049382716049382, |
|
"loss": 0.4579, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.9172452096639823, |
|
"grad_norm": 0.0034050929825752974, |
|
"learning_rate": 0.0012024691358024692, |
|
"loss": 0.5795, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.919466814773674, |
|
"grad_norm": 0.002941009821370244, |
|
"learning_rate": 0.0012000000000000001, |
|
"loss": 0.5617, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.9216884198833657, |
|
"grad_norm": 0.003575817449018359, |
|
"learning_rate": 0.0011975308641975308, |
|
"loss": 0.6024, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.9239100249930576, |
|
"grad_norm": 0.003908672835677862, |
|
"learning_rate": 0.0011950617283950618, |
|
"loss": 0.6763, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.9261316301027493, |
|
"grad_norm": 0.0037765023298561573, |
|
"learning_rate": 0.0011925925925925925, |
|
"loss": 0.8389, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.928353235212441, |
|
"grad_norm": 0.0037054913118481636, |
|
"learning_rate": 0.0011901234567901235, |
|
"loss": 0.6546, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.9305748403221328, |
|
"grad_norm": 0.003900451585650444, |
|
"learning_rate": 0.0011876543209876542, |
|
"loss": 0.9324, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.9327964454318245, |
|
"grad_norm": 0.004091076552867889, |
|
"learning_rate": 0.0011851851851851852, |
|
"loss": 0.5661, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.9350180505415162, |
|
"grad_norm": 0.0036010409239679575, |
|
"learning_rate": 0.0011827160493827161, |
|
"loss": 0.5512, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.937239655651208, |
|
"grad_norm": 0.002555584069341421, |
|
"learning_rate": 0.001180246913580247, |
|
"loss": 0.5976, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.9394612607608996, |
|
"grad_norm": 0.002963186940178275, |
|
"learning_rate": 0.0011777777777777778, |
|
"loss": 0.5241, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.9416828658705914, |
|
"grad_norm": 0.002275889739394188, |
|
"learning_rate": 0.0011753086419753088, |
|
"loss": 0.4056, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.9439044709802833, |
|
"grad_norm": 0.004920678213238716, |
|
"learning_rate": 0.0011728395061728395, |
|
"loss": 0.768, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.946126076089975, |
|
"grad_norm": 0.003980088047683239, |
|
"learning_rate": 0.0011703703703703704, |
|
"loss": 0.823, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.9483476811996667, |
|
"grad_norm": 0.006616650614887476, |
|
"learning_rate": 0.0011679012345679012, |
|
"loss": 0.5191, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.9505692863093587, |
|
"grad_norm": 0.009624580852687359, |
|
"learning_rate": 0.0011654320987654321, |
|
"loss": 0.5275, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.9527908914190504, |
|
"grad_norm": 0.0033147030044347048, |
|
"learning_rate": 0.001162962962962963, |
|
"loss": 0.6125, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.955012496528742, |
|
"grad_norm": 0.004352504387497902, |
|
"learning_rate": 0.0011604938271604938, |
|
"loss": 0.6313, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.9572341016384338, |
|
"grad_norm": 0.0033672174904495478, |
|
"learning_rate": 0.0011580246913580248, |
|
"loss": 0.749, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.9594557067481255, |
|
"grad_norm": 0.003146470058709383, |
|
"learning_rate": 0.0011555555555555555, |
|
"loss": 0.6821, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.9616773118578172, |
|
"grad_norm": 0.004087381064891815, |
|
"learning_rate": 0.0011530864197530865, |
|
"loss": 0.7134, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.963898916967509, |
|
"grad_norm": 0.005879619624465704, |
|
"learning_rate": 0.0011506172839506172, |
|
"loss": 0.6927, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.9661205220772007, |
|
"grad_norm": 0.003711952827870846, |
|
"learning_rate": 0.0011481481481481481, |
|
"loss": 0.5362, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.9683421271868924, |
|
"grad_norm": 0.002924004103988409, |
|
"learning_rate": 0.001145679012345679, |
|
"loss": 0.6104, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.9705637322965843, |
|
"grad_norm": 0.0042654345743358135, |
|
"learning_rate": 0.00114320987654321, |
|
"loss": 0.6043, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.972785337406276, |
|
"grad_norm": 0.0030271972063928843, |
|
"learning_rate": 0.0011407407407407408, |
|
"loss": 0.6018, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.9750069425159678, |
|
"grad_norm": 0.0038058331701904535, |
|
"learning_rate": 0.0011382716049382717, |
|
"loss": 0.5575, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.9772285476256597, |
|
"grad_norm": 0.0035539015661925077, |
|
"learning_rate": 0.0011358024691358025, |
|
"loss": 0.6049, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.9794501527353514, |
|
"grad_norm": 0.0024260911159217358, |
|
"learning_rate": 0.0011333333333333334, |
|
"loss": 0.544, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.9816717578450431, |
|
"grad_norm": 0.0033254839945584536, |
|
"learning_rate": 0.0011308641975308641, |
|
"loss": 0.6019, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.9838933629547348, |
|
"grad_norm": 0.003086360404267907, |
|
"learning_rate": 0.001128395061728395, |
|
"loss": 0.7242, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.9861149680644266, |
|
"grad_norm": 0.004596822429448366, |
|
"learning_rate": 0.001125925925925926, |
|
"loss": 0.6787, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.9883365731741183, |
|
"grad_norm": 0.0026326498482376337, |
|
"learning_rate": 0.0011234567901234568, |
|
"loss": 0.7331, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.99055817828381, |
|
"grad_norm": 0.002320569707080722, |
|
"learning_rate": 0.0011209876543209877, |
|
"loss": 0.5561, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.9927797833935017, |
|
"grad_norm": 0.0028732616920024157, |
|
"learning_rate": 0.0011185185185185185, |
|
"loss": 0.6496, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.9950013885031934, |
|
"grad_norm": 0.0031755072996020317, |
|
"learning_rate": 0.0011160493827160494, |
|
"loss": 0.5572, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.9972229936128854, |
|
"grad_norm": 0.008104439824819565, |
|
"learning_rate": 0.0011135802469135802, |
|
"loss": 0.6715, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.999444598722577, |
|
"grad_norm": 0.0031844815239310265, |
|
"learning_rate": 0.0011111111111111111, |
|
"loss": 0.6162, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.001666203832269, |
|
"grad_norm": 0.0032060628291219473, |
|
"learning_rate": 0.001108641975308642, |
|
"loss": 0.8407, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.0038878089419607, |
|
"grad_norm": 0.004372204653918743, |
|
"learning_rate": 0.001106172839506173, |
|
"loss": 0.6518, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.0061094140516524, |
|
"grad_norm": 0.004050056450068951, |
|
"learning_rate": 0.0011037037037037037, |
|
"loss": 0.5921, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.008331019161344, |
|
"grad_norm": 0.0027456870302557945, |
|
"learning_rate": 0.0011012345679012347, |
|
"loss": 0.62, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.010552624271036, |
|
"grad_norm": 0.003426674986258149, |
|
"learning_rate": 0.0010987654320987654, |
|
"loss": 0.5978, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.0127742293807276, |
|
"grad_norm": 0.0026935499627143145, |
|
"learning_rate": 0.0010962962962962964, |
|
"loss": 0.7481, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.0149958344904193, |
|
"grad_norm": 0.003700273809954524, |
|
"learning_rate": 0.0010938271604938271, |
|
"loss": 0.5227, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.017217439600111, |
|
"grad_norm": 0.002455906942486763, |
|
"learning_rate": 0.001091358024691358, |
|
"loss": 0.5486, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.0194390447098027, |
|
"grad_norm": 0.005244240630418062, |
|
"learning_rate": 0.001088888888888889, |
|
"loss": 0.5229, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.0216606498194944, |
|
"grad_norm": 0.0026684950571507215, |
|
"learning_rate": 0.0010864197530864198, |
|
"loss": 0.6032, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.023882254929186, |
|
"grad_norm": 0.003200845792889595, |
|
"learning_rate": 0.0010839506172839507, |
|
"loss": 0.7075, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.026103860038878, |
|
"grad_norm": 0.0034468970261514187, |
|
"learning_rate": 0.0010814814814814814, |
|
"loss": 0.6763, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.02832546514857, |
|
"grad_norm": 0.0038897364865988493, |
|
"learning_rate": 0.0010790123456790124, |
|
"loss": 0.6753, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.0305470702582618, |
|
"grad_norm": 0.004033284727483988, |
|
"learning_rate": 0.0010765432098765431, |
|
"loss": 0.585, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.0327686753679535, |
|
"grad_norm": 0.0034496188163757324, |
|
"learning_rate": 0.001074074074074074, |
|
"loss": 0.5053, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.034990280477645, |
|
"grad_norm": 0.0031487636733800173, |
|
"learning_rate": 0.0010716049382716048, |
|
"loss": 0.4998, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.037211885587337, |
|
"grad_norm": 0.010006478987634182, |
|
"learning_rate": 0.001069135802469136, |
|
"loss": 0.7352, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.0394334906970286, |
|
"grad_norm": 0.0017054947093129158, |
|
"learning_rate": 0.0010666666666666667, |
|
"loss": 0.5026, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.0416550958067203, |
|
"grad_norm": 0.002679955679923296, |
|
"learning_rate": 0.0010641975308641977, |
|
"loss": 0.5098, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.043876700916412, |
|
"grad_norm": 0.0028986730612814426, |
|
"learning_rate": 0.0010617283950617284, |
|
"loss": 0.5148, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.0460983060261038, |
|
"grad_norm": 0.0033778445795178413, |
|
"learning_rate": 0.0010592592592592594, |
|
"loss": 0.6228, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.0483199111357955, |
|
"grad_norm": 0.0032284266781061888, |
|
"learning_rate": 0.00105679012345679, |
|
"loss": 0.486, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.050541516245487, |
|
"grad_norm": 0.005231673363596201, |
|
"learning_rate": 0.001054320987654321, |
|
"loss": 0.6242, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.052763121355179, |
|
"grad_norm": 0.0021800033282488585, |
|
"learning_rate": 0.001051851851851852, |
|
"loss": 0.791, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.054984726464871, |
|
"grad_norm": 0.002410687506198883, |
|
"learning_rate": 0.0010493827160493827, |
|
"loss": 0.4821, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.057206331574563, |
|
"grad_norm": 0.002446606522426009, |
|
"learning_rate": 0.0010469135802469137, |
|
"loss": 0.4773, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.0594279366842545, |
|
"grad_norm": 0.0032503134571015835, |
|
"learning_rate": 0.0010444444444444444, |
|
"loss": 0.56, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.061649541793946, |
|
"grad_norm": 0.003430373501032591, |
|
"learning_rate": 0.0010419753086419754, |
|
"loss": 0.5977, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.063871146903638, |
|
"grad_norm": 0.002888357499614358, |
|
"learning_rate": 0.001039506172839506, |
|
"loss": 0.4282, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.0660927520133296, |
|
"grad_norm": 0.0051438468508422375, |
|
"learning_rate": 0.001037037037037037, |
|
"loss": 0.74, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.0683143571230214, |
|
"grad_norm": 0.002881046384572983, |
|
"learning_rate": 0.0010345679012345678, |
|
"loss": 0.6234, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.070535962232713, |
|
"grad_norm": 0.003086875891312957, |
|
"learning_rate": 0.001032098765432099, |
|
"loss": 0.5821, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.072757567342405, |
|
"grad_norm": 0.002403114689514041, |
|
"learning_rate": 0.0010296296296296297, |
|
"loss": 0.5231, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.0749791724520965, |
|
"grad_norm": 0.003417134517803788, |
|
"learning_rate": 0.0010271604938271606, |
|
"loss": 0.5367, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.0772007775617882, |
|
"grad_norm": 0.0030301024671643972, |
|
"learning_rate": 0.0010246913580246914, |
|
"loss": 0.717, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.07942238267148, |
|
"grad_norm": 0.0032155404333025217, |
|
"learning_rate": 0.0010222222222222223, |
|
"loss": 0.4748, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.081643987781172, |
|
"grad_norm": 0.003064756980165839, |
|
"learning_rate": 0.001019753086419753, |
|
"loss": 0.6103, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.083865592890864, |
|
"grad_norm": 0.0030713852029293776, |
|
"learning_rate": 0.001017283950617284, |
|
"loss": 0.5911, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.0860871980005555, |
|
"grad_norm": 0.0025016344152390957, |
|
"learning_rate": 0.001014814814814815, |
|
"loss": 0.6379, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.0883088031102472, |
|
"grad_norm": 0.002508206060156226, |
|
"learning_rate": 0.0010123456790123457, |
|
"loss": 0.5557, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.090530408219939, |
|
"grad_norm": 0.0027723240200430155, |
|
"learning_rate": 0.0010098765432098766, |
|
"loss": 0.6908, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.0927520133296307, |
|
"grad_norm": 0.0024629731196910143, |
|
"learning_rate": 0.0010074074074074074, |
|
"loss": 0.4695, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.0949736184393224, |
|
"grad_norm": 0.004411566071212292, |
|
"learning_rate": 0.0010049382716049383, |
|
"loss": 0.4918, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.097195223549014, |
|
"grad_norm": 0.0032425241079181433, |
|
"learning_rate": 0.001002469135802469, |
|
"loss": 0.7062, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.099416828658706, |
|
"grad_norm": 0.0037023716140538454, |
|
"learning_rate": 0.001, |
|
"loss": 0.4463, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.1016384337683975, |
|
"grad_norm": 0.0033389173913747072, |
|
"learning_rate": 0.0009975308641975308, |
|
"loss": 0.6592, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.1038600388780893, |
|
"grad_norm": 0.002863410161808133, |
|
"learning_rate": 0.000995061728395062, |
|
"loss": 0.5698, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.106081643987781, |
|
"grad_norm": 0.002832047175616026, |
|
"learning_rate": 0.0009925925925925927, |
|
"loss": 0.5297, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.108303249097473, |
|
"grad_norm": 0.002443015808239579, |
|
"learning_rate": 0.0009901234567901234, |
|
"loss": 0.5083, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.110524854207165, |
|
"grad_norm": 0.003680097870528698, |
|
"learning_rate": 0.0009876543209876543, |
|
"loss": 0.6119, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.1127464593168566, |
|
"grad_norm": 0.0026307208463549614, |
|
"learning_rate": 0.000985185185185185, |
|
"loss": 0.5917, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.1149680644265483, |
|
"grad_norm": 0.006063534878194332, |
|
"learning_rate": 0.000982716049382716, |
|
"loss": 0.7092, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.11718966953624, |
|
"grad_norm": 0.0022731171920895576, |
|
"learning_rate": 0.0009802469135802468, |
|
"loss": 0.6964, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.1194112746459317, |
|
"grad_norm": 0.0027985614724457264, |
|
"learning_rate": 0.000977777777777778, |
|
"loss": 0.4312, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.1216328797556234, |
|
"grad_norm": 0.0022834171541035175, |
|
"learning_rate": 0.0009753086419753087, |
|
"loss": 0.6673, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.123854484865315, |
|
"grad_norm": 0.003239828860387206, |
|
"learning_rate": 0.0009728395061728395, |
|
"loss": 0.696, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.126076089975007, |
|
"grad_norm": 0.003194565186277032, |
|
"learning_rate": 0.0009703703703703704, |
|
"loss": 0.5976, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.1282976950846986, |
|
"grad_norm": 0.002975456416606903, |
|
"learning_rate": 0.0009679012345679012, |
|
"loss": 0.6193, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.1305193001943903, |
|
"grad_norm": 0.0038265048060566187, |
|
"learning_rate": 0.000965432098765432, |
|
"loss": 0.6149, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.132740905304082, |
|
"grad_norm": 0.004864771384745836, |
|
"learning_rate": 0.0009629629629629629, |
|
"loss": 0.5857, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.134962510413774, |
|
"grad_norm": 0.0027290659490972757, |
|
"learning_rate": 0.0009604938271604937, |
|
"loss": 0.4769, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.137184115523466, |
|
"grad_norm": 0.0031398101709783077, |
|
"learning_rate": 0.0009580246913580248, |
|
"loss": 0.609, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.1394057206331576, |
|
"grad_norm": 0.0030726627446711063, |
|
"learning_rate": 0.0009555555555555556, |
|
"loss": 0.5457, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.1416273257428493, |
|
"grad_norm": 0.003792215371504426, |
|
"learning_rate": 0.0009530864197530865, |
|
"loss": 0.6251, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.143848930852541, |
|
"grad_norm": 0.003504569409415126, |
|
"learning_rate": 0.0009506172839506173, |
|
"loss": 0.5433, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.1460705359622327, |
|
"grad_norm": 0.002695186994969845, |
|
"learning_rate": 0.0009481481481481482, |
|
"loss": 0.7355, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.1482921410719245, |
|
"grad_norm": 0.00423799641430378, |
|
"learning_rate": 0.000945679012345679, |
|
"loss": 0.707, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.150513746181616, |
|
"grad_norm": 0.0020570242777466774, |
|
"learning_rate": 0.0009432098765432098, |
|
"loss": 0.554, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.152735351291308, |
|
"grad_norm": 0.008327562361955643, |
|
"learning_rate": 0.0009407407407407408, |
|
"loss": 0.7368, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.1549569564009996, |
|
"grad_norm": 0.003192309755831957, |
|
"learning_rate": 0.0009382716049382716, |
|
"loss": 0.5697, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.1571785615106913, |
|
"grad_norm": 0.003064699936658144, |
|
"learning_rate": 0.0009358024691358025, |
|
"loss": 0.6009, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.159400166620383, |
|
"grad_norm": 0.004011985845863819, |
|
"learning_rate": 0.0009333333333333333, |
|
"loss": 0.591, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.1616217717300747, |
|
"grad_norm": 0.0022674642968922853, |
|
"learning_rate": 0.0009308641975308642, |
|
"loss": 0.5875, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.163843376839767, |
|
"grad_norm": 0.0029301177710294724, |
|
"learning_rate": 0.000928395061728395, |
|
"loss": 0.5329, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.1660649819494586, |
|
"grad_norm": 0.003778566373512149, |
|
"learning_rate": 0.0009259259259259259, |
|
"loss": 0.5196, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.1682865870591503, |
|
"grad_norm": 0.006877492181956768, |
|
"learning_rate": 0.0009234567901234567, |
|
"loss": 0.531, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.170508192168842, |
|
"grad_norm": 0.0031633051112294197, |
|
"learning_rate": 0.0009209876543209878, |
|
"loss": 0.6596, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.1727297972785338, |
|
"grad_norm": 0.003557397285476327, |
|
"learning_rate": 0.0009185185185185186, |
|
"loss": 0.5644, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.1749514023882255, |
|
"grad_norm": 0.003621811978518963, |
|
"learning_rate": 0.0009160493827160494, |
|
"loss": 0.6401, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.177173007497917, |
|
"grad_norm": 0.0038098511286079884, |
|
"learning_rate": 0.0009135802469135803, |
|
"loss": 0.6542, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.179394612607609, |
|
"grad_norm": 0.002912967000156641, |
|
"learning_rate": 0.0009111111111111111, |
|
"loss": 0.5677, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.1816162177173006, |
|
"grad_norm": 0.0035600329283624887, |
|
"learning_rate": 0.000908641975308642, |
|
"loss": 0.538, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.1838378228269923, |
|
"grad_norm": 0.0066943420097231865, |
|
"learning_rate": 0.0009061728395061728, |
|
"loss": 0.5981, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.186059427936684, |
|
"grad_norm": 0.0033611019607633352, |
|
"learning_rate": 0.0009037037037037038, |
|
"loss": 0.6569, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.1882810330463762, |
|
"grad_norm": 0.002343050902709365, |
|
"learning_rate": 0.0009012345679012346, |
|
"loss": 0.5339, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.190502638156068, |
|
"grad_norm": 0.0032377007883042097, |
|
"learning_rate": 0.0008987654320987655, |
|
"loss": 0.5378, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.1927242432657597, |
|
"grad_norm": 0.0030594319105148315, |
|
"learning_rate": 0.0008962962962962963, |
|
"loss": 0.5795, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.1949458483754514, |
|
"grad_norm": 0.0029608525801450014, |
|
"learning_rate": 0.0008938271604938271, |
|
"loss": 0.5973, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.197167453485143, |
|
"grad_norm": 0.0032938173972070217, |
|
"learning_rate": 0.000891358024691358, |
|
"loss": 0.4822, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.199389058594835, |
|
"grad_norm": 0.0032497644424438477, |
|
"learning_rate": 0.0008888888888888888, |
|
"loss": 0.5267, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.2016106637045265, |
|
"grad_norm": 0.0032970972824841738, |
|
"learning_rate": 0.0008864197530864197, |
|
"loss": 0.6145, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.2038322688142182, |
|
"grad_norm": 0.005727086216211319, |
|
"learning_rate": 0.0008839506172839507, |
|
"loss": 0.5812, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.20605387392391, |
|
"grad_norm": 0.002563419518992305, |
|
"learning_rate": 0.0008814814814814816, |
|
"loss": 0.4931, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.2082754790336017, |
|
"grad_norm": 0.004069584421813488, |
|
"learning_rate": 0.0008790123456790124, |
|
"loss": 0.5933, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.2104970841432934, |
|
"grad_norm": 0.0038564614951610565, |
|
"learning_rate": 0.0008765432098765433, |
|
"loss": 0.559, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.212718689252985, |
|
"grad_norm": 0.00243211816996336, |
|
"learning_rate": 0.0008740740740740741, |
|
"loss": 0.5612, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.214940294362677, |
|
"grad_norm": 0.002944133710116148, |
|
"learning_rate": 0.0008716049382716049, |
|
"loss": 0.4462, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.217161899472369, |
|
"grad_norm": 0.004629380535334349, |
|
"learning_rate": 0.0008691358024691358, |
|
"loss": 0.5722, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.2193835045820607, |
|
"grad_norm": 0.0027174127753823996, |
|
"learning_rate": 0.0008666666666666666, |
|
"loss": 0.5904, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.2216051096917524, |
|
"grad_norm": 0.0032827944960445166, |
|
"learning_rate": 0.0008641975308641976, |
|
"loss": 0.6582, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.223826714801444, |
|
"grad_norm": 0.0030610065441578627, |
|
"learning_rate": 0.0008617283950617284, |
|
"loss": 0.542, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.226048319911136, |
|
"grad_norm": 0.003364963224157691, |
|
"learning_rate": 0.0008592592592592593, |
|
"loss": 0.6866, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.2282699250208275, |
|
"grad_norm": 0.0030600426252931356, |
|
"learning_rate": 0.0008567901234567901, |
|
"loss": 0.7132, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.2304915301305193, |
|
"grad_norm": 0.003552238689735532, |
|
"learning_rate": 0.000854320987654321, |
|
"loss": 0.5064, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.232713135240211, |
|
"grad_norm": 0.0041660708375275135, |
|
"learning_rate": 0.0008518518518518518, |
|
"loss": 0.6445, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.2349347403499027, |
|
"grad_norm": 0.0053717633709311485, |
|
"learning_rate": 0.0008493827160493826, |
|
"loss": 0.5408, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.2371563454595944, |
|
"grad_norm": 0.00260202307254076, |
|
"learning_rate": 0.0008469135802469137, |
|
"loss": 0.6055, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.239377950569286, |
|
"grad_norm": 0.002337533049285412, |
|
"learning_rate": 0.0008444444444444445, |
|
"loss": 0.6636, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.2415995556789783, |
|
"grad_norm": 0.003964942414313555, |
|
"learning_rate": 0.0008419753086419754, |
|
"loss": 0.577, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.24382116078867, |
|
"grad_norm": 0.0029365664813667536, |
|
"learning_rate": 0.0008395061728395062, |
|
"loss": 0.4955, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.2460427658983617, |
|
"grad_norm": 0.004111704416573048, |
|
"learning_rate": 0.0008370370370370371, |
|
"loss": 0.5974, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.2482643710080534, |
|
"grad_norm": 0.00641743466258049, |
|
"learning_rate": 0.0008345679012345679, |
|
"loss": 0.5814, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.250485976117745, |
|
"grad_norm": 0.0032692174427211285, |
|
"learning_rate": 0.0008320987654320988, |
|
"loss": 0.5916, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.252707581227437, |
|
"grad_norm": 0.007102708797901869, |
|
"learning_rate": 0.0008296296296296296, |
|
"loss": 0.6731, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.2549291863371286, |
|
"grad_norm": 0.004556303843855858, |
|
"learning_rate": 0.0008271604938271605, |
|
"loss": 0.6026, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.2571507914468203, |
|
"grad_norm": 0.004175705835223198, |
|
"learning_rate": 0.0008246913580246914, |
|
"loss": 0.5041, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.259372396556512, |
|
"grad_norm": 0.0031485306099057198, |
|
"learning_rate": 0.0008222222222222222, |
|
"loss": 0.5685, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.2615940016662037, |
|
"grad_norm": 0.0030771722085773945, |
|
"learning_rate": 0.0008197530864197531, |
|
"loss": 0.651, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.2638156067758954, |
|
"grad_norm": 0.0028245358262211084, |
|
"learning_rate": 0.0008172839506172839, |
|
"loss": 0.6397, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.266037211885587, |
|
"grad_norm": 0.0038441738579422235, |
|
"learning_rate": 0.0008148148148148148, |
|
"loss": 0.5486, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.268258816995279, |
|
"grad_norm": 0.0029330954421311617, |
|
"learning_rate": 0.0008123456790123456, |
|
"loss": 0.5605, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.270480422104971, |
|
"grad_norm": 0.0047192759811878204, |
|
"learning_rate": 0.0008098765432098767, |
|
"loss": 0.5762, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.2727020272146627, |
|
"grad_norm": 0.004585467744618654, |
|
"learning_rate": 0.0008074074074074075, |
|
"loss": 0.6548, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.2749236323243545, |
|
"grad_norm": 0.004374127369374037, |
|
"learning_rate": 0.0008049382716049384, |
|
"loss": 0.5263, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.277145237434046, |
|
"grad_norm": 0.0028513516299426556, |
|
"learning_rate": 0.0008024691358024692, |
|
"loss": 0.6538, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.279366842543738, |
|
"grad_norm": 0.0029575314838439226, |
|
"learning_rate": 0.0008, |
|
"loss": 0.7316, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.2815884476534296, |
|
"grad_norm": 0.0030439943075180054, |
|
"learning_rate": 0.0007975308641975309, |
|
"loss": 0.6945, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.2838100527631213, |
|
"grad_norm": 0.003041263669729233, |
|
"learning_rate": 0.0007950617283950617, |
|
"loss": 0.5912, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.286031657872813, |
|
"grad_norm": 0.004571863915771246, |
|
"learning_rate": 0.0007925925925925926, |
|
"loss": 0.6742, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.2882532629825048, |
|
"grad_norm": 0.002520073438063264, |
|
"learning_rate": 0.0007901234567901235, |
|
"loss": 0.5184, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.2904748680921965, |
|
"grad_norm": 0.003077463945373893, |
|
"learning_rate": 0.0007876543209876544, |
|
"loss": 0.7712, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.292696473201888, |
|
"grad_norm": 0.0033735015895217657, |
|
"learning_rate": 0.0007851851851851852, |
|
"loss": 0.6573, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.2949180783115803, |
|
"grad_norm": 0.0035345982760190964, |
|
"learning_rate": 0.000782716049382716, |
|
"loss": 0.5874, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.297139683421272, |
|
"grad_norm": 0.009167306125164032, |
|
"learning_rate": 0.0007802469135802469, |
|
"loss": 0.6943, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.2993612885309638, |
|
"grad_norm": 0.0029318418819457293, |
|
"learning_rate": 0.0007777777777777777, |
|
"loss": 0.5472, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.3015828936406555, |
|
"grad_norm": 0.006966088432818651, |
|
"learning_rate": 0.0007753086419753086, |
|
"loss": 0.6548, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.303804498750347, |
|
"grad_norm": 0.0032308287918567657, |
|
"learning_rate": 0.0007728395061728396, |
|
"loss": 0.4253, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.306026103860039, |
|
"grad_norm": 0.0036757299676537514, |
|
"learning_rate": 0.0007703703703703705, |
|
"loss": 0.7802, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.3082477089697306, |
|
"grad_norm": 0.003490788396447897, |
|
"learning_rate": 0.0007679012345679013, |
|
"loss": 0.5233, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.3104693140794224, |
|
"grad_norm": 0.0038194290827959776, |
|
"learning_rate": 0.0007654320987654322, |
|
"loss": 0.6783, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.312690919189114, |
|
"grad_norm": 0.0034465447533875704, |
|
"learning_rate": 0.000762962962962963, |
|
"loss": 0.5965, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.314912524298806, |
|
"grad_norm": 0.0025339655112475157, |
|
"learning_rate": 0.0007604938271604939, |
|
"loss": 0.5269, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.3171341294084975, |
|
"grad_norm": 0.003927841316908598, |
|
"learning_rate": 0.0007580246913580247, |
|
"loss": 0.5006, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.319355734518189, |
|
"grad_norm": 0.0037475882563740015, |
|
"learning_rate": 0.0007555555555555555, |
|
"loss": 0.736, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.321577339627881, |
|
"grad_norm": 0.0032975671347230673, |
|
"learning_rate": 0.0007530864197530865, |
|
"loss": 0.553, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.3237989447375726, |
|
"grad_norm": 0.0042790621519088745, |
|
"learning_rate": 0.0007506172839506173, |
|
"loss": 0.8489, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.326020549847265, |
|
"grad_norm": 0.005435307510197163, |
|
"learning_rate": 0.0007481481481481482, |
|
"loss": 0.6543, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.3282421549569565, |
|
"grad_norm": 0.003635212080553174, |
|
"learning_rate": 0.000745679012345679, |
|
"loss": 0.5125, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.3304637600666482, |
|
"grad_norm": 0.003130897181108594, |
|
"learning_rate": 0.0007432098765432099, |
|
"loss": 0.5689, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.33268536517634, |
|
"grad_norm": 0.0036147863138467073, |
|
"learning_rate": 0.0007407407407407407, |
|
"loss": 0.6393, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.3349069702860317, |
|
"grad_norm": 0.0032868238631635904, |
|
"learning_rate": 0.0007382716049382717, |
|
"loss": 0.5392, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.3371285753957234, |
|
"grad_norm": 0.0031462872866541147, |
|
"learning_rate": 0.0007358024691358025, |
|
"loss": 0.6029, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.339350180505415, |
|
"grad_norm": 0.002507926896214485, |
|
"learning_rate": 0.0007333333333333333, |
|
"loss": 0.6052, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.341571785615107, |
|
"grad_norm": 0.0035951449535787106, |
|
"learning_rate": 0.0007308641975308643, |
|
"loss": 0.5695, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.3437933907247985, |
|
"grad_norm": 0.003026745980605483, |
|
"learning_rate": 0.0007283950617283951, |
|
"loss": 0.5688, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.3460149958344902, |
|
"grad_norm": 0.003848330583423376, |
|
"learning_rate": 0.000725925925925926, |
|
"loss": 0.5459, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.3482366009441824, |
|
"grad_norm": 0.0019765642937272787, |
|
"learning_rate": 0.0007234567901234568, |
|
"loss": 0.4718, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.350458206053874, |
|
"grad_norm": 0.002635281765833497, |
|
"learning_rate": 0.0007209876543209877, |
|
"loss": 0.567, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.352679811163566, |
|
"grad_norm": 0.005967604462057352, |
|
"learning_rate": 0.0007185185185185185, |
|
"loss": 0.5747, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.3549014162732576, |
|
"grad_norm": 0.00438288738951087, |
|
"learning_rate": 0.0007160493827160494, |
|
"loss": 0.6649, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.3571230213829493, |
|
"grad_norm": 0.0029575556982308626, |
|
"learning_rate": 0.0007135802469135802, |
|
"loss": 0.4728, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.359344626492641, |
|
"grad_norm": 0.002983496058732271, |
|
"learning_rate": 0.0007111111111111111, |
|
"loss": 0.5236, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.3615662316023327, |
|
"grad_norm": 0.004713087808340788, |
|
"learning_rate": 0.000708641975308642, |
|
"loss": 0.5725, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.3637878367120244, |
|
"grad_norm": 0.0026739819440990686, |
|
"learning_rate": 0.0007061728395061728, |
|
"loss": 0.4965, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.366009441821716, |
|
"grad_norm": 0.002458624541759491, |
|
"learning_rate": 0.0007037037037037037, |
|
"loss": 0.5445, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.368231046931408, |
|
"grad_norm": 0.003967548720538616, |
|
"learning_rate": 0.0007012345679012346, |
|
"loss": 0.6989, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.3704526520410996, |
|
"grad_norm": 0.003330156672745943, |
|
"learning_rate": 0.0006987654320987655, |
|
"loss": 0.5463, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.3726742571507913, |
|
"grad_norm": 0.003007511142641306, |
|
"learning_rate": 0.0006962962962962963, |
|
"loss": 0.5579, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.374895862260483, |
|
"grad_norm": 0.016308341175317764, |
|
"learning_rate": 0.0006938271604938273, |
|
"loss": 0.5285, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.3771174673701747, |
|
"grad_norm": 0.0036616057623177767, |
|
"learning_rate": 0.0006913580246913581, |
|
"loss": 0.6642, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.379339072479867, |
|
"grad_norm": 0.0035557104274630547, |
|
"learning_rate": 0.000688888888888889, |
|
"loss": 0.5241, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.3815606775895586, |
|
"grad_norm": 0.0035361377522349358, |
|
"learning_rate": 0.0006864197530864198, |
|
"loss": 0.648, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.3837822826992503, |
|
"grad_norm": 0.003730503376573324, |
|
"learning_rate": 0.0006839506172839506, |
|
"loss": 0.514, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.386003887808942, |
|
"grad_norm": 0.004067933186888695, |
|
"learning_rate": 0.0006814814814814815, |
|
"loss": 0.5257, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.3882254929186337, |
|
"grad_norm": 0.0020785662345588207, |
|
"learning_rate": 0.0006790123456790123, |
|
"loss": 0.6464, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.3904470980283254, |
|
"grad_norm": 0.002413610927760601, |
|
"learning_rate": 0.0006765432098765432, |
|
"loss": 0.5519, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.392668703138017, |
|
"grad_norm": 0.007650860119611025, |
|
"learning_rate": 0.0006740740740740741, |
|
"loss": 0.5797, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.394890308247709, |
|
"grad_norm": 0.002815013052895665, |
|
"learning_rate": 0.000671604938271605, |
|
"loss": 0.6294, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.3971119133574006, |
|
"grad_norm": 0.0022983052767813206, |
|
"learning_rate": 0.0006691358024691358, |
|
"loss": 0.4428, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.3993335184670923, |
|
"grad_norm": 0.003167651128023863, |
|
"learning_rate": 0.0006666666666666666, |
|
"loss": 0.467, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.4015551235767845, |
|
"grad_norm": 0.00482570007443428, |
|
"learning_rate": 0.0006641975308641976, |
|
"loss": 0.6744, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.403776728686476, |
|
"grad_norm": 0.005482307635247707, |
|
"learning_rate": 0.0006617283950617284, |
|
"loss": 0.5776, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.405998333796168, |
|
"grad_norm": 0.0035053386818617582, |
|
"learning_rate": 0.0006592592592592593, |
|
"loss": 0.4751, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.4082199389058596, |
|
"grad_norm": 0.0036530292127281427, |
|
"learning_rate": 0.0006567901234567901, |
|
"loss": 0.7041, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.4104415440155513, |
|
"grad_norm": 0.004132540430873632, |
|
"learning_rate": 0.000654320987654321, |
|
"loss": 0.5617, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.412663149125243, |
|
"grad_norm": 0.004573510028421879, |
|
"learning_rate": 0.0006518518518518518, |
|
"loss": 0.4957, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.4148847542349348, |
|
"grad_norm": 0.004237889777868986, |
|
"learning_rate": 0.0006493827160493827, |
|
"loss": 0.5725, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.4171063593446265, |
|
"grad_norm": 0.00312399142421782, |
|
"learning_rate": 0.0006469135802469136, |
|
"loss": 0.5986, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.419327964454318, |
|
"grad_norm": 0.003359878435730934, |
|
"learning_rate": 0.0006444444444444444, |
|
"loss": 0.5232, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.42154956956401, |
|
"grad_norm": 0.005386237986385822, |
|
"learning_rate": 0.0006419753086419753, |
|
"loss": 0.4914, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.4237711746737016, |
|
"grad_norm": 0.009064272977411747, |
|
"learning_rate": 0.0006395061728395061, |
|
"loss": 0.5483, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.4259927797833933, |
|
"grad_norm": 0.0025790943764150143, |
|
"learning_rate": 0.0006370370370370371, |
|
"loss": 0.5408, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.428214384893085, |
|
"grad_norm": 0.004174118861556053, |
|
"learning_rate": 0.0006345679012345679, |
|
"loss": 0.5581, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.4304359900027768, |
|
"grad_norm": 0.004470566753298044, |
|
"learning_rate": 0.0006320987654320988, |
|
"loss": 0.5416, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.432657595112469, |
|
"grad_norm": 0.003054042812436819, |
|
"learning_rate": 0.0006296296296296296, |
|
"loss": 0.5733, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.4348792002221606, |
|
"grad_norm": 0.005424006376415491, |
|
"learning_rate": 0.0006271604938271606, |
|
"loss": 0.7099, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.4371008053318524, |
|
"grad_norm": 0.004061280284076929, |
|
"learning_rate": 0.0006246913580246914, |
|
"loss": 0.6355, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.439322410441544, |
|
"grad_norm": 0.004855146631598473, |
|
"learning_rate": 0.0006222222222222223, |
|
"loss": 0.5481, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.441544015551236, |
|
"grad_norm": 0.002916454104706645, |
|
"learning_rate": 0.0006197530864197531, |
|
"loss": 0.6916, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.4437656206609275, |
|
"grad_norm": 0.003550430526956916, |
|
"learning_rate": 0.0006172839506172839, |
|
"loss": 0.5293, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.4459872257706192, |
|
"grad_norm": 0.00397482980042696, |
|
"learning_rate": 0.0006148148148148148, |
|
"loss": 0.5562, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.448208830880311, |
|
"grad_norm": 0.006127642467617989, |
|
"learning_rate": 0.0006123456790123456, |
|
"loss": 0.7633, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.4504304359900027, |
|
"grad_norm": 0.002872834214940667, |
|
"learning_rate": 0.0006098765432098766, |
|
"loss": 0.4499, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.4526520410996944, |
|
"grad_norm": 0.0024833932984620333, |
|
"learning_rate": 0.0006074074074074074, |
|
"loss": 0.5494, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.4548736462093865, |
|
"grad_norm": 0.0026254167314618826, |
|
"learning_rate": 0.0006049382716049383, |
|
"loss": 0.5676, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.4570952513190782, |
|
"grad_norm": 0.0036874699871987104, |
|
"learning_rate": 0.0006024691358024691, |
|
"loss": 0.5925, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.45931685642877, |
|
"grad_norm": 0.005146813113242388, |
|
"learning_rate": 0.0006000000000000001, |
|
"loss": 0.6584, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.4615384615384617, |
|
"grad_norm": 0.0026747530791908503, |
|
"learning_rate": 0.0005975308641975309, |
|
"loss": 0.6409, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.4637600666481534, |
|
"grad_norm": 0.004561138339340687, |
|
"learning_rate": 0.0005950617283950617, |
|
"loss": 0.4832, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.465981671757845, |
|
"grad_norm": 0.0025099292397499084, |
|
"learning_rate": 0.0005925925925925926, |
|
"loss": 0.6076, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.468203276867537, |
|
"grad_norm": 0.005029266234487295, |
|
"learning_rate": 0.0005901234567901235, |
|
"loss": 0.5094, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.4704248819772285, |
|
"grad_norm": 0.0034568537957966328, |
|
"learning_rate": 0.0005876543209876544, |
|
"loss": 0.6715, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.4726464870869203, |
|
"grad_norm": 0.0031753075309097767, |
|
"learning_rate": 0.0005851851851851852, |
|
"loss": 0.8845, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.474868092196612, |
|
"grad_norm": 0.003458821913227439, |
|
"learning_rate": 0.0005827160493827161, |
|
"loss": 0.8207, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.4770896973063037, |
|
"grad_norm": 0.0038800067268311977, |
|
"learning_rate": 0.0005802469135802469, |
|
"loss": 0.6511, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.4793113024159954, |
|
"grad_norm": 0.002908184193074703, |
|
"learning_rate": 0.0005777777777777778, |
|
"loss": 0.5199, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.481532907525687, |
|
"grad_norm": 0.0035255809780210257, |
|
"learning_rate": 0.0005753086419753086, |
|
"loss": 0.5549, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.483754512635379, |
|
"grad_norm": 0.003498326987028122, |
|
"learning_rate": 0.0005728395061728395, |
|
"loss": 0.6323, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.485976117745071, |
|
"grad_norm": 0.003741385880857706, |
|
"learning_rate": 0.0005703703703703704, |
|
"loss": 0.5352, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.4881977228547627, |
|
"grad_norm": 0.003014420159161091, |
|
"learning_rate": 0.0005679012345679012, |
|
"loss": 0.5901, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.4904193279644544, |
|
"grad_norm": 0.0028464593924582005, |
|
"learning_rate": 0.0005654320987654321, |
|
"loss": 0.5738, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.492640933074146, |
|
"grad_norm": 0.003817921504378319, |
|
"learning_rate": 0.000562962962962963, |
|
"loss": 0.611, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.494862538183838, |
|
"grad_norm": 0.004369066096842289, |
|
"learning_rate": 0.0005604938271604939, |
|
"loss": 0.8018, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.4970841432935296, |
|
"grad_norm": 0.003815658623352647, |
|
"learning_rate": 0.0005580246913580247, |
|
"loss": 0.6316, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.4993057484032213, |
|
"grad_norm": 0.0025187828578054905, |
|
"learning_rate": 0.0005555555555555556, |
|
"loss": 0.6251, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.501527353512913, |
|
"grad_norm": 0.0034318508114665747, |
|
"learning_rate": 0.0005530864197530865, |
|
"loss": 0.6364, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.5037489586226047, |
|
"grad_norm": 0.003666200442239642, |
|
"learning_rate": 0.0005506172839506173, |
|
"loss": 0.529, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.5059705637322964, |
|
"grad_norm": 0.0033737020567059517, |
|
"learning_rate": 0.0005481481481481482, |
|
"loss": 0.499, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.5081921688419886, |
|
"grad_norm": 0.004506078083068132, |
|
"learning_rate": 0.000545679012345679, |
|
"loss": 0.4437, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.5104137739516803, |
|
"grad_norm": 0.004632357507944107, |
|
"learning_rate": 0.0005432098765432099, |
|
"loss": 0.5765, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.512635379061372, |
|
"grad_norm": 0.0025815838016569614, |
|
"learning_rate": 0.0005407407407407407, |
|
"loss": 0.5222, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.5148569841710637, |
|
"grad_norm": 0.002767681609839201, |
|
"learning_rate": 0.0005382716049382716, |
|
"loss": 0.6225, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.5170785892807555, |
|
"grad_norm": 0.0037222790997475386, |
|
"learning_rate": 0.0005358024691358024, |
|
"loss": 0.685, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.519300194390447, |
|
"grad_norm": 0.0034634550102055073, |
|
"learning_rate": 0.0005333333333333334, |
|
"loss": 0.5363, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.521521799500139, |
|
"grad_norm": 0.0035786996595561504, |
|
"learning_rate": 0.0005308641975308642, |
|
"loss": 0.5579, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.5237434046098306, |
|
"grad_norm": 0.0031474498100578785, |
|
"learning_rate": 0.000528395061728395, |
|
"loss": 0.6542, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.5259650097195223, |
|
"grad_norm": 0.0023730038665235043, |
|
"learning_rate": 0.000525925925925926, |
|
"loss": 0.4511, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.528186614829214, |
|
"grad_norm": 0.003312505781650543, |
|
"learning_rate": 0.0005234567901234568, |
|
"loss": 0.6185, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.5304082199389057, |
|
"grad_norm": 0.004715273156762123, |
|
"learning_rate": 0.0005209876543209877, |
|
"loss": 0.6263, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.5326298250485975, |
|
"grad_norm": 0.0033239570911973715, |
|
"learning_rate": 0.0005185185185185185, |
|
"loss": 0.5316, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.534851430158289, |
|
"grad_norm": 0.003266816260293126, |
|
"learning_rate": 0.0005160493827160495, |
|
"loss": 0.6333, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.537073035267981, |
|
"grad_norm": 0.0032662188168615103, |
|
"learning_rate": 0.0005135802469135803, |
|
"loss": 0.6425, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.5392946403776726, |
|
"grad_norm": 0.0029916493222117424, |
|
"learning_rate": 0.0005111111111111112, |
|
"loss": 0.6858, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.5415162454873648, |
|
"grad_norm": 0.003962009213864803, |
|
"learning_rate": 0.000508641975308642, |
|
"loss": 0.506, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.5437378505970565, |
|
"grad_norm": 0.003180817700922489, |
|
"learning_rate": 0.0005061728395061728, |
|
"loss": 0.6995, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.545959455706748, |
|
"grad_norm": 0.004911080002784729, |
|
"learning_rate": 0.0005037037037037037, |
|
"loss": 0.6845, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.54818106081644, |
|
"grad_norm": 0.0032035394106060266, |
|
"learning_rate": 0.0005012345679012345, |
|
"loss": 0.5308, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.5504026659261316, |
|
"grad_norm": 0.00372600881382823, |
|
"learning_rate": 0.0004987654320987654, |
|
"loss": 0.6681, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.5526242710358233, |
|
"grad_norm": 0.002806935226544738, |
|
"learning_rate": 0.0004962962962962963, |
|
"loss": 0.5279, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.554845876145515, |
|
"grad_norm": 0.002871694741770625, |
|
"learning_rate": 0.0004938271604938272, |
|
"loss": 0.6015, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.5570674812552068, |
|
"grad_norm": 0.004236211068928242, |
|
"learning_rate": 0.000491358024691358, |
|
"loss": 0.7049, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.5592890863648985, |
|
"grad_norm": 0.003672770457342267, |
|
"learning_rate": 0.000488888888888889, |
|
"loss": 0.7154, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.5615106914745907, |
|
"grad_norm": 0.0026950067840516567, |
|
"learning_rate": 0.00048641975308641976, |
|
"loss": 0.5824, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.5637322965842824, |
|
"grad_norm": 0.003053544322028756, |
|
"learning_rate": 0.0004839506172839506, |
|
"loss": 0.4743, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.565953901693974, |
|
"grad_norm": 0.006343662738800049, |
|
"learning_rate": 0.00048148148148148144, |
|
"loss": 0.5234, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.568175506803666, |
|
"grad_norm": 0.002904749009758234, |
|
"learning_rate": 0.0004790123456790124, |
|
"loss": 0.8594, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.5703971119133575, |
|
"grad_norm": 0.003864643629640341, |
|
"learning_rate": 0.00047654320987654324, |
|
"loss": 0.6856, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.5726187170230492, |
|
"grad_norm": 0.005410465411841869, |
|
"learning_rate": 0.0004740740740740741, |
|
"loss": 0.5771, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.574840322132741, |
|
"grad_norm": 0.0040793525986373425, |
|
"learning_rate": 0.0004716049382716049, |
|
"loss": 0.5899, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.5770619272424327, |
|
"grad_norm": 0.003131742123514414, |
|
"learning_rate": 0.0004691358024691358, |
|
"loss": 0.5691, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.5792835323521244, |
|
"grad_norm": 0.0028007898945361376, |
|
"learning_rate": 0.00046666666666666666, |
|
"loss": 0.8081, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.581505137461816, |
|
"grad_norm": 0.0028667037840932608, |
|
"learning_rate": 0.0004641975308641975, |
|
"loss": 0.6398, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.583726742571508, |
|
"grad_norm": 0.002876229351386428, |
|
"learning_rate": 0.00046172839506172835, |
|
"loss": 0.6649, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.5859483476811995, |
|
"grad_norm": 0.0037671527825295925, |
|
"learning_rate": 0.0004592592592592593, |
|
"loss": 0.7111, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.5881699527908912, |
|
"grad_norm": 0.004723631776869297, |
|
"learning_rate": 0.00045679012345679014, |
|
"loss": 0.4845, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.590391557900583, |
|
"grad_norm": 0.004219359718263149, |
|
"learning_rate": 0.000454320987654321, |
|
"loss": 0.6386, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.5926131630102747, |
|
"grad_norm": 0.002575064543634653, |
|
"learning_rate": 0.0004518518518518519, |
|
"loss": 0.79, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.594834768119967, |
|
"grad_norm": 0.0030822402331978083, |
|
"learning_rate": 0.0004493827160493827, |
|
"loss": 0.6529, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.5970563732296585, |
|
"grad_norm": 0.0026442541275173426, |
|
"learning_rate": 0.00044691358024691357, |
|
"loss": 0.6858, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.5992779783393503, |
|
"grad_norm": 0.004252356477081776, |
|
"learning_rate": 0.0004444444444444444, |
|
"loss": 0.6116, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.601499583449042, |
|
"grad_norm": 0.002785630989819765, |
|
"learning_rate": 0.00044197530864197536, |
|
"loss": 0.5769, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.6037211885587337, |
|
"grad_norm": 0.0027077009435743093, |
|
"learning_rate": 0.0004395061728395062, |
|
"loss": 0.7772, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.6059427936684254, |
|
"grad_norm": 0.0035961696412414312, |
|
"learning_rate": 0.00043703703703703705, |
|
"loss": 0.5475, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.608164398778117, |
|
"grad_norm": 0.0038466020487248898, |
|
"learning_rate": 0.0004345679012345679, |
|
"loss": 0.661, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.610386003887809, |
|
"grad_norm": 0.0033505731262266636, |
|
"learning_rate": 0.0004320987654320988, |
|
"loss": 0.5725, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.6126076089975006, |
|
"grad_norm": 0.004187949933111668, |
|
"learning_rate": 0.00042962962962962963, |
|
"loss": 0.5103, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.6148292141071927, |
|
"grad_norm": 0.009911083616316319, |
|
"learning_rate": 0.0004271604938271605, |
|
"loss": 0.6615, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.6170508192168844, |
|
"grad_norm": 0.003106280229985714, |
|
"learning_rate": 0.0004246913580246913, |
|
"loss": 0.4906, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.619272424326576, |
|
"grad_norm": 0.0043709734454751015, |
|
"learning_rate": 0.00042222222222222227, |
|
"loss": 0.7297, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.621494029436268, |
|
"grad_norm": 0.0024783313274383545, |
|
"learning_rate": 0.0004197530864197531, |
|
"loss": 0.654, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.6237156345459596, |
|
"grad_norm": 0.002610607771202922, |
|
"learning_rate": 0.00041728395061728396, |
|
"loss": 0.5909, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.6259372396556513, |
|
"grad_norm": 0.006081472150981426, |
|
"learning_rate": 0.0004148148148148148, |
|
"loss": 0.6009, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.628158844765343, |
|
"grad_norm": 0.0038016068283468485, |
|
"learning_rate": 0.0004123456790123457, |
|
"loss": 0.7115, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.6303804498750347, |
|
"grad_norm": 0.0031236426439136267, |
|
"learning_rate": 0.00040987654320987654, |
|
"loss": 0.41, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.6326020549847264, |
|
"grad_norm": 0.003347929334267974, |
|
"learning_rate": 0.0004074074074074074, |
|
"loss": 0.6007, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.634823660094418, |
|
"grad_norm": 0.00643836660310626, |
|
"learning_rate": 0.00040493827160493833, |
|
"loss": 0.5467, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.63704526520411, |
|
"grad_norm": 0.0026751633267849684, |
|
"learning_rate": 0.0004024691358024692, |
|
"loss": 0.5962, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.6392668703138016, |
|
"grad_norm": 0.005678553134202957, |
|
"learning_rate": 0.0004, |
|
"loss": 0.6097, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.6414884754234933, |
|
"grad_norm": 0.007360636722296476, |
|
"learning_rate": 0.00039753086419753086, |
|
"loss": 0.6594, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.643710080533185, |
|
"grad_norm": 0.0031560687348246574, |
|
"learning_rate": 0.00039506172839506176, |
|
"loss": 0.6907, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.6459316856428767, |
|
"grad_norm": 0.002814547624439001, |
|
"learning_rate": 0.0003925925925925926, |
|
"loss": 0.6022, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.648153290752569, |
|
"grad_norm": 0.0033059653360396624, |
|
"learning_rate": 0.00039012345679012345, |
|
"loss": 0.6396, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.6503748958622606, |
|
"grad_norm": 0.004498746711760759, |
|
"learning_rate": 0.0003876543209876543, |
|
"loss": 0.6909, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.6525965009719523, |
|
"grad_norm": 0.0029875014442950487, |
|
"learning_rate": 0.00038518518518518524, |
|
"loss": 0.485, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.654818106081644, |
|
"grad_norm": 0.0031488672830164433, |
|
"learning_rate": 0.0003827160493827161, |
|
"loss": 0.5441, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.6570397111913358, |
|
"grad_norm": 0.003440035041421652, |
|
"learning_rate": 0.0003802469135802469, |
|
"loss": 0.7539, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.6592613163010275, |
|
"grad_norm": 0.0036845544818788767, |
|
"learning_rate": 0.00037777777777777777, |
|
"loss": 0.7387, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.661482921410719, |
|
"grad_norm": 0.0035468495916575193, |
|
"learning_rate": 0.00037530864197530867, |
|
"loss": 0.6392, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.663704526520411, |
|
"grad_norm": 0.0034712713677436113, |
|
"learning_rate": 0.0003728395061728395, |
|
"loss": 0.6942, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.6659261316301026, |
|
"grad_norm": 0.0027431834023445845, |
|
"learning_rate": 0.00037037037037037035, |
|
"loss": 0.433, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.6681477367397948, |
|
"grad_norm": 0.011069080792367458, |
|
"learning_rate": 0.00036790123456790125, |
|
"loss": 0.6633, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.6703693418494865, |
|
"grad_norm": 0.008258162997663021, |
|
"learning_rate": 0.00036543209876543215, |
|
"loss": 0.4325, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.672590946959178, |
|
"grad_norm": 0.003486806061118841, |
|
"learning_rate": 0.000362962962962963, |
|
"loss": 0.5709, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.67481255206887, |
|
"grad_norm": 0.003325529396533966, |
|
"learning_rate": 0.00036049382716049383, |
|
"loss": 0.5366, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.6770341571785616, |
|
"grad_norm": 0.0030097831040620804, |
|
"learning_rate": 0.0003580246913580247, |
|
"loss": 0.577, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.6792557622882534, |
|
"grad_norm": 0.0038523420225828886, |
|
"learning_rate": 0.00035555555555555557, |
|
"loss": 0.7107, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.681477367397945, |
|
"grad_norm": 0.0037631280720233917, |
|
"learning_rate": 0.0003530864197530864, |
|
"loss": 0.6634, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.683698972507637, |
|
"grad_norm": 0.0037562635261565447, |
|
"learning_rate": 0.0003506172839506173, |
|
"loss": 0.5952, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.6859205776173285, |
|
"grad_norm": 0.004943422507494688, |
|
"learning_rate": 0.00034814814814814816, |
|
"loss": 0.7673, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.68814218272702, |
|
"grad_norm": 0.0029032754246145487, |
|
"learning_rate": 0.00034567901234567905, |
|
"loss": 0.5417, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.690363787836712, |
|
"grad_norm": 0.0029870313592255116, |
|
"learning_rate": 0.0003432098765432099, |
|
"loss": 0.5779, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.6925853929464036, |
|
"grad_norm": 0.003654767759144306, |
|
"learning_rate": 0.00034074074074074074, |
|
"loss": 0.5605, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.6948069980560954, |
|
"grad_norm": 0.00890091247856617, |
|
"learning_rate": 0.0003382716049382716, |
|
"loss": 0.637, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.697028603165787, |
|
"grad_norm": 0.003046605037525296, |
|
"learning_rate": 0.0003358024691358025, |
|
"loss": 0.5595, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.699250208275479, |
|
"grad_norm": 0.0023681134916841984, |
|
"learning_rate": 0.0003333333333333333, |
|
"loss": 0.6964, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.701471813385171, |
|
"grad_norm": 0.0029847752302885056, |
|
"learning_rate": 0.0003308641975308642, |
|
"loss": 0.6097, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.7036934184948627, |
|
"grad_norm": 0.005827764980494976, |
|
"learning_rate": 0.00032839506172839506, |
|
"loss": 0.5802, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.7059150236045544, |
|
"grad_norm": 0.008738023228943348, |
|
"learning_rate": 0.0003259259259259259, |
|
"loss": 0.7378, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.708136628714246, |
|
"grad_norm": 0.004061623010784388, |
|
"learning_rate": 0.0003234567901234568, |
|
"loss": 0.6561, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.710358233823938, |
|
"grad_norm": 0.003264094004407525, |
|
"learning_rate": 0.00032098765432098765, |
|
"loss": 0.6734, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.7125798389336295, |
|
"grad_norm": 0.002878582803532481, |
|
"learning_rate": 0.00031851851851851854, |
|
"loss": 0.5257, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.7148014440433212, |
|
"grad_norm": 0.0038063591346144676, |
|
"learning_rate": 0.0003160493827160494, |
|
"loss": 0.6192, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.717023049153013, |
|
"grad_norm": 0.0025181041564792395, |
|
"learning_rate": 0.0003135802469135803, |
|
"loss": 0.616, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.7192446542627047, |
|
"grad_norm": 0.003332714084535837, |
|
"learning_rate": 0.0003111111111111111, |
|
"loss": 0.5767, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.7214662593723964, |
|
"grad_norm": 0.004335831850767136, |
|
"learning_rate": 0.00030864197530864197, |
|
"loss": 0.6023, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.7236878644820885, |
|
"grad_norm": 0.0024720283690840006, |
|
"learning_rate": 0.0003061728395061728, |
|
"loss": 0.5528, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.7259094695917803, |
|
"grad_norm": 0.005799434147775173, |
|
"learning_rate": 0.0003037037037037037, |
|
"loss": 0.6276, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.728131074701472, |
|
"grad_norm": 0.006058346480131149, |
|
"learning_rate": 0.00030123456790123455, |
|
"loss": 0.5782, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.7303526798111637, |
|
"grad_norm": 0.0030225799418985844, |
|
"learning_rate": 0.00029876543209876545, |
|
"loss": 0.5824, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.7325742849208554, |
|
"grad_norm": 0.01594926044344902, |
|
"learning_rate": 0.0002962962962962963, |
|
"loss": 0.6547, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.734795890030547, |
|
"grad_norm": 0.0027183282654732466, |
|
"learning_rate": 0.0002938271604938272, |
|
"loss": 0.6479, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.737017495140239, |
|
"grad_norm": 0.010722714476287365, |
|
"learning_rate": 0.00029135802469135803, |
|
"loss": 0.5311, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.7392391002499306, |
|
"grad_norm": 0.0027126965578645468, |
|
"learning_rate": 0.0002888888888888889, |
|
"loss": 0.4191, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.7414607053596223, |
|
"grad_norm": 0.0027542004827409983, |
|
"learning_rate": 0.00028641975308641977, |
|
"loss": 0.6691, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.743682310469314, |
|
"grad_norm": 0.003709719516336918, |
|
"learning_rate": 0.0002839506172839506, |
|
"loss": 0.4843, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.7459039155790057, |
|
"grad_norm": 0.0035991393961012363, |
|
"learning_rate": 0.0002814814814814815, |
|
"loss": 0.6054, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.7481255206886974, |
|
"grad_norm": 0.002744116587564349, |
|
"learning_rate": 0.00027901234567901236, |
|
"loss": 0.6879, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.750347125798389, |
|
"grad_norm": 0.004210659768432379, |
|
"learning_rate": 0.00027654320987654325, |
|
"loss": 0.5684, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.752568730908081, |
|
"grad_norm": 0.0034985432866960764, |
|
"learning_rate": 0.0002740740740740741, |
|
"loss": 0.4931, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.7547903360177726, |
|
"grad_norm": 0.012942667119204998, |
|
"learning_rate": 0.00027160493827160494, |
|
"loss": 0.5202, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.7570119411274647, |
|
"grad_norm": 0.0034006896894425154, |
|
"learning_rate": 0.0002691358024691358, |
|
"loss": 0.7363, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.7592335462371564, |
|
"grad_norm": 0.002795079257339239, |
|
"learning_rate": 0.0002666666666666667, |
|
"loss": 0.5581, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.761455151346848, |
|
"grad_norm": 0.0036135506816208363, |
|
"learning_rate": 0.0002641975308641975, |
|
"loss": 0.6858, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.76367675645654, |
|
"grad_norm": 0.0034595513716340065, |
|
"learning_rate": 0.0002617283950617284, |
|
"loss": 0.5736, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.7658983615662316, |
|
"grad_norm": 0.003965721465647221, |
|
"learning_rate": 0.00025925925925925926, |
|
"loss": 0.4689, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.7681199666759233, |
|
"grad_norm": 0.004251439590007067, |
|
"learning_rate": 0.00025679012345679016, |
|
"loss": 0.4203, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.770341571785615, |
|
"grad_norm": 0.004119568970054388, |
|
"learning_rate": 0.000254320987654321, |
|
"loss": 0.5342, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.7725631768953067, |
|
"grad_norm": 0.0036970556247979403, |
|
"learning_rate": 0.00025185185185185185, |
|
"loss": 0.7664, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.7747847820049985, |
|
"grad_norm": 0.005256022792309523, |
|
"learning_rate": 0.0002493827160493827, |
|
"loss": 0.6459, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.7770063871146906, |
|
"grad_norm": 0.004164376296103001, |
|
"learning_rate": 0.0002469135802469136, |
|
"loss": 0.626, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.7792279922243823, |
|
"grad_norm": 0.003417801344767213, |
|
"learning_rate": 0.0002444444444444445, |
|
"loss": 0.7121, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.781449597334074, |
|
"grad_norm": 0.0032853151205927134, |
|
"learning_rate": 0.0002419753086419753, |
|
"loss": 0.6877, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.7836712024437658, |
|
"grad_norm": 0.0036194906570017338, |
|
"learning_rate": 0.0002395061728395062, |
|
"loss": 0.6578, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.7858928075534575, |
|
"grad_norm": 0.0030540688894689083, |
|
"learning_rate": 0.00023703703703703704, |
|
"loss": 0.7105, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.788114412663149, |
|
"grad_norm": 0.002393856178969145, |
|
"learning_rate": 0.0002345679012345679, |
|
"loss": 0.57, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.790336017772841, |
|
"grad_norm": 0.0026177624240517616, |
|
"learning_rate": 0.00023209876543209875, |
|
"loss": 0.548, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.7925576228825326, |
|
"grad_norm": 0.0030573352705687284, |
|
"learning_rate": 0.00022962962962962965, |
|
"loss": 0.5298, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.7947792279922243, |
|
"grad_norm": 0.0029401860665529966, |
|
"learning_rate": 0.0002271604938271605, |
|
"loss": 0.5248, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.797000833101916, |
|
"grad_norm": 0.0033551547676324844, |
|
"learning_rate": 0.00022469135802469136, |
|
"loss": 0.6293, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.7992224382116078, |
|
"grad_norm": 0.003985394258052111, |
|
"learning_rate": 0.0002222222222222222, |
|
"loss": 0.7248, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.8014440433212995, |
|
"grad_norm": 0.0034075267612934113, |
|
"learning_rate": 0.0002197530864197531, |
|
"loss": 0.6553, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.803665648430991, |
|
"grad_norm": 0.0031692166812717915, |
|
"learning_rate": 0.00021728395061728395, |
|
"loss": 0.7047, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.805887253540683, |
|
"grad_norm": 0.003578015835955739, |
|
"learning_rate": 0.00021481481481481482, |
|
"loss": 0.4964, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.8081088586503746, |
|
"grad_norm": 0.0034794467501342297, |
|
"learning_rate": 0.00021234567901234566, |
|
"loss": 0.8116, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.810330463760067, |
|
"grad_norm": 0.003644336946308613, |
|
"learning_rate": 0.00020987654320987656, |
|
"loss": 0.6715, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.8125520688697585, |
|
"grad_norm": 0.004753046203404665, |
|
"learning_rate": 0.0002074074074074074, |
|
"loss": 0.6864, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.81477367397945, |
|
"grad_norm": 0.0034688313025981188, |
|
"learning_rate": 0.00020493827160493827, |
|
"loss": 0.6755, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.816995279089142, |
|
"grad_norm": 0.003245983738452196, |
|
"learning_rate": 0.00020246913580246917, |
|
"loss": 0.5707, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.8192168841988337, |
|
"grad_norm": 0.003639776026830077, |
|
"learning_rate": 0.0002, |
|
"loss": 0.7689, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.8214384893085254, |
|
"grad_norm": 0.00351712410338223, |
|
"learning_rate": 0.00019753086419753088, |
|
"loss": 0.5325, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.823660094418217, |
|
"grad_norm": 0.002562894020229578, |
|
"learning_rate": 0.00019506172839506172, |
|
"loss": 0.626, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.825881699527909, |
|
"grad_norm": 0.003236958524212241, |
|
"learning_rate": 0.00019259259259259262, |
|
"loss": 0.4666, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.8281033046376005, |
|
"grad_norm": 0.0036686372477561235, |
|
"learning_rate": 0.00019012345679012346, |
|
"loss": 0.6194, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.8303249097472927, |
|
"grad_norm": 0.0032101618126034737, |
|
"learning_rate": 0.00018765432098765433, |
|
"loss": 0.4723, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.8325465148569844, |
|
"grad_norm": 0.0028950897976756096, |
|
"learning_rate": 0.00018518518518518518, |
|
"loss": 0.5364, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.834768119966676, |
|
"grad_norm": 0.004062660038471222, |
|
"learning_rate": 0.00018271604938271607, |
|
"loss": 0.5738, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.836989725076368, |
|
"grad_norm": 0.003023022785782814, |
|
"learning_rate": 0.00018024691358024692, |
|
"loss": 0.6205, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.8392113301860595, |
|
"grad_norm": 0.003414308186620474, |
|
"learning_rate": 0.00017777777777777779, |
|
"loss": 0.7143, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.8414329352957512, |
|
"grad_norm": 0.002360349288210273, |
|
"learning_rate": 0.00017530864197530866, |
|
"loss": 0.485, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.843654540405443, |
|
"grad_norm": 0.0031187781132757664, |
|
"learning_rate": 0.00017283950617283953, |
|
"loss": 0.5694, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.8458761455151347, |
|
"grad_norm": 0.0037832462694495916, |
|
"learning_rate": 0.00017037037037037037, |
|
"loss": 0.6778, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.8480977506248264, |
|
"grad_norm": 0.003156352788209915, |
|
"learning_rate": 0.00016790123456790124, |
|
"loss": 0.4658, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.850319355734518, |
|
"grad_norm": 0.003529968671500683, |
|
"learning_rate": 0.0001654320987654321, |
|
"loss": 0.5322, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.85254096084421, |
|
"grad_norm": 0.005195183213800192, |
|
"learning_rate": 0.00016296296296296295, |
|
"loss": 0.6196, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.8547625659539015, |
|
"grad_norm": 0.0050943163223564625, |
|
"learning_rate": 0.00016049382716049382, |
|
"loss": 0.5934, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.8569841710635933, |
|
"grad_norm": 0.003997886553406715, |
|
"learning_rate": 0.0001580246913580247, |
|
"loss": 0.7084, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.859205776173285, |
|
"grad_norm": 0.0034028280060738325, |
|
"learning_rate": 0.00015555555555555556, |
|
"loss": 0.6223, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.8614273812829767, |
|
"grad_norm": 0.0027945416513830423, |
|
"learning_rate": 0.0001530864197530864, |
|
"loss": 0.6469, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.863648986392669, |
|
"grad_norm": 0.002943682251498103, |
|
"learning_rate": 0.00015061728395061728, |
|
"loss": 0.6867, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.8658705915023606, |
|
"grad_norm": 0.0031444213818758726, |
|
"learning_rate": 0.00014814814814814815, |
|
"loss": 0.519, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.8680921966120523, |
|
"grad_norm": 0.0026177004911005497, |
|
"learning_rate": 0.00014567901234567902, |
|
"loss": 0.6096, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.870313801721744, |
|
"grad_norm": 0.002935094991698861, |
|
"learning_rate": 0.00014320987654320989, |
|
"loss": 0.6899, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.8725354068314357, |
|
"grad_norm": 0.0028896513395011425, |
|
"learning_rate": 0.00014074074074074076, |
|
"loss": 0.7482, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.8747570119411274, |
|
"grad_norm": 0.00413087010383606, |
|
"learning_rate": 0.00013827160493827163, |
|
"loss": 0.8511, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.876978617050819, |
|
"grad_norm": 0.003740635933354497, |
|
"learning_rate": 0.00013580246913580247, |
|
"loss": 0.6157, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.879200222160511, |
|
"grad_norm": 0.002238227752968669, |
|
"learning_rate": 0.00013333333333333334, |
|
"loss": 0.5067, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.8814218272702026, |
|
"grad_norm": 0.0026507952716201544, |
|
"learning_rate": 0.0001308641975308642, |
|
"loss": 0.5867, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.8836434323798947, |
|
"grad_norm": 0.0034062659833580256, |
|
"learning_rate": 0.00012839506172839508, |
|
"loss": 0.5274, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.8858650374895864, |
|
"grad_norm": 0.0057266768999397755, |
|
"learning_rate": 0.00012592592592592592, |
|
"loss": 0.5766, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.888086642599278, |
|
"grad_norm": 0.0024858866818249226, |
|
"learning_rate": 0.0001234567901234568, |
|
"loss": 0.5223, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.89030824770897, |
|
"grad_norm": 0.0042802924290299416, |
|
"learning_rate": 0.00012098765432098765, |
|
"loss": 0.8749, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.8925298528186616, |
|
"grad_norm": 0.003919759299606085, |
|
"learning_rate": 0.00011851851851851852, |
|
"loss": 0.5534, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.8947514579283533, |
|
"grad_norm": 0.0030064480379223824, |
|
"learning_rate": 0.00011604938271604938, |
|
"loss": 0.4913, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.896973063038045, |
|
"grad_norm": 0.0021056190598756075, |
|
"learning_rate": 0.00011358024691358025, |
|
"loss": 0.407, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.8991946681477367, |
|
"grad_norm": 0.002747428370639682, |
|
"learning_rate": 0.0001111111111111111, |
|
"loss": 0.573, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.9014162732574285, |
|
"grad_norm": 0.0034187480341643095, |
|
"learning_rate": 0.00010864197530864197, |
|
"loss": 0.555, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.90363787836712, |
|
"grad_norm": 0.0028815080877393484, |
|
"learning_rate": 0.00010617283950617283, |
|
"loss": 0.5424, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.905859483476812, |
|
"grad_norm": 0.004015353973954916, |
|
"learning_rate": 0.0001037037037037037, |
|
"loss": 0.646, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.9080810885865036, |
|
"grad_norm": 0.0040834201499819756, |
|
"learning_rate": 0.00010123456790123458, |
|
"loss": 0.6989, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.9103026936961953, |
|
"grad_norm": 0.0023935800418257713, |
|
"learning_rate": 9.876543209876544e-05, |
|
"loss": 0.5851, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.912524298805887, |
|
"grad_norm": 0.0023539229296147823, |
|
"learning_rate": 9.629629629629631e-05, |
|
"loss": 0.4969, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.9147459039155788, |
|
"grad_norm": 0.002933288924396038, |
|
"learning_rate": 9.382716049382717e-05, |
|
"loss": 0.6214, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.916967509025271, |
|
"grad_norm": 0.004053778015077114, |
|
"learning_rate": 9.135802469135804e-05, |
|
"loss": 0.6662, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.9191891141349626, |
|
"grad_norm": 0.0033682617358863354, |
|
"learning_rate": 8.888888888888889e-05, |
|
"loss": 0.7816, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.9214107192446543, |
|
"grad_norm": 0.010227394290268421, |
|
"learning_rate": 8.641975308641976e-05, |
|
"loss": 0.5627, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.923632324354346, |
|
"grad_norm": 0.003825727617368102, |
|
"learning_rate": 8.395061728395062e-05, |
|
"loss": 0.4702, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.9258539294640378, |
|
"grad_norm": 0.003311006585136056, |
|
"learning_rate": 8.148148148148148e-05, |
|
"loss": 0.5711, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.9280755345737295, |
|
"grad_norm": 0.004043097607791424, |
|
"learning_rate": 7.901234567901235e-05, |
|
"loss": 0.6537, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.930297139683421, |
|
"grad_norm": 0.002848139265552163, |
|
"learning_rate": 7.65432098765432e-05, |
|
"loss": 0.6193, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.932518744793113, |
|
"grad_norm": 0.002361015183851123, |
|
"learning_rate": 7.407407407407407e-05, |
|
"loss": 0.5331, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.9347403499028046, |
|
"grad_norm": 0.004088804591447115, |
|
"learning_rate": 7.160493827160494e-05, |
|
"loss": 0.6029, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.936961955012497, |
|
"grad_norm": 0.002269380958750844, |
|
"learning_rate": 6.913580246913581e-05, |
|
"loss": 0.4867, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.9391835601221885, |
|
"grad_norm": 0.0039354427717626095, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 0.5974, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.9414051652318802, |
|
"grad_norm": 0.004085162654519081, |
|
"learning_rate": 6.419753086419754e-05, |
|
"loss": 0.5142, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.943626770341572, |
|
"grad_norm": 0.005218549631536007, |
|
"learning_rate": 6.17283950617284e-05, |
|
"loss": 0.6215, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.9458483754512637, |
|
"grad_norm": 0.002707709791138768, |
|
"learning_rate": 5.925925925925926e-05, |
|
"loss": 0.6414, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.9480699805609554, |
|
"grad_norm": 0.005078699439764023, |
|
"learning_rate": 5.679012345679012e-05, |
|
"loss": 0.5196, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.950291585670647, |
|
"grad_norm": 0.0025226271245628595, |
|
"learning_rate": 5.4320987654320986e-05, |
|
"loss": 0.5715, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.952513190780339, |
|
"grad_norm": 0.002955828094854951, |
|
"learning_rate": 5.185185185185185e-05, |
|
"loss": 0.6267, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.9547347958900305, |
|
"grad_norm": 0.004617676604539156, |
|
"learning_rate": 4.938271604938272e-05, |
|
"loss": 0.5121, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.9569564009997222, |
|
"grad_norm": 0.003442096756771207, |
|
"learning_rate": 4.691358024691358e-05, |
|
"loss": 0.6568, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.959178006109414, |
|
"grad_norm": 0.003014144254848361, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 0.6131, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.9613996112191057, |
|
"grad_norm": 0.0031045388896018267, |
|
"learning_rate": 4.197530864197531e-05, |
|
"loss": 0.67, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.9636212163287974, |
|
"grad_norm": 0.0025501151103526354, |
|
"learning_rate": 3.950617283950617e-05, |
|
"loss": 0.6671, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.965842821438489, |
|
"grad_norm": 0.0036570553202182055, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 0.5173, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.968064426548181, |
|
"grad_norm": 0.0016833347035571933, |
|
"learning_rate": 3.456790123456791e-05, |
|
"loss": 0.4934, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.970286031657873, |
|
"grad_norm": 0.003212490351870656, |
|
"learning_rate": 3.209876543209877e-05, |
|
"loss": 0.4975, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.9725076367675647, |
|
"grad_norm": 0.0029618414118885994, |
|
"learning_rate": 2.962962962962963e-05, |
|
"loss": 0.5626, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.9747292418772564, |
|
"grad_norm": 0.004005649592727423, |
|
"learning_rate": 2.7160493827160493e-05, |
|
"loss": 0.6448, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.976950846986948, |
|
"grad_norm": 0.0033238399773836136, |
|
"learning_rate": 2.469135802469136e-05, |
|
"loss": 0.6304, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.97917245209664, |
|
"grad_norm": 0.0032543411944061518, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 0.5257, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.9813940572063315, |
|
"grad_norm": 0.003472758922725916, |
|
"learning_rate": 1.9753086419753087e-05, |
|
"loss": 0.6997, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.9836156623160233, |
|
"grad_norm": 0.003682138863950968, |
|
"learning_rate": 1.7283950617283953e-05, |
|
"loss": 0.6162, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.985837267425715, |
|
"grad_norm": 0.0032471835147589445, |
|
"learning_rate": 1.4814814814814815e-05, |
|
"loss": 0.6362, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.9880588725354067, |
|
"grad_norm": 0.0025808580685406923, |
|
"learning_rate": 1.234567901234568e-05, |
|
"loss": 0.5575, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.990280477645099, |
|
"grad_norm": 0.00250285305082798, |
|
"learning_rate": 9.876543209876543e-06, |
|
"loss": 0.5708, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.9925020827547906, |
|
"grad_norm": 0.00289158639498055, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 0.5226, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.9947236878644823, |
|
"grad_norm": 0.0033831135369837284, |
|
"learning_rate": 4.938271604938272e-06, |
|
"loss": 0.4506, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.996945292974174, |
|
"grad_norm": 0.004179411567747593, |
|
"learning_rate": 2.469135802469136e-06, |
|
"loss": 0.601, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.9991668980838657, |
|
"grad_norm": 0.002582500921562314, |
|
"learning_rate": 0.0, |
|
"loss": 0.5285, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.9991668980838657, |
|
"step": 1350, |
|
"total_flos": 5.0668751263105024e+17, |
|
"train_loss": 0.6102046124140421, |
|
"train_runtime": 4348.7477, |
|
"train_samples_per_second": 4.968, |
|
"train_steps_per_second": 0.31 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1350, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.0668751263105024e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |